I'm trying to use hex color values in Swift, instead of the few standard ones that UIColor provides, but I can't figure out how to do it.

Example: how would I use #ffffff as a color?


Current answer

extension UIColor {
    public convenience init?(hex: String) {
        let r, g, b, a: CGFloat

        if hex.hasPrefix("#") {
            let start = hex.index(hex.startIndex, offsetBy: 1)
            let hexColor = String(hex[start...])

            // Expects exactly 8 hex digits: RRGGBBAA (e.g. "ffffffff").
            if hexColor.count == 8 {
                let scanner = Scanner(string: hexColor)
                var hexNumber: UInt64 = 0

                if scanner.scanHexInt64(&hexNumber) {
                    r = CGFloat((hexNumber & 0xff000000) >> 24) / 255
                    g = CGFloat((hexNumber & 0x00ff0000) >> 16) / 255
                    b = CGFloat((hexNumber & 0x0000ff00) >> 8) / 255
                    a = CGFloat(hexNumber & 0x000000ff) / 255

                    self.init(red: r, green: g, blue: b, alpha: a)
                    return
                }
            }
        }

        return nil
    }
}

Usage:

let white = UIColor(hex: "#ffffffff")
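Note that this initializer only accepts the 8-digit #RRGGBBAA form, so the 6-digit #ffffff from the question would return nil here. A minimal sketch of a 6-digit variant (the hex6 label is made up for this example; alpha is assumed fully opaque):

import UIKit

extension UIColor {
    // Hypothetical 6-digit counterpart of the initializer above: "#RRGGBB", alpha fixed at 1.0.
    public convenience init?(hex6 hex: String) {
        guard hex.hasPrefix("#") else { return nil }

        let hexColor = String(hex.dropFirst())
        guard hexColor.count == 6 else { return nil }

        var hexNumber: UInt64 = 0
        guard Scanner(string: hexColor).scanHexInt64(&hexNumber) else { return nil }

        self.init(red: CGFloat((hexNumber & 0xff0000) >> 16) / 255,
                  green: CGFloat((hexNumber & 0x00ff00) >> 8) / 255,
                  blue: CGFloat(hexNumber & 0x0000ff) / 255,
                  alpha: 1.0)
    }
}

let white = UIColor(hex6: "#ffffff")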

Other answers

Just a few additions to the first answer

(an alpha check is not included; you might need to add an if netHex > 0xffffff guard, see the sketch after this snippet):

extension UIColor {

    struct COLORS_HEX {
        static let Primary = 0xffffff
        static let PrimaryDark = 0x000000
        static let Accent = 0xe89549
        static let AccentDark = 0xe27b2a
        static let TextWhiteSemiTransparent = 0x80ffffff
    }

    convenience init(red: Int, green: Int, blue: Int, alphaH: Int) {
        assert(red >= 0 && red <= 255, "Invalid red component")
        assert(green >= 0 && green <= 255, "Invalid green component")
        assert(blue >= 0 && blue <= 255, "Invalid blue component")
        assert(alphaH >= 0 && alphaH <= 255, "Invalid alpha component")

        self.init(red: CGFloat(red) / 255.0, green: CGFloat(green) / 255.0, blue: CGFloat(blue) / 255.0, alpha: CGFloat(alphaH) / 255.0)
    }

    convenience init(netHex: Int) {
        self.init(red: (netHex >> 16) & 0xff, green: (netHex >> 8) & 0xff, blue: netHex & 0xff, alphaH: (netHex >> 24) & 0xff)
    }

}
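As the note above says, a value without alpha bits, such as 0xffffff, comes out fully transparent, because (netHex >> 24) & 0xff is 0. A minimal sketch of the suggested check (the netHexChecked name is made up; it builds on the init(red:green:blue:alphaH:) defined above):

import UIKit

extension UIColor {
    // Hypothetical variant: treat values without an alpha byte as fully opaque.
    convenience init(netHexChecked netHex: Int) {
        let alphaH = netHex > 0xffffff ? (netHex >> 24) & 0xff : 0xff
        self.init(red: (netHex >> 16) & 0xff, green: (netHex >> 8) & 0xff, blue: netHex & 0xff, alphaH: alphaH)
    }
}

let primary = UIColor(netHexChecked: UIColor.COLORS_HEX.Primary)                   // opaque white
let overlay = UIColor(netHexChecked: UIColor.COLORS_HEX.TextWhiteSemiTransparent) // alpha 0x80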

Hex with validation

Based on Eduardo's answer

Details

Xcode 10.0, Swift 4.2
Xcode 10.2.1 (10E1001)

Solution

import UIKit

extension UIColor {

    convenience init(r: UInt8, g: UInt8, b: UInt8, alpha: CGFloat = 1.0) {
        let divider: CGFloat = 255.0
        self.init(red: CGFloat(r)/divider, green: CGFloat(g)/divider, blue: CGFloat(b)/divider, alpha: alpha)
    }

    private convenience init(rgbWithoutValidation value: Int32, alpha: CGFloat = 1.0) {
        self.init(
            r: UInt8((value & 0xFF0000) >> 16),
            g: UInt8((value & 0x00FF00) >> 8),
            b: UInt8(value & 0x0000FF),
            alpha: alpha
        )
    }

    convenience init?(rgb: Int32, alpha: CGFloat = 1.0) {
        if rgb > 0xFFFFFF || rgb < 0 { return nil }
        self.init(rgbWithoutValidation: rgb, alpha: alpha)
    }

    convenience init?(hex: String, alpha: CGFloat = 1.0) {
        var charSet = CharacterSet.whitespacesAndNewlines
        charSet.insert("#")
        let _hex = hex.trimmingCharacters(in: charSet)
        guard _hex.range(of: "^[0-9A-Fa-f]{6}$", options: .regularExpression) != nil else { return nil }
        var rgb: UInt32 = 0
        Scanner(string: _hex).scanHexInt32(&rgb) // deprecated as of iOS 13; see the note after the usage examples
        self.init(rgbWithoutValidation: Int32(rgb), alpha: alpha)
    }
}

Usage

let alpha: CGFloat = 1.0

// Hex
print(UIColor(rgb: 0x4F9BF5) ?? "nil")
print(UIColor(rgb: 0x4F9BF5, alpha: alpha) ?? "nil")
print(UIColor(rgb: 5217269) ?? "nil")
print(UIColor(rgb: -5217269) ?? "nil")                  // = nil
print(UIColor(rgb: 0xFFFFFF1) ?? "nil")                 // = nil

// String
print(UIColor(hex: "4F9BF5") ?? "nil")
print(UIColor(hex: "4F9BF5", alpha: alpha) ?? "nil")
print(UIColor(hex: "#4F9BF5") ?? "nil")
print(UIColor(hex: "#4F9BF5", alpha: alpha) ?? "nil")
print(UIColor(hex: "#4F9BF56") ?? "nil")                // = nil
print(UIColor(hex: "#blabla") ?? "nil")                 // = nil

// RGB
print(UIColor(r: 79, g: 155, b: 245))
print(UIColor(r: 79, g: 155, b: 245, alpha: alpha))
//print(UIColor(r: 792, g: 155, b: 245, alpha: alpha))  // Compiler will throw an error, r,g,b = [0...255]
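One caveat: scanHexInt32 used above is deprecated as of iOS 13. The 64-bit scan that the first answer uses avoids the 32-bit variant; a minimal sketch (the scanHexValue helper name is made up):

import Foundation

// Sketch: scan a hex string into a value without the deprecated scanHexInt32(_:).
func scanHexValue(_ hexString: String) -> UInt64? {
    var value: UInt64 = 0
    guard Scanner(string: hexString).scanHexInt64(&value) else { return nil }
    return value
}

A more compact take, with the hex value as an Int: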
extension UIColor {

    convenience init(hex: Int, alpha: Double = 1.0) {
        self.init(red: CGFloat((hex >> 16) & 0xFF) / 255.0, green: CGFloat((hex >> 8) & 0xFF) / 255.0, blue: CGFloat(hex & 0xFF) / 255.0, alpha: CGFloat(alpha))
    }
}

Use this extension like this:

let selectedColor = UIColor(hex: 0xFFFFFF)
let semiTransparentColor = UIColor(hex: 0xFFFFFF, alpha: 0.5)

For Swift 3

extension String {
    var hexColor: UIColor {        
        let hex = trimmingCharacters(in: CharacterSet.alphanumerics.inverted)
        var int = UInt32()       
        Scanner(string: hex).scanHexInt32(&int)
        let a, r, g, b: UInt32
        switch hex.characters.count {
        case 3: // RGB (12-bit)
            (a, r, g, b) = (255, (int >> 8) * 17, (int >> 4 & 0xF) * 17, (int & 0xF) * 17)
        case 6: // RGB (24-bit)
            (a, r, g, b) = (255, int >> 16, int >> 8 & 0xFF, int & 0xFF)
        case 8: // ARGB (32-bit)
            (a, r, g, b) = (int >> 24, int >> 16 & 0xFF, int >> 8 & 0xFF, int & 0xFF)
        default:
            return .clear
        }
        return UIColor(red: CGFloat(r) / 255, green: CGFloat(g) / 255, blue: CGFloat(b) / 255, alpha: CGFloat(a) / 255)
    }
}
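Usage, assuming the extension above:

let red = "#ff0000".hexColor
let short = "fff".hexColor           // 12-bit RGB
let semiWhite = "#80ffffff".hexColor // 32-bit ARGB
let fallback = "oops".hexColor       // invalid input falls through to .clear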

Swift 5 (Swift 4, Swift 3) UIColor extension:

extension UIColor {
    convenience init(hexString: String) {
        let hex = hexString.trimmingCharacters(in: CharacterSet.alphanumerics.inverted)
        var int = UInt64()
        Scanner(string: hex).scanHexInt64(&int)
        let a, r, g, b: UInt64
        switch hex.count {
        case 3: // RGB (12-bit)
            (a, r, g, b) = (255, (int >> 8) * 17, (int >> 4 & 0xF) * 17, (int & 0xF) * 17)
        case 6: // RGB (24-bit)
            (a, r, g, b) = (255, int >> 16, int >> 8 & 0xFF, int & 0xFF)
        case 8: // ARGB (32-bit)
            (a, r, g, b) = (int >> 24, int >> 16 & 0xFF, int >> 8 & 0xFF, int & 0xFF)
        default:
            (a, r, g, b) = (255, 0, 0, 0)
        }
        self.init(red: CGFloat(r) / 255, green: CGFloat(g) / 255, blue: CGFloat(b) / 255, alpha: CGFloat(a) / 255)
    }
}

Usage:

let darkGrey = UIColor(hexString: "#757575")
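The 12-bit and 32-bit branches of the switch work the same way:

let white = UIColor(hexString: "#fff")         // RGB (12-bit)
let semiGrey = UIColor(hexString: "#80757575") // ARGB (32-bit), alpha 0x80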

Swift 2.x version:

extension UIColor {
    convenience init(hexString: String) {
        let hex = hexString.stringByTrimmingCharactersInSet(NSCharacterSet.alphanumericCharacterSet().invertedSet)
        var int = UInt32()
        NSScanner(string: hex).scanHexInt(&int)
        let a, r, g, b: UInt32
        switch hex.characters.count {
        case 3: // RGB (12-bit)
            (a, r, g, b) = (255, (int >> 8) * 17, (int >> 4 & 0xF) * 17, (int & 0xF) * 17)
        case 6: // RGB (24-bit)
            (a, r, g, b) = (255, int >> 16, int >> 8 & 0xFF, int & 0xFF)
        case 8: // ARGB (32-bit)
            (a, r, g, b) = (int >> 24, int >> 16 & 0xFF, int >> 8 & 0xFF, int & 0xFF)
        default:
            (a, r, g, b) = (255, 0, 0, 0)
        }
        self.init(red: CGFloat(r) / 255, green: CGFloat(g) / 255, blue: CGFloat(b) / 255, alpha: CGFloat(a) / 255)
    }
}