I'm trying to use hex color values in Swift, rather than the few standard values that UIColor provides, but I can't figure out how to do it.

For example: how would I use #ffffff as a color?


Current answer

Swift 4, combining the answers from Sulthan and Luca Torella:

extension UIColor {
    convenience init(hexFromString: String, alpha: CGFloat = 1.0) {
        var cString: String = hexFromString.trimmingCharacters(in: .whitespacesAndNewlines).uppercased()
        var rgbValue: UInt32 = 10066329 // color #999999 if the string has the wrong format

        if cString.hasPrefix("#") {
            cString.remove(at: cString.startIndex)
        }

        if cString.count == 6 {
            Scanner(string: cString).scanHexInt32(&rgbValue)
        }

        self.init(
            red: CGFloat((rgbValue & 0xFF0000) >> 16) / 255.0,
            green: CGFloat((rgbValue & 0x00FF00) >> 8) / 255.0,
            blue: CGFloat(rgbValue & 0x0000FF) / 255.0,
            alpha: alpha
        )
    }
}

Usage examples:

let myColor = UIColor(hexFromString: "4F9BF5")

let myColor = UIColor(hexFromString: "#4F9BF5")

let myColor = UIColor(hexFromString: "#4F9BF5", alpha: 0.5)
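
Note that when the string is not a valid 6-digit hex value, the initializer above falls back to the default rgbValue of 10066329 (0x999999), so bad input silently produces gray instead of failing. A minimal sketch of that behavior (the inputs here are only illustrative):

// Invalid input: the scanner never runs, so the 0x999999 default is used.
let fallbackColor = UIColor(hexFromString: "not-a-color")

// Equivalent to:
let gray = UIColor(hexFromString: "999999")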

Other answers

UIColor:

extension UIColor {

    convenience init(hex: Int) {
        let components = (
            R: CGFloat((hex >> 16) & 0xff) / 255,
            G: CGFloat((hex >> 08) & 0xff) / 255,
            B: CGFloat((hex >> 00) & 0xff) / 255
        )
        self.init(red: components.R, green: components.G, blue: components.B, alpha: 1)
    }

}

CGColor:

extension CGColor {

    class func colorWithHex(hex: Int) -> CGColor {
        return UIColor(hex: hex).cgColor
    }

}

Usage:

let purple = UIColor(hex: 0xAB47BC)
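
The CGColor helper is handy wherever Core Graphics expects a CGColor rather than a UIColor, for example on a CALayer. A minimal sketch assuming the extensions above are in scope (the view setup is only for illustration):

import UIKit

// Layer properties such as borderColor take a CGColor.
let view = UIView()
view.layer.borderWidth = 1
view.layer.borderColor = CGColor.colorWithHex(hex: 0xAB47BC)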

RGBA version, Swift 3/4

I like Luca's answer because I think it is the most elegant.

However, I don't want my colours specified as ARGB; I'd rather use RGBA. I also need to handle strings that specify one character per channel, e.g. "#FFFA".

This version also adds error throwing and strips the '#' character if it is included in the string. Here is my modified Swift version.

public enum ColourParsingError: Error {
    case invalidInput(String)
}

extension UIColor {
    public convenience init(hexString: String) throws {
        let hexString = hexString.replacingOccurrences(of: "#", with: "")
        let hex = hexString.trimmingCharacters(in: CharacterSet.alphanumerics.inverted)
        var int = UInt32()
        Scanner(string: hex).scanHexInt32(&int)
        let a, r, g, b: UInt32
        switch hex.count {
        case 3: // RGB (12-bit)
            (r, g, b, a) = ((int >> 8) * 17, (int >> 4 & 0xF) * 17, (int & 0xF) * 17, 255)
        case 4: // RGBA (16-bit), CSS-style shorthand in the form #F0FA
            (r, g, b, a) = ((int >> 12) * 17, (int >> 8 & 0xF) * 17, (int >> 4 & 0xF) * 17, (int & 0xF) * 17)
        case 6: // RGB (24-bit)
            (r, g, b, a) = (int >> 16, int >> 8 & 0xFF, int & 0xFF, 255)
        case 8: // RGBA (32-bit)
            (r, g, b, a) = (int >> 24, int >> 16 & 0xFF, int >> 8 & 0xFF, int & 0xFF)
        default:
            throw ColourParsingError.invalidInput("String is not a valid hex colour string: \(hexString)")
        }
        self.init(red: CGFloat(r) / 255, green: CGFloat(g) / 255, blue: CGFloat(b) / 255, alpha: CGFloat(a) / 255)
    }
}
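
Since this initializer throws, callers use do/try/catch (or try? to get an optional). A short usage sketch assuming the extension above; the 4-character form is the RGBA shorthand mentioned earlier:

do {
    let opaqueWhite = try UIColor(hexString: "#FFF")     // 3-digit RGB shorthand
    let translucent = try UIColor(hexString: "#FFFA")    // 4-digit RGBA shorthand
    let fullRGBA = try UIColor(hexString: "4F9BF5CC")    // 8-digit RGBA
    _ = (opaqueWhite, translucent, fullRGBA)
} catch ColourParsingError.invalidInput(let value) {
    print("Could not parse colour string: \(value)")
} catch {
    print("Unexpected error: \(error)")
}

// Or collapse failures into nil:
let maybeColor = try? UIColor(hexString: "#F54A45")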

This answer shows how to do it in Obj-C. Bridged over to Swift, it looks like this:

let rgbValue = 0xFFEEDD
let r = CGFloat((rgbValue & 0xFF0000) >> 16) / 255.0
let g = CGFloat((rgbValue & 0xFF00) >> 8) / 255.0
let b = CGFloat(rgbValue & 0xFF) / 255.0
self.backgroundColor = UIColor(red: r, green: g, blue: b, alpha: 1.0)
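
If this pattern is needed in more than one place, the same masking and shifting can be wrapped in a small helper; the function name below is hypothetical and not part of the answer above:

import UIKit

// Hypothetical helper wrapping the same bit-shifting shown above.
func color(fromRGB rgbValue: Int, alpha: CGFloat = 1.0) -> UIColor {
    let r = CGFloat((rgbValue & 0xFF0000) >> 16) / 255.0
    let g = CGFloat((rgbValue & 0x00FF00) >> 8) / 255.0
    let b = CGFloat(rgbValue & 0x0000FF) / 255.0
    return UIColor(red: r, green: g, blue: b, alpha: alpha)
}

let background = color(fromRGB: 0xFFEEDD)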

Fixes the "'scanHexInt32' was deprecated in iOS 13.0" warning.

The examples should work on Swift 2.2 and later (Swift 2.x, Swift 3.x, Swift 4.x, Swift 5.x):

extension UIColor {

    // hex sample: 0xf43737
    convenience init(_ hex: Int, alpha: Double = 1.0) {
        self.init(red: CGFloat((hex >> 16) & 0xFF) / 255.0, green: CGFloat((hex >> 8) & 0xFF) / 255.0, blue: CGFloat((hex) & 0xFF) / 255.0, alpha: CGFloat(255 * alpha) / 255)
    }

    convenience init(_ hexString: String, alpha: Double = 1.0) {
        let hex = hexString.trimmingCharacters(in: CharacterSet.alphanumerics.inverted)
        var int = UInt64()
        Scanner(string: hex).scanHexInt64(&int)

        let r, g, b: UInt64
        switch hex.count {
        case 3: // RGB (12-bit)
            (r, g, b) = ((int >> 8) * 17, (int >> 4 & 0xF) * 17, (int & 0xF) * 17)
        case 6: // RGB (24-bit)
            (r, g, b) = (int >> 16, int >> 8 & 0xFF, int & 0xFF)
        default:
            (r, g, b) = (1, 1, 0)
        }

        self.init(red: CGFloat(r) / 255, green: CGFloat(g) / 255, blue: CGFloat(b) / 255, alpha: CGFloat(255 * alpha) / 255)
    }

    convenience init(r: CGFloat, g: CGFloat, b: CGFloat, a: CGFloat = 1) {
        self.init(red: (r / 255), green: (g / 255), blue: (b / 255), alpha: a)
    }
}

Use them like this:

UIColor(0xF54A45)
UIColor(0xF54A45, alpha: 0.7)
UIColor("#f44")
UIColor("#f44", alpha: 0.7)
UIColor("#F54A45")
UIColor("#F54A45", alpha: 0.7)
UIColor("F54A45")
UIColor("F54A45", alpha: 0.7)
UIColor(r: 245.0, g: 73, b: 69)
UIColor(r: 245.0, g: 73, b: 69, a: 0.7)
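
In practice these initializers are typically called while configuring views. A short sketch assuming the extension above (the view and label here are only illustrative):

import UIKit

let containerView = UIView()
containerView.backgroundColor = UIColor(0xF54A45, alpha: 0.1)

let titleLabel = UILabel()
titleLabel.textColor = UIColor("#F54A45")
titleLabel.text = "Hex colours"
containerView.addSubview(titleLabel)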