I'm trying to use hex color values in Swift, rather than the few standard values that UIColor provides, but I can't figure out how to do it.

Example: how would I use #ffffff as a color?


Current answer

Latest Swift 3 version

import UIKit

extension UIColor {
    // Accepts "RGB" (12-bit), "RRGGBB" (24-bit) and "AARRGGBB" (32-bit)
    // hex strings, with or without a leading "#".
    convenience init(hexString: String) {
        let hex = hexString.trimmingCharacters(in: CharacterSet.alphanumerics.inverted)
        var int = UInt32()
        Scanner(string: hex).scanHexInt32(&int)
        let a, r, g, b: UInt32
        switch hex.characters.count {
        case 3: // RGB (12-bit)
            (a, r, g, b) = (255, (int >> 8) * 17, (int >> 4 & 0xF) * 17, (int & 0xF) * 17)
        case 6: // RGB (24-bit)
            (a, r, g, b) = (255, int >> 16, int >> 8 & 0xFF, int & 0xFF)
        case 8: // ARGB (32-bit)
            (a, r, g, b) = (int >> 24, int >> 16 & 0xFF, int >> 8 & 0xFF, int & 0xFF)
        default:
            (a, r, g, b) = (255, 0, 0, 0)
        }
        self.init(red: CGFloat(r) / 255, green: CGFloat(g) / 255, blue: CGFloat(b) / 255, alpha: CGFloat(a) / 255)
    }
}

Use this initializer in your class, or wherever you convert a hex color to a UIColor:

    let color1 = UIColor(hexString: "#FF323232")
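
For Swift 4 and later, `String.characters` is gone and `scanHexInt32` is deprecated; a sketch of the same initializer using the 64-bit scanner could look like this (the `hex:` label here is just my choice, to avoid clashing with the `hexString:` initializer above):

import UIKit

extension UIColor {
    convenience init(hex hexString: String) {
        let hex = hexString.trimmingCharacters(in: CharacterSet.alphanumerics.inverted)
        var int: UInt64 = 0
        Scanner(string: hex).scanHexInt64(&int)   // 64-bit replacement for scanHexInt32
        let a, r, g, b: UInt64
        switch hex.count {                        // hex.count instead of hex.characters.count
        case 3: // RGB (12-bit)
            (a, r, g, b) = (255, (int >> 8) * 17, (int >> 4 & 0xF) * 17, (int & 0xF) * 17)
        case 6: // RGB (24-bit)
            (a, r, g, b) = (255, int >> 16, int >> 8 & 0xFF, int & 0xFF)
        case 8: // ARGB (32-bit)
            (a, r, g, b) = (int >> 24, int >> 16 & 0xFF, int >> 8 & 0xFF, int & 0xFF)
        default:
            (a, r, g, b) = (255, 0, 0, 0)
        }
        self.init(red: CGFloat(r) / 255, green: CGFloat(g) / 255, blue: CGFloat(b) / 255, alpha: CGFloat(a) / 255)
    }
}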

Other answers

This answer shows how to do it in Obj-C; bridged into Swift, it looks like this:

let rgbValue = 0xFFEEDD
let r = CGFloat((rgbValue & 0xFF0000) >> 16) / 255.0  // red byte
let g = CGFloat((rgbValue & 0x00FF00) >> 8) / 255.0   // green byte
let b = CGFloat(rgbValue & 0x0000FF) / 255.0          // blue byte
self.backgroundColor = UIColor(red: r, green: g, blue: b, alpha: 1.0)
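
To see what the masking and shifting does, here is the arithmetic for 0xFFEEDD spelled out:

// red   = (0xFFEEDD & 0xFF0000) >> 16 = 0xFF = 255  ->  255/255 = 1.0
// green = (0xFFEEDD & 0x00FF00) >> 8  = 0xEE = 238  ->  238/255 ≈ 0.933
// blue  =  0xFFEEDD & 0x0000FF        = 0xDD = 221  ->  221/255 ≈ 0.867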

Supporting 7 hex color formats

There are 7 hex color formats: "#FF0000", "0xFF0000", "FF0000", "F00", "red", 0x00FF00, 16711935

NSColorParser.nsColor("#FF0000",1)//red nsColor
NSColorParser.nsColor("FF0",1)//red nsColor
NSColorParser.nsColor("0xFF0000",1)//red nsColor
NSColorParser.nsColor("#FF0000",1)//red nsColor
NSColorParser.nsColor("FF0000",1)//red nsColor
NSColorParser.nsColor(0xFF0000,1)//red nsColor
NSColorParser.nsColor(16711935,1)//red nsColor

Note: this isn't a "single-file solution"; it has a few dependencies, but tracking them down is probably faster than researching all of this from scratch.

https://github.com/eonist/swift-utils/blob/2882002682c4d2a3dc7cb3045c45f66ed59d566d/geom/color/NSColorParser.swift

Permalink: https://github.com/eonist/Element/wiki/Progress#supporting-7-hex-color-types
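
If you only need the string formats and want to avoid the dependencies, a minimal single-file sketch (a hypothetical helper, not the library's API) could look something like this:

import UIKit

// Hypothetical helper: normalizes "#FF0000", "0xFF0000", "FF0000" and
// "F00" to bare hex, then converts. The named-color and Int inputs
// that the library also supports are omitted here.
func uiColor(fromHex input: String, alpha: CGFloat = 1) -> UIColor? {
    var hex = input.lowercased()
    if hex.hasPrefix("#") { hex.removeFirst() }
    if hex.hasPrefix("0x") { hex.removeFirst(2) }
    guard hex.count == 3 || hex.count == 6,
          let int = UInt32(hex, radix: 16) else { return nil }
    let r, g, b: UInt32
    if hex.count == 3 { // expand each nibble: 0xF -> 0xFF
        (r, g, b) = ((int >> 8) * 17, (int >> 4 & 0xF) * 17, (int & 0xF) * 17)
    } else {
        (r, g, b) = (int >> 16, int >> 8 & 0xFF, int & 0xFF)
    }
    return UIColor(red: CGFloat(r) / 255, green: CGFloat(g) / 255, blue: CGFloat(b) / 255, alpha: alpha)
}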

Just some additions to the first answer

(alpha isn't checked here; you may need to add something like if netHex > 0xffffff):

extension UIColor {

    struct COLORS_HEX {
        static let Primary = 0xffffff
        static let PrimaryDark = 0x000000
        static let Accent = 0xe89549
        static let AccentDark = 0xe27b2a
        static let TextWhiteSemiTransparent = 0x80ffffff // ARGB: 50% white
    }

    convenience init(red: Int, green: Int, blue: Int, alphaH: Int) {
        assert(red >= 0 && red <= 255, "Invalid red component")
        assert(green >= 0 && green <= 255, "Invalid green component")
        assert(blue >= 0 && blue <= 255, "Invalid blue component")
        assert(alphaH >= 0 && alphaH <= 255, "Invalid alpha component")

        self.init(red: CGFloat(red) / 255.0, green: CGFloat(green) / 255.0, blue: CGFloat(blue) / 255.0, alpha: CGFloat(alphaH) / 255.0)
    }

    // Interprets netHex as ARGB: the top byte is alpha, so plain
    // 24-bit values like 0xffffff come out fully transparent.
    convenience init(netHex: Int) {
        self.init(red: (netHex >> 16) & 0xff, green: (netHex >> 8) & 0xff, blue: netHex & 0xff, alphaH: (netHex >> 24) & 0xff)
    }

}
extension UIColor {

    convenience init(r: CGFloat, g: CGFloat, b: CGFloat, a: CGFloat = 1) {
        self.init(red: r/255, green: g/255, blue: b/255, alpha: a)
    }

    // Here hex is plain 24-bit RGB; alpha is a separate parameter.
    convenience init(hex: Int, alpha: CGFloat = 1) {
        self.init(r: CGFloat((hex >> 16) & 0xff), g: CGFloat((hex >> 08) & 0xff), b: CGFloat((hex >> 00) & 0xff), a: alpha)
    }
}
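
As a usage sketch (hypothetical view-controller code), note how the two initializers treat alpha differently:

view.backgroundColor = UIColor(hex: UIColor.COLORS_HEX.Accent)             // opaque; alpha defaults to 1
let semiWhite = UIColor(netHex: UIColor.COLORS_HEX.TextWhiteSemiTransparent) // ARGB: ~50% white
// Caution: UIColor(netHex: UIColor.COLORS_HEX.Accent) would be fully
// transparent, since the top (alpha) byte of 0xe89549 is zero; that is
// the caveat noted above.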