I'm trying to use hex color values in Swift instead of the few standard ones that UIColor provides, but I can't figure out how to do it.

Example: how would I use #ffffff as a color?


Current answer

This is what I use. It works with 6- and 8-character color strings, with or without the # symbol. In release builds it falls back to black; in debug builds it asserts (crashes) when initialized with an invalid string.

extension UIColor {
    /// Creates a color from an "RRGGBB" or "RRGGBBAA" hex string, with or without a leading "#".
    public convenience init(hex: String) {
        var r: CGFloat = 0
        var g: CGFloat = 0
        var b: CGFloat = 0
        var a: CGFloat = 1

        let hexColor = hex.replacingOccurrences(of: "#", with: "")
        let scanner = Scanner(string: hexColor)
        var hexNumber: UInt64 = 0
        var valid = false

        if scanner.scanHexInt64(&hexNumber) {
            if hexColor.count == 8 {
                r = CGFloat((hexNumber & 0xff000000) >> 24) / 255
                g = CGFloat((hexNumber & 0x00ff0000) >> 16) / 255
                b = CGFloat((hexNumber & 0x0000ff00) >> 8) / 255
                a = CGFloat(hexNumber & 0x000000ff) / 255
                valid = true
            }
            else if hexColor.count == 6 {
                r = CGFloat((hexNumber & 0xff0000) >> 16) / 255
                g = CGFloat((hexNumber & 0x00ff00) >> 8) / 255
                b = CGFloat(hexNumber & 0x0000ff) / 255
                valid = true
            }
        }

        #if DEBUG
            assert(valid, "UIColor initialized with invalid hex string")
        #endif

        self.init(red: r, green: g, blue: b, alpha: a)
    }
}

Usage:

UIColor(hex: "#75CC83FF")
UIColor(hex: "75CC83FF")
UIColor(hex: "#75CC83")
UIColor(hex: "75CC83")

Other answers

Swift 5.0

You can't use the #ffffff syntax directly in Swift. Below is the code I use in web-related projects. It supports alpha and three-digit hex values.

Usage example (uppercase values also work):

    let hex = "#FADE2B"  // yellow
    let color = NSColor(fromHex: hex)

Supported string formats:

"fff" // RGB "#fff" // #RGB "ffff" // RGBA "#ffff" // #RGBA . "ffffff" // RRGGBB . "#ffffff" // #RRGGBB . "ffffffff" // RRGGBBAA . "#ffffffff" // #RRGGBBAA .

The digits represent red, green, blue and alpha (opacity). For iOS, replace NSColor with UIColor.
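For instance, taking the sample value above apart byte by byte (my own worked example, not part of the original answer):

    "#FADE2B"    →  red = 0xFA (250), green = 0xDE (222), blue = 0x2B (43), alpha defaults to 0xFF (255)
    "#FADE2B80"  →  same RGB, alpha = 0x80 (128), i.e. roughly 50% opaque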

Code:


    extension NSColor {
        /// Initialises NSColor from a hexadecimal string. Color is clear if string is invalid.
        /// - Parameter fromHex: supported formats are "#RGB", "#RGBA", "#RRGGBB", "#RRGGBBAA", with or without the # character
        public convenience init(fromHex:String) {
            var r = 0, g = 0, b = 0, a = 255
            let offset = fromHex.hasPrefix("#") ? 1 : 0
            let ch = fromHex.map{$0}
            switch(ch.count - offset) {
            case 8:
                a = 16 * (ch[offset+6].hexDigitValue ?? 0) + (ch[offset+7].hexDigitValue ?? 0)
                fallthrough
            case 6:
                r = 16 * (ch[offset+0].hexDigitValue ?? 0) + (ch[offset+1].hexDigitValue ?? 0)
                g = 16 * (ch[offset+2].hexDigitValue ?? 0) + (ch[offset+3].hexDigitValue ?? 0)
                b = 16 * (ch[offset+4].hexDigitValue ?? 0) + (ch[offset+5].hexDigitValue ?? 0)
                break
            case 4:
                a = 16 * (ch[offset+3].hexDigitValue ?? 0) + (ch[offset+3].hexDigitValue ?? 0)
                fallthrough
            case 3:  // Three digit #0D3 is the same as six digit #00DD33
                r = 16 * (ch[offset+0].hexDigitValue ?? 0) + (ch[offset+0].hexDigitValue ?? 0)
                g = 16 * (ch[offset+1].hexDigitValue ?? 0) + (ch[offset+1].hexDigitValue ?? 0)
                b = 16 * (ch[offset+2].hexDigitValue ?? 0) + (ch[offset+2].hexDigitValue ?? 0)
                break
            default:
                a = 0
                break
            }
            self.init(red: CGFloat(r)/255, green: CGFloat(g)/255, blue: CGFloat(b)/255, alpha: CGFloat(a)/255)
            
        }
    }
    // Author: Andrew Kingdom

License: CC BY

I find this tidier than the copy/paste code below.

Alternatives:

You can drop the # and store the value as a 32-bit unsigned integer literal, written with the 0x prefix, i.e. 0xffffff. You still need code to convert it into a color, though.
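A minimal sketch of that conversion, assuming you add the initializer yourself (the rgb: label is my own naming, not part of UIKit; it is essentially the same idea as the integer initializer shown further down):

import UIKit

extension UIColor {
    // Hypothetical helper: unpacks a 0xRRGGBB integer into its three channels.
    convenience init(rgb: UInt32, alpha: CGFloat = 1.0) {
        self.init(red:   CGFloat((rgb >> 16) & 0xFF) / 255,
                  green: CGFloat((rgb >> 8)  & 0xFF) / 255,
                  blue:  CGFloat(rgb         & 0xFF) / 255,
                  alpha: alpha)
    }
}

let white  = UIColor(rgb: 0xffffff)              // the #ffffff from the question
let accent = UIColor(rgb: 0x75CC83, alpha: 0.5)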

If you want a non-programmatic way to get the color: open a color picker dialog, switch to Colour Sliders > RGB Sliders, and paste/type the value into the "Hex Colour #" box. (Don't paste the # hash symbol.)

I pulled together ideas from this answer and updated them for iOS 13 and Swift 5.

extension UIColor {
  
  convenience init(_ hex: String, alpha: CGFloat = 1.0) {
    var cString = hex.trimmingCharacters(in: .whitespacesAndNewlines).uppercased()
    
    if cString.hasPrefix("#") { cString.removeFirst() }
    
    if cString.count != 6 {
      self.init("ff0000") // return red color for wrong hex input
      return
    }
    
    var rgbValue: UInt64 = 0
    Scanner(string: cString).scanHexInt64(&rgbValue)
    
    self.init(red: CGFloat((rgbValue & 0xFF0000) >> 16) / 255.0,
              green: CGFloat((rgbValue & 0x00FF00) >> 8) / 255.0,
              blue: CGFloat(rgbValue & 0x0000FF) / 255.0,
              alpha: alpha)
  }

}

Then you can use it like this:

UIColor("#ff0000") // with #
UIColor("ff0000")  // without #
UIColor("ff0000", alpha: 0.5) // using optional alpha value

This fixes the "'scanHexInt32' was deprecated in iOS 13.0" warning.
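For reference, the fix is just swapping the 32-bit scanner call for its 64-bit counterpart; a minimal before/after sketch (the sample string is my own):

import Foundation

let cString = "FF0000"

// Before: produces the "'scanHexInt32' was deprecated in iOS 13.0" warning
var rgb32: UInt32 = 0
Scanner(string: cString).scanHexInt32(&rgb32)

// After: the 64-bit variant used in the extension above
var rgb64: UInt64 = 0
Scanner(string: cString).scanHexInt64(&rgb64)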

This example should work on Swift 2.2 and later (Swift 2.x, Swift 3.x, Swift 4.x, Swift 5.x):

extension UIColor {

    // hex sample: 0xf43737
    convenience init(_ hex: Int, alpha: Double = 1.0) {
        self.init(red: CGFloat((hex >> 16) & 0xFF) / 255.0, green: CGFloat((hex >> 8) & 0xFF) / 255.0, blue: CGFloat((hex) & 0xFF) / 255.0, alpha: CGFloat(255 * alpha) / 255)
    }

    convenience init(_ hexString: String, alpha: Double = 1.0) {
        let hex = hexString.trimmingCharacters(in: CharacterSet.alphanumerics.inverted)
        var int = UInt64()
        Scanner(string: hex).scanHexInt64(&int)

        let r, g, b: UInt64
        switch hex.count {
        case 3: // RGB (12-bit)
            (r, g, b) = ((int >> 8) * 17, (int >> 4 & 0xF) * 17, (int & 0xF) * 17)
        case 6: // RGB (24-bit)
            (r, g, b) = (int >> 16, int >> 8 & 0xFF, int & 0xFF)
        default:
            (r, g, b) = (1, 1, 0)
        }

        self.init(red: CGFloat(r) / 255, green: CGFloat(g) / 255, blue: CGFloat(b) / 255, alpha: CGFloat(255 * alpha) / 255)
    }

    convenience init(r: CGFloat, g: CGFloat, b: CGFloat, a: CGFloat = 1) {
        self.init(red: (r / 255), green: (g / 255), blue: (b / 255), alpha: a)
    }
}

Use them like this:

UIColor(0xF54A45)
UIColor(0xF54A45, alpha: 0.7)
UIColor("#f44")
UIColor("#f44", alpha: 0.7)
UIColor("#F54A45")
UIColor("#F54A45", alpha: 0.7)
UIColor("F54A45")
UIColor("F54A45", alpha: 0.7)
UIColor(r: 245.0, g: 73, b: 69)
UIColor(r: 245.0, g: 73, b: 69, a: 0.7)

Swift 5

extension UIColor {

    /// Converting hex string to UIColor
    ///
    /// - Parameter hexString: input hex string
    convenience init(hexString: String) {
        let hex = hexString.trimmingCharacters(in: CharacterSet.alphanumerics.inverted)
        var int = UInt64()
        Scanner(string: hex).scanHexInt64(&int)
        let a, r, g, b: UInt64
        switch hex.count {
        case 3:
            (a, r, g, b) = (255, (int >> 8) * 17, (int >> 4 & 0xF) * 17, (int & 0xF) * 17)
        case 6:
            (a, r, g, b) = (255, int >> 16, int >> 8 & 0xFF, int & 0xFF)
        case 8:
            (a, r, g, b) = (int >> 24, int >> 16 & 0xFF, int >> 8 & 0xFF, int & 0xFF)
        default:
            (a, r, g, b) = (255, 0, 0, 0)
        }
        self.init(red: CGFloat(r) / 255, green: CGFloat(g) / 255, blue: CGFloat(b) / 255, alpha: CGFloat(a) / 255)
    }
}

Call it with UIColor(hexString: "your hex string").

Swift 5 (Swift 4, Swift 3) UIColor extension:

extension UIColor {
    convenience init(hexString: String) {
        let hex = hexString.trimmingCharacters(in: CharacterSet.alphanumerics.inverted)
        var int = UInt64()
        Scanner(string: hex).scanHexInt64(&int)
        let a, r, g, b: UInt64
        switch hex.count {
        case 3: // RGB (12-bit)
            (a, r, g, b) = (255, (int >> 8) * 17, (int >> 4 & 0xF) * 17, (int & 0xF) * 17)
        case 6: // RGB (24-bit)
            (a, r, g, b) = (255, int >> 16, int >> 8 & 0xFF, int & 0xFF)
        case 8: // ARGB (32-bit)
            (a, r, g, b) = (int >> 24, int >> 16 & 0xFF, int >> 8 & 0xFF, int & 0xFF)
        default:
            (a, r, g, b) = (255, 0, 0, 0)
        }
        self.init(red: CGFloat(r) / 255, green: CGFloat(g) / 255, blue: CGFloat(b) / 255, alpha: CGFloat(a) / 255)
    }
}

Usage:

let darkGrey = UIColor(hexString: "#757575")

Swift 2.x version:

extension UIColor {
    convenience init(hexString: String) {
        let hex = hexString.stringByTrimmingCharactersInSet(NSCharacterSet.alphanumericCharacterSet().invertedSet)
        var int = UInt32()
        NSScanner(string: hex).scanHexInt(&int)
        let a, r, g, b: UInt32
        switch hex.characters.count {
        case 3: // RGB (12-bit)
            (a, r, g, b) = (255, (int >> 8) * 17, (int >> 4 & 0xF) * 17, (int & 0xF) * 17)
        case 6: // RGB (24-bit)
            (a, r, g, b) = (255, int >> 16, int >> 8 & 0xFF, int & 0xFF)
        case 8: // ARGB (32-bit)
            (a, r, g, b) = (int >> 24, int >> 16 & 0xFF, int >> 8 & 0xFF, int & 0xFF)
        default:
            (a, r, g, b) = (255, 0, 0, 0)
        }
        self.init(red: CGFloat(r) / 255, green: CGFloat(g) / 255, blue: CGFloat(b) / 255, alpha: CGFloat(a) / 255)
    }
}