How to use hex color values

Solution 1

#ffffff actually encodes three color components in hexadecimal notation: red ff, green ff, and blue ff. You can write hexadecimal literals in Swift using the 0x prefix, e.g. 0xFF.
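
For instance (arbitrary values, just to illustrate the literal notation and the bit shifts used below):

let white = 0xFFFFFF            // 16777215 in decimal
let red = (white >> 16) & 0xFF  // 255, i.e. 0xFF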

To simplify the conversion, let's create an initializer that takes integer values (0–255):

extension UIColor {
   convenience init(red: Int, green: Int, blue: Int) {
       assert(red >= 0 && red <= 255, "Invalid red component")
       assert(green >= 0 && green <= 255, "Invalid green component")
       assert(blue >= 0 && blue <= 255, "Invalid blue component")

       self.init(red: CGFloat(red) / 255.0, green: CGFloat(green) / 255.0, blue: CGFloat(blue) / 255.0, alpha: 1.0)
   }

   convenience init(rgb: Int) {
       self.init(
           red: (rgb >> 16) & 0xFF,
           green: (rgb >> 8) & 0xFF,
           blue: rgb & 0xFF
       )
   }
}

Usage:

let color = UIColor(red: 0xFF, green: 0xFF, blue: 0xFF)
let color2 = UIColor(rgb: 0xFFFFFF)

How to get alpha?

Depending on your use case, you can simply use the native UIColor.withAlphaComponent method, e.g.:

let semitransparentBlack = UIColor(rgb: 0x000000).withAlphaComponent(0.5)
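
(UIColor is immutable, so withAlphaComponent returns a new color rather than modifying the existing one.)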

Or you can add an additional (optional) parameter to the above methods:

convenience init(red: Int, green: Int, blue: Int, a: CGFloat = 1.0) {
    self.init(
        red: CGFloat(red) / 255.0,
        green: CGFloat(green) / 255.0,
        blue: CGFloat(blue) / 255.0,
        alpha: a
    )
}

convenience init(rgb: Int, a: CGFloat = 1.0) {
    self.init(
        red: (rgb >> 16) & 0xFF,
        green: (rgb >> 8) & 0xFF,
        blue: rgb & 0xFF,
        a: a
    )
}

(We cannot name the parameter alpha because it would collide with the existing UIColor initializer.)

Called as:

let color = UIColor(red: 0xFF, green: 0xFF, blue: 0xFF, a: 0.5)
let color2 = UIColor(rgb: 0xFFFFFF, a: 0.5)

To accept the alpha as an integer 0–255 instead, we can write:

convenience init(red: Int, green: Int, blue: Int, a: Int = 0xFF) {
    self.init(
        red: CGFloat(red) / 255.0,
        green: CGFloat(green) / 255.0,
        blue: CGFloat(blue) / 255.0,
        alpha: CGFloat(a) / 255.0
    )
}

// let's suppose alpha is the first component (ARGB)
convenience init(argb: Int) {
    self.init(
        red: (argb >> 16) & 0xFF,
        green: (argb >> 8) & 0xFF,
        blue: argb & 0xFF,
        a: (argb >> 24) & 0xFF
    )
}

Called as:

let color = UIColor(red: 0xFF, green: 0xFF, blue: 0xFF, a: 0xFF)
let color2 = UIColor(argb: 0xFFFFFFFF)

Or use a combination of the previous methods. There is absolutely no need to use strings.
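
For instance, a sketch of one such combination: a hypothetical rgba: initializer that assumes the alpha byte comes last (RGBA order, which is just as much a convention as the ARGB order above):

extension UIColor {
    // Hypothetical RGBA variant: assumes alpha is the lowest byte.
    convenience init(rgba: Int) {
        self.init(
            red: (rgba >> 24) & 0xFF,
            green: (rgba >> 16) & 0xFF,
            blue: (rgba >> 8) & 0xFF,
            a: rgba & 0xFF
        )
    }
}

let color = UIColor(rgba: 0xFFFFFF80) // white at ~50% alpha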

Solution 2

This is a function that takes a hex string and returns a UIColor.
(You can enter hex strings in either format: #ffffff or ffffff.)

Usage:

var color1 = hexStringToUIColor("#d3d3d3")

Swift 5 (also works in Swift 4+):

func hexStringToUIColor(hex: String) -> UIColor {
    var cString: String = hex.trimmingCharacters(in: .whitespacesAndNewlines).uppercased()

    if cString.hasPrefix("#") {
        cString.remove(at: cString.startIndex)
    }

    if cString.count != 6 {
        return UIColor.gray
    }

    var rgbValue: UInt64 = 0
    Scanner(string: cString).scanHexInt64(&rgbValue)

    return UIColor(
        red: CGFloat((rgbValue & 0xFF0000) >> 16) / 255.0,
        green: CGFloat((rgbValue & 0x00FF00) >> 8) / 255.0,
        blue: CGFloat(rgbValue & 0x0000FF) / 255.0,
        alpha: CGFloat(1.0)
    )
}

Swift 3:

func hexStringToUIColor (hex:String) -> UIColor {
    var cString:String = hex.trimmingCharacters(in: .whitespacesAndNewlines).uppercased()

    if (cString.hasPrefix("#")) {
        cString.remove(at: cString.startIndex)
    }

    if ((cString.characters.count) != 6) {
        return UIColor.gray
    }

    var rgbValue:UInt32 = 0
    Scanner(string: cString).scanHexInt32(&rgbValue)

    return UIColor(
        red: CGFloat((rgbValue & 0xFF0000) >> 16) / 255.0,
        green: CGFloat((rgbValue & 0x00FF00) >> 8) / 255.0,
        blue: CGFloat(rgbValue & 0x0000FF) / 255.0,
        alpha: CGFloat(1.0)
    )
}

Swift 2:

func hexStringToUIColor (hex:String) -> UIColor {
    var cString:String = hex.stringByTrimmingCharactersInSet(NSCharacterSet.whitespaceAndNewlineCharacterSet() as NSCharacterSet).uppercaseString

    if (cString.hasPrefix("#")) {
      cString = cString.substringFromIndex(cString.startIndex.advancedBy(1))
    }

    if ((cString.characters.count) != 6) {
      return UIColor.grayColor()
    }

    var rgbValue:UInt32 = 0
    NSScanner(string: cString).scanHexInt(&rgbValue)

    return UIColor(
        red: CGFloat((rgbValue & 0xFF0000) >> 16) / 255.0,
        green: CGFloat((rgbValue & 0x00FF00) >> 8) / 255.0,
        blue: CGFloat(rgbValue & 0x0000FF) / 255.0,
        alpha: CGFloat(1.0)
    )
}



Source: arshad/gist:de147c42d7b3063ef7bc

Edit: Updated the code. Thanks, Hlung, jaytrixz, Ahmad F, Kegham K, and Adam Waite!

Solution 3

Swift 5 (Swift 4, Swift 3) UIColor extension:

extension UIColor {
    convenience init(hexString: String) {
        let hex = hexString.trimmingCharacters(in: CharacterSet.alphanumerics.inverted)
        var int = UInt64()
        Scanner(string: hex).scanHexInt64(&int)
        let a, r, g, b: UInt64
        switch hex.count {
        case 3: // RGB (12-bit)
            (a, r, g, b) = (255, (int >> 8) * 17, (int >> 4 & 0xF) * 17, (int & 0xF) * 17)
        case 6: // RGB (24-bit)
            (a, r, g, b) = (255, int >> 16, int >> 8 & 0xFF, int & 0xFF)
        case 8: // ARGB (32-bit)
            (a, r, g, b) = (int >> 24, int >> 16 & 0xFF, int >> 8 & 0xFF, int & 0xFF)
        default:
            (a, r, g, b) = (255, 0, 0, 0)
        }
        self.init(red: CGFloat(r) / 255, green: CGFloat(g) / 255, blue: CGFloat(b) / 255, alpha: CGFloat(a) / 255)
    }
}
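
A note on the 3-digit branch: shorthand hex doubles each 4-bit digit, and multiplying by 17 (0x11) does exactly that, since d * 0x11 = 0xdd for any hex digit d (e.g. 0xF * 17 = 0xFF). So "abc" expands to "aabbcc".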

Usage:

let darkGrey = UIColor(hexString: "#757575")
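
Since the 8-digit branch treats the leading byte as alpha (ARGB), a semi-transparent color can be created like this (illustrative value):

let semiTransparentGreen = UIColor(hexString: "#8000FF00") // green at roughly 50% alpha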

Swift 2.x version:

extension UIColor {
    convenience init(hexString: String) {
        let hex = hexString.stringByTrimmingCharactersInSet(NSCharacterSet.alphanumericCharacterSet().invertedSet)
        var int = UInt32()
        NSScanner(string: hex).scanHexInt(&int)
        let a, r, g, b: UInt32
        switch hex.characters.count {
        case 3: // RGB (12-bit)
            (a, r, g, b) = (255, (int >> 8) * 17, (int >> 4 & 0xF) * 17, (int & 0xF) * 17)
        case 6: // RGB (24-bit)
            (a, r, g, b) = (255, int >> 16, int >> 8 & 0xFF, int & 0xFF)
        case 8: // ARGB (32-bit)
            (a, r, g, b) = (int >> 24, int >> 16 & 0xFF, int >> 8 & 0xFF, int & 0xFF)
        default:
            (a, r, g, b) = (255, 0, 0, 0)
        }
        self.init(red: CGFloat(r) / 255, green: CGFloat(g) / 255, blue: CGFloat(b) / 255, alpha: CGFloat(a) / 255)
    }
}

Solution 4

UIColor:

extension UIColor {

    convenience init(hex: Int) {
        let components = (
            R: CGFloat((hex >> 16) & 0xff) / 255,
            G: CGFloat((hex >> 08) & 0xff) / 255,
            B: CGFloat((hex >> 00) & 0xff) / 255
        )
        self.init(red: components.R, green: components.G, blue: components.B, alpha: 1)
    }

}

CGColor:

extension CGColor {
    class func colorWithHex(hex: Int) -> CGColor {
        return UIColor(hex: hex).cgColor
    }
}

Usage:

let purple = UIColor(hex: 0xAB47BC)
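
And, if desired, through the CGColor helper:

let purpleCG = CGColor.colorWithHex(hex: 0xAB47BC)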

Solution 5

Swift 4, combining the answers of Sulthan and Luca Torella:

extension UIColor {
    convenience init(hexFromString: String, alpha: CGFloat = 1.0) {
        var cString: String = hexFromString.trimmingCharacters(in: .whitespacesAndNewlines).uppercased()
        var rgbValue: UInt64 = 0x999999 // fall back to color #999999 if the string has the wrong format

        if cString.hasPrefix("#") {
            cString.remove(at: cString.startIndex)
        }

        if cString.count == 6 {
            Scanner(string: cString).scanHexInt64(&rgbValue)
        }

        self.init(
            red: CGFloat((rgbValue & 0xFF0000) >> 16) / 255.0,
            green: CGFloat((rgbValue & 0x00FF00) >> 8) / 255.0,
            blue: CGFloat(rgbValue & 0x0000FF) / 255.0,
            alpha: alpha
        )
    }
}

Usage examples:

let myColor = UIColor(hexFromString: "4F9BF5")

let myColor = UIColor(hexFromString: "#4F9BF5")

let myColor = UIColor(hexFromString: "#4F9BF5", alpha: 0.5)
Author: Stephen Fox

Software Engineer from Dublin, Ireland. Github: https://github.com/StephenFox1995

Updated on February 15, 2021

Comments

  • Stephen Fox
    Stephen Fox about 3 years

    I am trying to use hex color values in Swift, instead of the few standard ones that UIColor allows you to use, but I have no idea how to do it.

    Example: how would I use #ffffff as a color?

  • thandasoru
    thandasoru over 9 years
    In my humble opinion, I found this the easiest to use and very clear compared to other answers.
  • Hlung
    Hlung about 9 years
    countelements is now just count :)
  • Oliver Spryn
    Oliver Spryn about 9 years
    Great solution! Why on earth wouldn't Apple already have something like this????
  • confile
    confile about 9 years
    @Sulthan Could you please also define an extension with alpha and hex value?
  • Sulthan
    Sulthan about 9 years
    @confile No, because that's not standardized. Alpha can be the first component or the last. If you need alpha, just add one parameter alpha
  • confile
    confile about 9 years
    @Sulthan Can I set alpha after the color has been created?
  • Sulthan
    Sulthan about 9 years
    @confile Colors are immutable, so no, you have to create a new color with changed alpha.
  • Berik
    Berik almost 9 years
    Same solution, Swift 1.2 compatible, with alpha support: gist.github.com/berikv/ecf1f79c5bc9921c47ef
  • Josh
    Josh almost 9 years
    3 drops of albino unicorn tears, a dash of meteorite dust tossed into the magma of a millennial eruption... Still easier than setting a color hahah, anyways it works!
  • jrc
    jrc over 8 years
    assert(red >= 0 ... ) - why not use UInt and let the compiler take care of that for you?
  • SRMR
    SRMR over 8 years
    @Hlung and @ethanstrider it looks like they don't even let you do count now instead of countElements, any idea what they want us to use?
  • Ethan Strider
    Ethan Strider over 8 years
    Before Swift 2.0, count was a global function. As of Swift 2.0, it can be called as a member function (i.e. aString.count).
  • Jacob R
    Jacob R over 8 years
    For explanation of how this works, see this Apple Doc
  • Kishore Kumar
    Kishore Kumar over 8 years
    what is dropFirst?
  • Islam
    Islam about 8 years
    How would you handle 123ABC? The compiler is balking at it not being a digit.
  • Mehul
    Mehul about 8 years
    Great simple perfect solution
  • Richard Venable
    Richard Venable about 8 years
    This is my favorite implementation because of the way it handles the 3 cases. But I prefer the default: case to return nil, instead of white.
  • Richard Venable
    Richard Venable about 8 years
    Why not use UInt8 instead of asserting that your ints are in range 0...255?
  • jaytrixz
    jaytrixz about 8 years
    Changed this line of code cString = cString.substringFromIndex(advance(cString.startIndex, 1)) to cString = cString.substringFromIndex(cString.startIndex.advancedBy(1)) for Swift 2.2 Xcode 7.3
  • Carsten
    Carsten about 8 years
    for completeness: let foo: Int = 0x123ABC - note the '0x'
  • Eric Aya
    Eric Aya about 8 years
    Please avoid posting duplicate answers. If a question is a duplicate, flag it as such instead of answering. Thank you.
  • Gui Moura
    Gui Moura about 8 years
    by the way, the default case in this implementation seems to be UIColor.yellow()
  • kuzdu
    kuzdu over 7 years
    I know how to use it and it works like a charm. But I don't really understand why. Maybe someone can give me an explanation or some good links/words to search?
  • Womble
    Womble over 7 years
    Alas, like many other hex converters, this doesn't handle alpha components. So, for example, you can't get a UIColor.clear value from it.
  • Womble
    Womble over 7 years
    This does not work correctly with alpha values. e.g. Both inputs "ff00ff00" and "#ff00ff00" will output an RGBA of 0 1 0 1. (It should be 1 0 1 0). The input "#ff00ff" results in 1 0 1 1, which is correct. (Xcode 8.2.1, iOS 9.3.)
  • Luca Torella
    Luca Torella over 7 years
    @Womble the first component is the alpha not the last one. So "#ff00ff00" has alpha 1 because of the "ff" at the beginning. I think you meant "#00ff00ff". Another example: "#ff00ff00" this is green with alpha 1, "#0000ff00" this is green with alpha 0
  • oskarko
    oskarko over 7 years
    You saved my day, mate!
  • Sulthan
    Sulthan about 7 years
    You should be dividing by 255 not 256. There is no way to get white using your code. Only "almost white".
  • Sulthan
    Sulthan about 7 years
    Utility classes are an antipattern in Swift. Swift has extensions.
  • Sulthan
    Sulthan about 7 years
    @jrc @RichardVenable Apple recommends to use Int even when only unsigned values are expected. Using one type simplifies operations, especially when we are working in a language without implicit casts.
  • AnBisw
    AnBisw over 6 years
    It's 2017, and Apple still doesn't have something like this.
  • Ahmad F
    Ahmad F over 6 years
    And all you have to do for Swift 4 is to remove characters from cString.characters.count :)
  • Sulthan
    Sulthan about 6 years
    just a combination of the previous answers.
  • Sulthan
    Sulthan about 6 years
    Again, that unnecessary usage of strings. Also, you are just copying the other answers.
  • Womble
    Womble about 6 years
    Alpha is not supported in toHexString()
  • Norman
    Norman almost 6 years
    While it doesn't handle alpha @Womble, it's trivial to add. I had to set an explicit type for "components" to prevent the Swift compiler from "taking too long" and giving up.
  • jeet.chanchawat
    jeet.chanchawat over 5 years
    Wow! you are the champ... I was looking for a really simple solution... This is the one... Cheers...
  • Sulthan
    Sulthan over 5 years
    You don't need NSPredicate just to test regular expressions. string.range(of: pattern, options: .regularExpression) works too.
  • Kegham K.
    Kegham K. about 5 years
    If you are still supporting iPhone 5 or any 32 bit devices prior to iOS 11 it will crash. You need to change the UInt32 to UInt64
  • Bradley Thomas
    Bradley Thomas about 5 years
    This is useful!
  • Adam Waite
    Adam Waite almost 5 years
    Xcode 11 and the iOS13 SDK deprecates scanHexInt32. Use a UInt64 and scanHexInt64 instead.
  • Dimitar Stefanovski
    Dimitar Stefanovski over 4 years
    'scanHexInt32' was deprecated in iOS 13.0
  • Pini Cheyni
    Pini Cheyni over 4 years
    Currently the easiest and the best solution if the color is not set dynamically
  • Raniys
    Raniys over 4 years
    @Dimitar Stefanovski I've fixed that.
  • Justyn
    Justyn over 4 years
    I didn't realise you could define colours as Assets. Really glad I found this answer!
  • Peter Lapisu
    Peter Lapisu over 4 years
    'scanHexInt32' was deprecated in iOS 13.0
  • Luca Torella
    Luca Torella over 4 years
    @PeterLapisu true, I updated the snippet, now we use UInt64
  • riciloma
    riciloma over 4 years
    This is perfect!
  • Asad Ali Choudhry
    Asad Ali Choudhry about 4 years
    Nice, but too lengthy. Here I have written a short method for this purpose: handyopinion.com/…
  • Sulthan
    Sulthan about 4 years
    @AsadAliChoudhry Lengthy? Technically it's a one-liner. You are parsing a string which is rarely needed and the same is written here in 5 answers already.
  • Keyhan Kamangar
    Keyhan Kamangar about 4 years
    This should be the accepted answer, thank u very much.
  • ΩlostA
    ΩlostA about 4 years
    Great solution, I added UIColor.init(rgb: Constants.Color.mainColor).cgColor personally
  • Zorayr
    Zorayr over 3 years
    Is anyone else getting a crash here?
  • Jongers
    Jongers over 3 years
    Cleanest answer. I prefer this to be the accepted answer.
  • Saurav_Sharma
    Saurav_Sharma about 3 years
    Does ColorLiteral support hex color codes?
  • puneeth
    puneeth almost 3 years
    @Saurav_97 choose the other option, which you can see in the attached image above, and you will be able to set the hex value
  • Nhat Dinh
    Nhat Dinh almost 3 years
    Hey, can someone please explain why in the 12-bit case we need to multiply by 17?
  • Nhat Dinh
    Nhat Dinh almost 3 years
    nvm, I got it: 12-bit hex is the shorthand hexadecimal form.
  • mpora
    mpora about 2 years
    Hex option seems to have disappeared but this idea got me somewhere
  • Andrew Kingdom
    Andrew Kingdom almost 2 years
    Edit: Fixed bugs and clarified the explanation.