Swift, 25 ms — using an array of characters and `unicodeScalars`


  • 0
    X

    We need the ASCII value of each digit character. Using: `Int((String(v1[i]).unicodeScalars).first!.value - 48)` (since `'0' == 48`),
    where `v1` is the array of characters `['1','0','.','1','0']` built from the string `"10.10"`.

    1. Convert the character to a `String` so we can access its `unicodeScalars`: `String(v1[i]).unicodeScalars`.
    2. Take the first scalar; force-unwrapping is safe here because the string was built from a single `Character`, so its length is exactly 1.
    3. Use the scalar's `value` (its ASCII code), knowing that `'0'` has the value 48.
    4. Subtracting 48 thus converts the character `'1'` into the integer `1`.
    class Solution {
        /// Compares two dot-separated version strings (LeetCode 165).
        ///
        /// Each dot-separated component is parsed as an integer, so leading
        /// zeros are ignored ("1.01" == "1.001") and missing trailing
        /// components count as 0 ("1.0" == "1.0.0").
        ///
        /// - Parameters:
        ///   - version1: First version string, e.g. "1.0.1".
        ///   - version2: Second version string.
        /// - Returns: -1 if `version1` < `version2`, 1 if greater, 0 if equal.
        func compareVersion(_ version1: String, _ version2: String) -> Int {
            var i = 0, j = 0
            // `String.characters` was deprecated in Swift 4 and removed in
            // Swift 5; String is itself a Collection of Character.
            let v1 = Array(version1)
            let v2 = Array(version2)
            let n1 = v1.count, n2 = v2.count

            while i < n1 || j < n2 {
                var num1 = 0, num2 = 0
                // Accumulate the next numeric component of each version.
                while i < n1, v1[i] != "." {
                    // wholeNumberValue replaces the unicodeScalars round-trip;
                    // `?? 0` guards against non-digit input instead of crashing.
                    num1 = num1 * 10 + (v1[i].wholeNumberValue ?? 0)
                    i += 1
                }
                while j < n2, v2[j] != "." {
                    num2 = num2 * 10 + (v2[j].wholeNumberValue ?? 0)
                    j += 1
                }
                if num1 != num2 {
                    return num1 > num2 ? 1 : -1
                }
                // Skip past the "." separators.
                i += 1
                j += 1
            }

            return 0
        }
    }
    

Log in to reply
 

Looks like your connection to LeetCode Discuss was lost, please wait while we try to reconnect.