First of all, you had better not use `Double` when you want to control every aspect of rounding.
Any calculation on a `Double`, and even converting a `Double` to its decimal representation, involves binary rounding, and the result may not be what you expect from a decimal point of view.
let num = 6.1699999999999999
print(num == 6.17) //-> true
As a `Double`, 6.1699999999999999 is exactly the same value as 6.17.
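If you want to convince yourself that the two literals really become the same `Double`, you can compare their bit patterns. A minimal check, using only the standard library:

let a = 6.1699999999999999
let b = 6.17
// Both literals round to the same 64-bit binary value,
// so nothing downstream can tell them apart.
print(a.bitPattern == b.bitPattern) //-> true
print(String(a), String(b))         //-> 6.17 6.17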
Second, people tend to test their own code only with examples that work as expected, even when it is known that the code does not work for other inputs.
func testManuelMB(_ number: Double) {
    let stringFromNumber = String(number)
    if let dotIndex = stringFromNumber.range(of: ".")?.upperBound {
        let charactersCount = stringFromNumber.count
        let distanceToDot = stringFromNumber.distance(from: stringFromNumber.startIndex, to: dotIndex)
        if charactersCount > (distanceToDot + 1) {
            // At least two digits after the dot: keep two of them.
            let endIndex = stringFromNumber.index(dotIndex, offsetBy: 2)
            print(stringFromNumber[..<endIndex])
        } else if charactersCount > distanceToDot {
            // Exactly one digit after the dot: keep it.
            let endIndex = stringFromNumber.index(dotIndex, offsetBy: 1)
            print(stringFromNumber[..<endIndex])
        }
    } else {
        print(stringFromNumber)
    }
}
testManuelMB(6.1699999999999999) //-> 6.17 (Wrong!!!)
testManuelMB(16.1699999999999999) //-> 16.16
func testClaude31(_ number: Double) {
    // Drop the last character of the textual representation,
    // then truncate to two decimal places by going through Int.
    let stringFromNumber = String(String(number).dropLast())
    let newNumber = Double(Int(100 * Double(stringFromNumber)!)) / 100
    print(newNumber)
}
testClaude31(6.1699999999999999) //-> 6.1 (Wrong!!!, shows only one digit...)
testClaude31(16.1699999999999999) //-> 16.16
Conclusion: use `Decimal` instead of `Double` when you want to control rounding.
import Foundation // needed for Decimal and NSDecimalRound

func testDecimal(_ number: Decimal) {
    var decNum = number
    var roundedNum = Decimal()
    // Round down (truncate) to 2 decimal places.
    NSDecimalRound(&roundedNum, &decNum, 2, .down)
    print(roundedNum)
}
testDecimal(Decimal(string: "6.1699999999999999")!) //-> 6.16
testDecimal(Decimal(string: "16.1699999999999999")!) //-> 16.16