Reputation: 307
I am trying to make a monthly Loan payment calculator here is the code for it:
import UIKit
class monthlypayment: UIViewController {
@IBOutlet weak var loanamounttextfield: UITextField!
@IBOutlet weak var numberofmonthstextfield: UITextField!
@IBOutlet weak var loanpercentagetextfield: UITextField!
@IBOutlet weak var answerlabel: UILabel!
@IBAction func calculate(sender: AnyObject) {
var loanamounttext = loanamounttextfield.text
var loanamount:Double! = Double((loanamounttext as NSString).doubleValue)
var loanpercentage = loanpercentagetextfield.text
var loanpercentagedouble:Double! = Double((loanpercentage as NSString).doubleValue)
var numbrofmonths = numberofmonthstextfield.text
var numberofmonths:Double! = Double((numbrofmonths as NSString).doubleValue)
var rate = loanpercentagedouble/1200
var monthlypayment:Double = rate+(rate/(1.0+rate)^numberofmonths)-1.0*loanamount
}
}
I get the error here
var rate = loanpercentagedouble/1200
var monthlypayment:Double = rate+ (rate/(1.0+rate)^numberofmonths)-1.0*loanamount
Where numberofmonths and loanamount are doubles as you can see
But I am getting the error: Could not find '+' that accepts the supplied arguments
Upvotes: 0
Views: 40
Reputation: 10096
In Swift ^
is the XOR operator (which return various flavours of Ints) not the exponential, you want to use the pow(num, power)
function instead
var monthlypayment = rate + pow(rate / (1.0 + rate), numberofmonths) - 1.0 * loan amount
Alternatively you can define a custom operator
infix operator ** { associativity left precedence 160 }
func ** (left: Double, right: Double) -> Double {
return pow(left, right)
}
And rewrite your code to a more readable
var monthlypayment = rate + (rate / (1.0 + rate)) ** numberofmonths - 1.0 * loanamount
Upvotes: 2