-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathr_gradientDescent.R
54 lines (32 loc) · 1.21 KB
/
r_gradientDescent.R
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
# NOTE(review): attach() is discouraged — it pollutes the search path and can
# silently mask variables. Kept as-is because the final call in this script
# references `disp`/`drat` unqualified; prefer mtcars$disp / with(mtcars, ...).
attach(mtcars)
# n: number of observations (sample size)  [translated from Turkish]
# Fit a simple linear regression y ≈ alfa * x + betaC by batch gradient descent.
#
# Args:
#   x, y          numeric vectors of equal length (predictor / response).
#   learningRate  step size for the gradient updates.
#   costThreshold stop when the per-iteration drop in MSE falls below this.
#   n             number of observations (length of x).
#   maxIteration  hard cap on the number of iterations.
#
# Side effects: draws a scatter plot of (x, y) and overlays the fitted line.
# Returns: a character string reporting the fitted intercept (betaC) and
#   slope (alfa).
myGradient <- function(x, y, learningRate, costThreshold, n, maxIteration) {
  plot(x, y, col = "blue", pch = 20)
  # Random start in [0, 1] for slope (alfa) and intercept (betaC).
  alfa <- runif(1, 0, 1)
  betaC <- runif(1, 0, 1)
  ypred <- alfa * x + betaC
  meanSquareError <- sum((y - ypred)^2) / n
  iterations <- 0
  repeat {
    # Gradient of the MSE w.r.t. slope and intercept (both use the same
    # ypred from the previous iteration, as in the original update).
    alfa <- alfa - learningRate * (1 / n) * sum((ypred - y) * x)
    betaC <- betaC - learningRate * (1 / n) * sum(ypred - y)
    ypred <- alfa * x + betaC
    meanSquareNew <- sum((y - ypred)^2) / n
    iterations <- iterations + 1
    # BUG FIX: the reference MSE must be refreshed every pass. The original
    # compared each new MSE against the *initial* MSE, so the cost-based
    # stopping rule almost never fired and the loop ran to maxIteration.
    if (meanSquareError - meanSquareNew < costThreshold ||
        iterations > maxIteration) {
      abline(betaC, alfa)
      return(paste("optimal betaC : ", betaC, "Optimal coefficient", alfa))
    }
    meanSquareError <- meanSquareNew
  }
}
# Demo: regress drat (rear axle ratio) on disp (displacement) from the
# built-in mtcars data set. Columns are qualified with mtcars$ so this call
# does not depend on attach() having been executed; nrow() is the idiomatic
# observation count (the original echoed length(mtcars$cyl)).
n_obs <- nrow(mtcars)
myGradient(mtcars$disp, mtcars$drat, 0.00002, 0.001, n_obs, 2000000)