ML project.R
# Simulation: n = 240 observations, p = 8 correlated predictors,
# a sparse true coefficient vector, and noise standard deviation sigma = 3.
library(MASS)  # for mvrnorm

beta  <- matrix(c(3, 1.5, 0, 0, 2, 0, 0, 0), ncol = 1)  # true coefficients
sigma <- 3
error <- rnorm(240)

# AR(1)-style covariance for the predictors: Cov(X_i, X_j) = 0.5^|i - j|
sigmaMatrix <- matrix(rep(0, 8 * 8), ncol = 8)
for (i in 1:8) {
  for (j in 1:8) {
    sigmaMatrix[i, j] <- 0.5^abs(i - j)
  }
}

mysample <- mvrnorm(n = 240, mu = rep(0, 8), Sigma = sigmaMatrix)
y <- mysample %*% beta + sigma * error
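# Quick sanity check (illustrative only, not needed for the fits below): the sample
# correlations of the simulated predictors should be close to 0.5^|i - j|.
round(cor(mysample)[1:4, 1:4], 2)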
##
## Coordinate-wise soft-thresholding algorithm for the lasso.
## Assumes the columns of x are (roughly) standardised, as in the simulation above,
## so each coordinate update is the soft-threshold of (1/n) * x_j' r_j.
CA.Lasso <- function(y, x, lamda){
  converge <- FALSE
  n <- dim(x)[1]
  p <- dim(x)[2]
  beta.hat <- rep(0, p)
  while (!converge) {
    new.beta.hat <- beta.hat
    for (j in 1:p) {
      # partial residual leaving coordinate j out, using the latest estimates
      r <- y - x[, -j] %*% new.beta.hat[-j]
      beta.star <- (1/n) * sum(x[, j] * r)
      # soft-thresholding step
      new.beta.hat[j] <- sign(beta.star) * max(abs(beta.star) - lamda, 0)
    }
    if (all(round(beta.hat, 6) == round(new.beta.hat, 6))) {
      converge <- TRUE
    } else {
      beta.hat <- new.beta.hat
    }
  }
  beta.hat  # return only once the estimates have stopped changing
}
CA.Lasso(y,mysample,1)
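# Rough cross-check of the call above: line the hand-rolled estimate up against the
# true coefficients and, if the glmnet package happens to be installed, against
# glmnet's lasso fit at the same lamda. The two fits use slightly different scaling
# conventions, so the numbers should be close but not identical.
lasso.hat <- CA.Lasso(y, mysample, 1)
print(cbind(true = drop(beta), CA.Lasso = lasso.hat))
if (requireNamespace("glmnet", quietly = TRUE)) {
  g <- glmnet::glmnet(mysample, drop(y), alpha = 1, lambda = 1,
                      intercept = FALSE, standardize = FALSE)
  print(cbind(CA.Lasso = lasso.hat, glmnet = as.numeric(coef(g))[-1]))
}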
##########ELASTIC NET
## Same coordinate-wise scheme with both an L1 and an L2 penalty: soft-threshold at
## lamda1, then shrink by 1/(1 + lamda2) -- the standard "naive" elastic-net update
## for (roughly) standardised predictors.
CA.Elasticnet <- function(y, x, lamda1, lamda2){
  converge <- FALSE
  n <- dim(x)[1]
  p <- dim(x)[2]
  beta.hat <- rep(0, p)
  while (!converge) {
    new.beta.hat <- beta.hat
    for (j in 1:p) {
      r <- y - x[, -j] %*% new.beta.hat[-j]
      beta.star <- (1/n) * sum(x[, j] * r)
      new.beta.hat[j] <- sign(beta.star) * max(abs(beta.star) - lamda1, 0) / (1 + lamda2)
    }
    if (all(round(beta.hat, 6) == round(new.beta.hat, 6))) {
      converge <- TRUE
    } else {
      beta.hat <- new.beta.hat
    }
  }
  beta.hat
}
CA.Elasticnet(y,mysample,0.5,0.5)
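# Same kind of check for the elastic net: show its estimates next to the true
# coefficients and the lasso fit (the lamda values here are just illustrative).
enet.hat <- CA.Elasticnet(y, mysample, 0.5, 0.5)
print(round(cbind(true = drop(beta), lasso = lasso.hat, enet = enet.hat), 3))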
# lamda <- seq(0, 100)
# training <- sample(lamda, size = 50, replace = FALSE)
# testing <- setdiff(lamda, training)
# Plan: fit the model to the training data, use it to predict the test data, and
# from those predictions compute the test MSE for each lamda.
# We want the lamda with the lowest MSE.
# Which model should we fit? A plain linear one?
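# One way to carry out the plan sketched above (names like lamda.grid and train.idx
# are illustrative choices): split the 240 observations in half, fit CA.Lasso on the
# training half for each candidate lamda, and keep the lamda whose predictions give
# the lowest test-set MSE.
lamda.grid <- seq(0, 3, by = 0.1)
train.idx  <- sample(1:240, size = 120, replace = FALSE)
x.train <- mysample[train.idx, ];  y.train <- y[train.idx]
x.test  <- mysample[-train.idx, ]; y.test  <- y[-train.idx]
test.mse <- sapply(lamda.grid, function(l) {
  b <- CA.Lasso(y.train, x.train, l)
  mean((y.test - x.test %*% b)^2)
})
best.lamda <- lamda.grid[which.min(test.mse)]
best.lamda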