Generate response and features
Variance covariance matrix
     [,1] [,2] [,3]
[1,]  1.0  0.9  0.2
[2,]  0.9  1.0  0.0
[3,]  0.2  0.0  1.0
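One way to build the matrix above and draw correlated features from it is sketched below; the multivariate-normal draw via MASS::mvrnorm and the true coefficients in beta are assumptions for illustration, not necessarily the original simulation.

library(MASS)

set.seed(1)
Sigma <- matrix(c(1.0, 0.9, 0.2,
                  0.9, 1.0, 0.0,
                  0.2, 0.0, 1.0), nrow = 3)          # variance-covariance matrix above
X <- mvrnorm(n = 100, mu = rep(0, 3), Sigma = Sigma)  # correlated features
beta <- c(1, 1, 1)                                    # assumed true coefficients
y <- drop(X %*% beta + rnorm(100))                    # response with standard-normal noise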
Regularization path
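A minimal sketch of the regularization path, assuming the X and y simulated above: glmnet fits the whole path at once, and plot() traces each coefficient against \(\log(\lambda)\).

library(glmnet)

fit <- glmnet(X, y, alpha = 0, standardize = TRUE, intercept = FALSE)  # ridge path
plot(fit, xvar = "lambda", label = TRUE)  # one curve per coefficient vs log(lambda)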
Tuning \(\lambda\)
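A common way to pick \(\lambda\), sketched here under the same assumed data, is k-fold cross-validation with cv.glmnet:

cv_fit <- cv.glmnet(X, y, alpha = 0, nfolds = 10, intercept = FALSE)
plot(cv_fit)          # CV error as a function of log(lambda)
cv_fit$lambda.min     # lambda minimizing CV error
cv_fit$lambda.1se     # largest lambda within one SE of the minimum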
Simulate data
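The exact simulation behind the OLS/ridge/lasso comparison is not shown; the sketch below is one assumed setup that makes the later code self-contained. Note that n here counts predictors, matching the coef() indexing used for the ridge and lasso fits, and the lambda value is an assumed penalty level for illustration (it could equally come from cross-validation as above).

set.seed(42)
n      <- 20                                   # number of predictors (assumed)
n_obs  <- 500                                  # number of observations (assumed)
X      <- matrix(rnorm(n_obs * n), n_obs, n)   # independent standard-normal features
beta   <- seq(-2, 2, length.out = n)           # assumed true coefficients
y      <- drop(X %*% beta + rnorm(n_obs))
lambda <- 0.1                                  # assumed penalty level used below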
OLS
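The OLS benchmark is not shown either; a minimal version, assuming the X and y above and no intercept to match the glmnet calls:

b_ols <- coef(lm(y ~ X - 1))   # OLS coefficients, no intercept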
Ridge
ridge <- glmnet(X, y, alpha = 0, standardize = TRUE, intercept = FALSE)  # alpha = 0: ridge penalty
b_ridge <- coef(ridge, s = lambda)[2:(n+1), 1]  # drop the intercept row, keep the n predictor coefficients
Lasso
lasso <- glmnet(X, y, alpha = 1, standardize = TRUE, intercept = FALSE)  # alpha = 1: lasso penalty
b_lasso <- coef(lasso, s = lambda)[2:(n+1), 1]  # drop the intercept row, keep the n predictor coefficients
Plot results
library(tidyverse)

bs <- cbind(b_ols, b_lasso, b_ridge) %>%
  as_tibble()

bs %>%
  ggplot(aes(x = b_ols)) +
  geom_line(aes(y = b_ols, color = "ols")) +
  geom_line(aes(y = b_lasso, color = "lasso")) +
  geom_line(aes(y = b_ridge, color = "ridge")) +
  labs(
    x = expression(beta^{OLS}),
    y = "coefficient estimate"
  )
Best subset selection
Subset selection object
11 Variables
  Forced in Forced out
a     FALSE      FALSE
b     FALSE      FALSE
c     FALSE      FALSE
d     FALSE      FALSE
e     FALSE      FALSE
f     FALSE      FALSE
g     FALSE      FALSE
h     FALSE      FALSE
i     FALSE      FALSE
j     FALSE      FALSE
k     FALSE      FALSE
1 subsets of each size up to 10
Selection Algorithm: exhaustive
           a   b   c   d   e   f   g   h   i   j   k
1  ( 1 )   "*" " " " " " " " " " " " " " " " " " " " "
2  ( 1 )   "*" " " " " " " " " " " " " " " " " " " "*"
3  ( 1 )   "*" "*" " " " " " " " " " " " " " " " " "*"
4  ( 1 )   "*" "*" " " " " " " " " " " " " " " "*" "*"
5  ( 1 )   "*" "*" " " " " " " " " " " " " "*" "*" "*"
6  ( 1 )   "*" "*" "*" " " " " " " " " " " "*" "*" "*"
7  ( 1 )   "*" "*" "*" " " " " " " " " "*" "*" "*" "*"
8  ( 1 )   "*" "*" "*" "*" " " " " " " "*" "*" "*" "*"
9  ( 1 )   "*" "*" "*" "*" " " " " "*" "*" "*" "*" "*"
10  ( 1 )  "*" "*" "*" "*" "*" " " "*" "*" "*" "*" "*"
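Output like the above comes from leaps::regsubsets followed by summary(); a hedged sketch, assuming a hypothetical data frame dat holding the response y and predictors a through k:

library(leaps)

# dat is a hypothetical data frame with response y and predictors a through k
best <- regsubsets(y ~ ., data = dat, nvmax = 10, method = "exhaustive")
summary(best)   # prints a table like the one above; "*" marks inclusion in the best model of each size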