## Demo of L1-regularised logistic regression (glmnet) with polynomial
## features, compared against the true conditional probability curve.
library(glmnet)  # provides glmnet(), predict.glmnet(), coef.glmnet()

## True conditional probability P(y = 1 | x): the larger of two Gaussian
## bumps (centred at x = 2 and x = -3), shifted and scaled so the result
## stays strictly inside (0, 1).
truep <- function(x) {
  (pmax(exp(-(x - 2)^2 / 4), exp(-(x + 3)^2 / 4)) + .1) / 1.2
}

## Polynomial feature map: features are {x^i} for i = 0..deg.
##
## @param x   Numeric vector of inputs.
## @param deg Polynomial degree (non-negative integer).
## @return Data frame with length(x) rows and deg + 1 columns,
##   column i + 1 holding x^i.
phi <- function(x, deg) {
  d <- matrix(0, length(x), deg + 1)
  for (i in 0:deg) {
    d[, i + 1] <- x^i
  }
  data.frame(d)
}

## Demo L1 regularized learning of logistic regression,
## with different datasets generated, and using different
## degree polynomials as features.
trainx <- seq(-10, 10, .5)
testx <- seq(-10, 10, .1)

## Fit an L1-regularised logistic regression on labels sampled from the
## true conditional distribution, then plot predicted probabilities at
## several regularisation strengths alongside the truth.
##
## @param trainx Numeric vector of training inputs.
## @param testx  Numeric vector of test inputs (for plotting curves).
## @param truep  Function mapping x to the true P(y = 1 | x).
## @param deg    Polynomial feature degree passed to phi().
## @return Matrix of predicted probabilities on testx, one column per
##   regularisation strength in s (invisibly via normal return).
demolearnL1 <- function(trainx, testx, truep, deg) {
  trainp <- truep(trainx)
  testp <- truep(testx)
  ## Sample binary labels from the true conditional distribution.
  trainy <- as.numeric(runif(length(trainx)) < trainp)

  slr <- glmnet(as.matrix(phi(trainx, deg)), as.factor(trainy),
                family = "binomial")

  ## Regularisation strengths (lambda) at which to inspect the fit.
  s <- c(0, .0001, .001, .01, .05)
  predp <- predict(slr, newx = as.matrix(phi(testx, deg)),
                   s = s, type = "response")

  ## Save and restore the caller's graphics settings (par() is global
  ## state; the original left it modified on return).
  oldpar <- par(mfrow = c(1, 2), mar = c(4, 4, 1, 2))
  on.exit(par(oldpar), add = TRUE)

  ## Left panel: true curve (red), sampled labels (blue circles), and
  ## one fitted probability curve per regularisation strength.
  plot(testx, testp, type = "l", col = 2, lwd = 3, ylim = c(-.1, 1.1))
  points(trainx, trainy, pch = 1, col = 4, cex = 2)
  for (i in seq_len(ncol(predp))) {
    lines(testx, predp[, i], type = "l")
  }
  ## Right panel: coefficient paths versus log(lambda).
  plot(slr, xvar = "lambda")

  print(coef(slr, s = s))
  predp
}

demolearnL1(trainx, testx, truep, 10)