# DEMONSTRATION OF THE NEURAL NETWORK REGRESSION MODEL.

source("wk10mlpfuncs.r")

pdf("wk10mlpplots.pdf",pointsize=8)
par(mfrow=c(2,2))

# Generate some synthetic data.  Adding rnorm(n) to the matrix X puts the
# same noise vector into both columns (by recycling), so the two inputs
# are correlated.

set.seed(1)

n <- 100
X <- cbind (rnorm(n), rnorm(n))
X <- X + rnorm(n)

f <- function (x1, x2) 0.3 + sin (0.8*x1 + x2) + 0.2*x1^2

y <- f(X[,1],X[,2]) + rnorm(n,0,0.1)

grid <- seq(-2.5,2.5,length=100)

# Fit neural network model to the data, and plot the squared error and the
# sum of absolute values of the weights over the training run.  The final
# row of the stored weight history is relisted into the network's weight
# structure so it can be used for predictions below.

m <- 10
fit <- mlp_train (y, X, m, 0.001, 5000)
wl <- relist (fit$W[nrow(fit$W),], mlp_skeleton(2,m))

plot (fit$E, type="l", log="y")
title ("Training error")

plot (rowSums(abs(fit$W)), type="l")
title ("Sum of absolute values of weights")

# Show contour plots of the function used to generate the data and the
# function learned by the network (with locations of training points).

contour (grid, grid, outer (grid, grid, f))
title ("True function")

contour (grid, grid, outer (grid, grid,
          function (x1,x2) mlp_forward(cbind(x1,x2),wl)$o))
points (X[,1], X[,2], pch=20, col="grey")
title ("Function learned by network, and training points")

dev.off()
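
# ILLUSTRATIVE SKETCH ONLY (NOT FROM wk10mlpfuncs.r).  The helper functions
# mlp_train, mlp_skeleton, and mlp_forward are defined in wk10mlpfuncs.r,
# which is not shown here.  The sketch below is only a guess at the kind of
# computation such helpers perform: a network with one hidden layer of tanh
# units and a linear output.  The "_sketch" names, the tanh hidden units,
# and the list layout of the weights are assumptions, not the actual code.

# Weight "skeleton": a list whose element shapes match a network with p
# inputs and m hidden units, usable with relist() to unpack a flat weight
# vector (as done above with the final row of fit$W).

mlp_skeleton_sketch <- function (p, m)
  list (w1 = matrix(0,p,m),   # input-to-hidden weights
        b1 = rep(0,m),        # hidden unit biases
        w2 = rep(0,m),        # hidden-to-output weights
        b2 = 0)               # output bias

# Forward pass: given an n-by-p matrix of inputs and a weight list as above,
# return the hidden unit values and the network outputs.

mlp_forward_sketch <- function (X, wl)
{ h <- tanh (sweep (X %*% wl$w1, 2, wl$b1, "+"))   # n-by-m hidden values
  o <- as.vector (h %*% wl$w2 + wl$b2)             # n linear outputs
  list (h=h, o=o)
}

# Example of the unlist/relist round trip used above to turn one row of the
# stored weight history back into a structured weight list:
#
#   wl0  <- mlp_skeleton_sketch(2,10)
#   wvec <- unlist(wl0)                             # flat vector of weights
#   wl1  <- relist(wvec, mlp_skeleton_sketch(2,10)) # back to the list layout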