# STA 414, Assignment #3, Spring 2007.  Script for checking gradient
# computations, training an ensemble of networks, and evaluating their
# predictions on the test cases.

source("mlp-mc.r")

options(warn=2)
options(error=dump.frames)


# Read the training and test data.

x.train = as.matrix(read.table("a3-x-train",head=F))
x.test = as.matrix(read.table("a3-x-test",head=F))

t.train = scan("a3-t-train")
t.test = scan("a3-t-test")

K = 10


# Check gradient computations, if enabled.  (A generic sketch of the idea
# behind such a check appears at the end of this script.)

if (FALSE)
{ set.seed(1)

  n.hid = 2
  n.cases = 3

  ptest = rnorm (K + n.hid*(ncol(x.train)+1+K), 0, 0.001)

  fw = mlp.mc.forward (K, x.train[1:n.cases,], n.hid, ptest)
  bk = mlp.mc.backward (K, t.train[1:n.cases], x.train[1:n.cases,], n.hid,
                        ptest, fw)

  gfb = mlp.mc.grad (K, x.train[1:n.cases,], n.hid, fw, bk)
  gck = mlp.mc.grad.check (K, t.train[1:n.cases], x.train[1:n.cases,], n.hid,
                           ptest)

  print (cbind(gfb,gck))
  plot(gfb,gck,pch=20)
}


# Train an ensemble of networks, producing the cross-validation plot.

set.seed(1)

postscript("ass3.ps",paper="letter",pointsize=10)

params = mlp.mc.ensemble (K, t.train, x.train,
                          eta1=0.000001, eta2=0.00005, iters=8000,
                          q=8, n.folds=4, cv.plot=T)

dev.off()


# Find the error rates of predictions for the test cases, first using the
# individual networks, then using the ensemble.

cat("Error rates with individual networks:")
for (i in 1:nrow(params))
{ g = mlp.mc.predict (K, x.test, params[i,]) $ guess
  cat(" ",round(mean(g!=t.test),3))
}
cat("\n")

g = mlp.mc.ensemble.predict (K, x.test, params) $ guess
cat("Error rate with ensemble: ", round(mean(g!=t.test),3), "\n")
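

# Illustrative sketch only (not part of the assignment): the general idea
# behind a gradient check is to compare analytic derivatives with central
# finite differences of the objective.  The function `objective` below is a
# hypothetical placeholder for the model's objective (e.g. minus the log
# likelihood as a function of the parameter vector); the actual check for this
# model is done by mlp.mc.grad.check above, whose internals are not assumed
# here.  Disabled with if (FALSE), following the convention used above.

if (FALSE)
{ fd.grad = function (objective, p, eps=1e-6)
  { # Central finite-difference approximation to the gradient of `objective`
    # at parameter vector p: perturb one component at a time by +/- eps.
    sapply (1:length(p), function (j)
    { e = rep(0,length(p))
      e[j] = eps
      (objective(p+e) - objective(p-e)) / (2*eps)
    })
  }
}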
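

# Illustrative sketch only (not part of the assignment): one simple way to
# combine the individual networks' predictions is a majority vote over their
# guessed classes.  This is shown for comparison; the ensemble prediction used
# above is produced by mlp.mc.ensemble.predict, which may combine the networks
# differently (e.g. by averaging class probabilities).  Disabled with
# if (FALSE), following the convention used above.

if (FALSE)
{ # Matrix of guesses, one column per network in the ensemble.
  guesses = sapply (1:nrow(params),
                    function (i) mlp.mc.predict (K, x.test, params[i,]) $ guess)

  # For each test case, take the most frequently guessed class.
  vote = apply (guesses, 1,
                function (g) as.numeric(names(which.max(table(g)))))

  cat ("Error rate with majority vote:", round(mean(vote!=t.test),3), "\n")
}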