Multi Layer Perceptron
[email protected]
Multi Layer Perceptron
• Menambahkan sejumlah “hidden layer”
• “Hidden layer” terletak di antara lapisan input dan output
• Contoh :
# Example: fit an MLP with 10 hidden units and linear output activation.
# (TRUE spelled out instead of the reassignable shorthand T.)
modelx <- nnet(x, t, size = 10, linout = TRUE)
# Load the 'plot.nnet' helper from a public gist into the workspace.
# library() errors immediately if devtools is missing (require() only warns).
library(devtools)
source_gist("5086859")  # loaded as 'plot.nnet' in the workspace

# install.packages("RSNNS")
library(RSNNS)
# install.packages("nnet")
library(nnet)

# Read the data; the last column holds the (numeric) class label.
datax <- read.csv("iris-UCI-header-num.csv", header = TRUE)
ukuran <- dim(datax)
ikolomClass <- ukuran[2]

# Features: every column except the class column.
x <- datax[, 1:(ikolomClass - 1)]
names(x) <- tolower(names(x))

# Create species labels from the class column.
t <- datax[, ikolomClass]

# Neural net with one hidden layer (10 nodes) and linear output units,
# then plot the network topology.
mod <- nnet(x, t, size = 10, linout = TRUE)
par(mar = numeric(4), family = "serif")
plot.nnet(mod)
https://gist.github.com/fawda123/5086859
Multi Layer Perceptron
• Jalankan program sbb :
• Setelah itu berikut instruksi :
# Load the 'plot.nnet' helper from a public gist into the workspace.
library(devtools)
source_gist("5086859")  # loaded as 'plot.nnet' in the workspace

# install.packages("RSNNS")
library(RSNNS)
# install.packages("nnet")
library(nnet)

# Read the data.
datax <- read.csv("iris-UCI-header.csv", header = TRUE)
ukuran <- dim(datax)
ikolomClass <- ukuran[2]

# Keep the class column in x so the confusion-matrix step below works
# (the Species column must be available to the formula interface).
x <- datax[, 1:ikolomClass]

# Create species labels.
t <- datax[, ikolomClass]

# Build training & testing sets (reproducible split).
set.seed(123)
# Select 75% of the 'n' rows as the training sample.
isample <- sample.int(n = nrow(datax), size = floor(0.75 * nrow(datax)),
                      replace = FALSE)
xtrain <- x[isample, ]
xtest <- x[-isample, ]
ttrain <- t[isample]
ttest <- t[-isample]

# Neural net: 2 hidden units, small random initial weights, weight decay,
# at most 200 iterations; trained on the 'isample' rows of the full data.
modelx <- nnet(Species ~ ., data = datax, subset = isample,
               size = 2, rang = 0.1, decay = 5e-4, maxit = 200)

# Confusion matrix on the held-out test set.
cm <- table(ttest, predict(modelx, xtest, type = "class"))
cm

# Accuracy: trace of the confusion matrix over the number of test cases.
# sum(diag(cm)) matches the accuracy computation used later in this file
# and does not hard-code the number of classes.
akurasi <- sum(diag(cm)) / length(ttest)
akurasi
# Visualize and summarize the trained network.
par(mar = numeric(4), family = "serif")
plot.nnet(modelx)
summary(modelx)

# Model call highlighted on the slide (identical to the fit above).
modelx <- nnet(Species ~ ., data = datax, subset = isample,
               size = 2, rang = 0.1, decay = 5e-4, maxit = 200)
MLP untuk Iris Dataset
• Jalankan program sebelumnya dengan mengubah parameter
tertentu size. Lengkapi table berikut :
Size Iterasi saat konvergen Akurasi
2 180 0.973
4 160 0.973
6 510 0.973
8 10 20 30
# Highlighted call: rerun with different 'size' values to fill in the table.
modelx <- nnet(Species ~ ., data = datax, subset = isample,
               size = 2, rang = 0.1, decay = 5e-4, maxit = 200)
MLP dengan package ‘monmlp’
untuk 2 class
• Jalankan program tsb
• Berikan instruksi :
• Bagaimana menghitung akurasi?
# MLP with the 'monmlp' package for a 2-class problem.
library(monmlp)

# Read the data (2-class iris with numeric class labels).
datax <- read.csv("iris-UCI-header-2class-num.csv", header = TRUE)
ukuran <- dim(datax)
ikolomClass <- ukuran[2]

# NOTE(review): x spans 1:ikolomClass and therefore INCLUDES the class
# column — presumably 1:(ikolomClass - 1) was intended; confirm against
# the original exercise.
x <- as.matrix(datax[, 1:ikolomClass])

# Create species labels.
t <- as.matrix(datax[, ikolomClass])

# Build training & testing sets (reproducible split).
set.seed(123)
# Select 75% of the rows for training.
isample <- sample.int(n = nrow(datax), size = floor(0.75 * nrow(datax)),
                      replace = FALSE)
xtrain <- x[isample, ]
xtest <- x[-isample, ]
ttrain <- t[isample]
ttest <- t[-isample]

# Fit an ensemble of 15 MLPs (3 hidden units, monotone in input 1, bagged)
# and compute predictions on the test inputs.
# NOTE(review): the fit uses the full x/t rather than xtrain/ttrain, so the
# "test" predictions below are on data seen during training — confirm intent.
r <- monmlp.fit(x, t, hidden1 = 3, n.ensemble = 15, monotone = 1, bag = TRUE)
z <- monmlp.predict(x = xtest, weights = r)

# Compute the ROC curve and the AUC on the test set.
library(ROCR)
plot(performance(prediction(z, ttest), "tpr", "fpr"))
performance(prediction(z, ttest), "auc")@y.values[[1]]
z : hasil prediksi model; ttest : label kelas sebenarnya pada data uji
# Fit an ensemble of 15 monotone MLPs (3 hidden units, monotone in input 1,
# with bagging), then predict on the held-out test inputs.
# NOTE(review): fitting uses the full x/t rather than xtrain/ttrain, so the
# test predictions are on data seen during training — confirm intent.
r <- monmlp.fit(x, t, hidden1=3, n.ensemble=15, monotone=1, bag=TRUE)
z <- monmlp.predict(x = xtest, weights = r)
MLP dengan package ‘nnet’ untuk dataset Iris
• Jalankan program tsb
• Coba ganti dengan dataset lainnya
# Clear the workspace (slide convention; avoid in production scripts).
rm(list = ls(all = TRUE))
library(nnet)

#---
datax <- read.csv("iris-UCI-header.csv", header = TRUE)
ukuran <- dim(datax)
iData <- c(1, 2, 3, 4, 5)  # features plus class col 5 (needed by the formula)
iClass <- c(5)             # class column
x <- datax[, iData]

# Create species labels.
t <- datax[, iClass]

# Build training & testing sets (reproducible split).
set.seed(123)
# Select 75% of the rows for training.
isample <- sample.int(n = nrow(datax), size = floor(0.75 * nrow(datax)),
                      replace = FALSE)
xtrain <- x[isample, ]
xtest <- x[-isample, ]
ttrain <- t[isample]
ttest <- t[-isample]

#---
# BUG FIX: xtrain is already the training subset, so 'subset = isample'
# (indices into the FULL dataset) must not be applied again — it would
# index past xtrain's rows.
modelx <- nnet(Species ~ ., data = xtrain,
               size = 2, rang = 0.1, decay = 5e-4, maxit = 2000)

hasil <- predict(modelx, xtest, type = "class")
cm <- table(ttest, hasil)

# Accuracy: trace of the confusion matrix over all test cases.
akurasi <- sum(diag(cm)) / sum(cm)
akurasi
*) Terdapat 1 output dari nnet
# Highlighted call from the script above.
# NOTE(review): 'data = xtrain' is already the 75% training subset, yet
# 'subset = isample' (indices into the FULL dataset) is applied again —
# this double-subsetting looks like a bug; confirm against the exercise.
modelx <- nnet(Species ~ ., data = xtrain, subset = isample, size = 2, rang = 0.1, decay = 5e-4, maxit = 2000)
Formula
MLP dengan package ‘nnet’ untuk dataset Iris
• Jalankan program tsb
• Coba ganti dengan dataset lainnya
# Clear the workspace (slide convention).
rm(list = ls(all = TRUE))
library(nnet)

#---
# Dataset with the class one-hot encoded into 3 output columns (5:7).
datax <- read.csv("iris-UCI-header-output-3.csv", header = TRUE)
ukuran <- dim(datax)
iData <- c(1, 2, 3, 4)  # feature columns
iClass <- c(5, 6, 7)    # one-hot class columns
x <- datax[, iData]

# Create species labels (one-hot matrix).
t <- datax[, iClass]

# Build training & testing sets (reproducible split).
set.seed(123)
# Select 75% of the rows for training.
isample <- sample.int(n = nrow(datax), size = floor(0.75 * nrow(datax)),
                      replace = FALSE)
xtrain <- x[isample, ]
xtest <- x[-isample, ]
ttrain <- t[isample, ]
ttest <- t[-isample, ]

#---
# Matrix interface: nnet gets the feature matrix and the one-hot target
# matrix directly, yielding one output unit per class column.
modelx <- nnet(xtrain, ttrain, size = 2, rang = 0.1, decay = 5e-4,
               maxit = 2000)

# Build a confusion matrix from one-hot truth and raw network outputs by
# taking the column index of the maximum in each row.
tentukanCM <- function(true, pred) {
  true <- max.col(true)
  cres <- max.col(pred)
  table(true, cres)
}

cm <- tentukanCM(ttest, predict(modelx, xtest))

# Accuracy: trace of the confusion matrix over all test cases.
akurasi <- sum(diag(cm)) / sum(cm)
akurasi
*) Terdapat sejumlah n output dari nnet
# Highlighted call: matrix interface of nnet — the feature matrix and the
# one-hot target matrix are passed directly (one output unit per class).
modelx <- nnet(xtrain, ttrain, size = 2, rang = 0.1, decay = 5e-4, maxit = 2000)
default
MLP package ‘nnet’ untuk dataset XNOR
• Jalankan program tsb
• Coba ganti dengan dataset lainnya
# Clear the workspace (slide convention).
rm(list = ls(all = TRUE))
library(nnet)

#--- perceptron5.csv
datax <- read.csv("perceptron5-class.csv", header = TRUE)
ukuran <- dim(datax)
# Input columns plus 'target' (col 4) so the formula interface sees it.
# NOTE(review): column 3 is skipped in iData — confirm this is intentional.
iData <- c(1, 2, 4)
iClass <- c(4)  # class column
x <- datax[, iData]

# Create class labels.
t <- datax[, iClass]

# Build training & testing sets (reproducible split).
set.seed(123)
# Select 75% of the rows for training.
isample <- sample.int(n = nrow(datax), size = floor(0.75 * nrow(datax)),
                      replace = FALSE)
xtrain <- x[isample, ]
xtest <- x[-isample, ]
ttrain <- t[isample]
ttest <- t[-isample]

#---
# Training and evaluation both use the full data because the dataset is
# too small to split meaningfully.
modelx <- nnet(target ~ ., data = x, subset = isample,
               size = 2, rang = 0.1, decay = 5e-4, maxit = 2000)
hasil <- predict(modelx, x, type = "class")
cm <- table(t, hasil)

# Accuracy: trace of the confusion matrix over all cases.
akurasi <- sum(diag(cm)) / sum(cm)
akurasi