# Ch-2: multiclass classification with keras on the Cardiotocographic data
library(keras)
# Data: read the Cardiotocographic data set
data <- read.csv('https://raw.githubusercontent.com/bkrai/DeepLearningR/master/Cardiotocographic.csv')
# Recode the response NSP from 1-3 to 0-2 (keras expects class labels starting at 0)
data$NSP <- data$NSP - 1
# Bar plot of class proportions for the response NSP
barplot(prop.table(table(data$NSP)),
        col = rainbow(3),
        ylim = c(0, 0.8),
        ylab = 'Proportion',
        xlab = 'NSP',
        cex.names = 1.5)
# Matrix
data <- as.matrix(data)
dimnames(data) <- NULL
# Normalize
data[,1:21] <- normalize(data[,1:21])
data[,22] <- as.numeric(data[,22])   # NSP was already recoded to 0-2 above; do not subtract 1 again
summary(data)
# Partition
set.seed(1234)
ind <- sample(2, nrow(data), replace = TRUE, prob = c(0.7, 0.3))
training <- data[ind==1, 1:21]
test <- data[ind==2, 1:21]
trainingtarget <- data[ind==1, 22]
testtarget <- data[ind==2, 22]
# One hot encoding
trainLabels <- to_categorical(trainingtarget)
testLabels <- to_categorical(testtarget)
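# Quick sanity check (not in the original script): the one-hot label matrices
# should have one row per observation and one column per class (3 here).
dim(trainLabels)
head(trainLabels)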
# Create sequential model and add layers
model <- keras_model_sequential()
model %>% layer_dense(units = 8, activation = 'relu', input_shape = c(21)) %>%
  layer_dense(units = 3, activation = 'softmax')
# Compile
model %>% compile(loss = 'categorical_crossentropy',
                  optimizer = 'adam',
                  metrics = 'accuracy')
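# Optional check (not in the original script): print the layer output shapes and
# parameter counts of the compiled model.
summary(model)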
# Fit
model_one <- model %>%
  fit(training,
      trainLabels,
      epochs = 200,
      batch_size = 32,
      validation_split = 0.2)
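# Optional check (not in the original script): fit() returns a training history;
# plotting it shows loss and accuracy per epoch for the training and validation splits.
plot(model_one)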
# Evaluate
model %>% evaluate(test, testLabels)
# Prediction: pick the class with the highest predicted probability
# (predict_classes() is no longer available in current releases of the keras package)
prob <- model %>% predict(test)
pred <- max.col(prob) - 1
table(Predicted=pred, Actual=testtarget)
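# Follow-up sketch (not in the original script; `cm` is a name introduced here
# only for this check): overall test accuracy from the confusion matrix.
cm <- table(Predicted = pred, Actual = testtarget)
sum(diag(cm)) / sum(cm)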
# Fine-tune - add a second hidden layer
model <- keras_model_sequential()
model %>% layer_dense(units = 8, activation = 'relu', input_shape = c(21)) %>%
  layer_dense(units = 5, activation = 'relu') %>%
  layer_dense(units = 3, activation = 'softmax')
model %>%
  compile(loss = 'categorical_crossentropy',
          optimizer = 'adam',
          metrics = 'accuracy')
history <- model %>%
  fit(training,
      trainLabels,
      epochs = 200,
      batch_size = 32,
      validation_split = 0.2)
prob <- model %>% predict(test)
pred <- max.col(prob) - 1
table(Predicted=pred, Actual=testtarget)
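# Optional check (not in the original script): evaluate the deeper model on the
# held-out test set, mirroring the evaluation of the first model above.
model %>% evaluate(test, testLabels)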
# Fine-tune - more units per layer, with dropout for regularization
model <- keras_model_sequential()
model %>% layer_dense(units = 40, activation = 'relu', input_shape = c(21)) %>%
  layer_dropout(rate = 0.4) %>%
  layer_dense(units = 30, activation = 'relu') %>%
  layer_dropout(rate = 0.3) %>%
  layer_dense(units = 20, activation = 'relu') %>%
  layer_dropout(rate = 0.2) %>%
  layer_dense(units = 3, activation = 'softmax')
model %>%
  compile(loss = 'categorical_crossentropy',
          optimizer = 'adam',
          metrics = 'accuracy')
# Up-weight the two minority classes during training to counter the class imbalance
history <- model %>%
  fit(training,
      trainLabels,
      epochs = 100,
      batch_size = 32,
      validation_split = 0.2,
      class_weight = list("0" = 1, "1" = 5.6, "2" = 9.4))
prob <- model %>% predict(test)
pred <- max.col(prob) - 1
table(Predicted=pred, Actual=testtarget)
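# Optional follow-up (not in the original script): test-set loss/accuracy for the
# final model, plus overall accuracy recomputed from the confusion matrix
# (`cm` is a name introduced here only for this check).
model %>% evaluate(test, testLabels)
cm <- table(Predicted = pred, Actual = testtarget)
sum(diag(cm)) / sum(cm)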