# Ch-8: GAN - generating MNIST digit-5 images with a simple GAN in Keras
library(keras)
# MNIST data: keep only images of the digit 5
mnist <- dataset_mnist()
# %<-% (from zeallot, re-exported by keras) unpacks the train/test splits
c(c(trainx, trainy), c(testx, testy)) %<-% mnist
trainx <- trainx[trainy == 5, , ]
# Preview the first 64 training images
par(mfrow = c(8, 8), mar = rep(0, 4))
for (i in 1:64) plot(as.raster(trainx[i, , ], max = 255))
par(mfrow = c(1, 1))
# Reshape to (n, 28, 28, 1) and scale pixel values to [0, 1]
trainx <- array_reshape(trainx, c(nrow(trainx), 28, 28, 1))
trainx <- trainx / 255
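# Note: the generator below ends in a tanh activation, whose output lies in
# [-1, 1], while the real images above are scaled to [0, 1]. An optional
# variant (not used in this script) is to put the real images on the same
# scale as the generator output:
# trainx <- trainx * 2 - 1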
# Generator network: maps a latent vector to a 28x28x1 image
h <- 28; w <- 28; c <- 1; l <- 28  # image height, width, channels; l = latent dim
gi <- layer_input(shape = l)
go <- gi %>%
  layer_dense(units = 32 * 14 * 14) %>%
  layer_activation_leaky_relu() %>%
  layer_reshape(target_shape = c(14, 14, 32)) %>%
  layer_conv_2d(filters = 32,
                kernel_size = 5,
                padding = "same") %>%
  layer_activation_leaky_relu() %>%
  # Upsample from 14x14 to 28x28
  layer_conv_2d_transpose(filters = 32, kernel_size = 4,
                          strides = 2, padding = "same") %>%
  layer_activation_leaky_relu() %>%
  layer_conv_2d(filters = 1,
                kernel_size = 5,
                activation = "tanh",
                padding = "same")
g <- keras_model(gi, go)
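# Sanity check (optional): latent vectors should map to 28x28x1 images;
# noise_check is a throwaway batch of two random latent vectors
noise_check <- matrix(rnorm(2 * l), nrow = 2, ncol = l)
dim(predict(g, noise_check))  # expect: 2 28 28 1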
# Discriminator network: classifies 28x28x1 images as real or fake
di <- layer_input(shape = c(h, w, c))
do <- di %>%
  layer_conv_2d(filters = 64, kernel_size = 4) %>%
  layer_activation_leaky_relu() %>%
  layer_flatten() %>%
  layer_dropout(rate = 0.3) %>%
  layer_dense(units = 1, activation = "sigmoid")
d <- keras_model(di, do)
d %>% compile(optimizer = "rmsprop",
              loss = "binary_crossentropy")
# Adversarial network: the generator feeding into a frozen discriminator
freeze_weights(d)
gani <- layer_input(shape = l)
gano <- gani %>% g %>% d    # equivalently: gano <- d(g(gani))
gan <- keras_model(gani, gano)
gan %>% compile(optimizer = "rmsprop",
                loss = "binary_crossentropy")
# Training setup
b <- 50  # batch size
getwd()  # check the current working directory
setwd("~/Desktop/")
dir <- "FakeImages"
dir.create(dir)
start <- 1; dloss <- NULL; gloss <- NULL
# Training loop: each iteration generates b fake images, trains the
# discriminator on a mixed real/fake batch, then trains the generator
# through the frozen discriminator
for (i in 1:100) {
  noise <- matrix(rnorm(b * l), nrow = b, ncol = l)
  fake <- g %>% predict(noise)
  # Combine real & fake images into one batch
  stop <- start + b - 1
  real <- trainx[start:stop, , , ]
  real <- array_reshape(real, c(nrow(real), 28, 28, 1))
  rows <- nrow(real)
  both <- array(0, dim = c(rows * 2, dim(real)[-1]))
  both[1:rows, , , ] <- fake
  both[(rows + 1):(rows * 2), , , ] <- real
  # Noisy labels: fake images get values near 1, real images near 0
  labels <- rbind(matrix(runif(b, 0.9, 1), nrow = b, ncol = 1),
                  matrix(runif(b, 0, 0.1), nrow = b, ncol = 1))
  # Train discriminator
  dloss[i] <- d %>% train_on_batch(both, labels)
  # Train generator: label its fakes as if they were real
  fakeAsReal <- array(runif(b, 0, 0.1), dim = c(b, 1))
  gloss[i] <- gan %>% train_on_batch(noise, fakeAsReal)
  start <- start + b
  if (start > (nrow(trainx) - b)) start <- 1
  # Report losses and save one fake image per iteration
  cat("Discriminator Loss:", dloss[i], "\n")
  cat("GAN Loss:", gloss[i], "\n")
  f <- fake[1, , , ]
  dim(f) <- c(28, 28, 1)
  image_array_save(f, path = file.path(dir, paste0("f", i, ".png")))
}
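# Note: 100 iterations with batches of 50 is only a quick demonstration;
# GANs usually need far more updates before the fakes look convincing.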
# Plot loss curves
makePlot <- function() {
  x <- 1:100
  plot(x, dloss, col = "red", type = "l",
       ylim = c(0, 3),
       xlab = "Iteration",
       ylab = "Loss")
  lines(x, gloss, col = "black")
}
makePlot()
legend("topright",
       legend = c("Discriminator Loss", "GAN Loss"),
       col = c("red", "black"), lty = 1, cex = 2)
# Display the saved fake images
library(EBImage)
setwd("~/Desktop/FakeImages")
temp <- list.files(pattern = "\\.png$")
mypic <- list()
for (i in 1:length(temp)) mypic[[i]] <- readImage(temp[[i]])
par(mfrow = c(10, 10), mar = rep(0, 4))
for (i in 1:length(temp)) plot(mypic[[i]])
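# Optional sketch: generate fresh fakes from the trained generator (assumes
# g and l are still in the workspace); the tanh output is mapped from
# [-1, 1] to [0, 1] for plotting
newNoise <- matrix(rnorm(16 * l), nrow = 16, ncol = l)
newFake <- predict(g, newNoise)
par(mfrow = c(4, 4), mar = rep(0, 4))
for (i in 1:16) plot(as.raster((newFake[i, , , ] + 1) / 2))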