Skip to content

Commit 00d4e9e

Browse files
author
Robert Coleman
authored
adam optimizer; val_loss monitor
Better initial results from Adam than from SGD, so making that switch for now. The last version only logged validation accuracy during training, so validation loss tracking has been added as well.
1 parent ee8fdbf commit 00d4e9e

File tree

1 file changed

+3
-3
lines changed

1 file changed

+3
-3
lines changed

vgg_bn.py

Lines changed: 3 additions & 3 deletions
Original file line number · Diff line number · Diff line change
@@ -4,7 +4,7 @@
44
from keras.models import Sequential
55
from keras.layers.core import Flatten, Dense, Dropout, Lambda
66
from keras.layers.convolutional import Convolution2D, MaxPooling2D, ZeroPadding2D
7-
from keras.optimizers import SGD
7+
from keras.optimizers import Adam
88
from keras.preprocessing.image import ImageDataGenerator
99

1010

@@ -71,8 +71,8 @@ def finetune(self):
7171
model.add(Dense(self.n_classes, activation='softmax'))
7272

7373
def compile(self):
74-
self.model.compile(optimizer=SGD(lr=self.lr, decay=1e-6, momentum=0.9, nesterov=True),
75-
loss='categorical_crossentropy', metrics=['accuracy'])
74+
self.model.compile(optimizer=Adam(lr=self.lr),
75+
loss='categorical_crossentropy', metrics=['val_loss', 'accuracy'])
7676

7777
def fit(self, trn_path, val_path, nb_trn_samples, nb_val_samples, nb_epoch=1, callbacks=None, aug=False):
7878
if aug:

0 commit comments

Comments (0)