
Commit fe878ea

In TF 2.2.0-rc1, validation_data expects tuples, not lists, fixes ageron#131
1 parent 40358e3 · commit fe878ea
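
The change is the same one-line fix in every affected cell: pass validation_data as a tuple instead of a list. Below is a minimal sketch of the before/after pattern on a throwaway Fashion MNIST classifier; the model and the data split are illustrative stand-ins, not code from either notebook.

from tensorflow import keras

# Load Fashion MNIST and carve out a validation set (illustrative split).
(X_train_full, y_train_full), (X_test, y_test) = keras.datasets.fashion_mnist.load_data()
X_train_full, X_test = X_train_full / 255.0, X_test / 255.0
X_valid, y_valid = X_train_full[:5000], y_train_full[:5000]
X_train, y_train = X_train_full[5000:], y_train_full[5000:]

# Toy classifier, just enough to call fit() with validation data.
model = keras.models.Sequential([
    keras.layers.Flatten(input_shape=[28, 28]),
    keras.layers.Dense(100, activation="relu"),
    keras.layers.Dense(10, activation="softmax"),
])
model.compile(loss="sparse_categorical_crossentropy", optimizer="nadam",
              metrics=["accuracy"])

# Before: a list works in older releases but, per the commit message,
# is rejected by TF 2.2.0-rc1.
# history = model.fit(X_train, y_train, epochs=10,
#                     validation_data=[X_valid, y_valid])

# After: a tuple is accepted across versions.
history = model.fit(X_train, y_train, epochs=10,
                    validation_data=(X_valid, y_valid))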

File tree

2 files changed: +18 -18 lines changed

14_deep_computer_vision_with_cnns.ipynb

Lines changed: 2 additions & 2 deletions
@@ -837,7 +837,7 @@
 ],
 "source": [
 "model.compile(loss=\"sparse_categorical_crossentropy\", optimizer=\"nadam\", metrics=[\"accuracy\"])\n",
-"history = model.fit(X_train, y_train, epochs=10, validation_data=[X_valid, y_valid])\n",
+"history = model.fit(X_train, y_train, epochs=10, validation_data=(X_valid, y_valid))\n",
 "score = model.evaluate(X_test, y_test)\n",
 "X_new = X_test[:10] # pretend we have new images\n",
 "y_pred = model.predict(X_new)"
@@ -2098,7 +2098,7 @@
 "model.compile(loss=\"sparse_categorical_crossentropy\", optimizer=\"nadam\",\n",
 " metrics=[\"accuracy\"])\n",
 "\n",
-"model.fit(X_train, y_train, epochs=10, validation_data=[X_valid, y_valid])\n",
+"model.fit(X_train, y_train, epochs=10, validation_data=(X_valid, y_valid))\n",
 "model.evaluate(X_test, y_test)"
 ]
 },

17_autoencoders_and_gans.ipynb

Lines changed: 16 additions & 16 deletions
@@ -408,7 +408,7 @@
 "stacked_ae.compile(loss=\"binary_crossentropy\",\n",
 " optimizer=keras.optimizers.SGD(lr=1.5), metrics=[rounded_accuracy])\n",
 "history = stacked_ae.fit(X_train, X_train, epochs=20,\n",
-" validation_data=[X_valid, X_valid])"
+" validation_data=(X_valid, X_valid))"
 ]
 },
 {
@@ -655,7 +655,7 @@
 "tied_ae.compile(loss=\"binary_crossentropy\",\n",
 " optimizer=keras.optimizers.SGD(lr=1.5), metrics=[rounded_accuracy])\n",
 "history = tied_ae.fit(X_train, X_train, epochs=10,\n",
-" validation_data=[X_valid, X_valid])"
+" validation_data=(X_valid, X_valid))"
 ]
 },
 {
@@ -708,7 +708,7 @@
 " autoencoder = keras.models.Sequential([encoder, decoder])\n",
 " autoencoder.compile(optimizer, loss, metrics=metrics)\n",
 " autoencoder.fit(X_train, X_train, epochs=n_epochs,\n",
-" validation_data=[X_valid, X_valid])\n",
+" validation_data=(X_valid, X_valid))\n",
 " return encoder, decoder, encoder(X_train), encoder(X_valid)"
 ]
 },
@@ -855,7 +855,7 @@
 "stacked_ae_1_by_1.compile(loss=\"binary_crossentropy\",\n",
 " optimizer=keras.optimizers.SGD(lr=0.1), metrics=[rounded_accuracy])\n",
 "history = stacked_ae_1_by_1.fit(X_train, X_train, epochs=10,\n",
-" validation_data=[X_valid, X_valid])"
+" validation_data=(X_valid, X_valid))"
 ]
 },
 {
@@ -943,7 +943,7 @@
 "conv_ae.compile(loss=\"binary_crossentropy\", optimizer=keras.optimizers.SGD(lr=1.0),\n",
 " metrics=[rounded_accuracy])\n",
 "history = conv_ae.fit(X_train, X_train, epochs=5,\n",
-" validation_data=[X_valid, X_valid])"
+" validation_data=(X_valid, X_valid))"
 ]
 },
 {
@@ -1095,7 +1095,7 @@
 }
 ],
 "source": [
-"history = recurrent_ae.fit(X_train, X_train, epochs=10, validation_data=[X_valid, X_valid])"
+"history = recurrent_ae.fit(X_train, X_train, epochs=10, validation_data=(X_valid, X_valid))"
 ]
 },
 {
@@ -1187,7 +1187,7 @@
 "denoising_ae.compile(loss=\"binary_crossentropy\", optimizer=keras.optimizers.SGD(lr=1.0),\n",
 " metrics=[rounded_accuracy])\n",
 "history = denoising_ae.fit(X_train, X_train, epochs=10,\n",
-" validation_data=[X_valid, X_valid])"
+" validation_data=(X_valid, X_valid))"
 ]
 },
 {
@@ -1276,7 +1276,7 @@
 "dropout_ae.compile(loss=\"binary_crossentropy\", optimizer=keras.optimizers.SGD(lr=1.0),\n",
 " metrics=[rounded_accuracy])\n",
 "history = dropout_ae.fit(X_train, X_train, epochs=10,\n",
-" validation_data=[X_valid, X_valid])"
+" validation_data=(X_valid, X_valid))"
 ]
 },
 {
@@ -1378,7 +1378,7 @@
 "simple_ae.compile(loss=\"binary_crossentropy\", optimizer=keras.optimizers.SGD(lr=1.),\n",
 " metrics=[rounded_accuracy])\n",
 "history = simple_ae.fit(X_train, X_train, epochs=10,\n",
-" validation_data=[X_valid, X_valid])"
+" validation_data=(X_valid, X_valid))"
 ]
 },
 {
@@ -1545,7 +1545,7 @@
 "sparse_l1_ae.compile(loss=\"binary_crossentropy\", optimizer=keras.optimizers.SGD(lr=1.0),\n",
 " metrics=[rounded_accuracy])\n",
 "history = sparse_l1_ae.fit(X_train, X_train, epochs=10,\n",
-" validation_data=[X_valid, X_valid])"
+" validation_data=(X_valid, X_valid))"
 ]
 },
 {
@@ -1715,7 +1715,7 @@
 "sparse_kl_ae.compile(loss=\"binary_crossentropy\", optimizer=keras.optimizers.SGD(lr=1.0),\n",
 " metrics=[rounded_accuracy])\n",
 "history = sparse_kl_ae.fit(X_train, X_train, epochs=10,\n",
-" validation_data=[X_valid, X_valid])"
+" validation_data=(X_valid, X_valid))"
 ]
 },
 {
@@ -1886,7 +1886,7 @@
 "variational_ae.add_loss(K.mean(latent_loss) / 784.)\n",
 "variational_ae.compile(loss=\"binary_crossentropy\", optimizer=\"rmsprop\", metrics=[rounded_accuracy])\n",
 "history = variational_ae.fit(X_train, X_train, epochs=25, batch_size=128,\n",
-" validation_data=[X_valid, X_valid])"
+" validation_data=(X_valid, X_valid))"
 ]
 },
 {
@@ -4304,7 +4304,7 @@
 "])\n",
 "classifier.compile(loss=\"sparse_categorical_crossentropy\", optimizer=keras.optimizers.SGD(lr=0.02),\n",
 " metrics=[\"accuracy\"])\n",
-"history = classifier.fit(X_train_small, y_train_small, epochs=20, validation_data=[X_valid, y_valid])"
+"history = classifier.fit(X_train_small, y_train_small, epochs=20, validation_data=(X_valid, y_valid))"
 ]
 },
 {
@@ -4429,7 +4429,7 @@
 " optimizer=keras.optimizers.SGD(lr=0.02),\n",
 " metrics=[\"accuracy\"])\n",
 "history = pretrained_clf.fit(X_train_small, y_train_small, epochs=30,\n",
-" validation_data=[X_valid, y_valid])"
+" validation_data=(X_valid, y_valid))"
 ]
 },
 {
@@ -4493,7 +4493,7 @@
 " optimizer=keras.optimizers.SGD(lr=0.02),\n",
 " metrics=[\"accuracy\"])\n",
 "history = pretrained_clf.fit(X_train_small, y_train_small, epochs=20,\n",
-" validation_data=[X_valid, y_valid])"
+" validation_data=(X_valid, y_valid))"
 ]
 },
 {
@@ -4555,7 +4555,7 @@
 "hashing_ae.compile(loss=\"binary_crossentropy\", optimizer=keras.optimizers.SGD(lr=1.0),\n",
 " metrics=[rounded_accuracy])\n",
 "history = hashing_ae.fit(X_train, X_train, epochs=10,\n",
-" validation_data=[X_valid, X_valid])"
+" validation_data=(X_valid, X_valid))"
 ]
 },
 {
