1 parent 0196051 commit 84ca81b
train.py
@@ -106,9 +106,12 @@ def train(
 
     # Compile model
     loss = tf.keras.losses.BinaryCrossentropy(from_logits=False)
+    # Lower the learning rate every 5th epoch.
+    # One step means the model is optimized on one mini-batch, i.e. one iteration.
+    decay_steps = int(5 * (len(trn_gen) / bs))
     lr_schedule = tf.keras.optimizers.schedules.ExponentialDecay(
         initial_learning_rate=lr,
-        decay_steps=int(5 * 2940),  # 5 epochs
+        decay_steps=decay_steps,
         decay_rate=0.8,
     )
     opt = tf.keras.optimizers.Adam(learning_rate=lr_schedule)
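
For context, a minimal, self-contained sketch of what this schedule computes. The concrete values for lr, the dataset size, and bs are assumptions chosen for illustration (in the commit they arrive as arguments to train() and via the trn_gen generator); the decay_steps arithmetic matches the diff.

import tensorflow as tf

# Assumed stand-ins for the script's `lr`, `len(trn_gen)`, and `bs`.
lr = 1e-3          # initial learning rate (assumption)
n_samples = 94080  # plays the role of len(trn_gen); gives 2940 steps/epoch at bs=32 (assumption)
bs = 32            # mini-batch size (assumption)

steps_per_epoch = n_samples // bs        # one step == one mini-batch
decay_steps = int(5 * (n_samples / bs))  # a decay window of 5 epochs, as in the commit

lr_schedule = tf.keras.optimizers.schedules.ExponentialDecay(
    initial_learning_rate=lr,
    decay_steps=decay_steps,
    decay_rate=0.8,
)

# The schedule is a callable over the global step; sample it at epoch boundaries.
for epoch in (0, 5, 10, 15, 20):
    step = epoch * steps_per_epoch
    print(f"epoch {epoch:2d}: lr = {float(lr_schedule(step)):.6f}")

One caveat worth noting: ExponentialDecay decays smoothly at every step by default, so the rate reaches 0.8x after each 5-epoch window rather than dropping in discrete steps; a literal "lower every 5th epoch" would additionally need staircase=True.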