Skip to content

Commit 323f246

Browse files
author
Thomas Mulc
committed
spaces commit
1 parent 969af75 commit 323f246

File tree

1 file changed

+12
-10
lines changed

1 file changed

+12
-10
lines changed

ADAG/ADAG.py

Lines changed: 12 additions & 10 deletions
Original file line number | Diff line number | Diff line change
@@ -72,17 +72,19 @@ def main():
7272
for t in range(update_window):
7373
if t != 0:
7474
with tf.control_dependencies([opt_local]): #compute gradients only if the local opt was run
75-
grads, varss = zip(*loptimizer.compute_gradients(loss,var_list=tf.local_variables()))
75+
grads, varss = zip(*loptimizer.compute_gradients(loss,
76+
var_list=tf.local_variables()))
7677
else:
77-
grads, varss = zip(*loptimizer.compute_gradients(loss,var_list=tf.local_variables()))
78+
grads, varss = zip(*loptimizer.compute_gradients(loss,
79+
var_list=tf.local_variables()))
7880
grad_list.append(grads) #add gradients to the list
7981
opt_local = loptimizer.apply_gradients(zip(grads,varss),
8082
global_step=local_step) #update local parameters
8183
grads = tf.reduce_mean(grad_list,axis=0)
8284
grads = tuple([grads[i]for i in range(len(varss))])
8385
opt = optimizer.apply_gradients(
84-
zip(grads,[ local_to_global[v] for v in varss])
85-
,global_step=global_step) #apply the gradients to variables on ps
86+
zip(grads,[ local_to_global[v] for v in varss])
87+
,global_step=global_step) #apply the gradients to variables on ps
8688

8789
# Pull param from global server
8890
with tf.control_dependencies([opt]):
@@ -106,12 +108,12 @@ def main():
106108

107109
#Monitored Training Session
108110
sess = tf.train.MonitoredTrainingSession(master=server.target,
109-
is_chief=is_chief,
110-
config=config,
111-
scaffold=scaff,
112-
hooks=hooks,
113-
save_checkpoint_secs=1,
114-
checkpoint_dir='logdir')
111+
is_chief=is_chief,
112+
config=config,
113+
scaffold=scaff,
114+
hooks=hooks,
115+
save_checkpoint_secs=1,
116+
checkpoint_dir='logdir')
115117
if is_chief:
116118
sess.run(assign_global) #Assigns chief's initial values to ps
117119
time.sleep(10) #grace period to wait on other workers before starting training

0 commit comments

Comments (0)