Skip to content

Commit 3219512

Browse files
author
Yurii Shevchuk
committed
Remove some variables from the Adam algorithm
1 parent 0546857 commit 3219512

1 file changed

Lines changed: 7 additions & 11 deletions

File tree

neupy/algorithms/gd/adam.py

Lines changed: 7 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -86,25 +86,21 @@ def init_param_updates(self, layer, parameter):
8686
)
8787

8888
step = self.variables.step
89-
beta1 = self.beta1
90-
beta2 = self.beta2
91-
epsilon = self.epsilon
92-
9389
gradient = T.grad(self.variables.error_func, wrt=parameter)
9490

9591
first_moment = (
96-
beta1 * prev_first_moment +
97-
asfloat(1. - beta1) * gradient)
92+
self.beta1 * prev_first_moment +
93+
(1. - self.beta1) * gradient)
9894
second_moment = (
99-
beta2 * prev_second_moment +
100-
asfloat(1. - beta2) * gradient ** 2
95+
self.beta2 * prev_second_moment +
96+
(1. - self.beta2) * gradient ** 2
10197
)
10298

103-
first_moment_bias_corrected = first_moment / (1. - beta1 ** epoch)
104-
second_moment_bias_corrected = second_moment / (1. - beta2 ** epoch)
99+
first_moment_bias_corrected = first_moment / (1. - self.beta1 ** epoch)
100+
second_moment_bias_corrected = second_moment / (1. - self.beta2 ** epoch)
105101

106102
parameter_delta = first_moment_bias_corrected * (
107-
T.sqrt(second_moment_bias_corrected) + epsilon
103+
T.sqrt(second_moment_bias_corrected) + self.epsilon
108104
)
109105

110106
return [

0 commit comments

Comments (0)