 from keras.layers import Embedding, SimpleRNN, Dense
 from keras.preprocessing import sequence

-from utils4e import (softmax1D, conv1D, gaussian_kernel, element_wise_product, vector_add, random_weights,
+from utils4e import (conv1D, gaussian_kernel, element_wise_product, vector_add, random_weights,
                      scalar_vector_product, map_vector, mean_squared_error_loss)


@@ -46,6 +46,9 @@ def function(self, x):
     def derivative(self, x):
         return NotImplementedError

+    def __call__(self, x):
+        return self.function(x)
+

 class Sigmoid(Activation):

@@ -56,7 +59,7 @@ def derivative(self, value):
         return value * (1 - value)


-class Relu(Activation):
+class ReLU(Activation):

     def function(self, x):
         return max(0, x)
@@ -65,13 +68,28 @@ def derivative(self, value):
         return 1 if value > 0 else 0


-class Elu(Activation):
+class ELU(Activation):
+
+    def __init__(self, alpha=0.01):
+        self.alpha = alpha

-    def function(self, x, alpha=0.01):
-        return x if x > 0 else alpha * (np.exp(x) - 1)
+    def function(self, x):
+        return x if x > 0 else self.alpha * (np.exp(x) - 1)

-    def derivative(self, value, alpha=0.01):
-        return 1 if value > 0 else alpha * np.exp(value)
+    def derivative(self, value):
+        return 1 if value > 0 else self.alpha * np.exp(value)
+
+
+class LeakyReLU(Activation):
+
+    def __init__(self, alpha=0.01):
+        self.alpha = alpha
+
+    def function(self, x):
+        return max(x, self.alpha * x)
+
+    def derivative(self, value):
+        return 1 if value > 0 else self.alpha


 class Tanh(Activation):
@@ -83,13 +101,31 @@ def derivative(self, value):
         return 1 - (value ** 2)


-class LeakyRelu(Activation):
+class SoftMax(Activation):
+
+    def function(self, x):
+        return np.exp(x) / np.sum(np.exp(x))
+
+    def derivative(self, x):
+        return np.ones_like(x)
+
+
+class SoftPlus(Activation):

-    def function(self, x, alpha=0.01):
-        return x if x > 0 else alpha * x
+    def function(self, x):
+        return np.log(1. + np.exp(x))
+
+    def derivative(self, x):
+        return 1. / (1. + np.exp(-x))

-    def derivative(self, value, alpha=0.01):
-        return 1 if value > 0 else alpha
+
+class Linear(Activation):
+
+    def function(self, x):
+        return x
+
+    def derivative(self, x):
+        return np.ones_like(x)


 class InputLayer(Layer):
@@ -112,9 +148,9 @@ class OutputLayer(Layer):
     def __init__(self, size=3):
         super().__init__(size)

-    def forward(self, inputs):
+    def forward(self, inputs, activation=SoftMax):
         assert len(self.nodes) == len(inputs)
-        res = softmax1D(inputs)
+        res = activation().function(inputs)
         for node, val in zip(self.nodes, res):
             node.value = val
         return res
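For reference, a minimal standalone sketch of how the refactored activation interface is meant to be used once this diff is applied: the new Activation.__call__ makes instances directly callable, and per-activation hyperparameters such as alpha now live on the instance instead of being passed to every function/derivative call. The classes below are copied from the hunks above so the snippet runs on its own with just NumPy; the usage lines at the bottom are illustrative only and are not part of the patch.

import numpy as np


class Activation:
    # Base class as extended in this diff: __call__ delegates to function,
    # so an activation instance can be used like a plain callable.

    def function(self, x):
        raise NotImplementedError

    def derivative(self, x):
        raise NotImplementedError

    def __call__(self, x):
        return self.function(x)


class LeakyReLU(Activation):
    # As added in this diff: slope of alpha for negative inputs.

    def __init__(self, alpha=0.01):
        self.alpha = alpha

    def function(self, x):
        return max(x, self.alpha * x)

    def derivative(self, value):
        return 1 if value > 0 else self.alpha


class SoftMax(Activation):
    # As added in this diff: normalizes a whole vector of inputs.

    def function(self, x):
        return np.exp(x) / np.sum(np.exp(x))

    def derivative(self, x):
        return np.ones_like(x)


# Hyperparameters are set once on the instance; calls go through __call__.
leaky = LeakyReLU(alpha=0.1)
print(leaky(-2.0))             # -0.2
print(leaky.derivative(-2.0))  # 0.1

# Mirrors what the updated OutputLayer.forward now does internally:
# res = activation().function(inputs)
print(SoftMax()([1.0, 2.0, 3.0]))  # probabilities summing to 1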