-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathActivationFunction.py
More file actions
97 lines (84 loc) · 2.46 KB
/
ActivationFunction.py
File metadata and controls
97 lines (84 loc) · 2.46 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
from _ActivationFunctions import (
_ELU,
_SELU,
_Absolute,
_BinaryStep,
_HardSigmoid,
_LeakyReLU,
_Linear,
_Mish,
_ParametricReLU,
_Power,
_ReLU,
_Sigmoid,
_Softplus,
_Swish,
_Tanh,
)
from Dense import Dense
from Layer import Layer
def _Activation(activation_cls):
    """
    Build a factory that pairs a `Dense` layer with an activation layer.

    Given an activation class, this returns a callable. Invoking that
    callable with layer sizes produces a fresh `(Dense, activation)` tuple,
    so network definitions can list one name per layer pair.

    Parameters
    ----------
    activation_cls : type
        The activation layer class to instantiate (e.g. `_ReLU`,
        `_Sigmoid`, `_Tanh`).

    Returns
    -------
    Callable[[int, int, ...], tuple[Dense, Layer]]
        Factory yielding a `(Dense, Activation)` pair per call.

    Examples
    --------
    >>> Sigmoid = _Activation(_Sigmoid)
    >>> dense_layer, activation_layer = Sigmoid(4, 3)
    >>> type(dense_layer).__name__
    'Dense'
    >>> type(activation_layer).__name__
    '_Sigmoid'
    """

    def pair_factory(input_size: int, output_size: int, **kwargs) -> tuple[Dense, Layer]:
        """
        Instantiate one `Dense` layer and one activation layer.

        Parameters
        ----------
        input_size : int
            Number of input neurons for the dense layer.
        output_size : int
            Number of output neurons for the dense layer.
        **kwargs : dict, optional
            Forwarded verbatim to the activation class constructor
            (the dense layer receives only the two sizes).

        Returns
        -------
        tuple of (Dense, Layer)
            The freshly constructed `(Dense, activation)` pair.

        Examples
        --------
        >>> ReLU = _Activation(_ReLU)
        >>> dense, act = ReLU(8, 4)
        >>> isinstance(dense, Dense)
        True
        >>> isinstance(act, Layer)
        True
        """
        dense = Dense(input_size, output_size)
        activation = activation_cls(**kwargs)
        return dense, activation

    return pair_factory
# Public activation factories, grouped by family.
# Each name is a callable returning a (Dense, Activation) layer pair.

# Saturating / sigmoidal
Sigmoid = _Activation(_Sigmoid)
HardSigmoid = _Activation(_HardSigmoid)
Tanh = _Activation(_Tanh)

# ReLU family
ReLU = _Activation(_ReLU)
LeakyReLU = _Activation(_LeakyReLU)
ParametricReLU = _Activation(_ParametricReLU)
ELU = _Activation(_ELU)
SELU = _Activation(_SELU)

# Smooth / self-gated
Swish = _Activation(_Swish)
Mish = _Activation(_Mish)
Softplus = _Activation(_Softplus)

# Simple / piecewise
Power = _Activation(_Power)
Linear = _Activation(_Linear)
BinaryStep = _Activation(_BinaryStep)
Absolute = _Activation(_Absolute)