Skip to content

Commit eb56b55

Browse files
authored
Add LeakyReLU support (#9)
* Update lock * Some updates for LeakyReLU * Revert "Update lock" This reverts commit 8c30750. * support custom alpha values for leakyrelu * support custom alpha values for leakyrelu * clean up * clean up * clean up, bump version * bump * bump * bump * bump
1 parent f4f8ad6 commit eb56b55

File tree

6 files changed

+43
-14
lines changed

6 files changed

+43
-14
lines changed

README.md

+2-1
Original file line numberDiff line numberDiff line change
@@ -19,6 +19,7 @@ The goal of this tool is to provide a quick and easy way to execute Keras models
1919
- Works with all supported layers
2020
- Activations:
2121
- ReLU
22+
- LeakyReLU (supports custom alphas)
2223
- Sigmoid
2324
- Softmax
2425
- Tanh
@@ -84,7 +85,7 @@ predict([np.random.rand(3).astype(np.float32)])
8485

8586
## Dependencies
8687
Thanks to [@apiad](https://github.com/apiad) you can now use [Poetry](https://github.com/python-poetry/poetry) to install all the needed dependencies for this tool! However the requirements are a pretty short list:
87-
- It seems most versions of TensorFlow that include Keras work perfectly fine. Tested from 1.14 to 2.1.0 using Actions and no issues have occurred. **(Make sure you use implementation 2/v3 with GRU layers if not on TF 2.x)**
88+
- It seems most versions of TensorFlow that include Keras work perfectly fine. Tested from 1.14 to 2.2 using Actions and no issues have occurred. **(Make sure you use implementation 2/v3 with GRU layers if not on TF 2.x)**
8889
- **Important**: You must create your models with tf.keras currently (not keras)
8990
- Python >= 3.6 (for the glorious f-strings!)
9091

konverter/__init__.py

+10-3
Original file line numberDiff line numberDiff line change
@@ -76,7 +76,11 @@ def build_konverted_model(self):
7676
if layer.info.activation.needs_function:
7777
lyr_w_act = f'l{idx} = {layer.info.activation.alias.lower()}(l{idx})'
7878
else: # eg. tanh or relu
79-
lyr_w_act = layer.info.activation.string.lower().format(f'l{idx}')
79+
if layer.info.activation.alpha is None:
80+
lyr_w_act = layer.info.activation.string.lower().format(f'l{idx}')
81+
else: # custom alpha for leakyrelu
82+
lyr_w_act = layer.info.activation.string.lower().format(f'l{idx}', layer.info.activation.alpha)
83+
8084
lyr_w_act = f'l{idx} = {lyr_w_act}'
8185
model_builder['model'].append(lyr_w_act)
8286

@@ -212,8 +216,11 @@ def load_model(self):
212216
if isinstance(self.input_model, str):
213217
self.input_model = self.input_model.replace('\\', '/')
214218
if os.path.exists(self.input_model):
215-
models = importlib.import_module('tensorflow.keras.models')
216-
self.model = models.load_model(self.input_model)
219+
load_model = importlib.import_module('tensorflow.keras.models').load_model # only import when needed
220+
221+
# FIXME: for some reason tf 2 can't load models with LeakyReLU without custom_objects
222+
custom_leakyrelu = importlib.import_module('tensorflow.keras.layers').LeakyReLU
223+
self.model = load_model(self.input_model, custom_objects={'LeakyReLU': custom_leakyrelu})
217224
else:
218225
raise Exception(error('The supplied model file path doesn\'t exist!', ret=True))
219226
else:

konverter/__main__.py

+2-1
Original file line numberDiff line numberDiff line change
@@ -3,9 +3,10 @@
33
import konverter
44
from konverter.utils.general import success, info, warning, error, COLORS, color_logo, blue_grad
55

6-
KONVERTER_VERSION = "v0.2.1" # fixme: unify this
6+
KONVERTER_VERSION = "v0.2.2" # fixme: unify this
77
KONVERTER_LOGO_COLORED = color_logo(KONVERTER_VERSION)
88

9+
910
class KonverterCLI:
1011
def __init__(self, args):
1112
self.args = args

konverter/utils/konverter_support.py

+7
Original file line numberDiff line numberDiff line change
@@ -23,6 +23,10 @@ def get_class_from_name(self, name, search_in):
2323
for attr_class in attrs:
2424
if name == attr_class.name:
2525
return attr_class() # new instance of class
26+
if search_in == 'activations': # not found
27+
base = Activations.Unsupported()
28+
base.name = name
29+
return base
2630
return False
2731

2832
def in_models(self, name):
@@ -94,6 +98,9 @@ def get_layer_info(self, layer):
9498
raise Exception('None or multiple activations?')
9599

96100
if layer_class.info.has_activation:
101+
if layer_class.info.activation.name == 'keras.layers.LeakyReLU': # set alpha
102+
layer_class.info.activation.alpha = round(float(layer.activation.alpha), 5)
103+
97104
# check layer activation against this layer's supported activations
98105
if layer_class.info.activation.name in self.attr_map(layer_class.supported_activations, 'name'):
99106
layer_class.info.supported = True

konverter/utils/model_attributes.py

+21-8
Original file line numberDiff line numberDiff line change
@@ -21,45 +21,58 @@ class Activations:
2121
ex. activation in string format
2222
To add new activations, use the code_converter function and add them here!
2323
"""
24-
class _BaseAcivation:
24+
25+
class _BaseActivation:
2526
name = None
2627
alias = None
2728
string = None
29+
alpha = None
2830
needs_function = True
2931

30-
class ReLU(_BaseAcivation):
32+
class ReLU(_BaseActivation):
3133
name = 'keras.activations.relu'
3234
alias = 'relu'
3335
string = 'np.maximum(0, {})'
3436
needs_function = False
3537

36-
class Sigmoid(_BaseAcivation):
38+
class LeakyReLU(_BaseActivation):
39+
name = 'keras.layers.LeakyReLU'
40+
alias = 'LeakyReLU'
41+
string = 'np.where({0} > 0, {0}, {0} * {1})'
42+
alpha = 0.3 # default from tensorflow
43+
needs_function = False
44+
45+
class Sigmoid(_BaseActivation):
3746
name = 'keras.activations.sigmoid'
3847
alias = 'sigmoid'
3948
string = 'def sigmoid(x):\n\treturn 1 / (1 + np.exp(-x))'
4049

41-
class Softmax(_BaseAcivation):
50+
class Softmax(_BaseActivation):
4251
name = 'keras.activations.softmax'
4352
alias = 'softmax'
4453
string = 'def softmax(x):\n\treturn np.exp(x) / np.sum(np.exp(x), axis=0)'
4554

46-
class Tanh(_BaseAcivation):
55+
class Tanh(_BaseActivation):
4756
name = 'keras.activations.tanh'
4857
alias = 'tanh'
4958
string = 'np.tanh({})' # don't define a function if you don't want your string added to file as a function
5059
needs_function = False
5160

52-
class Linear(_BaseAcivation):
61+
class Linear(_BaseActivation):
5362
name = 'keras.activations.linear'
5463
alias = 'linear'
5564

65+
class Unsupported(_BaseActivation): # propagated with act info and returned to Konverter if act is unsupported
66+
pass
67+
5668

5769
class Layers:
5870
"""
5971
The class that contains the supported layers and any information we will need to generate models
6072
ex. function in string format
6173
To add new layers, use the code_converter function and add them here!
6274
"""
75+
6376
class _BaseLayer:
6477
name = None
6578
alias = None
@@ -70,7 +83,7 @@ class _BaseLayer:
7083
class Dense(_BaseLayer):
7184
name = 'keras.layers.Dense'
7285
alias = 'dense'
73-
supported_activations = [Activations.ReLU, Activations.Sigmoid, Activations.Softmax, Activations.Tanh, Activations.Linear]
86+
supported_activations = [Activations.ReLU, Activations.Sigmoid, Activations.Softmax, Activations.Tanh, Activations.Linear, Activations.LeakyReLU]
7487
string = 'np.dot({}, w[{}]) + b[{}]' # n0 is the previous layer, n1 is weight, n2 is bias
7588

7689
class Dropout(_BaseLayer):
@@ -128,7 +141,7 @@ class BaseLayerInfo:
128141
weights = None
129142
biases = None
130143

131-
gamma = None
144+
gamma = None # for BN
132145
beta = None
133146
mean = None
134147
std = None

pyproject.toml

+1-1
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
[tool.poetry]
22
name = "keras-konverter"
3-
version = "0.2.1"
3+
version = "0.2.2"
44
description = "A tool to convert simple Keras models to pure Python + NumPy"
55
readme = "README.md"
66
repository = "https://github.com/ShaneSmiskol/Konverter"

0 commit comments

Comments
 (0)