From b5dbbf23cd253293d7b70fe6bcf027ce03327251 Mon Sep 17 00:00:00 2001
From: gabrieldemarmiesse
Date: Sun, 23 Feb 2020 16:07:31 +0000
Subject: [PATCH 1/2] Add softshrink python op

---
 tensorflow_addons/activations/softshrink.py  | 11 ++++++++
 .../activations/softshrink_test.py           | 25 +++++++++++++++++++
 2 files changed, 36 insertions(+)

diff --git a/tensorflow_addons/activations/softshrink.py b/tensorflow_addons/activations/softshrink.py
index a93faf1a98..7cb88d8a82 100644
--- a/tensorflow_addons/activations/softshrink.py
+++ b/tensorflow_addons/activations/softshrink.py
@@ -48,3 +48,14 @@ def _softshrink_grad(op, grad):
     return _activation_so.ops.addons_softshrink_grad(
         grad, op.inputs[0], op.get_attr("lower"), op.get_attr("upper")
     )
+
+
+def _softshrink_py(x, lower, upper):
+    x = tf.convert_to_tensor(x)
+    mask_lower = x < lower
+    mask_upper = upper < x
+    mask_middle = tf.logical_not(tf.logical_or(mask_lower, mask_upper))
+    mask_lower = tf.cast(mask_lower, x.dtype)
+    mask_upper = tf.cast(mask_upper, x.dtype)
+    mask_middle = tf.cast(mask_middle, x.dtype)
+    return (x * (1 - mask_middle)) - mask_lower * lower - mask_upper * upper
diff --git a/tensorflow_addons/activations/softshrink_test.py b/tensorflow_addons/activations/softshrink_test.py
index 7aa487f6eb..5338726026 100644
--- a/tensorflow_addons/activations/softshrink_test.py
+++ b/tensorflow_addons/activations/softshrink_test.py
@@ -18,6 +18,7 @@
 import numpy as np
 import tensorflow as tf
 
 from tensorflow_addons.activations import softshrink
+from tensorflow_addons.activations.softshrink import _softshrink_py
 from tensorflow_addons.utils import test_utils
 
@@ -53,6 +54,30 @@ def test_theoretical_gradients(self, dtype):
         theoretical, numerical = tf.test.compute_gradient(softshrink, [x])
         self.assertAllCloseAccordingToType(theoretical, numerical, atol=1e-4)
+
+    @parameterized.named_parameters(("float32", np.float32), ("float64", np.float64))
+    def test_same_as_py_func(self, dtype):
+        np.random.seed(1234)
+        for _ in range(20):
+            self.verify_funcs_are_equivalent(dtype)
+
+    def verify_funcs_are_equivalent(self, dtype):
+        x_np = np.random.uniform(-10, 10, size=(4, 4)).astype(dtype)
+        x = tf.convert_to_tensor(x_np)
+        lower = np.random.uniform(-10, 10)
+        upper = lower + np.random.uniform(0, 10)
+
+        with tf.GradientTape(persistent=True) as t:
+            t.watch(x)
+            y_native = softshrink(x, lower, upper)
+            y_py = _softshrink_py(x, lower, upper)
+
+        self.assertAllCloseAccordingToType(y_native, y_py, atol=1e-4)
+
+        grad_native = t.gradient(y_native, x)
+        grad_py = t.gradient(y_py, x)
+
+        self.assertAllCloseAccordingToType(grad_native, grad_py, atol=1e-4)
 
 
 if __name__ == "__main__":
     tf.test.main()
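
A note on the helper added in this first patch: the three masks partition the input into x < lower, x > upper, and the band in between, so the returned expression reduces to the usual piecewise softshrink definition (x - lower below the band, x - upper above it, 0 inside it). Below is a minimal sketch of that piecewise form for comparison, not part of the patch itself; it assumes TensorFlow 2.x, and softshrink_reference is a made-up name for this example:

    import tensorflow as tf

    def softshrink_reference(x, lower=-0.5, upper=0.5):
        # Piecewise softshrink: shift by `lower` below the band,
        # shift by `upper` above it, and zero inside [lower, upper].
        return tf.where(
            x < lower, x - lower, tf.where(x > upper, x - upper, tf.zeros_like(x))
        )

    x = tf.constant([-2.0, -0.25, 0.0, 0.25, 2.0])
    print(softshrink_reference(x).numpy())  # -> [-1.5  0.  0.  0.  1.5]
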
From e5601f8418402dfbee75a379cc5ffb140434d756 Mon Sep 17 00:00:00 2001
From: gabrieldemarmiesse
Date: Sun, 23 Feb 2020 16:11:21 +0000
Subject: [PATCH 2/2] Add check that lower does not exceed upper

---
 tensorflow_addons/activations/softshrink.py | 9 +++++++--
 1 file changed, 7 insertions(+), 2 deletions(-)

diff --git a/tensorflow_addons/activations/softshrink.py b/tensorflow_addons/activations/softshrink.py
index 7cb88d8a82..238cc19036 100644
--- a/tensorflow_addons/activations/softshrink.py
+++ b/tensorflow_addons/activations/softshrink.py
@@ -51,11 +51,16 @@ def _softshrink_grad(op, grad):
 
 
 def _softshrink_py(x, lower, upper):
-    x = tf.convert_to_tensor(x)
+    if lower > upper:
+        raise ValueError(
+            "The value of lower ({}) should not"
+            " be greater than the value"
+            " of upper ({}).".format(lower, upper)
+        )
     mask_lower = x < lower
     mask_upper = upper < x
     mask_middle = tf.logical_not(tf.logical_or(mask_lower, mask_upper))
     mask_lower = tf.cast(mask_lower, x.dtype)
     mask_upper = tf.cast(mask_upper, x.dtype)
     mask_middle = tf.cast(mask_middle, x.dtype)
-    return (x * (1 - mask_middle)) - mask_lower * lower - mask_upper * upper
+    return x * (1 - mask_middle) - mask_lower * lower - mask_upper * upper
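
With the new check, an inverted range fails fast instead of silently building overlapping masks. A quick usage sketch, for illustration only: _softshrink_py is a private helper, so importing it directly is a demonstration rather than supported API, and the snippet assumes TensorFlow 2.x with the patched tensorflow_addons installed:

    import tensorflow as tf
    from tensorflow_addons.activations.softshrink import _softshrink_py

    x = tf.constant([-2.0, 0.0, 2.0])
    print(_softshrink_py(x, lower=-0.5, upper=0.5).numpy())  # -> [-1.5  0.  1.5]

    # lower > upper now raises a ValueError up front:
    try:
        _softshrink_py(x, lower=1.0, upper=-1.0)
    except ValueError as err:
        print(err)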