From 19605f2bb5fbc662992c6fa038cf38ca7117616f Mon Sep 17 00:00:00 2001
From: gabrieldemarmiesse
Date: Sun, 23 Feb 2020 13:20:54 +0000
Subject: [PATCH 1/2] Added py implementation for mish

---
 tensorflow_addons/activations/mish.py      |  4 ++++
 tensorflow_addons/activations/mish_test.py | 25 ++++++++++++++++++++++
 2 files changed, 29 insertions(+)

diff --git a/tensorflow_addons/activations/mish.py b/tensorflow_addons/activations/mish.py
index b862b122cb..2a5e5a8f05 100644
--- a/tensorflow_addons/activations/mish.py
+++ b/tensorflow_addons/activations/mish.py
@@ -42,3 +42,7 @@ def mish(x: types.TensorLike) -> tf.Tensor:
 @tf.RegisterGradient("Addons>Mish")
 def _mish_grad(op, grad):
     return _activation_so.ops.addons_mish_grad(grad, op.inputs[0])
+
+
+def _mish_py(x):
+    return x * tf.math.tanh(tf.math.softplus(x))
diff --git a/tensorflow_addons/activations/mish_test.py b/tensorflow_addons/activations/mish_test.py
index 543e84a1b3..1bc69ba3f7 100644
--- a/tensorflow_addons/activations/mish_test.py
+++ b/tensorflow_addons/activations/mish_test.py
@@ -18,6 +18,7 @@
 import numpy as np
 import tensorflow as tf
 from tensorflow_addons.activations import mish
+from tensorflow_addons.activations.mish import _mish_py
 from tensorflow_addons.utils import test_utils
 
 
@@ -42,6 +43,30 @@ def test_theoretical_gradients(self, dtype):
         theoretical, numerical = tf.test.compute_gradient(mish, [x])
         self.assertAllCloseAccordingToType(theoretical, numerical, atol=1e-4)
 
+    @parameterized.named_parameters(
+        ("float16", np.float16), ("float32", np.float32), ("float64", np.float64)
+    )
+    def test_same_as_py_func(self, dtype):
+        np.random.seed(1234)
+        for _ in range(20):
+            self.verify_funcs_are_equivalent(dtype)
+
+    def verify_funcs_are_equivalent(self, dtype):
+        x_np = np.random.uniform(-10, 10, size=(4, 4)).astype(dtype)
+        x = tf.convert_to_tensor(x_np)
+
+        with tf.GradientTape(persistent=True) as t:
+            t.watch(x)
+            y_native = mish(x)
+            y_py = _mish_py(x)
+
+        self.assertAllCloseAccordingToType(y_native, y_py, atol=1e-4)
+
+        grad_native = t.gradient(y_native, x)
+        grad_py = t.gradient(y_py, x)
+
+        self.assertAllCloseAccordingToType(grad_native, grad_py, atol=1e-4)
+
 
 if __name__ == "__main__":
     tf.test.main()

From 041fbded81625908eaa697b4f3e243c1a80e07f3 Mon Sep 17 00:00:00 2001
From: gabrieldemarmiesse
Date: Sun, 23 Feb 2020 15:18:49 +0000
Subject: [PATCH 2/2] Removed float16

---
 tensorflow_addons/activations/mish_test.py | 4 +---
 1 file changed, 1 insertion(+), 3 deletions(-)

diff --git a/tensorflow_addons/activations/mish_test.py b/tensorflow_addons/activations/mish_test.py
index 1bc69ba3f7..6c2a8f7ab4 100644
--- a/tensorflow_addons/activations/mish_test.py
+++ b/tensorflow_addons/activations/mish_test.py
@@ -43,9 +43,7 @@ def test_theoretical_gradients(self, dtype):
         theoretical, numerical = tf.test.compute_gradient(mish, [x])
         self.assertAllCloseAccordingToType(theoretical, numerical, atol=1e-4)
 
-    @parameterized.named_parameters(
-        ("float16", np.float16), ("float32", np.float32), ("float64", np.float64)
-    )
+    @parameterized.named_parameters(("float32", np.float32), ("float64", np.float64))
     def test_same_as_py_func(self, dtype):
         np.random.seed(1234)
         for _ in range(20):
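
Side note (not part of the patches above): a minimal standalone sketch of the pure-Python
path the first patch adds, for trying it outside the test suite. It reproduces _mish_py
from the diff; the input values and the printed output are illustrative only.

    import tensorflow as tf

    def _mish_py(x):
        # mish(x) = x * tanh(softplus(x)), where softplus(x) = ln(1 + exp(x))
        return x * tf.math.tanh(tf.math.softplus(x))

    x = tf.constant([-1.0, 0.0, 1.0])
    print(_mish_py(x).numpy())  # approx. [-0.3034, 0.0, 0.8651]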