aboutsummaryrefslogtreecommitdiffhomepage
path: root/tensorflow/python/kernel_tests/relu_op_test.py
diff options
context:
space:
mode:
Diffstat (limited to 'tensorflow/python/kernel_tests/relu_op_test.py')
-rw-r--r--  tensorflow/python/kernel_tests/relu_op_test.py  91
1 files changed, 91 insertions, 0 deletions
diff --git a/tensorflow/python/kernel_tests/relu_op_test.py b/tensorflow/python/kernel_tests/relu_op_test.py
index 63ac743843..8cd1f52d80 100644
--- a/tensorflow/python/kernel_tests/relu_op_test.py
+++ b/tensorflow/python/kernel_tests/relu_op_test.py
@@ -320,6 +320,97 @@ class EluTest(test.TestCase):
self.assertLess(err, 1e-6)
class SeluTest(test.TestCase):
  """Tests for the scaled exponential linear unit (selu) op.

  Each test compares nn_ops.selu (and its first/second gradients) against a
  NumPy reference implementation, across float16/32/64 and CPU/GPU.
  """

  def _npSelu(self, np_features):
    """NumPy reference: scale*x for x >= 0, scale*alpha*(exp(x)-1) for x < 0."""
    scale = 1.0507009873554804934193349852946
    # Precomputed product scale * alpha, where alpha ~= 1.6732632423543772.
    scale_alpha = 1.7580993408473768599402175208123
    negative_branch = scale_alpha * (np.exp(np_features) - 1)
    positive_branch = scale * np_features
    return np.where(np_features < 0, negative_branch, positive_branch)

  def testNpSelu(self):
    """Sanity-checks the NumPy reference against hand-computed values."""
    inputs = np.array(
        [[-0.9, 0.7, -0.5, 0.3, -0.1], [0.1, -0.3, 0.5, -0.7, 0.9]])
    expected = np.array(
        [[-1.0433095, 0.73549069, -0.6917582, 0.3152103 , -0.16730527],
         [0.1050701 , -0.45566732, 0.5253505, -0.88505305, 0.9456309]])
    self.assertAllClose(expected, self._npSelu(inputs))

  def _testSelu(self, np_features, use_gpu=False):
    """Runs nn_ops.selu on np_features and compares it to the reference."""
    expected = self._npSelu(np_features)
    with self.test_session(use_gpu=use_gpu):
      selu_op = nn_ops.selu(np_features)
      computed = selu_op.eval()
    self.assertAllClose(expected, computed)
    self.assertShapeEqual(expected, selu_op)

  def testNumbers(self):
    """Checks selu for several dtypes on both CPU and GPU."""
    for dtype in [np.float16, np.float32, np.float64]:
      features = np.array([[-9, 7, -5, 3, -1], [1, -3, 5, -7, 9]]).astype(
          dtype)
      for on_gpu in (False, True):
        self._testSelu(features, use_gpu=on_gpu)

  def testGradientFloat32(self):
    """Numerically checks the first gradient of selu in float32."""
    with self.test_session():
      values = [[-0.9, -0.7, -0.5, -0.3, -0.1], [0.1, 0.3, 0.5, 0.7, 0.9]]
      inputs = constant_op.constant(values, name="x")
      outputs = nn_ops.selu(inputs, name="selu")
      init_value = np.asarray(values, dtype=np.float32, order="F")
      err = gradient_checker.compute_gradient_error(
          inputs, [2, 5], outputs, [2, 5], x_init_value=init_value)
      print("selu (float32) gradient err = ", err)
    self.assertLess(err, 1e-4)

  def testGradientFloat64(self):
    """Numerically checks the first gradient of selu in float64."""
    with self.test_session():
      values = [[-0.9, -0.7, -0.5, -0.3, -0.1], [0.1, 0.3, 0.5, 0.7, 0.9]]
      inputs = constant_op.constant(values, dtype=dtypes.float64, name="x")
      outputs = nn_ops.selu(inputs, name="selu")
      init_value = np.asarray(values, dtype=np.float64, order="F")
      err = gradient_checker.compute_gradient_error(
          inputs, [2, 5], outputs, [2, 5], x_init_value=init_value)
      print("selu (float64) gradient err = ", err)
    self.assertLess(err, 1e-6)

  def testGradGradFloat32(self):
    """Numerically checks the second gradient of selu in float32."""
    with self.test_session():
      flat_values = [-0.9, -0.7, -0.5, -0.3, -0.1, 0.1, 0.3, 0.5, 0.7, 0.9]
      inputs = constant_op.constant(flat_values, shape=[2, 5], name="x")
      outputs = nn_ops.selu(inputs, name="selu")
      grads = gradients_impl.gradients(outputs, inputs)
      init_value = np.asarray(
          [[-0.9, -0.7, -0.5, -0.3, -0.1], [0.1, 0.3, 0.5, 0.7, 0.9]],
          dtype=np.float32,
          order="F")
      err = gradient_checker.compute_gradient_error(
          inputs, [2, 5], grads[0], [2, 5], x_init_value=init_value)
      print("selu (float32) gradient of gradient err = ", err)
    self.assertLess(err, 1e-4)

  def testGradGradFloat64(self):
    """Numerically checks the second gradient of selu in float64."""
    with self.test_session():
      flat_values = [-0.9, -0.7, -0.5, -0.3, -0.1, 0.1, 0.3, 0.5, 0.7, 0.9]
      inputs = constant_op.constant(
          flat_values, shape=[2, 5], dtype=dtypes.float64, name="x")
      outputs = nn_ops.selu(inputs, name="selu")
      grads = gradients_impl.gradients(outputs, inputs)
      init_value = np.asarray(
          [[-0.9, -0.7, -0.5, -0.3, -0.1], [0.1, 0.3, 0.5, 0.7, 0.9]],
          dtype=np.float64,
          order="F")
      err = gradient_checker.compute_gradient_error(
          inputs, [2, 5], grads[0], [2, 5], x_init_value=init_value)
      print("selu (float64) gradient of gradient err = ", err)
    self.assertLess(err, 1e-6)

class CreluTest(test.TestCase):
def testCreluShape(self):