path: root/tensorflow/python/kernel_tests/relu_op_test.py
author Benoit Steiner <bsteiner@google.com> 2016-09-13 10:02:28 -0800
committer TensorFlower Gardener <gardener@tensorflow.org> 2016-09-13 11:17:59 -0700
commit d92ba3066e79258a3096ee72c105b4816f196425 (patch)
tree 3e158a958fc89534a7b9f4febf77c81579e7efe7 /tensorflow/python/kernel_tests/relu_op_test.py
parent df5ab111fff944adf19dd0ac3d007c798540ad45 (diff)
The Elu activation function supports all the number types (like the Relu and
Relu6 activations): made sure this is properly described in the op definition.
Change: 133020769
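A minimal standalone sketch of the behavior the widened test coverage exercises for Elu, assuming the tf.nn.elu and tf.Session APIs of this TensorFlow version; the input array below mirrors the one used in the tests, and float16 support on CPU is the assumption being checked:

import numpy as np
import tensorflow as tf

# Same values the testNumbers cases feed to the ops, cast to float16.
x = np.array([[-9, 7, -5, 3, -1], [1, -3, 5, -7, 9]]).astype(np.float16)
# Reference definition: elu(x) = x for x > 0, exp(x) - 1 otherwise.
expected = np.where(x > 0, x, np.exp(x.astype(np.float64)) - 1).astype(np.float16)
with tf.Session() as sess:
    y = sess.run(tf.nn.elu(tf.constant(x)))
# Loose tolerance because float16 carries roughly three decimal digits.
print(np.allclose(y, expected, atol=1e-2))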
Diffstat (limited to 'tensorflow/python/kernel_tests/relu_op_test.py')
-rw-r--r--  tensorflow/python/kernel_tests/relu_op_test.py  10
1 file changed, 5 insertions, 5 deletions
diff --git a/tensorflow/python/kernel_tests/relu_op_test.py b/tensorflow/python/kernel_tests/relu_op_test.py
index d12fd32fe2..b644f2a60a 100644
--- a/tensorflow/python/kernel_tests/relu_op_test.py
+++ b/tensorflow/python/kernel_tests/relu_op_test.py
@@ -43,11 +43,11 @@ class ReluTest(tf.test.TestCase):
self.assertShapeEqual(np_relu, relu)
def testNumbers(self):
- for t in [np.int32, np.int64, np.float32, np.float64]:
+ for t in [np.int32, np.int64, np.float16, np.float32, np.float64]:
self._testRelu(
np.array([[-9, 7, -5, 3, -1], [1, -3, 5, -7, 9]]).astype(t),
use_gpu=False)
- if t in [np.float32, np.float64]:
+ if t in [np.float16, np.float32, np.float64]:
self._testRelu(
np.array([[-9, 7, -5, 3, -1], [1, -3, 5, -7, 9]]).astype(t),
use_gpu=True)
@@ -160,11 +160,11 @@ class Relu6Test(tf.test.TestCase):
self.assertShapeEqual(np_relu6, relu6)
def testNumbers(self):
- for t in [np.int32, np.int64, np.float32, np.float64]:
+ for t in [np.int32, np.int64, np.float16, np.float32, np.float64]:
self._testRelu6(
np.array([[-9, 7, -5, 3, -1], [1, -3, 5, -7, 9]]).astype(t),
use_gpu=False)
- if t in [np.float, np.double]:
+ if t in [np.float16, np.float, np.double]:
self._testRelu6(
np.array([[-9, 7, -5, 3, -1], [1, -3, 5, -7, 9]]).astype(t),
use_gpu=True)
@@ -228,7 +228,7 @@ class EluTest(tf.test.TestCase):
self.assertShapeEqual(np_elu, elu)
def testNumbers(self):
- for t in [np.float32, np.float64]:
+ for t in [np.float16, np.float32, np.float64]:
self._testElu(
np.array([[-9, 7, -5, 3, -1], [1, -3, 5, -7, 9]]).astype(t),
use_gpu=False)