path: root/tensorflow/python/keras/backend_test.py
author     A. Unique TensorFlower <gardener@tensorflow.org>  2018-07-24 17:07:15 -0700
committer  TensorFlower Gardener <gardener@tensorflow.org>   2018-07-24 17:11:24 -0700
commit     0cf2c612e5e6ff8c5026011e8186056801def747 (patch)
tree       12793c95ad2aaa21bcddb466904ebd936feb326b /tensorflow/python/keras/backend_test.py
parent     4c161d7306eb934232e3fe65de2c31c3bb7cf875 (diff)
Keras ReLU Consolidation
Consolidate the functionality of the ThresholdedReLU and LeakyReLU layers into the ReLU layer.

PiperOrigin-RevId: 205917439
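In practical terms, the consolidation means a single ReLU layer can express what LeakyReLU and ThresholdedReLU previously expressed as separate layers. The sketch below is illustrative only and is not part of this commit; it assumes the consolidated keras.layers.ReLU accepts negative_slope, threshold, and max_value arguments mirroring the alpha/threshold/max_value parameters exercised by the backend test in this diff.

# Illustrative sketch, not part of this commit. Assumes keras.layers.ReLU
# exposes negative_slope, threshold, and max_value after the consolidation.
from tensorflow.python import keras

# LeakyReLU(alpha=0.5): slope of 0.5 for negative inputs.
leaky = keras.layers.LeakyReLU(alpha=0.5)
leaky_as_relu = keras.layers.ReLU(negative_slope=0.5)        # assumed equivalent

# ThresholdedReLU(theta=3.0): zero out values at or below the threshold.
thresholded = keras.layers.ThresholdedReLU(theta=3.0)
thresholded_as_relu = keras.layers.ReLU(threshold=3.0)       # assumed equivalent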
Diffstat (limited to 'tensorflow/python/keras/backend_test.py')
-rw-r--r--  tensorflow/python/keras/backend_test.py  61
1 file changed, 61 insertions(+), 0 deletions(-)
diff --git a/tensorflow/python/keras/backend_test.py b/tensorflow/python/keras/backend_test.py
index 36478ea089..40e7910061 100644
--- a/tensorflow/python/keras/backend_test.py
+++ b/tensorflow/python/keras/backend_test.py
@@ -23,6 +23,7 @@ import scipy.sparse
from tensorflow.python import keras
from tensorflow.python.framework import dtypes
+from tensorflow.python.framework import ops
from tensorflow.python.framework import sparse_tensor
from tensorflow.python.ops import variables
from tensorflow.python.platform import test
@@ -490,6 +491,66 @@ class BackendLinearAlgebraTest(test.TestCase):
input_shape_a=(4, 7),
input_shape_b=(4, 7))
+  def test_relu(self):
+    x = ops.convert_to_tensor([[-4, 0], [2, 7]], 'float32')
+    with self.test_session():
+      # standard relu
+      relu_op = keras.backend.relu(x)
+      self.assertAllClose(keras.backend.eval(relu_op), [[0, 0], [2, 7]])
+
+      # alpha
+      relu_op = keras.backend.relu(x, alpha=0.5)
+      self.assertAllClose(keras.backend.eval(relu_op), [[-2, 0], [2, 7]])
+
+      # max_value < some elements
+      relu_op = keras.backend.relu(x, max_value=5)
+      self.assertAllClose(keras.backend.eval(relu_op), [[0, 0], [2, 5]])
+
+      # nn.relu6 used
+      relu_op = keras.backend.relu(x, max_value=6)
+      self.assertTrue('Relu6' in relu_op.name)  # uses tf.nn.relu6
+      self.assertAllClose(keras.backend.eval(relu_op), [[0, 0], [2, 6]])
+
+      # max_value > 6
+      relu_op = keras.backend.relu(x, max_value=10)
+      self.assertAllClose(keras.backend.eval(relu_op), [[0, 0], [2, 7]])
+
+      # max_value is float
+      relu_op = keras.backend.relu(x, max_value=4.3)
+      self.assertAllClose(keras.backend.eval(relu_op), [[0, 0], [2, 4.3]])
+
+      # max_value == 0
+      relu_op = keras.backend.relu(x, max_value=0)
+      self.assertAllClose(keras.backend.eval(relu_op), [[0, 0], [0, 0]])
+
+      # alpha and max_value
+      relu_op = keras.backend.relu(x, alpha=0.25, max_value=3)
+      self.assertAllClose(keras.backend.eval(relu_op), [[-1, 0], [2, 3]])
+
+      # threshold
+      relu_op = keras.backend.relu(x, threshold=3)
+      self.assertAllClose(keras.backend.eval(relu_op), [[0, 0], [0, 7]])
+
+      # threshold is float
+      relu_op = keras.backend.relu(x, threshold=1.5)
+      self.assertAllClose(keras.backend.eval(relu_op), [[0, 0], [2, 7]])
+
+      # threshold is negative
+      relu_op = keras.backend.relu(x, threshold=-5)
+      self.assertAllClose(keras.backend.eval(relu_op), [[-4, 0], [2, 7]])
+
+      # threshold and max_value
+      relu_op = keras.backend.relu(x, threshold=3, max_value=5)
+      self.assertAllClose(keras.backend.eval(relu_op), [[0, 0], [0, 5]])
+
+      # threshold and alpha
+      relu_op = keras.backend.relu(x, alpha=0.25, threshold=4)
+      self.assertAllClose(keras.backend.eval(relu_op), [[-2, -1], [-0.5, 7]])
+
+      # threshold, alpha, and max_value
+      relu_op = keras.backend.relu(x, alpha=0.25, threshold=4, max_value=5)
+      self.assertAllClose(keras.backend.eval(relu_op), [[-2, -1], [-0.5, 5]])
+
class BackendShapeOpsTest(test.TestCase):
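The expected values asserted in test_relu above all follow one piecewise rule. The NumPy sketch below is a reference restatement of that rule, not TensorFlow's implementation; the function name and its exact formulation are illustrative, but it reproduces every expectation in the test.

import numpy as np


def relu_reference(x, alpha=0.0, max_value=None, threshold=0.0):
  """Reference semantics for the expectations in test_relu above.

  Illustrative sketch only; this is not TensorFlow's implementation.
  """
  x = np.asarray(x, dtype=np.float32)
  # Values above the threshold pass through; everything else is scaled by
  # alpha, measured from the threshold (LeakyReLU behaviour when threshold=0).
  y = np.where(x > threshold, x, alpha * (x - threshold))
  if max_value is not None:
    y = np.minimum(y, max_value)  # cap outputs at max_value
  return y


# relu_reference([[-4, 0], [2, 7]], alpha=0.25, threshold=4, max_value=5)
# -> [[-2, -1], [-0.5, 5]], matching the final assertion in test_relu.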