author     Francois Chollet <fchollet@google.com>          2018-09-14 12:36:51 -0700
committer  TensorFlower Gardener <gardener@tensorflow.org>  2018-09-14 12:42:08 -0700
commit     9da83f0701bcece95372ee8da09f886dfd2fa2a1 (patch)
tree       3fa697c3b5c13b490daffdf611558c92ec0998a7
parent     cba65fbcecb828a3e6e7743f7e784c7d08d37ffb (diff)
Make ReLU layer use nn.leaky_relu when appropriate.
PiperOrigin-RevId: 213026080
-rw-r--r--  tensorflow/python/keras/backend.py                        6
-rw-r--r--  tensorflow/python/keras/backend_test.py                   3
-rw-r--r--  tensorflow/python/keras/layers/advanced_activations.py   12
3 files changed, 12 insertions, 9 deletions
diff --git a/tensorflow/python/keras/backend.py b/tensorflow/python/keras/backend.py
index 529b07dc12..5e1722ba20 100644
--- a/tensorflow/python/keras/backend.py
+++ b/tensorflow/python/keras/backend.py
@@ -3459,14 +3459,18 @@ def relu(x, alpha=0., max_value=None, threshold=0):
   Returns:
       A tensor.
   """
-  clip_max = max_value is not None
 
   if alpha != 0.:
+    if max_value is None and threshold == 0:
+      return nn.leaky_relu(x, alpha=alpha)
+
     if threshold != 0:
       negative_part = nn.relu(-x + threshold)
     else:
       negative_part = nn.relu(-x)
 
+  clip_max = max_value is not None
+
   if threshold != 0:
     # computes x for x > threshold else 0
     x = x * math_ops.cast(math_ops.greater(x, threshold), floatx())
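
Note: the early return added above means that when only a negative slope is requested (alpha != 0, no max_value, threshold == 0), keras.backend.relu lowers to a single fused LeakyRelu op instead of composing relu, negative_part, and clipping. A minimal sketch of that dispatch, assuming the public tf.nn.leaky_relu and tf.keras.backend.relu APIs (illustration only, not the library code):

import tensorflow as tf

def relu_sketch(x, alpha=0., max_value=None, threshold=0):
  """Illustrative only: mirrors the dispatch added in backend.relu."""
  if alpha != 0. and max_value is None and threshold == 0:
    # Fast path: one LeakyRelu op computes alpha * x for x < 0, x otherwise.
    return tf.nn.leaky_relu(x, alpha=alpha)
  # General case: fall back to the full composition in keras.backend.relu.
  return tf.keras.backend.relu(x, alpha=alpha, max_value=max_value,
                               threshold=threshold)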
diff --git a/tensorflow/python/keras/backend_test.py b/tensorflow/python/keras/backend_test.py
index 2f271c4f50..ab71589940 100644
--- a/tensorflow/python/keras/backend_test.py
+++ b/tensorflow/python/keras/backend_test.py
@@ -522,8 +522,9 @@ class BackendLinearAlgebraTest(test.TestCase):
       relu_op = keras.backend.relu(x)
       self.assertAllClose(keras.backend.eval(relu_op), [[0, 0], [2, 7]])
 
-      # alpha
+      # alpha (leaky relu used)
       relu_op = keras.backend.relu(x, alpha=0.5)
+      self.assertTrue('LeakyRelu' in relu_op.name)
       self.assertAllClose(keras.backend.eval(relu_op), [[-2, 0], [2, 7]])
 
       # max_value < some elements
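
Note: the added assertion checks the op name of the returned tensor to confirm the fast path was taken. The same check can be reproduced by hand in graph mode (tensor names are not available on eager tensors); a small sketch using the same keras.backend entry point:

import tensorflow as tf
from tensorflow.python.keras import backend as K

# Build in graph mode so the resulting tensor carries an op name.
with tf.Graph().as_default():
  x = tf.constant([[-4., -2.], [2., 7.]])
  relu_op = K.relu(x, alpha=0.5)
  print('LeakyRelu' in relu_op.name)  # expected True after this change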
diff --git a/tensorflow/python/keras/layers/advanced_activations.py b/tensorflow/python/keras/layers/advanced_activations.py
index 61ab69c16f..4ab786a184 100644
--- a/tensorflow/python/keras/layers/advanced_activations.py
+++ b/tensorflow/python/keras/layers/advanced_activations.py
@@ -18,7 +18,6 @@ from __future__ import absolute_import
 from __future__ import division
 from __future__ import print_function
 
-from tensorflow.python.keras import activations
 from tensorflow.python.keras import backend as K
 from tensorflow.python.keras import constraints
 from tensorflow.python.keras import initializers
@@ -268,7 +267,7 @@ class Softmax(Layer):
     self.axis = axis
 
   def call(self, inputs):
-    return activations.softmax(inputs, axis=self.axis)
+    return K.softmax(inputs, axis=self.axis)
 
   def get_config(self):
     config = {'axis': self.axis}
@@ -322,11 +321,10 @@ class ReLU(Layer):
   def call(self, inputs):
     # alpha is used for leaky relu slope in activations instead of
     # negative_slope.
-    return activations.relu(
-        inputs,
-        alpha=self.negative_slope,
-        max_value=self.max_value,
-        threshold=self.threshold)
+    return K.relu(inputs,
+                  alpha=self.negative_slope,
+                  max_value=self.max_value,
+                  threshold=self.threshold)
 
   def get_config(self):
     config = {
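
Note: with the layer delegating to K.relu, a ReLU layer configured with only negative_slope now benefits from the same LeakyRelu fast path. A short usage sketch, assuming eager execution and the public tf.keras.layers.ReLU API:

import numpy as np
import tensorflow as tf

# Only negative_slope is set, so the backend takes the nn.leaky_relu branch;
# setting max_value or threshold as well uses the general formulation instead.
layer = tf.keras.layers.ReLU(negative_slope=0.3)
x = np.array([[-2., 0., 1.5]], dtype=np.float32)
print(layer(x).numpy())  # expected: [[-0.6  0.   1.5]]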