path: root/tensorflow/python/ops/math_grad.py
author    RJ Ryan <rjryan@google.com>    2017-10-05 10:03:12 -0700
committer TensorFlower Gardener <gardener@tensorflow.org>    2017-10-05 10:14:21 -0700
commit    376147cd71d1a240dad428c3ff82ca4ea5f4e88e (patch)
tree      b3cedfceb8cec23d1a32a5521fd576f0e5c3580d /tensorflow/python/ops/math_grad.py
parent    c49eeeee5463aff02b4bafbd1596288ba4b27739 (diff)
Save an unnecessary logical_not in the maximum/minimum gradient.
PiperOrigin-RevId: 171167415
Diffstat (limited to 'tensorflow/python/ops/math_grad.py')
 tensorflow/python/ops/math_grad.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/tensorflow/python/ops/math_grad.py b/tensorflow/python/ops/math_grad.py
index ee9cbda0c0..d36d66f899 100644
--- a/tensorflow/python/ops/math_grad.py
+++ b/tensorflow/python/ops/math_grad.py
@@ -851,7 +851,7 @@ def _MaximumMinimumGrad(op, grad, selector_op):
xmask = selector_op(x, y)
rx, ry = gen_array_ops._broadcast_gradient_args(sx, sy)
xgrad = array_ops.where(xmask, grad, zeros)
- ygrad = array_ops.where(math_ops.logical_not(xmask), grad, zeros)
+ ygrad = array_ops.where(xmask, zeros, grad)
gx = array_ops.reshape(math_ops.reduce_sum(xgrad, rx), sx)
gy = array_ops.reshape(math_ops.reduce_sum(ygrad, ry), sy)
return (gx, gy)
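
Why the swapped arguments are equivalent: where(mask, a, b) picks b exactly where mask is False, so where(xmask, zeros, grad) routes grad to y's positions without first materializing logical_not(xmask). Below is a minimal sketch of the equivalence using the public TF 2.x API; the tensors x, y, and grad are made-up illustrative values, not taken from the patched function's call site.

    import tensorflow as tf

    x = tf.constant([1.0, 5.0, 3.0])
    y = tf.constant([4.0, 2.0, 3.0])
    grad = tf.constant([10.0, 20.0, 30.0])
    zeros = tf.zeros_like(grad)

    # The maximum gradient's selector: True where x is chosen by tf.maximum.
    xmask = tf.greater_equal(x, y)

    # Old form: negates the mask, adding a logical_not op to the graph.
    ygrad_old = tf.where(tf.logical_not(xmask), grad, zeros)

    # New form: swapping the branch arguments yields the same values,
    # since where() already takes its third argument wherever the
    # condition is False.
    ygrad_new = tf.where(xmask, zeros, grad)

    assert bool(tf.reduce_all(ygrad_old == ygrad_new))

The saving is one elementwise op per maximum/minimum gradient node in the graph; the numerical result is unchanged.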