author     RJ Ryan <rjryan@google.com>                       2017-10-05 10:03:12 -0700
committer  TensorFlower Gardener <gardener@tensorflow.org>   2017-10-05 10:14:21 -0700
commit     376147cd71d1a240dad428c3ff82ca4ea5f4e88e (patch)
tree       b3cedfceb8cec23d1a32a5521fd576f0e5c3580d /tensorflow/cc/gradients
parent     c49eeeee5463aff02b4bafbd1596288ba4b27739 (diff)
Save an unnecessary logical_not in the maximum/minimum gradient.
PiperOrigin-RevId: 171167415
Diffstat (limited to 'tensorflow/cc/gradients')
-rw-r--r--  tensorflow/cc/gradients/math_grad.cc | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/tensorflow/cc/gradients/math_grad.cc b/tensorflow/cc/gradients/math_grad.cc
index ac288b1d83..2417bf18a9 100644
--- a/tensorflow/cc/gradients/math_grad.cc
+++ b/tensorflow/cc/gradients/math_grad.cc
@@ -484,7 +484,7 @@ Status MaximumMinimumGradCommon(const Scope& scope, const Operation& op,
auto grad = grad_inputs[0];
auto zeros = ZerosLike(scope, grad);
auto gx_1 = Where3(scope, comparator, grad, zeros);
- auto gx_2 = Where3(scope, LogicalNot(scope, comparator), grad, zeros);
+ auto gx_2 = Where3(scope, comparator, zeros, grad);
return BinaryGradCommon(scope, op, grad_outputs, gx_1, gx_2);
}
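
The rewrite is safe because selecting grad where the comparator is false is the same as selecting zeros where it is true, so the LogicalNot node can be dropped by swapping the branch arguments of Where3. The following standalone sketch (not part of this change; the example values and the use of ClientSession/ops::Const are illustrative assumptions) shows both constructions producing the same gx_2.

// Illustrative sketch only; assumes the TensorFlow C++ client API
// (ClientSession, ops::Const, ops::Where3, ops::LogicalNot, ops::ZerosLike).
#include "tensorflow/cc/client/client_session.h"
#include "tensorflow/cc/ops/standard_ops.h"
#include "tensorflow/core/framework/tensor.h"
#include "tensorflow/core/platform/logging.h"

int main() {
  using namespace tensorflow;
  Scope scope = Scope::NewRootScope();

  // comparator is true where the first input "won" the max/min (example values).
  auto comparator = ops::Const(scope, {true, false, true});
  auto grad = ops::Const(scope, {1.f, 2.f, 3.f});
  auto zeros = ops::ZerosLike(scope, grad);

  // Before: negate the comparator, then route grad where the negation is true.
  auto gx_2_old =
      ops::Where3(scope, ops::LogicalNot(scope, comparator), grad, zeros);
  // After: same result, with the branch arguments swapped and no LogicalNot.
  auto gx_2_new = ops::Where3(scope, comparator, zeros, grad);

  ClientSession session(scope);
  std::vector<Tensor> outputs;
  TF_CHECK_OK(session.Run({gx_2_old, gx_2_new}, &outputs));
  // Both fetches evaluate to {0, 2, 0}: grad flows only where comparator is false.
  LOG(INFO) << outputs[0].DebugString() << " == " << outputs[1].DebugString();
  return 0;
}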