author     Mrinal Kalakrishnan <kalakris@google.com>  2016-09-02 09:21:47 -0800
committer  TensorFlower Gardener <gardener@tensorflow.org>  2016-09-02 10:31:21 -0700
commit     6c110d3984f771a95925b663436168a87585191b (patch)
tree       62833fe05fef4b19ef6c7f17a5c9feddf6d7c376
parent     5432568cde33a35758d36220e12392b220018d03 (diff)
Remove NoGradient for ops which could have gradients defined but don't.

Some ops (notably ops which implement a gradient themselves) had NoGradient
defined, which meant that attempts to compute their gradient would implicitly
return 0. This CL replaces that implicit behavior with an error raised during
graph construction when the gradient of one of these ops is taken.

Change: 132076657
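A minimal sketch (not part of this CL) of the behavioral difference, using the Rsqrt/RsqrtGrad pair touched in the math_grad.py hunk below; it assumes the graph-mode tf.gradients API of this era and that Rsqrt's registered gradient is implemented with the RsqrtGrad kernel, as the removed registration suggests:

import tensorflow as tf

x = tf.constant([1.0, 4.0])
y = tf.rsqrt(x)
dy = tf.gradients(y, x)[0]   # first derivative; the backward pass emits an RsqrtGrad op

# Before this CL: ops.NoGradient("RsqrtGrad") marked the op as non-differentiable,
# so the second derivative silently came back as None (treated as zero downstream).
# After this CL: RsqrtGrad has no gradient registration at all, so the same call
# raises a LookupError at graph-construction time, surfacing the missing gradient.
try:
    d2y = tf.gradients(dy, x)
    print("second derivative:", d2y)   # [None] under the old behavior
except LookupError as err:
    print("no gradient defined for RsqrtGrad:", err)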
-rw-r--r--  tensorflow/python/ops/array_grad.py    3
-rw-r--r--  tensorflow/python/ops/linalg_grad.py   9
-rw-r--r--  tensorflow/python/ops/math_grad.py     3
3 files changed, 1 insertion(+), 14 deletions(-)
diff --git a/tensorflow/python/ops/array_grad.py b/tensorflow/python/ops/array_grad.py
index 1cf54de465..7e3fb6edb5 100644
--- a/tensorflow/python/ops/array_grad.py
+++ b/tensorflow/python/ops/array_grad.py
@@ -379,9 +379,6 @@ def _TileGrad(op, grad):
return [input_grad, None]
-ops.NoGradient("TileGrad")
-
-
ops.NoGradient("BroadcastGradientArgs")
diff --git a/tensorflow/python/ops/linalg_grad.py b/tensorflow/python/ops/linalg_grad.py
index 7c10239043..e45d7e69e6 100644
--- a/tensorflow/python/ops/linalg_grad.py
+++ b/tensorflow/python/ops/linalg_grad.py
@@ -30,15 +30,6 @@ from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import linalg_ops
from tensorflow.python.ops import math_ops
-ops.NoGradient("CholeskyGrad")
-ops.NoGradient("BatchCholeskyGrad")
-ops.NoGradient("SelfAdjointEig")
-ops.NoGradient("BatchSelfAdjointEig")
-ops.NoGradient("SelfAdjointEigV2")
-ops.NoGradient("BatchSelfAdjointEigV2")
-ops.NoGradient("Svd")
-ops.NoGradient("BatchSvd")
-
@ops.RegisterGradient("MatrixInverse")
def _MatrixInverseGrad(op, grad):
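For the linear-algebra ops above, the same shift applies both to gradient kernels (CholeskyGrad, BatchCholeskyGrad) and to decompositions whose gradients were simply not implemented yet (SelfAdjointEig, Svd, and their batch variants). A hedged sketch for Cholesky, assuming tf.cholesky from this era and that Cholesky's registered gradient in this file emits a CholeskyGrad op:

import tensorflow as tf

A = tf.constant([[4.0, 1.0], [1.0, 3.0]])   # symmetric positive definite
L = tf.cholesky(A)
dA = tf.gradients(L, A)[0]   # Cholesky's backward pass uses the CholeskyGrad kernel

# Differentiating once more has to go through CholeskyGrad itself. With the
# NoGradient registration removed and no gradient function registered, graph
# construction now raises LookupError instead of quietly yielding None. The
# decomposition ops (SelfAdjointEig, Svd, ...) behave the same way on their
# first gradient until one is implemented.
try:
    d2A = tf.gradients(dA, A)
except LookupError as err:
    print("gradient of CholeskyGrad is not defined:", err)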
diff --git a/tensorflow/python/ops/math_grad.py b/tensorflow/python/ops/math_grad.py
index fe2d057a0f..b5c33f84bb 100644
--- a/tensorflow/python/ops/math_grad.py
+++ b/tensorflow/python/ops/math_grad.py
@@ -33,8 +33,7 @@ from tensorflow.python.ops import math_ops
# Gradient ops that do not have gradients themselves.
ops.NoGradient("SigmoidGrad")
ops.NoGradient("TanhGrad")
-ops.NoGradient("InvGrad")
-ops.NoGradient("RsqrtGrad")
+
def _safe_shape_div(x, y):
"""Divides `x / y` assuming `x, y >= 0`, treating `0 / 0 = 0`."""