path: root/tensorflow/python/ops/linalg_grad.py
author    Mrinal Kalakrishnan <kalakris@google.com> 2016-09-02 09:21:47 -0800
committer TensorFlower Gardener <gardener@tensorflow.org> 2016-09-02 10:31:21 -0700
commit    6c110d3984f771a95925b663436168a87585191b
tree      62833fe05fef4b19ef6c7f17a5c9feddf6d7c376 /tensorflow/python/ops/linalg_grad.py
parent    5432568cde33a35758d36220e12392b220018d03
Remove NoGradient for ops which could have gradients defined but don't.
Some ops (notably ops that implement a gradient themselves) had NoGradient registered, which meant that attempts to compute their gradient would silently return 0. This CL replaces that implicit behavior with an error raised during graph construction when the gradient of one of these ops is requested.

Change: 132076657
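To make the behavior change concrete, here is a minimal sketch using the TF Python API of that era (assuming the post-V2 semantics where tf.self_adjoint_eig returns a (eigenvalues, eigenvectors) pair and lowers to the SelfAdjointEigV2 op; the example inputs are illustrative):

    import tensorflow as tf

    a = tf.constant([[2.0, 1.0], [1.0, 2.0]])
    e, v = tf.self_adjoint_eig(a)  # lowers to the SelfAdjointEigV2 op

    # Before this CL: ops.NoGradient("SelfAdjointEigV2") registered None as
    # the gradient function, so tf.gradients silently contributed zeros.
    # After this CL: the gradient registry has no entry for the op, and
    # graph construction fails with a LookupError ("No gradient defined
    # for operation ...").
    try:
        grads = tf.gradients(e, a)
    except LookupError as err:
        print(err)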
Diffstat (limited to 'tensorflow/python/ops/linalg_grad.py')
-rw-r--r--  tensorflow/python/ops/linalg_grad.py | 9 ---------
1 file changed, 0 insertions(+), 9 deletions(-)
diff --git a/tensorflow/python/ops/linalg_grad.py b/tensorflow/python/ops/linalg_grad.py
index 7c10239043..e45d7e69e6 100644
--- a/tensorflow/python/ops/linalg_grad.py
+++ b/tensorflow/python/ops/linalg_grad.py
@@ -30,15 +30,6 @@ from tensorflow.python.ops import control_flow_ops
 from tensorflow.python.ops import linalg_ops
 from tensorflow.python.ops import math_ops
 
-ops.NoGradient("CholeskyGrad")
-ops.NoGradient("BatchCholeskyGrad")
-ops.NoGradient("SelfAdjointEig")
-ops.NoGradient("BatchSelfAdjointEig")
-ops.NoGradient("SelfAdjointEigV2")
-ops.NoGradient("BatchSelfAdjointEigV2")
-ops.NoGradient("Svd")
-ops.NoGradient("BatchSvd")
-
 
 @ops.RegisterGradient("MatrixInverse")
 def _MatrixInverseGrad(op, grad):
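For context, a schematic sketch of the two registration mechanisms the diff touches. The decorators are the real ones from tensorflow.python.framework.ops; the op names here are hypothetical placeholders:

    from tensorflow.python.framework import ops

    # Removed by this CL for the linalg ops above: registers None as the
    # gradient function, so taking the gradient silently contributes zeros.
    ops.NoGradient("SomeOpWithoutGrad")  # hypothetical op name

    # Still present in linalg_grad.py (e.g. _MatrixInverseGrad above):
    # registers a real gradient function that autodiff calls with the op
    # and the gradient flowing in from downstream.
    @ops.RegisterGradient("SomeOpWithGrad")  # hypothetical op name
    def _SomeOpWithGrad(op, grad):
      # Return one gradient tensor per op input.
      return grad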