 tensorflow/python/ops/array_grad.py  | 3 ---
 tensorflow/python/ops/linalg_grad.py | 9 ---------
 tensorflow/python/ops/math_grad.py   | 3 +--
 3 files changed, 1 insertion(+), 14 deletions(-)
diff --git a/tensorflow/python/ops/array_grad.py b/tensorflow/python/ops/array_grad.py
index 1cf54de465..7e3fb6edb5 100644
--- a/tensorflow/python/ops/array_grad.py
+++ b/tensorflow/python/ops/array_grad.py
@@ -379,9 +379,6 @@ def _TileGrad(op, grad):
return [input_grad, None]
-ops.NoGradient("TileGrad")
-
-
ops.NoGradient("BroadcastGradientArgs")
diff --git a/tensorflow/python/ops/linalg_grad.py b/tensorflow/python/ops/linalg_grad.py
index 7c10239043..e45d7e69e6 100644
--- a/tensorflow/python/ops/linalg_grad.py
+++ b/tensorflow/python/ops/linalg_grad.py
@@ -30,15 +30,6 @@ from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import linalg_ops
from tensorflow.python.ops import math_ops
-ops.NoGradient("CholeskyGrad")
-ops.NoGradient("BatchCholeskyGrad")
-ops.NoGradient("SelfAdjointEig")
-ops.NoGradient("BatchSelfAdjointEig")
-ops.NoGradient("SelfAdjointEigV2")
-ops.NoGradient("BatchSelfAdjointEigV2")
-ops.NoGradient("Svd")
-ops.NoGradient("BatchSvd")
-
@ops.RegisterGradient("MatrixInverse")
def _MatrixInverseGrad(op, grad):
diff --git a/tensorflow/python/ops/math_grad.py b/tensorflow/python/ops/math_grad.py
index fe2d057a0f..b5c33f84bb 100644
--- a/tensorflow/python/ops/math_grad.py
+++ b/tensorflow/python/ops/math_grad.py
@@ -33,8 +33,7 @@ from tensorflow.python.ops import math_ops
# Gradient ops that do not have gradients themselves.
ops.NoGradient("SigmoidGrad")
ops.NoGradient("TanhGrad")
-ops.NoGradient("InvGrad")
-ops.NoGradient("RsqrtGrad")
+
def _safe_shape_div(x, y):
"""Divides `x / y` assuming `x, y >= 0`, treating `0 / 0 = 0`."""