about summary refs log tree commit diff homepage
path: root/tensorflow/python/ops/math_grad.py
diff options
context:
space:
mode:
author: Anna R <annarev@google.com> 2018-02-28 14:50:02 -0800
committer: TensorFlower Gardener <gardener@tensorflow.org> 2018-02-28 14:53:59 -0800
commit91d49c7d98114da4e4647c62d9f9b69119296b69 (patch)
treef471ce837b915650f1128a57d505ac2c46dc51da /tensorflow/python/ops/math_grad.py
parentf28e4d6faf94c08464f430f9cd01ef32dde6ad46 (diff)
Removing underscore prefixes from hidden generated Python functions.
PiperOrigin-RevId: 187386941
Diffstat (limited to 'tensorflow/python/ops/math_grad.py')
-rw-r--r--  tensorflow/python/ops/math_grad.py  59
1 file changed, 22 insertions(+), 37 deletions(-)
diff --git a/tensorflow/python/ops/math_grad.py b/tensorflow/python/ops/math_grad.py
index bf28f74153..51e19b4ad3 100644
--- a/tensorflow/python/ops/math_grad.py
+++ b/tensorflow/python/ops/math_grad.py
@@ -302,16 +302,14 @@ def _NegGrad(_, grad):
def _InvGrad(op, grad):
"""Returns -grad * (1 / x^2)."""
y = op.outputs[0] # y = 1 / x
- # pylint: disable=protected-access
- return gen_math_ops._reciprocal_grad(y, grad)
+ return gen_math_ops.reciprocal_grad(y, grad)
@ops.RegisterGradient("Reciprocal")
def _ReciprocalGrad(op, grad):
"""Returns -grad * (1 / x^2)."""
y = op.outputs[0] # y = 1 / x
- # pylint: disable=protected-access
- return gen_math_ops._reciprocal_grad(y, grad)
+ return gen_math_ops.reciprocal_grad(y, grad)
@ops.RegisterGradient("InvGrad")
@@ -321,8 +319,7 @@ def _InvGradGrad(op, grad):
with ops.control_dependencies([grad]):
ca = math_ops.conj(op.inputs[0])
cg = math_ops.conj(grad)
- # pylint: disable=protected-access
- return cg * -2.0 * b * ca, gen_math_ops._reciprocal_grad(ca, grad)
+ return cg * -2.0 * b * ca, gen_math_ops.reciprocal_grad(ca, grad)
@ops.RegisterGradient("ReciprocalGrad")
@@ -332,8 +329,7 @@ def _ReciprocalGradGrad(op, grad):
with ops.control_dependencies([grad]):
ca = math_ops.conj(op.inputs[0])
cg = math_ops.conj(grad)
- # pylint: disable=protected-access
- return cg * -2.0 * b * ca, gen_math_ops._reciprocal_grad(ca, grad)
+ return cg * -2.0 * b * ca, gen_math_ops.reciprocal_grad(ca, grad)
@ops.RegisterGradient("Square")
@@ -348,9 +344,7 @@ def _SquareGrad(op, grad):
@ops.RegisterGradient("Sqrt")
def _SqrtGrad(op, grad):
y = op.outputs[0] # y = x^(1/2)
- # pylint: disable=protected-access
- return gen_math_ops._sqrt_grad(y, grad)
- # pylint: enable=protected-access
+ return gen_math_ops.sqrt_grad(y, grad)
@ops.RegisterGradient("SqrtGrad")
@@ -366,9 +360,7 @@ def _SqrtGradGrad(op, grad):
def _RsqrtGrad(op, grad):
"""Returns -0.5 * grad * conj(y)^3."""
y = op.outputs[0] # y = x^(-1/2)
- # pylint: disable=protected-access
- return gen_math_ops._rsqrt_grad(y, grad)
- # pylint: enable=protected-access
+ return gen_math_ops.rsqrt_grad(y, grad)
@ops.RegisterGradient("RsqrtGrad")
@@ -380,8 +372,7 @@ def _RsqrtGradGrad(op, grad):
ca = math_ops.conj(a)
cg = math_ops.conj(grad)
grad_a = -1.5 * cg * b * math_ops.square(ca)
- # pylint: disable=protected-access
- grad_b = gen_math_ops._rsqrt_grad(ca, grad)
+ grad_b = gen_math_ops.rsqrt_grad(ca, grad)
return grad_a, grad_b
@@ -446,8 +437,7 @@ def _TanhGrad(op, grad):
y = op.outputs[0] # y = tanh(x)
with ops.control_dependencies([grad]):
y = math_ops.conj(y)
- # pylint: disable=protected-access
- return gen_math_ops._tanh_grad(y, grad)
+ return gen_math_ops.tanh_grad(y, grad)
@ops.RegisterGradient("Asinh")
@@ -485,8 +475,7 @@ def _TanhGradGrad(op, grad):
with ops.control_dependencies([grad]):
a = math_ops.conj(op.inputs[0])
b = math_ops.conj(op.inputs[1])
- # pylint: disable=protected-access
- return grad * -2.0 * b * a, gen_math_ops._tanh_grad(a, grad)
+ return grad * -2.0 * b * a, gen_math_ops.tanh_grad(a, grad)
@ops.RegisterGradient("Erf")
@@ -634,8 +623,7 @@ def _SigmoidGrad(op, grad):
y = op.outputs[0] # y = sigmoid(x)
with ops.control_dependencies([grad]):
y = math_ops.conj(y)
- # pylint: disable=protected-access
- return gen_math_ops._sigmoid_grad(y, grad)
+ return gen_math_ops.sigmoid_grad(y, grad)
@ops.RegisterGradient("SigmoidGrad")
@@ -644,8 +632,7 @@ def _SigmoidGradGrad(op, grad):
a = math_ops.conj(op.inputs[0])
b = math_ops.conj(op.inputs[1])
gb = grad * b
- # pylint: disable=protected-access
- return gb - 2.0 * gb * a, gen_math_ops._sigmoid_grad(a, grad)
+ return gb - 2.0 * gb * a, gen_math_ops.sigmoid_grad(a, grad)
@ops.RegisterGradient("Sign")
@@ -792,7 +779,7 @@ def _MulGrad(op, grad):
if (isinstance(grad, ops.Tensor) and
_ShapesFullySpecifiedAndEqual(x, y, grad) and
grad.dtype in (dtypes.int32, dtypes.float32)):
- return gen_math_ops._mul(grad, y), gen_math_ops._mul(grad, x)
+ return gen_math_ops.mul(grad, y), gen_math_ops.mul(grad, x)
assert x.dtype.base_dtype == y.dtype.base_dtype, (x.dtype, " vs. ", y.dtype)
sx = array_ops.shape(x)
sy = array_ops.shape(y)
@@ -800,9 +787,9 @@ def _MulGrad(op, grad):
x = math_ops.conj(x)
y = math_ops.conj(y)
return (array_ops.reshape(
- math_ops.reduce_sum(gen_math_ops._mul(grad, y), rx), sx),
+ math_ops.reduce_sum(gen_math_ops.mul(grad, y), rx), sx),
array_ops.reshape(
- math_ops.reduce_sum(gen_math_ops._mul(x, grad), ry), sy))
+ math_ops.reduce_sum(gen_math_ops.mul(x, grad), ry), sy))
# pylint: enable=protected-access
@@ -976,20 +963,18 @@ def _MatMulGrad(op, grad):
t_b = op.get_attr("transpose_b")
a = math_ops.conj(op.inputs[0])
b = math_ops.conj(op.inputs[1])
- # pylint: disable=protected-access
if not t_a and not t_b:
- grad_a = gen_math_ops._mat_mul(grad, b, transpose_b=True)
- grad_b = gen_math_ops._mat_mul(a, grad, transpose_a=True)
+ grad_a = gen_math_ops.mat_mul(grad, b, transpose_b=True)
+ grad_b = gen_math_ops.mat_mul(a, grad, transpose_a=True)
elif not t_a and t_b:
- grad_a = gen_math_ops._mat_mul(grad, b)
- grad_b = gen_math_ops._mat_mul(grad, a, transpose_a=True)
+ grad_a = gen_math_ops.mat_mul(grad, b)
+ grad_b = gen_math_ops.mat_mul(grad, a, transpose_a=True)
elif t_a and not t_b:
- grad_a = gen_math_ops._mat_mul(b, grad, transpose_b=True)
- grad_b = gen_math_ops._mat_mul(a, grad)
+ grad_a = gen_math_ops.mat_mul(b, grad, transpose_b=True)
+ grad_b = gen_math_ops.mat_mul(a, grad)
elif t_a and t_b:
- grad_a = gen_math_ops._mat_mul(b, grad, transpose_a=True, transpose_b=True)
- grad_b = gen_math_ops._mat_mul(grad, a, transpose_a=True, transpose_b=True)
- # pylint: enable=protected-access
+ grad_a = gen_math_ops.mat_mul(b, grad, transpose_a=True, transpose_b=True)
+ grad_b = gen_math_ops.mat_mul(grad, a, transpose_a=True, transpose_b=True)
return grad_a, grad_b