path: root/tensorflow/cc/gradients
author    Martin Wicke <wicke@google.com> 2016-11-15 13:35:35 -0800
committer TensorFlower Gardener <gardener@tensorflow.org> 2016-11-15 13:45:09 -0800
commit    fb01ebb8c38b2d274f6fe9a7115b2362828a452e (patch)
tree      81b598b8c41108b36c9f08331d1adf2415af0051 /tensorflow/cc/gradients
parent    7632193992bb77e08cfe93f752ccfd7a27cb2618 (diff)
Deprecate tf.inv in favor of tf.reciprocal.
Change: 139240711
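
For context: despite the old name, Inv computes the element-wise reciprocal y = 1/x, not a matrix inverse, which is what motivates the clearer name. The change below only adds a second gradient registration for the same InvGrad function, so both op names share the gradient dy/dx = -1/x^2 = -y^2. As a minimal sketch (not part of this commit; the session setup and values are illustrative), the renamed op can be driven through the C++ client API like this:

#include "tensorflow/cc/client/client_session.h"
#include "tensorflow/cc/ops/standard_ops.h"

int main() {
  tensorflow::Scope scope = tensorflow::Scope::NewRootScope();
  // "Reciprocal" is the new canonical op name; "Inv" remains registered
  // for backward compatibility.
  auto x = tensorflow::ops::Const(scope, {2.0f, 4.0f});
  auto y = tensorflow::ops::Reciprocal(scope, x);

  tensorflow::ClientSession session(scope);
  std::vector<tensorflow::Tensor> outputs;
  TF_CHECK_OK(session.Run({y}, &outputs));  // outputs[0] holds {0.5, 0.25}
  return 0;
}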
Diffstat (limited to 'tensorflow/cc/gradients')
-rw-r--r--  tensorflow/cc/gradients/math_grad.cc      | 16
-rw-r--r--  tensorflow/cc/gradients/math_grad_test.cc |  4
2 files changed, 11 insertions(+), 9 deletions(-)
diff --git a/tensorflow/cc/gradients/math_grad.cc b/tensorflow/cc/gradients/math_grad.cc
index d841899bbd..11c6207599 100644
--- a/tensorflow/cc/gradients/math_grad.cc
+++ b/tensorflow/cc/gradients/math_grad.cc
@@ -50,6 +50,7 @@ Status InvGrad(const Scope& scope, const Operation& op,
   return scope.status();
 }
 REGISTER_GRADIENT_OP("Inv", InvGrad);
+REGISTER_GRADIENT_OP("Reciprocal", InvGrad);
 
 Status SquareGrad(const Scope& scope, const Operation& op,
                   const std::vector<Output>& grad_inputs,
@@ -68,7 +69,7 @@ Status SqrtGrad(const Scope& scope, const Operation& op,
   // y = sqrt(x)
   // dy/dx = 0.5 * (1 / sqrt(x)) = 0.5 * (1 / y)
   // dx = dy * (0.5 * (1 / y))
-  auto y_inv = Inv(scope, op.output(0));
+  auto y_inv = Reciprocal(scope, op.output(0));
   auto half = Cast(scope, Const(scope, 0.5), op.input(0).type());
   auto dx = Mul(scope, grad_inputs[0], Mul(scope, half, y_inv));
   grad_outputs->push_back(dx);
@@ -82,7 +83,7 @@ Status RsqrtGrad(const Scope& scope, const Operation& op,
   // y = 1/x^1/2 = x^-1/2
   // dy/dx = -1/2 * x^-3/2 = -1/2 * x^-1/2 * x^-1 = -1/2 * y * x^-1
   // dx = dy * (-1/2 * y * x^-1)
-  auto x_inv = Inv(scope, op.input(0));
+  auto x_inv = Reciprocal(scope, op.input(0));
   auto y = op.output(0);
   auto neghalf = Cast(scope, Const(scope, -0.5), op.input(0).type());
   auto a = Mul(scope, neghalf, x_inv);
@@ -110,7 +111,8 @@ Status LogGrad(const Scope& scope, const Operation& op,
   // f(x) = log(x) = y
   // df/dx = 1 / x
   // dx = dy * (1 / x)
-  grad_outputs->push_back(Mul(scope, grad_inputs[0], Inv(scope, op.input(0))));
+  grad_outputs->push_back(
+      Mul(scope, grad_inputs[0], Reciprocal(scope, op.input(0))));
   return scope.status();
 }
 REGISTER_GRADIENT_OP("Log", LogGrad);
@@ -186,7 +188,7 @@ Status AsinGrad(const Scope& scope, const Operation& op,
   // dx = dy * (1 / (1 - x * x)^1/2)
   auto x2 = Square(scope, op.input(0));
   auto one = Cast(scope, Const(scope, 1.0), op.input(0).type());
-  auto dydx = Inv(scope, Sqrt(scope, Sub(scope, one, x2)));
+  auto dydx = Reciprocal(scope, Sqrt(scope, Sub(scope, one, x2)));
   auto dx = Mul(scope, grad_inputs[0], dydx);
   grad_outputs->push_back(dx);
   return scope.status();
@@ -201,7 +203,7 @@ Status AcosGrad(const Scope& scope, const Operation& op,
   // dx = dy * (- 1 / (1 - x * x)^1/2)
   auto x2 = Square(scope, op.input(0));
   auto one = Cast(scope, Const(scope, 1.0), op.input(0).type());
-  auto dydx = Neg(scope, Inv(scope, Sqrt(scope, Sub(scope, one, x2))));
+  auto dydx = Neg(scope, Reciprocal(scope, Sqrt(scope, Sub(scope, one, x2))));
   auto dx = Mul(scope, grad_inputs[0], dydx);
   grad_outputs->push_back(dx);
   return scope.status();
@@ -214,7 +216,7 @@ Status TanGrad(const Scope& scope, const Operation& op,
   // y = tan(x)
   // dy/dx = sec(x)^2 = 1 / cos(x)^2
   // dx = dy * (1 / cos(x)^2)
-  auto dydx = Square(scope, Inv(scope, Cos(scope, op.input(0))));
+  auto dydx = Square(scope, Reciprocal(scope, Cos(scope, op.input(0))));
   auto dx = Mul(scope, grad_inputs[0], dydx);
   grad_outputs->push_back(dx);
   return scope.status();
@@ -228,7 +230,7 @@ Status AtanGrad(const Scope& scope, const Operation& op,
   // dy/dx = 1 / (1 + x^2)
   // dx = dy * (1 / (1 + x^2))
   auto one = Cast(scope, Const(scope, 1.0), op.input(0).type());
-  auto dydx = Inv(scope, Add(scope, one, Square(scope, op.input(0))));
+  auto dydx = Reciprocal(scope, Add(scope, one, Square(scope, op.input(0))));
   auto dx = Mul(scope, grad_inputs[0], dydx);
   grad_outputs->push_back(dx);
   return scope.status();
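
REGISTER_GRADIENT_OP keys gradient functions by the op's type string, so the one-line "Reciprocal" registration at the top of this diff is all that graph differentiation needs to find InvGrad under the new name. A hedged sketch of exercising that lookup through the C++ gradient builder (the helper name and expected values are illustrative assumptions, not part of this commit):

#include "tensorflow/cc/framework/gradients.h"
#include "tensorflow/cc/ops/standard_ops.h"

// Illustrative helper: differentiate y = Reciprocal(x) symbolically.
// The gradient registry resolves the op type "Reciprocal" to InvGrad.
void ReciprocalGradSketch(const tensorflow::Scope& scope) {
  auto x = tensorflow::ops::Const(scope, {2.0f, 4.0f});
  auto y = tensorflow::ops::Reciprocal(scope, x);
  auto dy = tensorflow::ops::OnesLike(scope, y);
  std::vector<tensorflow::Output> dx;
  // For y = 1/x, dy/dx = -1/x^2, so dx here should be {-0.25, -0.0625}.
  TF_CHECK_OK(
      tensorflow::AddSymbolicGradients(scope, {y}, {x}, {dy}, &dx));
}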
diff --git a/tensorflow/cc/gradients/math_grad_test.cc b/tensorflow/cc/gradients/math_grad_test.cc
index 456a036111..8b7fb8d765 100644
--- a/tensorflow/cc/gradients/math_grad_test.cc
+++ b/tensorflow/cc/gradients/math_grad_test.cc
@@ -84,7 +84,7 @@ class CWiseUnaryGradTest : public ::testing::Test {
         y = Neg(scope_, x);
         break;
       case INV:
-        y = Inv(scope_, x);
+        y = Reciprocal(scope_, x);
         break;
       case SQUARE:
         y = Square(scope_, x);
@@ -157,7 +157,7 @@ TEST_F(CWiseUnaryGradTest, Neg) {
   TestCWiseGrad(NEG, x_fn, dy_fn, dx_fn);
 }
 
-TEST_F(CWiseUnaryGradTest, Inv) {
+TEST_F(CWiseUnaryGradTest, Reciprocal) {
   auto x_fn = [this](const int i) { return RV({-1, 1, -2, 2, -3, 3, -4, 4}); };
   auto dy_fn = [this](const float x) { return RV({0, -2, 2, -3, 3, -4, 4}); };
   auto dx_fn = [this](const float x, const float dy) {