Diffstat (limited to 'tensorflow/cc/gradients')
 tensorflow/cc/gradients/math_grad.cc      | 26 ++++
 tensorflow/cc/gradients/math_grad_test.cc | 52 ++++++++
 tensorflow/cc/gradients/nn_grad.cc        | 13 ++
 tensorflow/cc/gradients/nn_grad_test.cc   | 13 ++
 4 files changed, 104 insertions(+), 0 deletions(-)
diff --git a/tensorflow/cc/gradients/math_grad.cc b/tensorflow/cc/gradients/math_grad.cc
index 8c1a01f518..71d9a8ed7b 100644
--- a/tensorflow/cc/gradients/math_grad.cc
+++ b/tensorflow/cc/gradients/math_grad.cc
@@ -162,6 +162,32 @@ Status Log1pGrad(const Scope& scope, const Operation& op,
}
REGISTER_GRADIENT_OP("Log1p", Log1pGrad);
+Status SinhGrad(const Scope& scope, const Operation& op,
+ const std::vector<Output>& grad_inputs,
+ std::vector<Output>* grad_outputs) {
+ // y = sinh(x)
+ // dy/dx = cosh(x)
+ auto dydx = Cosh(scope, op.input(0));
+ // grad(x) = grad(y) * conj(dy/dx)
+ grad_outputs->push_back(
+ Mul(scope, grad_inputs[0], ConjugateHelper(scope, dydx)));
+ return scope.status();
+}
+REGISTER_GRADIENT_OP("Sinh", SinhGrad);
+
+Status CoshGrad(const Scope& scope, const Operation& op,
+ const std::vector<Output>& grad_inputs,
+ std::vector<Output>* grad_outputs) {
+ // y = cosh(x)
+ // dy/dx = sinh(x)
+ auto dydx = Sinh(scope, op.input(0));
+ // grad(x) = grad(y) * conj(dy/dx)
+ grad_outputs->push_back(
+ Mul(scope, grad_inputs[0], ConjugateHelper(scope, dydx)));
+ return scope.status();
+}
+REGISTER_GRADIENT_OP("Cosh", CoshGrad);
+
Status TanhGrad(const Scope& scope, const Operation& op,
const std::vector<Output>& grad_inputs,
std::vector<Output>* grad_outputs) {
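The two gradients added above follow the pattern already used in this file: evaluate dy/dx at the op's input, conjugate it (a no-op for real types), and multiply by the incoming gradient. A minimal, hypothetical sketch of how such a registration is picked up by the C++ gradient builder (not part of this change; assumes the usual tensorflow/cc headers and ops are available):

    #include <vector>
    #include "tensorflow/cc/framework/gradients.h"
    #include "tensorflow/cc/framework/scope.h"
    #include "tensorflow/cc/ops/standard_ops.h"

    using namespace tensorflow;
    using namespace tensorflow::ops;

    int main() {
      Scope root = Scope::NewRootScope();
      auto x = Placeholder(root, DT_FLOAT);
      auto y = Sinh(root, x);
      std::vector<Output> grads;
      // AddSymbolicGradients looks up the "Sinh" registration above and
      // emits grad_x = grad_y * cosh(x) nodes into the graph.
      TF_CHECK_OK(AddSymbolicGradients(root, {y}, {x}, &grads));
      return 0;
    }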
diff --git a/tensorflow/cc/gradients/math_grad_test.cc b/tensorflow/cc/gradients/math_grad_test.cc
index de6baa1769..1653b04378 100644
--- a/tensorflow/cc/gradients/math_grad_test.cc
+++ b/tensorflow/cc/gradients/math_grad_test.cc
@@ -45,6 +45,8 @@ class CWiseUnaryGradTest : public ::testing::Test {
EXPM1,
LOG,
LOG1P,
+ SINH,
+ COSH,
TANH,
SIGMOID,
SIGN,
@@ -111,6 +113,12 @@ class CWiseUnaryGradTest : public ::testing::Test {
case LOG1P:
y = Log1p(scope_, x);
break;
+ case SINH:
+ y = Sinh(scope_, x);
+ break;
+ case COSH:
+ y = Cosh(scope_, x);
+ break;
case TANH:
y = Tanh(scope_, x);
break;
@@ -337,6 +345,50 @@ TEST_F(CWiseUnaryGradTest, Log1p_Complex) {
TestCWiseGrad<complex64>(LOG1P, x_fn, dy_fn, dx_fn);
}
+TEST_F(CWiseUnaryGradTest, Sinh) {
+ auto x_fn = [this](const int i) { return RV({0, -1, 1, -2, 2, -3, 3}); };
+ auto dy_fn = [this](const float x) { return x + RV({-2, 2, -3, 3, -4, 4}); };
+ auto dx_fn = [this](const float x, const float dy) {
+ return dy * std::cosh(x);
+ };
+ TestCWiseGrad<float>(SINH, x_fn, dy_fn, dx_fn);
+}
+
+TEST_F(CWiseUnaryGradTest, Sinh_Complex) {
+ auto x_fn = [this](const int i) {
+ return CRV({{1, 0}, {0, 1}, {2, -1}, {1, 2}, {3, 4}});
+ };
+ auto dy_fn = [this](const complex64& x) {
+ return x + CRV({{-2, 2}, {-3, 3}, {1, -4}});
+ };
+ auto dx_fn = [this](const complex64& x, const complex64& dy) {
+ return dy * conjugate(std::cosh(x));
+ };
+ TestCWiseGrad<complex64>(SINH, x_fn, dy_fn, dx_fn);
+}
+
+TEST_F(CWiseUnaryGradTest, Cosh) {
+ auto x_fn = [this](const int i) { return RV({0, -1, 1, -2, 2, -3, 3}); };
+ auto dy_fn = [this](const float x) { return x + RV({-2, 2, -3, 3, -4, 4}); };
+ auto dx_fn = [this](const float x, const float dy) {
+ return dy * std::sinh(x);
+ };
+ TestCWiseGrad<float>(COSH, x_fn, dy_fn, dx_fn);
+}
+
+TEST_F(CWiseUnaryGradTest, Cosh_Complex) {
+ auto x_fn = [this](const int i) {
+ return CRV({{1, 0}, {0, 1}, {2, -1}, {1, 2}, {3, 4}});
+ };
+ auto dy_fn = [this](const complex64& x) {
+ return x + CRV({{-2, 2}, {-3, 3}, {1, -4}});
+ };
+ auto dx_fn = [this](const complex64& x, const complex64& dy) {
+ return dy * conjugate(std::sinh(x));
+ };
+ TestCWiseGrad<complex64>(COSH, x_fn, dy_fn, dx_fn);
+}
+
TEST_F(CWiseUnaryGradTest, Tanh) {
auto x_fn = [this](const int i) { return RV({0, -1, 1, -2, 2, -3, 3}); };
auto dy_fn = [this](const float x) { return x + RV({-2, 2, -3, 3, -4, 4}); };
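In these tests, x_fn supplies the sample inputs, dy_fn the incoming gradient, and dx_fn the expected gradient that the graph-built gradient is compared against. A quick standalone sanity check (illustrative only, not part of the test file) that the expected value dy * cosh(x) agrees with a central finite difference of sinh:

    #include <cmath>
    #include <cstdio>

    int main() {
      const double x = 0.5, dy = 2.0, h = 1e-5;
      // Expected gradient used by dx_fn above: dy * cosh(x).
      const double analytic = dy * std::cosh(x);
      // Central finite difference of y = sinh(x), scaled by the incoming dy.
      const double numeric =
          dy * (std::sinh(x + h) - std::sinh(x - h)) / (2 * h);
      std::printf("analytic=%.8f  numeric=%.8f\n", analytic, numeric);
      return 0;
    }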
diff --git a/tensorflow/cc/gradients/nn_grad.cc b/tensorflow/cc/gradients/nn_grad.cc
index 5e5203d090..952b2015ed 100644
--- a/tensorflow/cc/gradients/nn_grad.cc
+++ b/tensorflow/cc/gradients/nn_grad.cc
@@ -46,6 +46,19 @@ Status SoftmaxGrad(const Scope& scope, const Operation& op,
}
REGISTER_GRADIENT_OP("Softmax", SoftmaxGrad);
+Status LogSoftmaxGrad(const Scope& scope, const Operation& op,
+ const std::vector<Output>& grad_inputs,
+ std::vector<Output>* grad_outputs) {
+  // y = log_softmax(x);  dx = dy - softmax(x) * sum(dy, axis=1), with softmax(x) = exp(y).
+ auto softmax = Exp(scope, op.output(0));
+ auto sum = Sum(scope, grad_inputs[0], {1}, Sum::KeepDims(true));
+ auto mul = Mul(scope, sum, softmax);
+ auto dx = Sub(scope, grad_inputs[0], mul);
+ grad_outputs->push_back(dx);
+ return scope.status();
+}
+REGISTER_GRADIENT_OP("LogSoftmax", LogSoftmaxGrad);
+
Status ReluGradHelper(const Scope& scope, const Operation& op,
const std::vector<Output>& grad_inputs,
std::vector<Output>* grad_outputs) {
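For reference, the formula implemented in LogSoftmaxGrad follows from the chain rule; a derivation sketch (not part of the patch), where Exp of the op's output recovers softmax(x) from the stored log-softmax:

    \[
    y_i = x_i - \log\sum_j e^{x_j},
    \qquad
    \frac{\partial y_i}{\partial x_k} = \delta_{ik} - \operatorname{softmax}(x)_k
    \]
    \[
    \frac{\partial L}{\partial x_k}
      = \sum_i \frac{\partial L}{\partial y_i}\,\frac{\partial y_i}{\partial x_k}
      = dy_k - \operatorname{softmax}(x)_k \sum_i dy_i
    \]

This is exactly dx = dy - softmax(x) * sum(dy, axis=1); the row-wise Sum with KeepDims(true) keeps the summed term broadcastable against softmax(x).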
diff --git a/tensorflow/cc/gradients/nn_grad_test.cc b/tensorflow/cc/gradients/nn_grad_test.cc
index 70c9bd4e08..daa87546ec 100644
--- a/tensorflow/cc/gradients/nn_grad_test.cc
+++ b/tensorflow/cc/gradients/nn_grad_test.cc
@@ -57,6 +57,19 @@ TEST_F(NNGradTest, SoftmaxGrad) {
RunTest(x, shape, y, shape);
}
+TEST_F(NNGradTest, LogSoftmaxGrad) {
+ TensorShape shape({5, 3});
+ auto x = Placeholder(scope_, DT_FLOAT, Placeholder::Shape(shape));
+ auto y = LogSoftmax(scope_, x);
+ // Avoid numerical instability when computing finite differences.
+ Tensor x_init_value = test::AsTensor<float>(
+ {-0.9f, -0.7f, -0.5f, -0.3f, -0.1f,
+ 0.1f, 0.3f, 0.5f, 0.7f, 0.8f,
+ -0.1f, 0.1f, 0.1f, 0.1f, 1.2f},
+ {5, 3});
+ RunTest(x, x_init_value, y, shape);
+}
+
TEST_F(NNGradTest, ReluGrad) {
TensorShape shape({5, 2});
auto x = Placeholder(scope_, DT_FLOAT, Placeholder::Shape(shape));
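RunTest here drives the finite-difference gradient checker, hence the fixed x_init_value: moderate, well-separated entries keep the numerical derivative of log-softmax well conditioned. An illustrative standalone check (hypothetical, not part of the test) that the LogSoftmaxGrad formula matches a finite difference on one row of those init values:

    #include <algorithm>
    #include <cmath>
    #include <cstdio>
    #include <vector>

    // log-softmax of a single row, with the usual max-shift for stability.
    std::vector<double> LogSoftmax(const std::vector<double>& x) {
      const double m = *std::max_element(x.begin(), x.end());
      double s = 0;
      for (double v : x) s += std::exp(v - m);
      std::vector<double> y(x.size());
      for (size_t i = 0; i < x.size(); ++i) y[i] = x[i] - m - std::log(s);
      return y;
    }

    int main() {
      const std::vector<double> x = {-0.9, -0.7, -0.5};  // first row of x_init_value
      const std::vector<double> dy = {1.0, -2.0, 0.5};   // arbitrary upstream gradient
      const double h = 1e-6;
      const double sum_dy = dy[0] + dy[1] + dy[2];
      const std::vector<double> y = LogSoftmax(x);
      for (size_t k = 0; k < x.size(); ++k) {
        // Analytic gradient from LogSoftmaxGrad: dy - exp(y) * sum(dy).
        const double analytic = dy[k] - std::exp(y[k]) * sum_dy;
        // Central finite difference of sum_i dy[i] * y_i(x) w.r.t. x[k].
        std::vector<double> xp = x, xm = x;
        xp[k] += h;
        xm[k] -= h;
        const std::vector<double> yp = LogSoftmax(xp), ym = LogSoftmax(xm);
        double numeric = 0;
        for (size_t i = 0; i < x.size(); ++i) numeric += dy[i] * (yp[i] - ym[i]);
        numeric /= 2 * h;
        std::printf("k=%zu  analytic=%.6f  numeric=%.6f\n", k, analytic, numeric);
      }
      return 0;
    }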