Diffstat (limited to 'tensorflow/cc/gradients/math_grad.cc')
-rw-r--r--  tensorflow/cc/gradients/math_grad.cc | 26 --------------------------
1 file changed, 0 insertions(+), 26 deletions(-)
diff --git a/tensorflow/cc/gradients/math_grad.cc b/tensorflow/cc/gradients/math_grad.cc
index 71d9a8ed7b..8c1a01f518 100644
--- a/tensorflow/cc/gradients/math_grad.cc
+++ b/tensorflow/cc/gradients/math_grad.cc
@@ -162,32 +162,6 @@ Status Log1pGrad(const Scope& scope, const Operation& op,
}
REGISTER_GRADIENT_OP("Log1p", Log1pGrad);
-Status SinhGrad(const Scope& scope, const Operation& op,
- const std::vector<Output>& grad_inputs,
- std::vector<Output>* grad_outputs) {
- // y = sinh(x)
- // dy/dx = cosh(x)
- auto dydx = Cosh(scope, op.input(0));
- // grad(x) = grad(y) * conj(dy/dx)
- grad_outputs->push_back(
- Mul(scope, grad_inputs[0], ConjugateHelper(scope, dydx)));
- return scope.status();
-}
-REGISTER_GRADIENT_OP("Sinh", SinhGrad);
-
-Status CoshGrad(const Scope& scope, const Operation& op,
- const std::vector<Output>& grad_inputs,
- std::vector<Output>* grad_outputs) {
- // y = cosh(x)
- // dy/dx = sinh(x)
- auto dydx = Sinh(scope, op.input(0));
- // grad(x) = grad(y) * conj(dy/dx)
- grad_outputs->push_back(
- Mul(scope, grad_inputs[0], ConjugateHelper(scope, dydx)));
- return scope.status();
-}
-REGISTER_GRADIENT_OP("Cosh", CoshGrad);
-
Status TanhGrad(const Scope& scope, const Operation& op,
const std::vector<Output>& grad_inputs,
std::vector<Output>* grad_outputs) {
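
For context, here is a minimal sketch (not part of this patch) of how the removed registrations are reached from the C++ API. AddSymbolicGradients looks up each op's gradient function in the registry that REGISTER_GRADIENT_OP populates, so with SinhGrad and CoshGrad deleted, differentiating through Sinh or Cosh from C++ should fail with a not-found status instead of emitting the conj(cosh(x)) / conj(sinh(x)) terms shown above. AddSymbolicGradients and the ops::Sinh wrapper are real TensorFlow C++ APIs; the main() scaffolding and the exact error text are illustrative assumptions.

#include "tensorflow/cc/framework/gradients.h"
#include "tensorflow/cc/framework/scope.h"
#include "tensorflow/cc/ops/standard_ops.h"
#include "tensorflow/core/platform/logging.h"

int main() {
  using namespace tensorflow;

  Scope scope = Scope::NewRootScope();
  auto x = ops::Placeholder(scope, DT_FLOAT);
  auto y = ops::Sinh(scope, x);  // y = sinh(x)

  // AddSymbolicGradients resolves "Sinh" in the gradient-op registry.
  // Before this patch it would build grad(x) = grad(y) * conj(cosh(x));
  // after it, the registry lookup fails and an error status is returned.
  std::vector<Output> grads;
  Status s = AddSymbolicGradients(scope, {y}, {x}, &grads);
  if (!s.ok()) {
    LOG(ERROR) << s;  // expected: a not-found status for the "Sinh" gradient
  }
  return 0;
}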