aboutsummaryrefslogtreecommitdiffhomepage
path: root/tensorflow/cc/gradients/math_grad.cc
diff options
context:
space:
mode:
Diffstat (limited to 'tensorflow/cc/gradients/math_grad.cc')
-rw-r--r--  tensorflow/cc/gradients/math_grad.cc  |  40
1 file changed, 40 insertions(+), 0 deletions(-)
diff --git a/tensorflow/cc/gradients/math_grad.cc b/tensorflow/cc/gradients/math_grad.cc
index 71d9a8ed7b..0b9b665b1e 100644
--- a/tensorflow/cc/gradients/math_grad.cc
+++ b/tensorflow/cc/gradients/math_grad.cc
@@ -203,6 +203,46 @@ Status TanhGrad(const Scope& scope, const Operation& op,
}
REGISTER_GRADIENT_OP("Tanh", TanhGrad);
+Status AsinhGrad(const Scope& scope, const Operation& op,
+ const std::vector<Output>& grad_inputs,
+ std::vector<Output>* grad_outputs) {
+ // y = asinh(x), i.e. x = sinh(y), so dy/dx = 1 / cosh(y).
+ // Computed from the op's output y (op.output(0)) rather than its input.
+ auto dydx = Reciprocal(scope, Cosh(scope, op.output(0)));
+ // grad(x) = grad(y) * conj(dy/dx); the conjugate keeps the chain rule
+ grad_outputs->push_back(
+ Mul(scope, grad_inputs[0], ConjugateHelper(scope, dydx)));
+ return scope.status();
+}
+REGISTER_GRADIENT_OP("Asinh", AsinhGrad);
+
+Status AcoshGrad(const Scope& scope, const Operation& op,
+ const std::vector<Output>& grad_inputs,
+ std::vector<Output>* grad_outputs) {
+ // y = acosh(x), i.e. x = cosh(y), so dy/dx = 1 / sinh(y).
+ // Computed from the op's output y (op.output(0)); unbounded as x -> 1.
+ auto dydx = Reciprocal(scope, Sinh(scope, op.output(0)));
+ // grad(x) = grad(y) * conj(dy/dx); the conjugate keeps the chain rule
+ grad_outputs->push_back(
+ Mul(scope, grad_inputs[0], ConjugateHelper(scope, dydx)));
+ return scope.status();
+}
+REGISTER_GRADIENT_OP("Acosh", AcoshGrad);
+
+Status AtanhGrad(const Scope& scope, const Operation& op,
+ const std::vector<Output>& grad_inputs,
+ std::vector<Output>* grad_outputs) {
+ // y = atanh(x); here dy/dx = 1 / (1 - x^2) is computed from the op's
+ // input x (op.input(0)), unlike Asinh/Acosh which reuse the output.
+ auto one = Cast(scope, Const(scope, 1.0), op.input(0).type());
+ auto dydx = Reciprocal(scope, Sub(scope, one, Square(scope, op.input(0))));
+ // grad(x) = grad(y) * conj(dy/dx); the conjugate keeps the chain rule
+ grad_outputs->push_back(
+ Mul(scope, grad_inputs[0], ConjugateHelper(scope, dydx)));
+ return scope.status();
+}
+REGISTER_GRADIENT_OP("Atanh", AtanhGrad);
+
Status SigmoidGrad(const Scope& scope, const Operation& op,
const std::vector<Output>& grad_inputs,
std::vector<Output>* grad_outputs) {