diff options
author | Yan Facai (颜发才) <facai.yan@gmail.com> | 2018-08-14 14:08:15 +0800 |
---|---|---|
committer | Yan Facai (颜发才) <facai.yan@gmail.com> | 2018-08-14 14:08:15 +0800 |
commit | 3c83ef9fbc8dc23ab0878cffa13ecbfd07ac70e5 (patch) | |
tree | b53c9e3df64f7911ec847c7c8e0617b0fd8e1b91 /tensorflow/cc/gradients/math_grad.cc | |
parent | a90fce71faaa356b531157c2e00804046961b39d (diff) |
CLN: rename UnsafeDiv => DivNoNan
Diffstat (limited to 'tensorflow/cc/gradients/math_grad.cc')
-rw-r--r-- | tensorflow/cc/gradients/math_grad.cc | 15 |
1 file changed, 7 insertions, 8 deletions
diff --git a/tensorflow/cc/gradients/math_grad.cc b/tensorflow/cc/gradients/math_grad.cc index c6e60689fa..cd215f740d 100644 --- a/tensorflow/cc/gradients/math_grad.cc +++ b/tensorflow/cc/gradients/math_grad.cc @@ -441,21 +441,20 @@ Status RealDivGrad(const Scope& scope, const Operation& op, } REGISTER_GRADIENT_OP("RealDiv", RealDivGrad); -Status UnsafeDivGrad(const Scope& scope, const Operation& op, - const std::vector<Output>& grad_inputs, - std::vector<Output>* grad_outputs) { +Status DivNoNanGrad(const Scope& scope, const Operation& op, + const std::vector<Output>& grad_inputs, + std::vector<Output>* grad_outputs) { auto x_1 = ConjugateHelper(scope, op.input(0)); auto x_2 = ConjugateHelper(scope, op.input(1)); // y = x_1 / x_2 // dy/dx_1 = 1/x_2 // dy/dx_2 = -x_1/x_2^2 - auto gx_1 = UnsafeDiv(scope, grad_inputs[0], x_2); - auto gx_2 = - Mul(scope, grad_inputs[0], - UnsafeDiv(scope, UnsafeDiv(scope, Neg(scope, x_1), x_2), x_2)); + auto gx_1 = DivNoNan(scope, grad_inputs[0], x_2); + auto gx_2 = Mul(scope, grad_inputs[0], + DivNoNan(scope, DivNoNan(scope, Neg(scope, x_1), x_2), x_2)); return BinaryGradCommon(scope, op, grad_outputs, gx_1, gx_2); } -REGISTER_GRADIENT_OP("UnsafeDiv", UnsafeDivGrad); +REGISTER_GRADIENT_OP("DivNoNan", DivNoNanGrad); Status SquaredDifferenceGrad(const Scope& scope, const Operation& op, const std::vector<Output>& grad_inputs, |