diff options
Diffstat (limited to 'tensorflow/cc/gradients/nn_grad.cc')
-rw-r--r-- | tensorflow/cc/gradients/nn_grad.cc | 13 |
1 files changed, 0 insertions, 13 deletions
diff --git a/tensorflow/cc/gradients/nn_grad.cc b/tensorflow/cc/gradients/nn_grad.cc
index 952b2015ed..5e5203d090 100644
--- a/tensorflow/cc/gradients/nn_grad.cc
+++ b/tensorflow/cc/gradients/nn_grad.cc
@@ -46,19 +46,6 @@ Status SoftmaxGrad(const Scope& scope, const Operation& op,
 }
 REGISTER_GRADIENT_OP("Softmax", SoftmaxGrad);
 
-Status LogSoftmaxGrad(const Scope& scope, const Operation& op,
-                      const std::vector<Output>& grad_inputs,
-                      std::vector<Output>* grad_outputs) {
-
-  auto softmax = Exp(scope, op.output(0));
-  auto sum = Sum(scope, grad_inputs[0], {1}, Sum::KeepDims(true));
-  auto mul = Mul(scope, sum, softmax);
-  auto dx = Sub(scope, grad_inputs[0], mul);
-  grad_outputs->push_back(dx);
-  return scope.status();
-}
-REGISTER_GRADIENT_OP("LogSoftmax", LogSoftmaxGrad);
-
 Status ReluGradHelper(const Scope& scope, const Operation& op,
                       const std::vector<Output>& grad_inputs,
                       std::vector<Output>* grad_outputs) {