path: root/tensorflow/cc/gradients/nn_grad.cc
author    Cao Zongyan <zongyan.cao@alibaba-inc.com> 2018-08-29 17:05:43 +0800
committer Cao Zongyan <zongyan.cao@alibaba-inc.com> 2018-08-29 17:05:43 +0800
commit 4e72dd865a3fc83baa69f6b7c08720a1b546a464 (patch)
tree   9ac73a11393bf248e4f85c64feafda9316081781 /tensorflow/cc/gradients/nn_grad.cc
parent cb5c61a3e11a37fb39a246aaf8ed6d02dd9ae9ab (diff)
Refine LeakyRelu code.
1. Add the C++ gradient-of-gradient definition for LeakyRelu and a relevant unit test. 2. Use the forward compatibility layer for the Python code changes.
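Note: a minimal sketch of the kind of gradient-of-gradient check point 1 refers to, written against the NNGradTest fixture and RunTest helper already used in tensorflow/cc/gradients/nn_grad_test.cc. The test name and the concrete input values below are illustrative only, not taken from this commit:

TEST_F(NNGradTest, LeakyReluGradGrad) {
  TensorShape shape({5, 2});
  auto x = Placeholder(scope_, DT_FLOAT, Placeholder::Shape(shape));
  // Keep inputs away from zero, where the LeakyRelu gradient is not
  // differentiable and the numeric check would be unreliable.
  Tensor x_init_value = test::AsTensor<float>(
      {2.3f, 1.7f, 1.5f, 1.2f, 0.7f, 0.5f, -0.5f, -0.7f, -1.2f, -1.5f},
      {5, 2});
  Tensor features = test::AsTensor<float>(
      {-1.0f, -0.5f, 0.1f, 0.5f, 1.0f, 1.2f, 1.5f, 1.7f, 2.0f, 2.3f}, {5, 2});
  // Differentiating a LeakyReluGrad node is what exercises the new
  // LeakyReluGradGradHelper added in this change.
  auto y = ops::internal::LeakyReluGrad(scope_, x, features);
  RunTest(x, x_init_value, y, shape);
}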
Diffstat (limited to 'tensorflow/cc/gradients/nn_grad.cc')
-rw-r--r--  tensorflow/cc/gradients/nn_grad.cc | 18
1 file changed, 16 insertions(+), 2 deletions(-)
diff --git a/tensorflow/cc/gradients/nn_grad.cc b/tensorflow/cc/gradients/nn_grad.cc
index 0fc23d0bf7..2a32a2ed6f 100644
--- a/tensorflow/cc/gradients/nn_grad.cc
+++ b/tensorflow/cc/gradients/nn_grad.cc
@@ -149,13 +149,27 @@ Status LeakyReluGradHelper(const Scope& scope, const Operation& op,
float alpha;
TF_RETURN_IF_ERROR(GetNodeAttr(op.node()->attrs(), "alpha", &alpha));
internal::LeakyReluGrad::Attrs attrs;
- attrs.Alpha(alpha);
- auto dx = internal::LeakyReluGrad(scope, grad_inputs[0], op.input(0), attrs);
+ auto dx = internal::LeakyReluGrad(scope, grad_inputs[0], op.input(0),
+ attrs.Alpha(alpha));
grad_outputs->push_back(dx);
return scope.status();
}
REGISTER_GRADIENT_OP("LeakyRelu", LeakyReluGradHelper);
+Status LeakyReluGradGradHelper(const Scope& scope, const Operation& op,
+ const std::vector<Output>& grad_inputs,
+ std::vector<Output>* grad_outputs) {
+ float alpha;
+ TF_RETURN_IF_ERROR(GetNodeAttr(op.node()->attrs(), "alpha", &alpha));
+ internal::LeakyReluGrad::Attrs attrs;
+ auto dx = internal::LeakyReluGrad(scope, grad_inputs[0], op.input(1),
+ attrs.Alpha(alpha));
+ grad_outputs->push_back(dx);
+ grad_outputs->push_back(NoGradient());
+ return scope.status();
+}
+REGISTER_GRADIENT_OP("LeakyReluGrad", LeakyReluGradGradHelper);
+
Status EluGradHelper(const Scope& scope, const Operation& op,
const std::vector<Output>& grad_inputs,
std::vector<Output>* grad_outputs) {
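
Note: why the second-order rule above has this shape. LeakyReluGrad(g, x) computes g * (x > 0 ? 1 : alpha), which is linear in g, so its derivative with respect to g is the same mask applied to the incoming gradient; that is why LeakyReluGradGradHelper emits another LeakyReluGrad on grad_inputs[0] and the forward features op.input(1). The derivative with respect to the features is zero almost everywhere, hence NoGradient(). Below is a caller-side sketch of how the newly registered "LeakyReluGrad" gradient gets dispatched through the C++ AddSymbolicGradients API; the graph wiring and the function name are illustrative, not part of this commit:

#include "tensorflow/cc/framework/gradients.h"
#include "tensorflow/cc/framework/scope.h"
#include "tensorflow/cc/ops/nn_ops_internal.h"
#include "tensorflow/cc/ops/standard_ops.h"

namespace example {
using namespace tensorflow;       // NOLINT(build/namespaces)
using namespace tensorflow::ops;  // NOLINT(build/namespaces)

void LeakyReluSecondOrderSketch() {
  Scope scope = Scope::NewRootScope();
  auto features = Placeholder(scope, DT_FLOAT);
  auto grads = Placeholder(scope, DT_FLOAT);
  // First-order node: grads * (features > 0 ? 1 : alpha).
  auto dx = internal::LeakyReluGrad(scope, grads, features);
  // Differentiating dx dispatches to LeakyReluGradGradHelper: the gradient
  // w.r.t. grads is another LeakyReluGrad node, and the gradient w.r.t.
  // features is NoGradient(), matching what the helper pushes above.
  std::vector<Output> grad_outputs;
  TF_CHECK_OK(
      AddSymbolicGradients(scope, {dx}, {grads, features}, &grad_outputs));
}
}  // namespace example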