diff options
author | 2016-05-31 13:10:05 -0800 | |
---|---|---|
committer | 2016-05-31 14:17:15 -0700 | |
commit | 77a5ee5e64f26a235c4c8583f24f3af10cb1b2a8 (patch) | |
tree | bd4115cd289fc03f9bc0b6dbdbd6729be64538a2 | |
parent | 87dbd70a4d8f168593347e532ba59e1e5ac96705 (diff) |
add relu6 symbolic gradient
Change: 123681493
-rw-r--r-- | tensorflow/core/ops/nn_grad.cc | 18 |
1 file changed, 18 insertions, 0 deletions
diff --git a/tensorflow/core/ops/nn_grad.cc b/tensorflow/core/ops/nn_grad.cc index 2565fc0719..3055586629 100644 --- a/tensorflow/core/ops/nn_grad.cc +++ b/tensorflow/core/ops/nn_grad.cc @@ -67,6 +67,24 @@ Status ReluGrad(const AttrSlice& attrs, FunctionDef* g) { } REGISTER_OP_GRADIENT("Relu", ReluGrad); +Status Relu6Grad(const AttrSlice& attrs, FunctionDef* g) { + // clang-format off + *g = FDH::Define( + // Arg defs + {"x: T", "dy: T"}, + // Ret val defs + {"dx: T"}, + // Attr defs + {{"T: {float, double}"}}, + // Nodes + { + {{"dx"}, "Relu6Grad", {"dy", "x"}, {{"T", "$T"}}} + }); + // clang-format on + return Status::OK(); +} +REGISTER_OP_GRADIENT("Relu6", Relu6Grad); + Status CrossEntropyGrad(const AttrSlice& attrs, FunctionDef* g) { // clang-format off *g = FDH::Define( |