about summary refs log tree commit diff homepage
path: root/tensorflow/cc/gradients/nn_grad.cc
diff options
context:
space:
mode:
Diffstat (limited to 'tensorflow/cc/gradients/nn_grad.cc')
-rw-r--r--  tensorflow/cc/gradients/nn_grad.cc  64
1 files changed, 64 insertions, 0 deletions
diff --git a/tensorflow/cc/gradients/nn_grad.cc b/tensorflow/cc/gradients/nn_grad.cc
index 9b732421e5..0cb3132e94 100644
--- a/tensorflow/cc/gradients/nn_grad.cc
+++ b/tensorflow/cc/gradients/nn_grad.cc
@@ -182,6 +182,70 @@ Status MaxPoolGradV2Helper(const Scope& scope, const Operation& op,
}
REGISTER_GRADIENT_OP("MaxPoolV2", MaxPoolGradV2Helper);
+// Gradient function for MaxPool3D. Reads the pooling attributes (ksize,
+// strides, padding, data_format) off the forward node and passes them,
+// together with the original input, the original output, and the incoming
+// gradient, to the MaxPool3DGrad op.
+Status MaxPool3DGradHelper(const Scope& scope, const Operation& op,
+ const std::vector<Output>& grad_inputs,
+ std::vector<Output>* grad_outputs) {
+ std::vector<int32> ksize;
+ std::vector<int32> strides;
+ string padding;
+ string data_format;
+ // The attributes live on the forward op's node; output(0).node() is that
+ // node.
+ auto attrs = op.output(0).node()->attrs();
+ TF_RETURN_IF_ERROR(GetNodeAttr(attrs, "ksize", &ksize));
+ TF_RETURN_IF_ERROR(GetNodeAttr(attrs, "strides", &strides));
+ TF_RETURN_IF_ERROR(GetNodeAttr(attrs, "padding", &padding));
+ TF_RETURN_IF_ERROR(GetNodeAttr(attrs, "data_format", &data_format));
+ MaxPool3DGrad::Attrs grad_attrs;
+ // Arguments: original input, original output, incoming gradient, plus the
+ // forward op's pooling configuration.
+ auto dx = MaxPool3DGrad(scope, op.input(0), op.output(0), grad_inputs[0],
+ ksize, strides, padding,
+ grad_attrs.DataFormat(data_format));
+ grad_outputs->push_back(dx);
+ // Propagate any error recorded on the scope while building the gradient.
+ return scope.status();
+}
+REGISTER_GRADIENT_OP("MaxPool3D", MaxPool3DGradHelper);
+
+// Gradient function for AvgPool. Unlike max pooling, the average-pool
+// gradient only needs the shape of the original input (not its values), so
+// Shape(op.input(0)) is passed to internal::AvgPoolGrad along with the
+// incoming gradient and the forward op's pooling attributes.
+Status AvgPoolGradHelper(const Scope& scope, const Operation& op,
+ const std::vector<Output>& grad_inputs,
+ std::vector<Output>* grad_outputs) {
+ std::vector<int32> ksize;
+ std::vector<int32> strides;
+ string padding;
+ string data_format;
+ // The attributes live on the forward op's node; output(0).node() is that
+ // node.
+ auto attrs = op.output(0).node()->attrs();
+ TF_RETURN_IF_ERROR(GetNodeAttr(attrs, "ksize", &ksize));
+ TF_RETURN_IF_ERROR(GetNodeAttr(attrs, "strides", &strides));
+ TF_RETURN_IF_ERROR(GetNodeAttr(attrs, "padding", &padding));
+ TF_RETURN_IF_ERROR(GetNodeAttr(attrs, "data_format", &data_format));
+ // AvgPoolGrad is in the internal namespace (no public C++ op wrapper).
+ internal::AvgPoolGrad::Attrs grad_attrs;
+ auto dx =
+ internal::AvgPoolGrad(scope, Shape(scope, op.input(0)), grad_inputs[0],
+ ksize, strides, padding,
+ grad_attrs.DataFormat(data_format));
+ grad_outputs->push_back(dx);
+ // Propagate any error recorded on the scope while building the gradient.
+ return scope.status();
+}
+REGISTER_GRADIENT_OP("AvgPool", AvgPoolGradHelper);
+
+// Gradient function for AvgPool3D. Mirrors AvgPoolGradHelper: the gradient
+// op only needs the original input's shape, the incoming gradient, and the
+// forward op's pooling attributes. (AvgPool3DGrad has a public wrapper, so
+// no internal:: qualifier is needed here.)
+Status AvgPool3DGradHelper(const Scope& scope, const Operation& op,
+ const std::vector<Output>& grad_inputs,
+ std::vector<Output>* grad_outputs) {
+ std::vector<int32> ksize;
+ std::vector<int32> strides;
+ string padding;
+ string data_format;
+ // The attributes live on the forward op's node; output(0).node() is that
+ // node.
+ auto attrs = op.output(0).node()->attrs();
+ TF_RETURN_IF_ERROR(GetNodeAttr(attrs, "ksize", &ksize));
+ TF_RETURN_IF_ERROR(GetNodeAttr(attrs, "strides", &strides));
+ TF_RETURN_IF_ERROR(GetNodeAttr(attrs, "padding", &padding));
+ TF_RETURN_IF_ERROR(GetNodeAttr(attrs, "data_format", &data_format));
+ AvgPool3DGrad::Attrs grad_attrs;
+ auto dx = AvgPool3DGrad(scope, Shape(scope, op.input(0)), grad_inputs[0],
+ ksize, strides, padding,
+ grad_attrs.DataFormat(data_format));
+ grad_outputs->push_back(dx);
+ // Propagate any error recorded on the scope while building the gradient.
+ return scope.status();
+}
+REGISTER_GRADIENT_OP("AvgPool3D", AvgPool3DGradHelper);
+
Status LRNGradHelper(const Scope& scope, const Operation& op,
const std::vector<Output>& grad_inputs,
std::vector<Output>* grad_outputs) {