about | summary | refs | log | tree | commit | diff | homepage
path: root/tensorflow/cc/gradients
diff options
context:
space:
mode:
author: Suharsh Sivakumar <suharshs@google.com> 2017-03-06 13:51:37 -0800
committer: TensorFlower Gardener <gardener@tensorflow.org> 2017-03-06 14:06:30 -0800
commit 5254f59f253db3d314fc62a75a3443d7e8503c84 (patch)
tree 1d182a1a57beb5439541cf40e9dd459879e406c2 /tensorflow/cc/gradients
parent 1ba1aaf324af0acf160ffdc98165afdb6c09dd7d (diff)
Add NoGradients for QuantizeAndDequantizeV2Op's input_min and input_max arguments.
Change: 149340183
Diffstat (limited to 'tensorflow/cc/gradients')
-rw-r--r-- tensorflow/cc/gradients/array_grad.cc | 11
1 file changed, 10 insertions(+), 1 deletion(-)
diff --git a/tensorflow/cc/gradients/array_grad.cc b/tensorflow/cc/gradients/array_grad.cc
index 58a34bd277..26abd2438e 100644
--- a/tensorflow/cc/gradients/array_grad.cc
+++ b/tensorflow/cc/gradients/array_grad.cc
@@ -89,7 +89,16 @@ Status QuantizeAndDequantizeGrad(const Scope& scope, const Operation& op,
return scope.status();
}
REGISTER_GRADIENT_OP("QuantizeAndDequantize", QuantizeAndDequantizeGrad);
-REGISTER_GRADIENT_OP("QuantizeAndDequantizeV2", QuantizeAndDequantizeGrad);
+
+Status QuantizeAndDequantizeV2Grad(const Scope& scope, const Operation& op,
+ const std::vector<Output>& grad_inputs,
+ std::vector<Output>* grad_outputs) {
+ grad_outputs->push_back(Identity(scope, grad_inputs[0]));
+ grad_outputs->push_back(NoGradient());
+ grad_outputs->push_back(NoGradient());
+ return scope.status();
+}
+REGISTER_GRADIENT_OP("QuantizeAndDequantizeV2", QuantizeAndDequantizeV2Grad);
Status SplitGrad(const Scope& scope, const Operation& op,
const std::vector<Output>& grad_inputs,