about summary refs log tree commit diff homepage
path: root/tensorflow/contrib/compiler
diff options
context:
space:
mode:
authorGravatar A. Unique TensorFlower <gardener@tensorflow.org>2018-04-26 12:42:54 -0700
committerGravatar TensorFlower Gardener <gardener@tensorflow.org>2018-04-26 12:45:22 -0700
commitf63750645826df65b05cad505546a86f0e347674 (patch)
tree8467d73780d74b0f7ef4c87f8866d3bf0a233254 /tensorflow/contrib/compiler
parent667077cbd2cc86c4a656233a2d5f579aa4caf1f1 (diff)
For tf.gradients(), do not backpropagate through integer tensors.
All integer tensors are now considered constant with respect to all `xs`. This fixes a bug in gradients through tf.while_loop. PiperOrigin-RevId: 194438529
Diffstat (limited to 'tensorflow/contrib/compiler')
-rw-r--r--tensorflow/contrib/compiler/jit_test.py10
1 file changed, 5 insertions(+), 5 deletions(-)
diff --git a/tensorflow/contrib/compiler/jit_test.py b/tensorflow/contrib/compiler/jit_test.py
index 29a593f6bc..b2f678fb29 100644
--- a/tensorflow/contrib/compiler/jit_test.py
+++ b/tensorflow/contrib/compiler/jit_test.py
@@ -175,7 +175,7 @@ class CompilationEnabledInGradientTest(test.TestCase):
def testCompilationInGradient(self):
with self.test_session():
- x = constant_op.constant([[3]])
+ x = constant_op.constant([[3.]])
y_nc = math_ops.matmul(x, x, name="not_compiled")
with jit.experimental_jit_scope():
y_c = math_ops.matmul(y_nc, y_nc, name="compiled")
@@ -200,11 +200,11 @@ class CompilationEnabledInGradientTest(test.TestCase):
with self.test_session(graph=ops.Graph()):
with jit.experimental_jit_scope():
# XlaScope 0
- a1 = constant_op.constant([[1]])
+ a1 = constant_op.constant([[1.]])
a1t = math_ops.matmul(a1, a1)
with jit.experimental_jit_scope():
# XlaScope 1
- a2 = constant_op.constant([[1]])
+ a2 = constant_op.constant([[1.]])
a2t = math_ops.matmul(a2, a2)
self.assertEqual(b"jit_scope_0", a1.op.get_attr("_XlaScope"))
@@ -222,11 +222,11 @@ class CompilationEnabledInGradientTest(test.TestCase):
with self.test_session(graph=ops.Graph()):
with jit.experimental_jit_scope(True, separate_compiled_gradients=True):
# XlaScope 0
- a1 = constant_op.constant([[1]])
+ a1 = constant_op.constant([[1.]])
a1t = math_ops.matmul(a1, a1)
with jit.experimental_jit_scope(True, separate_compiled_gradients=True):
# XlaScope 1
- a2 = constant_op.constant([[1]])
+ a2 = constant_op.constant([[1.]])
a2t = math_ops.matmul(a2, a2)
self.assertEqual(b"jit_scope_0", a1.op.get_attr("_XlaScope"))