author    A. Unique TensorFlower <gardener@tensorflow.org>  2017-08-16 10:02:18 -0700
committer TensorFlower Gardener <gardener@tensorflow.org>   2017-08-16 10:06:14 -0700
commit    48c48729a1770eb1e59e86345b8768b466722041 (patch)
tree      f6c8043d78282c968e0332065c6d689cb7510a08 /tensorflow/contrib/compiler
parent    ea1a9de103eeb13846d9bf8cf80c43c76ed8f36e (diff)
Allow Defun to inherit enclosing XLA compilation scope.
PiperOrigin-RevId: 165457067
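
For context, a minimal sketch of the behavior this change enables, using the TF 1.x contrib APIs the tests below exercise (the scope names b"jit_scope_0" and b"function_mulop" come from the test expectations, not a public contract): a compiled Defun defined inside an experimental_jit_scope now inherits the enclosing scope's _XlaScope attribute instead of always creating its own function_<name> scope.

    from tensorflow.contrib.compiler import jit
    from tensorflow.python.framework import constant_op
    from tensorflow.python.framework import function
    from tensorflow.python.framework import ops

    with ops.Graph().as_default() as g:
      with jit.experimental_jit_scope(True):

        @function.Defun(compiled=True, noinline=True)
        def mulop(x1, x2):
          return x1 * x2

        # Calling the Defun adds it to the graph's function library.
        r = mulop(constant_op.constant(1.0), constant_op.constant(1.0))

      func_attrs = g.as_graph_def().library.function[0].attr
      # After this change the function carries the enclosing scope's name:
      print(func_attrs["_XlaScope"].s)  # b"jit_scope_0" (previously b"function_mulop")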
Diffstat (limited to 'tensorflow/contrib/compiler')
-rw-r--r--  tensorflow/contrib/compiler/jit_test.py | 43
1 file changed, 37 insertions(+), 6 deletions(-)
diff --git a/tensorflow/contrib/compiler/jit_test.py b/tensorflow/contrib/compiler/jit_test.py
index 5e03833260..94aff13a49 100644
--- a/tensorflow/contrib/compiler/jit_test.py
+++ b/tensorflow/contrib/compiler/jit_test.py
@@ -137,6 +137,37 @@ class JITTest(test.TestCase):
       self.assertAllClose(v_true_1, v_true_2)
       self.assertAllClose(v_false_1, v_true_1)
+  def testDefunNoJitScope(self):
+    with self.test_session(graph=ops.Graph()):
+      @function.Defun(compiled=True, noinline=True)
+      def mulop(x1, x2):
+        return x1 * x2
+      x = constant_op.constant(1.0)
+      r = mulop(x, x)
+
+      # Ensure the forward function is compiled.
+      graph_def = r.graph.as_graph_def()
+      func_attrs = graph_def.library.function[0].attr
+      self.assertTrue(func_attrs["_XlaCompile"].b)
+      # No enclosing jit scope so function sets its own value for _XlaScope.
+      self.assertEqual(b"function_mulop", func_attrs["_XlaScope"].s)
+
+  def testDefunInheritsJitScope(self):
+    with self.test_session(graph=ops.Graph()):
+      with jit.experimental_jit_scope(True):
+        @function.Defun(compiled=True, noinline=True)
+        def mulop(x1, x2):
+          return x1 * x2
+        x = constant_op.constant(1.0)
+        r = mulop(x, x)
+
+      # Ensure the forward function is compiled.
+      graph_def = r.graph.as_graph_def()
+      func_attrs = graph_def.library.function[0].attr
+      self.assertTrue(func_attrs["_XlaCompile"].b)
+      # Ensure _XlaScope is inherited from enclosing context.
+      self.assertEqual(b"jit_scope_0", func_attrs["_XlaScope"].s)
+
 class CompilationEnabledInGradientTest(test.TestCase):
@@ -211,7 +242,7 @@ class CompilationEnabledInGradientTest(test.TestCase):
   def testPlaysNicelyWithDefun(self):
     with self.test_session(graph=ops.Graph()) as sess:
-      with jit.experimental_jit_scope(True):  # This should be ignored
+      with jit.experimental_jit_scope(True):
         @function.Defun(compiled=True, noinline=True)
         def mulop(x1, x2):
           return x1 * x2
@@ -223,20 +254,20 @@ class CompilationEnabledInGradientTest(test.TestCase):
       graph_def = r.graph.as_graph_def()
       func_attrs = graph_def.library.function[0].attr
       self.assertTrue(func_attrs["_XlaCompile"].b)
-      self.assertEqual(b"function_mulop", func_attrs["_XlaScope"].s)
+      self.assertEqual(b"jit_scope_0", func_attrs["_XlaScope"].s)
       # Ensure the gradient (SymbolicGradient) is compiled, with the same
       # _XlaScope as the function itself.
       grad_op = g_r.op.inputs[0].op
       self.assertTrue(grad_op.get_attr("_XlaCompile"))
-      self.assertEqual(b"function_mulop", grad_op.get_attr("_XlaScope"))
+      self.assertEqual(b"jit_scope_0", grad_op.get_attr("_XlaScope"))
       # Ensure the ops run: grad(x1*x1) = 2*x1
       self.assertAllClose([1.0, 1.0, 2.0], sess.run([x, r, g_r]))
   def testPlaysNicelyWithDefunSeparateGradientScope(self):
     with self.test_session(graph=ops.Graph()) as sess:
-      with jit.experimental_jit_scope(True):  # This should be ignored
+      with jit.experimental_jit_scope(True):
         @function.Defun(
             compiled=True, noinline=True, separate_compiled_gradients=True)
@@ -251,13 +282,13 @@ class CompilationEnabledInGradientTest(test.TestCase):
       graph_def = r.graph.as_graph_def()
       func_attrs = graph_def.library.function[0].attr
       self.assertTrue(func_attrs["_XlaCompile"].b)
-      self.assertEqual(b"function_mulop", func_attrs["_XlaScope"].s)
+      self.assertEqual(b"jit_scope_0", func_attrs["_XlaScope"].s)
       # Ensure the gradient (SymbolicGradient) is compiled, with a different
       # _XlaScope from the function itself.
       grad_op = g_r.op.inputs[0].op
       self.assertTrue(grad_op.get_attr("_XlaCompile"))
-      self.assertEqual(b"function_mulop_grad_GA",
+      self.assertEqual(b"jit_scope_0_grad_GA",
                        grad_op.get_attr("_XlaScope"))
       # Ensure the ops run: grad(x1*x1) = 2*x1
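
A usage note on the gradient hunks above, again a sketch that mirrors the test structure rather than a public API guarantee: without separate_compiled_gradients the SymbolicGradient op shares the forward function's (now inherited) _XlaScope, while separate_compiled_gradients=True derives a per-gradient scope such as the b"jit_scope_0_grad_GA" value asserted above.

    from tensorflow.contrib.compiler import jit
    from tensorflow.python.framework import constant_op
    from tensorflow.python.framework import function
    from tensorflow.python.framework import ops
    from tensorflow.python.ops import gradients_impl

    with ops.Graph().as_default():
      with jit.experimental_jit_scope(True):

        @function.Defun(
            compiled=True, noinline=True, separate_compiled_gradients=True)
        def mulop(x1, x2):
          return x1 * x2

        x = constant_op.constant([1.0])
        r = mulop(x, x)

      g_r = gradients_impl.gradients(r, x)[0]
      grad_op = g_r.op.inputs[0].op  # the SymbolicGradient op, as in the tests
      print(grad_op.get_attr("_XlaScope"))  # b"jit_scope_0_grad_GA" per the test above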