diff options
author | Alexandre Passos <apassos@google.com> | 2018-05-25 13:20:13 -0700 |
---|---|---|
committer | TensorFlower Gardener <gardener@tensorflow.org> | 2018-05-25 13:23:08 -0700 |
commit | 68430112b2ca5c160db6dd412d43f572ec69e72f (patch) | |
tree | 90b2c71f25bd56ddcc179bfd0855bf858f9a6384 /tensorflow/contrib/optimizer_v2 | |
parent | b6ae98b4ac1ec3051d81f3133b827d6bb305aa2b (diff) |
Public API to switch between eager execution and graph building.
Now, after tf.enable_eager_execution() has been executed, entering the
as_default() context manager of a tf.Graph will switch back to graph building. So, for example
```
tf.enable_eager_execution()
with tf.Graph().as_default():
  c = tf.constant(1.0)  # this is a graph tensor
c2 = tf.constant(1.0)  # this is an eager tensor
```
The main use-case of this is allowing documentation writers to make a single
notebook which starts with eager execution and seamlessly transitions to
building graphs.
This also makes many explicit enablings of graph mode in the code redundant
(a cleanup cl will follow).
PiperOrigin-RevId: 198092991
Diffstat (limited to 'tensorflow/contrib/optimizer_v2')
-rw-r--r-- | tensorflow/contrib/optimizer_v2/momentum_test.py | 11 |
1 file changed, 2 insertions, 9 deletions
diff --git a/tensorflow/contrib/optimizer_v2/momentum_test.py b/tensorflow/contrib/optimizer_v2/momentum_test.py index 26724f66c2..24cdab4626 100644 --- a/tensorflow/contrib/optimizer_v2/momentum_test.py +++ b/tensorflow/contrib/optimizer_v2/momentum_test.py @@ -134,7 +134,6 @@ class MomentumOptimizerTest(test.TestCase): with context.eager_mode(): self.doTestBasic(use_resource=True, use_callable_params=True) - @test_util.run_in_graph_and_eager_modes(reset_test=True) def testVariablesAcrossGraphs(self): optimizer = momentum_lib.MomentumOptimizer(0.01, 0.5) with ops.Graph().as_default(): @@ -142,10 +141,7 @@ class MomentumOptimizerTest(test.TestCase): [1.0, 2.0], dtype=dtypes.float32, name="var0") var1 = resource_variable_ops.ResourceVariable( [3.0, 4.0], dtype=dtypes.float32, name="var1") - if context.executing_eagerly(): - loss = lambda: math_ops.reduce_sum(var0 + var1) - else: - loss = math_ops.reduce_sum(var0 + var1) + loss = math_ops.reduce_sum(var0 + var1) optimizer.minimize(loss) optimizer_variables = optimizer.variables() self.assertStartsWith(optimizer_variables[0].name, "var0") @@ -157,10 +153,7 @@ class MomentumOptimizerTest(test.TestCase): [1.0, 2.0], dtype=dtypes.float32, name="var2") var3 = resource_variable_ops.ResourceVariable( [3.0, 4.0], dtype=dtypes.float32, name="var3") - if context.executing_eagerly(): - loss = lambda: math_ops.reduce_sum(var2 + var3) - else: - loss = math_ops.reduce_sum(var2 + var3) + loss = math_ops.reduce_sum(var2 + var3) optimizer.minimize(loss) optimizer_variables = optimizer.variables() self.assertStartsWith(optimizer_variables[0].name, "var2") |