author    Tom Hennigan <tomhennigan@google.com>           2018-06-22 01:46:03 -0700
committer TensorFlower Gardener <gardener@tensorflow.org> 2018-06-22 01:49:29 -0700
commit    945d1a77aebb2071b571598cb1d02fac5b1370c1 (patch)
tree      efce5ed23c87ad2460916ad1b08211ee6359a98c /tensorflow/contrib/optimizer_v2
parent    9682324b40ed36963cced138e21de29518d6843c (diff)
Replace unnecessary `()` in `run_in_graph_and_eager_modes()`.
PiperOrigin-RevId: 201652888
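
Context for the change: `test_util.run_in_graph_and_eager_modes` works both as a bare decorator and when called, so the trailing `()` is redundant. The sketch below is a minimal illustration of the optional-parentheses decorator pattern that makes this possible; the mode-switching bodies are placeholders, not TensorFlow's actual implementation (the real decorator also takes keyword options).

import functools

def run_in_graph_and_eager_modes(func=None, **options):
  # Usable as @run_in_graph_and_eager_modes (bare) or as
  # @run_in_graph_and_eager_modes(...) (called with options).
  def decorator(f):
    @functools.wraps(f)
    def wrapper(self, *args, **kwargs):
      f(self, *args, **kwargs)  # placeholder: graph-mode pass
      f(self, *args, **kwargs)  # placeholder: eager-mode pass
    return wrapper
  if func is not None:
    # Bare form: Python passed the test method directly; decorate it now.
    return decorator(func)
  # Called form: return the decorator for Python to apply.
  return decorator
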
Diffstat (limited to 'tensorflow/contrib/optimizer_v2')
-rw-r--r--  tensorflow/contrib/optimizer_v2/checkpointable_utils_test.py  12
-rw-r--r--  tensorflow/contrib/optimizer_v2/optimizer_v2_test.py          14
2 files changed, 13 insertions, 13 deletions
diff --git a/tensorflow/contrib/optimizer_v2/checkpointable_utils_test.py b/tensorflow/contrib/optimizer_v2/checkpointable_utils_test.py
index 64b95786b5..b6972a7a45 100644
--- a/tensorflow/contrib/optimizer_v2/checkpointable_utils_test.py
+++ b/tensorflow/contrib/optimizer_v2/checkpointable_utils_test.py
@@ -226,7 +226,7 @@ class CheckpointingTests(test.TestCase):
             optimizer_node.slot_variables[0]
             .slot_variable_node_id].attributes[0].checkpoint_key)
 
-  @test_util.run_in_graph_and_eager_modes()
+  @test_util.run_in_graph_and_eager_modes
   def testSaveRestore(self):
     model = MyModel()
     optimizer = adam.AdamOptimizer(0.001)
@@ -347,7 +347,7 @@ class CheckpointingTests(test.TestCase):
         self.assertEqual(training_continuation + 1,
                          session.run(root.save_counter))
 
-  @test_util.run_in_graph_and_eager_modes()
+  @test_util.run_in_graph_and_eager_modes
   def testAgnosticUsage(self):
     """Graph/eager agnostic usage."""
     # Does create garbage when executing eagerly due to ops.Graph() creation.
@@ -381,7 +381,7 @@ class CheckpointingTests(test.TestCase):
                        self.evaluate(root.save_counter))
 
   # pylint: disable=cell-var-from-loop
-  @test_util.run_in_graph_and_eager_modes()
+  @test_util.run_in_graph_and_eager_modes
   def testWithDefun(self):
     num_training_steps = 2
     checkpoint_directory = self.get_temp_dir()
@@ -453,7 +453,7 @@ class CheckpointingTests(test.TestCase):
       optimizer.apply_gradients(
           [(g, v) for g, v in zip(grad, model.vars)])
 
-  @test_util.run_in_graph_and_eager_modes()
+  @test_util.run_in_graph_and_eager_modes
   def testDeferredSlotRestoration(self):
     checkpoint_directory = self.get_temp_dir()
 
@@ -616,7 +616,7 @@ class CheckpointingTests(test.TestCase):
 
 class TemplateTests(test.TestCase):
 
-  @test_util.run_in_graph_and_eager_modes()
+  @test_util.run_in_graph_and_eager_modes
   def test_checkpointable_save_restore(self):
 
     def _templated():
@@ -712,7 +712,7 @@ class CheckpointCompatibilityTests(test.TestCase):
           sess=session, save_path=checkpoint_prefix,
           global_step=root.optimizer_step)
 
-  @test_util.run_in_graph_and_eager_modes()
+  @test_util.run_in_graph_and_eager_modes
   def testLoadFromNameBasedSaver(self):
     """Save a name-based checkpoint, load it using the object-based API."""
     with test_util.device(use_gpu=True):
diff --git a/tensorflow/contrib/optimizer_v2/optimizer_v2_test.py b/tensorflow/contrib/optimizer_v2/optimizer_v2_test.py
index 8599af32f6..ec033c4a01 100644
--- a/tensorflow/contrib/optimizer_v2/optimizer_v2_test.py
+++ b/tensorflow/contrib/optimizer_v2/optimizer_v2_test.py
@@ -35,7 +35,7 @@ from tensorflow.python.platform import test
 
 class OptimizerTest(test.TestCase):
 
-  @test_util.run_in_graph_and_eager_modes()
+  @test_util.run_in_graph_and_eager_modes
   def testBasic(self):
     for i, dtype in enumerate([dtypes.half, dtypes.float32, dtypes.float64]):
       # Note that we name the variables uniquely here since the variables don't
@@ -113,7 +113,7 @@ class OptimizerTest(test.TestCase):
       self.assertAllClose([3.0 - 3 * 3 * 42.0, 4.0 - 3 * 3 * (-42.0)],
                           var1.eval())
 
-  @test_util.run_in_graph_and_eager_modes()
+  @test_util.run_in_graph_and_eager_modes
   def testNoVariables(self):
     for dtype in [dtypes.half, dtypes.float32, dtypes.float64]:
       # pylint: disable=cell-var-from-loop
@@ -128,7 +128,7 @@ class OptimizerTest(test.TestCase):
       with self.assertRaisesRegexp(ValueError, 'No.*variables'):
         sgd_op.minimize(loss)
 
-  @test_util.run_in_graph_and_eager_modes()
+  @test_util.run_in_graph_and_eager_modes
   def testNoGradients(self):
     for i, dtype in enumerate([dtypes.half, dtypes.float32, dtypes.float64]):
       # Note that we name the variables uniquely here since the variables don't
@@ -146,7 +146,7 @@ class OptimizerTest(test.TestCase):
         # var1 has no gradient
         sgd_op.minimize(loss, var_list=[var1])
 
-  @test_util.run_in_graph_and_eager_modes()
+  @test_util.run_in_graph_and_eager_modes
   def testNoGradientsForAnyVariables_Minimize(self):
     for i, dtype in enumerate([dtypes.half, dtypes.float32, dtypes.float64]):
       # Note that we name the variables uniquely here since the variables don't
@@ -162,7 +162,7 @@ class OptimizerTest(test.TestCase):
                                    'No gradients provided for any variable'):
         sgd_op.minimize(loss, var_list=[var0, var1])
 
-  @test_util.run_in_graph_and_eager_modes()
+  @test_util.run_in_graph_and_eager_modes
   def testNoGradientsForAnyVariables_ApplyGradients(self):
     for i, dtype in enumerate([dtypes.half, dtypes.float32, dtypes.float64]):
       # Note that we name the variables uniquely here since the variables don't
@@ -176,7 +176,7 @@ class OptimizerTest(test.TestCase):
                                    'No gradients provided for any variable'):
         sgd_op.apply_gradients([(None, var0), (None, var1)])
 
-  @test_util.run_in_graph_and_eager_modes()
+  @test_util.run_in_graph_and_eager_modes
   def testGradientsAsVariables(self):
     for i, dtype in enumerate([dtypes.half, dtypes.float32, dtypes.float64]):
       # Note that we name the variables uniquely here since the variables don't
@@ -216,7 +216,7 @@ class OptimizerTest(test.TestCase):
       self.assertAllClose([-14., -13.], self.evaluate(var0))
       self.assertAllClose([-6., -5.], self.evaluate(var1))
 
-  @test_util.run_in_graph_and_eager_modes()
+  @test_util.run_in_graph_and_eager_modes
   def testComputeGradientsWithTensors(self):
     x = ops.convert_to_tensor(1.0)
     def f():