diff options
author: A. Unique TensorFlower <gardener@tensorflow.org> | 2018-08-21 18:22:15 -0700
committer: TensorFlower Gardener <gardener@tensorflow.org> | 2018-08-21 18:25:59 -0700
commit: 708b30f4cb82271bb28cb70a1e0c89a1933f5b64 (patch)
tree: 22470a9314f7f4225b6d08170a3d7ea91b0216a1 /tensorflow/contrib/opt
parent: d0cac47a767dd972516f75ce57f0d6185e3b6514 (diff)
Move from deprecated self.test_session() to self.session() when a graph is set.
self.test_session() has been deprecated in cl/208545396 as its behavior confuses readers of the test. Moving to self.session() instead.
PiperOrigin-RevId: 209696110
Diffstat (limited to 'tensorflow/contrib/opt')
4 files changed, 4 insertions, 4 deletions
diff --git a/tensorflow/contrib/opt/python/training/adamax_test.py b/tensorflow/contrib/opt/python/training/adamax_test.py index 915e6504e1..5790d8a3f1 100644 --- a/tensorflow/contrib/opt/python/training/adamax_test.py +++ b/tensorflow/contrib/opt/python/training/adamax_test.py @@ -172,7 +172,7 @@ class AdaMaxOptimizerTest(test.TestCase): def doTestBasic(self, use_resource=False): for i, dtype in enumerate([dtypes.half, dtypes.float32, dtypes.float64]): - with self.test_session(graph=ops.Graph()): + with self.session(graph=ops.Graph()): # Initialize variables for numpy implementation. m0, v0, m1, v1 = 0.0, 0.0, 0.0, 0.0 var0_np = np.array([1.0, 2.0], dtype=dtype.as_numpy_dtype) diff --git a/tensorflow/contrib/opt/python/training/ggt_test.py b/tensorflow/contrib/opt/python/training/ggt_test.py index 42162960b0..1d2a79957b 100644 --- a/tensorflow/contrib/opt/python/training/ggt_test.py +++ b/tensorflow/contrib/opt/python/training/ggt_test.py @@ -76,7 +76,7 @@ class GGTOptimizerTest(test.TestCase): def doTestBasic(self, use_resource=False): # SVD does not support float16 for i, dtype in enumerate([dtypes.float32, dtypes.float64]): - with self.test_session(graph=ops.Graph()): + with self.session(graph=ops.Graph()): # Initialize variables for numpy implementation. 
m0 = 0.0 window = 3 diff --git a/tensorflow/contrib/opt/python/training/moving_average_optimizer_test.py b/tensorflow/contrib/opt/python/training/moving_average_optimizer_test.py index ac04ad9911..d15716f6f6 100644 --- a/tensorflow/contrib/opt/python/training/moving_average_optimizer_test.py +++ b/tensorflow/contrib/opt/python/training/moving_average_optimizer_test.py @@ -46,7 +46,7 @@ class MovingAverageOptimizerTest(test.TestCase): def _helpTestRun(self, use_resource=False): for sequential_update in [True, False]: for dtype in [dtypes.half, dtypes.float32, dtypes.float64]: - with self.test_session(graph=ops.Graph()) as sess: + with self.session(graph=ops.Graph()) as sess: orig_val0 = [1.0, 2.0] orig_val1 = [3.0, 4.0] var0 = variable_scope.get_variable( diff --git a/tensorflow/contrib/opt/python/training/weight_decay_optimizers_test.py b/tensorflow/contrib/opt/python/training/weight_decay_optimizers_test.py index 76d8a5697a..9c91078301 100644 --- a/tensorflow/contrib/opt/python/training/weight_decay_optimizers_test.py +++ b/tensorflow/contrib/opt/python/training/weight_decay_optimizers_test.py @@ -58,7 +58,7 @@ class WeightDecayOptimizerTest(test.TestCase): def doTest(self, optimizer, update_fn, optimizer_name, slot_name, use_resource=False, do_sparse=False): for i, dtype in enumerate([dtypes.half, dtypes.float32, dtypes.float64]): - with self.test_session(graph=ops.Graph()): + with self.session(graph=ops.Graph()): # Initialize variables for numpy implementation. m0, v0, m1, v1 = 0.0, 0.0, 0.0, 0.0 var0_np = np.array([1.0, 2.0], dtype=dtype.as_numpy_dtype) |