about summary refs log tree commit diff homepage
path: root/tensorflow/contrib/nn
diff options
context:
space:
mode:
author    A. Unique TensorFlower <gardener@tensorflow.org>    2018-08-21 19:53:43 -0700
committer TensorFlower Gardener <gardener@tensorflow.org>    2018-08-21 20:00:41 -0700
commit 47c0bda0e7f736a9328aaf76aba7c8006e24556f (patch)
tree   ad2a6ab71adddc0d07c7f306c270122937b6a5b0 /tensorflow/contrib/nn
parent 1ab795b54274a26a92690f36eff65674fb500f91 (diff)
Move from deprecated self.test_session() to self.cached_session().
self.test_session() has been deprecated in 9962eb5e84b15e309410071b06c2ed2d6148ed44, as its name confuses readers of the test. Moving to cached_session() instead, which is more explicit about: * the fact that the session may be reused; * the fact that the session is not closed even when exiting a "with self.test_session()" statement.
PiperOrigin-RevId: 209703607
Diffstat (limited to 'tensorflow/contrib/nn')
-rw-r--r--tensorflow/contrib/nn/python/ops/alpha_dropout_test.py2
-rw-r--r--tensorflow/contrib/nn/python/ops/fwd_gradients_test.py4
-rw-r--r--tensorflow/contrib/nn/python/ops/sampling_ops_test.py4
3 files changed, 5 insertions, 5 deletions
diff --git a/tensorflow/contrib/nn/python/ops/alpha_dropout_test.py b/tensorflow/contrib/nn/python/ops/alpha_dropout_test.py
index 54a98e6f14..3aec88bcbf 100644
--- a/tensorflow/contrib/nn/python/ops/alpha_dropout_test.py
+++ b/tensorflow/contrib/nn/python/ops/alpha_dropout_test.py
@@ -32,7 +32,7 @@ class AlphaDropoutTest(test.TestCase):
def testAlphaDropout(self):
x_dim, y_dim = 40, 30
for keep_prob in [0.1, 0.5, 0.8]:
- with self.test_session():
+ with self.cached_session():
t = random_ops.random_normal([x_dim, y_dim])
output = alpha_dropout(t, keep_prob)
self.assertEqual([x_dim, y_dim], output.get_shape())
diff --git a/tensorflow/contrib/nn/python/ops/fwd_gradients_test.py b/tensorflow/contrib/nn/python/ops/fwd_gradients_test.py
index 56062c3cab..4cdac6a742 100644
--- a/tensorflow/contrib/nn/python/ops/fwd_gradients_test.py
+++ b/tensorflow/contrib/nn/python/ops/fwd_gradients_test.py
@@ -35,7 +35,7 @@ class ForwardAdTest(test.TestCase):
dydx_tf = fwd_gradients.fwd_gradients([y], [x], [grad_x])[0]
dydx_py = 2. * grad_x
- with self.test_session() as sess:
+ with self.cached_session() as sess:
self.assertAllClose(sess.run(dydx_tf), dydx_py, 1e-6)
def testGather(self):
@@ -44,7 +44,7 @@ class ForwardAdTest(test.TestCase):
y.set_shape([2])
dydx = fwd_gradients.fwd_gradients([y], [x], assert_unused=True)
- with self.test_session() as sess:
+ with self.cached_session() as sess:
sess.run(dydx)
diff --git a/tensorflow/contrib/nn/python/ops/sampling_ops_test.py b/tensorflow/contrib/nn/python/ops/sampling_ops_test.py
index 1d4fe1321b..11738bb215 100644
--- a/tensorflow/contrib/nn/python/ops/sampling_ops_test.py
+++ b/tensorflow/contrib/nn/python/ops/sampling_ops_test.py
@@ -227,7 +227,7 @@ class RankSampledSoftmaxLossTest(test.TestCase):
sampled_values=self._resampled_values,
remove_accidental_hits=self._remove_accidental_hits,
partition_strategy=partition_strategy)
- with self.test_session() as sess:
+ with self.cached_session() as sess:
loss_val = sess.run(loss)
loss_nn_val = sess.run(loss_nn)
@@ -299,7 +299,7 @@ class RankSampledSoftmaxLossTest(test.TestCase):
sampled_values=resampled_values,
remove_accidental_hits=remove_accidental_hits,
partition_strategy='div')
- with self.test_session() as sess:
+ with self.cached_session() as sess:
loss_val = sess.run(loss)
loss_nn_val = sess.run(loss_nn)