about summary refs log tree commit diff homepage
path: root/tensorflow/python/layers
diff options
context:
space:
mode:
authorGravatar A. Unique TensorFlower <gardener@tensorflow.org>2018-08-21 19:24:19 -0700
committerGravatar TensorFlower Gardener <gardener@tensorflow.org>2018-08-21 19:27:54 -0700
commit496023e9dc84a076caeb2e5e8e13b6a3d819ad6d (patch)
tree9776c9865f7b98a15817bc6be4c2b683323a67b1 /tensorflow/python/layers
parent361a82d73a50a800510674b3aaa20e4845e56434 (diff)
Move from deprecated self.test_session() to self.cached_session().
self.test_session() has been deprecated in 9962eb5e84b15e309410071b06c2ed2d6148ed44 as its name confuses readers of the test. Moving to cached_session() instead, which is more explicit about: * the fact that the session may be reused. * the session is not closed even when doing a "with self.test_session()" statement. PiperOrigin-RevId: 209701635
Diffstat (limited to 'tensorflow/python/layers')
-rw-r--r--tensorflow/python/layers/convolutional_test.py8
-rw-r--r--tensorflow/python/layers/core_test.py23
-rw-r--r--tensorflow/python/layers/normalization_test.py24
3 files changed, 28 insertions, 27 deletions
diff --git a/tensorflow/python/layers/convolutional_test.py b/tensorflow/python/layers/convolutional_test.py
index 625320b48b..d61d3b6dba 100644
--- a/tensorflow/python/layers/convolutional_test.py
+++ b/tensorflow/python/layers/convolutional_test.py
@@ -264,7 +264,7 @@ class ConvTest(test.TestCase):
self.assertEqual(len(variables.trainable_variables()), 2)
def testFunctionalConv2DInitializerFromScope(self):
- with self.test_session() as sess:
+ with self.cached_session() as sess:
with variable_scope.variable_scope(
'scope', initializer=init_ops.ones_initializer()):
height, width = 7, 9
@@ -647,7 +647,7 @@ class SeparableConv2DTest(test.TestCase):
self.assertEqual(len(variables.trainable_variables()), 3)
def testFunctionalConv2DInitializerFromScope(self):
- with self.test_session() as sess:
+ with self.cached_session() as sess:
with variable_scope.variable_scope(
'scope', initializer=init_ops.ones_initializer()):
height, width = 7, 9
@@ -882,7 +882,7 @@ class Conv2DTransposeTest(test.TestCase):
self.assertEqual(len(variables.trainable_variables()), 2)
def testFunctionalConv2DTransposeInitializerFromScope(self):
- with self.test_session() as sess:
+ with self.cached_session() as sess:
with variable_scope.variable_scope(
'scope', initializer=init_ops.ones_initializer()):
height, width = 7, 9
@@ -1061,7 +1061,7 @@ class Conv3DTransposeTest(test.TestCase):
self.assertEqual(len(variables.trainable_variables()), 2)
def testFunctionalConv3DTransposeInitializerFromScope(self):
- with self.test_session() as sess:
+ with self.cached_session() as sess:
with variable_scope.variable_scope(
'scope', initializer=init_ops.ones_initializer()):
depth, height, width = 5, 7, 9
diff --git a/tensorflow/python/layers/core_test.py b/tensorflow/python/layers/core_test.py
index 040c1cddc0..46009a30ac 100644
--- a/tensorflow/python/layers/core_test.py
+++ b/tensorflow/python/layers/core_test.py
@@ -60,7 +60,7 @@ class DenseTest(test.TestCase):
self.assertEqual(dense.name, 'dense_2')
def testVariableInput(self):
- with self.test_session():
+ with self.cached_session():
v = variable_scope.get_variable(
'X', initializer=init_ops.zeros_initializer(), shape=(1, 1))
x = core_layers.Dense(1)(v)
@@ -221,7 +221,7 @@ class DenseTest(test.TestCase):
self.assertListEqual(dense.losses, loss_keys)
def testFunctionalDense(self):
- with self.test_session():
+ with self.cached_session():
inputs = random_ops.random_uniform((5, 3), seed=1)
outputs = core_layers.dense(
inputs, 2, activation=nn_ops.relu, name='my_dense')
@@ -240,7 +240,7 @@ class DenseTest(test.TestCase):
# TODO(alive): get this to work in eager mode.
def testFunctionalDenseTwiceReuse(self):
- with self.test_session():
+ with self.cached_session():
inputs = random_ops.random_uniform((5, 3), seed=1)
core_layers.dense(inputs, 2, name='my_dense')
vars1 = variables.trainable_variables()
@@ -250,7 +250,7 @@ class DenseTest(test.TestCase):
# TODO(alive): get this to work in eager mode.
def testFunctionalDenseTwiceReuseFromScope(self):
- with self.test_session():
+ with self.cached_session():
with variable_scope.variable_scope('scope'):
inputs = random_ops.random_uniform((5, 3), seed=1)
core_layers.dense(inputs, 2, name='my_dense')
@@ -262,7 +262,8 @@ class DenseTest(test.TestCase):
def testFunctionalDenseInitializerFromScope(self):
with variable_scope.variable_scope(
- 'scope', initializer=init_ops.ones_initializer()), self.test_session():
+ 'scope',
+ initializer=init_ops.ones_initializer()), self.cached_session():
inputs = random_ops.random_uniform((5, 3), seed=1)
core_layers.dense(inputs, 2)
variables.global_variables_initializer().run()
@@ -305,7 +306,7 @@ class DenseTest(test.TestCase):
self.assertEqual(called[0], 2)
def testFunctionalDenseInScope(self):
- with self.test_session():
+ with self.cached_session():
with variable_scope.variable_scope('test'):
inputs = random_ops.random_uniform((5, 3), seed=1)
core_layers.dense(inputs, 2, name='my_dense')
@@ -391,7 +392,7 @@ class DropoutTest(test.TestCase):
self.assertAllClose(np.ones((5, 3)), np_output)
def testDynamicLearningPhase(self):
- with self.test_session() as sess:
+ with self.cached_session() as sess:
dp = core_layers.Dropout(0.5, seed=1)
inputs = array_ops.ones((5, 5))
training = array_ops.placeholder(dtype='bool')
@@ -424,7 +425,7 @@ class DropoutTest(test.TestCase):
self.assertAllClose(np_output[:, 0, :], np_output[:, 1, :])
def testFunctionalDropout(self):
- with self.test_session():
+ with self.cached_session():
inputs = array_ops.ones((5, 5))
dropped = core_layers.dropout(inputs, 0.5, training=True, seed=1)
variables.global_variables_initializer().run()
@@ -435,7 +436,7 @@ class DropoutTest(test.TestCase):
self.assertAllClose(np.ones((5, 5)), np_output)
def testDynamicRate(self):
- with self.test_session() as sess:
+ with self.cached_session() as sess:
rate = array_ops.placeholder(dtype='float32', name='rate')
dp = core_layers.Dropout(rate, name='dropout')
inputs = array_ops.ones((5, 5))
@@ -450,7 +451,7 @@ class DropoutTest(test.TestCase):
class FlattenTest(test.TestCase):
def testCreateFlatten(self):
- with self.test_session() as sess:
+ with self.cached_session() as sess:
x = array_ops.placeholder(shape=(None, 2, 3), dtype='float32')
y = core_layers.Flatten()(x)
np_output = sess.run(y, feed_dict={x: np.zeros((3, 2, 3))})
@@ -484,7 +485,7 @@ class FlattenTest(test.TestCase):
core_layers.Flatten()(x)
def testFlattenUnknownAxes(self):
- with self.test_session() as sess:
+ with self.cached_session() as sess:
x = array_ops.placeholder(shape=(5, None, None), dtype='float32')
y = core_layers.Flatten()(x)
np_output = sess.run(y, feed_dict={x: np.zeros((5, 2, 3))})
diff --git a/tensorflow/python/layers/normalization_test.py b/tensorflow/python/layers/normalization_test.py
index 57394f29d7..a72d147a0b 100644
--- a/tensorflow/python/layers/normalization_test.py
+++ b/tensorflow/python/layers/normalization_test.py
@@ -319,7 +319,7 @@ class BNTest(test.TestCase):
training = array_ops.placeholder(dtype='bool')
outputs = bn.apply(inputs, training=training)
- with self.test_session() as sess:
+ with self.cached_session() as sess:
# Test training with placeholder learning phase.
sess.run(variables.global_variables_initializer())
@@ -361,7 +361,7 @@ class BNTest(test.TestCase):
training = array_ops.placeholder(dtype='bool')
outputs = bn.apply(inputs, training=training)
- with self.test_session() as sess:
+ with self.cached_session() as sess:
# Test training with placeholder learning phase.
sess.run(variables.global_variables_initializer())
np_gamma, np_beta = sess.run([bn.gamma, bn.beta])
@@ -442,7 +442,7 @@ class BNTest(test.TestCase):
training = array_ops.placeholder(dtype='bool')
outputs = bn.apply(inputs, training=training)
- with self.test_session() as sess:
+ with self.cached_session() as sess:
# Test training with placeholder learning phase.
sess.run(variables.global_variables_initializer())
np_gamma, np_beta = sess.run([bn.gamma, bn.beta])
@@ -482,7 +482,7 @@ class BNTest(test.TestCase):
training = array_ops.placeholder(dtype='bool')
outputs = bn.apply(inputs, training=training)
- with self.test_session() as sess:
+ with self.cached_session() as sess:
# Test training with placeholder learning phase.
sess.run(variables.global_variables_initializer())
np_gamma, np_beta = sess.run([bn.gamma, bn.beta])
@@ -522,7 +522,7 @@ class BNTest(test.TestCase):
training = array_ops.placeholder(dtype='bool')
outputs = bn.apply(inputs, training=training)
- with self.test_session() as sess:
+ with self.cached_session() as sess:
# Test training with placeholder learning phase.
sess.run(variables.global_variables_initializer())
np_gamma, np_beta = sess.run([bn.gamma, bn.beta])
@@ -563,7 +563,7 @@ class BNTest(test.TestCase):
training = array_ops.placeholder(dtype='bool')
outputs = bn.apply(inputs, training=training)
- with self.test_session() as sess:
+ with self.cached_session() as sess:
# Test training with placeholder learning phase.
sess.run(variables.global_variables_initializer())
np_gamma, np_beta = sess.run([bn.gamma, bn.beta])
@@ -603,7 +603,7 @@ class BNTest(test.TestCase):
training = array_ops.placeholder(dtype='bool')
outputs = bn.apply(inputs, training=training)
- with self.test_session() as sess:
+ with self.cached_session() as sess:
# Test training with placeholder learning phase.
sess.run(variables.global_variables_initializer())
np_gamma, np_beta = sess.run([bn.gamma, bn.beta])
@@ -644,7 +644,7 @@ class BNTest(test.TestCase):
outputs_training = bn.apply(inputs, training=True)
outputs_infer = bn.apply(inputs, training=False)
- with self.test_session() as sess:
+ with self.cached_session() as sess:
# Test training with placeholder learning phase.
sess.run(variables.global_variables_initializer())
np_gamma, np_beta = sess.run([bn.gamma, bn.beta])
@@ -694,7 +694,7 @@ class BNTest(test.TestCase):
beta = all_vars['bn/beta:0']
gamma = all_vars['bn/gamma:0']
- with self.test_session() as sess:
+ with self.cached_session() as sess:
# Test training with placeholder learning phase.
sess.run(variables.global_variables_initializer())
np_gamma, np_beta = sess.run([gamma, beta])
@@ -756,7 +756,7 @@ class BNTest(test.TestCase):
beta = all_vars['bn/beta:0']
gamma = all_vars['bn/gamma:0']
- with self.test_session() as sess:
+ with self.cached_session() as sess:
# Test training with placeholder learning phase.
sess.run(variables.global_variables_initializer())
for _ in range(100):
@@ -1254,7 +1254,7 @@ class BNTest(test.TestCase):
training = array_ops.placeholder(dtype='bool')
outputs = bn.apply(inputs, training=training)
- with self.test_session() as sess:
+ with self.cached_session() as sess:
# Test training with placeholder learning phase.
sess.run(variables.global_variables_initializer())
@@ -1294,7 +1294,7 @@ class BNTest(test.TestCase):
training = array_ops.placeholder(dtype='bool')
outputs = bn.apply(inputs, training=training)
- with self.test_session() as sess:
+ with self.cached_session() as sess:
# Test training with placeholder learning phase.
sess.run(variables.global_variables_initializer())