path: root/tensorflow/contrib/slim
author    A. Unique TensorFlower <gardener@tensorflow.org>  2018-09-12 16:35:07 -0700
committer TensorFlower Gardener <gardener@tensorflow.org>   2018-09-12 16:42:21 -0700
commit    8f8b2497dbccf4b33557088b82b562205aa47c36 (patch)
tree      a26117486ba3d87a32c7abfd2c192f6f7501296c /tensorflow/contrib/slim
parent    acc32e741935545d8e600a67361c388d14556538 (diff)
Move from deprecated self.test_session() to self.cached_session().
self.test_session() has been deprecated in 9962eb5e84b15e309410071b06c2ed2d6148ed44 as its name confuses readers of the test. Moving to cached_session() instead, which is more explicit about:

* the fact that the session may be reused.
* the fact that the session is not closed even when doing a "with self.test_session()" statement.

PiperOrigin-RevId: 212725342
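The change is mechanical across all four test files. As a minimal sketch (not part of this commit; the test class name and tensor values are hypothetical), the pattern looks like this in a tf.test.TestCase subclass, assuming TF 1.x graph mode:

import tensorflow as tf


class CachedSessionExampleTest(tf.test.TestCase):
  """Hypothetical test illustrating the test_session -> cached_session move."""

  def testConstantRoundTrip(self):
    # Before this change: with self.test_session():
    # After this change: cached_session() makes the semantics explicit -- the
    # underlying session is cached per test, may be reused across calls, and
    # is not closed when the "with" block exits.
    with self.cached_session():
      value = tf.constant([1, 2, 3])
      self.assertAllEqual(value.eval(), [1, 2, 3])


if __name__ == '__main__':
  tf.test.main()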
Diffstat (limited to 'tensorflow/contrib/slim')
-rw-r--r--  tensorflow/contrib/slim/python/slim/data/dataset_data_provider_test.py   6
-rw-r--r--  tensorflow/contrib/slim/python/slim/data/parallel_reader_test.py         14
-rw-r--r--  tensorflow/contrib/slim/python/slim/data/prefetch_queue_test.py           8
-rw-r--r--  tensorflow/contrib/slim/python/slim/data/tfexample_decoder_test.py       46
4 files changed, 37 insertions(+), 37 deletions(-)
diff --git a/tensorflow/contrib/slim/python/slim/data/dataset_data_provider_test.py b/tensorflow/contrib/slim/python/slim/data/dataset_data_provider_test.py
index 1bb6fbc570..795de6a408 100644
--- a/tensorflow/contrib/slim/python/slim/data/dataset_data_provider_test.py
+++ b/tensorflow/contrib/slim/python/slim/data/dataset_data_provider_test.py
@@ -88,7 +88,7 @@ class DatasetDataProviderTest(test.TestCase):
height = 300
width = 280
- with self.test_session():
+ with self.cached_session():
test_dataset = _create_tfrecord_dataset(dataset_dir)
provider = dataset_data_provider.DatasetDataProvider(test_dataset)
key, image, label = provider.get(['record_key', 'image', 'label'])
@@ -111,7 +111,7 @@ class DatasetDataProviderTest(test.TestCase):
height = 300
width = 280
- with self.test_session():
+ with self.cached_session():
provider = dataset_data_provider.DatasetDataProvider(
_create_tfrecord_dataset(dataset_dir))
[image] = provider.get(['image'])
@@ -128,7 +128,7 @@ class DatasetDataProviderTest(test.TestCase):
dataset_dir = tempfile.mkdtemp(prefix=os.path.join(self.get_temp_dir(),
'tfrecord_dataset'))
- with self.test_session():
+ with self.cached_session():
with self.assertRaises(ValueError):
dataset_data_provider.DatasetDataProvider(
_create_tfrecord_dataset(dataset_dir), record_key='image')
diff --git a/tensorflow/contrib/slim/python/slim/data/parallel_reader_test.py b/tensorflow/contrib/slim/python/slim/data/parallel_reader_test.py
index ea8cc0ff61..c457d44e07 100644
--- a/tensorflow/contrib/slim/python/slim/data/parallel_reader_test.py
+++ b/tensorflow/contrib/slim/python/slim/data/parallel_reader_test.py
@@ -39,7 +39,7 @@ class ParallelReaderTest(test.TestCase):
ops.reset_default_graph()
def _verify_all_data_sources_read(self, shared_queue):
- with self.test_session():
+ with self.cached_session():
tfrecord_paths = test_utils.create_tfrecord_files(
self.get_temp_dir(), num_files=3)
@@ -76,7 +76,7 @@ class ParallelReaderTest(test.TestCase):
self.assertEquals(count0 + count1 + count2, num_reads)
def _verify_read_up_to_out(self, shared_queue):
- with self.test_session():
+ with self.cached_session():
num_files = 3
num_records_per_file = 7
tfrecord_paths = test_utils.create_tfrecord_files(
@@ -161,7 +161,7 @@ class ParallelReadTest(test.TestCase):
ops.reset_default_graph()
def testTFRecordReader(self):
- with self.test_session():
+ with self.cached_session():
self._tfrecord_paths = test_utils.create_tfrecord_files(
self.get_temp_dir(), num_files=3)
@@ -188,7 +188,7 @@ class SinglePassReadTest(test.TestCase):
ops.reset_default_graph()
def testOutOfRangeError(self):
- with self.test_session():
+ with self.cached_session():
[tfrecord_path] = test_utils.create_tfrecord_files(
self.get_temp_dir(), num_files=1)
@@ -196,7 +196,7 @@ class SinglePassReadTest(test.TestCase):
tfrecord_path, reader_class=io_ops.TFRecordReader)
init_op = variables.local_variables_initializer()
- with self.test_session() as sess:
+ with self.cached_session() as sess:
sess.run(init_op)
with queues.QueueRunners(sess):
num_reads = 11
@@ -205,7 +205,7 @@ class SinglePassReadTest(test.TestCase):
sess.run([key, value])
def testTFRecordReader(self):
- with self.test_session():
+ with self.cached_session():
[tfrecord_path] = test_utils.create_tfrecord_files(
self.get_temp_dir(), num_files=1)
@@ -213,7 +213,7 @@ class SinglePassReadTest(test.TestCase):
tfrecord_path, reader_class=io_ops.TFRecordReader)
init_op = variables.local_variables_initializer()
- with self.test_session() as sess:
+ with self.cached_session() as sess:
sess.run(init_op)
with queues.QueueRunners(sess):
flowers = 0
diff --git a/tensorflow/contrib/slim/python/slim/data/prefetch_queue_test.py b/tensorflow/contrib/slim/python/slim/data/prefetch_queue_test.py
index 6c3e57c47d..7caa42dcb9 100644
--- a/tensorflow/contrib/slim/python/slim/data/prefetch_queue_test.py
+++ b/tensorflow/contrib/slim/python/slim/data/prefetch_queue_test.py
@@ -37,7 +37,7 @@ from tensorflow.python.training import queue_runner_impl
class PrefetchQueueTest(test.TestCase):
def testOneThread(self):
- with self.test_session() as sess:
+ with self.cached_session() as sess:
batch_size = 10
image_size = 32
num_batches = 5
@@ -74,7 +74,7 @@ class PrefetchQueueTest(test.TestCase):
thread.join()
def testMultiThread(self):
- with self.test_session() as sess:
+ with self.cached_session() as sess:
batch_size = 10
image_size = 32
num_batches = 5
@@ -114,7 +114,7 @@ class PrefetchQueueTest(test.TestCase):
thread.join()
def testMultipleDequeue(self):
- with self.test_session() as sess:
+ with self.cached_session() as sess:
batch_size = 10
image_size = 32
num_batches = 4
@@ -162,7 +162,7 @@ class PrefetchQueueTest(test.TestCase):
prefetch_queue.prefetch_queue([variable_tensor])
def testDynamicPad(self):
- with self.test_session() as sess:
+ with self.cached_session() as sess:
# Create 3 tensors of variable but compatible shapes.
var_shape = [None, 2]
p1 = constant_op.constant([[1, 2], [3, 4]])
diff --git a/tensorflow/contrib/slim/python/slim/data/tfexample_decoder_test.py b/tensorflow/contrib/slim/python/slim/data/tfexample_decoder_test.py
index 826242c9d7..3114949b82 100644
--- a/tensorflow/contrib/slim/python/slim/data/tfexample_decoder_test.py
+++ b/tensorflow/contrib/slim/python/slim/data/tfexample_decoder_test.py
@@ -45,7 +45,7 @@ class TFExampleDecoderTest(test.TestCase):
int64_list=feature_pb2.Int64List(value=ndarray.flatten().tolist()))
def _EncodedBytesFeature(self, tf_encoded):
- with self.test_session():
+ with self.cached_session():
encoded = tf_encoded.eval()
def BytesList(value):
@@ -133,7 +133,7 @@ class TFExampleDecoderTest(test.TestCase):
tf_image = self.DecodeExample(serialized_example, item_handler,
image_format)
- with self.test_session():
+ with self.cached_session():
decoded_image = tf_image.eval()
# We need to recast them here to avoid some issues with uint8.
@@ -265,7 +265,7 @@ class TFExampleDecoderTest(test.TestCase):
serialized_example = example.SerializeToString()
- with self.test_session():
+ with self.cached_session():
serialized_example = array_ops.reshape(serialized_example, shape=[])
keys_to_features = {
'labels':
@@ -296,7 +296,7 @@ class TFExampleDecoderTest(test.TestCase):
serialized_example = example.SerializeToString()
- with self.test_session():
+ with self.cached_session():
serialized_example = array_ops.reshape(serialized_example, shape=[])
keys_to_features = {
'array': parsing_ops.FixedLenFeature(np_array.shape, dtypes.float32)
@@ -319,7 +319,7 @@ class TFExampleDecoderTest(test.TestCase):
serialized_example = example.SerializeToString()
- with self.test_session():
+ with self.cached_session():
serialized_example = array_ops.reshape(serialized_example, shape=[])
keys_to_features = {
'array': parsing_ops.FixedLenFeature(np_array.shape, dtypes.int64)
@@ -342,7 +342,7 @@ class TFExampleDecoderTest(test.TestCase):
serialized_example = example.SerializeToString()
- with self.test_session():
+ with self.cached_session():
serialized_example = array_ops.reshape(serialized_example, shape=[])
keys_to_features = {
'labels': parsing_ops.VarLenFeature(dtype=dtypes.int64),
@@ -366,7 +366,7 @@ class TFExampleDecoderTest(test.TestCase):
serialized_example = example.SerializeToString()
- with self.test_session():
+ with self.cached_session():
serialized_example = array_ops.reshape(serialized_example, shape=[])
keys_to_features = {
'labels':
@@ -390,7 +390,7 @@ class TFExampleDecoderTest(test.TestCase):
serialized_example = example.SerializeToString()
- with self.test_session():
+ with self.cached_session():
serialized_example = array_ops.reshape(serialized_example, shape=[])
keys_to_features = {
'labels': parsing_ops.VarLenFeature(dtype=dtypes.int64),
@@ -423,7 +423,7 @@ class TFExampleDecoderTest(test.TestCase):
serialized_example = example.SerializeToString()
- with self.test_session():
+ with self.cached_session():
serialized_example = array_ops.reshape(serialized_example, shape=[])
keys_to_features = {
'image': parsing_ops.VarLenFeature(dtype=dtypes.float32),
@@ -468,7 +468,7 @@ class TFExampleDecoderTest(test.TestCase):
serialized_example = example.SerializeToString()
- with self.test_session():
+ with self.cached_session():
serialized_example = array_ops.reshape(serialized_example, shape=[])
keys_to_features = {
'image': parsing_ops.VarLenFeature(dtype=dtypes.float32),
@@ -505,7 +505,7 @@ class TFExampleDecoderTest(test.TestCase):
serialized_example = example.SerializeToString()
- with self.test_session():
+ with self.cached_session():
serialized_example = array_ops.reshape(serialized_example, shape=[])
keys_to_features = {
'indices': parsing_ops.VarLenFeature(dtype=dtypes.int64),
@@ -536,7 +536,7 @@ class TFExampleDecoderTest(test.TestCase):
serialized_example = example.SerializeToString()
- with self.test_session():
+ with self.cached_session():
serialized_example = array_ops.reshape(serialized_example, shape=[])
keys_to_features = {
'indices': parsing_ops.VarLenFeature(dtype=dtypes.int64),
@@ -567,7 +567,7 @@ class TFExampleDecoderTest(test.TestCase):
serialized_example = example.SerializeToString()
- with self.test_session():
+ with self.cached_session():
serialized_example = array_ops.reshape(serialized_example, shape=[])
keys_to_features = {
'indices': parsing_ops.VarLenFeature(dtype=dtypes.int64),
@@ -598,7 +598,7 @@ class TFExampleDecoderTest(test.TestCase):
serialized_example = example.SerializeToString()
- with self.test_session():
+ with self.cached_session():
serialized_example = array_ops.reshape(serialized_example, shape=[])
keys_to_features = {
'indices': parsing_ops.VarLenFeature(dtype=dtypes.int64),
@@ -625,7 +625,7 @@ class TFExampleDecoderTest(test.TestCase):
serialized_example = example.SerializeToString()
- with self.test_session():
+ with self.cached_session():
serialized_example = array_ops.reshape(serialized_example, shape=[])
keys_to_features = {
@@ -657,7 +657,7 @@ class TFExampleDecoderTest(test.TestCase):
serialized_example = example.SerializeToString()
- with self.test_session():
+ with self.cached_session():
serialized_example = array_ops.reshape(serialized_example, shape=[])
keys_to_features = {
@@ -692,7 +692,7 @@ class TFExampleDecoderTest(test.TestCase):
image, serialized_example = self.GenerateImage(
image_format=image_encoding, image_shape=image_shape)
- with self.test_session():
+ with self.cached_session():
def ConditionalDecoding(keys_to_tensors):
"""See base class."""
@@ -759,7 +759,7 @@ class TFExampleDecoderTest(test.TestCase):
}))
serialized_example = example.SerializeToString()
- with self.test_session():
+ with self.cached_session():
serialized_example = array_ops.reshape(serialized_example, shape=[])
keys_to_features = {
@@ -800,7 +800,7 @@ class TFExampleDecoderTest(test.TestCase):
}))
serialized_example = example.SerializeToString()
- with self.test_session():
+ with self.cached_session():
serialized_example = array_ops.reshape(serialized_example, shape=[])
keys_to_features = {
@@ -837,7 +837,7 @@ class TFExampleDecoderTest(test.TestCase):
image, _ = self.GenerateImage(
image_format=image_format, image_shape=image_shape)
tf_encoded = self._Encoder(image, image_format)
- with self.test_session():
+ with self.cached_session():
tf_string = tf_encoded.eval()
example = example_pb2.Example(
@@ -852,7 +852,7 @@ class TFExampleDecoderTest(test.TestCase):
}))
serialized_example = example.SerializeToString()
- with self.test_session():
+ with self.cached_session():
serialized_example = array_ops.reshape(serialized_example, shape=[])
decoder = tfexample_decoder.TFExampleDecoder(
@@ -885,7 +885,7 @@ class TFExampleDecoderTest(test.TestCase):
table = lookup_ops.index_table_from_tensor(
constant_op.constant(['dog', 'guinea pig', 'cat']))
- with self.test_session() as sess:
+ with self.cached_session() as sess:
sess.run(lookup_ops.tables_initializer())
serialized_example = array_ops.reshape(serialized_example, shape=[])
@@ -943,7 +943,7 @@ class TFExampleDecoderTest(test.TestCase):
decoder = tfexample_decoder.TFExampleDecoder(keys_to_features,
items_to_handlers)
obtained_class_ids_each_example = []
- with self.test_session() as sess:
+ with self.cached_session() as sess:
sess.run(lookup_ops.tables_initializer())
for example in [example1, example2, example3]:
serialized_example = array_ops.reshape(