about summary refs log tree commit diff homepage
path: root/tensorflow/contrib/layers
diff options
context:
space:
mode:
authorGravatar A. Unique TensorFlower <gardener@tensorflow.org>2018-08-21 18:22:15 -0700
committerGravatar TensorFlower Gardener <gardener@tensorflow.org>2018-08-21 18:25:59 -0700
commit708b30f4cb82271bb28cb70a1e0c89a1933f5b64 (patch)
tree22470a9314f7f4225b6d08170a3d7ea91b0216a1 /tensorflow/contrib/layers
parentd0cac47a767dd972516f75ce57f0d6185e3b6514 (diff)
Move from deprecated self.test_session() to self.session() when a graph is set.
self.test_session() has been deprecated in cl/208545396 as its behavior confuses readers of the test. Moving to self.session() instead. PiperOrigin-RevId: 209696110
Diffstat (limited to 'tensorflow/contrib/layers')
-rw-r--r--tensorflow/contrib/layers/python/layers/initializers_test.py2
-rw-r--r--tensorflow/contrib/layers/python/layers/layers_test.py22
-rw-r--r--tensorflow/contrib/layers/python/layers/optimizers_test.py28
-rw-r--r--tensorflow/contrib/layers/python/layers/utils_test.py2
4 files changed, 27 insertions, 27 deletions
diff --git a/tensorflow/contrib/layers/python/layers/initializers_test.py b/tensorflow/contrib/layers/python/layers/initializers_test.py
index b7fe878893..bd3692b258 100644
--- a/tensorflow/contrib/layers/python/layers/initializers_test.py
+++ b/tensorflow/contrib/layers/python/layers/initializers_test.py
@@ -85,7 +85,7 @@ class VarianceScalingInitializerTest(test.TestCase):
def _test_variance(self, initializer, shape, variance, factor, mode, uniform):
with ops.Graph().as_default() as g:
- with self.test_session(graph=g) as sess:
+ with self.session(graph=g) as sess:
var = variable_scope.get_variable(
name='test',
shape=shape,
diff --git a/tensorflow/contrib/layers/python/layers/layers_test.py b/tensorflow/contrib/layers/python/layers/layers_test.py
index 51c7abb105..eee90864b4 100644
--- a/tensorflow/contrib/layers/python/layers/layers_test.py
+++ b/tensorflow/contrib/layers/python/layers/layers_test.py
@@ -1067,7 +1067,7 @@ class Convolution2dTransposeTests(test.TestCase):
conv = layers_lib.conv2d(
transpose, num_filters, filter_size, stride=stride, padding='VALID')
- with self.test_session(graph=graph) as sess:
+ with self.session(graph=graph) as sess:
sess.run(variables_lib.global_variables_initializer())
self.assertListEqual(list(conv.eval().shape), input_size)
@@ -1460,14 +1460,14 @@ class DropoutTest(test.TestCase):
class FlattenTest(test.TestCase):
def testInvalidRank(self):
- with ops.Graph().as_default() as g, self.test_session(g):
+ with ops.Graph().as_default() as g, self.session(g):
inputs = array_ops.placeholder(dtype=dtypes.float32)
inputs.set_shape(tensor_shape.TensorShape((5,)))
with self.assertRaisesRegexp(ValueError, 'incompatible with the layer'):
_layers.flatten(inputs)
def testUnknownLastDim(self):
- with ops.Graph().as_default() as g, self.test_session(g):
+ with ops.Graph().as_default() as g, self.session(g):
inputs = array_ops.placeholder(dtype=dtypes.float32)
inputs.set_shape(tensor_shape.TensorShape((5, None)))
output = _layers.flatten(inputs)
@@ -1629,7 +1629,7 @@ class FCTest(test.TestCase):
def testCreateFC(self):
height, width = 3, 3
for layer_fn in (_layers.fully_connected, layers_lib.relu):
- with ops.Graph().as_default() as g, self.test_session(g):
+ with ops.Graph().as_default() as g, self.session(g):
inputs = np.random.uniform(size=(5, height * width * 3))
output = layer_fn(inputs, 32)
self.assertEqual(output.op.name, 'fully_connected/Relu')
@@ -1814,27 +1814,27 @@ class BatchNormTest(test.TestCase):
a, center=False, data_format='NCHW', zero_debias_moving_mean=True)
def testUnknownShape(self):
- with ops.Graph().as_default() as g, self.test_session(g):
+ with ops.Graph().as_default() as g, self.session(g):
inputs = array_ops.placeholder(dtype=dtypes.float32)
with self.assertRaisesRegexp(ValueError, 'undefined rank'):
_layers.batch_norm(inputs)
def testInvalidDataFormat(self):
- with ops.Graph().as_default() as g, self.test_session(g):
+ with ops.Graph().as_default() as g, self.session(g):
inputs = array_ops.placeholder(dtype=dtypes.float32)
with self.assertRaisesRegexp(
ValueError, 'data_format has to be either NCHW or NHWC.'):
_layers.batch_norm(inputs, data_format='CHWN')
def testUnknownChannelsDimNHWC(self):
- with ops.Graph().as_default() as g, self.test_session(g):
+ with ops.Graph().as_default() as g, self.session(g):
inputs = array_ops.placeholder(dtype=dtypes.float32)
inputs.set_shape(tensor_shape.TensorShape((5, 3, 3, None)))
with self.assertRaisesRegexp(ValueError, 'undefined'):
_layers.batch_norm(inputs, data_format='NHWC')
def testUnknownChannelsDimNCHW(self):
- with ops.Graph().as_default() as g, self.test_session(g):
+ with ops.Graph().as_default() as g, self.session(g):
inputs = array_ops.placeholder(dtype=dtypes.float32)
inputs.set_shape(tensor_shape.TensorShape((5, None, 3, 3)))
with self.assertRaisesRegexp(ValueError, 'undefined'):
@@ -2810,13 +2810,13 @@ class BatchNormTest(test.TestCase):
class LayerNormTest(test.TestCase):
def testUnknownShape(self):
- with ops.Graph().as_default() as g, self.test_session(g):
+ with ops.Graph().as_default() as g, self.session(g):
inputs = array_ops.placeholder(dtype=dtypes.float32)
with self.assertRaisesRegexp(ValueError, 'undefined rank'):
_layers.layer_norm(inputs)
def testParamsDimsNotFullyDefined(self):
- with ops.Graph().as_default() as g, self.test_session(g):
+ with ops.Graph().as_default() as g, self.session(g):
inputs = array_ops.placeholder(dtype=dtypes.float32)
inputs.set_shape(tensor_shape.TensorShape((5, 3, 3, None)))
with self.assertRaisesRegexp(ValueError, 'is not fully defined'):
@@ -2876,7 +2876,7 @@ class LayerNormTest(test.TestCase):
for sigma in [1.0, 0.1]:
input_values = np.random.randn(*input_shape) * sigma + mu
with ops.Graph().as_default() as g:
- with self.test_session(graph=g) as sess:
+ with self.session(graph=g) as sess:
inputs = constant_op.constant(
input_values, shape=input_shape, dtype=dtype)
output_t = _layers.layer_norm(
diff --git a/tensorflow/contrib/layers/python/layers/optimizers_test.py b/tensorflow/contrib/layers/python/layers/optimizers_test.py
index a4461a20e5..0f037e24ad 100644
--- a/tensorflow/contrib/layers/python/layers/optimizers_test.py
+++ b/tensorflow/contrib/layers/python/layers/optimizers_test.py
@@ -66,7 +66,7 @@ class OptimizersTest(test.TestCase):
]
for optimizer in optimizers:
with ops.Graph().as_default() as g:
- with self.test_session(graph=g) as session:
+ with self.session(graph=g) as session:
x, var, loss, global_step = _setup_model()
train = optimizers_lib.optimize_loss(
loss, global_step, learning_rate=0.1, optimizer=optimizer)
@@ -82,7 +82,7 @@ class OptimizersTest(test.TestCase):
return gradient_descent.GradientDescentOptimizer(learning_rate=0.1)
with ops.Graph().as_default() as g:
- with self.test_session(graph=g) as session:
+ with self.session(graph=g) as session:
x, var, loss, global_step = _setup_model()
train = optimizers_lib.optimize_loss(
loss, global_step, learning_rate=None, optimizer=optimizer_fn)
@@ -96,14 +96,14 @@ class OptimizersTest(test.TestCase):
optimizers = ["blah", variables.Variable, object(), lambda x: None]
for optimizer in optimizers:
with ops.Graph().as_default() as g:
- with self.test_session(graph=g):
+ with self.session(graph=g):
_, _, loss, global_step = _setup_model()
with self.assertRaises(ValueError):
optimizers_lib.optimize_loss(
loss, global_step, learning_rate=0.1, optimizer=optimizer)
def testBadSummaries(self):
- with ops.Graph().as_default() as g, self.test_session(graph=g):
+ with ops.Graph().as_default() as g, self.session(graph=g):
_, _, loss, global_step = _setup_model()
with self.assertRaises(ValueError):
optimizers_lib.optimize_loss(
@@ -111,7 +111,7 @@ class OptimizersTest(test.TestCase):
summaries=["loss", "bad_summary"])
def testInvalidLoss(self):
- with ops.Graph().as_default() as g, self.test_session(graph=g):
+ with ops.Graph().as_default() as g, self.session(graph=g):
_, _, _, global_step = _setup_model()
with self.assertRaises(ValueError):
optimizers_lib.optimize_loss(
@@ -121,7 +121,7 @@ class OptimizersTest(test.TestCase):
[[1.0]], global_step, learning_rate=0.1, optimizer="SGD")
def testInvalidGlobalStep(self):
- with ops.Graph().as_default() as g, self.test_session(graph=g):
+ with ops.Graph().as_default() as g, self.session(graph=g):
x = array_ops.placeholder(dtypes.float32, [])
var = variable_scope.get_variable(
"test", [], initializer=init_ops.constant_initializer(10))
@@ -157,7 +157,7 @@ class OptimizersTest(test.TestCase):
optimizer="SGD")
def testInvalidLearningRate(self):
- with ops.Graph().as_default() as g, self.test_session(graph=g):
+ with ops.Graph().as_default() as g, self.session(graph=g):
_, _, loss, global_step = _setup_model()
with self.assertRaises(ValueError):
optimizers_lib.optimize_loss(
@@ -270,7 +270,7 @@ class OptimizersTest(test.TestCase):
gradient_descent.GradientDescentOptimizer(learning_rate=0.1)
]
for optimizer in optimizers:
- with ops.Graph().as_default() as g, self.test_session(graph=g) as session:
+ with ops.Graph().as_default() as g, self.session(graph=g) as session:
x = array_ops.placeholder(dtypes.float32, [])
var = variable_scope.get_variable(
"test", [], initializer=init_ops.constant_initializer(10))
@@ -295,7 +295,7 @@ class OptimizersTest(test.TestCase):
gradient_descent.GradientDescentOptimizer(learning_rate=0.1)
]
for optimizer in optimizers:
- with ops.Graph().as_default() as g, self.test_session(graph=g):
+ with ops.Graph().as_default() as g, self.session(graph=g):
x = array_ops.placeholder(dtypes.float32, [])
var = variable_scope.get_variable(
"test", [], initializer=init_ops.constant_initializer(10))
@@ -319,7 +319,7 @@ class OptimizersTest(test.TestCase):
gradient_descent.GradientDescentOptimizer(learning_rate=0.1)
]
for optimizer in optimizers:
- with ops.Graph().as_default() as g, self.test_session(graph=g) as session:
+ with ops.Graph().as_default() as g, self.session(graph=g) as session:
x, var, loss, global_step = _setup_model()
update_var = variable_scope.get_variable(
"update", [], initializer=init_ops.constant_initializer(10))
@@ -342,7 +342,7 @@ class OptimizersTest(test.TestCase):
gradient_descent.GradientDescentOptimizer(learning_rate=0.1)
]
for optimizer in optimizers:
- with ops.Graph().as_default() as g, self.test_session(graph=g) as session:
+ with ops.Graph().as_default() as g, self.session(graph=g) as session:
x, var, loss, global_step = _setup_model()
update_var = variable_scope.get_variable(
"update", [], initializer=init_ops.constant_initializer(10))
@@ -365,7 +365,7 @@ class OptimizersTest(test.TestCase):
gradient_descent.GradientDescentOptimizer(learning_rate=0.1)
]
for optimizer in optimizers:
- with ops.Graph().as_default() as g, self.test_session(graph=g) as session:
+ with ops.Graph().as_default() as g, self.session(graph=g) as session:
x, var, loss, global_step = _setup_model()
update_var = variable_scope.get_variable(
"update", [], initializer=init_ops.constant_initializer(10))
@@ -389,7 +389,7 @@ class OptimizersTest(test.TestCase):
gradient_descent.GradientDescentOptimizer(learning_rate=0.1)
]
for optimizer in optimizers:
- with ops.Graph().as_default() as g, self.test_session(graph=g) as session:
+ with ops.Graph().as_default() as g, self.session(graph=g) as session:
x, var, loss, global_step = _setup_model()
update_var = variable_scope.get_variable(
"update", [], initializer=init_ops.constant_initializer(10))
@@ -413,7 +413,7 @@ class OptimizersTest(test.TestCase):
gradient_descent.GradientDescentOptimizer(learning_rate=0.1)
]
for optimizer in optimizers:
- with ops.Graph().as_default() as g, self.test_session(graph=g) as session:
+ with ops.Graph().as_default() as g, self.session(graph=g) as session:
x, var, loss, global_step = _setup_model()
update_var = variable_scope.get_variable(
"update", [], initializer=init_ops.constant_initializer(10))
diff --git a/tensorflow/contrib/layers/python/layers/utils_test.py b/tensorflow/contrib/layers/python/layers/utils_test.py
index 645dc1291e..a9bd89532a 100644
--- a/tensorflow/contrib/layers/python/layers/utils_test.py
+++ b/tensorflow/contrib/layers/python/layers/utils_test.py
@@ -47,7 +47,7 @@ class ConstantValueTest(test.TestCase):
def test_variable(self):
for v in [True, False, 1, 0, 1.0]:
- with ops.Graph().as_default() as g, self.test_session(g) as sess:
+ with ops.Graph().as_default() as g, self.session(g) as sess:
x = variables.Variable(v)
value = utils.constant_value(x)
self.assertEqual(value, None)