about summary refs log tree commit diff homepage
path: root/tensorflow/contrib/distributions
diff options
context:
space:
mode:
authorGravatar Asim Shankar <ashankar@google.com>2018-08-23 11:21:30 -0700
committerGravatar TensorFlower Gardener <gardener@tensorflow.org>2018-08-23 11:27:54 -0700
commitd63812f1f4a37d8a03779ee5caa062978c7176e8 (patch)
tree6335490aa5b3369d688c9231bcd68d0d83e9fc48 /tensorflow/contrib/distributions
parent92a7c84e6ffb830078d91a67887a5d155178f98d (diff)
contrib/distributions: Test cleanup.
- Don't need a test_session() when using @test_util.run_in_graph_and_eager_modes
- Can use @test_util.run_all_in_graph_and_eager_modes when all tests in the TestCase should be run in both modes.

PiperOrigin-RevId: 209968609
Diffstat (limited to 'tensorflow/contrib/distributions')
-rw-r--r--tensorflow/contrib/distributions/python/kernel_tests/bijectors/matrix_inverse_tril_test.py96
-rw-r--r--tensorflow/contrib/distributions/python/kernel_tests/bijectors/ordered_test.py56
-rw-r--r--tensorflow/contrib/distributions/python/kernel_tests/bijectors/softsign_test.py19
-rw-r--r--tensorflow/contrib/distributions/python/kernel_tests/distribution_util_test.py3
4 files changed, 80 insertions, 94 deletions
diff --git a/tensorflow/contrib/distributions/python/kernel_tests/bijectors/matrix_inverse_tril_test.py b/tensorflow/contrib/distributions/python/kernel_tests/bijectors/matrix_inverse_tril_test.py
index 49a9afe3f6..31ee36f024 100644
--- a/tensorflow/contrib/distributions/python/kernel_tests/bijectors/matrix_inverse_tril_test.py
+++ b/tensorflow/contrib/distributions/python/kernel_tests/bijectors/matrix_inverse_tril_test.py
@@ -26,6 +26,7 @@ from tensorflow.python.framework import test_util
from tensorflow.python.platform import test
+@test_util.run_all_in_graph_and_eager_modes
class MatrixInverseTriLBijectorTest(test.TestCase):
"""Tests the correctness of the Y = inv(tril) transformation."""
@@ -40,7 +41,6 @@ class MatrixInverseTriLBijectorTest(test.TestCase):
y[idx][np.triu_indices(y[idx].shape[-1], 1)] = 0
return y
- @test_util.run_in_graph_and_eager_modes
def testComputesCorrectValues(self):
inv = bijectors.MatrixInverseTriL(validate_args=True)
self.assertEqual("matrix_inverse_tril", inv.name)
@@ -62,7 +62,6 @@ class MatrixInverseTriLBijectorTest(test.TestCase):
self.assertNear(expected_fldj_, fldj_, err=1e-3)
self.assertNear(-expected_fldj_, ildj_, err=1e-3)
- @test_util.run_in_graph_and_eager_modes
def testOneByOneMatrix(self):
inv = bijectors.MatrixInverseTriL(validate_args=True)
x_ = np.array([[5.]], dtype=np.float32)
@@ -81,7 +80,6 @@ class MatrixInverseTriLBijectorTest(test.TestCase):
self.assertNear(expected_fldj_, fldj_, err=1e-3)
self.assertNear(-expected_fldj_, ildj_, err=1e-3)
- @test_util.run_in_graph_and_eager_modes
def testZeroByZeroMatrix(self):
inv = bijectors.MatrixInverseTriL(validate_args=True)
x_ = np.eye(0, dtype=np.float32)
@@ -100,7 +98,6 @@ class MatrixInverseTriLBijectorTest(test.TestCase):
self.assertNear(expected_fldj_, fldj_, err=1e-3)
self.assertNear(-expected_fldj_, ildj_, err=1e-3)
- @test_util.run_in_graph_and_eager_modes
def testBatch(self):
# Test batch computation with input shape (2, 1, 2, 2), i.e. batch shape
# (2, 1).
@@ -125,20 +122,18 @@ class MatrixInverseTriLBijectorTest(test.TestCase):
self.assertAllClose(expected_fldj_, fldj_, atol=0., rtol=1e-3)
self.assertAllClose(-expected_fldj_, ildj_, atol=0., rtol=1e-3)
- @test_util.run_in_graph_and_eager_modes
def testErrorOnInputRankTooLow(self):
inv = bijectors.MatrixInverseTriL(validate_args=True)
x_ = np.array([0.1], dtype=np.float32)
rank_error_msg = "must have rank at least 2"
- with self.test_session():
- with self.assertRaisesWithPredicateMatch(ValueError, rank_error_msg):
- inv.forward(x_).eval()
- with self.assertRaisesWithPredicateMatch(ValueError, rank_error_msg):
- inv.inverse(x_).eval()
- with self.assertRaisesWithPredicateMatch(ValueError, rank_error_msg):
- inv.forward_log_det_jacobian(x_, event_ndims=2).eval()
- with self.assertRaisesWithPredicateMatch(ValueError, rank_error_msg):
- inv.inverse_log_det_jacobian(x_, event_ndims=2).eval()
+ with self.assertRaisesWithPredicateMatch(ValueError, rank_error_msg):
+ self.evaluate(inv.forward(x_))
+ with self.assertRaisesWithPredicateMatch(ValueError, rank_error_msg):
+ self.evaluate(inv.inverse(x_))
+ with self.assertRaisesWithPredicateMatch(ValueError, rank_error_msg):
+ self.evaluate(inv.forward_log_det_jacobian(x_, event_ndims=2))
+ with self.assertRaisesWithPredicateMatch(ValueError, rank_error_msg):
+ self.evaluate(inv.inverse_log_det_jacobian(x_, event_ndims=2))
# TODO(b/80481923): Figure out why these assertions fail, and fix them.
## def testErrorOnInputNonSquare(self):
@@ -146,55 +141,50 @@ class MatrixInverseTriLBijectorTest(test.TestCase):
## x_ = np.array([[1., 2., 3.],
## [4., 5., 6.]], dtype=np.float32)
## square_error_msg = "must be a square matrix"
- ## with self.test_session():
- ## with self.assertRaisesWithPredicateMatch(errors.InvalidArgumentError,
- ## square_error_msg):
- ## inv.forward(x_).eval()
- ## with self.assertRaisesWithPredicateMatch(errors.InvalidArgumentError,
- ## square_error_msg):
- ## inv.inverse(x_).eval()
- ## with self.assertRaisesWithPredicateMatch(errors.InvalidArgumentError,
- ## square_error_msg):
- ## inv.forward_log_det_jacobian(x_, event_ndims=2).eval()
- ## with self.assertRaisesWithPredicateMatch(errors.InvalidArgumentError,
- ## square_error_msg):
- ## inv.inverse_log_det_jacobian(x_, event_ndims=2).eval()
-
- @test_util.run_in_graph_and_eager_modes
+ ## with self.assertRaisesWithPredicateMatch(errors.InvalidArgumentError,
+ ## square_error_msg):
+ ## self.evaluate(inv.forward(x_))
+ ## with self.assertRaisesWithPredicateMatch(errors.InvalidArgumentError,
+ ## square_error_msg):
+ ## self.evaluate(inv.inverse(x_))
+ ## with self.assertRaisesWithPredicateMatch(errors.InvalidArgumentError,
+ ## square_error_msg):
+ ## self.evaluate(inv.forward_log_det_jacobian(x_, event_ndims=2))
+ ## with self.assertRaisesWithPredicateMatch(errors.InvalidArgumentError,
+ ## square_error_msg):
+ ## self.evaluate(inv.inverse_log_det_jacobian(x_, event_ndims=2))
+
def testErrorOnInputNotLowerTriangular(self):
inv = bijectors.MatrixInverseTriL(validate_args=True)
x_ = np.array([[1., 2.],
[3., 4.]], dtype=np.float32)
triangular_error_msg = "must be lower triangular"
- with self.test_session():
- with self.assertRaisesWithPredicateMatch(errors.InvalidArgumentError,
- triangular_error_msg):
- inv.forward(x_).eval()
- with self.assertRaisesWithPredicateMatch(errors.InvalidArgumentError,
- triangular_error_msg):
- inv.inverse(x_).eval()
- with self.assertRaisesWithPredicateMatch(errors.InvalidArgumentError,
- triangular_error_msg):
- inv.forward_log_det_jacobian(x_, event_ndims=2).eval()
- with self.assertRaisesWithPredicateMatch(errors.InvalidArgumentError,
- triangular_error_msg):
- inv.inverse_log_det_jacobian(x_, event_ndims=2).eval()
-
- @test_util.run_in_graph_and_eager_modes
+ with self.assertRaisesWithPredicateMatch(errors.InvalidArgumentError,
+ triangular_error_msg):
+ self.evaluate(inv.forward(x_))
+ with self.assertRaisesWithPredicateMatch(errors.InvalidArgumentError,
+ triangular_error_msg):
+ self.evaluate(inv.inverse(x_))
+ with self.assertRaisesWithPredicateMatch(errors.InvalidArgumentError,
+ triangular_error_msg):
+ self.evaluate(inv.forward_log_det_jacobian(x_, event_ndims=2))
+ with self.assertRaisesWithPredicateMatch(errors.InvalidArgumentError,
+ triangular_error_msg):
+ self.evaluate(inv.inverse_log_det_jacobian(x_, event_ndims=2))
+
def testErrorOnInputSingular(self):
inv = bijectors.MatrixInverseTriL(validate_args=True)
x_ = np.array([[1., 0.],
[0., 0.]], dtype=np.float32)
nonsingular_error_msg = "must have all diagonal entries nonzero"
- with self.test_session():
- with self.assertRaisesOpError(nonsingular_error_msg):
- inv.forward(x_).eval()
- with self.assertRaisesOpError(nonsingular_error_msg):
- inv.inverse(x_).eval()
- with self.assertRaisesOpError(nonsingular_error_msg):
- inv.forward_log_det_jacobian(x_, event_ndims=2).eval()
- with self.assertRaisesOpError(nonsingular_error_msg):
- inv.inverse_log_det_jacobian(x_, event_ndims=2).eval()
+ with self.assertRaisesOpError(nonsingular_error_msg):
+ self.evaluate(inv.forward(x_))
+ with self.assertRaisesOpError(nonsingular_error_msg):
+ self.evaluate(inv.inverse(x_))
+ with self.assertRaisesOpError(nonsingular_error_msg):
+ self.evaluate(inv.forward_log_det_jacobian(x_, event_ndims=2))
+ with self.assertRaisesOpError(nonsingular_error_msg):
+ self.evaluate(inv.inverse_log_det_jacobian(x_, event_ndims=2))
if __name__ == "__main__":
diff --git a/tensorflow/contrib/distributions/python/kernel_tests/bijectors/ordered_test.py b/tensorflow/contrib/distributions/python/kernel_tests/bijectors/ordered_test.py
index a188843952..9a88f8f1bc 100644
--- a/tensorflow/contrib/distributions/python/kernel_tests/bijectors/ordered_test.py
+++ b/tensorflow/contrib/distributions/python/kernel_tests/bijectors/ordered_test.py
@@ -38,23 +38,22 @@ class OrderedBijectorTest(test.TestCase):
@test_util.run_in_graph_and_eager_modes
def testBijectorVector(self):
- with self.cached_session():
- ordered = Ordered()
- self.assertEqual("ordered", ordered.name)
- x = np.asarray([[2., 3, 4], [4., 8, 13]])
- y = [[2., 0, 0], [4., np.log(4.), np.log(5.)]]
- self.assertAllClose(y, self.evaluate(ordered.forward(x)))
- self.assertAllClose(x, self.evaluate(ordered.inverse(y)))
- self.assertAllClose(
- np.sum(np.asarray(y)[..., 1:], axis=-1),
- self.evaluate(ordered.inverse_log_det_jacobian(y, event_ndims=1)),
- atol=0.,
- rtol=1e-7)
- self.assertAllClose(
- self.evaluate(-ordered.inverse_log_det_jacobian(y, event_ndims=1)),
- self.evaluate(ordered.forward_log_det_jacobian(x, event_ndims=1)),
- atol=0.,
- rtol=1e-7)
+ ordered = Ordered()
+ self.assertEqual("ordered", ordered.name)
+ x = np.asarray([[2., 3, 4], [4., 8, 13]])
+ y = [[2., 0, 0], [4., np.log(4.), np.log(5.)]]
+ self.assertAllClose(y, self.evaluate(ordered.forward(x)))
+ self.assertAllClose(x, self.evaluate(ordered.inverse(y)))
+ self.assertAllClose(
+ np.sum(np.asarray(y)[..., 1:], axis=-1),
+ self.evaluate(ordered.inverse_log_det_jacobian(y, event_ndims=1)),
+ atol=0.,
+ rtol=1e-7)
+ self.assertAllClose(
+ self.evaluate(-ordered.inverse_log_det_jacobian(y, event_ndims=1)),
+ self.evaluate(ordered.forward_log_det_jacobian(x, event_ndims=1)),
+ atol=0.,
+ rtol=1e-7)
def testBijectorUnknownShape(self):
with self.cached_session():
@@ -84,18 +83,17 @@ class OrderedBijectorTest(test.TestCase):
@test_util.run_in_graph_and_eager_modes
def testShapeGetters(self):
- with self.cached_session():
- x = tensor_shape.TensorShape([4])
- y = tensor_shape.TensorShape([4])
- bijector = Ordered(validate_args=True)
- self.assertAllEqual(y, bijector.forward_event_shape(x))
- self.assertAllEqual(y.as_list(),
- self.evaluate(bijector.forward_event_shape_tensor(
- x.as_list())))
- self.assertAllEqual(x, bijector.inverse_event_shape(y))
- self.assertAllEqual(x.as_list(),
- self.evaluate(bijector.inverse_event_shape_tensor(
- y.as_list())))
+ x = tensor_shape.TensorShape([4])
+ y = tensor_shape.TensorShape([4])
+ bijector = Ordered(validate_args=True)
+ self.assertAllEqual(y, bijector.forward_event_shape(x))
+ self.assertAllEqual(y.as_list(),
+ self.evaluate(bijector.forward_event_shape_tensor(
+ x.as_list())))
+ self.assertAllEqual(x, bijector.inverse_event_shape(y))
+ self.assertAllEqual(x.as_list(),
+ self.evaluate(bijector.inverse_event_shape_tensor(
+ y.as_list())))
def testBijectiveAndFinite(self):
with self.cached_session():
diff --git a/tensorflow/contrib/distributions/python/kernel_tests/bijectors/softsign_test.py b/tensorflow/contrib/distributions/python/kernel_tests/bijectors/softsign_test.py
index d0098c3c10..8dad80aa64 100644
--- a/tensorflow/contrib/distributions/python/kernel_tests/bijectors/softsign_test.py
+++ b/tensorflow/contrib/distributions/python/kernel_tests/bijectors/softsign_test.py
@@ -43,16 +43,15 @@ class SoftsignBijectorTest(test.TestCase):
@test_util.run_in_graph_and_eager_modes
def testBijectorBounds(self):
bijector = Softsign(validate_args=True)
- with self.test_session():
- with self.assertRaisesOpError("greater than -1"):
- bijector.inverse(-3.).eval()
- with self.assertRaisesOpError("greater than -1"):
- bijector.inverse_log_det_jacobian(-3., event_ndims=0).eval()
-
- with self.assertRaisesOpError("less than 1"):
- bijector.inverse(3.).eval()
- with self.assertRaisesOpError("less than 1"):
- bijector.inverse_log_det_jacobian(3., event_ndims=0).eval()
+ with self.assertRaisesOpError("greater than -1"):
+ self.evaluate(bijector.inverse(-3.))
+ with self.assertRaisesOpError("greater than -1"):
+ self.evaluate(bijector.inverse_log_det_jacobian(-3., event_ndims=0))
+
+ with self.assertRaisesOpError("less than 1"):
+ self.evaluate(bijector.inverse(3.))
+ with self.assertRaisesOpError("less than 1"):
+ self.evaluate(bijector.inverse_log_det_jacobian(3., event_ndims=0))
@test_util.run_in_graph_and_eager_modes
def testBijectorForwardInverse(self):
diff --git a/tensorflow/contrib/distributions/python/kernel_tests/distribution_util_test.py b/tensorflow/contrib/distributions/python/kernel_tests/distribution_util_test.py
index f7b2efa7bc..05f5d30666 100644
--- a/tensorflow/contrib/distributions/python/kernel_tests/distribution_util_test.py
+++ b/tensorflow/contrib/distributions/python/kernel_tests/distribution_util_test.py
@@ -542,9 +542,9 @@ class PadDynamicTest(_PadTest, test.TestCase):
return False
+@test_util.run_all_in_graph_and_eager_modes
class TestMoveDimension(test.TestCase):
- @test_util.run_in_graph_and_eager_modes
def test_move_dimension_static_shape(self):
x = random_ops.random_normal(shape=[200, 30, 4, 1, 6])
@@ -561,7 +561,6 @@ class TestMoveDimension(test.TestCase):
x_perm = distribution_util.move_dimension(x, 4, 2)
self.assertAllEqual(x_perm.shape.as_list(), [200, 30, 6, 4, 1])
- @test_util.run_in_graph_and_eager_modes
def test_move_dimension_dynamic_shape(self):
x_ = random_ops.random_normal(shape=[200, 30, 4, 1, 6])