author     Francois Chollet <fchollet@google.com>  2017-08-30 18:31:41 -0700
committer  TensorFlower Gardener <gardener@tensorflow.org>  2017-08-30 18:35:22 -0700
commit     114e129d6c31216e5b62ef1e78a77c3ae7afe82e (patch)
tree       61db27c2249c7e21b2f116c5f2c957dce84647ac /tensorflow/contrib/keras
parent     5c3977c297f07d1cc14591844e5df202b1994c85 (diff)
Fix TSAN flakes in Keras io_utils test.
PiperOrigin-RevId: 167081436
Diffstat (limited to 'tensorflow/contrib/keras')
-rw-r--r--  tensorflow/contrib/keras/BUILD                                |  1
-rw-r--r--  tensorflow/contrib/keras/python/keras/utils/io_utils_test.py | 75
2 files changed, 39 insertions(+), 37 deletions(-)
diff --git a/tensorflow/contrib/keras/BUILD b/tensorflow/contrib/keras/BUILD
index a09045d7fd..26f0e41518 100644
--- a/tensorflow/contrib/keras/BUILD
+++ b/tensorflow/contrib/keras/BUILD
@@ -551,6 +551,7 @@ py_test(
size = "small",
srcs = ["python/keras/utils/io_utils_test.py"],
srcs_version = "PY2AND3",
+ tags = ["notsan"],
deps = [
":keras",
"//tensorflow/python:client_testlib",
diff --git a/tensorflow/contrib/keras/python/keras/utils/io_utils_test.py b/tensorflow/contrib/keras/python/keras/utils/io_utils_test.py
index baa9781e71..f6820ee039 100644
--- a/tensorflow/contrib/keras/python/keras/utils/io_utils_test.py
+++ b/tensorflow/contrib/keras/python/keras/utils/io_utils_test.py
@@ -57,43 +57,44 @@ class TestIOUtils(test.TestCase):
h5_path = os.path.join(temp_dir, 'test.h5')
create_dataset(h5_path)
- # Instantiating HDF5Matrix for the training set,
- # which is a slice of the first 150 elements
- x_train = keras.utils.io_utils.HDF5Matrix(
- h5_path, 'my_data', start=0, end=150)
- y_train = keras.utils.io_utils.HDF5Matrix(
- h5_path, 'my_labels', start=0, end=150)
-
- # Likewise for the test set
- x_test = keras.utils.io_utils.HDF5Matrix(
- h5_path, 'my_data', start=150, end=200)
- y_test = keras.utils.io_utils.HDF5Matrix(
- h5_path, 'my_labels', start=150, end=200)
-
- # HDF5Matrix behave more or less like Numpy matrices
- # with regard to indexing
- self.assertEqual(y_train.shape, (150, 1))
- # But they do not support negative indices, so don't try print(x_train[-1])
-
- self.assertEqual(y_train.dtype, np.dtype('i'))
- self.assertEqual(y_train.ndim, 2)
- self.assertEqual(y_train.size, 150)
-
- model = keras.models.Sequential()
- model.add(keras.layers.Dense(64, input_shape=(10,), activation='relu'))
- model.add(keras.layers.Dense(1, activation='sigmoid'))
- model.compile(loss='binary_crossentropy', optimizer='sgd')
-
- # Note: you have to use shuffle='batch' or False with HDF5Matrix
- model.fit(x_train, y_train, batch_size=32, shuffle='batch', verbose=False)
- # test that evalutation and prediction
- # don't crash and return reasonable results
- out_pred = model.predict(x_test, batch_size=32, verbose=False)
- out_eval = model.evaluate(x_test, y_test, batch_size=32, verbose=False)
-
- self.assertEqual(out_pred.shape, (50, 1))
- self.assertEqual(out_eval.shape, ())
- self.assertGreater(out_eval, 0)
+ with self.test_session():
+ # Instantiating HDF5Matrix for the training set,
+ # which is a slice of the first 150 elements
+ x_train = keras.utils.io_utils.HDF5Matrix(
+ h5_path, 'my_data', start=0, end=150)
+ y_train = keras.utils.io_utils.HDF5Matrix(
+ h5_path, 'my_labels', start=0, end=150)
+
+ # Likewise for the test set
+ x_test = keras.utils.io_utils.HDF5Matrix(
+ h5_path, 'my_data', start=150, end=200)
+ y_test = keras.utils.io_utils.HDF5Matrix(
+ h5_path, 'my_labels', start=150, end=200)
+
+ # HDF5Matrix behave more or less like Numpy matrices
+ # with regard to indexing
+ self.assertEqual(y_train.shape, (150, 1))
+ # But they don't support negative indices, so don't try print(x_train[-1])
+
+ self.assertEqual(y_train.dtype, np.dtype('i'))
+ self.assertEqual(y_train.ndim, 2)
+ self.assertEqual(y_train.size, 150)
+
+ model = keras.models.Sequential()
+ model.add(keras.layers.Dense(64, input_shape=(10,), activation='relu'))
+ model.add(keras.layers.Dense(1, activation='sigmoid'))
+ model.compile(loss='binary_crossentropy', optimizer='sgd')
+
+ # Note: you have to use shuffle='batch' or False with HDF5Matrix
+ model.fit(x_train, y_train, batch_size=32, shuffle='batch', verbose=False)
+ # test that evalutation and prediction
+ # don't crash and return reasonable results
+ out_pred = model.predict(x_test, batch_size=32, verbose=False)
+ out_eval = model.evaluate(x_test, y_test, batch_size=32, verbose=False)
+
+ self.assertEqual(out_pred.shape, (50, 1))
+ self.assertEqual(out_eval.shape, ())
+ self.assertGreater(out_eval, 0)
if __name__ == '__main__':
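Note: the Python-side change wraps the entire test body in self.test_session(), so the Keras calls run against a single session managed by the test framework rather than whatever default session each thread happens to see. A minimal sketch of the pattern, with a toy model standing in for the HDF5Matrix setup above (the contrib-era import path is assumed):

import numpy as np

from tensorflow.contrib.keras.python import keras
from tensorflow.python.platform import test


class ExampleTest(test.TestCase):

  def test_fit_inside_test_session(self):
    # The whole body runs inside test_session(), mirroring the fix above;
    # the notsan tag in BUILD additionally skips the target under TSAN.
    with self.test_session():
      model = keras.models.Sequential()
      model.add(keras.layers.Dense(1, input_shape=(10,), activation='sigmoid'))
      model.compile(loss='binary_crossentropy', optimizer='sgd')
      x = np.random.random((20, 10))
      y = np.random.randint(0, 2, size=(20, 1))
      model.fit(x, y, batch_size=4, verbose=False)


if __name__ == '__main__':
  test.main()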