diff options
author | Priya Gupta <priyag@google.com> | 2018-09-13 16:45:11 -0700 |
---|---|---|
committer | TensorFlower Gardener <gardener@tensorflow.org> | 2018-09-13 16:51:29 -0700 |
commit | 4137d84a3b41638d4048e45ab579662c18a06df5 (patch) | |
tree | 4a9584cae1cdef036c656653e5101d0878042cb3 /tensorflow/contrib/distribute | |
parent | 3b438e4a24dd0f113f1d36d97196a027bd473fc4 (diff) |
Use `dataset.batch(.., drop_remainder=True)` instead of map_and_batch to achieve the same effect.
PiperOrigin-RevId: 212901207
Diffstat (limited to 'tensorflow/contrib/distribute')
-rw-r--r-- | tensorflow/contrib/distribute/python/BUILD | 1 | ||||
-rw-r--r-- | tensorflow/contrib/distribute/python/single_loss_example.py | 6 |
2 files changed, 2 insertions, 5 deletions
diff --git a/tensorflow/contrib/distribute/python/BUILD b/tensorflow/contrib/distribute/python/BUILD
index 87f76eaa94..aaecbb0eb1 100644
--- a/tensorflow/contrib/distribute/python/BUILD
+++ b/tensorflow/contrib/distribute/python/BUILD
@@ -485,7 +485,6 @@ py_library(
     srcs = ["single_loss_example.py"],
     deps = [
         ":step_fn",
-        "//tensorflow/contrib/data/python/ops:batching",
         "//tensorflow/python:array_ops",
         "//tensorflow/python:constant_op",
         "//tensorflow/python:layers",
diff --git a/tensorflow/contrib/distribute/python/single_loss_example.py b/tensorflow/contrib/distribute/python/single_loss_example.py
index 5aa19cf6a9..09b351ffa4 100644
--- a/tensorflow/contrib/distribute/python/single_loss_example.py
+++ b/tensorflow/contrib/distribute/python/single_loss_example.py
@@ -18,7 +18,6 @@ from __future__ import absolute_import
 from __future__ import division
 from __future__ import print_function
 
-from tensorflow.contrib.data.python.ops import batching
 from tensorflow.contrib.distribute.python import step_fn
 from tensorflow.python.data.ops import dataset_ops
 from tensorflow.python.framework import constant_op
@@ -59,10 +58,9 @@ def minimize_loss_example(optimizer_fn,
 
   def dataset_fn():
     dataset = dataset_ops.Dataset.from_tensors([[1.]]).repeat()
-    # TODO(isaprykin): map_and_batch with drop_remainder causes shapes to be
+    # TODO(isaprykin): batch with drop_remainder causes shapes to be
     # fully defined for TPU. Remove this when XLA supports dynamic shapes.
-    return dataset.apply(
-        batching.map_and_batch(lambda x: x, batch_size=1, drop_remainder=True))
+    return dataset.batch(1, drop_remainder=True)
 
   # An Optimizer instance is created either outside or inside model_fn.
   outer_optimizer = None