path: root/tensorflow/python/training
author    Derek Murray <mrry@google.com>    2018-09-05 14:33:37 -0700
committer TensorFlower Gardener <gardener@tensorflow.org>    2018-09-05 14:42:36 -0700
commit a3c1ccd1da64040eeb139a0c6c1fc34ae46d7290 (patch)
tree   8d141eb30f08cb6aa140f351071682fa2175040e /tensorflow/python/training
parent ff2049a0719ac2a53f5a20dbe2144b3a1b6e87b8 (diff)
Deprecate `tf.train.batch()` and related APIs.
These APIs are based on queue runners, which have been deprecated and will be removed in TensorFlow 2.0. They have been replaced with `tf.data.Dataset`, which provides a more efficient version of the same functionality.

PiperOrigin-RevId: 211700442
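As a rough illustration of the migration this message describes (not part of the change itself), the queue-based batching call maps onto `tf.data` as sketched below; the in-memory `features`/`labels` arrays are hypothetical stand-ins for whatever tensors a reader pipeline would have produced.

import numpy as np
import tensorflow as tf

# Hypothetical in-memory data standing in for tensors produced by a reader.
features = np.random.rand(100, 28, 28).astype(np.float32)
labels = np.random.randint(0, 10, size=100).astype(np.int64)

# Deprecated queue-runner pattern (roughly):
#   feature_batch, label_batch = tf.train.batch(
#       [feature, label], batch_size=32, num_threads=4, capacity=128)

# tf.data replacement suggested by the deprecation messages in the diff below:
dataset = tf.data.Dataset.from_tensor_slices((features, labels)).batch(32)

# The analogue of dynamic_pad=True is padded_batch, e.g.:
#   dataset = dataset.padded_batch(32, padded_shapes=...)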
Diffstat (limited to 'tensorflow/python/training')
-rw-r--r--  tensorflow/python/training/input.py | 48
1 file changed, 40 insertions(+), 8 deletions(-)
diff --git a/tensorflow/python/training/input.py b/tensorflow/python/training/input.py
index 0d6207f8c4..94c6b47027 100644
--- a/tensorflow/python/training/input.py
+++ b/tensorflow/python/training/input.py
@@ -45,6 +45,7 @@ from tensorflow.python.ops import sparse_ops
from tensorflow.python.ops import variable_scope as vs
from tensorflow.python.summary import summary
from tensorflow.python.training import queue_runner
+from tensorflow.python.util import deprecation
from tensorflow.python.util.tf_export import tf_export
@@ -894,7 +895,11 @@ def _shuffle_batch_join(tensors_list, batch_size, capacity,
# Batching functions ----------------------------------------------------------
-@tf_export("train.batch")
+@tf_export(v1=["train.batch"])
+@deprecation.deprecated(
+ None, "Queue-based input pipelines have been replaced by `tf.data`. Use "
+ "`tf.data.Dataset.batch(batch_size)` (or `padded_batch(...)` if "
+ "`dynamic_pad=True`).")
def batch(tensors, batch_size, num_threads=1, capacity=32,
enqueue_many=False, shapes=None, dynamic_pad=False,
allow_smaller_final_batch=False, shared_name=None, name=None):
@@ -989,7 +994,11 @@ def batch(tensors, batch_size, num_threads=1, capacity=32,
name=name)
-@tf_export("train.maybe_batch")
+@tf_export(v1=["train.maybe_batch"])
+@deprecation.deprecated(
+ None, "Queue-based input pipelines have been replaced by `tf.data`. Use "
+ "`tf.data.Dataset.filter(...).batch(batch_size)` (or `padded_batch(...)`"
+ " if `dynamic_pad=True`).")
def maybe_batch(tensors, keep_input, batch_size, num_threads=1, capacity=32,
enqueue_many=False, shapes=None, dynamic_pad=False,
allow_smaller_final_batch=False, shared_name=None, name=None):
@@ -1042,7 +1051,11 @@ def maybe_batch(tensors, keep_input, batch_size, num_threads=1, capacity=32,
name=name)
-@tf_export("train.batch_join")
+@tf_export(v1=["train.batch_join"])
+@deprecation.deprecated(
+ None, "Queue-based input pipelines have been replaced by `tf.data`. Use "
+ "`tf.data.Dataset.interleave(...).batch(batch_size)` (or "
+ "`padded_batch(...)` if `dynamic_pad=True`).")
def batch_join(tensors_list, batch_size, capacity=32, enqueue_many=False,
shapes=None, dynamic_pad=False, allow_smaller_final_batch=False,
shared_name=None, name=None):
@@ -1148,7 +1161,11 @@ def batch_join(tensors_list, batch_size, capacity=32, enqueue_many=False,
name=name)
-@tf_export("train.maybe_batch_join")
+@tf_export(v1=["train.maybe_batch_join"])
+@deprecation.deprecated(
+ None, "Queue-based input pipelines have been replaced by `tf.data`. Use "
+ "`tf.data.Dataset.interleave(...).filter(...).batch(batch_size)` (or "
+ "`padded_batch(...)` if `dynamic_pad=True`).")
def maybe_batch_join(tensors_list, keep_input, batch_size, capacity=32,
enqueue_many=False, shapes=None, dynamic_pad=False,
allow_smaller_final_batch=False, shared_name=None,
@@ -1201,7 +1218,10 @@ def maybe_batch_join(tensors_list, keep_input, batch_size, capacity=32,
name=name)
-@tf_export("train.shuffle_batch")
+@tf_export(v1=["train.shuffle_batch"])
+@deprecation.deprecated(
+ None, "Queue-based input pipelines have been replaced by `tf.data`. Use "
+ "`tf.data.Dataset.shuffle(min_after_dequeue).batch(batch_size)`.")
def shuffle_batch(tensors, batch_size, capacity, min_after_dequeue,
num_threads=1, seed=None, enqueue_many=False, shapes=None,
allow_smaller_final_batch=False, shared_name=None, name=None):
@@ -1301,7 +1321,11 @@ def shuffle_batch(tensors, batch_size, capacity, min_after_dequeue,
name=name)
-@tf_export("train.maybe_shuffle_batch")
+@tf_export(v1=["train.maybe_shuffle_batch"])
+@deprecation.deprecated(
+ None, "Queue-based input pipelines have been replaced by `tf.data`. Use "
+ "`tf.data.Dataset.filter(...).shuffle(min_after_dequeue).batch(batch_size)`"
+ ".")
def maybe_shuffle_batch(tensors, batch_size, capacity, min_after_dequeue,
keep_input, num_threads=1, seed=None,
enqueue_many=False, shapes=None,
@@ -1361,7 +1385,11 @@ def maybe_shuffle_batch(tensors, batch_size, capacity, min_after_dequeue,
name=name)
-@tf_export("train.shuffle_batch_join")
+@tf_export(v1=["train.shuffle_batch_join"])
+@deprecation.deprecated(
+ None, "Queue-based input pipelines have been replaced by `tf.data`. Use "
+ "`tf.data.Dataset.interleave(...).shuffle(min_after_dequeue).batch"
+ "(batch_size)`.")
def shuffle_batch_join(tensors_list, batch_size, capacity,
min_after_dequeue, seed=None, enqueue_many=False,
shapes=None, allow_smaller_final_batch=False,
@@ -1455,7 +1483,11 @@ def shuffle_batch_join(tensors_list, batch_size, capacity,
name=name)
-@tf_export("train.maybe_shuffle_batch_join")
+@tf_export(v1=["train.maybe_shuffle_batch_join"])
+@deprecation.deprecated(
+ None, "Queue-based input pipelines have been replaced by `tf.data`. Use "
+ "`tf.data.Dataset.interleave(...).filter(...).shuffle(min_after_dequeue)"
+ ".batch(batch_size)`.")
def maybe_shuffle_batch_join(tensors_list, batch_size, capacity,
min_after_dequeue, keep_input, seed=None,
enqueue_many=False, shapes=None,
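
For completeness, a minimal sketch of the shuffled variant named in the deprecation messages above, again with hypothetical in-memory data; per the message text, `min_after_dequeue` becomes the buffer size passed to `Dataset.shuffle`.

import numpy as np
import tensorflow as tf

# Hypothetical data standing in for the tensors passed to tf.train.shuffle_batch.
features = np.random.rand(1000, 64).astype(np.float32)
labels = np.random.randint(0, 2, size=1000).astype(np.int64)

batch_size = 32
min_after_dequeue = 256  # reused as the Dataset.shuffle buffer size

# Deprecated:
#   tf.train.shuffle_batch([feature, label], batch_size, capacity=1024,
#                          min_after_dequeue=min_after_dequeue, num_threads=4)
# Replacement per the deprecation message:
dataset = (tf.data.Dataset.from_tensor_slices((features, labels))
           .shuffle(min_after_dequeue)
           .batch(batch_size))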