aboutsummaryrefslogtreecommitdiffhomepage
diff options
context:
space:
mode:
authorGravatar Asim Shankar <ashankar@google.com>2018-06-12 07:42:40 -0700
committerGravatar TensorFlower Gardener <gardener@tensorflow.org>2018-06-12 07:45:35 -0700
commit15ee5980a5873fd4c975d835e813b9377cb79f7d (patch)
tree662a429767af8f025f8a298faa6b991bd96f0498
parent8e7ae1c8c78cebc7cc98cb99b3f8a3e8a415b5ff (diff)
[Documentation]: Fix #19657
PiperOrigin-RevId: 200213440
-rw-r--r--tensorflow/python/data/ops/dataset_ops.py14
1 files changed, 14 insertions, 0 deletions
diff --git a/tensorflow/python/data/ops/dataset_ops.py b/tensorflow/python/data/ops/dataset_ops.py
index 597f92048e..7c1e9dd754 100644
--- a/tensorflow/python/data/ops/dataset_ops.py
+++ b/tensorflow/python/data/ops/dataset_ops.py
@@ -223,6 +223,13 @@ class Dataset(object):
def from_tensors(tensors):
"""Creates a `Dataset` with a single element, comprising the given tensors.
+ Note that if `tensors` contains a NumPy array, and eager execution is not
+ enabled, the values will be embedded in the graph as one or more
+ @{tf.constant} operations. For large datasets (> 1 GB), this can waste
+ memory and run into byte limits of graph serialization. If `tensors` contains
+ one or more large NumPy arrays, consider the alternative described in
+ @{$programmers_guide/datasets#consuming_numpy_arrays$this guide}.
+
Args:
tensors: A nested structure of tensors.
@@ -235,6 +242,13 @@ class Dataset(object):
def from_tensor_slices(tensors):
"""Creates a `Dataset` whose elements are slices of the given tensors.
+ Note that if `tensors` contains a NumPy array, and eager execution is not
+ enabled, the values will be embedded in the graph as one or more
+ @{tf.constant} operations. For large datasets (> 1 GB), this can waste
+ memory and run into byte limits of graph serialization. If `tensors` contains
+ one or more large NumPy arrays, consider the alternative described in
+ @{$programmers_guide/datasets#consuming_numpy_arrays$this guide}.
+
Args:
tensors: A nested structure of tensors, each having the same size in the
0th dimension.