aboutsummaryrefslogtreecommitdiffhomepage
path: root/tensorflow/python/data/ops/dataset_ops.py
diff options
context:
space:
mode:
Diffstat (limited to 'tensorflow/python/data/ops/dataset_ops.py')
-rw-r--r-- tensorflow/python/data/ops/dataset_ops.py | 14
1 files changed, 14 insertions, 0 deletions
diff --git a/tensorflow/python/data/ops/dataset_ops.py b/tensorflow/python/data/ops/dataset_ops.py
index 6f9b12b123..0e020d86d0 100644
--- a/tensorflow/python/data/ops/dataset_ops.py
+++ b/tensorflow/python/data/ops/dataset_ops.py
@@ -212,6 +212,13 @@ class Dataset(object):
def from_tensors(tensors):
"""Creates a `Dataset` with a single element, comprising the given tensors.
+ Note that if `tensors` contains a NumPy array, and eager execution is not
+ enabled, the values will be embedded in the graph as one or more
+ @{tf.constant} operations. For large datasets (> 1 GB), this can waste
memory and run into byte limits of graph serialization. If `tensors` contains
+ one or more large NumPy arrays, consider the alternative described in
+ @{$guide/datasets#consuming_numpy_arrays$this guide}.
+
Args:
tensors: A nested structure of tensors.
@@ -224,6 +231,13 @@ class Dataset(object):
def from_tensor_slices(tensors):
"""Creates a `Dataset` whose elements are slices of the given tensors.
+ Note that if `tensors` contains a NumPy array, and eager execution is not
+ enabled, the values will be embedded in the graph as one or more
+ @{tf.constant} operations. For large datasets (> 1 GB), this can waste
memory and run into byte limits of graph serialization. If `tensors` contains
+ one or more large NumPy arrays, consider the alternative described in
+ @{$guide/datasets#consuming_numpy_arrays$this guide}.
+
Args:
tensors: A nested structure of tensors, each having the same size in the
0th dimension.