Diffstat (limited to 'tensorflow/contrib/data/python/ops/error_ops.py')
-rw-r--r--  tensorflow/contrib/data/python/ops/error_ops.py  45
1 file changed, 5 insertions, 40 deletions
diff --git a/tensorflow/contrib/data/python/ops/error_ops.py b/tensorflow/contrib/data/python/ops/error_ops.py
index 6c21e489f7..0559a2e09c 100644
--- a/tensorflow/contrib/data/python/ops/error_ops.py
+++ b/tensorflow/contrib/data/python/ops/error_ops.py
@@ -17,13 +17,11 @@ from __future__ import absolute_import
 from __future__ import division
 from __future__ import print_function
 
-from tensorflow.contrib.data.python.ops import contrib_op_loader  # pylint: disable=unused-import
-from tensorflow.contrib.data.python.ops import gen_dataset_ops
-from tensorflow.python.data.ops import dataset_ops
-from tensorflow.python.data.util import nest
-from tensorflow.python.data.util import sparse
+from tensorflow.python.data.experimental.ops import error_ops
+from tensorflow.python.util import deprecation
 
 
+@deprecation.deprecated(None, "Use `tf.data.experimental.ignore_errors()`.")
 def ignore_errors():
   """Creates a `Dataset` from another `Dataset` and silently ignores any errors.
 
@@ -44,39 +42,6 @@ def ignore_errors():
 
   Returns:
     A `Dataset` transformation function, which can be passed to
-    @{tf.data.Dataset.apply}.
+    `tf.data.Dataset.apply`.
   """
-
-  def _apply_fn(dataset):
-    return IgnoreErrorsDataset(dataset)
-
-  return _apply_fn
-
-
-class IgnoreErrorsDataset(dataset_ops.Dataset):
-  """A `Dataset` that silently ignores errors when computing its input."""
-
-  def __init__(self, input_dataset):
-    """See `Dataset.ignore_errors()` for details."""
-    super(IgnoreErrorsDataset, self).__init__()
-    self._input_dataset = input_dataset
-
-  def _as_variant_tensor(self):
-    return gen_dataset_ops.ignore_errors_dataset(
-        self._input_dataset._as_variant_tensor(),  # pylint: disable=protected-access
-        output_shapes=nest.flatten(
-            sparse.as_dense_shapes(self.output_shapes, self.output_classes)),
-        output_types=nest.flatten(
-            sparse.as_dense_types(self.output_types, self.output_classes)))
-
-  @property
-  def output_classes(self):
-    return self._input_dataset.output_classes
-
-  @property
-  def output_shapes(self):
-    return self._input_dataset.output_shapes
-
-  @property
-  def output_types(self):
-    return self._input_dataset.output_types
+  return error_ops.ignore_errors()
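
For context, the contrib wrapper now just forwards to the transformation in tensorflow/python/data/experimental/ops/error_ops.py, exposed publicly as `tf.data.experimental.ignore_errors()`. A minimal usage sketch, assuming TF 1.13+ where that symbol exists; the sample values and the `tf.debugging.check_numerics` mapping are purely illustrative:

import tensorflow as tf

# A dataset where one element fails at iteration time: 1. / 0. produces Inf,
# and check_numerics raises InvalidArgumentError for it.
dataset = tf.data.Dataset.from_tensor_slices([1., 2., 0., 4.])
dataset = dataset.map(lambda x: tf.debugging.check_numerics(1. / x, "error"))

# Drop the failing element instead of propagating the error. The deprecated
# tf.contrib.data.ignore_errors() now simply forwards to this transformation.
dataset = dataset.apply(tf.data.experimental.ignore_errors())
# Iterating yields 1.0, 0.5, 0.25; the element that raised is silently skipped.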