author     Rohan Jain <rohanj@google.com>  2016-09-08 19:43:48 -0800
committer  TensorFlower Gardener <gardener@tensorflow.org>  2016-09-08 20:47:26 -0700
commit     56ad910f5957d3aa2d96eb840e6da36ac4105236 (patch)
tree       aaa201e3593af8e7baad551c5c8b7f1d92d7c5ba /tensorflow
parent     8016cf4ab1e2ff3c3c5448e539b9ec89d5a0bcd4 (diff)
Redirecting gfile to file_io.
Also changed the return type of FileIO.readline(), FileIO.readlines(), list_directory() and walk() to be strings rather than bytes. Change: 132634441
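The user-visible effect of the redirect: gfile calls now go through file_io, line-oriented reads return str on both Python 2 and 3, and successive write() calls append. A minimal sketch of the new behavior (the path and contents are hypothetical):

    import tensorflow as tf

    with tf.gfile.GFile('/tmp/example.txt', 'w') as f:
        f.write('line1\n')
        f.write('line2\n')  # appends; the file is flushed on close

    with tf.gfile.GFile('/tmp/example.txt', 'r') as f:
        lines = f.readlines()
    assert lines == ['line1\n', 'line2\n']  # str entries, not b'...'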
Diffstat (limited to 'tensorflow')
-rw-r--r--  tensorflow/contrib/learn/python/learn/datasets/mnist.py  56
-rw-r--r--  tensorflow/contrib/session_bundle/exporter_test.py  2
-rw-r--r--  tensorflow/core/platform/file_system.cc  4
-rw-r--r--  tensorflow/core/platform/file_system.h  2
-rw-r--r--  tensorflow/models/rnn/ptb/reader.py  2
-rw-r--r--  tensorflow/python/BUILD  11
-rw-r--r--  tensorflow/python/lib/io/file_io.i  34
-rw-r--r--  tensorflow/python/lib/io/file_io.py  65
-rw-r--r--  tensorflow/python/lib/io/file_io_test.py  92
-rw-r--r--  tensorflow/python/platform/default/_gfile.py  452
-rw-r--r--  tensorflow/python/platform/default/gfile_test.py  248
-rw-r--r--  tensorflow/python/platform/gfile.py  459
-rw-r--r--  tensorflow/python/platform/gfile_test.py  248
-rw-r--r--  tensorflow/python/summary/event_accumulator.py  3
-rw-r--r--  tensorflow/python/summary/event_multiplexer_test.py  3
-rw-r--r--  tensorflow/python/summary/impl/directory_watcher.py  5
-rw-r--r--  tensorflow/python/summary/impl/io_wrapper.py  2
-rw-r--r--  tensorflow/python/tools/freeze_graph.py  2
-rw-r--r--  tensorflow/python/tools/strip_unused_lib.py  2
-rw-r--r--  tensorflow/python/training/session_manager_test.py  14
-rwxr-xr-x  tensorflow/tools/ci_build/builds/test_installation.sh  4
-rw-r--r--  tensorflow/tools/test/gpu_info_lib.py  5
-rw-r--r--  tensorflow/tools/test/system_info_lib.py  5
23 files changed, 240 insertions, 1480 deletions
diff --git a/tensorflow/contrib/learn/python/learn/datasets/mnist.py b/tensorflow/contrib/learn/python/learn/datasets/mnist.py
index 1e5679df08..08068da182 100644
--- a/tensorflow/contrib/learn/python/learn/datasets/mnist.py
+++ b/tensorflow/contrib/learn/python/learn/datasets/mnist.py
@@ -26,7 +26,6 @@ from six.moves import xrange # pylint: disable=redefined-builtin
from tensorflow.contrib.learn.python.learn.datasets import base
from tensorflow.python.framework import dtypes
-from tensorflow.python.platform import gfile
SOURCE_URL = 'http://yann.lecun.com/exdb/mnist/'
@@ -36,14 +35,25 @@ def _read32(bytestream):
return numpy.frombuffer(bytestream.read(4), dtype=dt)[0]
-def extract_images(filename):
- """Extract the images into a 4D uint8 numpy array [index, y, x, depth]."""
- print('Extracting', filename)
- with gfile.Open(filename, 'rb') as f, gzip.GzipFile(fileobj=f) as bytestream:
+def extract_images(f):
+ """Extract the images into a 4D uint8 numpy array [index, y, x, depth].
+
+ Args:
+ f: A file object that can be passed into a gzip reader.
+
+ Returns:
+ data: A 4D uint8 numpy array [index, y, x, depth].
+
+ Raises:
+ ValueError: If the bytestream does not start with 2051.
+
+ """
+ print('Extracting', f.name)
+ with gzip.GzipFile(fileobj=f) as bytestream:
magic = _read32(bytestream)
if magic != 2051:
raise ValueError('Invalid magic number %d in MNIST image file: %s' %
- (magic, filename))
+ (magic, f.name))
num_images = _read32(bytestream)
rows = _read32(bytestream)
cols = _read32(bytestream)
@@ -62,14 +72,26 @@ def dense_to_one_hot(labels_dense, num_classes):
return labels_one_hot
-def extract_labels(filename, one_hot=False, num_classes=10):
- """Extract the labels into a 1D uint8 numpy array [index]."""
- print('Extracting', filename)
- with gfile.Open(filename, 'rb') as f, gzip.GzipFile(fileobj=f) as bytestream:
+def extract_labels(f, one_hot=False, num_classes=10):
+ """Extract the labels into a 1D uint8 numpy array [index].
+
+ Args:
+ f: A file object that can be passed into a gzip reader.
+ one_hot: If True, one-hot encodes the result.
+ num_classes: Number of classes for the one hot encoding.
+
+ Returns:
+ labels: a 1D uint8 numpy array.
+
+ Raises:
+ ValueError: If the bytestream doesn't start with 2049.
+ """
+ print('Extracting', f.name)
+ with gzip.GzipFile(fileobj=f) as bytestream:
magic = _read32(bytestream)
if magic != 2049:
raise ValueError('Invalid magic number %d in MNIST label file: %s' %
- (magic, filename))
+ (magic, f.name))
num_items = _read32(bytestream)
buf = bytestream.read(num_items)
labels = numpy.frombuffer(buf, dtype=numpy.uint8)
@@ -187,19 +209,23 @@ def read_data_sets(train_dir,
local_file = base.maybe_download(TRAIN_IMAGES, train_dir,
SOURCE_URL + TRAIN_IMAGES)
- train_images = extract_images(local_file)
+ with open(local_file, 'rb') as f:
+ train_images = extract_images(f)
local_file = base.maybe_download(TRAIN_LABELS, train_dir,
SOURCE_URL + TRAIN_LABELS)
- train_labels = extract_labels(local_file, one_hot=one_hot)
+ with open(local_file, 'rb') as f:
+ train_labels = extract_labels(f, one_hot=one_hot)
local_file = base.maybe_download(TEST_IMAGES, train_dir,
SOURCE_URL + TEST_IMAGES)
- test_images = extract_images(local_file)
+ with open(local_file, 'rb') as f:
+ test_images = extract_images(f)
local_file = base.maybe_download(TEST_LABELS, train_dir,
SOURCE_URL + TEST_LABELS)
- test_labels = extract_labels(local_file, one_hot=one_hot)
+ with open(local_file, 'rb') as f:
+ test_labels = extract_labels(f, one_hot=one_hot)
if not 0 <= validation_size <= len(train_images):
raise ValueError(
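With this signature change the MNIST helpers take an already-open file object, so callers control the file's lifetime (and can pass in anything a gzip reader accepts). A hedged usage sketch; the download paths are hypothetical:

    from tensorflow.contrib.learn.python.learn.datasets import mnist

    with open('/tmp/data/train-images-idx3-ubyte.gz', 'rb') as f:
        images = mnist.extract_images(f)   # 4D uint8 array [index, y, x, depth]
    with open('/tmp/data/train-labels-idx1-ubyte.gz', 'rb') as f:
        labels = mnist.extract_labels(f, one_hot=True)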
diff --git a/tensorflow/contrib/session_bundle/exporter_test.py b/tensorflow/contrib/session_bundle/exporter_test.py
index bed1c8ff45..06516bcb32 100644
--- a/tensorflow/contrib/session_bundle/exporter_test.py
+++ b/tensorflow/contrib/session_bundle/exporter_test.py
@@ -161,7 +161,7 @@ class SaveRestoreShardedTest(tf.test.TestCase):
global_step, constants.ASSETS_DIRECTORY,
"hello42.txt")
asset_contents = gfile.GFile(assets_path).read()
- self.assertEqual(asset_contents, "your data here")
+ self.assertEqual(asset_contents, b"your data here")
self.assertEquals("hello42.txt", asset.filename)
self.assertEquals("filename42:0", asset.tensor_binding.tensor_name)
ignored_asset_path = os.path.join(export_path,
diff --git a/tensorflow/core/platform/file_system.cc b/tensorflow/core/platform/file_system.cc
index bb926b35b7..6532c0f81a 100644
--- a/tensorflow/core/platform/file_system.cc
+++ b/tensorflow/core/platform/file_system.cc
@@ -30,6 +30,10 @@ FileSystem::~FileSystem() {}
string FileSystem::TranslateName(const string& name) const { return name; }
Status FileSystem::IsDirectory(const string& name) {
+ // Check if path exists.
+ if (!FileExists(name)) {
+ return Status(tensorflow::error::NOT_FOUND, "Path not found");
+ }
FileStatistics stat;
TF_RETURN_IF_ERROR(Stat(name, &stat));
if (S_ISDIR(stat.mode)) {
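On the Python side this C++ change means a missing path no longer surfaces as an exception from is_directory(); the NOT_FOUND status is consumed and the call simply reports false (see the file_io.py and file_io_test.py hunks below). A small sketch of the new behavior:

    from tensorflow.python.lib.io import file_io

    # Previously raised errors.NotFoundError; now returns False.
    print(file_io.is_directory('/tmp/does_not_exist'))  # False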
diff --git a/tensorflow/core/platform/file_system.h b/tensorflow/core/platform/file_system.h
index 51074768c5..2ca72eaff7 100644
--- a/tensorflow/core/platform/file_system.h
+++ b/tensorflow/core/platform/file_system.h
@@ -87,6 +87,7 @@ class FileSystem {
virtual Status IsDirectory(const string& fname);
};
+#ifndef SWIG
// Degenerate file system that provides no implementations.
class NullFileSystem : public FileSystem {
public:
@@ -146,6 +147,7 @@ class NullFileSystem : public FileSystem {
return errors::Unimplemented("Stat unimplemented");
}
};
+#endif
/// A file abstraction for randomly reading the contents of a file.
class RandomAccessFile {
diff --git a/tensorflow/models/rnn/ptb/reader.py b/tensorflow/models/rnn/ptb/reader.py
index cd18f1ccc2..21ee07dce7 100644
--- a/tensorflow/models/rnn/ptb/reader.py
+++ b/tensorflow/models/rnn/ptb/reader.py
@@ -28,7 +28,7 @@ import tensorflow as tf
def _read_words(filename):
with tf.gfile.GFile(filename, "r") as f:
- return f.read().replace("\n", "<eos>").split()
+ return f.read().decode("utf-8").replace("\n", "<eos>").split()
def _build_vocab(filename):
diff --git a/tensorflow/python/BUILD b/tensorflow/python/BUILD
index a9370bd44f..da2b0511cb 100644
--- a/tensorflow/python/BUILD
+++ b/tensorflow/python/BUILD
@@ -44,6 +44,7 @@ py_library(
srcs = glob(["platform/*.py"]),
srcs_version = "PY2AND3",
deps = [
+ ":lib",
":pywrap_tensorflow",
"//tensorflow/core:protos_all_py",
],
@@ -266,6 +267,15 @@ py_library(
],
)
+py_library(
+ name = "errors",
+ srcs = ["framework/errors.py"],
+ srcs_version = "PY2AND3",
+ deps = [
+ ":util",
+ ],
+)
+
# load("//third_party/py/cython:build_defs.bzl", "pyx_library")
py_library(
@@ -1009,6 +1019,7 @@ py_library(
srcs = glob(["lib/**/*.py"]),
srcs_version = "PY2AND3",
deps = [
+ ":errors",
":pywrap_tensorflow",
],
)
diff --git a/tensorflow/python/lib/io/file_io.i b/tensorflow/python/lib/io/file_io.i
index 3534d60ee0..5c73d29a88 100644
--- a/tensorflow/python/lib/io/file_io.i
+++ b/tensorflow/python/lib/io/file_io.i
@@ -24,10 +24,11 @@ limitations under the License.
#include "tensorflow/core/lib/io/buffered_inputstream.h"
#include "tensorflow/core/lib/io/inputstream_interface.h"
#include "tensorflow/core/lib/io/random_inputstream.h"
-#include "tensorflow/core/lib/io/path.h"
#include "tensorflow/core/lib/io/match.h"
+#include "tensorflow/core/lib/io/path.h"
#include "tensorflow/core/platform/env.h"
#include "tensorflow/core/platform/file_statistics.h"
+#include "tensorflow/core/platform/file_system.h"
#include "tensorflow/core/protobuf/meta_graph.pb.h"
%}
@@ -172,11 +173,35 @@ tensorflow::io::BufferedInputStream* CreateBufferedInputStream(
buffer_size));
return buffered_input_stream.release();
}
+
+tensorflow::WritableFile* CreateWritableFile(const string& filename) {
+ std::unique_ptr<tensorflow::WritableFile> file;
+ if (!tensorflow::Env::Default()->NewWritableFile(filename, &file).ok()) {
+ return nullptr;
+ }
+ return file.release();
+}
+
+void AppendToFile(const string& file_content, tensorflow::WritableFile* file,
+ TF_Status* out_status) {
+ tensorflow::Status status = file->Append(file_content);
+ if (!status.ok()) {
+ Set_TF_Status_from_Status(out_status, status);
+ }
+}
+
+void FlushWritableFile(tensorflow::WritableFile* file, TF_Status* out_status) {
+ tensorflow::Status status = file->Flush();
+ if (!status.ok()) {
+ Set_TF_Status_from_Status(out_status, status);
+ }
+}
%}
// Ensure that the returned object is destroyed when its wrapper is
// garbage collected.
%newobject CreateBufferedInputStream;
+%newobject CreateWritableFile;
// Wrap the above functions.
inline bool FileExists(const string& filename);
@@ -197,11 +222,18 @@ void Stat(const string& filename, tensorflow::FileStatistics* stats,
TF_Status* out_status);
tensorflow::io::BufferedInputStream* CreateBufferedInputStream(
const string& filename, size_t buffer_size);
+tensorflow::WritableFile* CreateWritableFile(const string& filename);
+void AppendToFile(const string& file_content, tensorflow::WritableFile* file,
+ TF_Status* out_status);
+void FlushWritableFile(tensorflow::WritableFile* file, TF_Status* out_status);
%ignoreall
%unignore tensorflow::io::BufferedInputStream;
%unignore tensorflow::io::BufferedInputStream::~BufferedInputStream;
%unignore tensorflow::io::BufferedInputStream::ReadLineAsString;
+%unignore tensorflow::WritableFile;
+%unignore tensorflow::WritableFile::~WritableFile;
+%include "tensorflow/core/platform/file_system.h"
%include "tensorflow/core/lib/io/inputstream_interface.h"
%include "tensorflow/core/lib/io/buffered_inputstream.h"
%unignoreall
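These three SWIG helpers are the glue behind streaming writes: the Python side holds the WritableFile* returned by CreateWritableFile and feeds it to AppendToFile and FlushWritableFile. A condensed sketch of the call chain shown in the next diff, not a standalone public API:

    # Inside FileIO, once _prewrite_check() has created self._writable_file:
    with errors.raise_exception_on_not_ok_status() as status:
      pywrap_tensorflow.AppendToFile(
          compat.as_bytes(file_content), self._writable_file, status)
    with errors.raise_exception_on_not_ok_status() as status:
      pywrap_tensorflow.FlushWritableFile(self._writable_file, status)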
diff --git a/tensorflow/python/lib/io/file_io.py b/tensorflow/python/lib/io/file_io.py
index f5b138c9da..7a19ae2b71 100644
--- a/tensorflow/python/lib/io/file_io.py
+++ b/tensorflow/python/lib/io/file_io.py
@@ -45,6 +45,7 @@ class FileIO(object):
self.__name = name
self.__mode = mode
self._read_buf = None
+ self._writable_file = None
if mode not in ("r", "w", "a", "r+", "w+", "a+"):
raise errors.InvalidArgumentError(
None, None, "mode is not 'r' or 'w' or 'a' or 'r+' or 'w+' or 'a+'")
@@ -72,18 +73,27 @@ class FileIO(object):
raise errors.InternalError(None, None,
"Could not open file for streaming")
+ def _prewrite_check(self):
+ if not self._writable_file:
+ if not self._write_check_passed:
+ raise errors.PermissionDeniedError(None, None,
+ "File isn't open for writing")
+ self._writable_file = pywrap_tensorflow.CreateWritableFile(
+ compat.as_bytes(self.__name))
+ if not self._writable_file:
+ raise errors.InternalError(None, None,
+ "Could not open file for writing")
+
def size(self):
"""Returns the size of the file."""
return stat(self.__name).length
def write(self, file_content):
- """Writes file_content to the file."""
- if not self._write_check_passed:
- raise errors.PermissionDeniedError(None, None,
- "File isn't open for writing")
+ """Writes file_content to the file. Appends to the end of the file."""
+ self._prewrite_check()
with errors.raise_exception_on_not_ok_status() as status:
- pywrap_tensorflow.WriteStringToFile(
- compat.as_bytes(self.__name), compat.as_bytes(file_content), status)
+ pywrap_tensorflow.AppendToFile(
+ compat.as_bytes(file_content), self._writable_file, status)
def read(self):
"""Returns the contents of a file as a string."""
@@ -97,7 +107,7 @@ class FileIO(object):
def readline(self):
r"""Reads the next line from the file. Leaves the '\n' at the end."""
self._prereadline_check()
- return self._read_buf.ReadLineAsString()
+ return compat.as_str_any(self._read_buf.ReadLineAsString())
def readlines(self):
"""Returns all lines from the file in a list."""
@@ -116,7 +126,7 @@ class FileIO(object):
def __exit__(self, unused_type, unused_value, unused_traceback):
"""Make usable with "with" statement."""
- self._read_buf = None
+ self.close()
def __iter__(self):
return self
@@ -130,6 +140,18 @@ class FileIO(object):
def __next__(self):
return self.next()
+ def flush(self):
+ """Flushes the Writable file to disk."""
+ if self._writable_file:
+ with errors.raise_exception_on_not_ok_status() as status:
+ pywrap_tensorflow.FlushWritableFile(self._writable_file, status)
+
+ def close(self):
+ """Closes FileIO. Should be called for the WritableFile to be flushed."""
+ self._read_buf = None
+ self.flush()
+ self._writable_file = None
+
def file_exists(filename):
"""Determines whether a path exists or not.
@@ -184,8 +206,8 @@ def write_string_to_file(filename, file_content):
Raises:
errors.OpError: If there are errors during the operation.
"""
- f = FileIO(filename, mode="w")
- f.write(file_content)
+ with FileIO(filename, mode="w") as f:
+ f.write(file_content)
def get_matching_files(filename):
@@ -203,7 +225,7 @@ def get_matching_files(filename):
with errors.raise_exception_on_not_ok_status() as status:
# Convert each element to string, since the return values of the
# vector of string should be interpreted as strings, not bytes.
- return [compat.as_str(matching_filename)
+ return [compat.as_str_any(matching_filename)
for matching_filename in pywrap_tensorflow.GetMatchingFiles(
compat.as_bytes(filename), status)]
@@ -235,7 +257,7 @@ def recursive_create_dir(dirname):
errors.OpError: If the operation fails.
"""
with errors.raise_exception_on_not_ok_status() as status:
- dirs = dirname.split("/")
+ dirs = compat.as_str_any(dirname).split("/")
for i in range(len(dirs)):
partial_dir = "/".join(dirs[0:i + 1])
if partial_dir and not file_exists(partial_dir):
@@ -301,8 +323,8 @@ def is_directory(dirname):
Raises:
errors.OpError: If the path doesn't exist or other errors
"""
- with errors.raise_exception_on_not_ok_status() as status:
- return pywrap_tensorflow.IsDirectory(compat.as_bytes(dirname), status)
+ status = pywrap_tensorflow.TF_NewStatus()
+ return pywrap_tensorflow.IsDirectory(compat.as_bytes(dirname), status)
def list_directory(dirname):
@@ -315,7 +337,7 @@ def list_directory(dirname):
dirname: string, path to a directory
Returns:
- [filename1, filename2, ... filenameN]
+ [filename1, filename2, ... filenameN] as strings
Raises:
errors.NotFoundError if directory doesn't exist
@@ -323,8 +345,10 @@ def list_directory(dirname):
if not is_directory(dirname):
raise errors.NotFoundError(None, None, "Could not find directory")
file_list = get_matching_files(os.path.join(compat.as_str_any(dirname), "*"))
- return [compat.as_bytes(pywrap_tensorflow.Basename(compat.as_bytes(filename)))
- for filename in file_list]
+ return [
+ compat.as_str_any(pywrap_tensorflow.Basename(compat.as_bytes(filename)))
+ for filename in file_list
+ ]
def walk(top, in_order=True):
@@ -337,11 +361,12 @@ def walk(top, in_order=True):
Errors that happen while listing directories are ignored.
Yields:
- # Each yield is a 3-tuple: the pathname of a directory, followed
- # by lists of all its subdirectories and leaf files.
+ Each yield is a 3-tuple: the pathname of a directory, followed by lists of
+ all its subdirectories and leaf files.
(dirname, [subdirname, subdirname, ...], [filename, filename, ...])
+ as strings
"""
- top = compat.as_bytes(top)
+ top = compat.as_str_any(top)
try:
listing = list_directory(top)
except errors.NotFoundError:
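Because FileIO now keeps the WritableFile handle open across calls, write() appends instead of rewriting the whole file, and buffered data is only guaranteed on disk after flush() or close() (which __exit__ now invokes). A minimal sketch; the path is hypothetical:

    from tensorflow.python.lib.io import file_io

    with file_io.FileIO('/tmp/demo.txt', mode='w') as f:
        f.write('line1\n')
        f.write('line2')   # second write appends
    # __exit__ -> close() -> flush(), so the data is now durable
    print(file_io.read_file_to_string('/tmp/demo.txt'))  # b'line1\nline2'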
diff --git a/tensorflow/python/lib/io/file_io_test.py b/tensorflow/python/lib/io/file_io_test.py
index a4cef8cb5a..9837912d3e 100644
--- a/tensorflow/python/lib/io/file_io_test.py
+++ b/tensorflow/python/lib/io/file_io_test.py
@@ -23,7 +23,6 @@ import tensorflow as tf
from tensorflow.python.framework import errors
from tensorflow.python.lib.io import file_io
-from tensorflow.python.util import compat
class FileIoTest(tf.test.TestCase):
@@ -48,6 +47,14 @@ class FileIoTest(tf.test.TestCase):
file_contents = file_io.read_file_to_string(file_path)
self.assertEqual(b"testing", file_contents)
+ def testMultipleWrites(self):
+ file_path = os.path.join(self._base_dir, "temp_file")
+ with file_io.FileIO(file_path, mode="w") as f:
+ f.write("line1\n")
+ f.write("line2")
+ file_contents = file_io.read_file_to_string(file_path)
+ self.assertEqual(b"line1\nline2", file_contents)
+
def testFileWriteBadMode(self):
file_path = os.path.join(self._base_dir, "temp_file")
with self.assertRaises(errors.PermissionDeniedError):
@@ -100,7 +107,7 @@ class FileIoTest(tf.test.TestCase):
copy_path = os.path.join(self._base_dir, "copy_file")
file_io.copy(file_path, copy_path)
self.assertTrue(file_io.file_exists(copy_path))
- self.assertEqual(b"testing", file_io.read_file_to_string(file_path))
+ self.assertEqual(b"testing", file_io.FileIO(file_path, mode="r").read())
def testCopyOverwrite(self):
file_path = os.path.join(self._base_dir, "temp_file")
@@ -109,7 +116,7 @@ class FileIoTest(tf.test.TestCase):
file_io.FileIO(copy_path, mode="w").write("copy")
file_io.copy(file_path, copy_path, overwrite=True)
self.assertTrue(file_io.file_exists(copy_path))
- self.assertEqual(b"testing", file_io.read_file_to_string(file_path))
+ self.assertEqual(b"testing", file_io.FileIO(file_path, mode="r").read())
def testCopyOverwriteFalse(self):
file_path = os.path.join(self._base_dir, "temp_file")
@@ -154,8 +161,7 @@ class FileIoTest(tf.test.TestCase):
def testIsDirectory(self):
dir_path = os.path.join(self._base_dir, "test_dir")
# Failure for a non-existing dir.
- with self.assertRaises(errors.NotFoundError):
- file_io.is_directory(dir_path)
+ self.assertFalse(file_io.is_directory(dir_path))
file_io.create_dir(dir_path)
self.assertTrue(file_io.is_directory(dir_path))
file_path = os.path.join(dir_path, "test_file")
@@ -166,16 +172,16 @@ class FileIoTest(tf.test.TestCase):
def testListDirectory(self):
dir_path = os.path.join(self._base_dir, "test_dir")
file_io.create_dir(dir_path)
- files = [b"file1.txt", b"file2.txt", b"file3.txt"]
+ files = ["file1.txt", "file2.txt", "file3.txt"]
for name in files:
- file_path = os.path.join(dir_path, compat.as_str_any(name))
+ file_path = os.path.join(dir_path, name)
file_io.FileIO(file_path, mode="w").write("testing")
subdir_path = os.path.join(dir_path, "sub_dir")
file_io.create_dir(subdir_path)
subdir_file_path = os.path.join(subdir_path, "file4.txt")
file_io.FileIO(subdir_file_path, mode="w").write("testing")
dir_list = file_io.list_directory(dir_path)
- self.assertItemsEqual(files + [b"sub_dir"], dir_list)
+ self.assertItemsEqual(files + ["sub_dir"], dir_list)
def testListDirectoryFailure(self):
dir_path = os.path.join(self._base_dir, "test_dir")
@@ -209,22 +215,20 @@ class FileIoTest(tf.test.TestCase):
all_dirs.append(w_dir)
all_subdirs.append(w_subdirs)
all_files.append(w_files)
- self.assertItemsEqual(all_dirs, [compat.as_bytes(dir_path)] + [
- compat.as_bytes(os.path.join(dir_path, item))
+ self.assertItemsEqual(all_dirs, [dir_path] + [
+ os.path.join(dir_path, item)
for item in ["subdir1_1", "subdir1_2", "subdir1_2/subdir2", "subdir1_3"]
])
- self.assertEqual(compat.as_bytes(dir_path), all_dirs[0])
+ self.assertEqual(dir_path, all_dirs[0])
self.assertLess(
- all_dirs.index(compat.as_bytes(os.path.join(dir_path, "subdir1_2"))),
- all_dirs.index(
- compat.as_bytes(os.path.join(dir_path, "subdir1_2/subdir2"))))
- self.assertItemsEqual(all_subdirs[1:5], [[], [b"subdir2"], [], []])
+ all_dirs.index(os.path.join(dir_path, "subdir1_2")),
+ all_dirs.index(os.path.join(dir_path, "subdir1_2/subdir2")))
+ self.assertItemsEqual(all_subdirs[1:5], [[], ["subdir2"], [], []])
self.assertItemsEqual(all_subdirs[0],
- [b"subdir1_1", b"subdir1_2", b"subdir1_3"])
- self.assertItemsEqual(all_files, [[b"file1.txt"], [b"file2.txt"], [], [],
- []])
+ ["subdir1_1", "subdir1_2", "subdir1_3"])
+ self.assertItemsEqual(all_files, [["file1.txt"], ["file2.txt"], [], [], []])
self.assertLess(
- all_files.index([b"file1.txt"]), all_files.index([b"file2.txt"]))
+ all_files.index(["file1.txt"]), all_files.index(["file2.txt"]))
def testWalkPostOrder(self):
dir_path = os.path.join(self._base_dir, "test_dir")
@@ -238,21 +242,19 @@ class FileIoTest(tf.test.TestCase):
all_subdirs.append(w_subdirs)
all_files.append(w_files)
self.assertItemsEqual(all_dirs, [
- compat.as_bytes(os.path.join(dir_path, item))
+ os.path.join(dir_path, item)
for item in ["subdir1_1", "subdir1_2/subdir2", "subdir1_2", "subdir1_3"]
- ] + [compat.as_bytes(dir_path)])
- self.assertEqual(compat.as_bytes(dir_path), all_dirs[4])
+ ] + [dir_path])
+ self.assertEqual(dir_path, all_dirs[4])
self.assertLess(
- all_dirs.index(
- compat.as_bytes(os.path.join(dir_path, "subdir1_2/subdir2"))),
- all_dirs.index(compat.as_bytes(os.path.join(dir_path, "subdir1_2"))))
- self.assertItemsEqual(all_subdirs[0:4], [[], [], [b"subdir2"], []])
+ all_dirs.index(os.path.join(dir_path, "subdir1_2/subdir2")),
+ all_dirs.index(os.path.join(dir_path, "subdir1_2")))
+ self.assertItemsEqual(all_subdirs[0:4], [[], [], ["subdir2"], []])
self.assertItemsEqual(all_subdirs[4],
- [b"subdir1_1", b"subdir1_2", b"subdir1_3"])
- self.assertItemsEqual(all_files, [[b"file2.txt"], [], [], [],
- [b"file1.txt"]])
+ ["subdir1_1", "subdir1_2", "subdir1_3"])
+ self.assertItemsEqual(all_files, [["file2.txt"], [], [], [], ["file1.txt"]])
self.assertLess(
- all_files.index([b"file2.txt"]), all_files.index([b"file1.txt"]))
+ all_files.index(["file2.txt"]), all_files.index(["file1.txt"]))
def testWalkFailure(self):
dir_path = os.path.join(self._base_dir, "test_dir")
@@ -273,7 +275,7 @@ class FileIoTest(tf.test.TestCase):
file_io.FileIO(file_path, mode="w").write("testing")
file_statistics = file_io.stat(file_path)
os_statistics = os.stat(file_path)
- self.assertEquals(7, file_statistics.length)
+ self.assertEqual(7, file_statistics.length)
self.assertEqual(
int(os_statistics.st_mtime), int(file_statistics.mtime_nsec / 1e9))
@@ -283,34 +285,34 @@ class FileIoTest(tf.test.TestCase):
def testReadLine(self):
file_path = os.path.join(self._base_dir, "temp_file")
- f = file_io.FileIO(file_path, mode="r+")
- f.write("testing1\ntesting2\ntesting3\n\ntesting5")
+ with file_io.FileIO(file_path, mode="r+") as f:
+ f.write("testing1\ntesting2\ntesting3\n\ntesting5")
self.assertEqual(36, f.size())
- self.assertEqual(b"testing1\n", f.readline())
- self.assertEqual(b"testing2\n", f.readline())
- self.assertEqual(b"testing3\n", f.readline())
- self.assertEqual(b"\n", f.readline())
- self.assertEqual(b"testing5", f.readline())
- self.assertEqual(b"", f.readline())
+ self.assertEqual("testing1\n", f.readline())
+ self.assertEqual("testing2\n", f.readline())
+ self.assertEqual("testing3\n", f.readline())
+ self.assertEqual("\n", f.readline())
+ self.assertEqual("testing5", f.readline())
+ self.assertEqual("", f.readline())
def testReadingIterator(self):
file_path = os.path.join(self._base_dir, "temp_file")
- f = file_io.FileIO(file_path, mode="r+")
data = ["testing1\n", "testing2\n", "testing3\n", "\n", "testing5"]
- f.write("".join(data))
+ with file_io.FileIO(file_path, mode="r+") as f:
+ f.write("".join(data))
actual_data = []
for line in f:
actual_data.append(line)
- self.assertSequenceEqual(actual_data,
- [compat.as_bytes(item) for item in data])
+ self.assertSequenceEqual(actual_data, data)
def testReadlines(self):
file_path = os.path.join(self._base_dir, "temp_file")
- f = file_io.FileIO(file_path, mode="r+")
data = ["testing1\n", "testing2\n", "testing3\n", "\n", "testing5"]
+ f = file_io.FileIO(file_path, mode="r+")
f.write("".join(data))
+ f.flush()
lines = f.readlines()
- self.assertSequenceEqual(lines, [compat.as_bytes(item) for item in data])
+ self.assertSequenceEqual(lines, data)
if __name__ == "__main__":
diff --git a/tensorflow/python/platform/default/_gfile.py b/tensorflow/python/platform/default/_gfile.py
deleted file mode 100644
index f700d34978..0000000000
--- a/tensorflow/python/platform/default/_gfile.py
+++ /dev/null
@@ -1,452 +0,0 @@
-# Copyright 2015 Google Inc. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ==============================================================================
-
-"""File processing utilities."""
-
-from __future__ import absolute_import
-from __future__ import division
-from __future__ import print_function
-
-import collections
-import errno
-import functools
-import glob as _glob
-import os
-import shutil
-import threading
-
-import six
-
-
-class _GFileBase(six.Iterator):
- """Base I/O wrapper class. Similar semantics to Python's file object."""
-
- # pylint: disable=protected-access
- def _synchronized(fn):
- """Synchronizes file I/O for methods in GFileBase."""
- @functools.wraps(fn)
- def sync(self, *args, **kwargs):
- # Sometimes a GFileBase method is called before the instance
- # has been properly initialized. Check that _locker is available.
- if hasattr(self, '_locker'): self._locker.lock()
- try:
- return fn(self, *args, **kwargs)
- finally:
- if hasattr(self, '_locker'): self._locker.unlock()
- return sync
- # pylint: enable=protected-access
-
- def __init__(self, name, mode, locker):
- """Create the GFileBase object with the given filename, mode, and locker.
-
- Args:
- name: string, the filename.
- mode: string, the mode to open the file with (e.g. "r", "w", "a+").
- locker: the thread locking object (e.g. _PythonLocker) for controlling
- thread access to the I/O methods of this class.
- """
- self._name = name
- self._mode = mode
- self._locker = locker
- self._fp = open(name, mode)
-
- def __enter__(self):
- """Make GFileBase usable with "with" statement."""
- return self
-
- def __exit__(self, unused_type, unused_value, unused_traceback):
- """Make GFileBase usable with "with" statement."""
- self.close()
-
- @_synchronized
- def __del__(self):
- # __del__ is sometimes called before initialization, in which
- # case the object is not fully constructed. Check for this here
- # before trying to close the file handle.
- if hasattr(self, '_fp'): self._fp.close()
-
- @_synchronized
- def flush(self):
- """Flush the underlying file handle."""
- return self._fp.flush()
-
- @property
- @_synchronized
- def closed(self):
- """Returns "True" if the file handle is closed. Otherwise False."""
- return self._fp.closed
-
- @_synchronized
- def write(self, data):
- """Write data to the underlying file handle.
-
- Args:
- data: The string to write to the file handle.
- """
- self._fp.write(data)
-
- @_synchronized
- def writelines(self, seq):
- """Write a sequence of strings to the underlying file handle."""
- self._fp.writelines(seq)
-
- @_synchronized
- def tell(self):
- """Return the location from the underlying file handle.
-
- Returns:
- An integer location (which can be used in e.g., seek).
- """
- return self._fp.tell()
-
- @_synchronized
- def seek(self, offset, whence=0):
- """Seek to offset (conditioned on whence) in the underlying file handle.
-
- Args:
- offset: int, the offset within the file to seek to.
- whence: 0, 1, or 2. See python's seek() documentation for details.
- """
- self._fp.seek(offset, whence)
-
- @_synchronized
- def truncate(self, new_size=None):
- """Truncate the underlying file handle to new_size.
-
- Args:
- new_size: Size after truncation. If None, the file handle is truncated
- to 0 bytes.
- """
- self._fp.truncate(new_size)
-
- @_synchronized
- def readline(self, max_length=-1):
- """Read a single line (up to max_length) from the underlying file handle.
-
- Args:
- max_length: The maximum number of characters to read.
-
- Returns:
- A string, including any newline at the end, or empty string if at EOF.
- """
- return self._fp.readline(max_length)
-
- @_synchronized
- def readlines(self, sizehint=None):
- """Read lines from the underlying file handle.
-
- Args:
- sizehint: See the python file.readlines() documentation.
-
- Returns:
- A list of strings from the underlying file handle.
- """
- if sizehint is not None:
- return self._fp.readlines(sizehint)
- else:
- return self._fp.readlines()
-
- def __iter__(self):
- """Enable line iteration on the underlying handle (not synchronized)."""
- return self
-
- # Not synchronized
- def __next__(self):
- """Enable line iteration on the underlying handle (not synchronized).
-
- Returns:
- A line iterator from the underlying handle.
-
- Example:
- # read a file's lines by consuming the iterator with a list
- with open("filename", "r") as fp: lines = list(fp)
- """
- return next(self._fp)
-
- @_synchronized
- def Size(self): # pylint: disable=invalid-name
- """Get byte size of the file from the underlying file handle."""
- cur = self.tell()
- try:
- self.seek(0, 2)
- size = self.tell()
- finally:
- self.seek(cur)
- return size
-
- @_synchronized
- def read(self, n=-1):
- """Read n bytes from the underlying file handle.
-
- Args:
- n: Number of bytes to read (if negative, read to end of file handle.)
-
- Returns:
- A string of the bytes read, up to the end of file.
- """
- return self._fp.read(n)
-
- @_synchronized
- def close(self):
- """Close the underlying file handle."""
- self._fp.close()
-
- # Declare wrappers as staticmethods at the end so that we can
- # use them as decorators.
- _synchronized = staticmethod(_synchronized)
-
-
-class GFile(_GFileBase):
- """File I/O wrappers with thread locking."""
-
- def __init__(self, name, mode='r'):
- super(GFile, self).__init__(name, mode, _Pythonlocker())
-
-
-class FastGFile(_GFileBase):
- """File I/O wrappers without thread locking."""
-
- def __init__(self, name, mode='r'):
- super(FastGFile, self).__init__(name, mode, _Nulllocker())
-
-
-# locker classes. Note that locks must be reentrant, so that multiple
-# lock() calls by the owning thread will not block.
-class _Pythonlocker(object):
- """A locking strategy that uses standard locks from the thread module."""
-
- def __init__(self):
- self._lock = threading.RLock()
-
- def lock(self):
- self._lock.acquire()
-
- def unlock(self):
- self._lock.release()
-
-
-class _Nulllocker(object):
- """A locking strategy where lock() and unlock() methods are no-ops."""
-
- def lock(self):
- pass
-
- def unlock(self):
- pass
-
-
-def Exists(path): # pylint: disable=invalid-name
- """Returns True iff "path" exists (as a dir, file, non-broken symlink)."""
- return os.path.exists(path)
-
-
-def IsDirectory(path): # pylint: disable=invalid-name
- """Return True iff "path" exists and is a directory."""
- return os.path.isdir(path)
-
-
-def Glob(glob): # pylint: disable=invalid-name
- """Return a list of filenames matching the glob "glob"."""
- return _glob.glob(glob)
-
-
-def MkDir(path, mode=0o755): # pylint: disable=invalid-name
- """Create the directory "path" with the given mode.
-
- Args:
- path: The directory path
- mode: The file mode for the directory
-
- Returns:
- None
-
- Raises:
- OSError: if the path already exists
- """
- os.mkdir(path, mode)
-
-
-def MakeDirs(path, mode=0o755): # pylint: disable=invalid-name
- """Recursively create the directory "path" with the given mode.
-
- Args:
- path: The directory path.
- mode: The file mode for the created directories
-
- Raises:
- OSError: if the path already exists
- """
- # NOTE(mrry): MakeDirs("") should be a no-op to match other
- # implementations of tf.gfile.
- if path:
- os.makedirs(path, mode)
-
-
-def RmDir(directory): # pylint: disable=invalid-name
- """Removes the directory "directory" iff the directory is empty.
-
- Args:
- directory: The directory to remove.
-
- Raises:
- OSError: If the directory does not exist or is not empty.
- """
- os.rmdir(directory)
-
-
-def Remove(path): # pylint: disable=invalid-name
- """Delete the (non-directory) file "path".
-
- Args:
- path: The file to remove.
-
- Raises:
- OSError: If "path" does not exist, is a directory, or cannot be deleted.
- """
- os.remove(path)
-
-
-def Rename(oldpath, newpath, overwrite=False):
- """Rename or move a file, or a local directory.
-
- Args:
- oldpath: string; a pathname of a file.
- newpath: string; a pathname to which the file will be moved.
- overwrite: boolean; if false, it is an error for newpath to be
- occupied by an existing file.
-
- Raises:
- OSError: If "newpath" is occupied by an existing file and overwrite=False.
- """
- if not overwrite and Exists(newpath) and not IsDirectory(newpath):
- raise OSError(errno.EEXIST, os.strerror(errno.EEXIST), newpath)
- os.rename(oldpath, newpath)
-
-
-def DeleteRecursively(path): # pylint: disable=invalid-name
- """Delete the file or directory "path" recursively.
-
- Args:
- path: The path to remove (may be a non-empty directory).
-
- Raises:
- OSError: If the path does not exist or cannot be deleted.
- """
- if IsDirectory(path):
- shutil.rmtree(path)
- else:
- Remove(path)
-
-
-def ListDirectory(directory, return_dotfiles=False): # pylint: disable=invalid-name
- """Returns a list of files in dir.
-
- As with the standard os.listdir(), the filenames in the returned list will be
- the basenames of the files in dir (not absolute paths). To get a list of
- absolute paths of files in a directory, a client could do:
- file_list = gfile.ListDir(my_dir)
- file_list = [os.path.join(my_dir, f) for f in file_list]
- (assuming that my_dir itself specified an absolute path to a directory).
-
- Args:
- directory: the directory to list
- return_dotfiles: if True, dotfiles will be returned as well. Even if
- this arg is True, '.' and '..' will not be returned.
-
- Returns:
- ['list', 'of', 'files']. The entries '.' and '..' are never returned.
- Other entries starting with a dot will only be returned if return_dotfiles
- is True.
- Raises:
- OSError: if there is an error retrieving the directory listing.
- """
- files = os.listdir(directory)
- if not return_dotfiles:
- files = [f for f in files if not f.startswith('.')]
- return files
-
-
-def Walk(top, topdown=1, onerror=None):
- """Recursive directory tree generator.
-
- Args:
- top: string, a pathname.
- topdown: bool, should traversal be pre-order (True) or post-order (False)
- onerror: function, optional callback for errors.
-
- By default, errors that occur when listing a directory are ignored.
- (This is the same semantics as Python's os.walk() generator.) If the
- optional argument "onerror" is specified, it should be a function. It
- will be called with one argument, an os.error instance. It can return
- to continue with the walk, or reraise the exception to abort the walk.
-
- Yields:
- # Each yield is a 3-tuple: the pathname of a directory, followed
- # by lists of all its subdirectories and leaf files.
- (dirname, [subdirname, subdirname, ...], [filename, filename, ...])
- """
- return os.walk(top, topdown=topdown, onerror=onerror)
-
-
-def Stat(path): # pylint: disable=invalid-name
- """Gets the status of a file.
-
- Args:
- path: The file to call Stat() on.
-
- Does the equivalent of Stat() on the specified "path" and return file
- properties.
-
- Returns:
- An object whose attributes give information on the file.
-
- Raises:
- OSError: If "path" does not exist.
- """
- statinfo = os.stat(path)
- filestat = collections.namedtuple('FileStat', ['mtime'])
- filestat.mtime = statinfo.st_mtime
- return filestat
-
-
-def Copy(oldpath, newpath, overwrite=False):
- """Copy a file.
-
- Args:
- oldpath: string; a pathname of a file.
- newpath: string; a pathname to which the file will be copied.
- overwrite: boolean; if false, it is an error for newpath to be
- occupied by an existing file.
-
- Raises:
- OSError: If "newpath" is occupied by an existing file and overwrite=False,
- or any error thrown by shutil.copy.
- """
- if not overwrite and Exists(newpath):
- raise OSError(errno.EEXIST, os.strerror(errno.EEXIST), newpath)
- shutil.copy(oldpath, newpath)
-
-
-def Open(name, mode='r'):
- """Exact API match to the standard open.
-
- Args:
- name: a file name, either local or a gfile compatible.
- mode: for example "w" to open the file for writing.
-
- Returns:
- A threadsafe gfile.GFile object.
- """
- return GFile(name, mode=mode)
diff --git a/tensorflow/python/platform/default/gfile_test.py b/tensorflow/python/platform/default/gfile_test.py
deleted file mode 100644
index 38b8b1d30a..0000000000
--- a/tensorflow/python/platform/default/gfile_test.py
+++ /dev/null
@@ -1,248 +0,0 @@
-# Copyright 2015 Google Inc. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ==============================================================================
-
-from __future__ import absolute_import
-from __future__ import division
-from __future__ import print_function
-
-import contextlib
-import os
-import shutil
-import time
-
-from tensorflow.python.platform.default import _gfile as gfile
-from tensorflow.python.platform.default import _googletest as googletest
-from tensorflow.python.platform.default import _logging as logging
-
-
-class _BaseTest(object):
-
- @property
- def tmp(self):
- return self._tmp_dir
-
- def setUp(self):
- self._orig_dir = os.getcwd()
- self._tmp_dir = googletest.GetTempDir() + "/"
- try:
- os.makedirs(self._tmp_dir)
- except OSError:
- pass # Directory already exists
-
- def tearDown(self):
- try:
- shutil.rmtree(self._tmp_dir)
- except OSError:
- logging.warn("[%s] Post-test directory cleanup failed: %s",
- self, self._tmp_dir)
-
-
-class _GFileBaseTest(_BaseTest):
-
- @property
- def gfile(self):
- raise NotImplementedError("Do not use _GFileBaseTest directly.")
-
- def testWith(self):
- with self.gfile(self.tmp + "test_with", "w") as fh:
- fh.write("hi")
- with self.gfile(self.tmp + "test_with", "r") as fh:
- self.assertEqual(fh.read(), "hi")
-
- def testSizeAndTellAndSeek(self):
- with self.gfile(self.tmp + "test_tell", "w") as fh:
- fh.write("".join(["0"] * 1000))
- with self.gfile(self.tmp + "test_tell", "r") as fh:
- self.assertEqual(1000, fh.Size())
- self.assertEqual(0, fh.tell())
- fh.seek(0, 2)
- self.assertEqual(1000, fh.tell())
- fh.seek(0)
- self.assertEqual(0, fh.tell())
-
- def testReadAndWritelines(self):
- with self.gfile(self.tmp + "test_writelines", "w") as fh:
- fh.writelines(["%d\n" % d for d in range(10)])
- with self.gfile(self.tmp + "test_writelines", "r") as fh:
- self.assertEqual(["%d\n" % x for x in range(10)], fh.readlines())
-
- def testWriteAndTruncate(self):
- with self.gfile(self.tmp + "test_truncate", "w") as fh:
- fh.write("ababab")
- with self.gfile(self.tmp + "test_truncate", "a+") as fh:
- fh.seek(0, 2)
- fh.write("hjhjhj")
- with self.gfile(self.tmp + "test_truncate", "a+") as fh:
- self.assertEqual(fh.Size(), 12)
- fh.truncate(6)
- with self.gfile(self.tmp + "test_truncate", "r") as fh:
- self.assertEqual(fh.read(), "ababab")
-
- def testErrors(self):
- self.assertRaises(
- IOError, lambda: self.gfile(self.tmp + "doesnt_exist", "r"))
- with self.gfile(self.tmp + "test_error", "w") as fh:
- # Raises FileError inside Google and ValueError outside, so we
- # can only test for Exception.
- self.assertRaises(Exception, lambda: fh.seek(-1))
- # test_error now exists, we can read from it:
- with self.gfile(self.tmp + "test_error", "r") as fh:
- self.assertRaises(IOError, lambda: fh.write("ack"))
- fh = self.gfile(self.tmp + "test_error", "w")
- self.assertFalse(fh.closed)
- fh.close()
- self.assertTrue(fh.closed)
- self.assertRaises(ValueError, lambda: fh.write("ack"))
-
- def testIteration(self):
- with self.gfile(self.tmp + "test_iter", "w") as fh:
- fh.writelines(["a\n", "b\n", "c\n"])
- with self.gfile(self.tmp + "test_iter", "r") as fh:
- lines = list(fh)
- self.assertEqual(["a\n", "b\n", "c\n"], lines)
-
-
-class GFileTest(_GFileBaseTest, googletest.TestCase):
-
- @property
- def gfile(self):
- return gfile.GFile
-
-
-class FastGFileTest(_GFileBaseTest, googletest.TestCase):
-
- @property
- def gfile(self):
- return gfile.FastGFile
-
-
-class FunctionTests(_BaseTest, googletest.TestCase):
-
- def testExists(self):
- self.assertFalse(gfile.Exists(self.tmp + "test_exists"))
- with gfile.GFile(self.tmp + "test_exists", "w"):
- pass
- self.assertTrue(gfile.Exists(self.tmp + "test_exists"))
-
- def testMkDirsGlobAndRmDirs(self):
- self.assertFalse(gfile.Exists(self.tmp + "test_dir"))
- gfile.MkDir(self.tmp + "test_dir")
- self.assertTrue(gfile.Exists(self.tmp + "test_dir"))
- gfile.RmDir(self.tmp + "test_dir")
- self.assertFalse(gfile.Exists(self.tmp + "test_dir"))
- gfile.MakeDirs(self.tmp + "test_dir/blah0")
- gfile.MakeDirs(self.tmp + "test_dir/blah1")
- self.assertEqual([self.tmp + "test_dir/blah0", self.tmp + "test_dir/blah1"],
- sorted(gfile.Glob(self.tmp + "test_dir/*")))
- gfile.DeleteRecursively(self.tmp + "test_dir")
- self.assertFalse(gfile.Exists(self.tmp + "test_dir"))
-
- @contextlib.contextmanager
- def _working_directory(self, wd):
- original_cwd = os.getcwd()
- os.chdir(wd)
- try:
- yield
- finally:
- os.chdir(original_cwd)
-
- def testMakeDirsWithEmptyString(self):
- gfile.MakeDirs(self.tmp + "test_dir")
- with self._working_directory(self.tmp + "test_dir"):
- gfile.MakeDirs("")
- # Should succeed because MakeDirs("") is a no-op.
- gfile.RmDir(self.tmp + "test_dir")
-
- def testErrors(self):
- self.assertRaises(
- OSError, lambda: gfile.RmDir(self.tmp + "dir_doesnt_exist"))
- self.assertRaises(
- OSError, lambda: gfile.Remove(self.tmp + "file_doesnt_exist"))
- gfile.MkDir(self.tmp + "error_dir")
- with gfile.GFile(self.tmp + "error_dir/file", "w"):
- pass # Create file
- self.assertRaises(
- OSError, lambda: gfile.Remove(self.tmp + "error_dir"))
- self.assertRaises(
- OSError, lambda: gfile.RmDir(self.tmp + "error_dir"))
- self.assertTrue(gfile.Exists(self.tmp + "error_dir"))
- gfile.DeleteRecursively(self.tmp + "error_dir")
- self.assertFalse(gfile.Exists(self.tmp + "error_dir"))
-
- def testStat(self):
- with gfile.GFile(self.tmp + "test_stat", "w"):
- pass
- creation_time = time.time()
- statinfo = gfile.Stat(self.tmp + "test_stat")
- # Test the modification timestamp is within 20 seconds of closing the file.
- self.assertLessEqual(statinfo.mtime, creation_time + 10)
- self.assertGreaterEqual(statinfo.mtime, creation_time - 10)
-
- def testRename(self):
- gfile.MkDir(self.tmp + "dir1")
- gfile.MkDir(self.tmp + "dir2")
- with gfile.GFile(self.tmp + "file1", "w"):
- pass # Create file
- with gfile.GFile(self.tmp + "file2", "w"):
- pass # Create file
-
- # Dest file already exists, overwrite=False (default).
- self.assertRaises(
- OSError, lambda: gfile.Rename(self.tmp + "file1", self.tmp + "file2"))
- gfile.Rename(self.tmp + "file1", self.tmp + "file2", overwrite=True)
- self.assertFalse(gfile.Exists(self.tmp + "file1"))
- gfile.Rename(self.tmp + "file2", self.tmp + "newfile")
- self.assertTrue(gfile.Exists(self.tmp + "newfile"))
-
- gfile.Rename(self.tmp + "dir1", self.tmp + "dir2")
- self.assertFalse(gfile.Exists(self.tmp + "dir1"))
- gfile.Rename(self.tmp + "dir2", self.tmp + "newdir")
- self.assertTrue(gfile.Exists(self.tmp + "newdir"))
-
- def testCopy(self):
- gfile.MkDir(self.tmp + "dir1")
- gfile.MkDir(self.tmp + "dir2")
- with gfile.GFile(self.tmp + "dir1/file1", "w"):
- pass # Create file
- with gfile.GFile(self.tmp + "dir2/file2", "w"):
- pass # Create file
-
- # Dest file already exists, overwrite=False (default).
- self.assertRaises(
- OSError, lambda: gfile.Copy(self.tmp + "dir1/file1",
- self.tmp + "dir2/file2"))
- # Overwrite succeeds
- gfile.Copy(self.tmp + "dir1/file1", self.tmp + "dir2/file2",
- overwrite=True)
- self.assertTrue(gfile.Exists(self.tmp + "dir2/file2"))
-
- # Normal copy.
- gfile.Rename(self.tmp + "dir1/file1", self.tmp + "dir2/file1")
- self.assertTrue(gfile.Exists(self.tmp + "dir2/file1"))
-
- # Normal copy to non-existent dir
- self.assertRaises(OSError,
- lambda: gfile.Rename(self.tmp + "dir1/file1",
- self.tmp + "newdir/file1"))
-
- def testOpen(self):
- with gfile.Open(self.tmp + "test_open", "wb") as f:
- f.write(b"foo")
- with gfile.Open(self.tmp + "test_open") as f:
- result = f.readlines()
- self.assertEqual(["foo"], result)
-
-if __name__ == "__main__":
- googletest.main()
diff --git a/tensorflow/python/platform/gfile.py b/tensorflow/python/platform/gfile.py
index b2657e83a2..16aab78773 100644
--- a/tensorflow/python/platform/gfile.py
+++ b/tensorflow/python/platform/gfile.py
@@ -13,445 +13,48 @@
# limitations under the License.
# ==============================================================================
-"""File processing utilities."""
-
+"""Import router for file_io."""
+# pylint: disable=wildcard-import
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
-import collections
-import errno
-import functools
-import glob as _glob
-import os
-import shutil
-import threading
-
-import six
-
-
-class _GFileBase(six.Iterator):
- """Base I/O wrapper class. Similar semantics to Python's file object."""
-
- # pylint: disable=protected-access
- def _synchronized(fn):
- """Synchronizes file I/O for methods in GFileBase."""
- @functools.wraps(fn)
- def sync(self, *args, **kwargs):
- # Sometimes a GFileBase method is called before the instance
- # has been properly initialized. Check that _locker is available.
- if hasattr(self, '_locker'): self._locker.lock()
- try:
- return fn(self, *args, **kwargs)
- finally:
- if hasattr(self, '_locker'): self._locker.unlock()
- return sync
- # pylint: enable=protected-access
-
- def __init__(self, name, mode, locker):
- """Create the GFileBase object with the given filename, mode, and locker.
-
- Args:
- name: string, the filename.
- mode: string, the mode to open the file with (e.g. "r", "w", "a+").
- locker: the thread locking object (e.g. _PythonLocker) for controlling
- thread access to the I/O methods of this class.
- """
- self._name = name
- self._mode = mode
- self._locker = locker
- self._fp = open(name, mode)
-
- def __enter__(self):
- """Make GFileBase usable with "with" statement."""
- return self
-
- def __exit__(self, unused_type, unused_value, unused_traceback):
- """Make GFileBase usable with "with" statement."""
- self.close()
-
- @_synchronized
- def __del__(self):
- # __del__ is sometimes called before initialization, in which
- # case the object is not fully constructed. Check for this here
- # before trying to close the file handle.
- if hasattr(self, '_fp'): self._fp.close()
-
- @_synchronized
- def flush(self):
- """Flush the underlying file handle."""
- return self._fp.flush()
-
- @property
- @_synchronized
- def closed(self):
- """Returns "True" if the file handle is closed. Otherwise False."""
- return self._fp.closed
-
- @_synchronized
- def write(self, data):
- """Write data to the underlying file handle.
-
- Args:
- data: The string to write to the file handle.
- """
- self._fp.write(data)
-
- @_synchronized
- def writelines(self, seq):
- """Write a sequence of strings to the underlying file handle."""
- self._fp.writelines(seq)
-
- @_synchronized
- def tell(self):
- """Return the location from the underlying file handle.
-
- Returns:
- An integer location (which can be used in e.g., seek).
- """
- return self._fp.tell()
-
- @_synchronized
- def seek(self, offset, whence=0):
- """Seek to offset (conditioned on whence) in the underlying file handle.
-
- Args:
- offset: int, the offset within the file to seek to.
- whence: 0, 1, or 2. See python's seek() documentation for details.
- """
- self._fp.seek(offset, whence)
-
- @_synchronized
- def truncate(self, new_size=None):
- """Truncate the underlying file handle to new_size.
-
- Args:
- new_size: Size after truncation. If None, the file handle is truncated
- to 0 bytes.
- """
- self._fp.truncate(new_size)
-
- @_synchronized
- def readline(self, max_length=-1):
- """Read a single line (up to max_length) from the underlying file handle.
-
- Args:
- max_length: The maximum number of characters to read.
-
- Returns:
- A string, including any newline at the end, or empty string if at EOF.
- """
- return self._fp.readline(max_length)
-
- @_synchronized
- def readlines(self, sizehint=None):
- """Read lines from the underlying file handle.
-
- Args:
- sizehint: See the python file.readlines() documentation.
+from tensorflow.python.lib.io import file_io
- Returns:
- A list of strings from the underlying file handle.
- """
- if sizehint is not None:
- return self._fp.readlines(sizehint)
- else:
- return self._fp.readlines()
- def __iter__(self):
- """Enable line iteration on the underlying handle (not synchronized)."""
- return self
-
- # Not synchronized
- def __next__(self):
- """Enable line iteration on the underlying handle (not synchronized).
-
- Returns:
- A line iterator from the underlying handle.
-
- Example:
- # read a file's lines by consuming the iterator with a list
- with open("filename", "r") as fp: lines = list(fp)
- """
- return next(self._fp)
-
- @_synchronized
- def Size(self): # pylint: disable=invalid-name
- """Get byte size of the file from the underlying file handle."""
- cur = self.tell()
- try:
- self.seek(0, 2)
- size = self.tell()
- finally:
- self.seek(cur)
- return size
-
- @_synchronized
- def read(self, n=-1):
- """Read n bytes from the underlying file handle.
-
- Args:
- n: Number of bytes to read (if negative, read to end of file handle.)
-
- Returns:
- A string of the bytes read, up to the end of file.
- """
- return self._fp.read(n)
-
- @_synchronized
- def close(self):
- """Close the underlying file handle."""
- self._fp.close()
-
- # Declare wrappers as staticmethods at the end so that we can
- # use them as decorators.
- _synchronized = staticmethod(_synchronized)
-
-
-class GFile(_GFileBase):
+class GFile(file_io.FileIO):
"""File I/O wrappers with thread locking."""
def __init__(self, name, mode='r'):
- super(GFile, self).__init__(name, mode, _Pythonlocker())
+ mode = mode.replace('b', '')
+ super(GFile, self).__init__(name=name, mode=mode)
-class FastGFile(_GFileBase):
+class FastGFile(file_io.FileIO):
"""File I/O wrappers without thread locking."""
def __init__(self, name, mode='r'):
- super(FastGFile, self).__init__(name, mode, _Nulllocker())
-
-
-# locker classes. Note that locks must be reentrant, so that multiple
-# lock() calls by the owning thread will not block.
-class _Pythonlocker(object):
- """A locking strategy that uses standard locks from the thread module."""
-
- def __init__(self):
- self._lock = threading.RLock()
-
- def lock(self):
- self._lock.acquire()
-
- def unlock(self):
- self._lock.release()
-
-
-class _Nulllocker(object):
- """A locking strategy where lock() and unlock() methods are no-ops."""
-
- def lock(self):
- pass
-
- def unlock(self):
- pass
-
-
-def Exists(path): # pylint: disable=invalid-name
- """Returns True iff "path" exists (as a dir, file, non-broken symlink)."""
- return os.path.exists(path)
-
-
-def IsDirectory(path): # pylint: disable=invalid-name
- """Return True iff "path" exists and is a directory."""
- return os.path.isdir(path)
-
-
-def Glob(glob): # pylint: disable=invalid-name
- """Return a list of filenames matching the glob "glob"."""
- return _glob.glob(glob)
-
-
-def MkDir(path, mode=0o755): # pylint: disable=invalid-name
- """Create the directory "path" with the given mode.
-
- Args:
- path: The directory path
- mode: The file mode for the directory
-
- Returns:
- None
-
- Raises:
- OSError: if the path already exists
- """
- os.mkdir(path, mode)
-
-
-def MakeDirs(path, mode=0o755): # pylint: disable=invalid-name
- """Recursively create the directory "path" with the given mode.
-
- Args:
- path: The directory path.
- mode: The file mode for the created directories
-
- Raises:
- OSError: if the path already exists
- """
- # NOTE(mrry): MakeDirs("") should be a no-op to match other
- # implementations of tf.gfile.
- if path:
- try:
- os.makedirs(path, mode)
- except OSError as e:
- if e.errno != errno.EEXIST:
- raise
-
-
-def RmDir(directory): # pylint: disable=invalid-name
- """Removes the directory "directory" iff the directory is empty.
-
- Args:
- directory: The directory to remove.
-
- Raises:
- OSError: If the directory does not exist or is not empty.
- """
- os.rmdir(directory)
-
-
-def Remove(path): # pylint: disable=invalid-name
- """Delete the (non-directory) file "path".
-
- Args:
- path: The file to remove.
-
- Raises:
- OSError: If "path" does not exist, is a directory, or cannot be deleted.
- """
- os.remove(path)
-
-
-def Rename(oldpath, newpath, overwrite=False):
- """Rename or move a file, or a local directory.
-
- Args:
- oldpath: string; a pathname of a file.
- newpath: string; a pathname to which the file will be moved.
- overwrite: boolean; if false, it is an error for newpath to be
- occupied by an existing file.
-
- Raises:
- OSError: If "newpath" is occupied by an existing file and overwrite=False.
- """
- if not overwrite and Exists(newpath) and not IsDirectory(newpath):
- raise OSError(errno.EEXIST, os.strerror(errno.EEXIST), newpath)
- os.rename(oldpath, newpath)
-
-
-def DeleteRecursively(path): # pylint: disable=invalid-name
- """Delete the file or directory "path" recursively.
-
- Args:
- path: The path to remove (may be a non-empty directory).
-
- Raises:
- OSError: If the path does not exist or cannot be deleted.
- """
- if IsDirectory(path):
- shutil.rmtree(path)
- else:
- Remove(path)
-
-
-def ListDirectory(directory, return_dotfiles=False): # pylint: disable=invalid-name
- """Returns a list of files in dir.
-
- As with the standard os.listdir(), the filenames in the returned list will be
- the basenames of the files in dir (not absolute paths). To get a list of
- absolute paths of files in a directory, a client could do:
- file_list = gfile.ListDir(my_dir)
- file_list = [os.path.join(my_dir, f) for f in file_list]
- (assuming that my_dir itself specified an absolute path to a directory).
-
- Args:
- directory: the directory to list
- return_dotfiles: if True, dotfiles will be returned as well. Even if
- this arg is True, '.' and '..' will not be returned.
-
- Returns:
- ['list', 'of', 'files']. The entries '.' and '..' are never returned.
- Other entries starting with a dot will only be returned if return_dotfiles
- is True.
- Raises:
- OSError: if there is an error retrieving the directory listing.
- """
- files = os.listdir(directory)
- if not return_dotfiles:
- files = [f for f in files if not f.startswith('.')]
- return files
-
-
-def Walk(top, topdown=1, onerror=None):
- """Recursive directory tree generator.
-
-  Args:
-    top: string, a pathname.
-    topdown: bool, should traversal be pre-order (True) or post-order (False).
-    onerror: function, optional callback for errors.
-
-  By default, errors that occur when listing a directory are ignored.
-  (These semantics match Python's os.walk() generator.) If the optional
-  argument "onerror" is specified, it should be a function. It will be
-  called with one argument, an os.error instance. It can return, in which
-  case the walk continues, or reraise the exception to abort the walk.
-  By default, the walk follows symlinks that resolve into directories.
-
- Yields:
- # Each yield is a 3-tuple: the pathname of a directory, followed
- # by lists of all its subdirectories and leaf files.
- (dirname, [subdirname, subdirname, ...], [filename, filename, ...])
- """
- return os.walk(top, topdown=topdown, onerror=onerror, followlinks=True)
-
-
-def Stat(path):  # pylint: disable=invalid-name
-  """Gets the status of a file.
-
-  Does the equivalent of stat() on the specified "path" and returns file
-  properties.
-
-  Args:
-    path: The file to call Stat() on.
-
-  Returns:
-    An object whose attributes give information on the file.
-
-  Raises:
-    OSError: If "path" does not exist.
-  """
- statinfo = os.stat(path)
- filestat = collections.namedtuple('FileStat', ['mtime'])
- filestat.mtime = statinfo.st_mtime
- return filestat
-
-
-def Copy(oldpath, newpath, overwrite=False):
- """Copy a file.
-
- Args:
- oldpath: string; a pathname of a file.
- newpath: string; a pathname to which the file will be copied.
- overwrite: boolean; if false, it is an error for newpath to be
- occupied by an existing file.
-
- Raises:
- OSError: If "newpath" is occupied by an existing file and overwrite=False,
- or any error thrown by shutil.copy.
- """
- if not overwrite and Exists(newpath):
- raise OSError(errno.EEXIST, os.strerror(errno.EEXIST), newpath)
- shutil.copy(oldpath, newpath)
-
-
-def Open(name, mode='r'):
- """Exact API match to the standard open.
-
- Args:
- name: a file name, either local or a gfile compatible.
- mode: for example "w" to open the file for writing.
-
- Returns:
- A threadsafe gfile.GFile object.
- """
- return GFile(name, mode=mode)
+ mode = mode.replace('b', '')
+ super(FastGFile, self).__init__(name=name, mode=mode)
+
+
+# This should be kept consistent with the OSS implementation
+# of the gfile interface.
+
+# Open is aliased to our GFile wrapper rather than to file_io directly,
+# so that the 'b' mode qualifier gets stripped.
+Open = GFile
+
+# pylint: disable=invalid-name
+Exists = file_io.file_exists
+IsDirectory = file_io.is_directory
+Glob = file_io.get_matching_files
+MkDir = file_io.create_dir
+MakeDirs = file_io.recursive_create_dir
+Remove = file_io.delete_file
+DeleteRecursively = file_io.delete_recursively
+ListDirectory = file_io.list_directory
+Walk = file_io.walk
+Stat = file_io.stat
+Rename = file_io.rename
+Copy = file_io.copy
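
As a quick sanity check of the redirected module, here is a minimal sketch (not part of this change; the path is hypothetical) of how the aliases above are expected to behave, including the string return types and OpError-based failures described in the change summary:

    from tensorflow.python.framework import errors
    from tensorflow.python.platform import gfile

    gfile.MakeDirs('/tmp/gfile_demo')
    with gfile.Open('/tmp/gfile_demo/hello.txt', 'w') as f:
      f.write('hello')
    # list_directory() now returns strings rather than bytes.
    print(gfile.ListDirectory('/tmp/gfile_demo'))  # ['hello.txt']
    try:
      gfile.Remove('/tmp/gfile_demo/missing.txt')
    except errors.OpError as e:  # file_io raises OpError subclasses, not OSError
      print(e)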
diff --git a/tensorflow/python/platform/gfile_test.py b/tensorflow/python/platform/gfile_test.py
deleted file mode 100644
index 7e0681f428..0000000000
--- a/tensorflow/python/platform/gfile_test.py
+++ /dev/null
@@ -1,248 +0,0 @@
-# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ==============================================================================
-
-from __future__ import absolute_import
-from __future__ import division
-from __future__ import print_function
-
-import contextlib
-import os
-import shutil
-import time
-
-from tensorflow.python.platform import gfile
-from tensorflow.python.platform import googletest
-from tensorflow.python.platform import tf_logging as logging
-
-
-class _BaseTest(object):
-
- @property
- def tmp(self):
- return self._tmp_dir
-
- def setUp(self):
- self._orig_dir = os.getcwd()
- self._tmp_dir = googletest.GetTempDir() + "/"
- try:
- os.makedirs(self._tmp_dir)
- except OSError:
- pass # Directory already exists
-
- def tearDown(self):
- try:
- shutil.rmtree(self._tmp_dir)
- except OSError:
- logging.warn("[%s] Post-test directory cleanup failed: %s",
- self, self._tmp_dir)
-
-
-class _GFileBaseTest(_BaseTest):
-
- @property
- def gfile(self):
- raise NotImplementedError("Do not use _GFileBaseTest directly.")
-
- def testWith(self):
- with self.gfile(self.tmp + "test_with", "w") as fh:
- fh.write("hi")
- with self.gfile(self.tmp + "test_with", "r") as fh:
- self.assertEqual(fh.read(), "hi")
-
- def testSizeAndTellAndSeek(self):
- with self.gfile(self.tmp + "test_tell", "w") as fh:
- fh.write("".join(["0"] * 1000))
- with self.gfile(self.tmp + "test_tell", "r") as fh:
- self.assertEqual(1000, fh.Size())
- self.assertEqual(0, fh.tell())
- fh.seek(0, 2)
- self.assertEqual(1000, fh.tell())
- fh.seek(0)
- self.assertEqual(0, fh.tell())
-
- def testReadAndWritelines(self):
- with self.gfile(self.tmp + "test_writelines", "w") as fh:
- fh.writelines(["%d\n" % d for d in range(10)])
- with self.gfile(self.tmp + "test_writelines", "r") as fh:
- self.assertEqual(["%d\n" % x for x in range(10)], fh.readlines())
-
- def testWriteAndTruncate(self):
- with self.gfile(self.tmp + "test_truncate", "w") as fh:
- fh.write("ababab")
- with self.gfile(self.tmp + "test_truncate", "a+") as fh:
- fh.seek(0, 2)
- fh.write("hjhjhj")
- with self.gfile(self.tmp + "test_truncate", "a+") as fh:
- self.assertEqual(fh.Size(), 12)
- fh.truncate(6)
- with self.gfile(self.tmp + "test_truncate", "r") as fh:
- self.assertEqual(fh.read(), "ababab")
-
- def testErrors(self):
- self.assertRaises(
- IOError, lambda: self.gfile(self.tmp + "doesnt_exist", "r"))
- with self.gfile(self.tmp + "test_error", "w") as fh:
- # Raises FileError inside Google and ValueError outside, so we
- # can only test for Exception.
- self.assertRaises(Exception, lambda: fh.seek(-1))
- # test_error now exists, we can read from it:
- with self.gfile(self.tmp + "test_error", "r") as fh:
- self.assertRaises(IOError, lambda: fh.write("ack"))
- fh = self.gfile(self.tmp + "test_error", "w")
- self.assertFalse(fh.closed)
- fh.close()
- self.assertTrue(fh.closed)
- self.assertRaises(ValueError, lambda: fh.write("ack"))
-
- def testIteration(self):
- with self.gfile(self.tmp + "test_iter", "w") as fh:
- fh.writelines(["a\n", "b\n", "c\n"])
- with self.gfile(self.tmp + "test_iter", "r") as fh:
- lines = list(fh)
- self.assertEqual(["a\n", "b\n", "c\n"], lines)
-
-
-class GFileTest(_GFileBaseTest, googletest.TestCase):
-
- @property
- def gfile(self):
- return gfile.GFile
-
-
-class FastGFileTest(_GFileBaseTest, googletest.TestCase):
-
- @property
- def gfile(self):
- return gfile.FastGFile
-
-
-class FunctionTests(_BaseTest, googletest.TestCase):
-
- def testExists(self):
- self.assertFalse(gfile.Exists(self.tmp + "test_exists"))
- with gfile.GFile(self.tmp + "test_exists", "w"):
- pass
- self.assertTrue(gfile.Exists(self.tmp + "test_exists"))
-
- def testMkDirsGlobAndRmDirs(self):
- self.assertFalse(gfile.Exists(self.tmp + "test_dir"))
- gfile.MkDir(self.tmp + "test_dir")
- self.assertTrue(gfile.Exists(self.tmp + "test_dir"))
- gfile.RmDir(self.tmp + "test_dir")
- self.assertFalse(gfile.Exists(self.tmp + "test_dir"))
- gfile.MakeDirs(self.tmp + "test_dir/blah0")
- gfile.MakeDirs(self.tmp + "test_dir/blah1")
- self.assertEqual([self.tmp + "test_dir/blah0", self.tmp + "test_dir/blah1"],
- sorted(gfile.Glob(self.tmp + "test_dir/*")))
- gfile.DeleteRecursively(self.tmp + "test_dir")
- self.assertFalse(gfile.Exists(self.tmp + "test_dir"))
-
- @contextlib.contextmanager
- def _working_directory(self, wd):
- original_cwd = os.getcwd()
- os.chdir(wd)
- try:
- yield
- finally:
- os.chdir(original_cwd)
-
- def testMakeDirsWithEmptyString(self):
- gfile.MakeDirs(self.tmp + "test_dir")
- with self._working_directory(self.tmp + "test_dir"):
- gfile.MakeDirs("")
- # Should succeed because MakeDirs("") is a no-op.
- gfile.RmDir(self.tmp + "test_dir")
-
- def testErrors(self):
- self.assertRaises(
- OSError, lambda: gfile.RmDir(self.tmp + "dir_doesnt_exist"))
- self.assertRaises(
- OSError, lambda: gfile.Remove(self.tmp + "file_doesnt_exist"))
- gfile.MkDir(self.tmp + "error_dir")
- with gfile.GFile(self.tmp + "error_dir/file", "w"):
- pass # Create file
- self.assertRaises(
- OSError, lambda: gfile.Remove(self.tmp + "error_dir"))
- self.assertRaises(
- OSError, lambda: gfile.RmDir(self.tmp + "error_dir"))
- self.assertTrue(gfile.Exists(self.tmp + "error_dir"))
- gfile.DeleteRecursively(self.tmp + "error_dir")
- self.assertFalse(gfile.Exists(self.tmp + "error_dir"))
-
- def testStat(self):
- with gfile.GFile(self.tmp + "test_stat", "w"):
- pass
- creation_time = time.time()
- statinfo = gfile.Stat(self.tmp + "test_stat")
-    # The modification timestamp should be within 10 seconds of the file's
-    # creation time.
- self.assertLessEqual(statinfo.mtime, creation_time + 10)
- self.assertGreaterEqual(statinfo.mtime, creation_time - 10)
-
- def testRename(self):
- gfile.MkDir(self.tmp + "dir1")
- gfile.MkDir(self.tmp + "dir2")
- with gfile.GFile(self.tmp + "file1", "w"):
- pass # Create file
- with gfile.GFile(self.tmp + "file2", "w"):
- pass # Create file
-
- # Dest file already exists, overwrite=False (default).
- self.assertRaises(
- OSError, lambda: gfile.Rename(self.tmp + "file1", self.tmp + "file2"))
- gfile.Rename(self.tmp + "file1", self.tmp + "file2", overwrite=True)
- self.assertFalse(gfile.Exists(self.tmp + "file1"))
- gfile.Rename(self.tmp + "file2", self.tmp + "newfile")
- self.assertTrue(gfile.Exists(self.tmp + "newfile"))
-
- gfile.Rename(self.tmp + "dir1", self.tmp + "dir2")
- self.assertFalse(gfile.Exists(self.tmp + "dir1"))
- gfile.Rename(self.tmp + "dir2", self.tmp + "newdir")
- self.assertTrue(gfile.Exists(self.tmp + "newdir"))
-
- def testCopy(self):
- gfile.MkDir(self.tmp + "dir1")
- gfile.MkDir(self.tmp + "dir2")
- with gfile.GFile(self.tmp + "dir1/file1", "w"):
- pass # Create file
- with gfile.GFile(self.tmp + "dir2/file2", "w"):
- pass # Create file
-
- # Dest file already exists, overwrite=False (default).
- self.assertRaises(
- OSError, lambda: gfile.Copy(self.tmp + "dir1/file1",
- self.tmp + "dir2/file2"))
- # Overwrite succeeds
- gfile.Copy(self.tmp + "dir1/file1", self.tmp + "dir2/file2",
- overwrite=True)
- self.assertTrue(gfile.Exists(self.tmp + "dir2/file2"))
-
-    # A plain rename moves the file.
-    gfile.Rename(self.tmp + "dir1/file1", self.tmp + "dir2/file1")
-    self.assertTrue(gfile.Exists(self.tmp + "dir2/file1"))
-
-    # Renaming into a non-existent directory fails.
-    self.assertRaises(OSError,
-                      lambda: gfile.Rename(self.tmp + "dir1/file1",
-                                           self.tmp + "newdir/file1"))
-
- def testOpen(self):
- with gfile.Open(self.tmp + "test_open", "wb") as f:
- f.write(b"foo")
- with gfile.Open(self.tmp + "test_open") as f:
- result = f.readlines()
- self.assertEqual(["foo"], result)
-
-if __name__ == "__main__":
- googletest.main()
diff --git a/tensorflow/python/summary/event_accumulator.py b/tensorflow/python/summary/event_accumulator.py
index 1617924357..c965f257dd 100644
--- a/tensorflow/python/summary/event_accumulator.py
+++ b/tensorflow/python/summary/event_accumulator.py
@@ -30,6 +30,7 @@ from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.summary.impl import directory_watcher
from tensorflow.python.summary.impl import io_wrapper
from tensorflow.python.summary.impl import reservoir
+from tensorflow.python.util import compat
namedtuple = collections.namedtuple
ScalarEvent = namedtuple('ScalarEvent', ['wall_time', 'step', 'value'])
@@ -96,7 +97,7 @@ STORE_EVERYTHING_SIZE_GUIDANCE = {
def IsTensorFlowEventsFile(path):
"""Check the path name to see if it is probably a TF Events file."""
- return 'tfevents' in os.path.basename(path)
+ return 'tfevents' in compat.as_str_any(os.path.basename(path))
class EventAccumulator(object):
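
The compat helper presumably guards against callers that still hand in bytes paths; it normalizes either representation to str. Roughly (the sample paths are illustrative only):

    from tensorflow.python.util import compat

    for path in [b'run1/foo.tfevents.123', u'run1/foo.tfevents.123']:
      # as_str_any() yields str for bytes and unicode input alike.
      assert 'tfevents' in compat.as_str_any(path)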
diff --git a/tensorflow/python/summary/event_multiplexer_test.py b/tensorflow/python/summary/event_multiplexer_test.py
index a75f37e259..88571ef227 100644
--- a/tensorflow/python/summary/event_multiplexer_test.py
+++ b/tensorflow/python/summary/event_multiplexer_test.py
@@ -32,7 +32,8 @@ def _AddEvents(path):
if not gfile.IsDirectory(path):
gfile.MakeDirs(path)
fpath = os.path.join(path, 'hypothetical.tfevents.out')
- with gfile.GFile(fpath, 'w'):
+ with gfile.GFile(fpath, 'w') as f:
+ f.write('')
return fpath
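
The added empty write presumably forces the file into existence: with the file_io-backed GFile, merely opening for write may not touch the filesystem until something is written. A sketch of the pattern (path hypothetical, lazy creation assumed):

    from tensorflow.python.platform import gfile

    path = '/tmp/hypothetical.tfevents.out'
    with gfile.GFile(path, 'w') as f:
      f.write('')  # assumed: creation happens on first write, not at open()
    assert gfile.Exists(path)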
diff --git a/tensorflow/python/summary/impl/directory_watcher.py b/tensorflow/python/summary/impl/directory_watcher.py
index 5ed80d7348..56a08b11ea 100644
--- a/tensorflow/python/summary/impl/directory_watcher.py
+++ b/tensorflow/python/summary/impl/directory_watcher.py
@@ -20,6 +20,7 @@ from __future__ import print_function
import bisect
+from tensorflow.python.framework import errors
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.summary.impl import gcs
from tensorflow.python.summary.impl import io_wrapper
@@ -86,7 +87,7 @@ class DirectoryWatcher(object):
try:
for event in self._LoadInternal():
yield event
- except (IOError, OSError):
+ except errors.OpError:
if not io_wrapper.Exists(self._directory):
raise DirectoryDeletedError(
'Directory %s has been permanently deleted' % self._directory)
@@ -183,7 +184,7 @@ class DirectoryWatcher(object):
size = io_wrapper.Size(old_path)
logging.debug('Setting latest size of %s to %d', old_path, size)
self._finalized_sizes[old_path] = size
- except (IOError, OSError) as e:
+ except errors.OpError as e:
logging.error('Unable to get size of %s: %s', old_path, e)
self._path = path
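
With gfile routed through file_io, filesystem failures surface as tf.errors.OpError subclasses rather than IOError/OSError, so the watcher's except clauses change accordingly. An illustrative sketch (path hypothetical):

    from tensorflow.python.framework import errors
    from tensorflow.python.platform import gfile

    try:
      gfile.Stat('/no/such/file')
    except errors.OpError:  # e.g. errors.NotFoundError
      pass                  # replaces the old `except (IOError, OSError)`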
diff --git a/tensorflow/python/summary/impl/io_wrapper.py b/tensorflow/python/summary/impl/io_wrapper.py
index 0d7843e166..f7138833d6 100644
--- a/tensorflow/python/summary/impl/io_wrapper.py
+++ b/tensorflow/python/summary/impl/io_wrapper.py
@@ -98,4 +98,4 @@ def Size(path):
if gcs.IsGCSPath(path):
raise NotImplementedError("io_wrapper.Size doesn't support GCS paths")
else:
- return gfile.Open(path).Size()
+ return gfile.Open(path).size()
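
The rename reflects the file_io-backed GFile's lower-case method names. Assuming an existing file at a hypothetical path:

    from tensorflow.python.platform import gfile

    f = gfile.Open('/tmp/some_existing_log')
    nbytes = f.size()  # the old gfile object spelled this f.Size()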
diff --git a/tensorflow/python/tools/freeze_graph.py b/tensorflow/python/tools/freeze_graph.py
index 21e4a84551..18f1e90596 100644
--- a/tensorflow/python/tools/freeze_graph.py
+++ b/tensorflow/python/tools/freeze_graph.py
@@ -94,7 +94,7 @@ def freeze_graph(input_graph, input_saver, input_binary, input_checkpoint,
if input_binary:
input_graph_def.ParseFromString(f.read())
else:
- text_format.Merge(f.read(), input_graph_def)
+ text_format.Merge(f.read().decode("utf-8"), input_graph_def)
# Remove all the explicit device specifications for this node. This helps to
# make the graph more portable.
if clear_devices:
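
The decode is needed because reads through the new gfile in 'rb' mode yield bytes, while text_format.Merge expects a text-format proto string. A self-contained illustration (the literal stands in for f.read()):

    from google.protobuf import text_format
    from tensorflow.core.framework import graph_pb2

    graph_def = graph_pb2.GraphDef()
    data = b'node { name: "x" op: "Placeholder" }'  # stand-in for f.read()
    text_format.Merge(data.decode('utf-8'), graph_def)
    assert graph_def.node[0].name == 'x'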
diff --git a/tensorflow/python/tools/strip_unused_lib.py b/tensorflow/python/tools/strip_unused_lib.py
index ded6e1da62..7449d4d14e 100644
--- a/tensorflow/python/tools/strip_unused_lib.py
+++ b/tensorflow/python/tools/strip_unused_lib.py
@@ -82,7 +82,7 @@ def strip_unused_from_files(input_graph, input_binary, output_graph,
if input_binary:
input_graph_def.ParseFromString(f.read())
else:
- text_format.Merge(f.read(), input_graph_def)
+ text_format.Merge(f.read().decode("utf-8"), input_graph_def)
output_graph_def = strip_unused(input_graph_def, input_node_names.split(","),
output_node_names.split(","),
diff --git a/tensorflow/python/training/session_manager_test.py b/tensorflow/python/training/session_manager_test.py
index e846dc48df..2a7ff3c9f9 100644
--- a/tensorflow/python/training/session_manager_test.py
+++ b/tensorflow/python/training/session_manager_test.py
@@ -59,7 +59,7 @@ class SessionManagerTest(tf.test.TestCase):
try:
gfile.DeleteRecursively(checkpoint_dir)
gfile.DeleteRecursively(checkpoint_dir2)
- except OSError:
+ except errors.OpError:
pass # Ignore
gfile.MakeDirs(checkpoint_dir)
@@ -105,7 +105,7 @@ class SessionManagerTest(tf.test.TestCase):
checkpoint_dir = os.path.join(self.get_temp_dir(), "recover_session")
try:
gfile.DeleteRecursively(checkpoint_dir)
- except OSError:
+ except errors.OpError:
pass # Ignore
gfile.MakeDirs(checkpoint_dir)
@@ -163,7 +163,7 @@ class SessionManagerTest(tf.test.TestCase):
"recover_session_ready_for_local_init")
try:
gfile.DeleteRecursively(checkpoint_dir)
- except OSError:
+ except errors.OpError:
pass # Ignore
gfile.MakeDirs(checkpoint_dir)
@@ -220,7 +220,7 @@ class SessionManagerTest(tf.test.TestCase):
"recover_session_ready_for_local_init_fails_to_ready_local")
try:
gfile.DeleteRecursively(checkpoint_dir)
- except OSError:
+ except errors.OpError:
pass # Ignore
gfile.MakeDirs(checkpoint_dir)
@@ -299,7 +299,7 @@ class SessionManagerTest(tf.test.TestCase):
"recover_session_ready_for_local_init_fails_stil_run")
try:
gfile.DeleteRecursively(checkpoint_dir)
- except OSError:
+ except errors.OpError:
pass # Ignore
gfile.MakeDirs(checkpoint_dir)
@@ -522,7 +522,7 @@ class ObsoleteSessionManagerTest(tf.test.TestCase):
try:
gfile.DeleteRecursively(checkpoint_dir)
gfile.DeleteRecursively(checkpoint_dir2)
- except OSError:
+ except errors.OpError:
pass # Ignore
gfile.MakeDirs(checkpoint_dir)
@@ -568,7 +568,7 @@ class ObsoleteSessionManagerTest(tf.test.TestCase):
checkpoint_dir = os.path.join(self.get_temp_dir(), "recover_session")
try:
gfile.DeleteRecursively(checkpoint_dir)
- except OSError:
+ except errors.OpError:
pass # Ignore
gfile.MakeDirs(checkpoint_dir)
diff --git a/tensorflow/tools/ci_build/builds/test_installation.sh b/tensorflow/tools/ci_build/builds/test_installation.sh
index 5821e82f14..8b8d96af14 100755
--- a/tensorflow/tools/ci_build/builds/test_installation.sh
+++ b/tensorflow/tools/ci_build/builds/test_installation.sh
@@ -101,7 +101,6 @@ PY_TEST_BLACKLIST="${PY_TEST_BLACKLIST}:"\
"tensorflow/python/platform/default/_resource_loader_test.py:"\
"tensorflow/python/platform/default/flags_test.py:"\
"tensorflow/python/platform/default/logging_test.py:"\
-"tensorflow/python/platform/default/gfile_test.py:"\
"tensorflow/contrib/learn/nonlinear_test.py:"\
"tensorflow/contrib/learn/python/learn/tests/nonlinear_test.py"
@@ -113,8 +112,7 @@ PY_TEST_GPU_BLACKLIST="${PY_TEST_GPU_BLACKLIST}:"\
# Tests that should be run in the exclusive mode (i.e., not parallel with
# other tests)
-PY_TEST_EXCLUSIVE_LIST="tensorflow/python/platform/gfile_test.py:"\
-"tensorflow/python/platform/default/gfile_test.py"
+PY_TEST_EXCLUSIVE_LIST=""
# Append custom list of exclusive tests
if [[ ! -z "${TF_BUILD_EXTRA_EXCLUSIVE_INSTALL_TESTS}" ]]; then
diff --git a/tensorflow/tools/test/gpu_info_lib.py b/tensorflow/tools/test/gpu_info_lib.py
index 047a4f11fd..f29ff9af24 100644
--- a/tensorflow/tools/test/gpu_info_lib.py
+++ b/tensorflow/tools/test/gpu_info_lib.py
@@ -26,6 +26,7 @@ import platform
import tensorflow as tf
from tensorflow.core.util import test_log_pb2
+from tensorflow.python.framework import errors
def _gather_gpu_devices_proc():
@@ -174,11 +175,11 @@ def gather_gpu_devices():
if not dev_info:
raise ValueError("No devices found")
return dev_info
- except (IOError, ValueError):
+ except (IOError, ValueError, errors.OpError):
pass
try:
# Fall back on using libcudart
return _gather_gpu_devices_cudart()
- except (OSError, ValueError, NotImplementedError):
+ except (OSError, ValueError, NotImplementedError, errors.OpError):
return []
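
Here errors.OpError is appended to, rather than replacing, the existing exception tuples, since these helpers mix plain-Python file access with gfile calls. A hedged sketch with a hypothetical gather() helper:

    from tensorflow.python.framework import errors

    def gather():  # hypothetical stand-in for the probing helpers above
      raise errors.NotFoundError(None, None, 'no /proc entry')

    try:
      gather()
    except (IOError, ValueError, errors.OpError):
      pass  # both Python-level and file_io-level failures are tolerated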
diff --git a/tensorflow/tools/test/system_info_lib.py b/tensorflow/tools/test/system_info_lib.py
index 2f10342d4a..0ef108faea 100644
--- a/tensorflow/tools/test/system_info_lib.py
+++ b/tensorflow/tools/test/system_info_lib.py
@@ -35,6 +35,7 @@ import psutil
from tensorflow.core.util import test_log_pb2
from tensorflow.python.client import device_lib
+from tensorflow.python.framework import errors
from tensorflow.tools.test import gpu_info_lib
@@ -85,7 +86,7 @@ def gather_cpu_info():
if nc: # e.g. 'ff' => 8, 'fff' => 12
cpu_info.num_cores_allowed = (
bin(int(nc.group(1).replace(',', ''), 16)).count('1'))
- except IOError:
+ except errors.OpError:
pass
finally:
if cpu_info.num_cores_allowed == 0:
@@ -112,7 +113,7 @@ def gather_cpu_info():
cpu_info.cpu_governor = 'mixed'
else:
cpu_info.cpu_governor = list(cpu_governors)[0]
- except IOError:
+ except errors.OpError:
pass
return cpu_info