author     Anna R <annarev@google.com>                       2018-04-08 15:42:16 -0700
committer  TensorFlower Gardener <gardener@tensorflow.org>   2018-04-08 15:44:25 -0700
commit     1eea5ad3f9a622411117f7208d308055b0707d0f (patch)
tree       585a1f61fad6e09d0932de595c62e0679b30da55
parent     8fd805fc79ca585fe90ec9fd7c9e0feef89f798e (diff)
Automated g4 rollback of changelist 191360905
PiperOrigin-RevId: 192065431
-rw-r--r--  tensorflow/BUILD                                           |   8
-rw-r--r--  tensorflow/__init__.py                                     |   7
-rw-r--r--  tensorflow/contrib/cmake/python_modules.txt                |   2
-rwxr-xr-x  tensorflow/contrib/cmake/tf_python.cmake                   |  91
-rw-r--r--  tensorflow/experimental_api.py                             |  38
-rw-r--r--  tensorflow/python/framework/dtypes.py                      |   2
-rw-r--r--  tensorflow/python/framework/versions.py                    |  12
-rw-r--r--  tensorflow/tools/api/generator/BUILD                       |   2
-rw-r--r--  tensorflow/tools/api/generator/create_python_api.py       | 124
-rw-r--r--  tensorflow/tools/api/generator/create_python_api_test.py  |   6
-rw-r--r--  tensorflow/tools/api/tests/BUILD                           |   1
-rw-r--r--  tensorflow/tools/api/tests/api_compatibility_test.py      |  58
-rw-r--r--  tensorflow/tools/ci_build/windows/cpu/cmake/run_py.bat    |   6
13 files changed, 183 insertions, 174 deletions
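
Editor's note, for orientation before the per-file diffs: this change points tensorflow/__init__.py at the generated tensorflow.tools.api.generator.api package instead of "from tensorflow.python import *", and teaches the CMake build to produce those generated __init__.py files. Based on format_import() and _ModuleInitCodeBuilder.build() in the generator diff further down, a generated api/__init__.py would look roughly like the sketch below; the specific import lines are invented for illustration, and only the trailing __all__ block is taken verbatim from the generator code in this commit.

    # Hypothetical shape of a generated api/__init__.py. Import lines follow
    # format_import(); symbol names here are made up.
    from tensorflow.python.framework.dtypes import uint64
    from tensorflow.python.platform.app import run
    from tensorflow.tools.api.generator.api import train

    # Appended to the root module by _ModuleInitCodeBuilder.build() so that
    # exported names starting with an underscore survive the wildcard import.
    _names_with_underscore = ['__version__', '__git_version__']
    __all__ = [s for s in dir() if not s.startswith('_')]
    __all__.extend([s for s in _names_with_underscore])
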
diff --git a/tensorflow/BUILD b/tensorflow/BUILD
index 3d5737a9d7..cfafffdd13 100644
--- a/tensorflow/BUILD
+++ b/tensorflow/BUILD
@@ -540,14 +540,6 @@ py_library(
srcs = ["__init__.py"],
srcs_version = "PY2AND3",
visibility = ["//visibility:public"],
- deps = ["//tensorflow/python"],
-)
-
-py_library(
- name = "experimental_tensorflow_py",
- srcs = ["experimental_api.py"],
- srcs_version = "PY2AND3",
- visibility = ["//tensorflow/tools/api/tests:__subpackages__"],
deps = [
"//tensorflow/python",
"//tensorflow/tools/api/generator:python_api",
diff --git a/tensorflow/__init__.py b/tensorflow/__init__.py
index 78ad6aec19..c8683e3976 100644
--- a/tensorflow/__init__.py
+++ b/tensorflow/__init__.py
@@ -20,14 +20,19 @@ from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
+# pylint: disable=g-bad-import-order
+from tensorflow.python import pywrap_tensorflow # pylint: disable=unused-import
# pylint: disable=wildcard-import
-from tensorflow.python import * # pylint: disable=redefined-builtin
+from tensorflow.tools.api.generator.api import * # pylint: disable=redefined-builtin
# pylint: enable=wildcard-import
from tensorflow.python.util.lazy_loader import LazyLoader
contrib = LazyLoader('contrib', globals(), 'tensorflow.contrib')
del LazyLoader
+from tensorflow.python.platform import flags # pylint: disable=g-import-not-at-top
+app.flags = flags # pylint: disable=undefined-variable
+
del absolute_import
del division
del print_function
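
The rewritten tensorflow/__init__.py keeps tensorflow.contrib lazily loaded via LazyLoader. Below is a minimal sketch of that lazy-loading pattern, written as a simplified stand-in for tensorflow.python.util.lazy_loader.LazyLoader (not the actual implementation).

    import importlib
    import types

    class _LazyModule(types.ModuleType):
        """Loads the named module only on first attribute access."""

        def __init__(self, local_name, parent_globals, name):
            super(_LazyModule, self).__init__(name)
            self._local_name = local_name
            self._parent_globals = parent_globals

        def _load(self):
            module = importlib.import_module(self.__name__)
            # Replace the placeholder in the parent namespace so later
            # lookups hit the real module directly.
            self._parent_globals[self._local_name] = module
            return module

        def __getattr__(self, item):
            return getattr(self._load(), item)

    # Mirrors the call in tensorflow/__init__.py above; tensorflow.contrib is
    # only imported once some attribute of 'contrib' is first accessed.
    contrib = _LazyModule('contrib', globals(), 'tensorflow.contrib')
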
diff --git a/tensorflow/contrib/cmake/python_modules.txt b/tensorflow/contrib/cmake/python_modules.txt
index 8e83b4e176..b786c6d5cb 100644
--- a/tensorflow/contrib/cmake/python_modules.txt
+++ b/tensorflow/contrib/cmake/python_modules.txt
@@ -104,6 +104,8 @@ tensorflow/python/user_ops
tensorflow/python/util
tensorflow/python/util/protobuf
tensorflow/tools
+tensorflow/tools/api
+tensorflow/tools/api/generator
tensorflow/tools/graph_transforms
tensorflow/contrib
tensorflow/contrib/all_reduce
diff --git a/tensorflow/contrib/cmake/tf_python.cmake b/tensorflow/contrib/cmake/tf_python.cmake
index 1a5ec34844..ded15b4b66 100755
--- a/tensorflow/contrib/cmake/tf_python.cmake
+++ b/tensorflow/contrib/cmake/tf_python.cmake
@@ -689,6 +689,77 @@ AddUserOps(TARGET _beam_search_ops
DEPENDS pywrap_tensorflow_internal tf_python_ops
DISTCOPY ${CMAKE_CURRENT_BINARY_DIR}/tf_python/tensorflow/contrib/seq2seq/python/ops/)
+if(WIN32)
+ if(${CMAKE_GENERATOR} MATCHES "Visual Studio.*")
+ add_custom_command(TARGET pywrap_tensorflow_internal POST_BUILD
+ COMMAND ${CMAKE_COMMAND} -E copy ${CMAKE_CURRENT_BINARY_DIR}/$(Configuration)/pywrap_tensorflow_internal.dll
+ ${CMAKE_CURRENT_BINARY_DIR}/tf_python/tensorflow/python/_pywrap_tensorflow_internal.pyd
+ COMMAND ${CMAKE_COMMAND} -E copy ${CMAKE_CURRENT_BINARY_DIR}/$(Configuration)/pywrap_tensorflow_internal.lib
+ ${CMAKE_CURRENT_BINARY_DIR}/tf_python/tensorflow/python/)
+ else()
+ add_custom_command(TARGET pywrap_tensorflow_internal POST_BUILD
+ COMMAND ${CMAKE_COMMAND} -E copy ${CMAKE_CURRENT_BINARY_DIR}/pywrap_tensorflow_internal.dll
+ ${CMAKE_CURRENT_BINARY_DIR}/tf_python/tensorflow/python/_pywrap_tensorflow_internal.pyd
+ COMMAND ${CMAKE_COMMAND} -E copy ${CMAKE_CURRENT_BINARY_DIR}/pywrap_tensorflow_internal.lib
+ ${CMAKE_CURRENT_BINARY_DIR}/tf_python/tensorflow/python/)
+ endif()
+else()
+ add_custom_command(TARGET pywrap_tensorflow_internal POST_BUILD
+ COMMAND ${CMAKE_COMMAND} -E copy ${CMAKE_CURRENT_BINARY_DIR}/libpywrap_tensorflow_internal.so
+ ${CMAKE_CURRENT_BINARY_DIR}/tf_python/tensorflow/python/_pywrap_tensorflow_internal.so)
+endif()
+
+
+########################################################
+# Generate API __init__.py files.
+########################################################
+
+# Parse tensorflow/tools/api/generator/BUILD to get list of generated files.
+FILE(READ ${tensorflow_source_dir}/tensorflow/tools/api/generator/BUILD api_generator_BUILD_text)
+STRING(REGEX MATCH "# BEGIN GENERATED FILES.*# END GENERATED FILES" api_init_files_text ${api_generator_BUILD_text})
+string(REPLACE "# BEGIN GENERATED FILES" "" api_init_files_text ${api_init_files_text})
+string(REPLACE "# END GENERATED FILES" "" api_init_files_text ${api_init_files_text})
+string(REPLACE "," ";" api_init_files_list ${api_init_files_text})
+
+set(api_init_files "")
+foreach(api_init_file ${api_init_files_list})
+ string(STRIP "${api_init_file}" api_init_file)
+ if(api_init_file)
+ string(REPLACE "\"" "" api_init_file "${api_init_file}") # Remove quotes
+ list(APPEND api_init_files "${CMAKE_CURRENT_BINARY_DIR}/tf_python/tensorflow/tools/api/generator/${api_init_file}")
+ endif()
+endforeach(api_init_file)
+set(api_init_list_file "${tensorflow_source_dir}/api_init_files_list.txt")
+file(WRITE "${api_init_list_file}" "${api_init_files}")
+
+# Run create_python_api.py to generate __init__.py files.
+add_custom_command(
+ OUTPUT ${api_init_files}
+ DEPENDS tf_python_ops tf_python_copy_scripts_to_destination pywrap_tensorflow_internal tf_python_touchup_modules tf_extension_ops
+
+ # tensorflow/__init__.py depends on files generated in this step. So, remove it while
+ # this step is running since the files aren't there yet.
+ COMMAND ${CMAKE_COMMAND} -E rename ${CMAKE_CURRENT_BINARY_DIR}/tf_python/tensorflow/__init__.py
+ ${CMAKE_CURRENT_BINARY_DIR}/tf_python/tensorflow/final.__init__.py
+ COMMAND ${CMAKE_COMMAND} -E touch ${CMAKE_CURRENT_BINARY_DIR}/tf_python/tensorflow/__init__.py
+
+ # Run create_python_api.py to generate API init files.
+ COMMAND ${CMAKE_COMMAND} -E env PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}/tf_python ${PYTHON_EXECUTABLE}
+ "${CMAKE_CURRENT_BINARY_DIR}/tf_python/tensorflow/tools/api/generator/create_python_api.py" "${api_init_list_file}"
+
+ # Re-add tensorflow/__init__.py back.
+ COMMAND ${CMAKE_COMMAND} -E remove -f ${CMAKE_CURRENT_BINARY_DIR}/tf_python/tensorflow/__init__.py
+ COMMAND ${CMAKE_COMMAND} -E rename ${CMAKE_CURRENT_BINARY_DIR}/tf_python/tensorflow/final.__init__.py
+ ${CMAKE_CURRENT_BINARY_DIR}/tf_python/tensorflow/__init__.py
+
+ COMMENT "Generating __init__.py files for Python API."
+ WORKING_DIRECTORY "${CMAKE_CURRENT_BINARY_DIR}/tf_python"
+)
+
+add_custom_target(tf_python_api SOURCES ${api_init_files})
+add_dependencies(tf_python_api tf_python_ops)
+
+
############################################################
# Build a PIP package containing the TensorFlow runtime.
############################################################
@@ -698,6 +769,7 @@ add_dependencies(tf_python_build_pip_package
tf_python_copy_scripts_to_destination
tf_python_touchup_modules
tf_python_ops
+ tf_python_api
tf_extension_ops)
# Fix-up Python files that were not included by the add_python_module() macros.
@@ -710,25 +782,6 @@ add_custom_command(TARGET tf_python_copy_scripts_to_destination PRE_BUILD
COMMAND ${CMAKE_COMMAND} -E copy ${tensorflow_source_dir}/tensorflow/contrib/testing/python/framework/util_test.py
${CMAKE_CURRENT_BINARY_DIR}/tf_python/tensorflow/contrib/testing/python/framework/)
-if(WIN32)
- if(${CMAKE_GENERATOR} MATCHES "Visual Studio.*")
- add_custom_command(TARGET tf_python_build_pip_package POST_BUILD
- COMMAND ${CMAKE_COMMAND} -E copy ${CMAKE_CURRENT_BINARY_DIR}/$(Configuration)/pywrap_tensorflow_internal.dll
- ${CMAKE_CURRENT_BINARY_DIR}/tf_python/tensorflow/python/_pywrap_tensorflow_internal.pyd
- COMMAND ${CMAKE_COMMAND} -E copy ${CMAKE_CURRENT_BINARY_DIR}/$(Configuration)/pywrap_tensorflow_internal.lib
- ${CMAKE_CURRENT_BINARY_DIR}/tf_python/tensorflow/python/)
- else()
- add_custom_command(TARGET tf_python_build_pip_package POST_BUILD
- COMMAND ${CMAKE_COMMAND} -E copy ${CMAKE_CURRENT_BINARY_DIR}/pywrap_tensorflow_internal.dll
- ${CMAKE_CURRENT_BINARY_DIR}/tf_python/tensorflow/python/_pywrap_tensorflow_internal.pyd
- COMMAND ${CMAKE_COMMAND} -E copy ${CMAKE_CURRENT_BINARY_DIR}/pywrap_tensorflow_internal.lib
- ${CMAKE_CURRENT_BINARY_DIR}/tf_python/tensorflow/python/)
- endif()
-else()
- add_custom_command(TARGET tf_python_build_pip_package POST_BUILD
- COMMAND ${CMAKE_COMMAND} -E copy ${CMAKE_CURRENT_BINARY_DIR}/libpywrap_tensorflow_internal.so
- ${CMAKE_CURRENT_BINARY_DIR}/tf_python/tensorflow/python/_pywrap_tensorflow_internal.so)
-endif()
add_custom_command(TARGET tf_python_build_pip_package POST_BUILD
COMMAND ${CMAKE_COMMAND} -E copy ${tensorflow_source_dir}/tensorflow/tools/pip_package/README
${CMAKE_CURRENT_BINARY_DIR}/tf_python/)
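
The CMake block added above extracts the generated-file list from tensorflow/tools/api/generator/BUILD by slicing the text between the "# BEGIN GENERATED FILES" and "# END GENERATED FILES" markers and splitting on commas. A rough Python equivalent of that string handling, shown only to clarify the parsing (the build itself uses the CMake commands above):

    import re

    def parse_generated_files(build_text):
        """Return the quoted output paths listed between the marker comments."""
        block = re.search(r"# BEGIN GENERATED FILES(.*?)# END GENERATED FILES",
                          build_text, re.DOTALL).group(1)
        # Split on commas, drop whitespace and surrounding quotes, skip blanks.
        return [entry.strip().strip('"')
                for entry in block.split(",") if entry.strip()]
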
diff --git a/tensorflow/experimental_api.py b/tensorflow/experimental_api.py
deleted file mode 100644
index 63a8aa9cb1..0000000000
--- a/tensorflow/experimental_api.py
+++ /dev/null
@@ -1,38 +0,0 @@
-# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ==============================================================================
-
-# Bring in all of the public TensorFlow interface into this
-# module.
-
-from __future__ import absolute_import
-from __future__ import division
-from __future__ import print_function
-
-# pylint: disable=g-bad-import-order
-from tensorflow.python import pywrap_tensorflow # pylint: disable=unused-import
-# pylint: disable=wildcard-import
-from tensorflow.tools.api.generator.api import * # pylint: disable=redefined-builtin
-# pylint: enable=wildcard-import
-
-from tensorflow.python.util.lazy_loader import LazyLoader
-contrib = LazyLoader('contrib', globals(), 'tensorflow.contrib')
-del LazyLoader
-
-from tensorflow.python.platform import flags # pylint: disable=g-import-not-at-top
-app.flags = flags # pylint: disable=undefined-variable
-
-del absolute_import
-del division
-del print_function
diff --git a/tensorflow/python/framework/dtypes.py b/tensorflow/python/framework/dtypes.py
index 0edae92fd4..a31c424263 100644
--- a/tensorflow/python/framework/dtypes.py
+++ b/tensorflow/python/framework/dtypes.py
@@ -345,7 +345,7 @@ tf_export("uint16").export_constant(__name__, "uint16")
uint32 = DType(types_pb2.DT_UINT32)
tf_export("uint32").export_constant(__name__, "uint32")
uint64 = DType(types_pb2.DT_UINT64)
-tf_export("uint64").export_constant(__name__, "uint32")
+tf_export("uint64").export_constant(__name__, "uint64")
int16 = DType(types_pb2.DT_INT16)
tf_export("int16").export_constant(__name__, "int16")
int8 = DType(types_pb2.DT_INT8)
diff --git a/tensorflow/python/framework/versions.py b/tensorflow/python/framework/versions.py
index d08b4bf48a..472ccbcac7 100644
--- a/tensorflow/python/framework/versions.py
+++ b/tensorflow/python/framework/versions.py
@@ -31,13 +31,17 @@ __monolithic_build__ = pywrap_tensorflow.__monolithic_build__
VERSION = __version__
tf_export("VERSION", "__version__").export_constant(__name__, "VERSION")
GIT_VERSION = __git_version__
-tf_export("GIT_VERSION").export_constant(__name__, "GIT_VERSION")
+tf_export("GIT_VERSION", "__git_version__").export_constant(
+ __name__, "GIT_VERSION")
COMPILER_VERSION = __compiler_version__
-tf_export("COMPILER_VERSION").export_constant(__name__, "COMPILER_VERSION")
+tf_export("COMPILER_VERSION", "__compiler_version__").export_constant(
+ __name__, "COMPILER_VERSION")
CXX11_ABI_FLAG = __cxx11_abi_flag__
-tf_export("CXX11_ABI_FLAG").export_constant(__name__, "CXX11_ABI_FLAG")
+tf_export("CXX11_ABI_FLAG", "__cxx11_abi_flag__").export_constant(
+ __name__, "CXX11_ABI_FLAG")
MONOLITHIC_BUILD = __monolithic_build__
-tf_export("MONOLITHIC_BUILD").export_constant(__name__, "MONOLITHIC_BUILD")
+tf_export("MONOLITHIC_BUILD", "__monolithic_build__").export_constant(
+ __name__, "MONOLITHIC_BUILD")
GRAPH_DEF_VERSION = pywrap_tensorflow.GRAPH_DEF_VERSION
tf_export("GRAPH_DEF_VERSION").export_constant(__name__, "GRAPH_DEF_VERSION")
diff --git a/tensorflow/tools/api/generator/BUILD b/tensorflow/tools/api/generator/BUILD
index 9f1bdd8aae..a1c569951e 100644
--- a/tensorflow/tools/api/generator/BUILD
+++ b/tensorflow/tools/api/generator/BUILD
@@ -32,6 +32,7 @@ genrule(
# api/module1/module2/__init__.py and api/module3/__init__.py.
# keep sorted
outs = [
+ # BEGIN GENERATED FILES
"api/__init__.py",
"api/app/__init__.py",
"api/bitwise/__init__.py",
@@ -117,6 +118,7 @@ genrule(
"api/train/__init__.py",
"api/train/queue_runner/__init__.py",
"api/user_ops/__init__.py",
+ # END GENERATED FILES
],
cmd = "$(location create_python_api) $(OUTS)",
tools = ["create_python_api"],
diff --git a/tensorflow/tools/api/generator/create_python_api.py b/tensorflow/tools/api/generator/create_python_api.py
index 183c4731b8..6fa48cd70c 100644
--- a/tensorflow/tools/api/generator/create_python_api.py
+++ b/tensorflow/tools/api/generator/create_python_api.py
@@ -67,18 +67,23 @@ def format_import(source_module_name, source_name, dest_name):
return 'import %s as %s' % (source_name, dest_name)
-class _ModuleImportsBuilder(object):
+class _ModuleInitCodeBuilder(object):
"""Builds a map from module name to imports included in that module."""
def __init__(self):
- self.module_imports = collections.defaultdict(list)
- self._seen_api_names = set()
+ self.module_imports = collections.defaultdict(
+ lambda: collections.defaultdict(set))
+ self._dest_import_to_id = collections.defaultdict(int)
+ # Names that start with underscore in the root module.
+ self._underscore_names_in_root = []
def add_import(
- self, dest_module_name, source_module_name, source_name, dest_name):
+ self, symbol_id, dest_module_name, source_module_name, source_name,
+ dest_name):
"""Adds this import to module_imports.
Args:
+ symbol_id: (number) Unique identifier of the symbol to import.
dest_module_name: (string) Module name to add import to.
source_module_name: (string) Module to import from.
source_name: (string) Name of the symbol to import.
@@ -89,34 +94,67 @@ class _ModuleImportsBuilder(object):
dest_name has already been added to dest_module_name.
"""
import_str = format_import(source_module_name, source_name, dest_name)
- if import_str in self.module_imports[dest_module_name]:
- return
# Check if we are trying to expose two different symbols with same name.
full_api_name = dest_name
if dest_module_name:
full_api_name = dest_module_name + '.' + full_api_name
- if full_api_name in self._seen_api_names:
+ if (full_api_name in self._dest_import_to_id and
+ symbol_id != self._dest_import_to_id[full_api_name] and
+ symbol_id != -1):
raise SymbolExposedTwiceError(
'Trying to export multiple symbols with same name: %s.' %
full_api_name)
- self._seen_api_names.add(full_api_name)
+ self._dest_import_to_id[full_api_name] = symbol_id
- self.module_imports[dest_module_name].append(import_str)
+ if not dest_module_name and dest_name.startswith('_'):
+ self._underscore_names_in_root.append(dest_name)
+ # The same symbol can be available in multiple modules.
+ # We store all possible ways of importing this symbol and later pick just
+ # one.
+ self.module_imports[dest_module_name][full_api_name].add(import_str)
-def get_api_imports():
- """Get a map from destination module to formatted imports.
+ def build(self):
+ """Get a map from destination module to __init__.py code for that module.
+
+ Returns:
+ A dictionary where
+ key: (string) destination module (for e.g. tf or tf.consts).
+ value: (string) text that should be in __init__.py files for
+ corresponding modules.
+ """
+ module_text_map = {}
+ for dest_module, dest_name_to_imports in self.module_imports.items():
+ # Sort all possible imports for a symbol and pick the first one.
+ imports_list = [
+ sorted(imports)[0]
+ for _, imports in dest_name_to_imports.items()]
+ module_text_map[dest_module] = '\n'.join(sorted(imports_list))
+
+ # Expose exported symbols with underscores in root module
+ # since we import from it using * import.
+ underscore_names_str = ', '.join(
+ '\'%s\'' % name for name in self._underscore_names_in_root)
+ module_text_map[''] += '''
+_names_with_underscore = [%s]
+__all__ = [s for s in dir() if not s.startswith('_')]
+__all__.extend([s for s in _names_with_underscore])
+''' % underscore_names_str
+
+ return module_text_map
+
+
+def get_api_init_text():
+ """Get a map from destination module to __init__.py code for that module.
Returns:
A dictionary where
key: (string) destination module (for e.g. tf or tf.consts).
- value: List of strings representing module imports
- (for e.g. 'from foo import bar') and constant
- assignments (for e.g. 'FOO = 123').
+ value: (string) text that should be in __init__.py files for
+ corresponding modules.
"""
- module_imports_builder = _ModuleImportsBuilder()
- visited_symbols = set()
+ module_code_builder = _ModuleInitCodeBuilder()
# Traverse over everything imported above. Specifically,
# we want to traverse over TensorFlow Python modules.
@@ -130,8 +168,6 @@ def get_api_imports():
for module_contents_name in dir(module):
attr = getattr(module, module_contents_name)
- if id(attr) in visited_symbols:
- continue
# If attr is _tf_api_constants attribute, then add the constants.
if module_contents_name == _API_CONSTANTS_ATTR:
@@ -139,30 +175,25 @@ def get_api_imports():
for export in exports:
names = export.split('.')
dest_module = '.'.join(names[:-1])
- module_imports_builder.add_import(
- dest_module, module.__name__, value, names[-1])
+ module_code_builder.add_import(
+ -1, dest_module, module.__name__, value, names[-1])
continue
_, attr = tf_decorator.unwrap(attr)
# If attr is a symbol with _tf_api_names attribute, then
# add import for it.
if hasattr(attr, '__dict__') and _API_NAMES_ATTR in attr.__dict__:
- # If the same symbol is available using multiple names, only create
- # imports for it once.
- if id(attr) in visited_symbols:
- continue
- visited_symbols.add(id(attr))
-
for export in attr._tf_api_names: # pylint: disable=protected-access
names = export.split('.')
dest_module = '.'.join(names[:-1])
- module_imports_builder.add_import(
- dest_module, module.__name__, module_contents_name, names[-1])
+ module_code_builder.add_import(
+ id(attr), dest_module, module.__name__, module_contents_name,
+ names[-1])
# Import all required modules in their parent modules.
# For e.g. if we import 'foo.bar.Value'. Then, we also
# import 'bar' in 'foo'.
- imported_modules = set(module_imports_builder.module_imports.keys())
+ imported_modules = set(module_code_builder.module_imports.keys())
for module in imported_modules:
if not module:
continue
@@ -175,11 +206,11 @@ def get_api_imports():
parent_module += ('.' + module_split[submodule_index-1] if parent_module
else module_split[submodule_index-1])
import_from += '.' + parent_module
- module_imports_builder.add_import(
- parent_module, import_from, module_split[submodule_index],
- module_split[submodule_index])
+ module_code_builder.add_import(
+ -1, parent_module, import_from,
+ module_split[submodule_index], module_split[submodule_index])
- return module_imports_builder.module_imports
+ return module_code_builder.build()
def create_api_files(output_files):
@@ -195,16 +226,19 @@ def create_api_files(output_files):
"""
module_name_to_file_path = {}
for output_file in output_files:
+ # Convert path separators to '/' for easier parsing below.
+ normalized_output_file = output_file.replace(os.sep, '/')
if _API_DIR not in output_file:
raise ValueError(
'Output files must be in api/ directory, found %s.' % output_file)
# Get the module name that corresponds to output_file.
# First get module directory under _API_DIR.
module_dir = os.path.dirname(
- output_file[output_file.rfind(_API_DIR)+len(_API_DIR):])
+ normalized_output_file[
+ normalized_output_file.rfind(_API_DIR)+len(_API_DIR):])
# Convert / to .
module_name = module_dir.replace('/', '.').strip('.')
- module_name_to_file_path[module_name] = output_file
+ module_name_to_file_path[module_name] = os.path.normpath(output_file)
# Create file for each expected output in genrule.
for module, file_path in module_name_to_file_path.items():
@@ -212,11 +246,11 @@ def create_api_files(output_files):
os.makedirs(os.path.dirname(file_path))
open(file_path, 'a').close()
- module_imports = get_api_imports()
+ module_text_map = get_api_init_text()
# Add imports to output files.
missing_output_files = []
- for module, exports in module_imports.items():
+ for module, text in module_text_map.items():
# Make sure genrule output file list is in sync with API exports.
if module not in module_name_to_file_path:
module_file_path = '"api/%s/__init__.py"' % (
@@ -224,7 +258,7 @@ def create_api_files(output_files):
missing_output_files.append(module_file_path)
continue
with open(module_name_to_file_path[module], 'w') as fp:
- fp.write(_GENERATED_FILE_HEADER + '\n'.join(exports))
+ fp.write(_GENERATED_FILE_HEADER + text)
if missing_output_files:
raise ValueError(
@@ -241,6 +275,16 @@ if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument(
'outputs', metavar='O', type=str, nargs='+',
- help='Python files that we expect this script to output.')
+ help='If a single file is passed in, then we assume it contains a '
+ 'semicolon-separated list of Python files that we expect this script to '
+ 'output. If multiple files are passed in, then we assume output files '
+ 'are listed directly as arguments.')
args = parser.parse_args()
- main(args.outputs)
+ if len(args.outputs) == 1:
+ # If we only get a single argument, then it must be a file containing
+ # list of outputs.
+ with open(args.outputs[0]) as output_list_file:
+ outputs = [line.strip() for line in output_list_file.read().split(';')]
+ else:
+ outputs = args.outputs
+ main(outputs)
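
The reworked builder above keys each exposed API name to the id() of the object behind it, so the same object may be registered under one name from several source modules, while two distinct objects competing for the same name still raise. A condensed sketch of that rule, simplified from _ModuleInitCodeBuilder.add_import in the diff above:

    import collections

    class SymbolExposedTwiceError(Exception):
        """Two different symbols claimed the same API name."""

    _dest_import_to_id = collections.defaultdict(int)

    def register(full_api_name, symbol_id):
        # symbol_id == -1 marks constants and parent-module imports, which are
        # exempt from the collision check (as in the generator code above).
        if (full_api_name in _dest_import_to_id and
                symbol_id != _dest_import_to_id[full_api_name] and
                symbol_id != -1):
            raise SymbolExposedTwiceError(full_api_name)
        _dest_import_to_id[full_api_name] = symbol_id
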
diff --git a/tensorflow/tools/api/generator/create_python_api_test.py b/tensorflow/tools/api/generator/create_python_api_test.py
index 2760779e6e..218c812045 100644
--- a/tensorflow/tools/api/generator/create_python_api_test.py
+++ b/tensorflow/tools/api/generator/create_python_api_test.py
@@ -56,7 +56,7 @@ class CreatePythonApiTest(test.TestCase):
del sys.modules[_MODULE_NAME]
def testFunctionImportIsAdded(self):
- imports = create_python_api.get_api_imports()
+ imports = create_python_api.get_api_init_text()
expected_import = (
'from test.tensorflow.test_module import test_op as test_op1')
self.assertTrue(
@@ -69,14 +69,14 @@ class CreatePythonApiTest(test.TestCase):
msg='%s not in %s' % (expected_import, str(imports)))
def testClassImportIsAdded(self):
- imports = create_python_api.get_api_imports()
+ imports = create_python_api.get_api_init_text()
expected_import = 'from test.tensorflow.test_module import TestClass'
self.assertTrue(
'TestClass' in str(imports),
msg='%s not in %s' % (expected_import, str(imports)))
def testConstantIsAdded(self):
- imports = create_python_api.get_api_imports()
+ imports = create_python_api.get_api_init_text()
expected = 'from test.tensorflow.test_module import _TEST_CONSTANT'
self.assertTrue(expected in str(imports),
msg='%s not in %s' % (expected, str(imports)))
diff --git a/tensorflow/tools/api/tests/BUILD b/tensorflow/tools/api/tests/BUILD
index 0dc154b6d2..724b12cd47 100644
--- a/tensorflow/tools/api/tests/BUILD
+++ b/tensorflow/tools/api/tests/BUILD
@@ -23,7 +23,6 @@ py_test(
],
srcs_version = "PY2AND3",
deps = [
- "//tensorflow:experimental_tensorflow_py",
"//tensorflow:tensorflow_py",
"//tensorflow/python:client_testlib",
"//tensorflow/python:lib",
diff --git a/tensorflow/tools/api/tests/api_compatibility_test.py b/tensorflow/tools/api/tests/api_compatibility_test.py
index 7eeae05847..26d5bca637 100644
--- a/tensorflow/tools/api/tests/api_compatibility_test.py
+++ b/tensorflow/tools/api/tests/api_compatibility_test.py
@@ -34,7 +34,6 @@ import sys
import unittest
import tensorflow as tf
-from tensorflow import experimental_api as api
from google.protobuf import text_format
@@ -47,8 +46,6 @@ from tensorflow.tools.api.lib import python_object_to_proto_visitor
from tensorflow.tools.common import public_api
from tensorflow.tools.common import traverse
-if hasattr(tf, 'experimental_api'):
- del tf.experimental_api
# FLAGS defined at the bottom:
FLAGS = None
@@ -145,9 +142,6 @@ class ApiCompatibilityTest(test.TestCase):
verbose_diff_message = ''
# First check if the key is not found in one or the other.
if key in only_in_expected:
- # TODO(annarev): remove once we switch to using tf_export decorators.
- if key == 'tensorflow.math':
- continue
diff_message = 'Object %s expected but not found (removed). %s' % (
key, additional_missing_object_message)
verbose_diff_message = diff_message
@@ -211,55 +205,9 @@ class ApiCompatibilityTest(test.TestCase):
public_api_visitor = public_api.PublicAPIVisitor(visitor)
public_api_visitor.do_not_descend_map['tf'].append('contrib')
public_api_visitor.do_not_descend_map['tf.GPUOptions'] = ['Experimental']
- traverse.traverse(tf, public_api_visitor)
-
- proto_dict = visitor.GetProtos()
-
- # Read all golden files.
- expression = os.path.join(
- resource_loader.get_root_dir_with_all_resources(),
- _KeyToFilePath('*'))
- golden_file_list = file_io.get_matching_files(expression)
-
- def _ReadFileToProto(filename):
- """Read a filename, create a protobuf from its contents."""
- ret_val = api_objects_pb2.TFAPIObject()
- text_format.Merge(file_io.read_file_to_string(filename), ret_val)
- return ret_val
-
- golden_proto_dict = {
- _FileNameToKey(filename): _ReadFileToProto(filename)
- for filename in golden_file_list
- }
-
- # TODO(annarev): remove once we switch to using tf_export decorators.
- tf_module = golden_proto_dict['tensorflow'].tf_module
- for i in range(len(tf_module.member)):
- if tf_module.member[i].name == 'math':
- del tf_module.member[i]
- break
-
- # Diff them. Do not fail if called with update.
- # If the test is run to update goldens, only report diffs but do not fail.
- self._AssertProtoDictEquals(
- golden_proto_dict,
- proto_dict,
- verbose=FLAGS.verbose_diffs,
- update_goldens=FLAGS.update_goldens)
-
- @unittest.skipUnless(
- sys.version_info.major == 2,
- 'API compabitility test goldens are generated using python2.')
- def testNewAPIBackwardsCompatibility(self):
- # Extract all API stuff.
- visitor = python_object_to_proto_visitor.PythonObjectToProtoVisitor()
-
- public_api_visitor = public_api.PublicAPIVisitor(visitor)
- public_api_visitor.do_not_descend_map['tf'].append('contrib')
- public_api_visitor.do_not_descend_map['tf.GPUOptions'] = ['Experimental']
# TODO(annarev): Make slide_dataset available in API.
public_api_visitor.private_map['tf'] = ['slide_dataset']
- traverse.traverse(api, public_api_visitor)
+ traverse.traverse(tf, public_api_visitor)
proto_dict = visitor.GetProtos()
@@ -286,9 +234,7 @@ class ApiCompatibilityTest(test.TestCase):
golden_proto_dict,
proto_dict,
verbose=FLAGS.verbose_diffs,
- update_goldens=False,
- additional_missing_object_message=
- 'Check if tf_export decorator/call is missing for this symbol.')
+ update_goldens=FLAGS.update_goldens)
if __name__ == '__main__':
diff --git a/tensorflow/tools/ci_build/windows/cpu/cmake/run_py.bat b/tensorflow/tools/ci_build/windows/cpu/cmake/run_py.bat
index 3c3b223a00..30554a084c 100644
--- a/tensorflow/tools/ci_build/windows/cpu/cmake/run_py.bat
+++ b/tensorflow/tools/ci_build/windows/cpu/cmake/run_py.bat
@@ -28,6 +28,9 @@ IF DEFINED TF_NIGHTLY (ECHO TF_NIGHTLY is set to %TF_NIGHTLY%) ELSE (SET TF_NIGH
:: Set pip binary location. Do not override if it is set already.
IF DEFINED PIP_EXE (ECHO PIP_EXE is set to %PIP_EXE%) ELSE (SET PIP_EXE="C:\Program Files\Anaconda3\Scripts\pip.exe")
+:: Install absl-py.
+%PIP_EXE% install --upgrade absl-py
+
:: Run the CMAKE build to build the pip package.
CALL %REPO_ROOT%\tensorflow\tools\ci_build\windows\cpu\cmake\run_build.bat
if %errorlevel% neq 0 exit /b %errorlevel%
@@ -37,9 +40,6 @@ DIR %REPO_ROOT%\%BUILD_DIR%\tf_python\dist\ /S /B > wheel_filename_file
set /p WHEEL_FILENAME=<wheel_filename_file
del wheel_filename_file
-:: Install absl-py.
-%PIP_EXE% install --upgrade absl-py
-
:: Install the pip package.
echo Installing PIP package...
%PIP_EXE% install --upgrade --no-deps %WHEEL_FILENAME% -v -v