aboutsummaryrefslogtreecommitdiffhomepage
path: root/third_party
diff options
context:
space:
mode:
Diffstat (limited to 'third_party')
-rw-r--r--third_party/android/android_configure.bzl54
-rw-r--r--third_party/clang_toolchain/cc_configure_clang.bzl18
-rw-r--r--third_party/clang_toolchain/download_clang.bzl104
-rw-r--r--third_party/common.bzl10
-rw-r--r--third_party/flatbuffers/build_defs.bzl347
-rw-r--r--third_party/llvm/llvm.bzl193
-rw-r--r--third_party/mkl/build_defs.bzl53
-rw-r--r--third_party/mpi/mpi.bzl10
-rw-r--r--third_party/repo.bzl126
-rw-r--r--third_party/sycl/sycl_configure.bzl331
-rw-r--r--third_party/toolchains/clang6/repo.bzl49
-rw-r--r--third_party/toolchains/cpus/arm/arm_compiler_configure.bzl52
-rw-r--r--third_party/toolchains/gpus/cuda/build_defs.bzl7
13 files changed, 647 insertions, 707 deletions
diff --git a/third_party/android/android_configure.bzl b/third_party/android/android_configure.bzl
index 646ed732a1..da09bdf39e 100644
--- a/third_party/android/android_configure.bzl
+++ b/third_party/android/android_configure.bzl
@@ -36,39 +36,33 @@ _ANDROID_NDK_REPO_TEMPLATE = """
"""
def _android_autoconf_impl(repository_ctx):
- """Implementation of the android_autoconf repository rule."""
- sdk_home = repository_ctx.os.environ.get(_ANDROID_SDK_HOME)
- sdk_api_level = repository_ctx.os.environ.get(_ANDROID_SDK_API_VERSION)
- build_tools_version = repository_ctx.os.environ.get(
- _ANDROID_BUILD_TOOLS_VERSION,
- )
- ndk_home = repository_ctx.os.environ.get(_ANDROID_NDK_HOME)
- ndk_api_level = repository_ctx.os.environ.get(_ANDROID_NDK_API_VERSION)
+ """Implementation of the android_autoconf repository rule."""
+ sdk_home = repository_ctx.os.environ.get(_ANDROID_SDK_HOME)
+ sdk_api_level = repository_ctx.os.environ.get(_ANDROID_SDK_API_VERSION)
+ build_tools_version = repository_ctx.os.environ.get(
+ _ANDROID_BUILD_TOOLS_VERSION)
+ ndk_home = repository_ctx.os.environ.get(_ANDROID_NDK_HOME)
+ ndk_api_level = repository_ctx.os.environ.get(_ANDROID_NDK_API_VERSION)
- sdk_rule = "pass"
- if all([sdk_home, sdk_api_level, build_tools_version]):
- sdk_rule = _ANDROID_SDK_REPO_TEMPLATE % (
- sdk_home,
- sdk_api_level,
- build_tools_version,
- )
+ sdk_rule = "pass"
+ if all([sdk_home, sdk_api_level, build_tools_version]):
+ sdk_rule = _ANDROID_SDK_REPO_TEMPLATE % (
+ sdk_home, sdk_api_level, build_tools_version)
- ndk_rule = "pass"
- if all([ndk_home, ndk_api_level]):
- ndk_rule = _ANDROID_NDK_REPO_TEMPLATE % (ndk_home, ndk_api_level)
+ ndk_rule = "pass"
+ if all([ndk_home, ndk_api_level]):
+ ndk_rule = _ANDROID_NDK_REPO_TEMPLATE % (ndk_home, ndk_api_level)
- repository_ctx.template(
- "BUILD",
- Label("//third_party/android:android_configure.BUILD.tpl"),
- )
- repository_ctx.template(
- "android.bzl",
- Label("//third_party/android:android.bzl.tpl"),
- substitutions = {
- "MAYBE_ANDROID_SDK_REPOSITORY": sdk_rule,
- "MAYBE_ANDROID_NDK_REPOSITORY": ndk_rule,
- },
- )
+ repository_ctx.template(
+ "BUILD",
+ Label("//third_party/android:android_configure.BUILD.tpl"))
+ repository_ctx.template(
+ "android.bzl",
+ Label("//third_party/android:android.bzl.tpl"),
+ substitutions={
+ "MAYBE_ANDROID_SDK_REPOSITORY": sdk_rule,
+ "MAYBE_ANDROID_NDK_REPOSITORY": ndk_rule,
+ })
android_configure = repository_rule(
implementation = _android_autoconf_impl,
diff --git a/third_party/clang_toolchain/cc_configure_clang.bzl b/third_party/clang_toolchain/cc_configure_clang.bzl
index 0778c43c53..1181110ea9 100644
--- a/third_party/clang_toolchain/cc_configure_clang.bzl
+++ b/third_party/clang_toolchain/cc_configure_clang.bzl
@@ -7,16 +7,16 @@ _TF_DOWNLOAD_CLANG = "TF_DOWNLOAD_CLANG"
_TF_NEED_CUDA = "TF_NEED_CUDA"
def _cc_clang_autoconf(repo_ctx):
- if repo_ctx.os.environ.get(_TF_DOWNLOAD_CLANG) != "1":
- return
- if repo_ctx.os.environ.get(_TF_NEED_CUDA) == "1":
- # Clang is handled separately for CUDA configs.
- # See cuda_configure.bzl for more details.
- return
+ if repo_ctx.os.environ.get(_TF_DOWNLOAD_CLANG) != "1":
+ return
+ if repo_ctx.os.environ.get(_TF_NEED_CUDA) == "1":
+ # Clang is handled separately for CUDA configs.
+ # See cuda_configure.bzl for more details.
+ return
- download_clang(repo_ctx, out_folder = "extra_tools")
- overriden_tools = {"gcc": "extra_tools/bin/clang"}
- cc_autoconf_impl(repo_ctx, overriden_tools)
+ download_clang(repo_ctx, out_folder='extra_tools')
+ overriden_tools = {'gcc': 'extra_tools/bin/clang'}
+ cc_autoconf_impl(repo_ctx, overriden_tools)
cc_download_clang_toolchain = repository_rule(
environ = [
diff --git a/third_party/clang_toolchain/download_clang.bzl b/third_party/clang_toolchain/download_clang.bzl
index 6a4c029243..a014a806a6 100644
--- a/third_party/clang_toolchain/download_clang.bzl
+++ b/third_party/clang_toolchain/download_clang.bzl
@@ -1,60 +1,54 @@
""" Helpers to download a recent clang release."""
def _get_platform_folder(os_name):
- os_name = os_name.lower()
- if os_name.startswith("windows"):
- return "Win"
- if os_name.startswith("mac os"):
- return "Mac"
- if not os_name.startswith("linux"):
- fail("Unknown platform")
- return "Linux_x64"
-
-def _download_chromium_clang(
- repo_ctx,
- platform_folder,
- package_version,
- sha256,
- out_folder):
- cds_url = "https://commondatastorage.googleapis.com/chromium-browser-clang"
- cds_file = "clang-%s.tgz" % package_version
- cds_full_url = "{0}/{1}/{2}".format(cds_url, platform_folder, cds_file)
- repo_ctx.download_and_extract(cds_full_url, output = out_folder, sha256 = sha256)
+ os_name = os_name.lower()
+ if os_name.startswith('windows'):
+ return 'Win'
+ if os_name.startswith('mac os'):
+ return 'Mac'
+ if not os_name.startswith('linux'):
+ fail('Unknown platform')
+ return 'Linux_x64'
+
+def _download_chromium_clang(repo_ctx, platform_folder, package_version, sha256,
+ out_folder):
+ cds_url = 'https://commondatastorage.googleapis.com/chromium-browser-clang'
+ cds_file = 'clang-%s.tgz' % package_version
+ cds_full_url = '{0}/{1}/{2}'.format(cds_url, platform_folder, cds_file)
+ repo_ctx.download_and_extract(cds_full_url, output=out_folder, sha256=sha256)
def download_clang(repo_ctx, out_folder):
- """ Download a fresh clang release and put it into out_folder.
-
- Clang itself will be located in 'out_folder/bin/clang'.
- We currently download one of the latest releases of clang by the
- Chromium project (see
- https://chromium.googlesource.com/chromium/src/+/master/docs/clang.md).
-
- Args:
- repo_ctx: An instance of repository_context object.
- out_folder: A folder to extract the compiler into.
- """
- # TODO(ibiryukov): we currently download and extract some extra tools in the
- # clang release (e.g., sanitizers). We should probably remove the ones
- # we don't need and document the ones we want provide in addition to clang.
-
- # Latest CLANG_REVISION and CLANG_SUB_REVISION of the Chromiums's release
- # can be found in https://chromium.googlesource.com/chromium/src/tools/clang/+/master/scripts/update.py
- CLANG_REVISION = "335091"
- CLANG_SUB_REVISION = 1
-
- package_version = "%s-%s" % (CLANG_REVISION, CLANG_SUB_REVISION)
-
- checksums = {
- "Linux_x64": "17002b75293fccfdd175eacdc9ee47d97b58d7e98fef343384fbbef1b68ce99f",
- "Mac": "9351e46d28315daaa06a1eb55bd0370ed4aaeb693a2a3e82e48d2737d7723468",
- "Win": "e78a1e469224d6f6751b4df4374bf58893ac03900ec924e4c8264888ba4aeb1e",
- }
-
- platform_folder = _get_platform_folder(repo_ctx.os.name)
- _download_chromium_clang(
- repo_ctx,
- platform_folder,
- package_version,
- checksums[platform_folder],
- out_folder,
- )
+ """ Download a fresh clang release and put it into out_folder.
+
+ Clang itself will be located in 'out_folder/bin/clang'.
+ We currently download one of the latest releases of clang by the
+ Chromium project (see
+ https://chromium.googlesource.com/chromium/src/+/master/docs/clang.md).
+
+ Args:
+ repo_ctx: An instance of repository_context object.
+ out_folder: A folder to extract the compiler into.
+ """
+ # TODO(ibiryukov): we currently download and extract some extra tools in the
+ # clang release (e.g., sanitizers). We should probably remove the ones
+ # we don't need and document the ones we want provide in addition to clang.
+
+ # Latest CLANG_REVISION and CLANG_SUB_REVISION of the Chromiums's release
+ # can be found in https://chromium.googlesource.com/chromium/src/tools/clang/+/master/scripts/update.py
+ CLANG_REVISION = '335091'
+ CLANG_SUB_REVISION = 1
+
+ package_version = '%s-%s' % (CLANG_REVISION, CLANG_SUB_REVISION)
+
+ checksums = {
+ 'Linux_x64':
+ '17002b75293fccfdd175eacdc9ee47d97b58d7e98fef343384fbbef1b68ce99f',
+ 'Mac':
+ '9351e46d28315daaa06a1eb55bd0370ed4aaeb693a2a3e82e48d2737d7723468',
+ 'Win':
+ 'e78a1e469224d6f6751b4df4374bf58893ac03900ec924e4c8264888ba4aeb1e',
+ }
+
+ platform_folder = _get_platform_folder(repo_ctx.os.name)
+ _download_chromium_clang(repo_ctx, platform_folder, package_version,
+ checksums[platform_folder], out_folder)
diff --git a/third_party/common.bzl b/third_party/common.bzl
index 8134bf3d25..db981a5e31 100644
--- a/third_party/common.bzl
+++ b/third_party/common.bzl
@@ -21,11 +21,11 @@
# substitutions: A dictionary mapping strings to their substitutions
def template_rule_impl(ctx):
- ctx.template_action(
- template = ctx.file.src,
- output = ctx.outputs.out,
- substitutions = ctx.attr.substitutions,
- )
+ ctx.template_action(
+ template = ctx.file.src,
+ output = ctx.outputs.out,
+ substitutions = ctx.attr.substitutions,
+ )
template_rule = rule(
attrs = {
diff --git a/third_party/flatbuffers/build_defs.bzl b/third_party/flatbuffers/build_defs.bzl
index fb24cc73df..ae8d7feebe 100644
--- a/third_party/flatbuffers/build_defs.bzl
+++ b/third_party/flatbuffers/build_defs.bzl
@@ -8,49 +8,66 @@ DEFAULT_FLATC_ARGS = [
"--gen-object-api",
]
-def flatbuffer_library_public(
- name,
- srcs,
- outs,
- language_flag,
- out_prefix = "",
- includes = [],
- include_paths = [],
- flatc_args = DEFAULT_FLATC_ARGS,
- reflection_name = "",
- reflection_visiblity = None,
- output_to_bindir = False):
- """Generates code files for reading/writing the given flatbuffers in the requested language using the public compiler.
-
- Args:
- name: Rule name.
- srcs: Source .fbs files. Sent in order to the compiler.
- outs: Output files from flatc.
- language_flag: Target language flag. One of [-c, -j, -js].
- out_prefix: Prepend this path to the front of all generated files except on
- single source targets. Usually is a directory name.
- includes: Optional, list of filegroups of schemas that the srcs depend on.
- include_paths: Optional, list of paths the includes files can be found in.
- flatc_args: Optional, list of additional arguments to pass to flatc.
- reflection_name: Optional, if set this will generate the flatbuffer
- reflection binaries for the schemas.
- reflection_visiblity: The visibility of the generated reflection Fileset.
- output_to_bindir: Passed to genrule for output to bin directory.
- Outs:
- filegroup(name): all generated source files.
- Fileset([reflection_name]): (Optional) all generated reflection binaries.
- """
- include_paths_cmd = ["-I %s" % (s) for s in include_paths]
+def flatbuffer_library_public(name,
+ srcs,
+ outs,
+ language_flag,
+ out_prefix="",
+ includes=[],
+ include_paths=[],
+ flatc_args=DEFAULT_FLATC_ARGS,
+ reflection_name="",
+ reflection_visiblity=None,
+ output_to_bindir=False):
+ '''Generates code files for reading/writing the given flatbuffers in the requested language using the public compiler.
- # '$(@D)' when given a single source target will give the appropriate
- # directory. Appending 'out_prefix' is only necessary when given a build
- # target with multiple sources.
- output_directory = (
- ("-o $(@D)/%s" % (out_prefix)) if len(srcs) > 1 else ("-o $(@D)")
- )
- genrule_cmd = " ".join([
+ Args:
+ name: Rule name.
+ srcs: Source .fbs files. Sent in order to the compiler.
+ outs: Output files from flatc.
+ language_flag: Target language flag. One of [-c, -j, -js].
+ out_prefix: Prepend this path to the front of all generated files except on
+ single source targets. Usually is a directory name.
+ includes: Optional, list of filegroups of schemas that the srcs depend on.
+ include_paths: Optional, list of paths the includes files can be found in.
+ flatc_args: Optional, list of additional arguments to pass to flatc.
+ reflection_name: Optional, if set this will generate the flatbuffer
+ reflection binaries for the schemas.
+ reflection_visiblity: The visibility of the generated reflection Fileset.
+ output_to_bindir: Passed to genrule for output to bin directory.
+ Outs:
+ filegroup(name): all generated source files.
+ Fileset([reflection_name]): (Optional) all generated reflection binaries.
+ '''
+ include_paths_cmd = ["-I %s" % (s) for s in include_paths]
+ # '$(@D)' when given a single source target will give the appropriate
+ # directory. Appending 'out_prefix' is only necessary when given a build
+ # target with multiple sources.
+ output_directory = (
+ ("-o $(@D)/%s" % (out_prefix)) if len(srcs) > 1 else ("-o $(@D)"))
+ genrule_cmd = " ".join([
+ "for f in $(SRCS); do",
+ "$(location %s)" % (flatc_path),
+ " ".join(flatc_args),
+ " ".join(include_paths_cmd),
+ language_flag,
+ output_directory,
+ "$$f;",
+ "done",
+ ])
+ native.genrule(
+ name=name,
+ srcs=srcs,
+ outs=outs,
+ output_to_bindir=output_to_bindir,
+ tools=includes + [flatc_path,],
+ cmd=genrule_cmd,
+ message="Generating flatbuffer files for %s:" % (name),)
+ if reflection_name:
+ reflection_genrule_cmd = " ".join([
"for f in $(SRCS); do",
"$(location %s)" % (flatc_path),
+ "-b --schema",
" ".join(flatc_args),
" ".join(include_paths_cmd),
language_flag,
@@ -58,156 +75,122 @@ def flatbuffer_library_public(
"$$f;",
"done",
])
+ reflection_outs = [
+ (out_prefix + "%s.bfbs") % (s.replace(".fbs", "").split("/")[-1]) for s in srcs
+ ]
native.genrule(
- name = name,
- srcs = srcs,
- outs = outs,
- output_to_bindir = output_to_bindir,
- tools = includes + [flatc_path],
- cmd = genrule_cmd,
- message = "Generating flatbuffer files for %s:" % (name),
+ name= "%s_srcs" % reflection_name,
+ srcs=srcs,
+ outs=reflection_outs,
+ output_to_bindir=output_to_bindir,
+ tools=includes + [flatc_path,],
+ cmd=reflection_genrule_cmd,
+ message="Generating flatbuffer reflection binary for %s:" % (name),)
+ native.Fileset(
+ name=reflection_name,
+ out="%s_out" % reflection_name,
+ entries=[
+ native.FilesetEntry(files=reflection_outs),
+ ],
+ visibility=reflection_visiblity
)
- if reflection_name:
- reflection_genrule_cmd = " ".join([
- "for f in $(SRCS); do",
- "$(location %s)" % (flatc_path),
- "-b --schema",
- " ".join(flatc_args),
- " ".join(include_paths_cmd),
- language_flag,
- output_directory,
- "$$f;",
- "done",
- ])
- reflection_outs = [
- (out_prefix + "%s.bfbs") % (s.replace(".fbs", "").split("/")[-1])
- for s in srcs
- ]
- native.genrule(
- name = "%s_srcs" % reflection_name,
- srcs = srcs,
- outs = reflection_outs,
- output_to_bindir = output_to_bindir,
- tools = includes + [flatc_path],
- cmd = reflection_genrule_cmd,
- message = "Generating flatbuffer reflection binary for %s:" % (name),
- )
- native.Fileset(
- name = reflection_name,
- out = "%s_out" % reflection_name,
- entries = [
- native.FilesetEntry(files = reflection_outs),
- ],
- visibility = reflection_visiblity,
- )
-def flatbuffer_cc_library(
- name,
- srcs,
- srcs_filegroup_name = "",
- out_prefix = "",
- includes = [],
- include_paths = [],
- flatc_args = DEFAULT_FLATC_ARGS,
- visibility = None,
- srcs_filegroup_visibility = None,
- gen_reflections = False):
- '''A cc_library with the generated reader/writers for the given flatbuffer definitions.
- Args:
- name: Rule name.
- srcs: Source .fbs files. Sent in order to the compiler.
- srcs_filegroup_name: Name of the output filegroup that holds srcs. Pass this
- filegroup into the `includes` parameter of any other
- flatbuffer_cc_library that depends on this one's schemas.
- out_prefix: Prepend this path to the front of all generated files. Usually
- is a directory name.
- includes: Optional, list of filegroups of schemas that the srcs depend on.
- ** SEE REMARKS BELOW **
- include_paths: Optional, list of paths the includes files can be found in.
- flatc_args: Optional list of additional arguments to pass to flatc
- (e.g. --gen-mutable).
- visibility: The visibility of the generated cc_library. By default, use the
- default visibility of the project.
- srcs_filegroup_visibility: The visibility of the generated srcs filegroup.
- By default, use the value of the visibility parameter above.
- gen_reflections: Optional, if true this will generate the flatbuffer
- reflection binaries for the schemas.
- Outs:
- filegroup([name]_srcs): all generated .h files.
- filegroup(srcs_filegroup_name if specified, or [name]_includes if not):
- Other flatbuffer_cc_library's can pass this in for their `includes`
- parameter, if they depend on the schemas in this library.
- Fileset([name]_reflection): (Optional) all generated reflection binaries.
- cc_library([name]): library with sources and flatbuffers deps.
+def flatbuffer_cc_library(name, srcs, srcs_filegroup_name="",
+ out_prefix="", includes=[], include_paths=[],
+ flatc_args=DEFAULT_FLATC_ARGS,
+ visibility=None, srcs_filegroup_visibility=None,
+ gen_reflections=False):
+ '''A cc_library with the generated reader/writers for the given flatbuffer definitions.
- Remarks:
- ** Because the genrule used to call flatc does not have any trivial way of
- computing the output list of files transitively generated by includes and
- --gen-includes (the default) being defined for flatc, the --gen-includes
- flag will not work as expected. The way around this is to add a dependency
- to the flatbuffer_cc_library defined alongside the flatc included Fileset.
- For example you might define:
+ Args:
+ name: Rule name.
+ srcs: Source .fbs files. Sent in order to the compiler.
+ srcs_filegroup_name: Name of the output filegroup that holds srcs. Pass this
+ filegroup into the `includes` parameter of any other
+ flatbuffer_cc_library that depends on this one's schemas.
+ out_prefix: Prepend this path to the front of all generated files. Usually
+ is a directory name.
+ includes: Optional, list of filegroups of schemas that the srcs depend on.
+ ** SEE REMARKS BELOW **
+ include_paths: Optional, list of paths the includes files can be found in.
+ flatc_args: Optional list of additional arguments to pass to flatc
+ (e.g. --gen-mutable).
+ visibility: The visibility of the generated cc_library. By default, use the
+ default visibility of the project.
+ srcs_filegroup_visibility: The visibility of the generated srcs filegroup.
+ By default, use the value of the visibility parameter above.
+ gen_reflections: Optional, if true this will generate the flatbuffer
+ reflection binaries for the schemas.
+ Outs:
+ filegroup([name]_srcs): all generated .h files.
+ filegroup(srcs_filegroup_name if specified, or [name]_includes if not):
+ Other flatbuffer_cc_library's can pass this in for their `includes`
+ parameter, if they depend on the schemas in this library.
+ Fileset([name]_reflection): (Optional) all generated reflection binaries.
+ cc_library([name]): library with sources and flatbuffers deps.
- flatbuffer_cc_library(
- name = "my_fbs",
- srcs = [ "schemas/foo.fbs" ],
- includes = [ "//third_party/bazz:bazz_fbs_includes" ],
- )
+ Remarks:
+ ** Because the genrule used to call flatc does not have any trivial way of
+ computing the output list of files transitively generated by includes and
+ --gen-includes (the default) being defined for flatc, the --gen-includes
+ flag will not work as expected. The way around this is to add a dependency
+ to the flatbuffer_cc_library defined alongside the flatc included Fileset.
+ For example you might define:
- In which foo.fbs includes a few files from the Fileset defined at
- //third_party/bazz:bazz_fbs_includes. When compiling the library that
- includes foo_generated.h, and therefore has my_fbs as a dependency, it
- will fail to find any of the bazz *_generated.h files unless you also
- add bazz's flatbuffer_cc_library to your own dependency list, e.g.:
+ flatbuffer_cc_library(
+ name = "my_fbs",
+ srcs = [ "schemas/foo.fbs" ],
+ includes = [ "//third_party/bazz:bazz_fbs_includes" ],
+ )
- cc_library(
- name = "my_lib",
- deps = [
- ":my_fbs",
- "//third_party/bazz:bazz_fbs"
- ],
- )
+ In which foo.fbs includes a few files from the Fileset defined at
+ //third_party/bazz:bazz_fbs_includes. When compiling the library that
+ includes foo_generated.h, and therefore has my_fbs as a dependency, it
+ will fail to find any of the bazz *_generated.h files unless you also
+ add bazz's flatbuffer_cc_library to your own dependency list, e.g.:
- Happy dependent Flatbuffering!
- '''
- output_headers = [
- (out_prefix + "%s_generated.h") % (s.replace(".fbs", "").split("/")[-1])
- for s in srcs
- ]
- reflection_name = "%s_reflection" % name if gen_reflections else ""
+ cc_library(
+ name = "my_lib",
+ deps = [
+ ":my_fbs",
+ "//third_party/bazz:bazz_fbs"
+ ],
+ )
- flatbuffer_library_public(
- name = "%s_srcs" % (name),
- srcs = srcs,
- outs = output_headers,
- language_flag = "-c",
- out_prefix = out_prefix,
- includes = includes,
- include_paths = include_paths,
- flatc_args = flatc_args,
- reflection_name = reflection_name,
- reflection_visiblity = visibility,
- )
- native.cc_library(
- name = name,
- hdrs = output_headers,
- srcs = output_headers,
- features = [
- "-parse_headers",
- ],
- deps = [
- "@flatbuffers//:runtime_cc",
- ],
- includes = ["."],
- linkstatic = 1,
- visibility = visibility,
- )
+ Happy dependent Flatbuffering!
+ '''
+ output_headers = [
+ (out_prefix + "%s_generated.h") % (s.replace(".fbs", "").split("/")[-1]) for s in srcs
+ ]
+ reflection_name = "%s_reflection" % name if gen_reflections else ""
- # A filegroup for the `srcs`. That is, all the schema files for this
- # Flatbuffer set.
- native.filegroup(
- name = srcs_filegroup_name if srcs_filegroup_name else "%s_includes" % (name),
- srcs = srcs,
- visibility = srcs_filegroup_visibility if srcs_filegroup_visibility != None else visibility,
- )
+ flatbuffer_library_public(name="%s_srcs" % (name),
+ srcs=srcs,
+ outs=output_headers,
+ language_flag="-c",
+ out_prefix=out_prefix,
+ includes=includes,
+ include_paths=include_paths,
+ flatc_args=flatc_args,
+ reflection_name=reflection_name,
+ reflection_visiblity=visibility,)
+ native.cc_library(name=name,
+ hdrs=output_headers,
+ srcs=output_headers,
+ features=[
+ "-parse_headers",
+ ],
+ deps=[
+ "@flatbuffers//:runtime_cc",
+ ],
+ includes=["."],
+ linkstatic=1,
+ visibility=visibility)
+
+ # A filegroup for the `srcs`. That is, all the schema files for this
+ # Flatbuffer set.
+ native.filegroup(
+ name = srcs_filegroup_name if srcs_filegroup_name else "%s_includes" % (name),
+ srcs = srcs,
+ visibility=srcs_filegroup_visibility if srcs_filegroup_visibility != None else visibility)
diff --git a/third_party/llvm/llvm.bzl b/third_party/llvm/llvm.bzl
index b65bfe3544..2e809e5f14 100644
--- a/third_party/llvm/llvm.bzl
+++ b/third_party/llvm/llvm.bzl
@@ -8,114 +8,102 @@ correctly understood by the build system.
"""
def gentbl(name, tblgen, td_file, td_srcs, tbl_outs, library = True, **kwargs):
- """gentbl() generates tabular code from a table definition file.
-
- Args:
- name: The name of the build rule for use in dependencies.
- tblgen: The binary used to produce the output.
- td_file: The primary table definitions file.
- td_srcs: A list of table definition files included transitively.
- tbl_outs: A list of tuples (opts, out), where each opts is a string of
- options passed to tblgen, and the out is the corresponding output file
- produced.
- library: Whether to bundle the generated files into a library.
- **kwargs: Keyword arguments to pass to subsidiary cc_library() rule.
- """
- if td_file not in td_srcs:
- td_srcs += [td_file]
- includes = []
- for (opts, out) in tbl_outs:
- outdir = out[:out.rindex("/")]
- if outdir not in includes:
- includes.append(outdir)
- rule_suffix = "_".join(opts.replace("-", "_").replace("=", "_").split(" "))
- native.genrule(
- name = "%s_%s_genrule" % (name, rule_suffix),
- srcs = td_srcs,
- outs = [out],
- tools = [tblgen],
- message = "Generating code from table: %s" % td_file,
- cmd = (("$(location %s) " + "-I external/llvm/include " +
- "-I external/llvm/tools/clang/include " +
- "-I $$(dirname $(location %s)) " + "%s $(location %s) -o $@") % (
- tblgen,
- td_file,
- opts,
- td_file,
- )),
- )
-
- # For now, all generated files can be assumed to comprise public interfaces.
- # If this is not true, you should specify library = False
- # and list the generated '.inc' files in "srcs".
- if library:
- native.cc_library(
- name = name,
- textual_hdrs = [f for (_, f) in tbl_outs],
- includes = includes,
- **kwargs
- )
+ """gentbl() generates tabular code from a table definition file.
+
+ Args:
+ name: The name of the build rule for use in dependencies.
+ tblgen: The binary used to produce the output.
+ td_file: The primary table definitions file.
+ td_srcs: A list of table definition files included transitively.
+ tbl_outs: A list of tuples (opts, out), where each opts is a string of
+ options passed to tblgen, and the out is the corresponding output file
+ produced.
+ library: Whether to bundle the generated files into a library.
+ **kwargs: Keyword arguments to pass to subsidiary cc_library() rule.
+ """
+ if td_file not in td_srcs:
+ td_srcs += [td_file]
+ includes = []
+ for (opts, out) in tbl_outs:
+ outdir = out[:out.rindex("/")]
+ if outdir not in includes:
+ includes.append(outdir)
+ rule_suffix = "_".join(opts.replace("-", "_").replace("=", "_").split(" "))
+ native.genrule(
+ name="%s_%s_genrule" % (name, rule_suffix),
+ srcs=td_srcs,
+ outs=[out],
+ tools=[tblgen],
+ message="Generating code from table: %s" % td_file,
+ cmd=(("$(location %s) " + "-I external/llvm/include " +
+ "-I external/llvm/tools/clang/include " +
+ "-I $$(dirname $(location %s)) " + "%s $(location %s) -o $@") % (
+ tblgen, td_file, opts, td_file)))
+ # For now, all generated files can be assumed to comprise public interfaces.
+ # If this is not true, you should specify library = False
+ # and list the generated '.inc' files in "srcs".
+ if library:
+ native.cc_library(name=name, textual_hdrs=[f for (_, f) in tbl_outs],
+ includes=includes, **kwargs)
def llvm_target_cmake_vars(native_arch, target_triple):
- return {
- "LLVM_HOST_TRIPLE": target_triple,
- "LLVM_DEFAULT_TARGET_TRIPLE": target_triple,
- "LLVM_NATIVE_ARCH": native_arch,
- }
+ return {
+ "LLVM_HOST_TRIPLE": target_triple,
+ "LLVM_DEFAULT_TARGET_TRIPLE": target_triple,
+ "LLVM_NATIVE_ARCH": native_arch,
+ }
def _quote(s):
- """Quotes the given string for use in a shell command.
-
- This function double-quotes the given string (in case it contains spaces or
- other special characters) and escapes any special characters (dollar signs,
- double-quotes, and backslashes) that may be present.
-
- Args:
- s: The string to quote.
- Returns:
- An escaped and quoted version of the string that can be passed to a shell
- command.
- """
- return ('"' +
- s.replace("\\", "\\\\").replace("$", "\\$").replace('"', '\\"') +
- '"')
+ """Quotes the given string for use in a shell command.
+
+ This function double-quotes the given string (in case it contains spaces or
+ other special characters) and escapes any special characters (dollar signs,
+ double-quotes, and backslashes) that may be present.
+
+ Args:
+ s: The string to quote.
+ Returns:
+ An escaped and quoted version of the string that can be passed to a shell
+ command.
+ """
+ return ('"' +
+ s.replace("\\", "\\\\").replace("$", "\\$").replace('"', '\\"') +
+ '"')
def cmake_var_string(cmake_vars):
- """Converts a dictionary to an input suitable for expand_cmake_vars.
+ """Converts a dictionary to an input suitable for expand_cmake_vars.
- Ideally we would jist stringify in the expand_cmake_vars() rule, but select()
- interacts badly with genrules.
+ Ideally we would jist stringify in the expand_cmake_vars() rule, but select()
+ interacts badly with genrules.
- TODO(phawkins): replace the genrule() with native rule and delete this rule.
+ TODO(phawkins): replace the genrule() with native rule and delete this rule.
- Args:
- cmake_vars: a dictionary with string keys and values that are convertable to
- strings.
- """
- return " ".join([
- _quote("{}={}".format(k, str(v)))
- for (k, v) in cmake_vars.items()
- ])
+ Args:
+ cmake_vars: a dictionary with string keys and values that are convertable to
+ strings.
+ """
+ return " ".join([_quote("{}={}".format(k, str(v)))
+ for (k, v) in cmake_vars.items()])
def expand_cmake_vars(name, src, dst, cmake_vars):
- """Expands #cmakedefine, #cmakedefine01, and CMake variables in a text file.
-
- Args:
- name: the name of the rule
- src: the input of the rule
- dst: the output of the rule
- cmake_vars: a string containing the CMake variables, as generated by
- cmake_var_string.
- """
- expand_cmake_vars_tool = Label("@org_tensorflow//third_party/llvm:expand_cmake_vars")
- native.genrule(
- name = name,
- srcs = [src],
- tools = [expand_cmake_vars_tool],
- outs = [dst],
- cmd = ("$(location {}) ".format(expand_cmake_vars_tool) + cmake_vars +
- "< $< > $@"),
- )
+ """Expands #cmakedefine, #cmakedefine01, and CMake variables in a text file.
+
+ Args:
+ name: the name of the rule
+ src: the input of the rule
+ dst: the output of the rule
+ cmake_vars: a string containing the CMake variables, as generated by
+ cmake_var_string.
+ """
+ expand_cmake_vars_tool = Label("@org_tensorflow//third_party/llvm:expand_cmake_vars")
+ native.genrule(
+ name = name,
+ srcs = [src],
+ tools = [expand_cmake_vars_tool],
+ outs = [dst],
+ cmd = ("$(location {}) ".format(expand_cmake_vars_tool) + cmake_vars +
+ "< $< > $@")
+ )
# TODO(phawkins): the set of CMake variables was hardcoded for expediency.
# However, we should really detect many of these via configure-time tests.
@@ -225,18 +213,17 @@ darwin_cmake_vars = {
llvm_all_cmake_vars = select({
"@org_tensorflow//tensorflow:darwin": cmake_var_string(
cmake_vars + llvm_target_cmake_vars("X86", "x86_64-apple-darwin") +
- darwin_cmake_vars,
- ),
+ darwin_cmake_vars),
"@org_tensorflow//tensorflow:linux_ppc64le": cmake_var_string(
cmake_vars +
llvm_target_cmake_vars("PowerPC", "powerpc64le-unknown-linux_gnu") +
linux_cmake_vars,
),
"//conditions:default": cmake_var_string(
- cmake_vars +
- llvm_target_cmake_vars("X86", "x86_64-unknown-linux_gnu") +
- linux_cmake_vars,
- ),
+ cmake_vars +
+ llvm_target_cmake_vars("X86", "x86_64-unknown-linux_gnu") +
+ linux_cmake_vars),
+
})
LLVM_LINKOPTS = ["-ldl", "-lm", "-lpthread"]
diff --git a/third_party/mkl/build_defs.bzl b/third_party/mkl/build_defs.bzl
index 6571f38fe6..53e02769da 100644
--- a/third_party/mkl/build_defs.bzl
+++ b/third_party/mkl/build_defs.bzl
@@ -8,8 +8,10 @@ mkl_repository depends on the following environment variables:
* `TF_MKL_ROOT`: The root folder where a copy of libmkl is located.
"""
+
_TF_MKL_ROOT = "TF_MKL_ROOT"
+
def if_mkl(if_true, if_false = []):
"""Shorthand for select()'ing on whether we're building with MKL.
@@ -19,7 +21,7 @@ def if_mkl(if_true, if_false = []):
"""
return select({
str(Label("//third_party/mkl:using_mkl")): if_true,
- "//conditions:default": if_false,
+ "//conditions:default": if_false
})
def if_mkl_lnx_x64(if_true, if_false = []):
@@ -31,34 +33,37 @@ def if_mkl_lnx_x64(if_true, if_false = []):
"""
return select({
str(Label("//third_party/mkl:using_mkl_lnx_x64")): if_true,
- "//conditions:default": if_false,
+ "//conditions:default": if_false
})
+
def _enable_local_mkl(repository_ctx):
- return _TF_MKL_ROOT in repository_ctx.os.environ
+ return _TF_MKL_ROOT in repository_ctx.os.environ
+
def _mkl_autoconf_impl(repository_ctx):
- """Implementation of the local_mkl_autoconf repository rule."""
-
- if _enable_local_mkl(repository_ctx):
- # Symlink lib and include local folders.
- mkl_root = repository_ctx.os.environ[_TF_MKL_ROOT]
- mkl_lib_path = "%s/lib" % mkl_root
- repository_ctx.symlink(mkl_lib_path, "lib")
- mkl_include_path = "%s/include" % mkl_root
- repository_ctx.symlink(mkl_include_path, "include")
- mkl_license_path = "%s/license.txt" % mkl_root
- repository_ctx.symlink(mkl_license_path, "license.txt")
- else:
- # setup remote mkl repository.
- repository_ctx.download_and_extract(
- repository_ctx.attr.urls,
- sha256 = repository_ctx.attr.sha256,
- stripPrefix = repository_ctx.attr.strip_prefix,
- )
-
- # Also setup BUILD file.
- repository_ctx.symlink(repository_ctx.attr.build_file, "BUILD")
+ """Implementation of the local_mkl_autoconf repository rule."""
+
+ if _enable_local_mkl(repository_ctx):
+ # Symlink lib and include local folders.
+ mkl_root = repository_ctx.os.environ[_TF_MKL_ROOT]
+ mkl_lib_path = "%s/lib" % mkl_root
+ repository_ctx.symlink(mkl_lib_path, "lib")
+ mkl_include_path = "%s/include" % mkl_root
+ repository_ctx.symlink(mkl_include_path, "include")
+ mkl_license_path = "%s/license.txt" % mkl_root
+ repository_ctx.symlink(mkl_license_path, "license.txt")
+ else:
+ # setup remote mkl repository.
+ repository_ctx.download_and_extract(
+ repository_ctx.attr.urls,
+ sha256=repository_ctx.attr.sha256,
+ stripPrefix=repository_ctx.attr.strip_prefix,
+ )
+
+ # Also setup BUILD file.
+ repository_ctx.symlink(repository_ctx.attr.build_file, "BUILD")
+
mkl_repository = repository_rule(
implementation = _mkl_autoconf_impl,
diff --git a/third_party/mpi/mpi.bzl b/third_party/mpi/mpi.bzl
index 3a483351d1..38ce91c4d0 100644
--- a/third_party/mpi/mpi.bzl
+++ b/third_party/mpi/mpi.bzl
@@ -2,16 +2,16 @@
#based on the configuration options return one or the other
def mpi_hdr():
- MPI_LIB_IS_OPENMPI = True
- hdrs = []
+ MPI_LIB_IS_OPENMPI=True
+ hdrs = []
if MPI_LIB_IS_OPENMPI:
- hdrs = ["mpi.h", "mpi_portable_platform.h"] #When using OpenMPI
+ hdrs = ["mpi.h", "mpi_portable_platform.h"] #When using OpenMPI
else:
- hdrs = ["mpi.h", "mpio.h", "mpicxx.h"] #When using MVAPICH
+ hdrs = ["mpi.h", "mpio.h", "mpicxx.h"] #When using MVAPICH
return hdrs
def if_mpi(if_true, if_false = []):
return select({
"//tensorflow:with_mpi_support": if_true,
- "//conditions:default": if_false,
+ "//conditions:default": if_false
})
diff --git a/third_party/repo.bzl b/third_party/repo.bzl
index d020248ac9..9cee1fcc4b 100644
--- a/third_party/repo.bzl
+++ b/third_party/repo.bzl
@@ -19,98 +19,90 @@ _SINGLE_URL_WHITELIST = depset([
])
def _is_windows(ctx):
- return ctx.os.name.lower().find("windows") != -1
+ return ctx.os.name.lower().find("windows") != -1
def _wrap_bash_cmd(ctx, cmd):
- if _is_windows(ctx):
- bazel_sh = _get_env_var(ctx, "BAZEL_SH")
- if not bazel_sh:
- fail("BAZEL_SH environment variable is not set")
- cmd = [bazel_sh, "-l", "-c", " ".join(cmd)]
- return cmd
+ if _is_windows(ctx):
+ bazel_sh = _get_env_var(ctx, "BAZEL_SH")
+ if not bazel_sh:
+ fail("BAZEL_SH environment variable is not set")
+ cmd = [bazel_sh, "-l", "-c", " ".join(cmd)]
+ return cmd
def _get_env_var(ctx, name):
- if name in ctx.os.environ:
- return ctx.os.environ[name]
- else:
- return None
+ if name in ctx.os.environ:
+ return ctx.os.environ[name]
+ else:
+ return None
# Executes specified command with arguments and calls 'fail' if it exited with
# non-zero code
def _execute_and_check_ret_code(repo_ctx, cmd_and_args):
- result = repo_ctx.execute(cmd_and_args, timeout = 10)
- if result.return_code != 0:
- fail(("Non-zero return code({1}) when executing '{0}':\n" + "Stdout: {2}\n" +
- "Stderr: {3}").format(
- " ".join(cmd_and_args),
- result.return_code,
- result.stdout,
- result.stderr,
- ))
+ result = repo_ctx.execute(cmd_and_args, timeout=10)
+ if result.return_code != 0:
+ fail(("Non-zero return code({1}) when executing '{0}':\n" + "Stdout: {2}\n"
+ + "Stderr: {3}").format(" ".join(cmd_and_args), result.return_code,
+ result.stdout, result.stderr))
def _repos_are_siblings():
- return Label("@foo//bar").workspace_root.startswith("../")
+ return Label("@foo//bar").workspace_root.startswith("../")
# Apply a patch_file to the repository root directory
# Runs 'patch -p1'
def _apply_patch(ctx, patch_file):
- # Don't check patch on Windows, because patch is only available under bash.
- if not _is_windows(ctx) and not ctx.which("patch"):
- fail("patch command is not found, please install it")
- cmd = _wrap_bash_cmd(
- ctx,
- ["patch", "-p1", "-d", ctx.path("."), "-i", ctx.path(patch_file)],
- )
- _execute_and_check_ret_code(ctx, cmd)
+ # Don't check patch on Windows, because patch is only available under bash.
+ if not _is_windows(ctx) and not ctx.which("patch"):
+ fail("patch command is not found, please install it")
+ cmd = _wrap_bash_cmd(
+ ctx, ["patch", "-p1", "-d", ctx.path("."), "-i", ctx.path(patch_file)])
+ _execute_and_check_ret_code(ctx, cmd)
def _apply_delete(ctx, paths):
- for path in paths:
- if path.startswith("/"):
- fail("refusing to rm -rf path starting with '/': " + path)
- if ".." in path:
- fail("refusing to rm -rf path containing '..': " + path)
- cmd = _wrap_bash_cmd(ctx, ["rm", "-rf"] + [ctx.path(path) for path in paths])
- _execute_and_check_ret_code(ctx, cmd)
+ for path in paths:
+ if path.startswith("/"):
+ fail("refusing to rm -rf path starting with '/': " + path)
+ if ".." in path:
+ fail("refusing to rm -rf path containing '..': " + path)
+ cmd = _wrap_bash_cmd(ctx, ["rm", "-rf"] + [ctx.path(path) for path in paths])
+ _execute_and_check_ret_code(ctx, cmd)
def _tf_http_archive(ctx):
- if ("mirror.bazel.build" not in ctx.attr.urls[0] and
- (len(ctx.attr.urls) < 2 and
- ctx.attr.name not in _SINGLE_URL_WHITELIST)):
- fail("tf_http_archive(urls) must have redundant URLs. The " +
- "mirror.bazel.build URL must be present and it must come first. " +
- "Even if you don't have permission to mirror the file, please " +
- "put the correctly formatted mirror URL there anyway, because " +
- "someone will come along shortly thereafter and mirror the file.")
- ctx.download_and_extract(
- ctx.attr.urls,
- "",
- ctx.attr.sha256,
- ctx.attr.type,
- ctx.attr.strip_prefix,
- )
- if ctx.attr.delete:
- _apply_delete(ctx, ctx.attr.delete)
- if ctx.attr.patch_file != None:
- _apply_patch(ctx, ctx.attr.patch_file)
- if ctx.attr.build_file != None:
- # Use BUILD.bazel to avoid conflict with third party projects with
- # BUILD or build (directory) underneath.
- ctx.template("BUILD.bazel", ctx.attr.build_file, {
- "%prefix%": ".." if _repos_are_siblings() else "external",
- }, False)
+ if ("mirror.bazel.build" not in ctx.attr.urls[0] and
+ (len(ctx.attr.urls) < 2 and
+ ctx.attr.name not in _SINGLE_URL_WHITELIST)):
+ fail("tf_http_archive(urls) must have redundant URLs. The " +
+ "mirror.bazel.build URL must be present and it must come first. " +
+ "Even if you don't have permission to mirror the file, please " +
+ "put the correctly formatted mirror URL there anyway, because " +
+ "someone will come along shortly thereafter and mirror the file.")
+ ctx.download_and_extract(
+ ctx.attr.urls,
+ "",
+ ctx.attr.sha256,
+ ctx.attr.type,
+ ctx.attr.strip_prefix)
+ if ctx.attr.delete:
+ _apply_delete(ctx, ctx.attr.delete)
+ if ctx.attr.patch_file != None:
+ _apply_patch(ctx, ctx.attr.patch_file)
+ if ctx.attr.build_file != None:
+ # Use BUILD.bazel to avoid conflict with third party projects with
+ # BUILD or build (directory) underneath.
+ ctx.template("BUILD.bazel", ctx.attr.build_file, {
+ "%prefix%": ".." if _repos_are_siblings() else "external",
+ }, False)
tf_http_archive = repository_rule(
- implementation = _tf_http_archive,
- attrs = {
- "sha256": attr.string(mandatory = True),
- "urls": attr.string_list(mandatory = True, allow_empty = False),
+ implementation=_tf_http_archive,
+ attrs={
+ "sha256": attr.string(mandatory=True),
+ "urls": attr.string_list(mandatory=True, allow_empty=False),
"strip_prefix": attr.string(),
"type": attr.string(),
"delete": attr.string_list(),
"patch_file": attr.label(),
"build_file": attr.label(),
- },
-)
+ })
"""Downloads and creates Bazel repos for dependencies.
This is a swappable replacement for both http_archive() and
diff --git a/third_party/sycl/sycl_configure.bzl b/third_party/sycl/sycl_configure.bzl
index deba6c4116..5b9d0eb383 100644
--- a/third_party/sycl/sycl_configure.bzl
+++ b/third_party/sycl/sycl_configure.bzl
@@ -11,124 +11,122 @@
"""
_HOST_CXX_COMPILER = "HOST_CXX_COMPILER"
-_HOST_C_COMPILER = "HOST_C_COMPILER"
+_HOST_C_COMPILER = "HOST_C_COMPILER"
_COMPUTECPP_TOOLKIT_PATH = "COMPUTECPP_TOOLKIT_PATH"
_TRISYCL_INCLUDE_DIR = "TRISYCL_INCLUDE_DIR"
_PYTHON_LIB_PATH = "PYTHON_LIB_PATH"
def _enable_sycl(repository_ctx):
- if "TF_NEED_OPENCL_SYCL" in repository_ctx.os.environ:
- enable_sycl = repository_ctx.os.environ["TF_NEED_OPENCL_SYCL"].strip()
- return enable_sycl == "1"
- return False
+ if "TF_NEED_OPENCL_SYCL" in repository_ctx.os.environ:
+ enable_sycl = repository_ctx.os.environ["TF_NEED_OPENCL_SYCL"].strip()
+ return enable_sycl == "1"
+ return False
def _enable_compute_cpp(repository_ctx):
- return _COMPUTECPP_TOOLKIT_PATH in repository_ctx.os.environ
+ return _COMPUTECPP_TOOLKIT_PATH in repository_ctx.os.environ
def auto_configure_fail(msg):
- """Output failure message when auto configuration fails."""
- red = "\033[0;31m"
- no_color = "\033[0m"
- fail("\n%sAuto-Configuration Error:%s %s\n" % (red, no_color, msg))
-
+ """Output failure message when auto configuration fails."""
+ red = "\033[0;31m"
+ no_color = "\033[0m"
+ fail("\n%sAuto-Configuration Error:%s %s\n" % (red, no_color, msg))
# END cc_configure common functions (see TODO above).
def find_c(repository_ctx):
- """Find host C compiler."""
- c_name = "gcc"
- if _HOST_C_COMPILER in repository_ctx.os.environ:
- c_name = repository_ctx.os.environ[_HOST_C_COMPILER].strip()
- if c_name.startswith("/"):
- return c_name
- c = repository_ctx.which(c_name)
- if c == None:
- fail("Cannot find C compiler, please correct your path.")
- return c
+ """Find host C compiler."""
+ c_name = "gcc"
+ if _HOST_C_COMPILER in repository_ctx.os.environ:
+ c_name = repository_ctx.os.environ[_HOST_C_COMPILER].strip()
+ if c_name.startswith("/"):
+ return c_name
+ c = repository_ctx.which(c_name)
+ if c == None:
+ fail("Cannot find C compiler, please correct your path.")
+ return c
def find_cc(repository_ctx):
- """Find host C++ compiler."""
- cc_name = "g++"
- if _HOST_CXX_COMPILER in repository_ctx.os.environ:
- cc_name = repository_ctx.os.environ[_HOST_CXX_COMPILER].strip()
- if cc_name.startswith("/"):
- return cc_name
- cc = repository_ctx.which(cc_name)
- if cc == None:
- fail("Cannot find C++ compiler, please correct your path.")
- return cc
+ """Find host C++ compiler."""
+ cc_name = "g++"
+ if _HOST_CXX_COMPILER in repository_ctx.os.environ:
+ cc_name = repository_ctx.os.environ[_HOST_CXX_COMPILER].strip()
+ if cc_name.startswith("/"):
+ return cc_name
+ cc = repository_ctx.which(cc_name)
+ if cc == None:
+ fail("Cannot find C++ compiler, please correct your path.")
+ return cc
def find_computecpp_root(repository_ctx):
- """Find ComputeCpp compiler."""
- sycl_name = ""
- if _COMPUTECPP_TOOLKIT_PATH in repository_ctx.os.environ:
- sycl_name = repository_ctx.os.environ[_COMPUTECPP_TOOLKIT_PATH].strip()
- if sycl_name.startswith("/"):
- return sycl_name
- fail("Cannot find SYCL compiler, please correct your path")
+ """Find ComputeCpp compiler."""
+ sycl_name = ""
+ if _COMPUTECPP_TOOLKIT_PATH in repository_ctx.os.environ:
+ sycl_name = repository_ctx.os.environ[_COMPUTECPP_TOOLKIT_PATH].strip()
+ if sycl_name.startswith("/"):
+ return sycl_name
+ fail("Cannot find SYCL compiler, please correct your path")
def find_trisycl_include_dir(repository_ctx):
- """Find triSYCL include directory. """
- if _TRISYCL_INCLUDE_DIR in repository_ctx.os.environ:
- sycl_name = repository_ctx.os.environ[_TRISYCL_INCLUDE_DIR].strip()
- if sycl_name.startswith("/"):
- return sycl_name
- fail("Cannot find triSYCL include directory, please correct your path")
+ """Find triSYCL include directory. """
+ if _TRISYCL_INCLUDE_DIR in repository_ctx.os.environ:
+ sycl_name = repository_ctx.os.environ[_TRISYCL_INCLUDE_DIR].strip()
+ if sycl_name.startswith("/"):
+ return sycl_name
+    fail("Cannot find triSYCL include directory, please correct your path")
def find_python_lib(repository_ctx):
- """Returns python path."""
- if _PYTHON_LIB_PATH in repository_ctx.os.environ:
- return repository_ctx.os.environ[_PYTHON_LIB_PATH].strip()
- fail("Environment variable PYTHON_LIB_PATH was not specified re-run ./configure")
+ """Returns python path."""
+ if _PYTHON_LIB_PATH in repository_ctx.os.environ:
+ return repository_ctx.os.environ[_PYTHON_LIB_PATH].strip()
+ fail("Environment variable PYTHON_LIB_PATH was not specified re-run ./configure")
+
def _check_lib(repository_ctx, toolkit_path, lib):
- """Checks if lib exists under sycl_toolkit_path or fail if it doesn't.
+ """Checks if lib exists under sycl_toolkit_path or fail if it doesn't.
- Args:
- repository_ctx: The repository context.
- toolkit_path: The toolkit directory containing the libraries.
- ib: The library to look for under toolkit_path.
- """
- lib_path = toolkit_path + "/" + lib
- if not repository_ctx.path(lib_path).exists:
- auto_configure_fail("Cannot find %s" % lib_path)
+ Args:
+ repository_ctx: The repository context.
+ toolkit_path: The toolkit directory containing the libraries.
+    lib: The library to look for under toolkit_path.
+ """
+ lib_path = toolkit_path + "/" + lib
+ if not repository_ctx.path(lib_path).exists:
+ auto_configure_fail("Cannot find %s" % lib_path)
def _check_dir(repository_ctx, directory):
- """Checks whether the directory exists and fail if it does not.
+ """Checks whether the directory exists and fail if it does not.
- Args:
- repository_ctx: The repository context.
- directory: The directory to check the existence of.
- """
- if not repository_ctx.path(directory).exists:
- auto_configure_fail("Cannot find dir: %s" % directory)
+ Args:
+ repository_ctx: The repository context.
+ directory: The directory to check the existence of.
+ """
+ if not repository_ctx.path(directory).exists:
+ auto_configure_fail("Cannot find dir: %s" % directory)
def _symlink_dir(repository_ctx, src_dir, dest_dir):
- """Symlinks all the files in a directory.
-
- Args:
- repository_ctx: The repository context.
- src_dir: The source directory.
- dest_dir: The destination directory to create the symlinks in.
- """
- files = repository_ctx.path(src_dir).readdir()
- for src_file in files:
- repository_ctx.symlink(src_file, dest_dir + "/" + src_file.basename)
-
-def _tpl(repository_ctx, tpl, substitutions = {}, out = None):
- if not out:
- out = tpl.replace(":", "/")
- repository_ctx.template(
- out,
- Label("//third_party/sycl/%s.tpl" % tpl),
- substitutions,
- )
+ """Symlinks all the files in a directory.
+
+ Args:
+ repository_ctx: The repository context.
+ src_dir: The source directory.
+ dest_dir: The destination directory to create the symlinks in.
+ """
+ files = repository_ctx.path(src_dir).readdir()
+ for src_file in files:
+ repository_ctx.symlink(src_file, dest_dir + "/" + src_file.basename)
+
+def _tpl(repository_ctx, tpl, substitutions={}, out=None):
+ if not out:
+ out = tpl.replace(":", "/")
+ repository_ctx.template(
+ out,
+ Label("//third_party/sycl/%s.tpl" % tpl),
+ substitutions)
def _file(repository_ctx, label):
- repository_ctx.template(
- label.replace(":", "/"),
- Label("//third_party/sycl/%s" % label),
- {},
- )
+ repository_ctx.template(
+ label.replace(":", "/"),
+ Label("//third_party/sycl/%s" % label),
+ {})
_DUMMY_CROSSTOOL_BZL_FILE = """
def error_sycl_disabled():
@@ -149,6 +147,7 @@ def error_sycl_disabled():
)
"""
+
_DUMMY_CROSSTOOL_BUILD_FILE = """
load("//crosstool:error_sycl_disabled.bzl", "error_sycl_disabled")
@@ -156,97 +155,87 @@ error_sycl_disabled()
"""
def _create_dummy_repository(repository_ctx):
- # Set up BUILD file for sycl/.
+ # Set up BUILD file for sycl/.
+ _tpl(repository_ctx, "sycl:build_defs.bzl")
+ _tpl(repository_ctx, "sycl:BUILD")
+ _file(repository_ctx, "sycl:LICENSE.text")
+ _tpl(repository_ctx, "sycl:platform.bzl")
+
+ # Create dummy files for the SYCL toolkit since they are still required by
+ # tensorflow/sycl/platform/default/build_config:sycl.
+ repository_ctx.file("sycl/include/sycl.hpp", "")
+ repository_ctx.file("sycl/lib/libComputeCpp.so", "")
+
+ # If sycl_configure is not configured to build with SYCL support, and the user
+ # attempts to build with --config=sycl, add a dummy build rule to intercept
+ # this and fail with an actionable error message.
+ repository_ctx.file("crosstool/error_sycl_disabled.bzl",
+ _DUMMY_CROSSTOOL_BZL_FILE)
+ repository_ctx.file("crosstool/BUILD", _DUMMY_CROSSTOOL_BUILD_FILE)
+
+
+def _sycl_autoconf_imp(repository_ctx):
+ """Implementation of the sycl_autoconf rule."""
+ if not _enable_sycl(repository_ctx):
+ _create_dummy_repository(repository_ctx)
+ else:
+ # copy template files
_tpl(repository_ctx, "sycl:build_defs.bzl")
_tpl(repository_ctx, "sycl:BUILD")
- _file(repository_ctx, "sycl:LICENSE.text")
_tpl(repository_ctx, "sycl:platform.bzl")
+ _tpl(repository_ctx, "crosstool:BUILD")
+ _file(repository_ctx, "sycl:LICENSE.text")
- # Create dummy files for the SYCL toolkit since they are still required by
- # tensorflow/sycl/platform/default/build_config:sycl.
- repository_ctx.file("sycl/include/sycl.hpp", "")
- repository_ctx.file("sycl/lib/libComputeCpp.so", "")
+ if _enable_compute_cpp(repository_ctx):
+ _tpl(repository_ctx, "crosstool:computecpp",
+ {
+ "%{host_cxx_compiler}" : find_cc(repository_ctx),
+ "%{host_c_compiler}" : find_c(repository_ctx)
+ })
+
+      computecpp_root = find_computecpp_root(repository_ctx)
+ _check_dir(repository_ctx, computecpp_root)
+
+ _tpl(repository_ctx, "crosstool:CROSSTOOL",
+ {
+ "%{sycl_include_dir}" : computecpp_root,
+ "%{sycl_impl}" : "computecpp",
+ "%{c++_std}" : "-std=c++11",
+ "%{python_lib_path}" : find_python_lib(repository_ctx),
+ })
+
+ # symlink libraries
+ _check_lib(repository_ctx, computecpp_root+"/lib", "libComputeCpp.so" )
+ _symlink_dir(repository_ctx, computecpp_root + "/lib", "sycl/lib")
+ _symlink_dir(repository_ctx, computecpp_root + "/include", "sycl/include")
+ _symlink_dir(repository_ctx, computecpp_root + "/bin", "sycl/bin")
+ else:
- # If sycl_configure is not configured to build with SYCL support, and the user
- # attempts to build with --config=sycl, add a dummy build rule to intercept
- # this and fail with an actionable error message.
- repository_ctx.file(
- "crosstool/error_sycl_disabled.bzl",
- _DUMMY_CROSSTOOL_BZL_FILE,
- )
- repository_ctx.file("crosstool/BUILD", _DUMMY_CROSSTOOL_BUILD_FILE)
+      trisycl_include_dir = find_trisycl_include_dir(repository_ctx)
+ _check_dir(repository_ctx, trisycl_include_dir)
+
+ _tpl(repository_ctx, "crosstool:trisycl",
+ {
+ "%{host_cxx_compiler}" : find_cc(repository_ctx),
+ "%{host_c_compiler}" : find_c(repository_ctx),
+ "%{trisycl_include_dir}" : trisycl_include_dir
+ })
+
+
+ _tpl(repository_ctx, "crosstool:CROSSTOOL",
+ {
+ "%{sycl_include_dir}" : trisycl_include_dir,
+ "%{sycl_impl}" : "trisycl",
+ "%{c++_std}" : "-std=c++1y",
+ "%{python_lib_path}" : find_python_lib(repository_ctx),
+ })
+
+ _symlink_dir(repository_ctx, trisycl_include_dir, "sycl/include")
-def _sycl_autoconf_imp(repository_ctx):
- """Implementation of the sycl_autoconf rule."""
- if not _enable_sycl(repository_ctx):
- _create_dummy_repository(repository_ctx)
- else:
- # copy template files
- _tpl(repository_ctx, "sycl:build_defs.bzl")
- _tpl(repository_ctx, "sycl:BUILD")
- _tpl(repository_ctx, "sycl:platform.bzl")
- _tpl(repository_ctx, "crosstool:BUILD")
- _file(repository_ctx, "sycl:LICENSE.text")
-
- if _enable_compute_cpp(repository_ctx):
- _tpl(
- repository_ctx,
- "crosstool:computecpp",
- {
- "%{host_cxx_compiler}": find_cc(repository_ctx),
- "%{host_c_compiler}": find_c(repository_ctx),
- },
- )
-
- computecpp_root = find_computecpp_root(repository_ctx)
- _check_dir(repository_ctx, computecpp_root)
-
- _tpl(
- repository_ctx,
- "crosstool:CROSSTOOL",
- {
- "%{sycl_include_dir}": computecpp_root,
- "%{sycl_impl}": "computecpp",
- "%{c++_std}": "-std=c++11",
- "%{python_lib_path}": find_python_lib(repository_ctx),
- },
- )
-
- # symlink libraries
- _check_lib(repository_ctx, computecpp_root + "/lib", "libComputeCpp.so")
- _symlink_dir(repository_ctx, computecpp_root + "/lib", "sycl/lib")
- _symlink_dir(repository_ctx, computecpp_root + "/include", "sycl/include")
- _symlink_dir(repository_ctx, computecpp_root + "/bin", "sycl/bin")
- else:
- trisycl_include_dir = find_trisycl_include_dir(repository_ctx)
- _check_dir(repository_ctx, trisycl_include_dir)
-
- _tpl(
- repository_ctx,
- "crosstool:trisycl",
- {
- "%{host_cxx_compiler}": find_cc(repository_ctx),
- "%{host_c_compiler}": find_c(repository_ctx),
- "%{trisycl_include_dir}": trisycl_include_dir,
- },
- )
-
- _tpl(
- repository_ctx,
- "crosstool:CROSSTOOL",
- {
- "%{sycl_include_dir}": trisycl_include_dir,
- "%{sycl_impl}": "trisycl",
- "%{c++_std}": "-std=c++1y",
- "%{python_lib_path}": find_python_lib(repository_ctx),
- },
- )
-
- _symlink_dir(repository_ctx, trisycl_include_dir, "sycl/include")
sycl_configure = repository_rule(
- implementation = _sycl_autoconf_imp,
- local = True,
+ implementation = _sycl_autoconf_imp,
+ local = True,
)
"""Detects and configures the SYCL toolchain.
diff --git a/third_party/toolchains/clang6/repo.bzl b/third_party/toolchains/clang6/repo.bzl
index e4b6422c96..b81f44506f 100644
--- a/third_party/toolchains/clang6/repo.bzl
+++ b/third_party/toolchains/clang6/repo.bzl
@@ -1,37 +1,30 @@
"""Repository rule for Debian 8 Jessie Clang-6.0 portable Linux builds."""
def _clang6_configure(ctx):
- # TODO(jart): It'd probably be better to use Bazel's struct.to_proto()
- # method to generate a gigantic CROSSTOOL file that allows
- # Clang to support everything.
- ctx.symlink(
- ctx.os.environ.get(
- "TF_LLVM_PATH",
- "/usr/lib/llvm-6.0",
- ),
- "clang6/llvm",
- )
- ctx.symlink(
- ctx.os.environ.get("STRIP", "/usr/bin/strip"),
- "clang6/sbin/strip",
- )
- ctx.symlink(
- ctx.os.environ.get("OBJDUMP", "/usr/bin/objdump"),
- "clang6/sbin/objdump",
- )
- ctx.symlink(ctx.attr._build, "clang6/BUILD")
- ctx.template("clang6/CROSSTOOL", ctx.attr._crosstool, {
- "%package(@local_config_clang6//clang6)%": str(ctx.path("clang6")),
- })
+ # TODO(jart): It'd probably be better to use Bazel's struct.to_proto()
+ # method to generate a gigantic CROSSTOOL file that allows
+ # Clang to support everything.
+ ctx.symlink(
+ ctx.os.environ.get('TF_LLVM_PATH',
+ '/usr/lib/llvm-6.0'),
+ 'clang6/llvm')
+ ctx.symlink(
+ ctx.os.environ.get('STRIP', '/usr/bin/strip'),
+ 'clang6/sbin/strip')
+ ctx.symlink(
+ ctx.os.environ.get('OBJDUMP', '/usr/bin/objdump'),
+ 'clang6/sbin/objdump')
+ ctx.symlink(ctx.attr._build, 'clang6/BUILD')
+ ctx.template('clang6/CROSSTOOL', ctx.attr._crosstool, {
+ '%package(@local_config_clang6//clang6)%': str(ctx.path('clang6')),
+ })
clang6_configure = repository_rule(
implementation = _clang6_configure,
attrs = {
- "_build": attr.label(
- default = str(Label("//third_party/toolchains/clang6:clang.BUILD")),
- ),
- "_crosstool": attr.label(
- default = str(Label("//third_party/toolchains/clang6:CROSSTOOL.tpl")),
- ),
+ '_build': attr.label(
+ default=str(Label('//third_party/toolchains/clang6:clang.BUILD'))),
+ '_crosstool': attr.label(
+ default=str(Label('//third_party/toolchains/clang6:CROSSTOOL.tpl'))),
},
)
diff --git a/third_party/toolchains/cpus/arm/arm_compiler_configure.bzl b/third_party/toolchains/cpus/arm/arm_compiler_configure.bzl
index d675e95f70..ab6eac115c 100644
--- a/third_party/toolchains/cpus/arm/arm_compiler_configure.bzl
+++ b/third_party/toolchains/cpus/arm/arm_compiler_configure.bzl
@@ -1,38 +1,38 @@
# -*- Python -*-
"""Repository rule for arm compiler autoconfiguration."""
-def _tpl(repository_ctx, tpl, substitutions = {}, out = None):
- if not out:
- out = tpl
- repository_ctx.template(
- out,
- Label("//third_party/toolchains/cpus/arm:%s.tpl" % tpl),
- substitutions,
- )
+def _tpl(repository_ctx, tpl, substitutions={}, out=None):
+ if not out:
+ out = tpl
+ repository_ctx.template(
+ out,
+ Label("//third_party/toolchains/cpus/arm:%s.tpl" % tpl),
+ substitutions)
+
def _arm_compiler_configure_impl(repository_ctx):
- # We need to find a cross-compilation include directory for Python, so look
- # for an environment variable. Be warned, this crosstool template is only
- # regenerated on the first run of Bazel, so if you change the variable after
- # it may not be reflected in later builds. Doing a shutdown and clean of Bazel
- # doesn't fix this, you'll need to delete the generated file at something like:
- # external/local_config_arm_compiler/CROSSTOOL in your Bazel install.
- if "CROSSTOOL_PYTHON_INCLUDE_PATH" in repository_ctx.os.environ:
- python_include_path = repository_ctx.os.environ["CROSSTOOL_PYTHON_INCLUDE_PATH"]
- else:
- python_include_path = "/usr/include/python2.7"
- _tpl(repository_ctx, "CROSSTOOL", {
- "%{ARM_COMPILER_PATH}%": str(repository_ctx.path(
- repository_ctx.attr.remote_config_repo,
- )),
- "%{PYTHON_INCLUDE_PATH}%": python_include_path,
- })
- repository_ctx.symlink(repository_ctx.attr.build_file, "BUILD")
+ # We need to find a cross-compilation include directory for Python, so look
+ # for an environment variable. Be warned, this crosstool template is only
+ # regenerated on the first run of Bazel, so if you change the variable after
+ # it may not be reflected in later builds. Doing a shutdown and clean of Bazel
+ # doesn't fix this, you'll need to delete the generated file at something like:
+ # external/local_config_arm_compiler/CROSSTOOL in your Bazel install.
+ if "CROSSTOOL_PYTHON_INCLUDE_PATH" in repository_ctx.os.environ:
+ python_include_path = repository_ctx.os.environ["CROSSTOOL_PYTHON_INCLUDE_PATH"]
+ else:
+ python_include_path = "/usr/include/python2.7"
+ _tpl(repository_ctx, "CROSSTOOL", {
+ "%{ARM_COMPILER_PATH}%": str(repository_ctx.path(
+ repository_ctx.attr.remote_config_repo)),
+ "%{PYTHON_INCLUDE_PATH}%": python_include_path,
+ })
+ repository_ctx.symlink(repository_ctx.attr.build_file, "BUILD")
+
arm_compiler_configure = repository_rule(
implementation = _arm_compiler_configure_impl,
attrs = {
- "remote_config_repo": attr.string(mandatory = False, default = ""),
+ "remote_config_repo": attr.string(mandatory = False, default =""),
"build_file": attr.label(),
},
)
diff --git a/third_party/toolchains/gpus/cuda/build_defs.bzl b/third_party/toolchains/gpus/cuda/build_defs.bzl
index 7295ecb3b4..badaf43019 100644
--- a/third_party/toolchains/gpus/cuda/build_defs.bzl
+++ b/third_party/toolchains/gpus/cuda/build_defs.bzl
@@ -12,13 +12,15 @@ def if_cuda(if_true, if_false = []):
return select({
"@local_config_cuda//cuda:using_nvcc": if_true,
"@local_config_cuda//cuda:using_clang": if_true,
- "//conditions:default": if_false,
+ "//conditions:default": if_false
})
+
def cuda_default_copts():
"""Default options for all CUDA compilations."""
return if_cuda(["-x", "cuda", "-DGOOGLE_CUDA=1"] + ["--cuda-gpu-arch=sm_30"])
+
def cuda_is_configured():
"""Returns true if CUDA was enabled during the configure process."""
return True
@@ -30,5 +32,6 @@ def if_cuda_is_configured(x):
--config=cuda. Used to allow non-CUDA code to depend on CUDA libraries.
"""
if cuda_is_configured():
- return x
+ return x
return []
+