author     Justine Tunney <jart@google.com>    2017-12-01 16:02:57 -0800
committer  Gunhan Gulsoy <gunan@google.com>    2017-12-01 16:02:57 -0800
commit     efbdc15b280374607895ab0ada467de4a0512e0c (patch)
tree       5a9d94d1fa0dd43c5df01337da1dc996dbc464e4
parent     cffd79f4b102c2082cbcc258abf7ed06df8c141c (diff)
Introduce tf_http_archive (#15018)
This new repository rule consolidates patched_http_archive, temp_workaround_http_archive, http_archive, and new_http_archive. The following behaviors have been introduced:
- A delete attribute that can rm -rf certain repo content after extraction
- Helpful error messages when mirroring requirements aren't followed
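
For illustration, a dependency declared with the new rule looks roughly like the sketch below. The name, URLs, and checksum are placeholders, not part of this change; the attribute set matches the tf_http_archive rule added in third_party/repo.bzl.

    tf_http_archive(
        name = "some_dependency",  # illustrative repository name
        urls = [
            # The mirror.bazel.build URL must come first, followed by the
            # upstream URL; otherwise the rule fails with an explanatory error.
            "https://mirror.bazel.build/github.com/example/some_dependency/archive/v1.0.tar.gz",
            "https://github.com/example/some_dependency/archive/v1.0.tar.gz",
        ],
        sha256 = "0000000000000000000000000000000000000000000000000000000000000000",  # placeholder checksum
        strip_prefix = "some_dependency-1.0",
        build_file = str(Label("//third_party:some_dependency.BUILD")),  # optional, as with new_http_archive
        delete = ["BUILD.bazel"],  # new: paths removed after extraction
    )
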
-rwxr-xr-x  tensorflow/contrib/makefile/download_dependencies.sh |   2
-rw-r--r--  tensorflow/workspace.bzl                              | 385
-rw-r--r--  third_party/aws.BUILD                                 |  16
-rw-r--r--  third_party/curl.BUILD                                |  38
-rw-r--r--  third_party/gif.BUILD                                 |   2
-rw-r--r--  third_party/jemalloc.BUILD                            |  10
-rw-r--r--  third_party/jpeg/jpeg.BUILD                           |   2
-rw-r--r--  third_party/nccl.BUILD                                |   8
-rw-r--r--  third_party/repo.bzl                                  | 103
-rw-r--r--  third_party/snappy.BUILD                              |   4
10 files changed, 286 insertions, 284 deletions
diff --git a/tensorflow/contrib/makefile/download_dependencies.sh b/tensorflow/contrib/makefile/download_dependencies.sh
index 904118e2d9..675ab24289 100755
--- a/tensorflow/contrib/makefile/download_dependencies.sh
+++ b/tensorflow/contrib/makefile/download_dependencies.sh
@@ -33,7 +33,7 @@ NSYNC_URL="$(grep -o 'https://mirror.bazel.build/github.com/google/nsync/.*tar\.
PROTOBUF_URL="$(grep -o 'https://mirror.bazel.build/github.com/google/protobuf/.*tar\.gz' "${BZL_FILE_PATH}" | head -n1)"
RE2_URL="$(grep -o 'https://mirror.bazel.build/github.com/google/re2/.*tar\.gz' "${BZL_FILE_PATH}" | head -n1)"
FFT2D_URL="$(grep -o 'http.*fft\.tgz' "${BZL_FILE_PATH}" | grep -v mirror.bazel | head -n1)"
-DOUBLE_CONVERSION_URL="$(grep -o "https.*google/double-conversion.*\.zip" "${BZL_FILE_PATH}" | head -n1)"
+DOUBLE_CONVERSION_URL="$(grep -o "https.*google/double-conversion.*\.tar.gz" "${BZL_FILE_PATH}" | grep -v mirror.bazel | head -n1)"
ABSL_URL="$(grep -o 'https://github.com/abseil/abseil-cpp/.*tar.gz' "${BZL_FILE_PATH}" | head -n1)"
# TODO(petewarden): Some new code in Eigen triggers a clang bug with iOS arm64,
diff --git a/tensorflow/workspace.bzl b/tensorflow/workspace.bzl
index 6b13271002..43c8de5303 100644
--- a/tensorflow/workspace.bzl
+++ b/tensorflow/workspace.bzl
@@ -1,40 +1,21 @@
# TensorFlow external dependencies that can be loaded in WORKSPACE files.
load("//third_party/gpus:cuda_configure.bzl", "cuda_configure")
-
-load("//third_party/sycl:sycl_configure.bzl", "sycl_configure")
load("//third_party/mkl:build_defs.bzl", "mkl_repository")
-load(
- "@io_bazel_rules_closure//closure/private:java_import_external.bzl",
- "java_import_external",
-)
-load("@io_bazel_rules_closure//closure:defs.bzl", "filegroup_external")
load("//third_party/py:python_configure.bzl", "python_configure")
-load(
- "//third_party/toolchains/cpus/arm:arm_compiler_configure.bzl",
- "arm_compiler_configure",
-)
-
-def _is_windows(repository_ctx):
- """Returns true if the host operating system is windows."""
- return repository_ctx.os.name.lower().find("windows") != -1
-
-def _get_env_var(repository_ctx, name):
- """Find an environment variable."""
- if name in repository_ctx.os.environ:
- return repository_ctx.os.environ[name]
- else:
- return None
+load("//third_party/sycl:sycl_configure.bzl", "sycl_configure")
+load("//third_party/toolchains/cpus/arm:arm_compiler_configure.bzl", "arm_compiler_configure")
+load("//third_party:repo.bzl", "tf_http_archive")
+load("@io_bazel_rules_closure//closure/private:java_import_external.bzl", "java_import_external")
+load("@io_bazel_rules_closure//closure:defs.bzl", "filegroup_external")
# Parse the bazel version string from `native.bazel_version`.
def _parse_bazel_version(bazel_version):
# Remove commit from version.
version = bazel_version.split(" ", 1)[0]
-
# Split into (release, date) parts and only return the release
# as a tuple of integers.
parts = version.split("-", 1)
-
# Turn "release" into a tuple of strings
version_tuple = ()
for number in parts[0].split("."):
@@ -57,79 +38,6 @@ def check_version(bazel_version):
fail("\nCurrent Bazel version is {}, expected at least {}\n".format(
native.bazel_version, bazel_version))
-def _repos_are_siblings():
- return Label("@foo//bar").workspace_root.startswith("../")
-
-# Temporary workaround to support including TensorFlow as a submodule until this
-# use-case is supported in the next Bazel release.
-def _temp_workaround_http_archive_impl(repo_ctx):
- repo_ctx.template("BUILD", repo_ctx.attr.build_file, {
- "%prefix%": ".." if _repos_are_siblings() else "external",
- "%ws%": repo_ctx.attr.repository
- }, False)
- repo_ctx.download_and_extract(repo_ctx.attr.urls, "", repo_ctx.attr.sha256,
- "", repo_ctx.attr.strip_prefix)
- if repo_ctx.attr.patch_file != None:
- _apply_patch(repo_ctx, repo_ctx.attr.patch_file)
-
-temp_workaround_http_archive = repository_rule(
- attrs = {
- "build_file": attr.label(),
- "repository": attr.string(),
- "patch_file": attr.label(default = None),
- "urls": attr.string_list(default = []),
- "sha256": attr.string(default = ""),
- "strip_prefix": attr.string(default = ""),
- },
- implementation = _temp_workaround_http_archive_impl,
-)
-
-# Executes specified command with arguments and calls 'fail' if it exited with
-# non-zero code
-def _execute_and_check_ret_code(repo_ctx, cmd_and_args):
- result = repo_ctx.execute(cmd_and_args, timeout=10)
- if result.return_code != 0:
- fail(("Non-zero return code({1}) when executing '{0}':\n" + "Stdout: {2}\n"
- + "Stderr: {3}").format(" ".join(cmd_and_args), result.return_code,
- result.stdout, result.stderr))
-
-# Apply a patch_file to the repository root directory
-# Runs 'patch -p1'
-def _apply_patch(repo_ctx, patch_file):
- # Don't check patch on Windows, because patch is only available under bash.
- if not _is_windows(repo_ctx) and not repo_ctx.which("patch"):
- fail("patch command is not found, please install it")
-
- cmd = [
- "patch", "-p1", "-d", repo_ctx.path("."), "-i", repo_ctx.path(patch_file)
- ]
- if _is_windows(repo_ctx):
- bazel_sh = _get_env_var(repo_ctx, "BAZEL_SH")
- if not bazel_sh:
- fail("BAZEL_SH environment variable is not set")
- cmd = [bazel_sh, "-l", "-c", " ".join(cmd)]
- _execute_and_check_ret_code(repo_ctx, cmd)
-
-# Download the repository and apply a patch to its root
-def _patched_http_archive_impl(repo_ctx):
- repo_ctx.download_and_extract(
- repo_ctx.attr.urls,
- sha256=repo_ctx.attr.sha256,
- stripPrefix=repo_ctx.attr.strip_prefix)
- _apply_patch(repo_ctx, repo_ctx.attr.patch_file)
-
-patched_http_archive = repository_rule(
- attrs = {
- "patch_file": attr.label(),
- "build_file": attr.label(),
- "repository": attr.string(),
- "urls": attr.string_list(default = []),
- "sha256": attr.string(default = ""),
- "strip_prefix": attr.string(default = ""),
- },
- implementation = _patched_http_archive_impl,
-)
-
# If TensorFlow is linked as a submodule.
# path_prefix is no longer used.
# tf_repo_name is thought to be under consideration.
@@ -157,14 +65,13 @@ def tf_workspace(path_prefix="", tf_repo_name=""):
sha256 = "57ba56c4c243f403ff78f417ff854ef50b9eddf4a610a917b7c95e7fa8553a4b",
strip_prefix = "mklml_lnx_2018.0.20170720",
build_file = str(Label("//third_party/mkl:mkl.BUILD")),
- repository = tf_repo_name,
)
if path_prefix:
print("path_prefix was specified to tf_workspace but is no longer used " +
"and will be removed in the future.")
- native.new_http_archive(
+ tf_http_archive(
name = "mkl_dnn",
urls = [
"https://mirror.bazel.build/github.com/01org/mkl-dnn/archive/b01e3a55a07be62172e713bcd2644c5176360212.tar.gz",
@@ -175,7 +82,7 @@ def tf_workspace(path_prefix="", tf_repo_name=""):
build_file = str(Label("//third_party/mkl_dnn:mkldnn.BUILD")),
)
- native.http_archive(
+ tf_http_archive(
name = "com_google_absl",
urls = [
"https://mirror.bazel.build/github.com/abseil/abseil-cpp/archive/cc4bed2d74f7c8717e31f9579214ab52a9c9c610.tar.gz",
@@ -185,7 +92,7 @@ def tf_workspace(path_prefix="", tf_repo_name=""):
strip_prefix = "abseil-cpp-cc4bed2d74f7c8717e31f9579214ab52a9c9c610",
)
- native.new_http_archive(
+ tf_http_archive(
name = "eigen_archive",
urls = [
"https://mirror.bazel.build/bitbucket.org/eigen/eigen/get/429aa5254200.tar.gz",
@@ -196,18 +103,20 @@ def tf_workspace(path_prefix="", tf_repo_name=""):
build_file = str(Label("//third_party:eigen.BUILD")),
)
- native.new_http_archive(
+ tf_http_archive(
name = "arm_compiler",
- build_file = str(Label("//:arm_compiler.BUILD")),
sha256 = "970285762565c7890c6c087d262b0a18286e7d0384f13a37786d8521773bc969",
strip_prefix = "tools-0e906ebc527eab1cdbf7adabff5b474da9562e9f/arm-bcm2708/arm-rpi-4.9.3-linux-gnueabihf",
urls = [
"https://mirror.bazel.build/github.com/raspberrypi/tools/archive/0e906ebc527eab1cdbf7adabff5b474da9562e9f.tar.gz",
+ # Please uncomment me, when the next upgrade happens. Then
+ # remove the whitelist entry in third_party/repo.bzl.
# "https://github.com/raspberrypi/tools/archive/0e906ebc527eab1cdbf7adabff5b474da9562e9f.tar.gz",
],
+ build_file = str(Label("//:arm_compiler.BUILD")),
)
- native.new_http_archive(
+ tf_http_archive(
name = "libxsmm_archive",
urls = [
"https://mirror.bazel.build/github.com/hfp/libxsmm/archive/1.8.1.tar.gz",
@@ -218,15 +127,12 @@ def tf_workspace(path_prefix="", tf_repo_name=""):
build_file = str(Label("//third_party:libxsmm.BUILD")),
)
- native.bind(
- name = "xsmm_avx",
- actual = "@libxsmm_archive//third_party:xsmm_avx",
- )
-
- native.new_http_archive(
+ tf_http_archive(
name = "ortools_archive",
urls = [
"https://mirror.bazel.build/github.com/google/or-tools/archive/253f7955c6a1fd805408fba2e42ac6d45b312d15.tar.gz",
+ # Please uncomment me, when the next upgrade happens. Then
+ # remove the whitelist entry in third_party/repo.bzl.
# "https://github.com/google/or-tools/archive/253f7955c6a1fd805408fba2e42ac6d45b312d15.tar.gz",
],
sha256 = "932075525642b04ac6f1b50589f1df5cd72ec2f448b721fd32234cf183f0e755",
@@ -234,7 +140,7 @@ def tf_workspace(path_prefix="", tf_repo_name=""):
build_file = str(Label("//third_party:ortools.BUILD")),
)
- native.http_archive(
+ tf_http_archive(
name = "com_googlesource_code_re2",
urls = [
"https://mirror.bazel.build/github.com/google/re2/archive/b94b7cd42e9f02673cd748c1ac1d16db4052514c.tar.gz",
@@ -244,7 +150,7 @@ def tf_workspace(path_prefix="", tf_repo_name=""):
strip_prefix = "re2-b94b7cd42e9f02673cd748c1ac1d16db4052514c",
)
- native.http_archive(
+ tf_http_archive(
name = "gemmlowp",
urls = [
"https://mirror.bazel.build/github.com/google/gemmlowp/archive/010bb3e71a26ca1d0884a167081d092b43563996.zip",
@@ -254,7 +160,7 @@ def tf_workspace(path_prefix="", tf_repo_name=""):
strip_prefix = "gemmlowp-010bb3e71a26ca1d0884a167081d092b43563996",
)
- native.new_http_archive(
+ tf_http_archive(
name = "farmhash_archive",
urls = [
"https://mirror.bazel.build/github.com/google/farmhash/archive/816a4ae622e964763ca0862d9dbd19324a1eaf45.tar.gz",
@@ -265,12 +171,7 @@ def tf_workspace(path_prefix="", tf_repo_name=""):
build_file = str(Label("//third_party:farmhash.BUILD")),
)
- native.bind(
- name = "farmhash",
- actual = "@farmhash//:farmhash",
- )
-
- native.new_http_archive(
+ tf_http_archive(
name = "highwayhash",
urls = [
"https://mirror.bazel.build/github.com/google/highwayhash/archive/dfcb97ca4fe9277bf9dc1802dd979b071896453b.tar.gz",
@@ -281,7 +182,7 @@ def tf_workspace(path_prefix="", tf_repo_name=""):
build_file = str(Label("//third_party:highwayhash.BUILD")),
)
- native.new_http_archive(
+ tf_http_archive(
name = "nasm",
urls = [
"https://mirror.bazel.build/www.nasm.us/pub/nasm/releasebuilds/2.12.02/nasm-2.12.02.tar.bz2",
@@ -292,7 +193,7 @@ def tf_workspace(path_prefix="", tf_repo_name=""):
build_file = str(Label("//third_party:nasm.BUILD")),
)
- temp_workaround_http_archive(
+ tf_http_archive(
name = "jpeg",
urls = [
"https://mirror.bazel.build/github.com/libjpeg-turbo/libjpeg-turbo/archive/1.5.1.tar.gz",
@@ -301,10 +202,9 @@ def tf_workspace(path_prefix="", tf_repo_name=""):
sha256 = "c15a9607892113946379ccea3ca8b85018301b200754f209453ab21674268e77",
strip_prefix = "libjpeg-turbo-1.5.1",
build_file = str(Label("//third_party/jpeg:jpeg.BUILD")),
- repository = tf_repo_name,
)
- native.new_http_archive(
+ tf_http_archive(
name = "png_archive",
urls = [
"https://mirror.bazel.build/github.com/glennrp/libpng/archive/v1.2.53.tar.gz",
@@ -315,7 +215,7 @@ def tf_workspace(path_prefix="", tf_repo_name=""):
build_file = str(Label("//third_party:png.BUILD")),
)
- native.new_http_archive(
+ tf_http_archive(
name = "sqlite_archive",
urls = [
"https://mirror.bazel.build/www.sqlite.org/2017/sqlite-amalgamation-3200000.zip",
@@ -323,10 +223,10 @@ def tf_workspace(path_prefix="", tf_repo_name=""):
],
sha256 = "208780b3616f9de0aeb50822b7a8f5482f6515193859e91ed61637be6ad74fd4",
strip_prefix = "sqlite-amalgamation-3200000",
- build_file = str(Label("//third_party:sqlite.BUILD"))
+ build_file = str(Label("//third_party:sqlite.BUILD")),
)
- native.new_http_archive(
+ tf_http_archive(
name = "gif_archive",
urls = [
"https://mirror.bazel.build/ufpr.dl.sourceforge.net/project/giflib/giflib-5.1.4.tar.gz",
@@ -337,7 +237,7 @@ def tf_workspace(path_prefix="", tf_repo_name=""):
build_file = str(Label("//third_party:gif.BUILD")),
)
- native.new_http_archive(
+ tf_http_archive(
name = "six_archive",
urls = [
"https://mirror.bazel.build/pypi.python.org/packages/source/s/six/six-1.10.0.tar.gz",
@@ -348,7 +248,7 @@ def tf_workspace(path_prefix="", tf_repo_name=""):
build_file = str(Label("//third_party:six.BUILD")),
)
- native.http_archive(
+ tf_http_archive(
name = "absl_py",
urls = [
"https://mirror.bazel.build/github.com/abseil/abseil-py/archive/231e3870b976c1dc61dce1749138661d21556028.tar.gz",
@@ -358,7 +258,7 @@ def tf_workspace(path_prefix="", tf_repo_name=""):
strip_prefix = "abseil-py-231e3870b976c1dc61dce1749138661d21556028",
)
- native.new_http_archive(
+ tf_http_archive(
name = "org_python_pypi_backports_weakref",
urls = [
"https://mirror.bazel.build/pypi.python.org/packages/bc/cc/3cdb0a02e7e96f6c70bd971bc8a90b8463fda83e264fa9c5c1c98ceabd81/backports.weakref-1.0rc1.tar.gz",
@@ -369,7 +269,7 @@ def tf_workspace(path_prefix="", tf_repo_name=""):
build_file = str(Label("//third_party:backports_weakref.BUILD")),
)
- native.new_http_archive(
+ tf_http_archive(
name = "com_github_andreif_codegen",
urls = [
"https://mirror.bazel.build/github.com/andreif/codegen/archive/1.0.tar.gz",
@@ -391,12 +291,7 @@ def tf_workspace(path_prefix="", tf_repo_name=""):
},
)
- native.bind(
- name = "six",
- actual = "@six_archive//:six",
- )
-
- patched_http_archive(
+ tf_http_archive(
name = "protobuf_archive",
urls = [
"https://mirror.bazel.build/github.com/google/protobuf/archive/b04e5cba356212e4e8c66c61bbe0c3a20537c5b9.tar.gz",
@@ -411,20 +306,10 @@ def tf_workspace(path_prefix="", tf_repo_name=""):
patch_file = str(Label("//third_party/protobuf:add_noinlines.patch")),
)
- native.bind(
- name = "protobuf",
- actual = "@protobuf_archive//:protobuf",
- )
-
- native.bind(
- name = "protobuf_headers",
- actual = "@protobuf_archive//:protobuf_headers",
- )
-
# We need to import the protobuf library under the names com_google_protobuf
# and com_google_protobuf_cc to enable proto_library support in bazel.
# Unfortunately there is no way to alias http_archives at the moment.
- native.http_archive(
+ tf_http_archive(
name = "com_google_protobuf",
urls = [
"https://mirror.bazel.build/github.com/google/protobuf/archive/b04e5cba356212e4e8c66c61bbe0c3a20537c5b9.tar.gz",
@@ -434,7 +319,7 @@ def tf_workspace(path_prefix="", tf_repo_name=""):
strip_prefix = "protobuf-b04e5cba356212e4e8c66c61bbe0c3a20537c5b9",
)
- native.http_archive(
+ tf_http_archive(
name = "com_google_protobuf_cc",
urls = [
"https://mirror.bazel.build/github.com/google/protobuf/archive/b04e5cba356212e4e8c66c61bbe0c3a20537c5b9.tar.gz",
@@ -444,7 +329,7 @@ def tf_workspace(path_prefix="", tf_repo_name=""):
strip_prefix = "protobuf-b04e5cba356212e4e8c66c61bbe0c3a20537c5b9",
)
- native.http_archive(
+ tf_http_archive(
name = "nsync",
urls = [
"https://mirror.bazel.build/github.com/google/nsync/archive/93815892dddafe9146a5f7e7042281d59d0f4323.tar.gz",
@@ -454,7 +339,7 @@ def tf_workspace(path_prefix="", tf_repo_name=""):
strip_prefix = "nsync-93815892dddafe9146a5f7e7042281d59d0f4323",
)
- native.http_archive(
+ tf_http_archive(
name = "com_google_googletest",
urls = [
"https://mirror.bazel.build/github.com/google/googletest/archive/9816b96a6ddc0430671693df90192bbee57108b6.zip",
@@ -464,7 +349,7 @@ def tf_workspace(path_prefix="", tf_repo_name=""):
strip_prefix = "googletest-9816b96a6ddc0430671693df90192bbee57108b6",
)
- native.http_archive(
+ tf_http_archive(
name = "com_github_gflags_gflags",
urls = [
"https://mirror.bazel.build/github.com/gflags/gflags/archive/f8a0efe03aa69b3336d8e228b37d4ccb17324b88.tar.gz",
@@ -474,12 +359,7 @@ def tf_workspace(path_prefix="", tf_repo_name=""):
strip_prefix = "gflags-f8a0efe03aa69b3336d8e228b37d4ccb17324b88",
)
- native.bind(
- name = "python_headers",
- actual = str(Label("//util/python:python_headers")),
- )
-
- native.new_http_archive(
+ tf_http_archive(
name = "pcre",
sha256 = "ccdf7e788769838f8285b3ee672ed573358202305ee361cfec7a4a4fb005bbc7",
urls = [
@@ -490,7 +370,7 @@ def tf_workspace(path_prefix="", tf_repo_name=""):
build_file = str(Label("//third_party:pcre.BUILD")),
)
- native.new_http_archive(
+ tf_http_archive(
name = "swig",
sha256 = "58a475dbbd4a4d7075e5fe86d4e54c9edde39847cdb96a3053d87cb64a23a453",
urls = [
@@ -502,7 +382,7 @@ def tf_workspace(path_prefix="", tf_repo_name=""):
build_file = str(Label("//third_party:swig.BUILD")),
)
- temp_workaround_http_archive(
+ tf_http_archive(
name = "curl",
sha256 = "ff3e80c1ca6a068428726cd7dd19037a47cc538ce58ef61c59587191039b2ca6",
urls = [
@@ -511,29 +391,9 @@ def tf_workspace(path_prefix="", tf_repo_name=""):
],
strip_prefix = "curl-7.49.1",
build_file = str(Label("//third_party:curl.BUILD")),
- repository = tf_repo_name
- )
-
- # grpc expects //external:protobuf_clib and //external:protobuf_compiler
- # to point to the protobuf's compiler library.
- native.bind(
- name = "protobuf_clib",
- actual = "@protobuf_archive//:protoc_lib",
- )
-
- native.bind(
- name = "libssl",
- actual = "@boringssl//:ssl",
)
- # gRPC has includes directly from their third_party path for nanopb, so we
- # must depend on their version of it.
- native.bind(
- name = "nanopb",
- actual = "@grpc//third_party/nanopb:nanopb",
- )
-
- native.http_archive(
+ tf_http_archive(
name = "grpc",
urls = [
"https://mirror.bazel.build/github.com/grpc/grpc/archive/54e8f37e537794c2d814c1604c1282125f64f093.tar.gz",
@@ -543,26 +403,7 @@ def tf_workspace(path_prefix="", tf_repo_name=""):
strip_prefix = "grpc-54e8f37e537794c2d814c1604c1282125f64f093",
)
- # gRPC wants the existence of a cares dependence but its contents are not
- # actually important since we have set GRPC_ARES=0 in tools/bazel.rc
- native.bind(
- name = "cares",
- actual = "@grpc//third_party/nanopb:nanopb",
- )
-
- # protobuf expects //external:grpc_cpp_plugin to point to grpc's
- # C++ plugin code generator.
- native.bind(
- name = "grpc_cpp_plugin",
- actual = "@grpc//:grpc_cpp_plugin",
- )
-
- native.bind(
- name = "grpc_lib",
- actual = "@grpc//:grpc++_unsecure",
- )
-
- native.new_http_archive(
+ tf_http_archive(
name = "linenoise",
sha256 = "7f51f45887a3d31b4ce4fa5965210a5e64637ceac12720cfce7954d6a2e812f7",
urls = [
@@ -575,7 +416,7 @@ def tf_workspace(path_prefix="", tf_repo_name=""):
# TODO(phawkins): currently, this rule uses an unofficial LLVM mirror.
# Switch to an official source of snapshots if/when possible.
- temp_workaround_http_archive(
+ tf_http_archive(
name = "llvm",
urls = [
"https://mirror.bazel.build/github.com/llvm-mirror/llvm/archive/8d26b8bee4d8e7230870a600bc968c7ee8cf6f67.tar.gz",
@@ -584,10 +425,9 @@ def tf_workspace(path_prefix="", tf_repo_name=""):
sha256 = "ff5ddbe5af5e264426c8d489e7fddfc5ad7e0975f19cefe9db8c0a5d0faeb23e",
strip_prefix = "llvm-8d26b8bee4d8e7230870a600bc968c7ee8cf6f67",
build_file = str(Label("//third_party/llvm:llvm.BUILD")),
- repository = tf_repo_name,
)
- native.new_http_archive(
+ tf_http_archive(
name = "lmdb",
urls = [
"https://mirror.bazel.build/github.com/LMDB/lmdb/archive/LMDB_0.9.19.tar.gz",
@@ -598,7 +438,7 @@ def tf_workspace(path_prefix="", tf_repo_name=""):
build_file = str(Label("//third_party:lmdb.BUILD")),
)
- native.new_http_archive(
+ tf_http_archive(
name = "jsoncpp_git",
urls = [
"https://mirror.bazel.build/github.com/open-source-parsers/jsoncpp/archive/11086dd6a7eba04289944367ca82cea71299ed70.tar.gz",
@@ -609,12 +449,7 @@ def tf_workspace(path_prefix="", tf_repo_name=""):
build_file = str(Label("//third_party:jsoncpp.BUILD")),
)
- native.bind(
- name = "jsoncpp",
- actual = "@jsoncpp_git//:jsoncpp",
- )
-
- native.http_archive(
+ tf_http_archive(
name = "boringssl",
urls = [
"https://mirror.bazel.build/github.com/google/boringssl/archive/a0fb951d2a26a8ee746b52f3ba81ab011a0af778.tar.gz",
@@ -624,7 +459,7 @@ def tf_workspace(path_prefix="", tf_repo_name=""):
strip_prefix = "boringssl-a0fb951d2a26a8ee746b52f3ba81ab011a0af778",
)
- native.new_http_archive(
+ tf_http_archive(
name = "zlib_archive",
urls = [
"https://mirror.bazel.build/zlib.net/zlib-1.2.8.tar.gz",
@@ -635,12 +470,7 @@ def tf_workspace(path_prefix="", tf_repo_name=""):
build_file = str(Label("//third_party:zlib.BUILD")),
)
- native.bind(
- name = "zlib",
- actual = "@zlib_archive//:zlib",
- )
-
- native.new_http_archive(
+ tf_http_archive(
name = "fft2d",
urls = [
"https://mirror.bazel.build/www.kurims.kyoto-u.ac.jp/~ooura/fft.tgz",
@@ -650,7 +480,7 @@ def tf_workspace(path_prefix="", tf_repo_name=""):
build_file = str(Label("//third_party/fft2d:fft2d.BUILD")),
)
- temp_workaround_http_archive(
+ tf_http_archive(
name = "snappy",
urls = [
"https://mirror.bazel.build/github.com/google/snappy/archive/1.1.4.tar.gz",
@@ -659,10 +489,9 @@ def tf_workspace(path_prefix="", tf_repo_name=""):
sha256 = "2f7504c73d85bac842e893340333be8cb8561710642fc9562fccdd9d2c3fcc94",
strip_prefix = "snappy-1.1.4",
build_file = str(Label("//third_party:snappy.BUILD")),
- repository = tf_repo_name,
)
- temp_workaround_http_archive(
+ tf_http_archive(
name = "nccl_archive",
urls = [
"https://mirror.bazel.build/github.com/nvidia/nccl/archive/03d856977ecbaac87e598c0c4bafca96761b9ac7.tar.gz",
@@ -671,10 +500,9 @@ def tf_workspace(path_prefix="", tf_repo_name=""):
sha256 = "2ca86fb6179ecbff789cc67c836139c1bbc0324ed8c04643405a30bf26325176",
strip_prefix = "nccl-03d856977ecbaac87e598c0c4bafca96761b9ac7",
build_file = str(Label("//third_party:nccl.BUILD")),
- repository = tf_repo_name,
)
- temp_workaround_http_archive(
+ tf_http_archive(
name = "aws",
urls = [
"https://mirror.bazel.build/github.com/aws/aws-sdk-cpp/archive/1.0.90.tar.gz",
@@ -683,7 +511,6 @@ def tf_workspace(path_prefix="", tf_repo_name=""):
sha256 = "f599b57aec4f03ad696044dd430b2d201864113937353adc346f53ad47991319",
strip_prefix = "aws-sdk-cpp-1.0.90",
build_file = str(Label("//third_party:aws.BUILD")),
- repository = tf_repo_name
)
java_import_external(
@@ -711,7 +538,7 @@ def tf_workspace(path_prefix="", tf_repo_name=""):
testonly_ = True,
)
- temp_workaround_http_archive(
+ tf_http_archive(
name = "jemalloc",
urls = [
"https://mirror.bazel.build/github.com/jemalloc/jemalloc/archive/4.4.0.tar.gz",
@@ -720,7 +547,6 @@ def tf_workspace(path_prefix="", tf_repo_name=""):
sha256 = "3c8f25c02e806c3ce0ab5fb7da1817f89fc9732709024e2a81b6b82f7cc792a8",
strip_prefix = "jemalloc-4.4.0",
build_file = str(Label("//third_party:jemalloc.BUILD")),
- repository = tf_repo_name,
)
java_import_external(
@@ -758,7 +584,7 @@ def tf_workspace(path_prefix="", tf_repo_name=""):
licenses = ["notice"], # Apache 2.0
)
- native.new_http_archive(
+ tf_http_archive(
name = "com_google_pprof",
urls = [
"https://mirror.bazel.build/github.com/google/pprof/archive/c0fb62ec88c411cc91194465e54db2632845b650.tar.gz",
@@ -769,7 +595,7 @@ def tf_workspace(path_prefix="", tf_repo_name=""):
build_file = str(Label("//third_party:pprof.BUILD")),
)
- native.new_http_archive(
+ tf_http_archive(
name = "cub_archive",
urls = [
"https://mirror.bazel.build/github.com/NVlabs/cub/archive/1.7.4.zip",
@@ -780,12 +606,7 @@ def tf_workspace(path_prefix="", tf_repo_name=""):
build_file = str(Label("//third_party:cub.BUILD")),
)
- native.bind(
- name = "cub",
- actual = "@cub_archive//:cub",
- )
-
- native.new_http_archive(
+ tf_http_archive(
name = "cython",
sha256 = "6dcd30b5ceb887b2b965ee7ceb82ea3acb5f0642fe2206c7636b45acea4798e5",
urls = [
@@ -794,9 +615,10 @@ def tf_workspace(path_prefix="", tf_repo_name=""):
],
strip_prefix = "cython-3732784c45cfb040a5b0936951d196f83a12ea17",
build_file = str(Label("//third_party:cython.BUILD")),
+ delete = ["BUILD.bazel"],
)
- native.http_archive(
+ tf_http_archive(
name = "bazel_toolchains",
urls = [
"https://mirror.bazel.build/github.com/bazelbuild/bazel-toolchains/archive/af4681c3d19f063f090222ec3d04108c4e0ca255.tar.gz",
@@ -806,7 +628,7 @@ def tf_workspace(path_prefix="", tf_repo_name=""):
strip_prefix = "bazel-toolchains-af4681c3d19f063f090222ec3d04108c4e0ca255",
)
- native.new_http_archive(
+ tf_http_archive(
name = "arm_neon_2_x86_sse",
sha256 = "c8d90aa4357f8079d427e87a6f4c493da1fa4140aee926c05902d7ec1533d9a5",
strip_prefix = "ARM_NEON_2_x86_SSE-0f77d9d182265259b135dad949230ecbf1a2633d",
@@ -817,32 +639,109 @@ def tf_workspace(path_prefix="", tf_repo_name=""):
build_file = str(Label("//third_party:arm_neon_2_x86_sse.BUILD")),
)
- native.new_http_archive(
+ tf_http_archive(
name = "flatbuffers",
- build_file = str(Label("//third_party/flatbuffers:flatbuffers.BUILD")),
strip_prefix = "flatbuffers-971a68110e4fc1bace10fcb6deeb189e7e1a34ce",
sha256 = "874088d2ee0d9f8524191f77209556415f03dd44e156276edf19e5b90ceb5f55",
urls = [
"https://mirror.bazel.build/github.com/google/flatbuffers/archive/971a68110e4fc1bace10fcb6deeb189e7e1a34ce.tar.gz",
"https://github.com/google/flatbuffers/archive/971a68110e4fc1bace10fcb6deeb189e7e1a34ce.tar.gz",
],
+ build_file = str(Label("//third_party/flatbuffers:flatbuffers.BUILD")),
)
- native.http_archive(
+ tf_http_archive(
name = "double_conversion",
urls = [
- "https://github.com/google/double-conversion/archive/5664746c5e64dc265e7fbc1a890a6698e6ad0ebb.zip",
+ "https://mirror.bazel.build/github.com/google/double-conversion/archive/5664746c5e64dc265e7fbc1a890a6698e6ad0ebb.tar.gz",
+ "https://github.com/google/double-conversion/archive/5664746c5e64dc265e7fbc1a890a6698e6ad0ebb.tar.gz",
],
- sha256 = "a0c49fb3cc8d34b2230d278a115f1bb266bcfcaae10400b84dc2a3b7dc2c8bc6",
+ sha256 = "ce651ba63faa55f86333f50bdd58a574327ca1565a65b875b11f5132c7c72bb6",
strip_prefix = "double-conversion-5664746c5e64dc265e7fbc1a890a6698e6ad0ebb",
)
-
- native.new_http_archive(
+
+ tf_http_archive(
name = "tflite_mobilenet",
- build_file = str(Label("//third_party:tflite_mobilenet.BUILD")),
sha256 = "23f814d1c076bdf03715dfb6cab3713aa4fbdf040fd5448c43196bd2e97a4c1b",
urls = [
"https://mirror.bazel.build/storage.googleapis.com/download.tensorflow.org/models/tflite/mobilenet_v1_224_android_quant_2017_11_08.zip",
"https://storage.googleapis.com/download.tensorflow.org/models/tflite/mobilenet_v1_224_android_quant_2017_11_08.zip",
],
+ build_file = str(Label("//third_party:tflite_mobilenet.BUILD")),
+ )
+
+ ##############################################################################
+ # BIND DEFINITIONS
+ #
+ # Please do not add bind() definitions unless we have no other choice.
+ # If that ends up being the case, please leave a comment explaining
+ # why we can't depend on the canonical build target.
+
+ # gRPC wants a cares dependency but its contents is not actually
+ # important since we have set GRPC_ARES=0 in tools/bazel.rc
+ native.bind(
+ name = "cares",
+ actual = "@grpc//third_party/nanopb:nanopb",
+ )
+
+ # Needed by Protobuf
+ native.bind(
+ name = "grpc_cpp_plugin",
+ actual = "@grpc//:grpc_cpp_plugin",
+ )
+
+ # gRPC has three empty C++ functions which it wants the user to define
+ # at build time. https://github.com/grpc/grpc/issues/13590
+ native.bind(
+ name = "grpc_lib",
+ actual = "@grpc//:grpc++_unsecure",
+ )
+
+ # Needed by gRPC
+ native.bind(
+ name = "libssl",
+ actual = "@boringssl//:ssl",
+ )
+
+ # Needed by gRPC
+ native.bind(
+ name = "nanopb",
+ actual = "@grpc//third_party/nanopb:nanopb",
+ )
+
+ # Needed by gRPC
+ native.bind(
+ name = "protobuf",
+ actual = "@protobuf_archive//:protobuf",
+ )
+
+ # gRPC expects //external:protobuf_clib and //external:protobuf_compiler
+ # to point to Protobuf's compiler library.
+ native.bind(
+ name = "protobuf_clib",
+ actual = "@protobuf_archive//:protoc_lib",
+ )
+
+ # Needed by gRPC
+ native.bind(
+ name = "protobuf_headers",
+ actual = "@protobuf_archive//:protobuf_headers",
+ )
+
+ # Needed by Protobuf
+ native.bind(
+ name = "python_headers",
+ actual = str(Label("//util/python:python_headers")),
+ )
+
+ # Needed by Protobuf
+ native.bind(
+ name = "six",
+ actual = "@six_archive//:six",
+ )
+
+ # Needed by gRPC
+ native.bind(
+ name = "zlib",
+ actual = "@zlib_archive//:zlib",
)
diff --git a/third_party/aws.BUILD b/third_party/aws.BUILD
index bc9e37ffb3..bf5310aa16 100644
--- a/third_party/aws.BUILD
+++ b/third_party/aws.BUILD
@@ -7,21 +7,21 @@ licenses(["notice"]) # Apache 2.0
exports_files(["LICENSE"])
-load("@%ws%//third_party:common.bzl", "template_rule")
+load("@org_tensorflow//third_party:common.bzl", "template_rule")
cc_library(
name = "aws",
srcs = select({
- "@%ws%//tensorflow:linux_x86_64": glob([
+ "@org_tensorflow//tensorflow:linux_x86_64": glob([
"aws-cpp-sdk-core/source/platform/linux-shared/*.cpp",
]),
- "@%ws%//tensorflow:darwin": glob([
+ "@org_tensorflow//tensorflow:darwin": glob([
"aws-cpp-sdk-core/source/platform/linux-shared/*.cpp",
]),
- "@%ws%//tensorflow:linux_ppc64le": glob([
+ "@org_tensorflow//tensorflow:linux_ppc64le": glob([
"aws-cpp-sdk-core/source/platform/linux-shared/*.cpp",
]),
- "@%ws%//tensorflow:raspberry_pi_armeabi": glob([
+ "@org_tensorflow//tensorflow:raspberry_pi_armeabi": glob([
"aws-cpp-sdk-core/source/platform/linux-shared/*.cpp",
]),
"//conditions:default": [],
@@ -53,17 +53,17 @@ cc_library(
"aws-cpp-sdk-core/include/aws/core/SDKConfig.h",
],
defines = select({
- "@%ws%//tensorflow:linux_x86_64": [
+ "@org_tensorflow//tensorflow:linux_x86_64": [
"PLATFORM_LINUX",
"ENABLE_CURL_CLIENT",
"ENABLE_NO_ENCRYPTION",
],
- "@%ws%//tensorflow:darwin": [
+ "@org_tensorflow//tensorflow:darwin": [
"PLATFORM_APPLE",
"ENABLE_CURL_CLIENT",
"ENABLE_NO_ENCRYPTION",
],
- "@%ws%//tensorflow:linux_ppc64le": [
+ "@org_tensorflow//tensorflow:linux_ppc64le": [
"PLATFORM_LINUX",
"ENABLE_CURL_CLIENT",
"ENABLE_NO_ENCRYPTION",
diff --git a/third_party/curl.BUILD b/third_party/curl.BUILD
index 805a30d262..0f6c75a210 100644
--- a/third_party/curl.BUILD
+++ b/third_party/curl.BUILD
@@ -224,14 +224,14 @@ cc_library(
"lib/wildcard.h",
"lib/x509asn1.h",
] + select({
- "@%ws%//tensorflow:darwin": [
+ "@org_tensorflow//tensorflow:darwin": [
"lib/vtls/darwinssl.c",
],
- "@%ws%//tensorflow:ios": [
+ "@org_tensorflow//tensorflow:ios": [
"lib/vtls/darwinssl.c",
],
- "@%ws%//tensorflow:windows": CURL_WIN_SRCS,
- "@%ws%//tensorflow:windows_msvc": CURL_WIN_SRCS,
+ "@org_tensorflow//tensorflow:windows": CURL_WIN_SRCS,
+ "@org_tensorflow//tensorflow:windows_msvc": CURL_WIN_SRCS,
"//conditions:default": [
"lib/vtls/openssl.c",
],
@@ -248,8 +248,8 @@ cc_library(
"include/curl/typecheck-gcc.h",
],
copts = select({
- "@%ws%//tensorflow:windows": CURL_WIN_COPTS,
- "@%ws%//tensorflow:windows_msvc": CURL_WIN_COPTS,
+ "@org_tensorflow//tensorflow:windows": CURL_WIN_COPTS,
+ "@org_tensorflow//tensorflow:windows_msvc": CURL_WIN_COPTS,
"//conditions:default": [
"-I%prefix%/curl/lib",
"-D_GNU_SOURCE",
@@ -261,14 +261,14 @@ cc_library(
"-Wno-string-plus-int",
],
}) + select({
- "@%ws%//tensorflow:darwin": [
+ "@org_tensorflow//tensorflow:darwin": [
"-fno-constant-cfstrings",
],
- "@%ws%//tensorflow:windows": [
+ "@org_tensorflow//tensorflow:windows": [
# See curl.h for discussion of write size and Windows
"/DCURL_MAX_WRITE_SIZE=16384",
],
- "@%ws%//tensorflow:windows_msvc": [
+ "@org_tensorflow//tensorflow:windows_msvc": [
# See curl.h for discussion of write size and Windows
"/DCURL_MAX_WRITE_SIZE=16384",
],
@@ -278,20 +278,20 @@ cc_library(
}),
includes = ["include"],
linkopts = select({
- "@%ws%//tensorflow:android": [
+ "@org_tensorflow//tensorflow:android": [
"-pie",
],
- "@%ws%//tensorflow:darwin": [
+ "@org_tensorflow//tensorflow:darwin": [
"-Wl,-framework",
"-Wl,CoreFoundation",
"-Wl,-framework",
"-Wl,Security",
],
- "@%ws%//tensorflow:ios": [],
- "@%ws%//tensorflow:windows": [
+ "@org_tensorflow//tensorflow:ios": [],
+ "@org_tensorflow//tensorflow:windows": [
"-Wl,ws2_32.lib",
],
- "@%ws%//tensorflow:windows_msvc": [
+ "@org_tensorflow//tensorflow:windows_msvc": [
"-Wl,ws2_32.lib",
],
"//conditions:default": [
@@ -302,9 +302,9 @@ cc_library(
deps = [
"@zlib_archive//:zlib",
] + select({
- "@%ws%//tensorflow:ios": [],
- "@%ws%//tensorflow:windows": [],
- "@%ws%//tensorflow:windows_msvc": [],
+ "@org_tensorflow//tensorflow:ios": [],
+ "@org_tensorflow//tensorflow:windows": [],
+ "@org_tensorflow//tensorflow:windows_msvc": [],
"//conditions:default": [
"@boringssl//:ssl",
],
@@ -406,8 +406,8 @@ cc_binary(
"src/tool_xattr.h",
],
copts = select({
- "@%ws%//tensorflow:windows": CURL_BIN_WIN_COPTS,
- "@%ws%//tensorflow:windows_msvc": CURL_BIN_WIN_COPTS,
+ "@org_tensorflow//tensorflow:windows": CURL_BIN_WIN_COPTS,
+ "@org_tensorflow//tensorflow:windows_msvc": CURL_BIN_WIN_COPTS,
"//conditions:default": [
"-I%prefix%/curl/lib",
"-D_GNU_SOURCE",
diff --git a/third_party/gif.BUILD b/third_party/gif.BUILD
index 27808a9d64..78fbd6c0e0 100644
--- a/third_party/gif.BUILD
+++ b/third_party/gif.BUILD
@@ -21,7 +21,7 @@ cc_library(
],
hdrs = ["lib/gif_lib.h"],
defines = select({
- #"@%ws%//tensorflow:android": [
+ #"@org_tensorflow//tensorflow:android": [
":android": [
"S_IREAD=S_IRUSR",
"S_IWRITE=S_IWUSR",
diff --git a/third_party/jemalloc.BUILD b/third_party/jemalloc.BUILD
index a2addf2c66..1b0829b8fe 100644
--- a/third_party/jemalloc.BUILD
+++ b/third_party/jemalloc.BUILD
@@ -5,7 +5,7 @@ licenses(["notice"]) # BSD
exports_files(["COPYING"])
-load("@%ws%//third_party:common.bzl", "template_rule")
+load("@org_tensorflow//third_party:common.bzl", "template_rule")
cc_library(
name = "jemalloc_headers",
@@ -97,10 +97,10 @@ cc_library(
includes = ["include"],
# pthread_atfork() is called for PPC.
linkopts = select({
- "@%ws%//tensorflow:linux_ppc64le": [
+ "@org_tensorflow//tensorflow:linux_ppc64le": [
"-lpthread",
],
- "@%ws%//tensorflow:linux_x86_64": [
+ "@org_tensorflow//tensorflow:linux_x86_64": [
"-lpthread",
],
"//conditions:default": [
@@ -208,8 +208,8 @@ genrule(
name = "size_classes_h",
outs = ["include/jemalloc/internal/size_classes.h"],
cmd = select({
- "@%ws%//tensorflow:linux_ppc64le": "$(location :size_classes_sh) \"3 4\" 3 16 2 >$@",
- "@%ws%//tensorflow:linux_x86_64": "$(location :size_classes_sh) \"3 4\" 3 12 2 >$@",
+ "@org_tensorflow//tensorflow:linux_ppc64le": "$(location :size_classes_sh) \"3 4\" 3 16 2 >$@",
+ "@org_tensorflow//tensorflow:linux_x86_64": "$(location :size_classes_sh) \"3 4\" 3 12 2 >$@",
"//conditions:default": "$(location :size_classes_sh) \"3 4\" 3 12 2 >$@",
}),
tools = [":size_classes_sh"],
diff --git a/third_party/jpeg/jpeg.BUILD b/third_party/jpeg/jpeg.BUILD
index f6078052ec..e431f19382 100644
--- a/third_party/jpeg/jpeg.BUILD
+++ b/third_party/jpeg/jpeg.BUILD
@@ -5,7 +5,7 @@ licenses(["notice"]) # custom notice-style license, see LICENSE.md
exports_files(["LICENSE.md"])
-load("@%ws%//third_party:common.bzl", "template_rule")
+load("@org_tensorflow//third_party:common.bzl", "template_rule")
libjpegturbo_nocopts = "-[W]error"
diff --git a/third_party/nccl.BUILD b/third_party/nccl.BUILD
index 8c7b9bdbe9..b2b8e18824 100644
--- a/third_party/nccl.BUILD
+++ b/third_party/nccl.BUILD
@@ -44,17 +44,17 @@ cc_library(
"-O3",
] + cuda_default_copts(),
linkopts = select({
- "@%ws%//tensorflow:android": [
+ "@org_tensorflow//tensorflow:android": [
"-pie",
],
- "@%ws%//tensorflow:darwin": [
+ "@org_tensorflow//tensorflow:darwin": [
"-Wl,-framework",
"-Wl,CoreFoundation",
"-Wl,-framework",
"-Wl,Security",
],
- "@%ws%//tensorflow:ios": [],
- "@%ws%//tensorflow:windows": [
+ "@org_tensorflow//tensorflow:ios": [],
+ "@org_tensorflow//tensorflow:windows": [
"-DEFAULTLIB:ws2_32.lib",
],
"//conditions:default": [
diff --git a/third_party/repo.bzl b/third_party/repo.bzl
new file mode 100644
index 0000000000..d6e5dfced0
--- /dev/null
+++ b/third_party/repo.bzl
@@ -0,0 +1,103 @@
+# Copyright 2017 The TensorFlow Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Utilities for defining TensorFlow Bazel dependencies."""
+
+_SINGLE_URL_WHITELIST = depset([
+ "arm_compiler",
+ "ortools_archive",
+])
+
+def _is_windows(ctx):
+ return ctx.os.name.lower().find("windows") != -1
+
+def _get_env_var(ctx, name):
+ if name in ctx.os.environ:
+ return ctx.os.environ[name]
+ else:
+ return None
+
+# Executes specified command with arguments and calls 'fail' if it exited with
+# non-zero code
+def _execute_and_check_ret_code(repo_ctx, cmd_and_args):
+ result = repo_ctx.execute(cmd_and_args, timeout=10)
+ if result.return_code != 0:
+ fail(("Non-zero return code({1}) when executing '{0}':\n" + "Stdout: {2}\n"
+ + "Stderr: {3}").format(" ".join(cmd_and_args), result.return_code,
+ result.stdout, result.stderr))
+
+def _repos_are_siblings():
+ return Label("@foo//bar").workspace_root.startswith("../")
+
+# Apply a patch_file to the repository root directory
+# Runs 'patch -p1'
+def _apply_patch(ctx, patch_file):
+ # Don't check patch on Windows, because patch is only available under bash.
+ if not _is_windows(ctx) and not ctx.which("patch"):
+ fail("patch command is not found, please install it")
+ cmd = ["patch", "-p1", "-d", ctx.path("."), "-i", ctx.path(patch_file)]
+ if _is_windows(ctx):
+ bazel_sh = _get_env_var(ctx, "BAZEL_SH")
+ if not bazel_sh:
+ fail("BAZEL_SH environment variable is not set")
+ cmd = [bazel_sh, "-c", " ".join(cmd)]
+ _execute_and_check_ret_code(ctx, cmd)
+
+def _apply_delete(ctx, paths):
+ for path in paths:
+ if path.startswith("/"):
+ fail("refusing to rm -rf path starting with '/': " + path)
+ if ".." in path:
+ fail("refusing to rm -rf path containing '..': " + path)
+ _execute_and_check_ret_code(
+ ctx, ["rm", "-rf"] + [ctx.path(path) for path in paths])
+
+def _tf_http_archive(ctx):
+ if ("mirror.bazel.build" not in ctx.attr.urls[0] or
+ (len(ctx.attr.urls) < 2 and
+ ctx.attr.name not in _SINGLE_URL_WHITELIST)):
+ fail("tf_http_archive(urls) must have redundant URLs. The Bazel Mirror " +
+ "URL must come first. Please note mirroring happens after merge")
+ ctx.download_and_extract(
+ ctx.attr.urls,
+ "",
+ ctx.attr.sha256,
+ ctx.attr.type,
+ ctx.attr.strip_prefix)
+ if ctx.attr.delete:
+ _apply_delete(ctx, ctx.attr.delete)
+ if ctx.attr.patch_file != None:
+ _apply_patch(ctx, ctx.attr.patch_file)
+ if ctx.attr.build_file != None:
+ ctx.template("BUILD", ctx.attr.build_file, {
+ "%prefix%": ".." if _repos_are_siblings() else "external",
+ }, False)
+
+tf_http_archive = repository_rule(
+ implementation=_tf_http_archive,
+ attrs={
+ "sha256": attr.string(mandatory=True),
+ "urls": attr.string_list(mandatory=True, allow_empty=False),
+ "strip_prefix": attr.string(),
+ "type": attr.string(),
+ "delete": attr.string_list(),
+ "patch_file": attr.label(),
+ "build_file": attr.label(),
+ })
+"""Downloads and creates Bazel repos for dependencies.
+
+This is a swappable replacement for both http_archive() and
+new_http_archive() that offers some additional features. It also helps
+ensure best practices are followed.
+"""
diff --git a/third_party/snappy.BUILD b/third_party/snappy.BUILD
index 9c00b7068a..fd48ed8941 100644
--- a/third_party/snappy.BUILD
+++ b/third_party/snappy.BUILD
@@ -50,8 +50,8 @@ genrule(
"-e 's/@ac_cv_have_stddef_h@/1/g' " +
"-e 's/@ac_cv_have_stdint_h@/1/g' " +
select({
- "@%ws%//tensorflow:windows": "-e 's/@ac_cv_have_sys_uio_h@/0/g' ",
- "@%ws%//tensorflow:windows_msvc": "-e 's/@ac_cv_have_sys_uio_h@/0/g' ",
+ "@org_tensorflow//tensorflow:windows": "-e 's/@ac_cv_have_sys_uio_h@/0/g' ",
+ "@org_tensorflow//tensorflow:windows_msvc": "-e 's/@ac_cv_have_sys_uio_h@/0/g' ",
"//conditions:default": "-e 's/@ac_cv_have_sys_uio_h@/1/g' ",
}) +
"-e 's/@SNAPPY_MAJOR@/1/g' " +