author A. Unique TensorFlower <gardener@tensorflow.org> 2017-03-13 16:58:18 -0800
committer TensorFlower Gardener <gardener@tensorflow.org> 2017-03-13 18:09:05 -0700
commit 1b881b7c77bd1e664382785447a170de2b85f688 (patch)
tree 2d3b8c455ef9b93cb55eb03ea91d050db598fa74
parent ee6f27b647fd51b11f9795042c4f6941c77d1c86 (diff)
First version of BigQuery Reader.
Change: 150016997
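
As a quick orientation, below is a minimal usage sketch of the reader this change exposes under `tf.contrib.cloud`. It follows the pattern of the reader's own docstring; the constructor arguments `table_id`, `timestamp_millis`, and `features`, as well as the placeholder project/dataset/table values, are assumptions not fully visible in the hunks that follow.

```python
import tensorflow as tf

# Hypothetical identifiers; substitute a real Cloud project, dataset, table,
# and a snapshot timestamp (milliseconds) for the table to read.
PROJECT, DATASET, TABLE, TIME = "my-project", "my-dataset", "my-table", 1489000000000

# Features to parse out of the tf.Example records the reader emits.
features = dict(
    name=tf.FixedLenFeature([1], tf.string),
    age=tf.FixedLenFeature([1], tf.int64))

# Create the reader, now located in tf.contrib.cloud after this change.
reader = tf.contrib.cloud.BigQueryReader(
    project_id=PROJECT,
    dataset_id=DATASET,
    table_id=TABLE,
    timestamp_millis=TIME,
    num_partitions=4,
    features=features)

# reader.partitions() returns serialized BigQueryTablePartition messages;
# feed them through a queue and read rows back as (key, serialized tf.Example).
queue = tf.train.string_input_producer(reader.partitions())
row_id, examples_serialized = reader.read(queue)
examples = tf.parse_example(examples_serialized, features=features)
```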
-rw-r--r-- tensorflow/BUILD | 3
-rw-r--r-- tensorflow/contrib/BUILD | 1
-rw-r--r-- tensorflow/contrib/__init__.py | 1
-rw-r--r-- tensorflow/contrib/cloud/BUILD | 73
-rw-r--r-- tensorflow/contrib/cloud/__init__.py (renamed from tensorflow/python/ops/cloud/cloud.py) | 15
-rw-r--r-- tensorflow/contrib/cloud/kernels/BUILD (renamed from tensorflow/core/kernels/cloud/BUILD) | 32
-rw-r--r-- tensorflow/contrib/cloud/kernels/bigquery_reader_ops.cc (renamed from tensorflow/core/kernels/cloud/bigquery_reader_ops.cc) | 5
-rw-r--r-- tensorflow/contrib/cloud/kernels/bigquery_table_accessor.cc (renamed from tensorflow/core/kernels/cloud/bigquery_table_accessor.cc) | 2
-rw-r--r-- tensorflow/contrib/cloud/kernels/bigquery_table_accessor.h (renamed from tensorflow/core/kernels/cloud/bigquery_table_accessor.h) | 3
-rw-r--r-- tensorflow/contrib/cloud/kernels/bigquery_table_accessor_test.cc (renamed from tensorflow/core/kernels/cloud/bigquery_table_accessor_test.cc) | 92
-rw-r--r-- tensorflow/contrib/cloud/kernels/bigquery_table_accessor_test_data.h (renamed from tensorflow/core/kernels/cloud/bigquery_table_accessor_test_data.h) | 79
-rw-r--r-- tensorflow/contrib/cloud/kernels/bigquery_table_partition.proto (renamed from tensorflow/core/kernels/cloud/bigquery_table_partition.proto) | 0
-rw-r--r-- tensorflow/contrib/cloud/ops/bigquery_reader_ops.cc (renamed from tensorflow/core/ops/cloud_ops.cc) | 2
-rw-r--r-- tensorflow/contrib/cloud/python/ops/bigquery_reader_ops.py (renamed from tensorflow/python/ops/cloud/bigquery_reader_ops.py) | 13
-rw-r--r-- tensorflow/contrib/cloud/python/ops/bigquery_reader_ops_test.py (renamed from tensorflow/python/ops/cloud/bigquery_reader_ops_test.py) | 3
-rw-r--r-- tensorflow/contrib/cmake/tf_core_kernels.cmake | 4
-rw-r--r-- tensorflow/contrib/cmake/tf_core_ops.cmake | 1
-rw-r--r-- tensorflow/contrib/cmake/tf_python.cmake | 8
-rw-r--r-- tensorflow/contrib/cmake/tf_tests.cmake | 1
-rw-r--r-- tensorflow/core/BUILD | 10
-rw-r--r-- tensorflow/core/kernels/BUILD | 2
-rw-r--r-- tensorflow/core/platform/cloud/http_request.cc | 1
-rw-r--r-- tensorflow/core/platform/default/build_config.bzl | 24
-rw-r--r-- tensorflow/python/BUILD | 38
-rw-r--r-- tensorflow/python/ops/cloud/__init__.py | 0
25 files changed, 302 insertions, 111 deletions
diff --git a/tensorflow/BUILD b/tensorflow/BUILD
index ec2861a675..db6d42e1bc 100644
--- a/tensorflow/BUILD
+++ b/tensorflow/BUILD
@@ -160,6 +160,8 @@ filegroup(
"//tensorflow/contrib:all_files",
"//tensorflow/contrib/android:all_files",
"//tensorflow/contrib/bayesflow:all_files",
+ "//tensorflow/contrib/cloud:all_files",
+ "//tensorflow/contrib/cloud/kernels:all_files",
"//tensorflow/contrib/compiler:all_files",
"//tensorflow/contrib/copy_graph:all_files",
"//tensorflow/contrib/crf:all_files",
@@ -220,7 +222,6 @@ filegroup(
"//tensorflow/core/grappler/inputs:all_files",
"//tensorflow/core/grappler/optimizers:all_files",
"//tensorflow/core/kernels:all_files",
- "//tensorflow/core/kernels/cloud:all_files",
"//tensorflow/core/kernels/hexagon:all_files",
"//tensorflow/core/ops/compat:all_files",
"//tensorflow/core/platform/cloud:all_files",
diff --git a/tensorflow/contrib/BUILD b/tensorflow/contrib/BUILD
index ef36702a52..29d60ae241 100644
--- a/tensorflow/contrib/BUILD
+++ b/tensorflow/contrib/BUILD
@@ -16,6 +16,7 @@ py_library(
visibility = ["//visibility:public"],
deps = [
"//tensorflow/contrib/bayesflow:bayesflow_py",
+ "//tensorflow/contrib/cloud:cloud_py",
"//tensorflow/contrib/compiler:compiler_py",
"//tensorflow/contrib/copy_graph:copy_graph_py",
"//tensorflow/contrib/crf:crf_py",
diff --git a/tensorflow/contrib/__init__.py b/tensorflow/contrib/__init__.py
index af4e130870..7c0d1da8a6 100644
--- a/tensorflow/contrib/__init__.py
+++ b/tensorflow/contrib/__init__.py
@@ -20,6 +20,7 @@ from __future__ import print_function
# Add projects here, they will show up under tf.contrib.
from tensorflow.contrib import bayesflow
+from tensorflow.contrib import cloud
from tensorflow.contrib import compiler
from tensorflow.contrib import copy_graph
from tensorflow.contrib import crf
diff --git a/tensorflow/contrib/cloud/BUILD b/tensorflow/contrib/cloud/BUILD
new file mode 100644
index 0000000000..840997223f
--- /dev/null
+++ b/tensorflow/contrib/cloud/BUILD
@@ -0,0 +1,73 @@
+# Description:
+# BigQueryReader implementation
+
+package(
+ default_visibility = ["//visibility:public"],
+)
+
+licenses(["notice"]) # Apache 2.0
+
+load(
+ "//tensorflow:tensorflow.bzl",
+ "tf_gen_op_libs",
+ "tf_gen_op_wrapper_py",
+ "tf_py_test",
+)
+
+filegroup(
+ name = "all_files",
+ srcs = glob(
+ ["**/*"],
+ exclude = [
+ "**/METADATA",
+ "**/OWNERS",
+ ],
+ ),
+ visibility = ["//tensorflow:__subpackages__"],
+)
+
+tf_gen_op_libs(
+ op_lib_names = ["bigquery_reader_ops"],
+ deps = [
+ "//tensorflow/core:lib",
+ ],
+)
+
+tf_gen_op_wrapper_py(
+ name = "gen_bigquery_reader_ops",
+ out = "python/ops/gen_bigquery_reader_ops.py",
+ require_shape_functions = True,
+ deps = [":bigquery_reader_ops_op_lib"],
+)
+
+py_library(
+ name = "cloud_py",
+ srcs = [
+ "__init__.py",
+ "python/ops/bigquery_reader_ops.py",
+ ],
+ srcs_version = "PY2AND3",
+ deps = [
+ ":gen_bigquery_reader_ops",
+ "//tensorflow/contrib/util:util_py",
+ "//tensorflow/python:framework_for_generated_wrappers",
+ "//tensorflow/python:platform",
+ ],
+)
+
+tf_py_test(
+ name = "bigquery_reader_ops_test",
+ size = "small",
+ srcs = ["python/ops/bigquery_reader_ops_test.py"],
+ additional_deps = [
+ ":bigquery_reader_ops_op_lib",
+ ":cloud_py",
+ "//tensorflow/contrib/cloud/kernels:bigquery_reader_ops",
+ "//tensorflow/python:array_ops",
+ "//tensorflow/python:client_testlib",
+ "//tensorflow/python:data_flow_ops",
+ "//tensorflow/python:io_ops",
+ "//tensorflow/python:parsing_ops",
+ ],
+ tags = ["manual"],
+)
diff --git a/tensorflow/python/ops/cloud/cloud.py b/tensorflow/contrib/cloud/__init__.py
index eb917a987e..8870264b95 100644
--- a/tensorflow/python/ops/cloud/cloud.py
+++ b/tensorflow/contrib/cloud/__init__.py
@@ -1,4 +1,4 @@
-# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
+# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -12,20 +12,17 @@
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
-"""Import cloud ops."""
+"""Module for cloud ops."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
-import sys
-
-# go/tf-wildcard-import
-# pylint: disable=wildcard-import
-from tensorflow.python.ops.cloud.bigquery_reader_ops import *
-# pylint: enable=wildcard-import
+# pylint: disable=line-too-long,wildcard-import
+from tensorflow.contrib.cloud.python.ops.bigquery_reader_ops import *
+# pylint: enable=line-too-long,wildcard-import
from tensorflow.python.util.all_util import remove_undocumented
_allowed_symbols = ['BigQueryReader']
-remove_undocumented(__name__, _allowed_symbols, [sys.modules[__name__]])
+remove_undocumented(__name__, _allowed_symbols)
diff --git a/tensorflow/core/kernels/cloud/BUILD b/tensorflow/contrib/cloud/kernels/BUILD
index 52313ef9ed..2500f10c74 100644
--- a/tensorflow/core/kernels/cloud/BUILD
+++ b/tensorflow/contrib/cloud/kernels/BUILD
@@ -9,8 +9,9 @@ licenses(["notice"]) # Apache 2.0
load(
"//tensorflow:tensorflow.bzl",
- "tf_kernel_library",
"tf_cc_test",
+ "tf_copts",
+ "tf_kernel_library",
)
# For platform specific build config
@@ -36,15 +37,13 @@ tf_kernel_library(
srcs = [
"bigquery_reader_ops.cc",
],
- visibility = ["//visibility:public"],
+ visibility = ["//tensorflow:__subpackages__"],
deps = [
":bigquery_table_accessor",
":bigquery_table_partition_proto_cc",
- "//tensorflow/core:cloud_ops_op_lib",
+ "//tensorflow/contrib/cloud:bigquery_reader_ops_op_lib",
"//tensorflow/core:framework",
"//tensorflow/core:lib",
- "//tensorflow/core:lib_internal",
- "//tensorflow/core:protos_all_cc",
"//tensorflow/core:reader_base",
],
)
@@ -57,28 +56,18 @@ cc_library(
hdrs = [
"bigquery_table_accessor.h",
],
- visibility = ["//visibility:public"],
+ copts = tf_copts(),
+ linkstatic = 1,
deps = [
":bigquery_table_partition_proto_cc",
- "//tensorflow/core:framework",
"//tensorflow/core:lib",
- "//tensorflow/core:lib_internal",
"//tensorflow/core:protos_all_cc",
- "//tensorflow/core:reader_base",
"//tensorflow/core/platform/cloud:google_auth_provider",
"//tensorflow/core/platform/cloud:http_request",
],
alwayslink = 1,
)
-tf_proto_library(
- name = "bigquery_table_partition_proto",
- srcs = [
- "bigquery_table_partition.proto",
- ],
- cc_api_version = 2,
-)
-
tf_cc_test(
name = "bigquery_table_accessor_test",
size = "small",
@@ -89,10 +78,17 @@ tf_cc_test(
deps = [
":bigquery_table_accessor",
"//tensorflow/core:lib_internal",
- "//tensorflow/core:lib_proto_parsing",
"//tensorflow/core:protos_all_cc",
"//tensorflow/core:test",
"//tensorflow/core:test_main",
"//tensorflow/core/platform/cloud:http_request_fake",
],
)
+
+tf_proto_library(
+ name = "bigquery_table_partition_proto",
+ srcs = [
+ "bigquery_table_partition.proto",
+ ],
+ cc_api_version = 2,
+)
diff --git a/tensorflow/core/kernels/cloud/bigquery_reader_ops.cc b/tensorflow/contrib/cloud/kernels/bigquery_reader_ops.cc
index bfaa09cfd6..02a759eefd 100644
--- a/tensorflow/core/kernels/cloud/bigquery_reader_ops.cc
+++ b/tensorflow/contrib/cloud/kernels/bigquery_reader_ops.cc
@@ -17,11 +17,10 @@ limitations under the License.
#include <memory>
#include <set>
-#include "tensorflow/core/example/example.pb.h"
+#include "tensorflow/contrib/cloud/kernels/bigquery_table_accessor.h"
+#include "tensorflow/contrib/cloud/kernels/bigquery_table_partition.pb.h"
#include "tensorflow/core/framework/reader_base.h"
#include "tensorflow/core/framework/reader_op_kernel.h"
-#include "tensorflow/core/kernels/cloud/bigquery_table_accessor.h"
-#include "tensorflow/core/kernels/cloud/bigquery_table_partition.pb.h"
#include "tensorflow/core/lib/core/errors.h"
#include "tensorflow/core/lib/math/math_util.h"
#include "tensorflow/core/lib/strings/numbers.h"
diff --git a/tensorflow/core/kernels/cloud/bigquery_table_accessor.cc b/tensorflow/contrib/cloud/kernels/bigquery_table_accessor.cc
index 3e9adfa372..5e95db55b6 100644
--- a/tensorflow/core/kernels/cloud/bigquery_table_accessor.cc
+++ b/tensorflow/contrib/cloud/kernels/bigquery_table_accessor.cc
@@ -12,7 +12,7 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
-#include "tensorflow/core/kernels/cloud/bigquery_table_accessor.h"
+#include "tensorflow/contrib/cloud/kernels/bigquery_table_accessor.h"
#include "tensorflow/core/example/feature.pb.h"
#include "tensorflow/core/lib/strings/numbers.h"
diff --git a/tensorflow/core/kernels/cloud/bigquery_table_accessor.h b/tensorflow/contrib/cloud/kernels/bigquery_table_accessor.h
index 33d1905b8a..1cd0482186 100644
--- a/tensorflow/core/kernels/cloud/bigquery_table_accessor.h
+++ b/tensorflow/contrib/cloud/kernels/bigquery_table_accessor.h
@@ -19,8 +19,9 @@ limitations under the License.
#include <map>
#include <memory>
#include <vector>
+
+#include "tensorflow/contrib/cloud/kernels/bigquery_table_partition.pb.h"
#include "tensorflow/core/example/example.pb.h"
-#include "tensorflow/core/kernels/cloud/bigquery_table_partition.pb.h"
#include "tensorflow/core/lib/core/errors.h"
#include "tensorflow/core/platform/cloud/google_auth_provider.h"
#include "tensorflow/core/platform/cloud/http_request.h"
diff --git a/tensorflow/core/kernels/cloud/bigquery_table_accessor_test.cc b/tensorflow/contrib/cloud/kernels/bigquery_table_accessor_test.cc
index 7591f9cfd5..9fb339864d 100644
--- a/tensorflow/core/kernels/cloud/bigquery_table_accessor_test.cc
+++ b/tensorflow/contrib/cloud/kernels/bigquery_table_accessor_test.cc
@@ -13,13 +13,12 @@ See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
-#include "tensorflow/core/kernels/cloud/bigquery_table_accessor.h"
+#include "tensorflow/contrib/cloud/kernels/bigquery_table_accessor.h"
+#include "tensorflow/contrib/cloud/kernels/bigquery_table_accessor_test_data.h"
#include "tensorflow/core/example/feature.pb.h"
-#include "tensorflow/core/kernels/cloud/bigquery_table_accessor_test_data.h"
#include "tensorflow/core/lib/core/status_test_util.h"
#include "tensorflow/core/lib/gtl/stl_util.h"
#include "tensorflow/core/platform/cloud/http_request_fake.h"
-#include "tensorflow/core/platform/protobuf.h"
#include "tensorflow/core/platform/test.h"
namespace tensorflow {
@@ -274,6 +273,88 @@ TEST_F(BigQueryTableAccessorTest, ReadOneRowWithNullsTest) {
EXPECT_TRUE(accessor_->Done());
}
+TEST_F(BigQueryTableAccessorTest, ReadOneRowTwoRecords) {
+ requests_.emplace_back(new FakeHttpRequest(
+ "Uri: https://www.googleapis.com/bigquery/v2/projects/test-project/"
+ "datasets/test-dataset/tables/test-table/\n"
+ "Auth Token: fake_token\n",
+ kSampleSchemaTwoRecords));
+ requests_.emplace_back(new FakeHttpRequest(
+ "Uri: https://www.googleapis.com/bigquery/v2/projects/test-project/"
+ "datasets/test-dataset/tables/test-table/data?maxResults=1&startIndex=2\n"
+ "Auth Token: fake_token\n",
+ kTestRowWithTwoRecords));
+ BigQueryTablePartition partition;
+ partition.set_start_index(2);
+ partition.set_end_index(2);
+ TF_EXPECT_OK(CreateTableAccessor(
+ kTestProject, kTestDataset, kTestTable, 1, 1,
+ {"rec_field2.bool_field", "rec_field1.float_field"}, partition));
+
+ int64 row_id;
+ Example example;
+ TF_EXPECT_OK(accessor_->ReadRow(&row_id, &example));
+
+ // Validate returned result.
+ Example expected_example;
+ ASSERT_TRUE(protobuf::TextFormat::ParseFromString(
+ kTestExampleProtoWithTwoRecords, &expected_example));
+ EXPECT_EQ(DeterministicSerialization(expected_example),
+ DeterministicSerialization(example));
+ EXPECT_EQ(row_id, 2);
+ EXPECT_TRUE(accessor_->Done());
+}
+
+TEST_F(BigQueryTableAccessorTest, NonExistentColumns) {
+ requests_.emplace_back(new FakeHttpRequest(
+ "Uri: https://www.googleapis.com/bigquery/v2/projects/test-project/"
+ "datasets/test-dataset/tables/test-table/\n"
+ "Auth Token: fake_token\n",
+ kSampleSchemaTwoRecords));
+ requests_.emplace_back(new FakeHttpRequest(
+ "Uri: https://www.googleapis.com/bigquery/v2/projects/test-project/"
+ "datasets/test-dataset/tables/test-table/data?maxResults=1&startIndex=2\n"
+ "Auth Token: fake_token\n",
+ kTestRowWithTwoRecords));
+ BigQueryTablePartition partition;
+ partition.set_start_index(2);
+ partition.set_end_index(2);
+ TF_EXPECT_OK(CreateTableAccessor(kTestProject, kTestDataset, kTestTable, 1, 1,
+ {"bool_field", "float_field"}, partition));
+ int64 row_id;
+ Example example;
+ TF_EXPECT_OK(accessor_->ReadRow(&row_id, &example));
+
+ // Validate returned result.
+ EXPECT_EQ(row_id, 2);
+ EXPECT_TRUE(accessor_->Done());
+}
+
+TEST_F(BigQueryTableAccessorTest, EmptyRow) {
+ requests_.emplace_back(new FakeHttpRequest(
+ "Uri: https://www.googleapis.com/bigquery/v2/projects/test-project/"
+ "datasets/test-dataset/tables/test-table/\n"
+ "Auth Token: fake_token\n",
+ kSampleSchemaTwoRecords));
+ requests_.emplace_back(new FakeHttpRequest(
+ "Uri: https://www.googleapis.com/bigquery/v2/projects/test-project/"
+ "datasets/test-dataset/tables/test-table/data?maxResults=1&startIndex=2\n"
+ "Auth Token: fake_token\n",
+ kTestEmptyRow));
+ BigQueryTablePartition partition;
+ partition.set_start_index(2);
+ partition.set_end_index(2);
+ TF_EXPECT_OK(CreateTableAccessor(kTestProject, kTestDataset, kTestTable, 1, 1,
+ {}, partition));
+ int64 row_id;
+ Example example;
+ TF_EXPECT_OK(accessor_->ReadRow(&row_id, &example));
+
+ // Validate returned result.
+ EXPECT_EQ(row_id, 2);
+ EXPECT_TRUE(accessor_->Done());
+}
+
TEST_F(BigQueryTableAccessorTest, BrokenRowTest) {
requests_.emplace_back(new FakeHttpRequest(
"Uri: https://www.googleapis.com/bigquery/v2/projects/test-project/"
@@ -340,6 +421,7 @@ TEST_F(BigQueryTableAccessorTest, MultiplePagesTest) {
TF_EXPECT_OK(accessor_->ReadRow(&row_id, &example));
EXPECT_EQ(3, row_id);
EXPECT_TRUE(accessor_->Done());
+
Example expected_example;
ASSERT_TRUE(protobuf::TextFormat::ParseFromString(kTestExampleProtoWithNulls,
&expected_example));
@@ -387,7 +469,7 @@ TEST_F(BigQueryTableAccessorTest, SwitchingPartitionsTest) {
partition.set_start_index(3);
partition.set_end_index(-1);
- TF_EXPECT_OK(accessor_->SetPartition(partition));
+ accessor_->SetPartition(partition);
TF_EXPECT_OK(accessor_->ReadRow(&row_id, &example));
EXPECT_EQ(3, row_id);
EXPECT_TRUE(accessor_->Done());
@@ -396,7 +478,7 @@ TEST_F(BigQueryTableAccessorTest, SwitchingPartitionsTest) {
partition.set_start_index(0);
partition.set_end_index(1);
- TF_EXPECT_OK(accessor_->SetPartition(partition));
+ accessor_->SetPartition(partition);
TF_EXPECT_OK(accessor_->ReadRow(&row_id, &example));
EXPECT_EQ(0, row_id);
EXPECT_FALSE(accessor_->Done());
diff --git a/tensorflow/core/kernels/cloud/bigquery_table_accessor_test_data.h b/tensorflow/contrib/cloud/kernels/bigquery_table_accessor_test_data.h
index e339ff25ff..b2b11f4f57 100644
--- a/tensorflow/core/kernels/cloud/bigquery_table_accessor_test_data.h
+++ b/tensorflow/contrib/cloud/kernels/bigquery_table_accessor_test_data.h
@@ -73,6 +73,43 @@ const string kSampleSchema = R"({
"numRows": "4"
})";
+const string kSampleSchemaTwoRecords = R"({
+ "kind": "bigquery#table",
+ "etag": "\"4zcX32ezvFoFzxHoG04qJqKZk6c/MTQ1Nzk3NTgwNzE4Mw\"",
+ "id": "test-project:test-dataset.test-table",
+ "schema": {
+ "fields": [
+ {
+ "name": "rec_field1",
+ "type": "RECORD",
+ "fields": [
+ {
+ "name": "int_field",
+ "type": "INTEGER",
+ "mode": "NULLABLE"
+ }, {
+ "name": "float_field",
+ "type": "FLOAT",
+ "mode": "NULLABLE"
+ }]
+ },{
+ "name": "rec_field2",
+ "type": "RECORD",
+ "fields": [
+ {
+ "name": "bool_field",
+ "type": "BOOLEAN",
+ "mode": "NULLABLE"
+ },{
+ "name": "bytes_field",
+ "type": "BYTES",
+ "mode": "NULLABLE"
+ }]
+ }]
+ },
+ "numRows": "4"
+})";
+
const string kTestRow = R"({
"kind": "bigquery#table",
"etag": "\"4zcX32ezvFoFzxHoG04qJqKZk6c/MTQ1Nzk3NTgwNzE4Mw\"",
@@ -309,6 +346,26 @@ const string kTestPartialExampleProto = R"(features {
}
)";
+const string kTestExampleProtoWithTwoRecords = R"(features {
+ feature {
+ key: "rec_field1.float_field"
+ value {
+ float_list {
+ value: 1.23456
+ }
+ }
+ }
+ feature {
+ key: "rec_field2.bool_field"
+ value {
+ int64_list {
+ value: 1
+ }
+ }
+ }
+}
+)";
+
const string kTestTwoRows = R"({
"kind": "bigquery#table",
"etag": "\"4zcX32ezvFoFzxHoG04qJqKZk6c/MTQ1Nzk3NTgwNzE4Mw\"",
@@ -319,6 +376,28 @@ const string kTestTwoRows = R"({
{"f": [{"v": "2222"},{},{},{},{},{},{},{},{}]}
]})";
+const string kTestRowWithTwoRecords = R"({
+ "kind": "bigquery#table",
+ "etag": "\"4zcX32ezvFoFzxHoG04qJqKZk6c/MTQ1Nzk3NTgwNzE4Mw\"",
+ "id": "test-project:test-dataset.test-table",
+ "rows": [
+ {
+ "f": [
+ {"v": {"f": [{}, {"v": "1.23456"}]}},
+ {"v": {"f": [{"v": "true"}, {}]}
+ }]}]})";
+
+const string kTestEmptyRow = R"({
+ "kind": "bigquery#table",
+ "etag": "\"4zcX32ezvFoFzxHoG04qJqKZk6c/MTQ1Nzk3NTgwNzE4Mw\"",
+ "id": "test-project:test-dataset.test-table",
+ "rows": [
+ {
+ "f": [
+ {"v": {"f": [{}, {}]}},
+ {"v": {"f": [{"v": null}, {}]}
+ }]}]})";
+
} // namespace
} // namespace tensorflow
diff --git a/tensorflow/core/kernels/cloud/bigquery_table_partition.proto b/tensorflow/contrib/cloud/kernels/bigquery_table_partition.proto
index 2d9d1380db..2d9d1380db 100644
--- a/tensorflow/core/kernels/cloud/bigquery_table_partition.proto
+++ b/tensorflow/contrib/cloud/kernels/bigquery_table_partition.proto
diff --git a/tensorflow/core/ops/cloud_ops.cc b/tensorflow/contrib/cloud/ops/bigquery_reader_ops.cc
index 89f31a46ab..fbba04a31a 100644
--- a/tensorflow/core/ops/cloud_ops.cc
+++ b/tensorflow/contrib/cloud/ops/bigquery_reader_ops.cc
@@ -13,7 +13,7 @@ See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
-/* This file registers all cloud ops. */
+/* This file registers Bigquery reader ops. */
#include "tensorflow/core/framework/op.h"
#include "tensorflow/core/framework/shape_inference.h"
diff --git a/tensorflow/python/ops/cloud/bigquery_reader_ops.py b/tensorflow/contrib/cloud/python/ops/bigquery_reader_ops.py
index 7786aea025..136707da18 100644
--- a/tensorflow/python/ops/cloud/bigquery_reader_ops.py
+++ b/tensorflow/contrib/cloud/python/ops/bigquery_reader_ops.py
@@ -18,21 +18,14 @@ from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
+from tensorflow.contrib.cloud.python.ops import gen_bigquery_reader_ops
from tensorflow.python.framework import ops
-from tensorflow.python.ops import gen_cloud_ops
from tensorflow.python.ops import io_ops
class BigQueryReader(io_ops.ReaderBase):
"""A Reader that outputs keys and tf.Example values from a BigQuery table.
- Note(1): This op is currently not linked into the binary. It will be linked
- by default after more perf testing.
-
- Note(2): This op currently returns example proto as its output. This is not
- final and we are experimenting with adding support for returning csv. Support
- for example proto may be deprecated after that.
-
Example use:
```python
# Assume a BigQuery has the following schema,
@@ -121,7 +114,7 @@ class BigQueryReader(io_ops.ReaderBase):
self._num_partitions = num_partitions
self._test_end_point = test_end_point
- reader = gen_cloud_ops.big_query_reader(
+ reader = gen_bigquery_reader_ops.big_query_reader(
name=name,
project_id=self._project_id,
dataset_id=self._dataset_id,
@@ -143,7 +136,7 @@ class BigQueryReader(io_ops.ReaderBase):
Returns:
`1-D` string `Tensor` of serialized `BigQueryTablePartition` messages.
"""
- return gen_cloud_ops.generate_big_query_reader_partitions(
+ return gen_bigquery_reader_ops.generate_big_query_reader_partitions(
name=name,
project_id=self._project_id,
dataset_id=self._dataset_id,
diff --git a/tensorflow/python/ops/cloud/bigquery_reader_ops_test.py b/tensorflow/contrib/cloud/python/ops/bigquery_reader_ops_test.py
index 141b0af901..b7d044ed25 100644
--- a/tensorflow/python/ops/cloud/bigquery_reader_ops_test.py
+++ b/tensorflow/contrib/cloud/python/ops/bigquery_reader_ops_test.py
@@ -26,13 +26,13 @@ import threading
from six.moves import SimpleHTTPServer
from six.moves import socketserver
+from tensorflow.contrib.cloud.python.ops import bigquery_reader_ops as cloud
from tensorflow.core.example import example_pb2
from tensorflow.core.framework import types_pb2
from tensorflow.python.framework import dtypes
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import data_flow_ops
from tensorflow.python.ops import parsing_ops
-from tensorflow.python.ops.cloud import cloud
from tensorflow.python.platform import test
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.util import compat
@@ -176,6 +176,7 @@ class BigQueryReaderOpsTest(test.TestCase):
self.server.start()
logging.info("server address is %s:%s", self.server.httpd.server_address[0],
self.server.httpd.server_address[1])
+
# An override to bypass the GCP auth token retrieval logic
# in google_auth_provider.cc.
os.environ["GOOGLE_AUTH_TOKEN_FOR_TESTING"] = "not-used"
diff --git a/tensorflow/contrib/cmake/tf_core_kernels.cmake b/tensorflow/contrib/cmake/tf_core_kernels.cmake
index 556a0b8919..dd28817b54 100644
--- a/tensorflow/contrib/cmake/tf_core_kernels.cmake
+++ b/tensorflow/contrib/cmake/tf_core_kernels.cmake
@@ -69,8 +69,8 @@ endif(tensorflow_BUILD_CONTRIB_KERNELS)
if(NOT tensorflow_ENABLE_SSL_SUPPORT)
# Cloud libraries require boringssl.
file(GLOB tf_core_kernels_cloud_srcs
- "${tensorflow_source_dir}/tensorflow/core/kernels/cloud/*.h"
- "${tensorflow_source_dir}/tensorflow/core/kernels/cloud/*.cc"
+ "${tensorflow_source_dir}/tensorflow/contrib/cloud/kernels/*.h"
+ "${tensorflow_source_dir}/tensorflow/contrib/cloud/kernels/*.cc"
)
list(REMOVE_ITEM tf_core_kernels_srcs ${tf_core_kernels_cloud_srcs})
endif()
diff --git a/tensorflow/contrib/cmake/tf_core_ops.cmake b/tensorflow/contrib/cmake/tf_core_ops.cmake
index 8e237a78a7..73686d0dd3 100644
--- a/tensorflow/contrib/cmake/tf_core_ops.cmake
+++ b/tensorflow/contrib/cmake/tf_core_ops.cmake
@@ -51,6 +51,7 @@ GENERATE_CONTRIB_OP_LIBRARY(factorization_factorization "${tensorflow_source_dir
GENERATE_CONTRIB_OP_LIBRARY(framework_variable "${tensorflow_source_dir}/tensorflow/contrib/framework/ops/variable_ops.cc")
GENERATE_CONTRIB_OP_LIBRARY(memory_stats "${tensorflow_source_dir}/tensorflow/contrib/memory_stats/ops/memory_stats_ops.cc")
GENERATE_CONTRIB_OP_LIBRARY(tensor_forest "${tensorflow_source_dir}/tensorflow/contrib/tensor_forest/ops/tensor_forest_ops.cc")
+GENERATE_CONTRIB_OP_LIBRARY(bigquery_reader "${tensorflow_source_dir}/tensorflow/contrib/cloud/ops/bigquery_reader_ops.cc")
########################################################
# tf_user_ops library
diff --git a/tensorflow/contrib/cmake/tf_python.cmake b/tensorflow/contrib/cmake/tf_python.cmake
index 38047cae78..e58b672347 100644
--- a/tensorflow/contrib/cmake/tf_python.cmake
+++ b/tensorflow/contrib/cmake/tf_python.cmake
@@ -227,6 +227,11 @@ add_python_module("tensorflow/contrib/bayesflow/examples/reinforce_simple")
add_python_module("tensorflow/contrib/bayesflow/python")
add_python_module("tensorflow/contrib/bayesflow/python/kernel_tests")
add_python_module("tensorflow/contrib/bayesflow/python/ops")
+add_python_module("tensorflow/contrib/cloud")
+add_python_module("tensorflow/contrib/cloud/kernels")
+add_python_module("tensorflow/contrib/cloud/ops")
+add_python_module("tensorflow/contrib/cloud/python")
+add_python_module("tensorflow/contrib/cloud/python/ops")
add_python_module("tensorflow/contrib/compiler")
add_python_module("tensorflow/contrib/copy_graph")
add_python_module("tensorflow/contrib/copy_graph/python")
@@ -542,6 +547,9 @@ GENERATE_PYTHON_OP_LIB("contrib_memory_stats_ops"
GENERATE_PYTHON_OP_LIB("contrib_tensor_forest_ops"
DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/tf_python/tensorflow/contrib/tensor_forest/python/ops/gen_tensor_forest_ops.py)
+GENERATE_PYTHON_OP_LIB("contrib_bigquery_reader_ops"
+ DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/tf_python/tensorflow/contrib/cloud/python/ops/gen_bigquery_reader_ops.py)
+
add_custom_target(tf_python_ops SOURCES ${tf_python_ops_generated_files} ${PYTHON_PROTO_GENFILES})
add_dependencies(tf_python_ops tf_python_op_gen_main)
diff --git a/tensorflow/contrib/cmake/tf_tests.cmake b/tensorflow/contrib/cmake/tf_tests.cmake
index d1c6a74937..fc3363189d 100644
--- a/tensorflow/contrib/cmake/tf_tests.cmake
+++ b/tensorflow/contrib/cmake/tf_tests.cmake
@@ -181,6 +181,7 @@ if (tensorflow_BUILD_PYTHON_TESTS)
"${tensorflow_source_dir}/tensorflow/contrib/tensor_forest/python/kernel_tests/sample_inputs_op_test.py" # Results in wrong order.
"${tensorflow_source_dir}/tensorflow/contrib/tensor_forest/python/kernel_tests/scatter_add_ndim_op_test.py" # Bad placement.
"${tensorflow_source_dir}/tensorflow/contrib/tensor_forest/python/topn_test.py" # Results inaccurate
+ "${tensorflow_source_dir}/tensorflow/python/ops/cloud/bigquery_reader_ops_test.py" # No libcurl support
# Failing on some CI.
"${tensorflow_source_dir}/tensorflow/python/debug/cli/analyzer_cli_test.py"
"${tensorflow_source_dir}/tensorflow/python/debug/lib/session_debug_file_test.py"
diff --git a/tensorflow/core/BUILD b/tensorflow/core/BUILD
index a4d0ec63a3..0fd71f6f0a 100644
--- a/tensorflow/core/BUILD
+++ b/tensorflow/core/BUILD
@@ -538,16 +538,6 @@ cc_library(
)
cc_library(
- name = "cloud_ops_op_lib",
- srcs = ["ops/cloud_ops.cc"],
- copts = tf_copts(),
- linkstatic = 1,
- visibility = ["//visibility:public"],
- deps = [":framework"],
- alwayslink = 1,
-)
-
-cc_library(
name = "ops",
visibility = ["//visibility:public"],
deps = [
diff --git a/tensorflow/core/kernels/BUILD b/tensorflow/core/kernels/BUILD
index a4eef8fe24..e2c1ade13d 100644
--- a/tensorflow/core/kernels/BUILD
+++ b/tensorflow/core/kernels/BUILD
@@ -3827,8 +3827,6 @@ filegroup(
# Excluded due to experimental status:
"debug_ops.*",
"scatter_nd_op*",
- # Lib CURL is not supported on Android.
- "bigquery*",
],
),
visibility = ["//visibility:public"],
diff --git a/tensorflow/core/platform/cloud/http_request.cc b/tensorflow/core/platform/cloud/http_request.cc
index 8a8d1e448a..9267d3ea83 100644
--- a/tensorflow/core/platform/cloud/http_request.cc
+++ b/tensorflow/core/platform/cloud/http_request.cc
@@ -19,7 +19,6 @@ limitations under the License.
#include "tensorflow/core/lib/gtl/map_util.h"
#include "tensorflow/core/lib/strings/scanner.h"
#include "tensorflow/core/lib/strings/str_util.h"
-#include "tensorflow/core/platform/protobuf.h"
#include "tensorflow/core/platform/types.h"
#include "tensorflow/core/public/version.h"
diff --git a/tensorflow/core/platform/default/build_config.bzl b/tensorflow/core/platform/default/build_config.bzl
index a2c133b43a..338d9309e8 100644
--- a/tensorflow/core/platform/default/build_config.bzl
+++ b/tensorflow/core/platform/default/build_config.bzl
@@ -227,19 +227,27 @@ def tf_additional_core_deps():
# TODO(jart, jhseu): Delete when GCP is default on.
def tf_additional_cloud_op_deps():
deps = []
- # TODO(hormati): Remove the comments below to enable BigQuery op. The op is
- # not linked for now because it is under perf testing.
- #if WITH_GCP_SUPPORT:
- # deps = if_not_mobile(["//tensorflow/core/kernels/cloud:bigquery_reader_ops"])
+ if WITH_GCP_SUPPORT:
+ deps = select({
+ "//tensorflow:windows": [],
+ "//tensorflow:android": [],
+ "//tensorflow:ios": [],
+ "//conditions:default":
+ ["//tensorflow/contrib/cloud:bigquery_reader_ops_op_lib"],
+ })
return deps
# TODO(jart, jhseu): Delete when GCP is default on.
def tf_additional_cloud_kernel_deps():
deps = []
- # TODO(hormati): Remove the comments below to enable BigQuery op. The op is
- # not linked for now because it is under perf testing.
- #if WITH_GCP_SUPPORT:
- # deps = if_not_mobile(["//tensorflow/core:cloud_ops_op_lib"])
+ if WITH_GCP_SUPPORT:
+ deps = select({
+ "//tensorflow:windows": [],
+ "//tensorflow:android": [],
+ "//tensorflow:ios": [],
+ "//conditions:default":
+ ["//tensorflow/contrib/cloud/kernels:bigquery_reader_ops"],
+ })
return deps
def tf_lib_proto_parsing_deps():
diff --git a/tensorflow/python/BUILD b/tensorflow/python/BUILD
index 40ec2569de..7f416dc609 100644
--- a/tensorflow/python/BUILD
+++ b/tensorflow/python/BUILD
@@ -45,7 +45,6 @@ py_library(
":check_ops",
":client",
":client_testlib",
- ":cloud_ops",
":confusion_matrix",
":control_flow_ops",
":errors",
@@ -124,38 +123,6 @@ py_library(
deps = [":platform_benchmark"],
)
-py_library(
- name = "cloud_ops",
- srcs = [
- "ops/cloud/__init__.py",
- "ops/cloud/bigquery_reader_ops.py",
- "ops/cloud/cloud.py",
- ],
- srcs_version = "PY2AND3",
- deps = [
- ":cloud_ops_gen",
- ":framework_for_generated_wrappers",
- ],
-)
-
-tf_py_test(
- name = "bigquery_reader_ops_test",
- size = "small",
- srcs = ["ops/cloud/bigquery_reader_ops_test.py"],
- additional_deps = [
- ":array_ops",
- ":client_testlib",
- ":cloud_ops",
- ":data_flow_ops",
- ":io_ops",
- ":parsing_ops",
- ":util",
- "//tensorflow/core/kernels/cloud:bigquery_reader_ops",
- "//tensorflow/core:cloud_ops_op_lib",
- ],
- tags = ["manual"],
-)
-
tf_py_test(
name = "resource_loader_test",
size = "small",
@@ -1019,11 +986,6 @@ tf_gen_op_wrapper_private_py(
)
tf_gen_op_wrapper_private_py(
- name = "cloud_ops_gen",
- require_shape_functions = True,
-)
-
-tf_gen_op_wrapper_private_py(
name = "control_flow_ops_gen",
require_shape_functions = True,
visibility = ["//learning/brain/python/ops:__pkg__"],
diff --git a/tensorflow/python/ops/cloud/__init__.py b/tensorflow/python/ops/cloud/__init__.py
deleted file mode 100644
index e69de29bb2..0000000000
--- a/tensorflow/python/ops/cloud/__init__.py
+++ /dev/null