author     Misha Brukman <mbrukman@google.com>              2018-07-22 12:48:00 -0700
committer  TensorFlower Gardener <gardener@tensorflow.org>  2018-07-22 12:50:47 -0700
commit     162304f9da4114f5ed3f0e4c27929413e7abc965 (patch)
tree       57198c495fbab05f754bc786c9f5aba5dc62323f /tensorflow/contrib/bigtable
parent     88e560d6fadc1cf23519b00a9de5ed7c973536fd (diff)
[tf.data / Bigtable] Renamed BigTable class to BigtableTable for clarity
This removes the confusion between the BigTable and Bigtable naming and cleans up all other uses of BigTable in error messages.

PiperOrigin-RevId: 205586899
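For illustration, here is a minimal sketch of how user code is affected by the rename. It assumes the `tensorflow.contrib.bigtable` package shown in this patch; the project, instance, and table names are placeholders.

```python
from tensorflow.contrib import bigtable

# Connect to a Cloud Bigtable instance (names below are placeholders).
client = bigtable.BigtableClient("my-project", "my-instance")

# Before this change, client.table() returned a `BigTable` object;
# after this change, the same call returns a `BigtableTable` object.
table = client.table("my-table")
```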
Diffstat (limited to 'tensorflow/contrib/bigtable')
-rw-r--r--  tensorflow/contrib/bigtable/README.md                                 | 16
-rw-r--r--  tensorflow/contrib/bigtable/__init__.py                               |  6
-rw-r--r--  tensorflow/contrib/bigtable/kernels/bigtable_kernels.cc               |  2
-rw-r--r--  tensorflow/contrib/bigtable/kernels/bigtable_lib.cc                   |  8
-rw-r--r--  tensorflow/contrib/bigtable/python/kernel_tests/bigtable_ops_test.py  |  2
-rw-r--r--  tensorflow/contrib/bigtable/python/ops/bigtable_api.py                | 17
6 files changed, 23 insertions, 28 deletions
diff --git a/tensorflow/contrib/bigtable/README.md b/tensorflow/contrib/bigtable/README.md
index b2c0460f04..d7c71a20ed 100644
--- a/tensorflow/contrib/bigtable/README.md
+++ b/tensorflow/contrib/bigtable/README.md
@@ -17,7 +17,7 @@ APIs, see the [official Cloud Bigtable client library documentation][clientdoc].
## Sample Use
-There are three main reading styles supported by the `BigTable` class:
+There are three main reading styles supported by the `BigtableTable` class:
1. **Reading keys**: Read only the row keys in a table. Keys are returned in
sorted order from the table. Most key reading operations retrieve all keys
@@ -34,9 +34,9 @@ There are three main reading styles supported by the `BigTable` class:
When using the Cloud Bigtable API, the workflow is:
1. Create a `BigtableClient` object.
- 2. Use the `BigtableClient` to create `BigTable` objects corresponding to each
- table in the Bigtable instance you would like to access.
- 3. Call methods on the `BigTable` object to create `tf.data.Dataset`s to
+ 2. Use the `BigtableClient` to create `BigtableTable` objects corresponding to
+ each table in the Cloud Bigtable instance you would like to access.
+ 3. Call methods on the `BigtableTable` object to create `tf.data.Dataset`s to
retrieve data.
The following is an example for how to read all row keys with the prefix
@@ -116,7 +116,7 @@ Given a contiguous range of rows retrieve both the row key and the data
associated with a fixed set of columns. Scanning is the most efficient way to
retrieve data from Cloud Bigtable and is thus a very common API for high
performance data pipelines. To construct a scanning `tf.data.Dataset` from a
-`BigTable` object, call one of the following methods:
+`BigtableTable` object, call one of the following methods:
- `table.scan_prefix(prefix, ...)`
- `table.scan_range(start, end, ...)`
@@ -342,9 +342,3 @@ are available.
- **Cloud TPU**: Your Cloud TPUs run with the designated Cloud TPU service
account dedicated to your GCP project. Ensure the service account has been
authorized via the Cloud Console to access your Cloud Bigtable instances.
-
-### `BigTable` vs Bigtable?
-
-Cloud Bigtable is spelled with a lower-case (aka common) `t`. The Python class
-`BigTable`, however is short for `BigtableTable`, and thus uses an upper-case
-(aka capital) `T`.
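As a rough sketch of the three-step workflow and the scanning read style described in the README hunks above, assuming the `tensorflow.contrib.bigtable` import path; the column-selection keyword passed to `scan_prefix` stands in for the elided "..." arguments and is only a hypothetical placeholder, as is the batching at the end.

```python
from tensorflow.contrib import bigtable

# 1. Create a BigtableClient (project and instance names are placeholders).
client = bigtable.BigtableClient("my-project", "my-instance")

# 2. Open a table in the Cloud Bigtable instance; this returns a BigtableTable.
table = client.table("my-table")

# 3. Build a scanning tf.data.Dataset over rows whose keys share a prefix.
#    `cf1="example"` is a hypothetical column selection standing in for the
#    arguments elided as "..." in the README.
dataset = table.scan_prefix("train_", cf1="example")

# The result is an ordinary tf.data.Dataset and can be batched, prefetched, etc.
dataset = dataset.batch(32)
```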
diff --git a/tensorflow/contrib/bigtable/__init__.py b/tensorflow/contrib/bigtable/__init__.py
index 7df054637c..b7d89c9842 100644
--- a/tensorflow/contrib/bigtable/__init__.py
+++ b/tensorflow/contrib/bigtable/__init__.py
@@ -18,7 +18,7 @@ This contrib package allows TensorFlow to interface directly with Cloud Bigtable
for high-speed data loading.
@@BigtableClient
-@@BigTable
+@@BigtableTable
"""
@@ -26,14 +26,14 @@ from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
-from tensorflow.contrib.bigtable.python.ops.bigtable_api import BigTable
from tensorflow.contrib.bigtable.python.ops.bigtable_api import BigtableClient
+from tensorflow.contrib.bigtable.python.ops.bigtable_api import BigtableTable
from tensorflow.python.util.all_util import remove_undocumented
_allowed_symbols = [
- 'BigTable',
'BigtableClient',
+ 'BigtableTable',
]
remove_undocumented(__name__, _allowed_symbols)
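A small sketch of what the export change above means for the package's public surface once `remove_undocumented` has run (import path as in the patch):

```python
from tensorflow.contrib import bigtable

# BigtableTable and BigtableClient are the documented public symbols...
assert hasattr(bigtable, "BigtableTable")
assert hasattr(bigtable, "BigtableClient")

# ...and the old BigTable name is no longer exported.
assert not hasattr(bigtable, "BigTable")
```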
diff --git a/tensorflow/contrib/bigtable/kernels/bigtable_kernels.cc b/tensorflow/contrib/bigtable/kernels/bigtable_kernels.cc
index 70923e6287..a6755a3496 100644
--- a/tensorflow/contrib/bigtable/kernels/bigtable_kernels.cc
+++ b/tensorflow/contrib/bigtable/kernels/bigtable_kernels.cc
@@ -276,7 +276,7 @@ class ToBigtableOp : public AsyncOpKernel {
}
OP_REQUIRES_ASYNC(
ctx, failures.empty() && mutation_status.ok(),
- errors::Unknown("Failure while writing to BigTable: ",
+ errors::Unknown("Failure while writing to Cloud Bigtable: ",
mutation_status.error_code(), " - ",
mutation_status.error_message(), " (",
mutation_status.error_details(),
diff --git a/tensorflow/contrib/bigtable/kernels/bigtable_lib.cc b/tensorflow/contrib/bigtable/kernels/bigtable_lib.cc
index 2514575f30..67bf14c176 100644
--- a/tensorflow/contrib/bigtable/kernels/bigtable_lib.cc
+++ b/tensorflow/contrib/bigtable/kernels/bigtable_lib.cc
@@ -27,10 +27,10 @@ Status GrpcStatusToTfStatus(const ::grpc::Status& status) {
status.error_code() == ::grpc::StatusCode::OUT_OF_RANGE) {
grpc_code = ::grpc::StatusCode::INTERNAL;
}
- return Status(
- static_cast<::tensorflow::error::Code>(status.error_code()),
- strings::StrCat("Error reading from BigTable: ", status.error_message(),
- " (Details: ", status.error_details(), ")"));
+ return Status(static_cast<::tensorflow::error::Code>(status.error_code()),
+ strings::StrCat("Error reading from Cloud Bigtable: ",
+ status.error_message(),
+ " (Details: ", status.error_details(), ")"));
}
string RegexFromStringSet(const std::vector<string>& strs) {
diff --git a/tensorflow/contrib/bigtable/python/kernel_tests/bigtable_ops_test.py b/tensorflow/contrib/bigtable/python/kernel_tests/bigtable_ops_test.py
index 2f20064619..e36f7f32c6 100644
--- a/tensorflow/contrib/bigtable/python/kernel_tests/bigtable_ops_test.py
+++ b/tensorflow/contrib/bigtable/python/kernel_tests/bigtable_ops_test.py
@@ -44,7 +44,7 @@ class BigtableOpsTest(test.TestCase):
def setUp(self):
self._client = gen_bigtable_test_ops.bigtable_test_client()
table = gen_bigtable_ops.bigtable_table(self._client, "testtable")
- self._table = bigtable.BigTable("testtable", None, table)
+ self._table = bigtable.BigtableTable("testtable", None, table)
def _makeSimpleDataset(self):
output_rows = dataset_ops.Dataset.from_tensor_slices(self.COMMON_ROW_KEYS)
diff --git a/tensorflow/contrib/bigtable/python/ops/bigtable_api.py b/tensorflow/contrib/bigtable/python/ops/bigtable_api.py
index 9f73b7223c..fd30aa8bbb 100644
--- a/tensorflow/contrib/bigtable/python/ops/bigtable_api.py
+++ b/tensorflow/contrib/bigtable/python/ops/bigtable_api.py
@@ -94,7 +94,7 @@ class BigtableClient(object):
project_id, instance_id, connection_pool_size, max_receive_message_size)
def table(self, name, snapshot=None):
- """Opens a table and returns a `BigTable` object.
+ """Opens a table and returns a `BigtableTable` object.
Args:
name: A `tf.string` `tf.Tensor` name of the table to open.
@@ -102,19 +102,20 @@ class BigtableClient(object):
request the creation of a snapshot. (Note: currently unimplemented.)
Returns:
- A `BigTable` python object representing the operations available on the
- table.
+ A `BigtableTable` python object representing the operations available on
+ the table.
"""
# TODO(saeta): Implement snapshot functionality.
table = gen_bigtable_ops.bigtable_table(self._resource, name)
- return BigTable(name, snapshot, table)
+ return BigtableTable(name, snapshot, table)
-class BigTable(object):
- """BigTable is the entrypoint for reading and writing data in Cloud Bigtable.
+class BigtableTable(object):
+ """BigtableTable is the entrypoint for reading and writing data in Cloud
+ Bigtable.
- This BigTable class is the python representation of the Cloud Bigtable table
- within TensorFlow. Methods on this class allow data to be read from and
+ This BigtableTable class is the Python representation of the Cloud Bigtable
+ table within TensorFlow. Methods on this class allow data to be read from and
written to the Cloud Bigtable service in flexible and high performance
manners.
"""