diff options
author | Jiri Simsa <jsimsa@google.com> | 2018-08-13 16:07:17 -0700 |
---|---|---|
committer | TensorFlower Gardener <gardener@tensorflow.org> | 2018-08-13 16:16:47 -0700 |
commit | 83f1458ec1c19b3d46676ab543dff4ec401a0dd0 (patch) | |
tree | 1722ba4f61266464782ccae19987a71c2e5a3043 /tensorflow/contrib/bigtable | |
parent | c527d67d21f4b1d778ca92d3c3478c381aba5fd6 (diff) |
[tf.data] Internal refactoring of C++ classes and APIs.
- replacing `OpKernelContext` with newly introduced `DatasetContext` in `DatasetBase` constructor to make it possible to instantiate `DatasetBase` in places where an instance of `OpKernelContext` is not available
- replacing `dataset::MakeIteratorContext(OpKernelContext* ctx)` factory with `IteratorContext(OpKernelContext *ctx)` constructor.
- folding `GraphDatasetBase` into `DatasetBase` and removing the default implementation of `AsGraphDefInternal`, making it the responsibility of the derived class to implement it to encourage/hint developers to provide serialization logic
PiperOrigin-RevId: 208560010
Diffstat (limited to 'tensorflow/contrib/bigtable')
7 files changed, 78 insertions, 27 deletions
diff --git a/tensorflow/contrib/bigtable/kernels/bigtable_kernels.cc b/tensorflow/contrib/bigtable/kernels/bigtable_kernels.cc index 1790b4bc11..a25a641cdb 100644 --- a/tensorflow/contrib/bigtable/kernels/bigtable_kernels.cc +++ b/tensorflow/contrib/bigtable/kernels/bigtable_kernels.cc @@ -218,11 +218,11 @@ class ToBigtableOp : public AsyncOpKernel { OP_REQUIRES_OK_ASYNC( ctx, GetDatasetFromVariantTensor(ctx->input(1), &dataset), done); - IteratorContext iter_ctx = dataset::MakeIteratorContext(ctx); std::unique_ptr<IteratorBase> iterator; OP_REQUIRES_OK_ASYNC( ctx, - dataset->MakeIterator(&iter_ctx, "ToBigtableOpIterator", &iterator), + dataset->MakeIterator(IteratorContext(ctx), "ToBigtableOpIterator", + &iterator), done); int64 timestamp_int; @@ -245,9 +245,10 @@ class ToBigtableOp : public AsyncOpKernel { ::google::cloud::bigtable::BulkMutation mutation; // TODO(saeta): Make # of mutations configurable. for (uint64 i = 0; i < 100 && !end_of_sequence; ++i) { - OP_REQUIRES_OK_ASYNC( - ctx, iterator->GetNext(&iter_ctx, &components, &end_of_sequence), - done); + OP_REQUIRES_OK_ASYNC(ctx, + iterator->GetNext(IteratorContext(ctx), + &components, &end_of_sequence), + done); if (!end_of_sequence) { OP_REQUIRES_OK_ASYNC( ctx, diff --git a/tensorflow/contrib/bigtable/kernels/bigtable_lookup_dataset_op.cc b/tensorflow/contrib/bigtable/kernels/bigtable_lookup_dataset_op.cc index 9e49fa35db..bd32672aa9 100644 --- a/tensorflow/contrib/bigtable/kernels/bigtable_lookup_dataset_op.cc +++ b/tensorflow/contrib/bigtable/kernels/bigtable_lookup_dataset_op.cc @@ -53,7 +53,7 @@ class BigtableLookupDatasetOp : public UnaryDatasetOpKernel { } private: - class Dataset : public GraphDatasetBase { + class Dataset : public DatasetBase { public: explicit Dataset(OpKernelContext* ctx, const DatasetBase* input, BigtableTableResource* table, @@ -61,7 +61,7 @@ class BigtableLookupDatasetOp : public UnaryDatasetOpKernel { std::vector<string> columns, const DataTypeVector& output_types, 
std::vector<PartialTensorShape> output_shapes) - : GraphDatasetBase(ctx), + : DatasetBase(DatasetContext(ctx)), input_(input), table_(table), column_families_(std::move(column_families)), @@ -80,8 +80,8 @@ class BigtableLookupDatasetOp : public UnaryDatasetOpKernel { std::unique_ptr<IteratorBase> MakeIteratorInternal( const string& prefix) const override { - return std::unique_ptr<IteratorBase>(new Iterator( - {this, strings::StrCat(prefix, "::BigtableLookupDataset")})); + return std::unique_ptr<IteratorBase>( + new Iterator({this, strings::StrCat(prefix, "::BigtableLookup")})); } const DataTypeVector& output_dtypes() const override { @@ -96,6 +96,14 @@ class BigtableLookupDatasetOp : public UnaryDatasetOpKernel { return "BigtableLookupDatasetOp::Dataset"; } + protected: + Status AsGraphDefInternal(SerializationContext* ctx, + DatasetGraphDefBuilder* b, + Node** output) const override { + return errors::Unimplemented("%s does not support serialization", + DebugString()); + } + private: static ::google::cloud::bigtable::Filter MakeFilter( const std::vector<string>& column_families, diff --git a/tensorflow/contrib/bigtable/kernels/bigtable_prefix_key_dataset_op.cc b/tensorflow/contrib/bigtable/kernels/bigtable_prefix_key_dataset_op.cc index e960719614..a803fdcb49 100644 --- a/tensorflow/contrib/bigtable/kernels/bigtable_prefix_key_dataset_op.cc +++ b/tensorflow/contrib/bigtable/kernels/bigtable_prefix_key_dataset_op.cc @@ -35,11 +35,13 @@ class BigtablePrefixKeyDatasetOp : public DatasetOpKernel { } private: - class Dataset : public GraphDatasetBase { + class Dataset : public DatasetBase { public: explicit Dataset(OpKernelContext* ctx, BigtableTableResource* table, string prefix) - : GraphDatasetBase(ctx), table_(table), prefix_(std::move(prefix)) { + : DatasetBase(DatasetContext(ctx)), + table_(table), + prefix_(std::move(prefix)) { table_->Ref(); } @@ -47,8 +49,8 @@ class BigtablePrefixKeyDatasetOp : public DatasetOpKernel { std::unique_ptr<IteratorBase> 
MakeIteratorInternal( const string& prefix) const override { - return std::unique_ptr<IteratorBase>(new Iterator( - {this, strings::StrCat(prefix, "::BigtablePrefixKeyDataset")})); + return std::unique_ptr<IteratorBase>( + new Iterator({this, strings::StrCat(prefix, "::BigtablePrefixKey")})); } const DataTypeVector& output_dtypes() const override { @@ -68,6 +70,14 @@ class BigtablePrefixKeyDatasetOp : public DatasetOpKernel { BigtableTableResource* table() const { return table_; } + protected: + Status AsGraphDefInternal(SerializationContext* ctx, + DatasetGraphDefBuilder* b, + Node** output) const override { + return errors::Unimplemented("%s does not support serialization", + DebugString()); + } + private: class Iterator : public BigtableReaderDatasetIterator<Dataset> { public: diff --git a/tensorflow/contrib/bigtable/kernels/bigtable_range_key_dataset_op.cc b/tensorflow/contrib/bigtable/kernels/bigtable_range_key_dataset_op.cc index 96d3565d9b..5cd0371c79 100644 --- a/tensorflow/contrib/bigtable/kernels/bigtable_range_key_dataset_op.cc +++ b/tensorflow/contrib/bigtable/kernels/bigtable_range_key_dataset_op.cc @@ -39,11 +39,11 @@ class BigtableRangeKeyDatasetOp : public DatasetOpKernel { } private: - class Dataset : public GraphDatasetBase { + class Dataset : public DatasetBase { public: explicit Dataset(OpKernelContext* ctx, BigtableTableResource* table, string start_key, string end_key) - : GraphDatasetBase(ctx), + : DatasetBase(DatasetContext(ctx)), table_(table), start_key_(std::move(start_key)), end_key_(std::move(end_key)) { @@ -54,8 +54,8 @@ class BigtableRangeKeyDatasetOp : public DatasetOpKernel { std::unique_ptr<IteratorBase> MakeIteratorInternal( const string& prefix) const override { - return std::unique_ptr<IteratorBase>(new Iterator( - {this, strings::StrCat(prefix, "::BigtableRangeKeyDataset")})); + return std::unique_ptr<IteratorBase>( + new Iterator({this, strings::StrCat(prefix, "::BigtableRangeKey")})); } const DataTypeVector& output_dtypes() 
const override { @@ -75,6 +75,14 @@ class BigtableRangeKeyDatasetOp : public DatasetOpKernel { BigtableTableResource* table() const { return table_; } + protected: + Status AsGraphDefInternal(SerializationContext* ctx, + DatasetGraphDefBuilder* b, + Node** output) const override { + return errors::Unimplemented("%s does not support serialization", + DebugString()); + } + private: class Iterator : public BigtableReaderDatasetIterator<Dataset> { public: diff --git a/tensorflow/contrib/bigtable/kernels/bigtable_sample_key_pairs_dataset_op.cc b/tensorflow/contrib/bigtable/kernels/bigtable_sample_key_pairs_dataset_op.cc index a1a63a975a..6928d9423c 100644 --- a/tensorflow/contrib/bigtable/kernels/bigtable_sample_key_pairs_dataset_op.cc +++ b/tensorflow/contrib/bigtable/kernels/bigtable_sample_key_pairs_dataset_op.cc @@ -52,11 +52,11 @@ class BigtableSampleKeyPairsDatasetOp : public DatasetOpKernel { } private: - class Dataset : public GraphDatasetBase { + class Dataset : public DatasetBase { public: explicit Dataset(OpKernelContext* ctx, BigtableTableResource* table, string prefix, string start_key, string end_key) - : GraphDatasetBase(ctx), + : DatasetBase(DatasetContext(ctx)), table_(table), key_range_(MakeMultiModeKeyRange( std::move(prefix), std::move(start_key), std::move(end_key))) { @@ -68,7 +68,7 @@ class BigtableSampleKeyPairsDatasetOp : public DatasetOpKernel { std::unique_ptr<IteratorBase> MakeIteratorInternal( const string& prefix) const override { return std::unique_ptr<IteratorBase>(new Iterator( - {this, strings::StrCat(prefix, "::BigtableSampleKeyPairsDataset")})); + {this, strings::StrCat(prefix, "::BigtableSampleKeyPairs")})); } const DataTypeVector& output_dtypes() const override { @@ -87,6 +87,14 @@ class BigtableSampleKeyPairsDatasetOp : public DatasetOpKernel { return "BigtableSampleKeyPairsDatasetOp::Dataset"; } + protected: + Status AsGraphDefInternal(SerializationContext* ctx, + DatasetGraphDefBuilder* b, + Node** output) const override { + 
return errors::Unimplemented("%s does not support serialization", + DebugString()); + } + private: static MultiModeKeyRange MakeMultiModeKeyRange(string prefix, string start_key, diff --git a/tensorflow/contrib/bigtable/kernels/bigtable_sample_keys_dataset_op.cc b/tensorflow/contrib/bigtable/kernels/bigtable_sample_keys_dataset_op.cc index a5a47cfe2d..a759fb5063 100644 --- a/tensorflow/contrib/bigtable/kernels/bigtable_sample_keys_dataset_op.cc +++ b/tensorflow/contrib/bigtable/kernels/bigtable_sample_keys_dataset_op.cc @@ -31,10 +31,10 @@ class BigtableSampleKeysDatasetOp : public DatasetOpKernel { } private: - class Dataset : public GraphDatasetBase { + class Dataset : public DatasetBase { public: explicit Dataset(OpKernelContext* ctx, BigtableTableResource* table) - : GraphDatasetBase(ctx), table_(table) { + : DatasetBase(DatasetContext(ctx)), table_(table) { table_->Ref(); } @@ -43,7 +43,7 @@ class BigtableSampleKeysDatasetOp : public DatasetOpKernel { std::unique_ptr<IteratorBase> MakeIteratorInternal( const string& prefix) const override { return std::unique_ptr<IteratorBase>(new Iterator( - {this, strings::StrCat(prefix, "::BigtableSampleKeysDataset")})); + {this, strings::StrCat(prefix, "::BigtableSampleKeys")})); } const DataTypeVector& output_dtypes() const override { @@ -63,6 +63,14 @@ class BigtableSampleKeysDatasetOp : public DatasetOpKernel { BigtableTableResource* table() const { return table_; } + protected: + Status AsGraphDefInternal(SerializationContext* ctx, + DatasetGraphDefBuilder* b, + Node** output) const override { + return errors::Unimplemented("%s does not support serialization", + DebugString()); + } + private: class Iterator : public DatasetIterator<Dataset> { public: diff --git a/tensorflow/contrib/bigtable/kernels/bigtable_scan_dataset_op.cc b/tensorflow/contrib/bigtable/kernels/bigtable_scan_dataset_op.cc index 13cb868167..78a920b077 100644 --- a/tensorflow/contrib/bigtable/kernels/bigtable_scan_dataset_op.cc +++ 
b/tensorflow/contrib/bigtable/kernels/bigtable_scan_dataset_op.cc @@ -84,7 +84,7 @@ class BigtableScanDatasetOp : public DatasetOpKernel { } private: - class Dataset : public GraphDatasetBase { + class Dataset : public DatasetBase { public: explicit Dataset(OpKernelContext* ctx, BigtableTableResource* table, string prefix, string start_key, string end_key, @@ -92,7 +92,7 @@ class BigtableScanDatasetOp : public DatasetOpKernel { std::vector<string> columns, float probability, const DataTypeVector& output_types, std::vector<PartialTensorShape> output_shapes) - : GraphDatasetBase(ctx), + : DatasetBase(DatasetContext(ctx)), table_(table), prefix_(std::move(prefix)), start_key_(std::move(start_key)), @@ -111,8 +111,8 @@ class BigtableScanDatasetOp : public DatasetOpKernel { std::unique_ptr<IteratorBase> MakeIteratorInternal( const string& prefix) const override { - return std::unique_ptr<IteratorBase>(new Iterator( - {this, strings::StrCat(prefix, "::BigtableScanDataset")})); + return std::unique_ptr<IteratorBase>( + new Iterator({this, strings::StrCat(prefix, "::BigtableScan")})); } const DataTypeVector& output_dtypes() const override { @@ -129,6 +129,14 @@ class BigtableScanDatasetOp : public DatasetOpKernel { BigtableTableResource* table() const { return table_; } + protected: + Status AsGraphDefInternal(SerializationContext* ctx, + DatasetGraphDefBuilder* b, + Node** output) const override { + return errors::Unimplemented("%s does not support serialization", + DebugString()); + } + private: class Iterator : public BigtableReaderDatasetIterator<Dataset> { public: |