Diffstat (limited to 'tensorflow/tools/api/golden/v2/tensorflow.data.experimental.pbtxt')
-rw-r--r--  tensorflow/tools/api/golden/v2/tensorflow.data.experimental.pbtxt | 139
1 file changed, 139 insertions(+), 0 deletions(-)
diff --git a/tensorflow/tools/api/golden/v2/tensorflow.data.experimental.pbtxt b/tensorflow/tools/api/golden/v2/tensorflow.data.experimental.pbtxt
new file mode 100644
index 0000000000..b14585f8d7
--- /dev/null
+++ b/tensorflow/tools/api/golden/v2/tensorflow.data.experimental.pbtxt
@@ -0,0 +1,139 @@
+path: "tensorflow.data.experimental"
+tf_module {
+ member {
+ name: "CheckpointInputPipelineHook"
+ mtype: "<type \'type\'>"
+ }
+ member {
+ name: "CsvDataset"
+ mtype: "<class \'abc.ABCMeta\'>"
+ }
+ member {
+ name: "Optional"
+ mtype: "<type \'type\'>"
+ }
+ member {
+ name: "RandomDataset"
+ mtype: "<class \'abc.ABCMeta\'>"
+ }
+ member {
+ name: "Reducer"
+ mtype: "<type \'type\'>"
+ }
+ member {
+ name: "SqlDataset"
+ mtype: "<class \'abc.ABCMeta\'>"
+ }
+ member {
+ name: "StatsAggregator"
+ mtype: "<type \'type\'>"
+ }
+ member {
+ name: "TFRecordWriter"
+ mtype: "<type \'type\'>"
+ }
+ member_method {
+ name: "Counter"
+ argspec: "args=[\'start\', \'step\', \'dtype\'], varargs=None, keywords=None, defaults=[\'0\', \'1\', \"<dtype: \'int64\'>\"], "
+ }
+ member_method {
+ name: "bucket_by_sequence_length"
+ argspec: "args=[\'element_length_func\', \'bucket_boundaries\', \'bucket_batch_sizes\', \'padded_shapes\', \'padding_values\', \'pad_to_bucket_boundary\', \'no_padding\'], varargs=None, keywords=None, defaults=[\'None\', \'None\', \'False\', \'False\'], "
+ }
+ member_method {
+ name: "choose_from_datasets"
+ argspec: "args=[\'datasets\', \'choice_dataset\'], varargs=None, keywords=None, defaults=None"
+ }
+ member_method {
+ name: "copy_to_device"
+ argspec: "args=[\'target_device\', \'source_device\'], varargs=None, keywords=None, defaults=[\'/cpu:0\'], "
+ }
+ member_method {
+ name: "dense_to_sparse_batch"
+ argspec: "args=[\'batch_size\', \'row_shape\'], varargs=None, keywords=None, defaults=None"
+ }
+ member_method {
+ name: "enumerate_dataset"
+ argspec: "args=[\'start\'], varargs=None, keywords=None, defaults=[\'0\'], "
+ }
+ member_method {
+ name: "get_next_as_optional"
+ argspec: "args=[\'iterator\'], varargs=None, keywords=None, defaults=None"
+ }
+ member_method {
+ name: "get_single_element"
+ argspec: "args=[\'dataset\'], varargs=None, keywords=None, defaults=None"
+ }
+ member_method {
+ name: "group_by_reducer"
+ argspec: "args=[\'key_func\', \'reducer\'], varargs=None, keywords=None, defaults=None"
+ }
+ member_method {
+ name: "group_by_window"
+ argspec: "args=[\'key_func\', \'reduce_func\', \'window_size\', \'window_size_func\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
+ }
+ member_method {
+ name: "ignore_errors"
+ argspec: "args=[], varargs=None, keywords=None, defaults=None"
+ }
+ member_method {
+ name: "latency_stats"
+ argspec: "args=[\'tag\'], varargs=None, keywords=None, defaults=None"
+ }
+ member_method {
+ name: "make_batched_features_dataset"
+ argspec: "args=[\'file_pattern\', \'batch_size\', \'features\', \'reader\', \'label_key\', \'reader_args\', \'num_epochs\', \'shuffle\', \'shuffle_buffer_size\', \'shuffle_seed\', \'prefetch_buffer_size\', \'reader_num_threads\', \'parser_num_threads\', \'sloppy_ordering\', \'drop_final_batch\'], varargs=None, keywords=None, defaults=[\"<class \'tensorflow.python.data.ops.readers.TFRecordDataset\'>\", \'None\', \'None\', \'None\', \'True\', \'10000\', \'None\', \'-1\', \'1\', \'2\', \'False\', \'False\'], "
+ }
+ member_method {
+ name: "make_csv_dataset"
+ argspec: "args=[\'file_pattern\', \'batch_size\', \'column_names\', \'column_defaults\', \'label_name\', \'select_columns\', \'field_delim\', \'use_quote_delim\', \'na_value\', \'header\', \'num_epochs\', \'shuffle\', \'shuffle_buffer_size\', \'shuffle_seed\', \'prefetch_buffer_size\', \'num_parallel_reads\', \'sloppy\', \'num_rows_for_inference\', \'compression_type\'], varargs=None, keywords=None, defaults=[\'None\', \'None\', \'None\', \'None\', \',\', \'True\', \'\', \'True\', \'None\', \'True\', \'10000\', \'None\', \'-1\', \'1\', \'False\', \'100\', \'None\'], "
+ }
+ member_method {
+ name: "make_saveable_from_iterator"
+ argspec: "args=[\'iterator\'], varargs=None, keywords=None, defaults=None"
+ }
+ member_method {
+ name: "map_and_batch"
+ argspec: "args=[\'map_func\', \'batch_size\', \'num_parallel_batches\', \'drop_remainder\', \'num_parallel_calls\'], varargs=None, keywords=None, defaults=[\'None\', \'False\', \'None\'], "
+ }
+ member_method {
+ name: "parallel_interleave"
+ argspec: "args=[\'map_func\', \'cycle_length\', \'block_length\', \'sloppy\', \'buffer_output_elements\', \'prefetch_input_elements\'], varargs=None, keywords=None, defaults=[\'1\', \'False\', \'None\', \'None\'], "
+ }
+ member_method {
+ name: "parse_example_dataset"
+ argspec: "args=[\'features\', \'num_parallel_calls\'], varargs=None, keywords=None, defaults=[\'1\'], "
+ }
+ member_method {
+ name: "prefetch_to_device"
+ argspec: "args=[\'device\', \'buffer_size\'], varargs=None, keywords=None, defaults=[\'None\'], "
+ }
+ member_method {
+ name: "rejection_resample"
+ argspec: "args=[\'class_func\', \'target_dist\', \'initial_dist\', \'seed\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
+ }
+ member_method {
+ name: "sample_from_datasets"
+ argspec: "args=[\'datasets\', \'weights\', \'seed\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
+ }
+ member_method {
+ name: "scan"
+ argspec: "args=[\'initial_state\', \'scan_func\'], varargs=None, keywords=None, defaults=None"
+ }
+ member_method {
+ name: "set_stats_aggregator"
+ argspec: "args=[\'stats_aggregator\'], varargs=None, keywords=None, defaults=None"
+ }
+ member_method {
+ name: "shuffle_and_repeat"
+ argspec: "args=[\'buffer_size\', \'count\', \'seed\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
+ }
+ member_method {
+ name: "unbatch"
+ argspec: "args=[], varargs=None, keywords=None, defaults=None"
+ }
+ member_method {
+ name: "unique"
+ argspec: "args=[], varargs=None, keywords=None, defaults=None"
+ }
+}
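
For context, a minimal usage sketch of a few of the symbols recorded in this golden file, assuming a TensorFlow build of the same era as this diff; the dataset contents and parameter values below are illustrative only:

    import tensorflow as tf

    # Counter is a dataset factory: an unbounded int64 sequence
    # (argspec above: start=0, step=1, dtype=int64).
    dataset = tf.data.experimental.Counter(start=0, step=1)

    # Most of the other member_methods return transformation functions
    # that are consumed via Dataset.apply(), matching their argspecs.
    dataset = dataset.apply(
        tf.data.experimental.shuffle_and_repeat(buffer_size=1000, count=2))
    dataset = dataset.apply(
        tf.data.experimental.map_and_batch(lambda x: x * 2, batch_size=32))

This apply()-based pattern is why most entries here are recorded as module-level member_method argspecs rather than as Dataset methods: each call returns a transformation that Dataset.apply() consumes, and the golden file pins those signatures so any API change fails the compatibility test.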