path: root/tensorflow/cc/saved_model
author     Sukriti Ramesh <sukritiramesh@google.com>        2016-10-25 09:30:53 -0800
committer  TensorFlower Gardener <gardener@tensorflow.org>  2016-10-25 10:46:33 -0700
commit     be7fe33bbc3c5538f16ade41b7a685a4e127f7cd (patch)
tree       6a0053bf38a05abf25baf5bd5f5a8c859f51cc40 /tensorflow/cc/saved_model
parent     8d290a54ac5e5eaf7663457c0da70130f1710085 (diff)
Add support for legacy init op and asset file defs in SavedModel cc.
Change: 137172757
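
With this change, LoadSavedModel feeds asset file paths into the restore op and then runs any op recorded under the "legacy_init_op" collection of the MetaGraphDef. A minimal caller-side sketch (the export directory path and helper name are illustrative; the calls mirror loader_test.cc below):

#include <string>

#include "tensorflow/cc/saved_model/loader.h"
#include "tensorflow/cc/saved_model/tag_constants.h"

// Sketch: load a SavedModel that carries assets and a legacy init op.
// After this change the loader (1) feeds each AssetFileDef tensor with
// <export_dir>/assets/<filename> while running the restore op, and
// (2) runs the single op stored under the "legacy_init_op" collection,
// if one is present.
tensorflow::Status LoadBundle(const std::string& export_dir,
                              tensorflow::SavedModelBundle* bundle) {
  tensorflow::SessionOptions session_options;
  tensorflow::RunOptions run_options;
  return tensorflow::LoadSavedModel(session_options, run_options, export_dir,
                                    {tensorflow::kSavedModelTagServe}, bundle);
}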
Diffstat (limited to 'tensorflow/cc/saved_model')
-rw-r--r--  tensorflow/cc/saved_model/BUILD                                                            2
-rw-r--r--  tensorflow/cc/saved_model/constants.h                                                      3
-rw-r--r--  tensorflow/cc/saved_model/loader.cc                                                       91
-rw-r--r--  tensorflow/cc/saved_model/loader_test.cc                                                  35
-rw-r--r--  tensorflow/cc/saved_model/testdata/half_plus_two/assets/foo.txt                            1
-rw-r--r--  tensorflow/cc/saved_model/testdata/half_plus_two/saved_model.pb                          bin  6491 -> 0 bytes
-rw-r--r--  tensorflow/cc/saved_model/testdata/half_plus_two/variables/variables.data-00000-of-00001  bin  8 -> 0 bytes
-rw-r--r--  tensorflow/cc/saved_model/testdata/half_plus_two/variables/variables.index               bin  134 -> 0 bytes
-rw-r--r--  tensorflow/cc/saved_model/testdata/half_plus_two_pbtxt/saved_model.pbtxt                 492
-rw-r--r--  tensorflow/cc/saved_model/testdata/half_plus_two_sharded/saved_model.pb                  bin  6491 -> 7331 bytes
10 files changed, 569 insertions, 55 deletions
diff --git a/tensorflow/cc/saved_model/BUILD b/tensorflow/cc/saved_model/BUILD
index eeedaaff27..90c87210b1 100644
--- a/tensorflow/cc/saved_model/BUILD
+++ b/tensorflow/cc/saved_model/BUILD
@@ -34,6 +34,7 @@ cc_library(
":constants",
"//tensorflow/core:core_cpu",
"//tensorflow/core:lib",
+ "//tensorflow/core:lib_internal",
"//tensorflow/core:protos_all_cc",
"//tensorflow/core:tensorflow",
"//tensorflow/core/util/tensor_bundle:naming",
@@ -63,7 +64,6 @@ tf_cc_test(
filegroup(
name = "saved_model_half_plus_two",
srcs = glob([
- "testdata/half_plus_two/**",
"testdata/half_plus_two_pbtxt/**",
"testdata/half_plus_two_sharded/**",
]),
diff --git a/tensorflow/cc/saved_model/constants.h b/tensorflow/cc/saved_model/constants.h
index f67c56ba1c..654e765170 100644
--- a/tensorflow/cc/saved_model/constants.h
+++ b/tensorflow/cc/saved_model/constants.h
@@ -30,6 +30,9 @@ constexpr char kSavedModelFilenamePb[] = "saved_model.pb";
// SavedModel text format proto filename.
constexpr char kSavedModelFilenamePbTxt[] = "saved_model.pbtxt";
+// SavedModel legacy init op key.
+constexpr char kSavedModelLegacyInitOpKey[] = "legacy_init_op";
+
// Directory in which to save the SavedModel variables.
constexpr char kSavedModelVariablesDirectory[] = "variables";
diff --git a/tensorflow/cc/saved_model/loader.cc b/tensorflow/cc/saved_model/loader.cc
index 1f95229355..c654d56e8a 100644
--- a/tensorflow/cc/saved_model/loader.cc
+++ b/tensorflow/cc/saved_model/loader.cc
@@ -21,6 +21,7 @@ limitations under the License.
#include "tensorflow/core/lib/io/path.h"
#include "tensorflow/core/lib/monitoring/counter.h"
#include "tensorflow/core/platform/env.h"
+#include "tensorflow/core/platform/protobuf_internal.h"
#include "tensorflow/core/protobuf/saved_model.pb.h"
#include "tensorflow/core/public/session.h"
#include "tensorflow/core/public/session_options.h"
@@ -83,10 +84,32 @@ Status LoadMetaGraphIntoSession(const MetaGraphDef& meta_graph_def,
return (*session)->Create(meta_graph_def.graph_def());
}
-Status Restore(const RunOptions& run_options, const string& export_dir,
- const StringPiece restore_op_name,
- const StringPiece variable_filename_const_op_name,
- Session* session) {
+Tensor CreateStringTensor(const string& value) {
+ Tensor tensor(DT_STRING, TensorShape({}));
+ tensor.scalar<string>()() = value;
+ return tensor;
+}
+
+void AddAssetsTensorsToInputs(const StringPiece export_dir,
+ const std::vector<AssetFileDef>& asset_file_defs,
+ std::vector<std::pair<string, Tensor>>* inputs) {
+ if (asset_file_defs.empty()) {
+ return;
+ }
+ for (auto& asset_file_def : asset_file_defs) {
+ Tensor assets_file_path_tensor = CreateStringTensor(io::JoinPath(
+ export_dir, kSavedModelAssetsDirectory, asset_file_def.filename()));
+ inputs->push_back(
+ {asset_file_def.tensor_info().name(), assets_file_path_tensor});
+ }
+}
+
+Status RunRestore(const RunOptions& run_options, const string& export_dir,
+ const StringPiece restore_op_name,
+ const StringPiece variable_filename_const_op_name,
+ const std::vector<AssetFileDef>& asset_file_defs,
+ Session* session) {
+ LOG(INFO) << "Restoring SavedModel bundle.";
// Find path to variables to be restored in export directory.
const string variables_directory =
io::JoinPath(export_dir, kSavedModelVariablesDirectory);
@@ -109,11 +132,54 @@ Status Restore(const RunOptions& run_options, const string& export_dir,
std::vector<std::pair<string, Tensor>> inputs = {
{variable_filename_const_op_name.ToString(), variables_path_tensor}};
+ AddAssetsTensorsToInputs(export_dir, asset_file_defs, &inputs);
+
RunMetadata run_metadata;
return session->Run(run_options, inputs, {}, {restore_op_name.ToString()},
nullptr /* outputs */, &run_metadata);
}
+Status RunLegacyInitOp(const RunOptions& run_options, const string& export_dir,
+ const MetaGraphDef& meta_graph_def,
+ const std::vector<AssetFileDef>& asset_file_defs,
+ Session* session) {
+ LOG(INFO) << "Running LegacyInitOp on SavedModel bundle.";
+ const auto& collection_def_map = meta_graph_def.collection_def();
+ const auto init_op_it = collection_def_map.find(kSavedModelLegacyInitOpKey);
+ if (init_op_it != collection_def_map.end()) {
+ if (init_op_it->second.node_list().value_size() != 1) {
+ return errors::FailedPrecondition(strings::StrCat(
+ "Expected exactly one serving init op in : ", export_dir));
+ }
+ std::vector<std::pair<string, Tensor>> inputs;
+ AddAssetsTensorsToInputs(export_dir, asset_file_defs, &inputs);
+ RunMetadata run_metadata;
+ const StringPiece legacy_init_op_name =
+ init_op_it->second.node_list().value(0);
+ return session->Run(run_options, inputs, {},
+ {legacy_init_op_name.ToString()}, nullptr /* outputs */,
+ &run_metadata);
+ }
+ return Status::OK();
+}
+
+Status GetAssetFileDefs(const MetaGraphDef& meta_graph_def,
+ std::vector<AssetFileDef>* asset_file_defs) {
+ const auto& collection_def_map = meta_graph_def.collection_def();
+ const auto assets_it = collection_def_map.find(kSavedModelAssetsKey);
+ if (assets_it == collection_def_map.end()) {
+ return Status::OK();
+ }
+ const auto& any_assets = assets_it->second.any_list().value();
+ for (const auto& any_asset : any_assets) {
+ AssetFileDef asset_file_def;
+ TF_RETURN_IF_ERROR(
+ ParseAny(any_asset, &asset_file_def, "tensorflow.AssetFileDef"));
+ asset_file_defs->push_back(asset_file_def);
+ }
+ return Status::OK();
+}
+
Status LoadSavedModelInternal(const SessionOptions& session_options,
const RunOptions& run_options,
const string& export_dir,
@@ -134,12 +200,19 @@ Status LoadSavedModelInternal(const SessionOptions& session_options,
TF_RETURN_IF_ERROR(LoadMetaGraphIntoSession(
bundle->meta_graph_def, session_options, &bundle->session));
+ std::vector<AssetFileDef> asset_file_defs;
TF_RETURN_IF_ERROR(
- Restore(run_options, export_dir,
- bundle->meta_graph_def.saver_def().restore_op_name(),
- bundle->meta_graph_def.saver_def().filename_tensor_name(),
- bundle->session.get()));
-
+ GetAssetFileDefs(bundle->meta_graph_def, &asset_file_defs));
+ TF_RETURN_IF_ERROR(
+ RunRestore(run_options, export_dir,
+ bundle->meta_graph_def.saver_def().restore_op_name(),
+ bundle->meta_graph_def.saver_def().filename_tensor_name(),
+ asset_file_defs, bundle->session.get()));
+ // TODO(sukritiramesh): Add support for a single main op to run upon load,
+ // which will supersede the legacy_init_op and separate RunRestore.
+ TF_RETURN_IF_ERROR(RunLegacyInitOp(run_options, export_dir,
+ bundle->meta_graph_def, asset_file_defs,
+ bundle->session.get()));
return Status::OK();
}
diff --git a/tensorflow/cc/saved_model/loader_test.cc b/tensorflow/cc/saved_model/loader_test.cc
index a7e4d6cfde..55a22e4e81 100644
--- a/tensorflow/cc/saved_model/loader_test.cc
+++ b/tensorflow/cc/saved_model/loader_test.cc
@@ -29,7 +29,6 @@ limitations under the License.
namespace tensorflow {
namespace {
-constexpr char kTestDataPb[] = "cc/saved_model/testdata/half_plus_two";
constexpr char kTestDataPbTxt[] = "cc/saved_model/testdata/half_plus_two_pbtxt";
constexpr char kTestDataSharded[] =
"cc/saved_model/testdata/half_plus_two_sharded";
@@ -45,12 +44,26 @@ class LoaderTest : public ::testing::Test {
return example.SerializeAsString();
}
+ void ValidateAssets(const string& export_dir,
+ const SavedModelBundle& bundle) {
+ const string asset_directory =
+ io::JoinPath(export_dir, kSavedModelAssetsDirectory);
+ const string asset_filename = "foo.txt";
+ const string asset_filepath = io::JoinPath(asset_directory, asset_filename);
+ EXPECT_TRUE(Env::Default()->FileExists(asset_filepath));
+
+ std::vector<Tensor> path_outputs;
+ TF_ASSERT_OK(
+ bundle.session->Run({}, {"filename_tensor:0"}, {}, &path_outputs));
+ ASSERT_EQ(1, path_outputs.size());
+
+ test::ExpectTensorEqual<string>(
+ test::AsTensor<string>({"foo.txt"}, TensorShape({})), path_outputs[0]);
+ }
+
void CheckSavedModelBundle(const string& export_dir,
const SavedModelBundle& bundle) {
- const string asset_path =
- io::JoinPath(export_dir, kSavedModelAssetsDirectory, "foo.txt");
- EXPECT_TRUE(Env::Default()->FileExists(asset_path));
-
+ ValidateAssets(export_dir, bundle);
// Retrieve the regression signature from meta graph def.
const auto signature_def_map = bundle.meta_graph_def.signature_def();
const auto signature_def = signature_def_map.at(kRegressMethodName);
@@ -151,18 +164,6 @@ TEST_F(LoaderTest, PbtxtFormat) {
CheckSavedModelBundle(export_dir, bundle);
}
-TEST_F(LoaderTest, SingleShardVariables) {
- SavedModelBundle bundle;
- SessionOptions session_options;
- RunOptions run_options;
-
- const string export_dir =
- io::JoinPath(testing::TensorFlowSrcRoot(), kTestDataPb);
- TF_ASSERT_OK(LoadSavedModel(session_options, run_options, export_dir,
- {kSavedModelTagServe}, &bundle));
- CheckSavedModelBundle(export_dir, bundle);
-}
-
TEST_F(LoaderTest, InvalidExportPath) {
SavedModelBundle bundle;
RunOptions run_options;
diff --git a/tensorflow/cc/saved_model/testdata/half_plus_two/assets/foo.txt b/tensorflow/cc/saved_model/testdata/half_plus_two/assets/foo.txt
deleted file mode 100644
index f9ff036688..0000000000
--- a/tensorflow/cc/saved_model/testdata/half_plus_two/assets/foo.txt
+++ /dev/null
@@ -1 +0,0 @@
-asset-file-contents
\ No newline at end of file
diff --git a/tensorflow/cc/saved_model/testdata/half_plus_two/saved_model.pb b/tensorflow/cc/saved_model/testdata/half_plus_two/saved_model.pb
deleted file mode 100644
index e894f9b101..0000000000
--- a/tensorflow/cc/saved_model/testdata/half_plus_two/saved_model.pb
+++ /dev/null
Binary files differ
diff --git a/tensorflow/cc/saved_model/testdata/half_plus_two/variables/variables.data-00000-of-00001 b/tensorflow/cc/saved_model/testdata/half_plus_two/variables/variables.data-00000-of-00001
deleted file mode 100644
index 20bc7d454d..0000000000
--- a/tensorflow/cc/saved_model/testdata/half_plus_two/variables/variables.data-00000-of-00001
+++ /dev/null
Binary files differ
diff --git a/tensorflow/cc/saved_model/testdata/half_plus_two/variables/variables.index b/tensorflow/cc/saved_model/testdata/half_plus_two/variables/variables.index
deleted file mode 100644
index e7df518f5b..0000000000
--- a/tensorflow/cc/saved_model/testdata/half_plus_two/variables/variables.index
+++ /dev/null
Binary files differ
diff --git a/tensorflow/cc/saved_model/testdata/half_plus_two_pbtxt/saved_model.pbtxt b/tensorflow/cc/saved_model/testdata/half_plus_two_pbtxt/saved_model.pbtxt
index 2e714d262d..693262eb4d 100644
--- a/tensorflow/cc/saved_model/testdata/half_plus_two_pbtxt/saved_model.pbtxt
+++ b/tensorflow/cc/saved_model/testdata/half_plus_two_pbtxt/saved_model.pbtxt
@@ -103,6 +103,24 @@ meta_graphs {
}
}
op {
+ name: "MergeV2Checkpoints"
+ input_arg {
+ name: "checkpoint_prefixes"
+ type: DT_STRING
+ }
+ input_arg {
+ name: "destination_prefix"
+ type: DT_STRING
+ }
+ attr {
+ name: "delete_old_dirs"
+ type: "bool"
+ default_value {
+ b: true
+ }
+ }
+ }
+ op {
name: "Mul"
input_arg {
name: "x"
@@ -141,6 +159,35 @@ meta_graphs {
name: "NoOp"
}
op {
+ name: "Pack"
+ input_arg {
+ name: "values"
+ type_attr: "T"
+ number_attr: "N"
+ }
+ output_arg {
+ name: "output"
+ type_attr: "T"
+ }
+ attr {
+ name: "N"
+ type: "int"
+ has_minimum: true
+ minimum: 1
+ }
+ attr {
+ name: "T"
+ type: "type"
+ }
+ attr {
+ name: "axis"
+ type: "int"
+ default_value {
+ i: 0
+ }
+ }
+ }
+ op {
name: "ParseExample"
input_arg {
name: "serialized"
@@ -267,9 +314,9 @@ meta_graphs {
}
}
op {
- name: "SaveSlices"
+ name: "SaveV2"
input_arg {
- name: "filename"
+ name: "prefix"
type: DT_STRING
}
input_arg {
@@ -277,15 +324,15 @@ meta_graphs {
type: DT_STRING
}
input_arg {
- name: "shapes_and_slices"
+ name: "shape_and_slices"
type: DT_STRING
}
input_arg {
- name: "data"
- type_list_attr: "T"
+ name: "tensors"
+ type_list_attr: "dtypes"
}
attr {
- name: "T"
+ name: "dtypes"
type: "list(type)"
has_minimum: true
minimum: 1
@@ -311,19 +358,29 @@ meta_graphs {
}
}
op {
- name: "ShardedFilespec"
+ name: "StringJoin"
input_arg {
- name: "basename"
+ name: "inputs"
type: DT_STRING
- }
- input_arg {
- name: "num_shards"
- type: DT_INT32
+ number_attr: "N"
}
output_arg {
- name: "filename"
+ name: "output"
type: DT_STRING
}
+ attr {
+ name: "N"
+ type: "int"
+ has_minimum: true
+ minimum: 1
+ }
+ attr {
+ name: "separator"
+ type: "string"
+ default_value {
+ s: ""
+ }
+ }
}
op {
name: "Variable"
@@ -900,6 +957,244 @@ meta_graphs {
}
}
node {
+ name: "Const"
+ op: "Const"
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ }
+ }
+ }
+ }
+ attr {
+ key: "dtype"
+ value {
+ type: DT_STRING
+ }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_STRING
+ tensor_shape {
+ }
+ string_val: "/tmp/original/export/assets/foo.txt"
+ }
+ }
+ }
+ }
+ node {
+ name: "filename_tensor/initial_value"
+ op: "Const"
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ }
+ }
+ }
+ }
+ attr {
+ key: "dtype"
+ value {
+ type: DT_STRING
+ }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_STRING
+ tensor_shape {
+ }
+ string_val: "foo.txt"
+ }
+ }
+ }
+ }
+ node {
+ name: "filename_tensor"
+ op: "Variable"
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ }
+ }
+ }
+ }
+ attr {
+ key: "container"
+ value {
+ s: ""
+ }
+ }
+ attr {
+ key: "dtype"
+ value {
+ type: DT_STRING
+ }
+ }
+ attr {
+ key: "shape"
+ value {
+ shape {
+ }
+ }
+ }
+ attr {
+ key: "shared_name"
+ value {
+ s: ""
+ }
+ }
+ }
+ node {
+ name: "filename_tensor/Assign"
+ op: "Assign"
+ input: "filename_tensor"
+ input: "filename_tensor/initial_value"
+ attr {
+ key: "T"
+ value {
+ type: DT_STRING
+ }
+ }
+ attr {
+ key: "_class"
+ value {
+ list {
+ s: "loc:@filename_tensor"
+ }
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ }
+ }
+ }
+ }
+ attr {
+ key: "use_locking"
+ value {
+ b: true
+ }
+ }
+ attr {
+ key: "validate_shape"
+ value {
+ b: true
+ }
+ }
+ }
+ node {
+ name: "filename_tensor/read"
+ op: "Identity"
+ input: "filename_tensor"
+ attr {
+ key: "T"
+ value {
+ type: DT_STRING
+ }
+ }
+ attr {
+ key: "_class"
+ value {
+ list {
+ s: "loc:@filename_tensor"
+ }
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ }
+ }
+ }
+ }
+ }
+ node {
+ name: "Assign/value"
+ op: "Const"
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ }
+ }
+ }
+ }
+ attr {
+ key: "dtype"
+ value {
+ type: DT_STRING
+ }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_STRING
+ tensor_shape {
+ }
+ string_val: "foo.txt"
+ }
+ }
+ }
+ }
+ node {
+ name: "Assign"
+ op: "Assign"
+ input: "filename_tensor"
+ input: "Assign/value"
+ attr {
+ key: "T"
+ value {
+ type: DT_STRING
+ }
+ }
+ attr {
+ key: "_class"
+ value {
+ list {
+ s: "loc:@filename_tensor"
+ }
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ }
+ }
+ }
+ }
+ attr {
+ key: "use_locking"
+ value {
+ b: false
+ }
+ }
+ attr {
+ key: "validate_shape"
+ value {
+ b: true
+ }
+ }
+ }
+ node {
name: "Identity"
op: "Identity"
input: "y"
@@ -932,6 +1227,11 @@ meta_graphs {
input: "^b/Assign"
}
node {
+ name: "group_deps"
+ op: "NoOp"
+ input: "^Assign"
+ }
+ node {
name: "save/Const"
op: "Const"
attr {
@@ -962,6 +1262,63 @@ meta_graphs {
}
}
node {
+ name: "save/StringJoin/inputs_1"
+ op: "Const"
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ }
+ }
+ }
+ }
+ attr {
+ key: "dtype"
+ value {
+ type: DT_STRING
+ }
+ }
+ attr {
+ key: "value"
+ value {
+ tensor {
+ dtype: DT_STRING
+ tensor_shape {
+ }
+ string_val: "_temp_ff2bd25218b646ea9ed224eecdce5e79/part"
+ }
+ }
+ }
+ }
+ node {
+ name: "save/StringJoin"
+ op: "StringJoin"
+ input: "save/Const"
+ input: "save/StringJoin/inputs_1"
+ attr {
+ key: "N"
+ value {
+ i: 2
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ }
+ }
+ }
+ }
+ attr {
+ key: "separator"
+ value {
+ s: ""
+ }
+ }
+ }
+ node {
name: "save/num_shards"
op: "Const"
attr {
@@ -1024,7 +1381,7 @@ meta_graphs {
node {
name: "save/ShardedFilename"
op: "ShardedFilename"
- input: "save/Const"
+ input: "save/StringJoin"
input: "save/ShardedFilename/shard"
input: "save/num_shards"
attr {
@@ -1038,7 +1395,7 @@ meta_graphs {
}
}
node {
- name: "save/save/tensor_names"
+ name: "save/SaveV2/tensor_names"
op: "Const"
attr {
key: "_output_shapes"
@@ -1075,7 +1432,7 @@ meta_graphs {
}
}
node {
- name: "save/save/shapes_and_slices"
+ name: "save/SaveV2/shape_and_slices"
op: "Const"
attr {
key: "_output_shapes"
@@ -1112,15 +1469,15 @@ meta_graphs {
}
}
node {
- name: "save/save"
- op: "SaveSlices"
+ name: "save/SaveV2"
+ op: "SaveV2"
input: "save/ShardedFilename"
- input: "save/save/tensor_names"
- input: "save/save/shapes_and_slices"
+ input: "save/SaveV2/tensor_names"
+ input: "save/SaveV2/shape_and_slices"
input: "a"
input: "b"
attr {
- key: "T"
+ key: "dtypes"
value {
list {
type: DT_FLOAT
@@ -1133,7 +1490,7 @@ meta_graphs {
name: "save/control_dependency"
op: "Identity"
input: "save/ShardedFilename"
- input: "^save/save"
+ input: "^save/SaveV2"
attr {
key: "T"
value {
@@ -1159,11 +1516,65 @@ meta_graphs {
}
}
node {
- name: "save/ShardedFilespec"
- op: "ShardedFilespec"
+ name: "save/MergeV2Checkpoints/checkpoint_prefixes"
+ op: "Pack"
+ input: "save/ShardedFilename"
+ input: "^save/control_dependency"
+ attr {
+ key: "N"
+ value {
+ i: 1
+ }
+ }
+ attr {
+ key: "T"
+ value {
+ type: DT_STRING
+ }
+ }
+ attr {
+ key: "_output_shapes"
+ value {
+ list {
+ shape {
+ dim {
+ size: 1
+ }
+ }
+ }
+ }
+ }
+ attr {
+ key: "axis"
+ value {
+ i: 0
+ }
+ }
+ }
+ node {
+ name: "save/MergeV2Checkpoints"
+ op: "MergeV2Checkpoints"
+ input: "save/MergeV2Checkpoints/checkpoint_prefixes"
+ input: "save/Const"
+ attr {
+ key: "delete_old_dirs"
+ value {
+ b: true
+ }
+ }
+ }
+ node {
+ name: "save/Identity"
+ op: "Identity"
input: "save/Const"
- input: "save/num_shards"
input: "^save/control_dependency"
+ input: "^save/MergeV2Checkpoints"
+ attr {
+ key: "T"
+ value {
+ type: DT_STRING
+ }
+ }
attr {
key: "_output_shapes"
value {
@@ -1467,12 +1878,39 @@ meta_graphs {
}
saver_def {
filename_tensor_name: "save/Const:0"
- save_tensor_name: "save/ShardedFilespec:0"
+ save_tensor_name: "save/Identity:0"
restore_op_name: "save/restore_all"
max_to_keep: 5
sharded: true
keep_checkpoint_every_n_hours: 10000.0
- version: V1
+ version: V2
+ }
+ collection_def {
+ key: "asset_filepaths"
+ value {
+ node_list {
+ value: "Const:0"
+ }
+ }
+ }
+ collection_def {
+ key: "legacy_init_op"
+ value {
+ node_list {
+ value: "group_deps"
+ }
+ }
+ }
+ collection_def {
+ key: "saved_model_assets"
+ value {
+ any_list {
+ value {
+ type_url: "type.googleapis.com/tensorflow.AssetFileDef"
+ value: "\n\t\n\007Const:0\022\007foo.txt"
+ }
+ }
+ }
}
collection_def {
key: "trainable_variables"
diff --git a/tensorflow/cc/saved_model/testdata/half_plus_two_sharded/saved_model.pb b/tensorflow/cc/saved_model/testdata/half_plus_two_sharded/saved_model.pb
index e894f9b101..0df49f2168 100644
--- a/tensorflow/cc/saved_model/testdata/half_plus_two_sharded/saved_model.pb
+++ b/tensorflow/cc/saved_model/testdata/half_plus_two_sharded/saved_model.pb
Binary files differ