aboutsummaryrefslogtreecommitdiffhomepage
path: root/tensorflow/python/training/saver.py
diff options
context:
space:
mode:
authorGravatar Anna R <annarev@google.com>2018-01-30 20:28:38 -0800
committerGravatar Michael Case <mikecase@google.com>2018-01-31 16:48:35 -0800
commite9d4d3d06c0fb211f7488f868fefb477f07df4f8 (patch)
tree727a6fa1cb1a08d259cb461d17819409278fb8cf /tensorflow/python/training/saver.py
parent2a01e3f2ee1ec5b1cf212dd949c1072129e4770a (diff)
Adding tf_export decorators/calls to TensorFlow functions and constants.
PiperOrigin-RevId: 183936100
Diffstat (limited to 'tensorflow/python/training/saver.py')
-rw-r--r--tensorflow/python/training/saver.py10
1 file changed, 10 insertions, 0 deletions
diff --git a/tensorflow/python/training/saver.py b/tensorflow/python/training/saver.py
index abc700b810..3888e9bba4 100644
--- a/tensorflow/python/training/saver.py
+++ b/tensorflow/python/training/saver.py
@@ -53,6 +53,7 @@ from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.training import training_util
from tensorflow.python.training.checkpoint_state_pb2 import CheckpointState
from tensorflow.python.util import compat
+from tensorflow.python.util.tf_export import tf_export
# Op names which identify variable reads which should be saved.
@@ -889,6 +890,7 @@ def _GetCheckpointFilename(save_dir, latest_filename):
return os.path.join(save_dir, latest_filename)
+@tf_export("train.generate_checkpoint_state_proto")
def generate_checkpoint_state_proto(save_dir,
model_checkpoint_path,
all_model_checkpoint_paths=None):
@@ -933,6 +935,7 @@ def generate_checkpoint_state_proto(save_dir,
return coord_checkpoint_proto
+@tf_export("train.update_checkpoint_state")
def update_checkpoint_state(save_dir,
model_checkpoint_path,
all_model_checkpoint_paths=None,
@@ -1025,6 +1028,7 @@ def _update_checkpoint_state(save_dir,
text_format.MessageToString(ckpt))
+@tf_export("train.get_checkpoint_state")
def get_checkpoint_state(checkpoint_dir, latest_filename=None):
"""Returns CheckpointState proto from the "checkpoint" file.
@@ -1082,6 +1086,7 @@ def get_checkpoint_state(checkpoint_dir, latest_filename=None):
return ckpt
+@tf_export("train.Saver")
class Saver(object):
"""Saves and restores variables.
@@ -1783,6 +1788,7 @@ def _prefix_to_checkpoint_path(prefix, format_version):
return prefix # Just the data file.
+@tf_export("train.latest_checkpoint")
def latest_checkpoint(checkpoint_dir, latest_filename=None):
"""Finds the filename of latest saved checkpoint file.
@@ -1812,6 +1818,7 @@ def latest_checkpoint(checkpoint_dir, latest_filename=None):
return None
+@tf_export("train.import_meta_graph")
def import_meta_graph(meta_graph_or_file, clear_devices=False,
import_scope=None, **kwargs):
"""Recreates a Graph saved in a `MetaGraphDef` proto.
@@ -1913,6 +1920,7 @@ def import_meta_graph(meta_graph_or_file, clear_devices=False,
return None
+@tf_export("train.export_meta_graph")
def export_meta_graph(filename=None,
meta_info_def=None,
graph_def=None,
@@ -1989,6 +1997,7 @@ def export_meta_graph(filename=None,
return meta_graph_def
+@tf_export("train.checkpoint_exists")
def checkpoint_exists(checkpoint_prefix):
"""Checks whether a V1 or V2 checkpoint exists with the specified prefix.
@@ -2013,6 +2022,7 @@ def checkpoint_exists(checkpoint_prefix):
return False
+@tf_export("train.get_checkpoint_mtimes")
def get_checkpoint_mtimes(checkpoint_prefixes):
"""Returns the mtimes (modification timestamps) of the checkpoints.