diff options
author | 2017-06-23 12:30:57 -0700 | |
---|---|---|
committer | 2017-06-23 12:34:52 -0700 | |
commit | c1e0d6067c23b0e87c97b17fd0052f438bff76c5 (patch) | |
tree | bc958174f2ab8f5d9811ae14accf55d128a03ee0 /tensorflow | |
parent | 0fbb0c45f9776879abf5620ed9be50ffb1e6626c (diff) |
Add a None check for save_path when restoring checkpoints. If something is wrong in tf.train.latest_checkpoint, it will often return None, and it is nice to have a common-sense check for this in restore. This way, log.error says what has happened.
PiperOrigin-RevId: 159979481
Diffstat (limited to 'tensorflow')
-rw-r--r-- | tensorflow/python/training/saver.py | 5 |
1 file changed, 5 insertions, 0 deletions
diff --git a/tensorflow/python/training/saver.py b/tensorflow/python/training/saver.py
index 20d520fd7b..d9304ff50a 100644
--- a/tensorflow/python/training/saver.py
+++ b/tensorflow/python/training/saver.py
@@ -1547,9 +1547,14 @@ class Saver(object):
     Args:
       sess: A `Session` to use to restore the parameters.
       save_path: Path where parameters were previously saved.
+
+    Raises:
+      ValueError: If save_path is None.
     """
     if self._is_empty:
       return
+    if save_path is None:
+      raise ValueError("Can't load save_path when it is None.")
     logging.info("Restoring parameters from %s", save_path)
     sess.run(self.saver_def.restore_op_name,
              {self.saver_def.filename_tensor_name: save_path})