author    Dan Smilkov <smilkov@google.com>    2016-11-07 11:30:08 -0800
committer TensorFlower Gardener <gardener@tensorflow.org>    2016-11-07 11:44:10 -0800
commit    c344444b901db156d7a790396beb354d55e159aa (patch)
tree      413eaecec7feff26fc5bbcec9341fb9f4cb9c14e /tensorflow/tensorboard/plugins
parent    4c782b58ee2912f7e51978961ef1883bf3ae045f (diff)
Reload the checkpoint reader when the latest checkpoint has changed.
The embedding projector plugin caches the checkpoint reader, which points to the checkpoint that was the latest when TensorBoard started. After some time, the saver removes that old checkpoint file (it keeps only the N latest checkpoints) while the reader is still pointing to it. Also add more tests covering the V1 and V2 checkpoint formats.

Change: 138416977
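
For context, a minimal sketch of the failure mode and of the reload pattern this change adopts. The caching helper below and its names (get_checkpoint_reader, _cached_reader) are illustrative, not the plugin's actual code; tf.train.latest_checkpoint, tf.train.NewCheckpointReader, and tf.train.Saver(max_to_keep=N) are the TensorFlow APIs it assumes.

import tensorflow as tf

# Hypothetical module-level cache, mirroring how the plugin keeps one
# checkpoint reader around between requests.
_cached_reader = None
_cached_ckpt_path = None

def get_checkpoint_reader(logdir):
  """Returns a reader for the latest checkpoint, reloading it if it moved.

  A Saver created with max_to_keep=N deletes older checkpoint files as new
  ones are written, so a reader opened against the checkpoint that was
  "latest" at startup can end up pointing at a file that no longer exists.
  Re-resolving the latest checkpoint on every access avoids that.
  """
  global _cached_reader, _cached_ckpt_path
  ckpt_path = tf.train.latest_checkpoint(logdir)
  if ckpt_path is None:
    return None
  if _cached_reader is None or ckpt_path != _cached_ckpt_path:
    # The latest checkpoint changed since the reader was created (or no
    # reader exists yet): drop the stale reader and open a fresh one.
    _cached_reader = tf.train.NewCheckpointReader(ckpt_path)
    _cached_ckpt_path = ckpt_path
  return _cached_reader
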
Diffstat (limited to 'tensorflow/tensorboard/plugins')
-rw-r--r--  tensorflow/tensorboard/plugins/projector/plugin.py  44
1 file changed, 34 insertions, 10 deletions
diff --git a/tensorflow/tensorboard/plugins/projector/plugin.py b/tensorflow/tensorboard/plugins/projector/plugin.py
index c293d813c7..f6dd2bcad7 100644
--- a/tensorflow/tensorboard/plugins/projector/plugin.py
+++ b/tensorflow/tensorboard/plugins/projector/plugin.py
@@ -61,6 +61,27 @@ def _read_tensor_file(fpath):
  return tensor
+def _latest_checkpoints_changed(configs, run_path_pairs):
+ """Returns true if the latest checkpoint has changed in any of the runs."""
+ for run_name, logdir in run_path_pairs:
+ if run_name not in configs:
+ continue
+ config = configs[run_name]
+ if not config.model_checkpoint_path:
+ continue
+
+ # See if you can find a checkpoint file in the logdir.
+ ckpt_path = latest_checkpoint(logdir)
+ if not ckpt_path:
+ # See if you can find a checkpoint in the parent of logdir.
+ ckpt_path = latest_checkpoint(os.path.join('../', logdir))
+ if not ckpt_path:
+ continue
+ if config.model_checkpoint_path != ckpt_path:
+ return True
+ return False
+
+
class ProjectorPlugin(TBPlugin):
"""Embedding projector."""
@@ -88,9 +109,16 @@ class ProjectorPlugin(TBPlugin):
  @property
  def configs(self):
    """Returns a map of run paths to `ProjectorConfig` protos."""
-    if self._run_paths_changed():
-      self._configs, self.config_fpaths = self._read_config_files(
-          self.run_paths, self.logdir)
+    run_path_pairs = self.run_paths.items()
+    # If there are no summary event files, the projector should still work,
+    # treating the `logdir` as the model checkpoint directory.
+    if not run_path_pairs:
+      run_path_pairs.append(('.', self.logdir))
+    if (self._run_paths_changed() or
+        _latest_checkpoints_changed(self._configs, run_path_pairs)):
+      self.readers = {}
+      self._configs, self.config_fpaths = self._read_latest_config_files(
+          run_path_pairs)
    self._augment_configs_with_checkpoint_info()
    return self._configs
@@ -140,15 +168,11 @@ class ProjectorPlugin(TBPlugin):
      del self._configs[run]
      del self.config_fpaths[run]
-  def _read_config_files(self, run_paths, summary_logdir):
-    # If there are no summary event files, the projector can still work,
-    # thus treating the `logdir` as the model checkpoint directory.
-    if not run_paths:
-      run_paths['.'] = summary_logdir
-
+  def _read_latest_config_files(self, run_path_pairs):
+    """Reads and returns the projector config files in every run directory."""
    configs = {}
    config_fpaths = {}
-    for run_name, logdir in run_paths.items():
+    for run_name, logdir in run_path_pairs:
      config = ProjectorConfig()
      config_fpath = os.path.join(logdir, PROJECTOR_FILENAME)
      if file_io.file_exists(config_fpath):