Diffstat (limited to 'tensorflow/contrib/model_pruning/python')
-rw-r--r--  tensorflow/contrib/model_pruning/python/learning.py | 11 +++++++----
-rw-r--r--  tensorflow/contrib/model_pruning/python/pruning.py  |  8 ++++----
2 files changed, 11 insertions(+), 8 deletions(-)
diff --git a/tensorflow/contrib/model_pruning/python/learning.py b/tensorflow/contrib/model_pruning/python/learning.py
index 2b79c23cef..26695237c2 100644
--- a/tensorflow/contrib/model_pruning/python/learning.py
+++ b/tensorflow/contrib/model_pruning/python/learning.py
@@ -33,11 +33,14 @@ to support training of pruned models
   # Create the train_op
   train_op = slim.learning.create_train_op(total_loss, optimizer)
 
-  # Set up sparsity
-  sparsity = pruning.setup_gradual_sparsity(self.global_step)
+  # Parse pruning hyperparameters
+  pruning_hparams = pruning.get_pruning_hparams().parse(FLAGS.pruning_hparams)
 
-  # Create mask update op
-  mask_update_op = pruning.add_mask_update_ip(sparsity)
+  # Create a pruning object using the pruning_hparams
+  p = pruning.Pruning(pruning_hparams)
+
+  # Add mask update ops to the graph
+  mask_update_op = p.conditional_mask_update_op()
 
   # Run training.
   learning.train(train_op,
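
For context, the flow described by this updated docstring can be sketched end to end. The sketch below is not part of the change and assumes TF 1.x with contrib available; build_model, my_log_dir, and the FLAGS definition are hypothetical placeholders, while the pruning calls themselves (get_pruning_hparams, Pruning, conditional_mask_update_op) are the ones shown in the diff.

import tensorflow as tf
from tensorflow.contrib import slim
from tensorflow.contrib.model_pruning.python import learning
from tensorflow.contrib.model_pruning.python import pruning

tf.app.flags.DEFINE_string('pruning_hparams', '',
                           'Comma-separated pruning hyperparameters')
FLAGS = tf.app.flags.FLAGS

total_loss = build_model()  # hypothetical: returns a scalar loss tensor
optimizer = tf.train.GradientDescentOptimizer(learning_rate=0.01)

# Create the train_op
train_op = slim.learning.create_train_op(total_loss, optimizer)

# Parse pruning hyperparameters, e.g. 'target_sparsity=0.9'
pruning_hparams = pruning.get_pruning_hparams().parse(FLAGS.pruning_hparams)

# Create a pruning object and add the mask update ops to the graph
p = pruning.Pruning(pruning_hparams)
mask_update_op = p.conditional_mask_update_op()

# Run training; this learning.train wrapper accepts mask_update_op in
# addition to tf-slim's usual arguments
learning.train(train_op, my_log_dir, mask_update_op)
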
diff --git a/tensorflow/contrib/model_pruning/python/pruning.py b/tensorflow/contrib/model_pruning/python/pruning.py
index 4b7af18b33..da9d398cbc 100644
--- a/tensorflow/contrib/model_pruning/python/pruning.py
+++ b/tensorflow/contrib/model_pruning/python/pruning.py
@@ -518,11 +518,11 @@ class Pruning(object):
       summary.scalar('last_mask_update_step', self._last_update_step)
       masks = get_masks()
       thresholds = get_thresholds()
-      for index, mask in enumerate(masks):
+      for mask, threshold in zip(masks, thresholds):
         if not self._exists_in_do_not_prune_list(mask.name):
-          summary.scalar(mask.name + '/sparsity', nn_impl.zero_fraction(mask))
-          summary.scalar(thresholds[index].op.name + '/threshold',
-                         thresholds[index])
+          summary.scalar(mask.op.name + '/sparsity',
+                         nn_impl.zero_fraction(mask))
+          summary.scalar(threshold.op.name + '/threshold', threshold)
 
   def print_hparams(self):
     logging.info(self._spec.to_json())
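
The summary change above pairs each mask with its matching threshold via zip and switches the sparsity tag from mask.name to mask.op.name. A brief sketch of why that matters, assuming TF 1.x graph mode; the variable name 'conv1/mask' is hypothetical:

import tensorflow as tf

# A variable's .name carries a ':0' output suffix; .op.name does not,
# so .op.name gives a colon-free, consistent summary tag.
mask = tf.Variable(tf.ones([3, 3]), name='conv1/mask')
print(mask.name)     # conv1/mask:0  (tensor name, includes output index)
print(mask.op.name)  # conv1/mask    (op name, used as the summary tag)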