author     2018-06-14 02:14:42 -0700
committer  2018-06-14 02:17:24 -0700
commit     8d9787bed57f1dd5d697ff847cd5598ecc032620 (patch)
tree       26accb03641e90b4236edfc1da587cd8f4638666 /tensorflow/contrib/opt
parent     0b8c5806f4f1d3a47b30bf203b3e456f036b0adc (diff)
Automated g4 rollback of changelist 200467580
PiperOrigin-RevId: 200525639
Diffstat (limited to 'tensorflow/contrib/opt')
 tensorflow/contrib/opt/python/training/drop_stale_gradient_optimizer.py | 7 +++----
 1 file changed, 3 insertions(+), 4 deletions(-)
diff --git a/tensorflow/contrib/opt/python/training/drop_stale_gradient_optimizer.py b/tensorflow/contrib/opt/python/training/drop_stale_gradient_optimizer.py
index 918165bc6a..4a905b1b2a 100644
--- a/tensorflow/contrib/opt/python/training/drop_stale_gradient_optimizer.py
+++ b/tensorflow/contrib/opt/python/training/drop_stale_gradient_optimizer.py
@@ -63,7 +63,7 @@ class DropStaleGradientOptimizer(optimizer.Optimizer):
   def compute_gradients(self, loss, *args, **kwargs):
     # Record current global step for worker.
     with ops.colocate_with(loss):
-      self._local_step = training_util.get_global_step().read_value() + 0
+      self._local_step = training_util.get_global_step() + 0
 
     with ops.control_dependencies([self._local_step]):
       loss = gen_array_ops.identity(loss)
@@ -102,7 +102,7 @@ class DropStaleGradientOptimizer(optimizer.Optimizer):
       with ops.control_dependencies(gradients), ops.colocate_with(global_step):
         staleness = gen_array_ops.reshape(
-            global_step.read_value() - self._local_step, shape=())
+            global_step - self._local_step, shape=())
 
       conditional_update = stale_counter.assign_add(control_flow_ops.cond(
           gen_math_ops.less_equal(staleness, self._staleness),
@@ -110,6 +110,5 @@ class DropStaleGradientOptimizer(optimizer.Optimizer):
       summary.scalar(
           "Gradient staleness percentage",
-          stale_counter / (math_ops.cast(global_step.read_value() + 1,
-                                         dtypes.float32)))
+          stale_counter / (math_ops.cast(global_step + 1, dtypes.float32)))
       return conditional_update
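
For context: DropStaleGradientOptimizer wraps a base optimizer and drops a worker's gradients when the global step has advanced past a configured staleness bound between compute_gradients() and apply_gradients(). The rollback above replaces explicit global_step.read_value() calls with implicit tensor conversion; the "+ 0" still snapshots the step value into a fresh tensor at graph-construction time. Below is a minimal usage sketch under TF 1.x; the toy weight, loss, learning rate, and session loop are illustrative assumptions, not part of this commit.

# Illustrative sketch (not from this commit): wrap plain SGD so that
# stale gradients are dropped instead of applied.
import tensorflow as tf
from tensorflow.contrib.opt.python.training import drop_stale_gradient_optimizer

# Toy model: a single weight pulled toward zero by a squared loss.
w = tf.Variable(3.0)
loss = tf.square(w)
global_step = tf.train.get_or_create_global_step()

# staleness=1: apply a gradient only if the global step has advanced by
# at most 1 since compute_gradients() snapshotted it; otherwise drop it
# and count it toward the "Gradient staleness percentage" summary.
opt = drop_stale_gradient_optimizer.DropStaleGradientOptimizer(
    tf.train.GradientDescentOptimizer(0.1), staleness=1)
train_op = opt.minimize(loss, global_step=global_step)

with tf.Session() as sess:
  sess.run(tf.global_variables_initializer())
  sess.run(train_op)

In a single-process run like this the staleness is always 0, so every update is applied; the drop path only triggers in asynchronous multi-worker training, where another worker may bump the global step between a worker's gradient computation and its application.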