diff options
Diffstat (limited to 'tensorflow/python/training/optimizer.py')
-rw-r--r-- | tensorflow/python/training/optimizer.py | 2 |
1 file changed, 1 insertion, 1 deletion
diff --git a/tensorflow/python/training/optimizer.py b/tensorflow/python/training/optimizer.py index 1e8d6b0f12..1c3ac2d09d 100644 --- a/tensorflow/python/training/optimizer.py +++ b/tensorflow/python/training/optimizer.py @@ -75,7 +75,7 @@ class Optimizer(object): # grads_and_vars is a list of tuples (gradient, variable). Do whatever you # need to the 'gradient' part, for example cap them, etc. - capped_grads_and_vars = [(MyCapper(gv[0]), gv[1])) for gv in grads_and_vars] + capped_grads_and_vars = [(MyCapper(gv[0]), gv[1]) for gv in grads_and_vars] # Ask the optimizer to apply the capped gradients. opt.apply_gradients(capped_grads_and_vars) |