Diffstat (limited to 'tensorflow/contrib/layers/python/layers/optimizers.py')
-rw-r--r--  tensorflow/contrib/layers/python/layers/optimizers.py  8
1 file changed, 6 insertions, 2 deletions
diff --git a/tensorflow/contrib/layers/python/layers/optimizers.py b/tensorflow/contrib/layers/python/layers/optimizers.py
index e38b93790b..b6e3af5451 100644
--- a/tensorflow/contrib/layers/python/layers/optimizers.py
+++ b/tensorflow/contrib/layers/python/layers/optimizers.py
@@ -61,7 +61,8 @@ def optimize_loss(loss,
                   update_ops=None,
                   variables=None,
                   name=None,
-                  summaries=None):
+                  summaries=None,
+                  colocate_gradients_with_ops=False):
   """Given loss and parameters for optimizer, returns a training op.
 
   Various ways of passing optimizers, include:
@@ -112,6 +113,8 @@ def optimize_loss(loss,
     summaries: List of internal quantities to visualize on tensorboard. If not
                set only the loss and the learning rate will be reported.
                The complete list is in OPTIMIZER_SUMMARIES.
+    colocate_gradients_with_ops: If True, try colocating gradients with the
+                                 corresponding op.
 
   Returns:
     Training op.
@@ -185,7 +188,8 @@ def optimize_loss(loss,
       variables = vars_.trainable_variables()
 
     # Compute gradients.
-    gradients = opt.compute_gradients(loss, variables)
+    gradients = opt.compute_gradients(loss, variables,
+        colocate_gradients_with_ops=colocate_gradients_with_ops)
 
     # Optionally add gradient noise.
     if gradient_noise_scale is not None:
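
Usage note (not part of the commit): a minimal sketch of how the new flag could be passed through optimize_loss. The toy loss, the "SGD" optimizer string, the learning rate, and the hand-built global_step variable are illustrative assumptions; only the colocate_gradients_with_ops keyword argument comes from this change.

import tensorflow as tf
from tensorflow.contrib.layers.python.layers import optimizers

# Toy model (illustrative only): fit a single weight so x * w approximates 1.0.
x = tf.placeholder(tf.float32, shape=[None, 1])
w = tf.get_variable("w", shape=[1, 1])
loss = tf.reduce_mean(tf.square(tf.matmul(x, w) - 1.0))

# Integer step counter expected by optimize_loss.
global_step = tf.Variable(0, trainable=False, name="global_step")

train_op = optimizers.optimize_loss(
    loss,
    global_step,
    learning_rate=0.1,
    optimizer="SGD",
    # New flag from this change: forwarded to compute_gradients so each
    # gradient op is colocated with the op that produced its input.
    colocate_gradients_with_ops=True)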