Diffstat (limited to 'tensorflow/python/ops/losses/util.py')
 tensorflow/python/ops/losses/util.py | 8 ++++++--
 1 file changed, 6 insertions(+), 2 deletions(-)
diff --git a/tensorflow/python/ops/losses/util.py b/tensorflow/python/ops/losses/util.py
index 09ad874fae..3414df475f 100644
--- a/tensorflow/python/ops/losses/util.py
+++ b/tensorflow/python/ops/losses/util.py
@@ -57,7 +57,7 @@ def get_losses(scope=None, loss_collection=ops.GraphKeys.LOSSES):
 def get_regularization_losses(scope=None):
-  """Gets the regularization losses.
+  """Gets the list of regularization losses.
 
   Args:
     scope: An optional scope for filtering the losses to return.
@@ -88,7 +88,11 @@ def get_regularization_loss(scope=None, name="total_regularization_loss"):
 def get_total_loss(add_regularization_losses=True, name="total_loss"):
   """Returns a tensor whose value represents the total loss.
-  Notice that the function adds the given losses to the regularization losses.
+  In particular, this adds any losses you have added with `tf.add_loss()` to
+  any regularization losses that have been added by regularization parameters
+  on layer constructors, e.g. `tf.layers`. Be sure to use this if you are
+  constructing a loss_op manually; otherwise regularization arguments on
+  `tf.layers` methods will not function.
 
   Args:
     add_regularization_losses: A boolean indicating whether or not to use the
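
For context, a minimal sketch of the behavior the new docstring describes, assuming a TF 1.x graph-mode setup; the placeholder shapes, the l2_regularizer scale, and all variable names are illustrative and not part of this change. A data loss registered with tf.losses.add_loss() and a regularization term contributed by the kernel_regularizer argument of tf.layers.dense land in separate graph collections; tf.losses.get_total_loss() sums both, whereas minimizing the data-loss tensor alone would silently drop the regularization term.

    import tensorflow as tf

    inputs = tf.placeholder(tf.float32, shape=[None, 10])
    labels = tf.placeholder(tf.float32, shape=[None, 1])

    # kernel_regularizer adds a term to tf.GraphKeys.REGULARIZATION_LOSSES.
    logits = tf.layers.dense(
        inputs, 1,
        kernel_regularizer=tf.contrib.layers.l2_regularizer(scale=1e-4))

    # Register the data loss in tf.GraphKeys.LOSSES.
    data_loss = tf.reduce_mean(tf.square(logits - labels))
    tf.losses.add_loss(data_loss)

    # Sums both collections; minimizing data_loss alone would ignore the
    # regularization term added by the layer constructor.
    total_loss = tf.losses.get_total_loss(add_regularization_losses=True)
    train_op = tf.train.GradientDescentOptimizer(0.01).minimize(total_loss)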