aboutsummaryrefslogtreecommitdiffhomepage
path: root/tensorflow/contrib/losses
diff options
context:
space:
mode:
authorGravatar Alexandre Passos <apassos@google.com>2017-02-06 15:15:05 -0800
committerGravatar TensorFlower Gardener <gardener@tensorflow.org>2017-02-06 15:27:19 -0800
commitec843c549280444fa747627720eac42112225261 (patch)
tree8bfe8667d050505e5c8ed3a0864bdea551c885de /tensorflow/contrib/losses
parent18c3e122242660948f9ca069f0244fd0d6b93c3c (diff)
Better deprecation warnings for the losses move.
Change: 146717512
Diffstat (limited to 'tensorflow/contrib/losses')
-rw-r--r--tensorflow/contrib/losses/python/losses/loss_ops.py24
1 files changed, 18 insertions, 6 deletions
diff --git a/tensorflow/contrib/losses/python/losses/loss_ops.py b/tensorflow/contrib/losses/python/losses/loss_ops.py
index 69293bea13..1e4fb58945 100644
--- a/tensorflow/contrib/losses/python/losses/loss_ops.py
+++ b/tensorflow/contrib/losses/python/losses/loss_ops.py
@@ -299,7 +299,9 @@ def absolute_difference(predictions, labels=None, weights=1.0, scope=None):
return compute_weighted_loss(losses, weights, scope=scope)
-@deprecated("2016-12-30", "Use tf.losses.sigmoid_cross_entropy instead.")
+@deprecated("2016-12-30",
+ "Use tf.losses.sigmoid_cross_entropy instead. Note that the order "
+ "of the predictions and labels arguments was changed.")
def sigmoid_cross_entropy(
logits, multi_class_labels, weights=1.0, label_smoothing=0, scope=None):
"""Creates a cross-entropy loss using tf.nn.sigmoid_cross_entropy_with_logits.
@@ -346,7 +348,9 @@ def sigmoid_cross_entropy(
return compute_weighted_loss(losses, weights, scope=scope)
-@deprecated("2016-12-30", "Use tf.losses.softmax_cross_entropy instead.")
+@deprecated("2016-12-30",
+ "Use tf.losses.softmax_cross_entropy instead. Note that the order "
+ "of the logits and labels arguments has been changed.")
def softmax_cross_entropy(
logits, onehot_labels, weights=1.0, label_smoothing=0, scope=None):
"""Creates a cross-entropy loss using tf.nn.softmax_cross_entropy_with_logits.
@@ -394,7 +398,9 @@ def softmax_cross_entropy(
return compute_weighted_loss(losses, weights, scope=scope)
-@deprecated("2016-12-30", "Use tf.losses.sparse_softmax_cross_entropy instead.")
+@deprecated("2016-12-30",
+ "Use tf.losses.sparse_softmax_cross_entropy instead. Note that "
+ "the order of the logits and labels arguments has been changed.")
def sparse_softmax_cross_entropy(logits, labels, weights=1.0, scope=None):
"""Cross-entropy loss using `tf.nn.sparse_softmax_cross_entropy_with_logits`.
@@ -429,7 +435,9 @@ def sparse_softmax_cross_entropy(logits, labels, weights=1.0, scope=None):
return compute_weighted_loss(losses, weights, scope=scope)
-@deprecated("2016-12-30", "Use tf.losses.log_loss instead.")
+@deprecated("2016-12-30",
+ "Use tf.losses.log_loss instead. Note that the order of the "
+ "predictions and labels arguments was changed.")
def log_loss(predictions, labels=None, weights=1.0, epsilon=1e-7, scope=None):
"""Adds a Log Loss term to the training procedure.
@@ -468,7 +476,9 @@ def log_loss(predictions, labels=None, weights=1.0, epsilon=1e-7, scope=None):
return compute_weighted_loss(losses, weights, scope=scope)
-@deprecated("2016-12-30", "Use tf.losses.hinge_loss instead.")
+@deprecated("2016-12-30",
+ "Use tf.losses.hinge_loss instead. Note that the order of the "
+            "predictions and labels arguments was changed.")
def hinge_loss(logits, labels=None, scope=None):
"""Method that returns the loss tensor for hinge loss.
@@ -530,7 +540,9 @@ def mean_squared_error(predictions, labels=None, weights=1.0, scope=None):
return compute_weighted_loss(losses, weights, scope=scope)
-@deprecated("2016-12-30", "Use tf.losses.mean_pairwise_squared_error instead.")
+@deprecated("2016-12-30",
+ "Use tf.losses.mean_pairwise_squared_error instead. Note that the "
+ "order of the predictions and labels arguments was changed.")
def mean_pairwise_squared_error(
predictions, labels=None, weights=1.0, scope=None):
"""Adds a pairwise-errors-squared loss to the training procedure.