author     A. Unique TensorFlower <nobody@tensorflow.org>   2016-04-07 12:33:50 -0800
committer  TensorFlower Gardener <gardener@tensorflow.org>  2016-04-07 13:41:58 -0700
commit     b4698ac2c18730df33f2845d0f48fc0f5b0386bb (patch)
tree       97cb5b90da225dd1b571206058d934d4f7b1a731
parent     bc11f3691918767361ed9e41bcb7001c74d95c61 (diff)
Fixes documentation on logistic and softmax ops in loss_ops.
Change: 119302560
-rw-r--r--  tensorflow/contrib/losses/python/losses/loss_ops.py  9
1 file changed, 4 insertions(+), 5 deletions(-)
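
As a hedged illustration of the contract the updated docstrings describe (not part of this change, and written against the later `labels=`/`logits=` keyword form of the core ops rather than the 2016-era positional signatures), both losses expect raw pre-activation scores, and the softmax loss additionally expects each target row to be a normalized probability distribution:

# Minimal sketch, not from this commit: both losses consume *unscaled*
# logits, and the softmax loss expects `target` rows that sum to 1.
import tensorflow as tf

logits = tf.constant([[2.0, -1.0, 0.5],
                      [0.3,  0.1, 1.2]])      # raw, pre-activation scores
targets = tf.constant([[1.0, 0.0, 0.0],
                       [0.0, 0.0, 1.0]])      # each row sums to 1

# Correct: pass unscaled logits; the op applies the softmax internally.
softmax_loss = tf.nn.softmax_cross_entropy_with_logits(labels=targets,
                                                        logits=logits)

# Incorrect: feeding already-softmaxed values applies the softmax twice
# and silently produces the wrong loss.
bad_loss = tf.nn.softmax_cross_entropy_with_logits(labels=targets,
                                                   logits=tf.nn.softmax(logits))

# The sigmoid (logistic) variant likewise wants raw logits, but its targets
# are per-element probabilities in [0, 1] rather than rows summing to 1.
sigmoid_loss = tf.nn.sigmoid_cross_entropy_with_logits(labels=targets,
                                                       logits=logits)
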
diff --git a/tensorflow/contrib/losses/python/losses/loss_ops.py b/tensorflow/contrib/losses/python/losses/loss_ops.py
index c704222c4b..ebc3564e3d 100644
--- a/tensorflow/contrib/losses/python/losses/loss_ops.py
+++ b/tensorflow/contrib/losses/python/losses/loss_ops.py
@@ -229,9 +229,8 @@ def squared(predicted, target, name=None):
def logistic(logit, target, name=None):
"""Calculates the logistic cross-entropy loss, averaged across batches.
- **WARNING:** `logit` must be unscaled, while the `target` should be a
- normalized probability prediction. See
- `tf.nn.sigmoid_cross_entropy_with_logits` for more details.
+ **WARNING:** `logit` must be unscaled.
+ See `tf.nn.sigmoid_cross_entropy_with_logits` for more details.
Args:
logit: A `Tensor` of shape `[batch_size, dim_1, ..., dim_n]`
@@ -257,8 +256,8 @@ def softmax(logit, target, name=None):
"""Calculates the softmax cross-entropy loss, averaged across batches.
**WARNING:** `logit` must be unscaled, while the `target` should be a
- normalized probability prediction. See
- `tf.nn.sigmoid_cross_entropy_with_logits` for more details.
+ normalized probability prediction.
+ See `tf.nn.softmax_cross_entropy_with_logits` for more details.
Args:
logit: Tensor of actual values. Shape must have rank 2, generally