about summary refs log tree commit diff homepage
path: root/tensorflow/contrib/nn
diff options
context:
space:
mode:
authorGravatar Sourabh Bajaj <sourabhbajaj@google.com>2017-11-30 16:37:11 -0800
committerGravatar TensorFlower Gardener <gardener@tensorflow.org>2017-11-30 16:41:01 -0800
commitb2db981a6731e978453862a73dab892bc674db68 (patch)
treec11a7c4038e2595268113c2859c1d0d3072ede4f /tensorflow/contrib/nn
parent0438ac79bdb503ed267bec2146e7136ac8e99ff9 (diff)
Merge changes from github.
PiperOrigin-RevId: 177526301
Diffstat (limited to 'tensorflow/contrib/nn')
-rw-r--r-- tensorflow/contrib/nn/python/ops/cross_entropy.py | 2
-rw-r--r-- tensorflow/contrib/nn/python/ops/sampling_ops.py | 2
2 files changed, 2 insertions, 2 deletions
diff --git a/tensorflow/contrib/nn/python/ops/cross_entropy.py b/tensorflow/contrib/nn/python/ops/cross_entropy.py
index 61c1d1c6d9..5045f2c957 100644
--- a/tensorflow/contrib/nn/python/ops/cross_entropy.py
+++ b/tensorflow/contrib/nn/python/ops/cross_entropy.py
@@ -116,7 +116,7 @@ def deprecated_flipped_sparse_softmax_cross_entropy_with_logits(logits,
Raises:
ValueError: If logits are scalars (need to have rank >= 1) or if the rank
- of the labels is not equal to the rank of the labels minus one.
+ of the labels is not equal to the rank of the logits minus one.
"""
return nn.sparse_softmax_cross_entropy_with_logits(
labels=labels, logits=logits, name=name)
diff --git a/tensorflow/contrib/nn/python/ops/sampling_ops.py b/tensorflow/contrib/nn/python/ops/sampling_ops.py
index 2ae529e015..98749cff7e 100644
--- a/tensorflow/contrib/nn/python/ops/sampling_ops.py
+++ b/tensorflow/contrib/nn/python/ops/sampling_ops.py
@@ -34,7 +34,7 @@ def _rank_resample(weights, biases, inputs, sampled_values, num_resampled,
log(sum_j exp((w_i * x_j + b_i) / resampling_temperature))
- where w_i, b_i are the weight and bias of the i-th class, repsectively,
+ where w_i, b_i are the weight and bias of the i-th class, respectively,
and j ranges over the rows of `inputs`. For efficiency, we rearrange the
computation to