author     Jianwei Xie <xiejw@google.com>  2016-12-06 16:31:01 -0800
committer  TensorFlower Gardener <gardener@tensorflow.org>  2016-12-06 16:44:33 -0800
commit     761b12ed82d31195de002cdd687cbb77d7aba628 (patch)
tree       dbfc58f0fb2700631c2e3cf6e4e37cc3579f7e8e /tensorflow/examples/tutorials/word2vec
parent     059ccad4d4bac851da9fa9694c23dd49a4089bc6 (diff)
Swap the input and label arguments in nce_loss
Change: 141244045
Diffstat (limited to 'tensorflow/examples/tutorials/word2vec')
-rw-r--r--  tensorflow/examples/tutorials/word2vec/word2vec_basic.py | 8
1 file changed, 6 insertions, 2 deletions
diff --git a/tensorflow/examples/tutorials/word2vec/word2vec_basic.py b/tensorflow/examples/tutorials/word2vec/word2vec_basic.py
index 1131360ab5..bc502edd8b 100644
--- a/tensorflow/examples/tutorials/word2vec/word2vec_basic.py
+++ b/tensorflow/examples/tutorials/word2vec/word2vec_basic.py
@@ -160,8 +160,12 @@ with graph.as_default():
   # tf.nce_loss automatically draws a new sample of the negative labels each
   # time we evaluate the loss.
   loss = tf.reduce_mean(
-      tf.nn.nce_loss(nce_weights, nce_biases, embed, train_labels,
-                     num_sampled, vocabulary_size))
+      tf.nn.nce_loss(weights=nce_weights,
+                     biases=nce_biases,
+                     labels=train_labels,
+                     inputs=embed,
+                     num_sampled=num_sampled,
+                     num_classes=vocabulary_size))

   # Construct the SGD optimizer using a learning rate of 1.0.
   optimizer = tf.train.GradientDescentOptimizer(1.0).minimize(loss)
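
For context, the sketch below shows how the updated call sits in the surrounding graph of word2vec_basic.py. Because the arguments are passed by keyword, the call stays correct whether the nce_loss signature lists labels before or after inputs, which is exactly what this change guards against. The variable names follow the tutorial, but the concrete sizes (vocabulary_size, embedding_size, batch_size, num_sampled) are assumed here purely for illustration, and the snippet targets the pre-2.x TensorFlow API used at the time of this commit.

# Illustrative sketch (not part of this commit): the updated nce_loss call
# in a minimal, self-contained graph. The sizes below are assumed values.
import math

import tensorflow as tf

vocabulary_size = 50000  # assumed vocabulary size
embedding_size = 128     # assumed embedding dimension
batch_size = 128         # assumed batch size
num_sampled = 64         # assumed number of negative samples per batch

graph = tf.Graph()
with graph.as_default():
  # Input word ids and their context-word labels; nce_loss expects labels of
  # shape [batch_size, num_true].
  train_inputs = tf.placeholder(tf.int32, shape=[batch_size])
  train_labels = tf.placeholder(tf.int32, shape=[batch_size, 1])

  # Embedding matrix and the lookup of the input words.
  embeddings = tf.Variable(
      tf.random_uniform([vocabulary_size, embedding_size], -1.0, 1.0))
  embed = tf.nn.embedding_lookup(embeddings, train_inputs)

  # Output-layer weights and biases used by NCE.
  nce_weights = tf.Variable(
      tf.truncated_normal([vocabulary_size, embedding_size],
                          stddev=1.0 / math.sqrt(embedding_size)))
  nce_biases = tf.Variable(tf.zeros([vocabulary_size]))

  # Keyword arguments keep the binding of labels and inputs explicit, so the
  # call does not depend on their positional order in the signature.
  loss = tf.reduce_mean(
      tf.nn.nce_loss(weights=nce_weights,
                     biases=nce_biases,
                     labels=train_labels,
                     inputs=embed,
                     num_sampled=num_sampled,
                     num_classes=vocabulary_size))

  # Construct the SGD optimizer using a learning rate of 1.0.
  optimizer = tf.train.GradientDescentOptimizer(1.0).minimize(loss)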