aboutsummaryrefslogtreecommitdiffhomepage
path: root/tensorflow/examples/udacity
diff options
context:
space:
mode:
authorGravatar A. Unique TensorFlower <gardener@tensorflow.org>2017-02-01 18:13:33 -0800
committerGravatar TensorFlower Gardener <gardener@tensorflow.org>2017-02-01 18:33:19 -0800
commitd1ba01f81d8fa1d0171ba9ce871599063d5c7eb9 (patch)
treecd28fd2d32712c59f8452ede903cd592e0dc95bd /tensorflow/examples/udacity
parentffc667757c6c328e48d80c14f97e32cf6a9d0f53 (diff)
Merge changes from github.
Change: 146316196
Diffstat (limited to 'tensorflow/examples/udacity')
-rw-r--r--tensorflow/examples/udacity/5_word2vec.ipynb4
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/tensorflow/examples/udacity/5_word2vec.ipynb b/tensorflow/examples/udacity/5_word2vec.ipynb
index ec6413a0a3..9d4243d7ae 100644
--- a/tensorflow/examples/udacity/5_word2vec.ipynb
+++ b/tensorflow/examples/udacity/5_word2vec.ipynb
@@ -442,8 +442,8 @@
" embed = tf.nn.embedding_lookup(embeddings, train_dataset)\n",
" # Compute the softmax loss, using a sample of the negative labels each time.\n",
" loss = tf.reduce_mean(\n",
- " tf.nn.sampled_softmax_loss(softmax_weights, softmax_biases, embed,\n",
- " train_labels, num_sampled, vocabulary_size))\n",
+ " tf.nn.sampled_softmax_loss(weights=softmax_weights, biases=softmax_biases, inputs=embed,\n",
+ " labels=train_labels, num_sampled=num_sampled, num_classes=vocabulary_size))\n",
"\n",
" # Optimizer.\n",
" # Note: The optimizer will optimize the softmax_weights AND the embeddings.\n",