author    Dandelion Mané <dandelion@google.com> 2017-03-10 14:43:23 -0800
committer TensorFlower Gardener <gardener@tensorflow.org> 2017-03-10 15:18:15 -0800
commit    0386a01ad3beb28364599d82199be1c0837b3fa9 (patch)
tree      3a1d2ef947a7bf37286efc0e8ff760e0401ab319 /tensorflow/contrib/legacy_seq2seq
parent    e73ceaebb209a1e577e7240fba41c692c89143d0 (diff)
Merge changes from github.
Change: 149800363
Diffstat (limited to 'tensorflow/contrib/legacy_seq2seq')
-rw-r--r--  tensorflow/contrib/legacy_seq2seq/python/ops/seq2seq.py | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/tensorflow/contrib/legacy_seq2seq/python/ops/seq2seq.py b/tensorflow/contrib/legacy_seq2seq/python/ops/seq2seq.py
index 3d1589f27e..1202f961cc 100644
--- a/tensorflow/contrib/legacy_seq2seq/python/ops/seq2seq.py
+++ b/tensorflow/contrib/legacy_seq2seq/python/ops/seq2seq.py
@@ -1111,7 +1111,7 @@ def sequence_loss(logits,
     average_across_timesteps: If set, divide the returned cost by the total
       label weight.
     average_across_batch: If set, divide the returned cost by the batch size.
-    softmax_loss_function: Function (inputs-batch, labels-batch) -> loss-batch
+    softmax_loss_function: Function (labels-batch, inputs-batch) -> loss-batch
       to be used instead of the standard softmax (the default if this is None).
     name: Optional name for this operation, defaults to "sequence_loss".
@@ -1160,7 +1160,7 @@ def model_with_buckets(encoder_inputs,
     seq2seq: A sequence-to-sequence model function; it takes 2 input that
       agree with encoder_inputs and decoder_inputs, and returns a pair
       consisting of outputs and states (as, e.g., basic_rnn_seq2seq).
-    softmax_loss_function: Function (inputs-batch, labels-batch) -> loss-batch
+    softmax_loss_function: Function (labels-batch, inputs-batch) -> loss-batch
       to be used instead of the standard softmax (the default if this is None).
     per_example_loss: Boolean. If set, the returned loss will be a batch-sized
       tensor of losses for each sequence in the batch. If unset, it will be
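
A note on the fix above: per the corrected docstrings, a custom softmax_loss_function passed to sequence_loss or model_with_buckets takes (labels, logits) in that order, matching the keyword order of tf.nn.sparse_softmax_cross_entropy_with_logits. Below is a minimal sketch assuming TensorFlow 1.x with tf.contrib.legacy_seq2seq; the helper name custom_softmax_loss is hypothetical.

import tensorflow as tf

def custom_softmax_loss(labels, logits):
  # Hypothetical example loss with the corrected argument order.
  # labels: 1D int Tensor of shape [batch_size]; logits: 2D Tensor of
  # shape [batch_size, num_decoder_symbols], per the docstrings above.
  # Returns a 1D per-example loss Tensor of shape [batch_size].
  return tf.nn.sparse_softmax_cross_entropy_with_logits(
      labels=labels, logits=logits)

# Passed via the keyword argument documented above, e.g.:
# loss = tf.contrib.legacy_seq2seq.sequence_loss(
#     logits, targets, weights, softmax_loss_function=custom_softmax_loss)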