author    A. Unique TensorFlower <gardener@tensorflow.org>    2018-04-17 11:05:57 -0700
committer TensorFlower Gardener <gardener@tensorflow.org>    2018-04-17 11:08:27 -0700
commit    c06004be0a6c72c4fdf3905d94740035035b8083 (patch)
tree      6b0633800d263ee85810680e7209b0f8c3f88d2b /tensorflow/contrib/seq2seq
parent    69f392fab1445f18dbd31dcd0e97f1f65eeb68e0 (diff)
Fixes a comment in tf.contrib.seq2seq.monotonic_attention().
PiperOrigin-RevId: 193224285
Diffstat (limited to 'tensorflow/contrib/seq2seq')
-rw-r--r--  tensorflow/contrib/seq2seq/python/ops/attention_wrapper.py  2
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/tensorflow/contrib/seq2seq/python/ops/attention_wrapper.py b/tensorflow/contrib/seq2seq/python/ops/attention_wrapper.py
index f0f143ddfc..8a40a7ab53 100644
--- a/tensorflow/contrib/seq2seq/python/ops/attention_wrapper.py
+++ b/tensorflow/contrib/seq2seq/python/ops/attention_wrapper.py
@@ -654,7 +654,7 @@ def monotonic_attention(p_choose_i, previous_attention, mode):
     shifted_1mp_choose_i = array_ops.concat(
         [array_ops.ones((batch_size, 1)), 1 - p_choose_i[:, :-1]], 1)
     # Compute attention distribution recursively as
-    # q[i] = (1 - p_choose_i[i])*q[i - 1] + previous_attention[i]
+    # q[i] = (1 - p_choose_i[i - 1])*q[i - 1] + previous_attention[i]
     # attention[i] = p_choose_i[i]*q[i]
     attention = p_choose_i*array_ops.transpose(functional_ops.scan(
         # Need to use reshape to remind TF of the shape between loop iterations
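
For readers checking the fix: the concat above pads with a column of ones and drops the last element of p_choose_i, so position i of shifted_1mp_choose_i holds 1 - p_choose_i[i - 1] (with 1 at i = 0), not 1 - p_choose_i[i]; the old comment misstated the index. A minimal NumPy sketch of the corrected recurrence for a single sequence (an illustration only, not the library's code; the helper name and the sample inputs are made up):

import numpy as np

def monotonic_attention_recursive(p_choose, prev_att):
    # Shifted term: 1 at i == 0, otherwise 1 - p_choose[i - 1], mirroring
    # the shifted_1mp_choose_i concat in the diff above.
    shifted = np.concatenate(([1.0], 1.0 - p_choose[:-1]))
    q = np.zeros_like(p_choose)
    for i in range(len(p_choose)):
        # q[i] = (1 - p_choose_i[i - 1])*q[i - 1] + previous_attention[i]
        q[i] = shifted[i] * (q[i - 1] if i > 0 else 0.0) + prev_att[i]
    # attention[i] = p_choose_i[i]*q[i]
    return p_choose * q

p_choose = np.array([0.1, 0.5, 0.9])
prev_att = np.array([1.0, 0.0, 0.0])  # all prior attention on the first input
print(monotonic_attention_recursive(p_choose, prev_att))
# -> approximately [0.1, 0.45, 0.405]; total mass stays <= 1 as required

The TF code reaches the same result without a Python loop by feeding shifted_1mp_choose_i and previous_attention through functional_ops.scan, which threads q[i - 1] between iterations.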