author | 2018-04-01 02:08:53 +0000
---|---
committer | 2018-04-16 20:04:11 +0000
commit | d744b314682d2313bd3e8ffe0b34e022cbeacb7b (patch)
tree | 6d1f9e17ec0f0aac0d302865217dbbffcaaefbe3 /tensorflow/contrib/seq2seq
parent | fe4ab63ab258d67f37844f374db265130ceecf2a (diff)
Fix pylint issue
Signed-off-by: Yong Tang <yong.tang.github@outlook.com>
Diffstat (limited to 'tensorflow/contrib/seq2seq')
-rw-r--r-- | tensorflow/contrib/seq2seq/python/ops/attention_wrapper.py | 3
1 file changed, 2 insertions(+), 1 deletion(-)
diff --git a/tensorflow/contrib/seq2seq/python/ops/attention_wrapper.py b/tensorflow/contrib/seq2seq/python/ops/attention_wrapper.py
index 867e49b565..a0f57417b8 100644
--- a/tensorflow/contrib/seq2seq/python/ops/attention_wrapper.py
+++ b/tensorflow/contrib/seq2seq/python/ops/attention_wrapper.py
@@ -472,7 +472,8 @@ def _bahdanau_score(processed_query, keys, normalize):
     # Scalar used in weight normalization
     g = variable_scope.get_variable(
         "attention_g", dtype=dtype,
-        initializer=init_ops.constant_initializer(math.sqrt((1. / num_units))), shape=())
+        initializer=init_ops.constant_initializer(math.sqrt((1. / num_units))),
+        shape=())
     # Bias added prior to the nonlinearity
     b = variable_scope.get_variable(
         "attention_b", [num_units], dtype=dtype,
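For context only (this is not part of the commit): the re-wrapped line initializes the `attention_g` scalar used for weight normalization in `_bahdanau_score`, and the change is purely a line wrap to satisfy pylint's line-length check. The sketch below is a minimal, self-contained NumPy illustration of the normalized Bahdanau (additive) attention score that `attention_g` and the `attention_b` bias feed into; the function name, shapes, and variable names here are assumptions for illustration, not the TensorFlow API.

```python
# Minimal NumPy sketch of a normalized Bahdanau ("additive") attention score.
# Names and shapes are illustrative assumptions, not the TensorFlow API.
import numpy as np


def bahdanau_score(processed_query, keys, v, g, b):
  """processed_query: [batch, num_units]; keys: [batch, time, num_units]."""
  # Weight-normalize the attention vector v, scaled by the learned scalar g
  # (the role played by the "attention_g" variable in the diff above).
  normed_v = g * v / np.sqrt(np.sum(np.square(v)))
  # Broadcast the query over the time axis, add the bias b, apply tanh,
  # then reduce over the units axis to get one score per (batch, time).
  activation = np.tanh(keys + processed_query[:, np.newaxis, :] + b)
  return np.sum(normed_v * activation, axis=2)  # shape: [batch, time]


batch, time, num_units = 2, 5, 8
rng = np.random.default_rng(0)
scores = bahdanau_score(
    processed_query=rng.standard_normal((batch, num_units)),
    keys=rng.standard_normal((batch, time, num_units)),
    v=rng.standard_normal(num_units),
    g=np.sqrt(1.0 / num_units),  # mirrors the constant-initializer value in the diff
    b=np.zeros(num_units))
print(scores.shape)  # (2, 5)
```

The `g=np.sqrt(1.0 / num_units)` argument mirrors the initializer value on the line the diff re-wraps; the commit does not alter this computation in any way.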