| author | Michael Case <mikecase@google.com> | 2018-04-18 18:04:44 -0700 |
| committer | TensorFlower Gardener <gardener@tensorflow.org> | 2018-04-18 18:07:30 -0700 |
| commit | f1fb08bbb70047af0c86cc440ccc0581e64fd85f (patch) | |
| tree | 27937eda9bb5f6c390a06adcabd792725df8a86f /tensorflow/contrib/seq2seq | |
| parent | d4976f754009d084514f4308d3bfc7dc3a106e29 (diff) | |
Various lint fixes to TensorFlow detected after GitHub merge.
PiperOrigin-RevId: 193448139
Diffstat (limited to 'tensorflow/contrib/seq2seq')
| -rw-r--r-- | tensorflow/contrib/seq2seq/python/kernel_tests/attention_wrapper_test.py | 12 |
1 file changed, 6 insertions, 6 deletions
diff --git a/tensorflow/contrib/seq2seq/python/kernel_tests/attention_wrapper_test.py b/tensorflow/contrib/seq2seq/python/kernel_tests/attention_wrapper_test.py
index d508cf3f9d..0232103c41 100644
--- a/tensorflow/contrib/seq2seq/python/kernel_tests/attention_wrapper_test.py
+++ b/tensorflow/contrib/seq2seq/python/kernel_tests/attention_wrapper_test.py
@@ -355,11 +355,11 @@ class AttentionWrapperTest(test.TestCase):
 
   def testLuongScaledDType(self):
     # Test case for GitHub issue 18099
-    for dtype in [np.float16, np.float32, np.float64]:
+    for dt in [np.float16, np.float32, np.float64]:
       num_units = 128
-      encoder_outputs = array_ops.placeholder(dtype, shape=[64, None, 256])
+      encoder_outputs = array_ops.placeholder(dt, shape=[64, None, 256])
       encoder_sequence_length = array_ops.placeholder(dtypes.int32, shape=[64])
-      decoder_inputs = array_ops.placeholder(dtype, shape=[64, None, 128])
+      decoder_inputs = array_ops.placeholder(dt, shape=[64, None, 128])
       decoder_sequence_length = array_ops.placeholder(dtypes.int32, shape=[64])
       batch_size = 64
       attention_mechanism = wrapper.LuongAttention(
@@ -367,7 +367,7 @@ class AttentionWrapperTest(test.TestCase):
           memory=encoder_outputs,
           memory_sequence_length=encoder_sequence_length,
           scale=True,
-          dtype=dtype,
+          dtype=dt,
       )
       cell = rnn_cell.LSTMCell(num_units)
       cell = wrapper.AttentionWrapper(cell, attention_mechanism)
@@ -378,12 +378,12 @@ class AttentionWrapperTest(test.TestCase):
           cell=cell,
           helper=helper,
           initial_state=cell.zero_state(
-              dtype=dtype, batch_size=batch_size))
+              dtype=dt, batch_size=batch_size))
 
       final_outputs, final_state, _ = decoder.dynamic_decode(my_decoder)
       self.assertTrue(
           isinstance(final_outputs, basic_decoder.BasicDecoderOutput))
-      self.assertEqual(final_outputs.rnn_output.dtype, dtype)
+      self.assertEqual(final_outputs.rnn_output.dtype, dt)
       self.assertTrue(
           isinstance(final_state, wrapper.AttentionWrapperState))
       self.assertTrue(
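
For context, the substantive change above is purely a rename: the loop variable `dtype` shared its name with the `dtype=` keyword arguments it was passed to, the kind of name reuse that lint checks commonly flag as confusing. The sketch below is a hypothetical illustration of that pattern, not code from this commit; `make_zeros` is an invented stand-in for `dtype=`-taking APIs such as `array_ops.placeholder`.

```python
import numpy as np


def make_zeros(shape, dtype=np.float32):
  # Hypothetical helper (not from the commit), standing in for any API
  # that accepts a `dtype=` keyword argument.
  return np.zeros(shape, dtype=dtype)


# Before the fix: the loop variable shares its name with the keyword
# argument, so `dtype=dtype` binds two different roles to one identifier.
for dtype in [np.float16, np.float32, np.float64]:
  arr = make_zeros((2, 3), dtype=dtype)

# After the fix: renaming the loop variable keeps the two roles distinct,
# which is all the diff above does throughout testLuongScaledDType.
for dt in [np.float16, np.float32, np.float64]:
  arr = make_zeros((2, 3), dtype=dt)
  assert arr.dtype == dt  # dtype still propagates exactly as before
```

The test's behavior is unchanged; it still asserts that the requested dtype propagates through `LuongAttention` and the decoder output, per GitHub issue 18099.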