Diffstat (limited to 'tensorflow/python/kernel_tests/rnn_test.py')
-rw-r--r--  tensorflow/python/kernel_tests/rnn_test.py  33
 1 file changed, 21 insertions(+), 12 deletions(-)
diff --git a/tensorflow/python/kernel_tests/rnn_test.py b/tensorflow/python/kernel_tests/rnn_test.py
index 7d040c221e..be59ac08c2 100644
--- a/tensorflow/python/kernel_tests/rnn_test.py
+++ b/tensorflow/python/kernel_tests/rnn_test.py
@@ -782,11 +782,11 @@ class BidirectionalRNNTest(tf.test.TestCase):
tf.float32,
shape=(batch_size, input_size) if use_shape else (None, input_size))
]
- outputs = tf.nn.bidirectional_rnn(cell_fw,
- cell_bw,
- inputs,
- dtype=tf.float32,
- sequence_length=sequence_length)
+ outputs, state_fw, state_bw = tf.nn.bidirectional_rnn(cell_fw,
+ cell_bw,
+ inputs,
+ dtype=tf.float32,
+ sequence_length=sequence_length)
self.assertEqual(len(outputs), len(inputs))
for out in outputs:
self.assertEqual(
@@ -794,17 +794,19 @@ class BidirectionalRNNTest(tf.test.TestCase):
[batch_size if use_shape else None, 2 * num_units])
input_value = np.random.randn(batch_size, input_size)
-    return input_value, inputs, outputs, sequence_length
+    outputs = tf.pack(outputs)
+    return input_value, inputs, outputs, state_fw, state_bw, sequence_length
def _testBidirectionalRNN(self, use_gpu, use_shape):
with self.test_session(use_gpu=use_gpu, graph=tf.Graph()) as sess:
- input_value, inputs, outputs, sequence_length = (
+ input_value, inputs, outputs, state_fw, state_bw, sequence_length = (
self._createBidirectionalRNN(use_gpu, use_shape, True))
tf.initialize_all_variables().run()
# Run with pre-specified sequence length of 2, 3
- out = sess.run(outputs, feed_dict={inputs[0]: input_value,
- sequence_length: [2, 3]})
+ out, s_fw, s_bw = sess.run([outputs, state_fw, state_bw],
+ feed_dict={inputs[0]: input_value,
+ sequence_length: [2, 3]})
# Since the forward and backward LSTM cells were initialized with the
# same parameters, the forward and backward output has to be the same,
@@ -836,13 +838,17 @@ class BidirectionalRNNTest(tf.test.TestCase):
self.assertEqual(out[2][1][0], out[0][1][3])
self.assertEqual(out[2][1][1], out[0][1][4])
self.assertEqual(out[2][1][2], out[0][1][5])
+    # By the reasoning above, the forward and backward final states should be
+    # exactly the same.
+ self.assertAllClose(s_fw, s_bw)
def _testBidirectionalRNNWithoutSequenceLength(self, use_gpu, use_shape):
with self.test_session(use_gpu=use_gpu, graph=tf.Graph()) as sess:
- input_value, inputs, outputs, _ = self._createBidirectionalRNN(
- use_gpu, use_shape, False)
+ input_value, inputs, outputs, state_fw, state_bw, _ = self._createBidirectionalRNN(
+ use_gpu, use_shape, False)
tf.initialize_all_variables().run()
- out = sess.run(outputs, feed_dict={inputs[0]: input_value})
+ out, s_fw, s_bw = sess.run([outputs, state_fw, state_bw],
+ feed_dict={inputs[0]: input_value})
# Since the forward and backward LSTM cells were initialized with the
# same parameters, the forward and backward output has to be the same,
@@ -861,6 +867,9 @@ class BidirectionalRNNTest(tf.test.TestCase):
self.assertEqual(out[i][1][0], out[8 - 1 - i][1][3])
self.assertEqual(out[i][1][1], out[8 - 1 - i][1][4])
self.assertEqual(out[i][1][2], out[8 - 1 - i][1][5])
+    # By the reasoning above, the forward and backward final states should be
+    # exactly the same.
+ self.assertAllClose(s_fw, s_bw)
def testBidirectionalRNN(self):
self._testBidirectionalRNN(use_gpu=False, use_shape=False)
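
For context, here is a minimal, self-contained sketch of the call pattern this patch tests: tf.nn.bidirectional_rnn now returns the per-step outputs together with the final forward and backward states, rather than the outputs alone. The sketch assumes the legacy TF 0.x API visible in the diff (tf.nn.rnn_cell.LSTMCell, tf.pack, tf.initialize_all_variables); the cell size, batch shape, and sequence lengths are illustrative values, not the test's own.

import numpy as np
import tensorflow as tf

batch_size, input_size, num_units, max_length = 2, 5, 3, 8

cell_fw = tf.nn.rnn_cell.LSTMCell(num_units, input_size)
cell_bw = tf.nn.rnn_cell.LSTMCell(num_units, input_size)

# One placeholder per time step, as the list-of-tensors API expects.
inputs = [tf.placeholder(tf.float32, shape=(batch_size, input_size))
          for _ in range(max_length)]
sequence_length = tf.placeholder(tf.int64)

# New signature: per-step outputs plus the final forward/backward states.
outputs, state_fw, state_bw = tf.nn.bidirectional_rnn(
    cell_fw, cell_bw, inputs, dtype=tf.float32,
    sequence_length=sequence_length)

# Each per-step output is the depth-concatenation of the forward and
# backward outputs (last dimension 2 * num_units); tf.pack stacks the
# list into one [max_length, batch_size, 2 * num_units] tensor, as the
# updated _createBidirectionalRNN does.
packed = tf.pack(outputs)

with tf.Session() as sess:
    sess.run(tf.initialize_all_variables())
    feed = {ph: np.random.randn(batch_size, input_size) for ph in inputs}
    feed[sequence_length] = [max_length] * batch_size
    out, s_fw, s_bw = sess.run([packed, state_fw, state_bw],
                               feed_dict=feed)

The test itself can assert assertAllClose(s_fw, s_bw) only because its two cells are initialized with identical parameters and the same placeholder is reused at every time step (which is why only inputs[0] appears in its feed_dict), so the reversed sequence seen by the backward cell is identical to the forward one.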