diff options
author | A. Unique TensorFlower <gardener@tensorflow.org> | 2017-05-26 15:43:13 -0700 |
---|---|---|
committer | TensorFlower Gardener <gardener@tensorflow.org> | 2017-05-26 15:46:41 -0700 |
commit | 7a2375a06df72e687a7e909d97db94849c9c71c5 (patch) | |
tree | dc0d67a9d3a68f1719ff333f56fe26e344e99028 /tensorflow/python/kernel_tests/transpose_op_test.py | |
parent | d4756a5cf768408c6e94fc79dbbe0de5d8e00fb9 (diff) |
Catch more variants of transpose that are simply reshapes.
In particular, transpose and reshape are identical as long as the non-singleton dimensions remain in ascending order in the permutation.
PiperOrigin-RevId: 157272046
Diffstat (limited to 'tensorflow/python/kernel_tests/transpose_op_test.py')
-rw-r--r-- | tensorflow/python/kernel_tests/transpose_op_test.py | 16 |
1 file changed, 15 insertions, 1 deletion
diff --git a/tensorflow/python/kernel_tests/transpose_op_test.py b/tensorflow/python/kernel_tests/transpose_op_test.py index 7b112a6a17..570fa79944 100644 --- a/tensorflow/python/kernel_tests/transpose_op_test.py +++ b/tensorflow/python/kernel_tests/transpose_op_test.py @@ -92,7 +92,7 @@ class TransposeTest(test.TestCase): # generate all permutations of [0, 1, ... n-1] in random order. all_perm = np.random.permutation( [p for p in itertools.permutations(range(n))]).astype(np.int32) - for p in all_perm[0:2]: + for p in all_perm[:2]: self._compareCpu(x, p) if use_gpu: self._compareGpu(x, p) @@ -310,6 +310,20 @@ class TransposeTest(test.TestCase): x_tf = array_ops.transpose(x_np).eval() self.assertAllEqual(x_tf, [[1, 4], [2, 5], [3, 6]]) + def testSingletonDims(self): + # A singleton dimension is a dimension i with shape[i] == 1. Such dimensions + # can be collapsed and expanded using reshape without changing the + # underlying data storage. If all non-singleton dimensions remain in + # ascending order, the shuffled singletons will be transposed by a reshape, + # saving a memory allocation & copy. Since this gets a special code-path in + # transpose_op.cc, we test that the codepath is exercised and the results + # are as expected; we do not test that we save the memory allocation and + # copy here. + for shape in [[2, 1, 2], [2, 1, 2, 1, 1, 2], [1, 2, 2, 1, 1, 1], + [1, 1, 1, 2, 2, 2], [2, 2, 1, 1, 1]]: + self._compare_cpu_gpu( + np.arange(np.prod(shape)).reshape(shape).astype(np.float32)) + def testTransposeShapes(self): self.assertEqual( [], |