diff options
author | Peter Hawkins <phawkins@google.com> | 2018-09-14 15:16:09 -0700 |
---|---|---|
committer | TensorFlower Gardener <gardener@tensorflow.org> | 2018-09-14 15:19:39 -0700 |
commit | ceb72bcdbf90fd23204b26f8e43afbd3c0a46563 (patch) | |
tree | d6ba3e5730bfea6b4a42386c30d27a0f21aacb21 /tensorflow/compiler/tf2xla | |
parent | 84d8423bececc26f127a1c40c00588463d8d1650 (diff) |
[TF:XLA] Remove special base case from BatchDot that has been redundant ever since xla::DotGeneral was added.
PiperOrigin-RevId: 213052269
Diffstat (limited to 'tensorflow/compiler/tf2xla')
-rw-r--r-- | tensorflow/compiler/tf2xla/lib/batch_dot.cc | 10 |
1 file changed, 0 insertions, 10 deletions
diff --git a/tensorflow/compiler/tf2xla/lib/batch_dot.cc b/tensorflow/compiler/tf2xla/lib/batch_dot.cc
index 64f2d781a6..5400e8834c 100644
--- a/tensorflow/compiler/tf2xla/lib/batch_dot.cc
+++ b/tensorflow/compiler/tf2xla/lib/batch_dot.cc
@@ -100,16 +100,6 @@ xla::XlaOp BatchDot(xla::XlaOp x, xla::XlaOp y, bool transpose_x,
   precision_proto.add_operand_precision(precision);
   precision_proto.add_operand_precision(precision);
 
-  // If there are no batch dimensions, use a regular Dot.
-  // TODO(b/69062148) Remove this code when Dot emitters can be passed
-  // dimensions to transpose directly (i.e. without requiring a Transpose
-  // HLO).
-  if (batch_dimension_numbers.empty()) {
-    auto lhs = transpose_x ? xla::Transpose(x, {1, 0}) : x;
-    auto rhs = transpose_y ? xla::Transpose(y, {1, 0}) : y;
-    return xla::Dot(lhs, rhs, &precision_proto);
-  }
-
   xla::DotDimensionNumbers dot_dnums;
   dot_dnums.add_lhs_contracting_dimensions(x_inner_dim);
   dot_dnums.add_rhs_contracting_dimensions(y_inner_dim);