about summary refs log tree commit diff homepage
path: root/tensorflow/compiler
diff options
context:
space:
mode:
authorGravatar Bjarke Hammersholt Roune <broune@google.com>2017-01-23 21:27:24 -0800
committerGravatar TensorFlower Gardener <gardener@tensorflow.org>2017-01-23 21:42:46 -0800
commit863bab34202f650282dfe00aaa082a4796fdd839 (patch)
tree72e2ca9c0baaaddc8c19dbb3662cda31f62e59b5 /tensorflow/compiler
parenta544628dc9fa3607c2a398ac72f5e859fd151e94 (diff)
Improvements to HLO text format printing.
Change: 145374835
Diffstat (limited to 'tensorflow/compiler')
-rw-r--r--	tensorflow/compiler/xla/service/hlo_instruction.cc | 50
-rw-r--r--	tensorflow/compiler/xla/service/hlo_instruction.h | 2
-rw-r--r--	tensorflow/compiler/xla/shape_util.cc | 5
-rw-r--r--	tensorflow/compiler/xla/shape_util_test.cc | 11
4 files changed, 34 insertions, 34 deletions
diff --git a/tensorflow/compiler/xla/service/hlo_instruction.cc b/tensorflow/compiler/xla/service/hlo_instruction.cc
index 46af52017e..e31c13ebd5 100644
--- a/tensorflow/compiler/xla/service/hlo_instruction.cc
+++ b/tensorflow/compiler/xla/service/hlo_instruction.cc
@@ -204,6 +204,12 @@ HloInstruction::CreateGetTupleElement(const Shape& shape,
const ConvolutionDimensionNumbers& dimension_numbers) {
auto instruction =
WrapUnique(new HloInstruction(HloOpcode::kConvolution, shape));
+ if (window_util::HasBaseDilation(window)) {
+ instruction->set_name(instruction->name() + "-base-dilated");
+ }
+ if (window_util::HasWindowDilation(window)) {
+ instruction->set_name(instruction->name() + "-window-dilated");
+ }
instruction->AppendOperand(lhs);
instruction->AppendOperand(rhs);
instruction->window_ = MakeUnique<Window>(window);
@@ -1294,10 +1300,10 @@ string HloInstruction::SignatureString() const {
ShapeUtil::HumanString(shape()));
}
-string HloInstruction::ToString() const {
+string HloInstruction::ToString(bool compact_operands) const {
string operands;
if (opcode() == HloOpcode::kConstant) {
- // For constants, emit the actual value in place of an empty operand list.
+ // For constants, show the actual value in place of an empty operand list.
if (ShapeUtil::ElementsIn(shape()) <= 10) {
// LiteralUtil::ToString emits multidimensional arrays over multiple
// lines. Compact this into one line by stripping out white space.
@@ -1313,32 +1319,28 @@ string HloInstruction::ToString() const {
first = false;
}
} else {
- // Don't try emitting large constants.
+ // Do not show large constants.
operands = "{...}";
}
} else {
+ tensorflow::gtl::ArraySlice<HloInstruction*> slice(operands_);
+ const int64 kMaxOperandsToShowIfCompact = 4;
+ if (compact_operands && slice.size() > kMaxOperandsToShowIfCompact) {
+ slice.remove_suffix(slice.size() - kMaxOperandsToShowIfCompact);
+ }
operands = tensorflow::str_util::Join(
- operands_, ", ", [](string* out, HloInstruction* operand) {
- tensorflow::strings::StrAppend(
- out, ShapeUtil::HumanStringWithLayout(operand->shape()), " ",
- operand->name());
+ slice, ", ", [&](string* out, HloInstruction* operand) {
+ *out += ShapeUtil::HumanStringWithLayout(operand->shape());
+ if (!compact_operands) {
+ tensorflow::strings::StrAppend(out, " ", operand->name());
+ }
});
- }
- string extra;
- if (LayoutUtil::HasLayout(shape())) {
- if (ShapeUtil::IsTuple(shape())) {
- // Tuple shapes are recursive, so the layout field of the top-level shape
- // does not include all layout information. In this case, print out the
- // entire shape with layout.
- tensorflow::strings::StrAppend(&extra, ", layout=",
- ShapeUtil::HumanStringWithLayout(shape()));
- } else {
- tensorflow::strings::StrAppend(
- &extra, tensorflow::strings::Printf(
- ", layout=%s",
- LayoutUtil::HumanString(shape().layout()).c_str()));
+ const int64 remaining = operands_.size() - slice.size();
+ if (slice.size() != operands_.size()) {
+ tensorflow::strings::StrAppend(&operands, ", ...(+", remaining, ")");
}
}
+ string extra;
if (CanHaveDimensionsField()) {
tensorflow::strings::StrAppend(
&extra, ", dimensions={",
@@ -1384,9 +1386,9 @@ string HloInstruction::ToString() const {
tensorflow::strings::StrAppend(&extra, ", index=", tuple_index());
}
return tensorflow::strings::Printf(
- "%s %s = %s(%s)%s", ShapeUtil::HumanString(shape()).c_str(),
- name().c_str(), HloOpcodeString(opcode()).c_str(), operands.c_str(),
- extra.c_str());
+ "%s = %s %s(%s)%s", name().c_str(),
+ ShapeUtil::HumanStringWithLayout(shape()).c_str(),
+ HloOpcodeString(opcode()).c_str(), operands.c_str(), extra.c_str());
}
string HloInstruction::ToShortString() const {
diff --git a/tensorflow/compiler/xla/service/hlo_instruction.h b/tensorflow/compiler/xla/service/hlo_instruction.h
index 07b3fb386d..ba1192cf7e 100644
--- a/tensorflow/compiler/xla/service/hlo_instruction.h
+++ b/tensorflow/compiler/xla/service/hlo_instruction.h
@@ -439,7 +439,7 @@ class HloInstruction {
string SignatureString() const;
// Returns a debugging string that represents this instruction.
- string ToString() const;
+ string ToString(bool compact_operands = false) const;
// As ToString, but returns a shorter string.
string ToShortString() const;
diff --git a/tensorflow/compiler/xla/shape_util.cc b/tensorflow/compiler/xla/shape_util.cc
index 6626fe5af8..6165f0d15b 100644
--- a/tensorflow/compiler/xla/shape_util.cc
+++ b/tensorflow/compiler/xla/shape_util.cc
@@ -365,10 +365,9 @@ bool CompareShapes(const Shape& lhs, const Shape& rhs, bool compare_layouts) {
string layout;
if (!IsScalar(shape) && !IsOpaque(shape)) {
if (LayoutUtil::HasLayout(shape)) {
- layout = tensorflow::strings::StrCat(
- " ", LayoutUtil::HumanString(shape.layout()));
+ layout = LayoutUtil::HumanString(shape.layout());
} else {
- layout = " (no layout)";
+ layout = "{no layout}";
}
}
return tensorflow::strings::StrCat(
diff --git a/tensorflow/compiler/xla/shape_util_test.cc b/tensorflow/compiler/xla/shape_util_test.cc
index fb2f8fb284..9e6b243611 100644
--- a/tensorflow/compiler/xla/shape_util_test.cc
+++ b/tensorflow/compiler/xla/shape_util_test.cc
@@ -338,13 +338,12 @@ TEST(ShapeUtilTest, HumanString) {
EXPECT_EQ("opaque[]", ShapeUtil::HumanStringWithLayout(opaque));
EXPECT_EQ("f32[]", ShapeUtil::HumanStringWithLayout(scalar));
- EXPECT_EQ("u32[1,2] {1,0}", ShapeUtil::HumanStringWithLayout(matrix));
- EXPECT_EQ("s32[3,4] {0,1}", ShapeUtil::HumanStringWithLayout(matrix2));
- EXPECT_EQ("(opaque[], f32[], u32[1,2] {1,0}, s32[3,4] {0,1})",
+ EXPECT_EQ("u32[1,2]{1,0}", ShapeUtil::HumanStringWithLayout(matrix));
+ EXPECT_EQ("s32[3,4]{0,1}", ShapeUtil::HumanStringWithLayout(matrix2));
+ EXPECT_EQ("(opaque[], f32[], u32[1,2]{1,0}, s32[3,4]{0,1})",
ShapeUtil::HumanStringWithLayout(tuple));
- EXPECT_EQ(
- "((opaque[], f32[], u32[1,2] {1,0}, s32[3,4] {0,1}), u32[1,2] {1,0})",
- ShapeUtil::HumanStringWithLayout(nested_tuple));
+ EXPECT_EQ("((opaque[], f32[], u32[1,2]{1,0}, s32[3,4]{0,1}), u32[1,2]{1,0})",
+ ShapeUtil::HumanStringWithLayout(nested_tuple));
ProgramShape prog = ShapeUtil::MakeProgramShape(
{opaque, scalar, matrix, matrix2, tuple, nested_tuple}, nested_tuple);