about summary refs log tree commit diff homepage
diff options
context:
space:
mode:
authorGravatar Justin Lebar <jlebar@google.com>2018-08-28 20:54:46 -0700
committerGravatar TensorFlower Gardener <gardener@tensorflow.org>2018-08-28 20:58:38 -0700
commitc010fec6abbe2039febe285ea35de52923eb6d75 (patch)
tree0b205f2140e73496981cafa57606f377802157f8
parent30a6b56176a9738bbe0a40b24f885503f112ae9f (diff)
[XLA] Make explicit that negative interior_padding is not allowed.
Also add testcases that negative convolution dilation is rejected.

PiperOrigin-RevId: 210657224
-rw-r--r--tensorflow/compiler/xla/service/hlo_verifier_test.cc79
-rw-r--r--tensorflow/compiler/xla/service/shape_inference.cc16
-rw-r--r--tensorflow/compiler/xla/xla_data.proto14
3 files changed, 99 insertions, 10 deletions
diff --git a/tensorflow/compiler/xla/service/hlo_verifier_test.cc b/tensorflow/compiler/xla/service/hlo_verifier_test.cc
index 70b741353d..fc1f81bdd2 100644
--- a/tensorflow/compiler/xla/service/hlo_verifier_test.cc
+++ b/tensorflow/compiler/xla/service/hlo_verifier_test.cc
@@ -277,5 +277,84 @@ TEST_F(HloVerifierTest, RngElementTypeNotSupported) {
EXPECT_THAT(status.error_message(), HasSubstr("Element type not supported"));
}
+TEST_F(HloVerifierTest, NegativeInteriorPaddingNotAllowed) {
+ // This testcase can't be written using textual HLO, because it doesn't parse
+ // negative interior padding. That's probably a feature. :)
+ HloComputation::Builder builder(TestName());
+ HloInstruction* param =
+ builder.AddInstruction(HloInstruction::CreateParameter(
+ 0, ShapeUtil::MakeShape(F32, {100}), "param"));
+ PaddingConfig padding_config;
+ padding_config.add_dimensions()->set_interior_padding(-1);
+ builder.AddInstruction(HloInstruction::CreatePad(
+ ShapeUtil::MakeShape(F32, {100}), param,
+ builder.AddInstruction(HloInstruction::CreateConstant(
+ LiteralUtil::Zero(F32).CloneToUnique())),
+ padding_config));
+
+ auto module = CreateNewModule();
+ module->AddEntryComputation(builder.Build());
+
+ auto status = verifier().Run(module.get()).status();
+ ASSERT_FALSE(status.ok());
+ EXPECT_THAT(status.error_message(),
+ HasSubstr("Interior padding cannot be negative"));
+}
+
+TEST_F(HloVerifierTest, PadNegativeInteriorDilationNotAllowed) {
+ // This testcase can't be written using textual HLO, because it doesn't parse
+ // negative interior padding. That's probably a feature. :)
+ HloComputation::Builder builder(TestName());
+ HloInstruction* param =
+ builder.AddInstruction(HloInstruction::CreateParameter(
+ 0, ShapeUtil::MakeShape(F32, {100}), "param"));
+ PaddingConfig padding_config;
+ padding_config.add_dimensions()->set_interior_padding(-1);
+ builder.AddInstruction(HloInstruction::CreatePad(
+ ShapeUtil::MakeShape(F32, {100}), param,
+ builder.AddInstruction(HloInstruction::CreateConstant(
+ LiteralUtil::Zero(F32).CloneToUnique())),
+ padding_config));
+
+ auto module = CreateNewModule();
+ module->AddEntryComputation(builder.Build());
+
+ EXPECT_THAT(verifier().Run(module.get()).status().error_message(),
+ HasSubstr("Interior padding cannot be negative"));
+}
+
+// Simple module containing a convolution as the root.
+static const char* const kConvHloString = R"(
+HloModule module
+ENTRY entry_computation {
+ param0 = f16[128,128,56,56] parameter(0)
+ param1 = f16[3,3,128,128] parameter(1)
+ zero_f16 = f16[] constant(0)
+ ROOT conv = f16[128,128,28,28] convolution(param0, param1),
+ window={size=3x3 stride=2x2}, dim_labels=bf01_01io->bf01
+})";
+
+TEST_F(HloVerifierTest, ConvNegativeWindowDilationNotAllowed) {
+ TF_ASSERT_OK_AND_ASSIGN(auto module, ParseHloString(kConvHloString));
+ auto* conv = module->entry_computation()->root_instruction();
+ Window w = conv->window();
+ w.mutable_dimensions(0)->set_window_dilation(-1);
+ conv->set_window(w);
+
+ EXPECT_THAT(verifier().Run(module.get()).status().error_message(),
+ HasSubstr("non-positive window dilation factor"));
+}
+
+TEST_F(HloVerifierTest, ConvNegativeBaseDilationNotAllowed) {
+ TF_ASSERT_OK_AND_ASSIGN(auto module, ParseHloString(kConvHloString));
+ auto* conv = module->entry_computation()->root_instruction();
+ Window w = conv->window();
+ w.mutable_dimensions(0)->set_base_dilation(-1);
+ conv->set_window(w);
+
+ EXPECT_THAT(verifier().Run(module.get()).status().error_message(),
+ HasSubstr("non-positive base area dilation factor"));
+}
+
} // namespace
} // namespace xla
diff --git a/tensorflow/compiler/xla/service/shape_inference.cc b/tensorflow/compiler/xla/service/shape_inference.cc
index a04af8b0aa..f5217c5a11 100644
--- a/tensorflow/compiler/xla/service/shape_inference.cc
+++ b/tensorflow/compiler/xla/service/shape_inference.cc
@@ -505,13 +505,21 @@ StatusOr<Shape> InferWindowOutputShape(const Shape& base_shape,
return InvalidArgument(
"The element types of the operands to Pad do not match.");
}
+ if (absl::c_any_of(padding_config.dimensions(),
+ [](const PaddingConfig::PaddingConfigDimension& p) {
+ return p.interior_padding() < 0;
+ })) {
+ return InvalidArgument("Interior padding cannot be negative: %s",
+ padding_config.ShortDebugString());
+ }
+
std::vector<int64> dimensions(ShapeUtil::Rank(operand_shape));
for (int64 i = 0; i < operand_shape.dimensions_size(); ++i) {
- dimensions[i] = operand_shape.dimensions(i) +
- padding_config.dimensions(i).edge_padding_low() +
- padding_config.dimensions(i).edge_padding_high() +
+ const auto& p = padding_config.dimensions(i);
+ dimensions[i] = operand_shape.dimensions(i) + p.edge_padding_low() +
+ p.edge_padding_high() +
std::max<int64>(operand_shape.dimensions(i) - 1, 0LL) *
- padding_config.dimensions(i).interior_padding();
+ p.interior_padding();
}
return ShapeUtil::MakeShape(
ShapeUtil::HigherPrecisionElementType(operand_shape, padding_value_shape),
diff --git a/tensorflow/compiler/xla/xla_data.proto b/tensorflow/compiler/xla/xla_data.proto
index aaba5aa92e..8e43f275e1 100644
--- a/tensorflow/compiler/xla/xla_data.proto
+++ b/tensorflow/compiler/xla/xla_data.proto
@@ -105,13 +105,14 @@ enum PaddingValue {
message PaddingConfig {
// Describes the padding configuration for a dimension.
message PaddingConfigDimension {
- // Padding amount on the low-end (next to the index 0).
+ // Padding amount on the low-end (next to the index 0). May be negative.
int64 edge_padding_low = 1;
- // Padding amount on the high-end (next to the highest index).
+ // Padding amount on the high-end (next to the highest index). May be
+ // negative.
int64 edge_padding_high = 2;
- // Padding amount between the elements.
+ // Padding amount between the elements. May not be negative.
int64 interior_padding = 3;
}
@@ -393,13 +394,14 @@ message WindowDimension {
// Dilation factor of the sliding window in this dimension. A dilation factor
// of 1 means no dilation. window_dilation - 1 no-op entries ("holes") are
- // implicitly placed between each kernel element. See documentation for
- // convolution.
+ // implicitly placed between each kernel element. This value may not be less
+ // than 1. See documentation for convolution.
int64 window_dilation = 5;
// Dilation factor of the base area in this dimension. A dilation factor of 1
// means no dilation. base_dilation - 1 no-op entries ("holes") are implicitly
- // placed between each base area element. See documentation for convolution.
+ // placed between each base area element. This value may not be less than 1.
+ // See documentation for convolution.
int64 base_dilation = 6;
// Window reversal means that this dimension was logically reversed before the