author     Justin Lebar <jlebar@google.com>                  2018-08-28 20:54:46 -0700
committer  TensorFlower Gardener <gardener@tensorflow.org>   2018-08-28 20:58:38 -0700
commit     c010fec6abbe2039febe285ea35de52923eb6d75 (patch)
tree       0b205f2140e73496981cafa57606f377802157f8 /tensorflow/compiler/xla/service/shape_inference.cc
parent     30a6b56176a9738bbe0a40b24f885503f112ae9f (diff)
[XLA] Make explicit that negative interior_padding is not allowed.
Also add test cases checking that negative convolution dilation is rejected.

PiperOrigin-RevId: 210657224
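
For context, the per-dimension output extent that the Pad hunk below computes is: out = in + edge_padding_low + edge_padding_high + max(in - 1, 0) * interior_padding. A minimal standalone sketch of that rule (the PaddedDim helper is illustrative, not an XLA API), showing why a negative interior_padding has no sensible meaning:

#include <algorithm>
#include <cstdint>
#include <iostream>

// Output extent of one dimension after Pad, per the formula in the hunk below:
//   out = in + edge_padding_low + edge_padding_high + max(in - 1, 0) * interior_padding
int64_t PaddedDim(int64_t in, int64_t edge_low, int64_t edge_high,
                  int64_t interior) {
  return in + edge_low + edge_high + std::max<int64_t>(in - 1, 0) * interior;
}

int main() {
  // in = 4, edge padding 1/1, interior padding 2 between elements:
  // 4 + 1 + 1 + 3 * 2 = 12.
  std::cout << PaddedDim(4, 1, 1, 2) << "\n";  // prints 12
  // interior_padding = -1 would claim to remove elements *between* existing
  // ones, which has no defined semantics; the patch now rejects it with
  // InvalidArgument instead of silently computing 4 + 1 + 1 - 3 = 3.
  return 0;
}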
Diffstat (limited to 'tensorflow/compiler/xla/service/shape_inference.cc')
-rw-r--r--  tensorflow/compiler/xla/service/shape_inference.cc  16
1 file changed, 12 insertions(+), 4 deletions(-)
diff --git a/tensorflow/compiler/xla/service/shape_inference.cc b/tensorflow/compiler/xla/service/shape_inference.cc
index a04af8b0aa..f5217c5a11 100644
--- a/tensorflow/compiler/xla/service/shape_inference.cc
+++ b/tensorflow/compiler/xla/service/shape_inference.cc
@@ -505,13 +505,21 @@ StatusOr<Shape> InferWindowOutputShape(const Shape& base_shape,
return InvalidArgument(
"The element types of the operands to Pad do not match.");
}
+ if (absl::c_any_of(padding_config.dimensions(),
+ [](const PaddingConfig::PaddingConfigDimension& p) {
+ return p.interior_padding() < 0;
+ })) {
+ return InvalidArgument("Interior padding cannot be negative: %s",
+ padding_config.ShortDebugString());
+ }
+
std::vector<int64> dimensions(ShapeUtil::Rank(operand_shape));
for (int64 i = 0; i < operand_shape.dimensions_size(); ++i) {
- dimensions[i] = operand_shape.dimensions(i) +
- padding_config.dimensions(i).edge_padding_low() +
- padding_config.dimensions(i).edge_padding_high() +
+ const auto& p = padding_config.dimensions(i);
+ dimensions[i] = operand_shape.dimensions(i) + p.edge_padding_low() +
+ p.edge_padding_high() +
std::max<int64>(operand_shape.dimensions(i) - 1, 0LL) *
- padding_config.dimensions(i).interior_padding();
+ p.interior_padding();
}
return ShapeUtil::MakeShape(
ShapeUtil::HigherPrecisionElementType(operand_shape, padding_value_shape),
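
Note that the new check only constrains interior_padding; negative edge_padding_low/edge_padding_high are left unchecked by this path. Below is a self-contained sketch of the same up-front validation pattern, using an illustrative PaddingDim struct and std::any_of in place of XLA's PaddingConfig proto and absl::c_any_of; these names are stand-ins, not XLA types:

#include <algorithm>
#include <cstdint>
#include <stdexcept>
#include <vector>

// Illustrative stand-in for PaddingConfig::PaddingConfigDimension.
struct PaddingDim {
  int64_t edge_padding_low = 0;
  int64_t edge_padding_high = 0;
  int64_t interior_padding = 0;
};

// Mirrors the patch's early check: reject any negative interior_padding
// before the per-dimension size computation runs. Edge padding is not
// validated here, matching the hunk above.
void ValidateInteriorPadding(const std::vector<PaddingDim>& dims) {
  const bool has_negative =
      std::any_of(dims.begin(), dims.end(),
                  [](const PaddingDim& p) { return p.interior_padding < 0; });
  if (has_negative) {
    throw std::invalid_argument("Interior padding cannot be negative");
  }
}

Performing the check once over all dimensions before the size loop keeps the error message tied to the whole padding config rather than failing partway through the computation.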