aboutsummaryrefslogtreecommitdiffhomepage
diff options
context:
space:
mode:
authorGravatar Bjarke Hammersholt Roune <broune@google.com>2017-02-26 18:12:55 -0800
committerGravatar TensorFlower Gardener <gardener@tensorflow.org>2017-02-26 18:32:20 -0800
commit2bd01d16db41cf942f8b1cee335b19472dcfcbd1 (patch)
tree5b8ed59d813e538ef44a703c60ea03c7927b6b80
parentcf4f2e4d5546de87c68de30a84faacb66c572ac3 (diff)
Disable map inliner on CPU due to a bug and add two tests that demonstrate the bug.
Change: 148607148
-rw-r--r--tensorflow/compiler/xla/service/cpu/cpu_compiler.cc6
-rw-r--r--tensorflow/compiler/xla/tests/map_test.cc47
2 files changed, 52 insertions, 1 deletion
diff --git a/tensorflow/compiler/xla/service/cpu/cpu_compiler.cc b/tensorflow/compiler/xla/service/cpu/cpu_compiler.cc
index f09f842834..08f417401d 100644
--- a/tensorflow/compiler/xla/service/cpu/cpu_compiler.cc
+++ b/tensorflow/compiler/xla/service/cpu/cpu_compiler.cc
@@ -213,7 +213,11 @@ Status CpuCompiler::RunHloPasses(HloModule* hlo_module,
HloDumper dump_hlo) {
// Optimization pipeline.
HloPassPipeline pipeline("CPU", dump_hlo);
- pipeline.AddPass<Inliner>();
+
+ // TODO(b/35786417): Re-enable the inliner pass after fixing the bug and
+ // deciding where to take this pass in the future.
+ // pipeline.AddPass<Inliner>();
+
pipeline.AddPass<ConvCanonicalization>();
{
auto& pass = pipeline.AddPass<HloPassFix<HloPassPipeline>>("simplification",
diff --git a/tensorflow/compiler/xla/tests/map_test.cc b/tensorflow/compiler/xla/tests/map_test.cc
index 014417a205..2433c5653a 100644
--- a/tensorflow/compiler/xla/tests/map_test.cc
+++ b/tensorflow/compiler/xla/tests/map_test.cc
@@ -568,6 +568,53 @@ TEST_F(MapTestWithFullOpt, MapScalarPower) {
ErrorSpec(0.01f));
}
+// Regression test for b/35786417, where the inliner would not notice the change
+// of parameter order inside the map.
+TEST_F(MapTestWithFullOpt, MapSubtractOppositeOrder) {
+ ComputationBuilder builder(client_, TestName());
+
+ auto sub_builder = builder.CreateSubBuilder("power");
+ auto x = sub_builder->Parameter(0, ShapeUtil::MakeShape(F32, {}), "x");
+ auto y = sub_builder->Parameter(1, ShapeUtil::MakeShape(F32, {}), "y");
+ sub_builder->Sub(y, x); // note that this is y - x, not x - y
+ auto sub_opposite = sub_builder->BuildAndNoteError();
+
+ std::unique_ptr<Literal> param0_literal = LiteralUtil::CreateR0<float>(2.0f);
+ std::unique_ptr<Literal> param1_literal = LiteralUtil::CreateR0<float>(5.0f);
+ std::unique_ptr<GlobalData> param0_data =
+ client_->TransferToServer(*param0_literal).ConsumeValueOrDie();
+ std::unique_ptr<GlobalData> param1_data =
+ client_->TransferToServer(*param1_literal).ConsumeValueOrDie();
+
+ auto param0 = builder.Parameter(0, param0_literal->shape(), "param0");
+ auto param1 = builder.Parameter(1, param1_literal->shape(), "param1");
+ builder.Map({param0, param1}, sub_opposite);
+
+ ComputeAndCompareR0<float>(
+ &builder, 3.0f, {param0_data.get(), param1_data.get()}, ErrorSpec(0.01f));
+}
+
+// Regression test for b/35786417, where the inliner would CHECK-fail because
+// the mul inside the map has more operands than the map has parameters.
+TEST_F(MapTestWithFullOpt, MapSquare) {
+ ComputationBuilder builder(client_, TestName());
+
+ auto sub_builder = builder.CreateSubBuilder("power");
+ auto x = sub_builder->Parameter(0, ShapeUtil::MakeShape(F32, {}), "x");
+ sub_builder->Mul(x, x);
+ auto square = sub_builder->BuildAndNoteError();
+
+ std::unique_ptr<Literal> param0_literal = LiteralUtil::CreateR0<float>(10.0f);
+ std::unique_ptr<GlobalData> param0_data =
+ client_->TransferToServer(*param0_literal).ConsumeValueOrDie();
+
+ auto param0 = builder.Parameter(0, param0_literal->shape(), "param0");
+ builder.Map({param0}, square);
+
+ ComputeAndCompareR0<float>(&builder, 100.0f, {param0_data.get()},
+ ErrorSpec(0.01f));
+}
+
} // namespace
} // namespace xla