diff options
author | Sanjoy Das <sanjoy@google.com> | 2018-03-15 15:47:09 -0700 |
---|---|---|
committer | TensorFlower Gardener <gardener@tensorflow.org> | 2018-03-15 16:01:07 -0700 |
commit | 829f2aff4e663c13b71253707c8a867ca929bb1e (patch) | |
tree | 46dd10f7120964229c704e525ee9a3ec5a21efc5 /tensorflow/compiler/xla/service/gpu/pad_insertion.cc | |
parent | 1e75c69339da2fbf2c5c5fbeb891243badae7ff8 (diff) |
Rename CreateXyzHlo utilities to MakeXyzHlo as discussed on cr/188968478; NFC
The rationale here is that MakeXyzHlo is less likely to be confused with
HloInstruction::CreateXyz and we already have a convention of using a "Make"
prefix for ergonomic factory functions.
PiperOrigin-RevId: 189259036
Diffstat (limited to 'tensorflow/compiler/xla/service/gpu/pad_insertion.cc')
-rw-r--r-- | tensorflow/compiler/xla/service/gpu/pad_insertion.cc | 10 |
1 file changed, 5 insertions, 5 deletions
diff --git a/tensorflow/compiler/xla/service/gpu/pad_insertion.cc b/tensorflow/compiler/xla/service/gpu/pad_insertion.cc
index fa405b9329..7bda4e2fcd 100644
--- a/tensorflow/compiler/xla/service/gpu/pad_insertion.cc
+++ b/tensorflow/compiler/xla/service/gpu/pad_insertion.cc
@@ -69,7 +69,7 @@ HloInstruction* MaybePaddedAndSlicedInput(
     HloInstruction* padding =
         computation->AddInstruction(HloInstruction::CreateConstant(
             MakeUnique<Literal>(Literal::Zero(element_type))));
-    input = CreatePadHlo(input, padding, padding_config).ValueOrDie();
+    input = MakePadHlo(input, padding, padding_config).ValueOrDie();
   }
 
   if (window_util::HasNegativePadding(conv_window)) {
@@ -92,8 +92,8 @@ HloInstruction* MaybePaddedAndSlicedInput(
           std::max<int64>(0LL, -conv_window.dimensions(i).padding_high());
     }
 
-    input = CreateSliceHlo(input, start_indices, limit_indices, strides)
-                .ValueOrDie();
+    input =
+        MakeSliceHlo(input, start_indices, limit_indices, strides).ValueOrDie();
   }
 
   return input;
@@ -126,7 +126,7 @@ HloInstruction* MaybePaddedKernel(const Window& conv_window,
     HloInstruction* padding =
         computation->AddInstruction(HloInstruction::CreateConstant(
             MakeUnique<Literal>(Literal::Zero(element_type))));
-    return CreatePadHlo(kernel, padding, padding_config).ValueOrDie();
+    return MakePadHlo(kernel, padding, padding_config).ValueOrDie();
 }
 
 }  // namespace
@@ -238,7 +238,7 @@ bool PadInsertion::CanonicalizeBackwardFilterConvolution(
       computation->AddInstruction(HloInstruction::CreateConstant(
           MakeUnique<Literal>(Literal::Zero(input->shape().element_type())))));
   HloInstruction* padded_input =
-      CreatePadHlo(input, padding, input_padding_config).ValueOrDie();
+      MakePadHlo(input, padding, input_padding_config).ValueOrDie();
 
   // The shape of the backward_conv CustomCall is a tuple (conv_result,
   // scratch_buffer). Extract out the shape of conv_result.