about | summary | refs | log | tree | commit | diff | homepage
path: root/tensorflow/compiler/xla/layout_util.cc
diff options
context:
space:
mode:
author Justin Lebar <jlebar@google.com> 2018-05-11 16:04:54 -0700
committer TensorFlower Gardener <gardener@tensorflow.org> 2018-05-11 16:07:35 -0700
commit2f5f2cb4253b4eaf7953cf7ed28f76e0bdee6fcc (patch)
tree40e3333bbb20237045d50565b24c250de5d7141c /tensorflow/compiler/xla/layout_util.cc
parent5828842e5956825a65a5423b1ca503f72b084e62 (diff)
[XLA] s/tensorflow::Status/Status/.
These are type aliases of one another; we'd like to be consistent and use the shorter one.

PiperOrigin-RevId: 196322955
Diffstat (limited to 'tensorflow/compiler/xla/layout_util.cc')
-rw-r--r-- tensorflow/compiler/xla/layout_util.cc | 22
1 file changed, 10 insertions, 12 deletions
diff --git a/tensorflow/compiler/xla/layout_util.cc b/tensorflow/compiler/xla/layout_util.cc
index c6f8f6766e..a76fdcda25 100644
--- a/tensorflow/compiler/xla/layout_util.cc
+++ b/tensorflow/compiler/xla/layout_util.cc
@@ -140,8 +140,7 @@ Layout CreateDefaultLayoutForRank(int64 rank) {
LayoutUtil::SetToDefaultLayout(program_shape->mutable_result());
}
-/* static */ tensorflow::Status LayoutUtil::ValidateLayoutInShape(
- const Shape& shape) {
+/* static */ Status LayoutUtil::ValidateLayoutInShape(const Shape& shape) {
if (ShapeUtil::IsTuple(shape)) {
// Tuple shape.
if (shape.has_layout()) {
@@ -150,12 +149,12 @@ Layout CreateDefaultLayoutForRank(int64 rank) {
for (auto& element_shape : shape.tuple_shapes()) {
TF_RETURN_IF_ERROR(ValidateLayoutInShape(element_shape));
}
- return tensorflow::Status::OK();
+ return Status::OK();
} else if (ShapeUtil::IsOpaque(shape)) {
if (shape.has_layout()) {
return InvalidArgument("opaque should not have a layout field");
}
- return tensorflow::Status::OK();
+ return Status::OK();
} else {
// Array shape.
if (!shape.has_layout()) {
@@ -166,14 +165,14 @@ Layout CreateDefaultLayoutForRank(int64 rank) {
}
}
-/* static */ tensorflow::Status LayoutUtil::ValidateLayoutForShape(
- const Layout& layout, const Shape& shape) {
+/* static */ Status LayoutUtil::ValidateLayoutForShape(const Layout& layout,
+ const Shape& shape) {
if (ShapeUtil::IsTuple(shape)) {
return InvalidArgument("a single Layout is not valid for tuple shapes");
}
if (ShapeUtil::IsOpaque(shape)) {
- return tensorflow::Status::OK();
+ return Status::OK();
}
if (layout.format() == INVALID_FORMAT) {
@@ -225,7 +224,7 @@ Layout CreateDefaultLayoutForRank(int64 rank) {
}
}
- return tensorflow::Status::OK();
+ return Status::OK();
}
/* static */ void LayoutUtil::ClearLayout(Shape* shape) {
@@ -384,7 +383,7 @@ Layout CreateDefaultLayoutForRank(int64 rank) {
namespace {
// Internal helper for recursively copying layouts.
-tensorflow::Status CopyLayoutInternal(const Shape& src, Shape* dst) {
+Status CopyLayoutInternal(const Shape& src, Shape* dst) {
if (ShapeUtil::IsTuple(src) != ShapeUtil::IsTuple(*dst)) {
return InvalidArgument(
"cannot copy layout from shape: shape structure differs");
@@ -411,14 +410,13 @@ tensorflow::Status CopyLayoutInternal(const Shape& src, Shape* dst) {
dst->clear_layout();
}
}
- return tensorflow::Status::OK();
+ return Status::OK();
}
} // namespace
/* static */
-tensorflow::Status LayoutUtil::CopyLayoutBetweenShapes(const Shape& src,
- Shape* dst) {
+Status LayoutUtil::CopyLayoutBetweenShapes(const Shape& src, Shape* dst) {
return CopyLayoutInternal(src, dst);
}