path: root/tensorflow/compiler/xla
author     HyoukJoong Lee <hyouklee@google.com>             2018-05-04 00:51:58 -0700
committer  TensorFlower Gardener <gardener@tensorflow.org>  2018-05-04 10:44:10 -0700
commit     0bb55f02022e88affefc111cf9a8cf70a046d1da (patch)
tree       bc2430242746a1af948519ad142dd68b84c5ddf5 /tensorflow/compiler/xla
parent     da0dcb21501b765932e392ae710ebbecefeb309c (diff)
Automated g4 rollback of changelist 194829761
PiperOrigin-RevId: 195379693
Diffstat (limited to 'tensorflow/compiler/xla')
-rw-r--r--  tensorflow/compiler/xla/service/hlo_module_group_metadata.cc |  7
-rw-r--r--  tensorflow/compiler/xla/service/hlo_module_group_metadata.h  |  3
-rw-r--r--  tensorflow/compiler/xla/service/service.cc                    | 13
3 files changed, 3 insertions(+), 20 deletions(-)
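
The service.cc hunk below swaps a hand-built DeviceAssignment, filled replica by replica from the provided device handles, for a single call to the backend's ComputationPlacer::AssignDevices. The standalone C++ sketch that follows is illustrative only: the DeviceAssignment class and AssignDevices function here are simplified stand-ins for the XLA types of the same name, and the device-ordinal scheme is an assumed placeholder, not XLA's actual placement policy.

// Standalone illustration (not the real XLA API): replace a hand-written
// loop that fills a (replica, computation) -> device table with a single
// placement call, mirroring the shape of the service.cc change below.
#include <cstdio>
#include <vector>

class DeviceAssignment {
 public:
  DeviceAssignment(int replica_count, int computation_count)
      : replica_count_(replica_count),
        computation_count_(computation_count),
        table_(replica_count * computation_count, -1) {}

  // Cell holding the device ordinal for (replica, computation).
  int& operator()(int replica, int computation) {
    return table_[replica * computation_count_ + computation];
  }

  int replica_count() const { return replica_count_; }
  int computation_count() const { return computation_count_; }

 private:
  int replica_count_;
  int computation_count_;
  std::vector<int> table_;
};

// Stand-in for a computation placer: produce a full assignment in one call
// instead of looping over per-computation replica lists at the call site.
DeviceAssignment AssignDevices(int replica_count, int computation_count) {
  DeviceAssignment assignment(replica_count, computation_count);
  for (int computation = 0; computation < computation_count; ++computation) {
    for (int replica = 0; replica < replica_count; ++replica) {
      // Simplified ordinal scheme, for illustration only.
      assignment(replica, computation) = computation * replica_count + replica;
    }
  }
  return assignment;
}

int main() {
  DeviceAssignment assignment =
      AssignDevices(/*replica_count=*/2, /*computation_count=*/3);
  for (int replica = 0; replica < assignment.replica_count(); ++replica) {
    for (int computation = 0; computation < assignment.computation_count();
         ++computation) {
      std::printf("replica %d, computation %d -> device %d\n", replica,
                  computation, assignment(replica, computation));
    }
  }
  return 0;
}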
diff --git a/tensorflow/compiler/xla/service/hlo_module_group_metadata.cc b/tensorflow/compiler/xla/service/hlo_module_group_metadata.cc
index 3367d76ded..54c34ce116 100644
--- a/tensorflow/compiler/xla/service/hlo_module_group_metadata.cc
+++ b/tensorflow/compiler/xla/service/hlo_module_group_metadata.cc
@@ -194,13 +194,6 @@ int64 HloModuleGroupMetadata::GetModuleId(const HloModule* module) const {
   LOG(FATAL) << "unknown module";
 }
 
-int64 HloModuleGroupMetadata::GetDeviceModulesCount() const {
-  return std::count_if(modules_.begin(), modules_.end(),
-                       [](const HloModule* module) {
-                         return !module->config().is_host_module();
-                       });
-}
-
 Status HloModuleGroupMetadata::RecordInstructions() {
   const auto visitor = [this](HloInstruction* hlo) -> Status {
     if (hlo->opcode() == HloOpcode::kWhile) {
diff --git a/tensorflow/compiler/xla/service/hlo_module_group_metadata.h b/tensorflow/compiler/xla/service/hlo_module_group_metadata.h
index d619082616..c48a7ab0b5 100644
--- a/tensorflow/compiler/xla/service/hlo_module_group_metadata.h
+++ b/tensorflow/compiler/xla/service/hlo_module_group_metadata.h
@@ -147,9 +147,6 @@ class HloModuleGroupMetadata {
   // the module in the module vector.
   int64 GetModuleId(const HloModule* module) const;
 
-  // Returns the number of modules for devices (excluding the host module).
-  int64 GetDeviceModulesCount() const;
-
   // Returns the companion instructions for the given instruction.
   //
   // Precondition: IsCompanionWhile(instruction) is true.
diff --git a/tensorflow/compiler/xla/service/service.cc b/tensorflow/compiler/xla/service/service.cc
index 6ce03ab39d..495f8801ba 100644
--- a/tensorflow/compiler/xla/service/service.cc
+++ b/tensorflow/compiler/xla/service/service.cc
@@ -626,16 +626,9 @@ Service::ExecuteParallelAndRegisterResult(
   // profiled.
   std::map<int64, se::Stream*> index_to_profiled_streams;
 
-  // Build DeviceAssignment for all cores based on the provided device handles.
-  DeviceAssignment device_assignment(options_.number_of_replicas(),
-                                     executables.size());
-  for (int64 i = 0; i < executables.size(); i++) {
-    TF_ASSIGN_OR_RETURN(auto replicas, Replicas(*backend, device_handles[i]));
-    CHECK_EQ(replicas.size(), arguments[i].size());
-    for (int64 replica = 0; replica < replicas.size(); ++replica) {
-      device_assignment(replica, i) = replicas[replica]->device_ordinal();
-    }
-  }
+  TF_ASSIGN_OR_RETURN(DeviceAssignment device_assignment,
+                      backend->computation_placer()->AssignDevices(
+                          options_.number_of_replicas(), executables.size()));
 
   for (int64 i = 0; i < executables.size(); i++) {
     // Stream executors for the replicas of the current computation.