about summary refs log tree commit diff homepage
path: root/tensorflow/tools/dist_test/python/mnist_replica.py
diff options
context:
space:
mode:
Diffstat (limited to 'tensorflow/tools/dist_test/python/mnist_replica.py')
-rw-r--r--  tensorflow/tools/dist_test/python/mnist_replica.py | 6
1 file changed, 2 insertions(+), 4 deletions(-)
diff --git a/tensorflow/tools/dist_test/python/mnist_replica.py b/tensorflow/tools/dist_test/python/mnist_replica.py
index f7dbfea7fb..e40ecb43f9 100644
--- a/tensorflow/tools/dist_test/python/mnist_replica.py
+++ b/tensorflow/tools/dist_test/python/mnist_replica.py
@@ -17,7 +17,7 @@
A simple softmax model with one hidden layer is defined. The parameters
(weights and biases) are located on one parameter server (ps), while the ops
-are executed on two worker nodes by default. The TF sessions also run on the
+are executed on two worker nodes by default. The TF sessions also run on the
worker node.
Multiple invocations of this script can be done in parallel, with different
values for --task_index. There should be exactly one invocation with
@@ -123,9 +123,7 @@ def main(unused_argv):
is_chief = (FLAGS.task_index == 0)
if FLAGS.num_gpus > 0:
- if FLAGS.num_gpus < num_workers:
- raise ValueError("number of gpus is less than number of workers")
- # Avoid gpu allocation conflict: now allocate task_num -> #gpu
+ # Avoid gpu allocation conflict: now allocate task_num -> #gpu
# for each worker in the corresponding machine
gpu = (FLAGS.task_index % FLAGS.num_gpus)
worker_device = "/job:worker/task:%d/gpu:%d" % (FLAGS.task_index, gpu)