author	Frank Chen <frankchn@google.com>	2018-09-05 15:44:59 -0700
committer	TensorFlower Gardener <gardener@tensorflow.org>	2018-09-05 15:51:32 -0700
commit	007443c69511aa001696a53150aa5a4334ffb8b9 (patch)
tree	62f27b1623cce2d8c17d9de59165fa5f7b5f4520 /tensorflow
parent	25241c4270ca3c8679710fbe1803c836b6c983ea (diff)
Temporarily disable distributed coordinator training when using TPUStrategy
PiperOrigin-RevId: 211712907
Diffstat (limited to 'tensorflow')
-rw-r--r--	tensorflow/python/estimator/run_config.py | 7 ++++++-
1 file changed, 6 insertions(+), 1 deletion(-)
diff --git a/tensorflow/python/estimator/run_config.py b/tensorflow/python/estimator/run_config.py
index b1ca207b62..3773810a04 100644
--- a/tensorflow/python/estimator/run_config.py
+++ b/tensorflow/python/estimator/run_config.py
@@ -521,7 +521,12 @@ class RunConfig(object):
eval_distribute=eval_distribute,
experimental_distribute=experimental_distribute)
- if train_distribute or eval_distribute or experimental_distribute:
+ # TODO(frankchn,priyag): Eventually use distributed coordinator for TPUs.
+ if ((train_distribute and
+ train_distribute.__class__.__name__ != 'TPUStrategy') or
+ (eval_distribute and
+ eval_distribute.__class__.__name__ != 'TPUStrategy') or
+ experimental_distribute):
logging.info('Initializing RunConfig with distribution strategies.')
distribute_coordinator_training.init_run_config(self, tf_config)
else:
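
For reference, below is a minimal, self-contained sketch of the new condition in isolation. The bare TPUStrategy/MirroredStrategy stand-in classes and the _should_use_coordinator helper are hypothetical names used only for illustration; they are not part of this change or of the TensorFlow API.

# Sketch of the condition added above. The stand-in classes only mimic
# the class *names* the check compares against; the helper name is
# hypothetical.

class TPUStrategy(object):
  """Stand-in whose class name matches the string the check looks for."""

class MirroredStrategy(object):
  """Stand-in for any non-TPU distribution strategy."""

def _should_use_coordinator(train_distribute, eval_distribute,
                            experimental_distribute):
  # The check compares the class *name* rather than using isinstance(),
  # presumably so run_config.py does not need to import the TPU strategy
  # module itself.
  return bool(
      (train_distribute and
       train_distribute.__class__.__name__ != 'TPUStrategy') or
      (eval_distribute and
       eval_distribute.__class__.__name__ != 'TPUStrategy') or
      experimental_distribute)

# TPUStrategy skips the distributed coordinator; other strategies use it,
# matching the temporary behavior this commit introduces.
assert not _should_use_coordinator(TPUStrategy(), None, None)
assert _should_use_coordinator(MirroredStrategy(), None, None)
assert not _should_use_coordinator(None, None, None)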