about summary refs log tree commit diff homepage
diff options
context:
space:
mode:
authorGravatar Vijay Vasudevan <vrv@google.com>2016-03-04 22:03:52 -0800
committerGravatar TensorFlower Gardener <gardener@tensorflow.org>2016-03-05 15:52:27 -0800
commitd81bd790cb18a23795a6d0b83d683071c77ca6d2 (patch)
tree5953a81b01a91e94f513a644a9c463485ef6c1e4
parent8c42d1d76a30e6a81228a329e50270b44905f6fe (diff)
TensorFlow: Change apply_dense and apply_sparse to use a colocation
constraint rather than ops.device, since colocation is more portable. Change: 116431514
-rw-r--r--tensorflow/python/training/optimizer.py4
1 file changed, 3 insertions, 1 deletion
diff --git a/tensorflow/python/training/optimizer.py b/tensorflow/python/training/optimizer.py
index 9af92c66bf..1e8d6b0f12 100644
--- a/tensorflow/python/training/optimizer.py
+++ b/tensorflow/python/training/optimizer.py
@@ -292,7 +292,9 @@ class Optimizer(object):
for grad, var in grads_and_vars:
if not grad:
continue
- with ops.name_scope("update_" + var.op.name), ops.device(var.device):
+ # We colocate all ops created in _apply_dense or _apply_sparse
+ # on the same device as the variable.
+ with ops.name_scope("update_" + var.op.name), ops.colocate_with(var):
if isinstance(grad, ops.Tensor):
update_ops.append(self._apply_dense(grad, var))
else: