author    A. Unique TensorFlower <gardener@tensorflow.org>  2018-03-30 11:12:24 -0700
committer TensorFlower Gardener <gardener@tensorflow.org>   2018-03-30 11:14:41 -0700
commit    c4acdccbb7284c6a63e6824a7ee45ce7a86606b9 (patch)
tree      b010d4c7fe9375a7e9d5f74eced731236ed9353d /tensorflow/contrib/optimizer_v2
parent    528c64665f2c3220acb59031926274403b96dddb (diff)
Rename distributed_apply to _distributed_apply in OptimizerV2 to match
the Optimizer base class.

PiperOrigin-RevId: 191089407
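The leading underscore marks the method as private, matching _distributed_apply
on the tf.train.Optimizer base class; callers are expected to go through the
public apply_gradients(), which hands off to it via merge_call. A minimal
sketch of that calling convention follows (graph-mode TF 1.x style; the
variable, loss, and learning rate are illustrative placeholders, not part of
this change):

  import tensorflow as tf
  from tensorflow.contrib.optimizer_v2 import gradient_descent

  # Illustrative placeholders for this sketch.
  w = tf.Variable(1.0)
  loss = tf.square(w)
  global_step = tf.train.get_or_create_global_step()

  opt = gradient_descent.GradientDescentOptimizer(learning_rate=0.1)
  grads_and_vars = opt.compute_gradients(loss)
  # Public entry point: under a DistributionStrategy this runs in the
  # tower context and dispatches to the now-private _distributed_apply
  # through merge_call. External code should not call _distributed_apply.
  train_op = opt.apply_gradients(grads_and_vars, global_step=global_step)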
Diffstat (limited to 'tensorflow/contrib/optimizer_v2')
-rw-r--r--  tensorflow/contrib/optimizer_v2/optimizer_v2.py | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/tensorflow/contrib/optimizer_v2/optimizer_v2.py b/tensorflow/contrib/optimizer_v2/optimizer_v2.py
index 471992fdac..25d19578ea 100644
--- a/tensorflow/contrib/optimizer_v2/optimizer_v2.py
+++ b/tensorflow/contrib/optimizer_v2/optimizer_v2.py
@@ -866,7 +866,7 @@ class OptimizerV2(optimizer_v1.Optimizer):
       raise ValueError("No gradients provided for any variable: %s." %
                        ([str(v) for _, v in grads_and_vars],))
     return distribute_lib.get_tower_context().merge_call(
-        self.distributed_apply, filtered, global_step=global_step, name=name)
+        self._distributed_apply, filtered, global_step=global_step, name=name)
 
   def _get_or_create_state(self, var_list=None):
     """Either looks up or creates `_OptimizerV2State`.
@@ -899,7 +899,7 @@ class OptimizerV2(optimizer_v1.Optimizer):
       self._per_graph_state[graph_key] = per_graph_state
     return per_graph_state
 
-  def distributed_apply(self, distribution, grads_and_vars, global_step, name):
+  def _distributed_apply(self, distribution, grads_and_vars, global_step, name):
     """`apply_gradients` for use with a `DistributionStrategy`."""
     reduced_grads = distribution.batch_reduce("sum", grads_and_vars)
     var_list = [v for _, v in grads_and_vars]
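For context on the renamed method's first step: batch_reduce("sum", ...) sums
each variable's gradient across towers before the update is applied. The
sketch below is a conceptual plain-Python illustration of that reduction only,
not the DistributionStrategy.batch_reduce implementation, which operates on
per-device values and performs real cross-device communication:

  def batch_reduce_sum(per_tower_grads_and_vars):
    # per_tower_grads_and_vars: one [(grad, var), ...] list per tower,
    # with the same variables in the same order on every tower.
    reduced = []
    for pairs in zip(*per_tower_grads_and_vars):
      grads = [g for g, _ in pairs]
      var = pairs[0][1]  # the shared destination variable
      reduced.append((sum(grads), var))
    return reduced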