author     weidankong <kongweidan84@gmail.com>  2018-08-14 16:02:06 -0700
committer  weidankong <kongweidan84@gmail.com>  2018-08-14 16:02:06 -0700
commit     fae346fb4b745cac4f93f503ed51a1debf48de41 (patch)
tree       870ccd47efd199942edd0ec89ad1c65107e7e969 /tensorflow/contrib/opt
parent     3b890362c4a38cedb812741f7c14b1b9499e2bac (diff)
fix sanity check failure
Diffstat (limited to 'tensorflow/contrib/opt')
-rw-r--r--  tensorflow/contrib/opt/python/training/elastic_average_optimizer.py       | 35
-rw-r--r--  tensorflow/contrib/opt/python/training/elastic_average_optimizer_test.py  | 37
2 files changed, 45 insertions, 27 deletions
diff --git a/tensorflow/contrib/opt/python/training/elastic_average_optimizer.py b/tensorflow/contrib/opt/python/training/elastic_average_optimizer.py
index f4ffb62a04..bbafd59aae 100644
--- a/tensorflow/contrib/opt/python/training/elastic_average_optimizer.py
+++ b/tensorflow/contrib/opt/python/training/elastic_average_optimizer.py
@@ -20,11 +20,9 @@ from __future__ import print_function
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
-from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import data_flow_ops
from tensorflow.python.ops import gen_nn_ops
-from tensorflow.python.ops import init_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import state_ops
from tensorflow.python.ops import variable_scope
@@ -130,7 +128,12 @@ class ElasticAverageCustomGetter(object):
= list(global_center_variable)[i]
return local_var
else:
- return getter(name, trainable=trainable, collections=collections, *args, **kwargs)
+ return getter(
+ name,
+ trainable=trainable,
+ collections=collections,
+ *args,
+ **kwargs)
class ElasticAverageOptimizer(optimizer.Optimizer):
@@ -172,11 +175,11 @@ class ElasticAverageOptimizer(optimizer.Optimizer):
rho=0.0 is suggested in async mode.
use_locking: If True use locks for update operations.
synchronous: Add_sync_queues_and_barrier or not.
- True: all workers will wait for each other before start training
- False: worker can start training when its initilization is done,
- no need to wait for everyone is ready.
- in case one worker is restarted, it can join and continue
- training without being blocked.
+ True: all workers will wait for each other before starting training
+ False: worker can start training when its initialization is done,
+ no need to wait until everyone is ready.
+ In case one worker is restarted, it can join and continue
+ training without being blocked.
name: Optional name prefix for the operations created when applying
gradients. Defaults to "ElasticAverageOptimizer".
"""
@@ -393,13 +396,14 @@ class ElasticAverageOptimizer(optimizer.Optimizer):
def swapping_saver(self, var_list=None, name='swapping_saver', **kwargs):
"""Create a saver copy global_center_variable to trainable variables
- Please call this function after all your variables created with EACustomGetter.
- For evaluations or inference, use this saver during training. It will save the
- global_center_variable of the trained parameters under the original parameter names.
+ Please call this function after all your variables are created with
+ ElasticAverageCustomGetter. For evaluations or inference, use this saver
+ during training. It will save the global_center_variable of the trained
+ parameters under the original parameter names.
Args:
var_list: List of variables to save, as per `Saver()`.
- If set to None, will save all the trainable_variables that have been
- created before this call.
+ If set to None, save all the trainable_variables that have
+ been created before this call.
name: The name of the saver.
**kwargs: Keyword arguments of `Saver()`.
Returns:
@@ -407,12 +411,13 @@ class ElasticAverageOptimizer(optimizer.Optimizer):
Raises:
RuntimeError: global_center_variable is empty, please make sure
this is called after model created and
- EACustomGetter is used when declaring you model
+ ElasticAverageCustomGetter is used when declaring your model
"""
if not self._global_map:
raise RuntimeError('global_center_variable is empty, please make sure '
'this is called after model created and '
- 'ElasticAverageCustomGetter is used when declaring you model')
+ 'ElasticAverageCustomGetter is used when declaring '
+ 'your model')
if var_list is None:
var_list = variables.trainable_variables()
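
The two docstrings reflowed above describe how this optimizer is meant to be used: variables are created under an `ElasticAverageCustomGetter`, and `swapping_saver` is created only after all variables exist so it can map the `global_center_variable` copies back to the original names. The following is a minimal sketch of that flow; it is not part of this commit, and details such as the `worker_device` argument, the `communication_period` keyword, and the single-worker setup are assumptions based on the test file, not verified signatures.

```python
# Hedged sketch (not part of this commit): single-worker wiring of
# ElasticAverageOptimizer, assuming TF 1.x contrib APIs. Keyword names such as
# worker_device and communication_period follow the test helper and are
# assumptions, not verified signatures.
import tensorflow as tf
from tensorflow.contrib.opt.python.training import elastic_average_optimizer as ea

# The custom getter keeps a trainable local copy of each variable on the
# worker and a global_center_variable copy for the elastic-average update.
ea_custom = ea.ElasticAverageCustomGetter(
    worker_device="/job:worker/task:0")  # assumed constructor argument

with tf.variable_scope("", custom_getter=ea_custom):
  var_0 = tf.get_variable(initializer=0.0, name="v0")

global_step = tf.train.get_or_create_global_step()
grads_0 = tf.constant(-1.0)

opt = ea.ElasticAverageOptimizer(
    opt=tf.train.GradientDescentOptimizer(1.0),
    num_worker=1,
    ea_custom_getter=ea_custom,
    communication_period=4,  # assumed keyword: steps between local/global syncs
    synchronous=False)       # workers may start without waiting for each other

# apply_gradients takes (grad, var) pairs plus the global step, as in the test.
train_op = opt.apply_gradients(([grads_0, var_0],), global_step)

# swapping_saver must be created after all variables exist; it writes the
# global_center_variable values under the original variable names so a plain
# evaluation/inference graph can restore them.
saver = opt.swapping_saver()
```

Note that the test below has to unwrap the monitored session (`sessions[0]._sess._sess._sess._sess`) before calling this saver, because `Saver.save` needs a raw `tf.Session`.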
diff --git a/tensorflow/contrib/opt/python/training/elastic_average_optimizer_test.py b/tensorflow/contrib/opt/python/training/elastic_average_optimizer_test.py
index c2dc0791d3..c2a680edd1 100644
--- a/tensorflow/contrib/opt/python/training/elastic_average_optimizer_test.py
+++ b/tensorflow/contrib/opt/python/training/elastic_average_optimizer_test.py
@@ -84,8 +84,10 @@ def _get_workers(num_workers, period, workers, moving_rate, num_ps=1):
var_0 = variable_scope.get_variable(initializer=0.0, name="v0")
var_1 = variable_scope.get_variable(initializer=1.0, name="v1")
if num_ps > 1:
- with variable_scope.variable_scope("",
- partitioner=partitioned_variables.fixed_size_partitioner(num_ps, axis=0),
+ with variable_scope.variable_scope(
+ "",
+ partitioner=partitioned_variables.fixed_size_partitioner(
+ num_ps, axis=0),
custom_getter=ea_custom), ops.device(
device_setter.replica_device_setter(
worker_device=worker_device,
@@ -93,7 +95,9 @@ def _get_workers(num_workers, period, workers, moving_rate, num_ps=1):
ps_tasks=num_ps)):
partition_var = variable_scope.get_variable(
- 'partition_var',shape=[2,4], initializer=init_ops.ones_initializer)
+ 'partition_var',
+ shape=[2, 4],
+ initializer=init_ops.ones_initializer)
part_0 = list(partition_var)[0]
part_1 = list(partition_var)[1]
@@ -112,13 +116,15 @@ def _get_workers(num_workers, period, workers, moving_rate, num_ps=1):
ea_custom_getter=ea_custom)
if num_ps == 1:
train_op = [
- opt.apply_gradients(([grads_0, var_0], [grads_1, var_1]),
+ opt.apply_gradients(([grads_0, var_0], [grads_1, var_1]),
global_step)
]
else:
train_op = [
- opt.apply_gradients(([grads_0, var_0], [grads_1, var_1],
- [grads_part_0, part_0], [grads_part_1, part_1]),
+ opt.apply_gradients(([grads_0, var_0],
+ [grads_1, var_1],
+ [grads_part_0, part_0],
+ [grads_part_1, part_1]),
global_step)
]
easgd_hook = opt.make_session_run_hook(is_chief, worker_id)
@@ -190,7 +196,8 @@ class ElasticAverageOptimizerTest(test.TestCase):
sessions[0].run(train_ops[0])
# save, data will be global value
- savers[0].save(sessions[0]._sess._sess._sess._sess, save_path='./model/model')
+ savers[0].save(sessions[0]._sess._sess._sess._sess,
+ save_path='./model/model')
ops.reset_default_graph() # restore on a new graph
with session.Session() as sess:
v0 = variable_scope.get_variable(initializer=0.0, name="v0")
@@ -219,7 +226,8 @@ class ElasticAverageOptimizerTest(test.TestCase):
var_0_g = graphs[0].get_tensor_by_name(GLOBAL_VARIABLE_NAME + "/v0:0")
var_1_g = graphs[0].get_tensor_by_name(GLOBAL_VARIABLE_NAME + "/v1:0")
- part_0_g = graphs[0].get_tensor_by_name(GLOBAL_VARIABLE_NAME + "/partition_var/part_0:0")
+ part_0_g = graphs[0].get_tensor_by_name(
+ GLOBAL_VARIABLE_NAME + "/partition_var/part_0:0")
# Verify the initialized value.
self.assertAllEqual(0.0, sessions[0].run(var_0))
@@ -241,16 +249,21 @@ class ElasticAverageOptimizerTest(test.TestCase):
# part_0 of global_center copy
part_0_g = sessions[0].run(part_0_g)
- savers[0].save(sessions[0]._sess._sess._sess._sess, save_path='./model/model')
+ savers[0].save(sessions[0]._sess._sess._sess._sess,
+ save_path='./model/model')
# verify restore of partitioned_variables
ops.reset_default_graph() # restore on a new graph
g = ops.get_default_graph()
with session.Session() as sess, g.as_default():
- with variable_scope.variable_scope("",
- partitioner=partitioned_variables.fixed_size_partitioner(num_ps, axis=0)):
+ with variable_scope.variable_scope(
+ "",
+ partitioner=partitioned_variables.fixed_size_partitioner(
+ num_ps, axis=0)):
partition_var = variable_scope.get_variable(
- 'partition_var',shape=[2,4], initializer=init_ops.ones_initializer)
+ 'partition_var',
+ shape=[2, 4],
+ initializer=init_ops.ones_initializer)
s = saver.Saver(var_list=[partition_var])
s.restore(sess, './model/model')
part_0 = g.get_tensor_by_name('partition_var/part_0:0')
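
The last hunks above reformat a save/restore round trip for partitioned variables. Below is a standalone sketch of that pattern outside the EASGD harness; the checkpoint path, shape, and `num_ps` value are illustrative and mirror the test code, they are not part of this commit.

```python
# Hedged sketch (not part of this commit): save/restore of a fixed-size
# partitioned variable, mirroring the test pattern above. The test writes to
# './model/model'; a temp directory is used here so the sketch is self-contained.
import os
import tempfile

import tensorflow as tf

num_ps = 2
ckpt = os.path.join(tempfile.mkdtemp(), 'model')

with tf.variable_scope(
    "", partitioner=tf.fixed_size_partitioner(num_ps, axis=0)):
  partition_var = tf.get_variable(
      'partition_var', shape=[2, 4], initializer=tf.ones_initializer)

saver = tf.train.Saver(var_list=[partition_var])
with tf.Session() as sess:
  sess.run(tf.global_variables_initializer())
  saver.save(sess, ckpt)
  saver.restore(sess, ckpt)
  # Each shard is a separate variable named <name>/part_<i>; the test reads
  # them back with get_tensor_by_name after restoring.
  part_0 = sess.graph.get_tensor_by_name('partition_var/part_0:0')
  print(sess.run(part_0))  # a [1, 4] slice of ones for this shape/partitioner
```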