path: root/tensorflow/contrib/opt
author     weidankong <kongweidan84@gmail.com>  2018-08-28 10:09:13 -0700
committer  weidankong <kongweidan84@gmail.com>  2018-08-28 10:09:13 -0700
commit  40aee739c3d5c7aee63020f36b83aded09044efb (patch)
tree    b016dbf8357dc8078d531ba2b5bdf05da5288165 /tensorflow/contrib/opt
parent  540ca4a8755a3670920b49647860d085df834a00 (diff)
AGN: fix sanity failure
Diffstat (limited to 'tensorflow/contrib/opt')
-rw-r--r--  tensorflow/contrib/opt/python/training/agn_optimizer_test.py | 10
1 file changed, 5 insertions(+), 5 deletions(-)
diff --git a/tensorflow/contrib/opt/python/training/agn_optimizer_test.py b/tensorflow/contrib/opt/python/training/agn_optimizer_test.py
index 28732c2a1d..fc291f829f 100644
--- a/tensorflow/contrib/opt/python/training/agn_optimizer_test.py
+++ b/tensorflow/contrib/opt/python/training/agn_optimizer_test.py
@@ -111,15 +111,15 @@ def _get_workers(num_workers, period, workers, num_ps=1):
if num_ps == 1:
train_op = [
opt.apply_gradients(([grads_0, var_0], [grads_1, var_1]),
- global_step)
+ global_step)
]
else:
train_op = [
opt.apply_gradients(([grads_0, var_0],
- [grads_1, var_1],
- [grads_part_0, part_0],
- [grads_part_1, part_1]),
- global_step)
+ [grads_1, var_1],
+ [grads_part_0, part_0],
+ [grads_part_1, part_1]),
+ global_step)
]
hook = opt.make_session_run_hook(is_chief, worker_id)
# Creates MonitoredSession
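For readers unfamiliar with the code this hunk touches: the test builds train ops with opt.apply_gradients() and drives them through a MonitoredSession using the hook returned by opt.make_session_run_hook(). The sketch below shows roughly how that wiring looks for a single worker. It is illustrative only: the AGNOptimizer constructor arguments, variable names, and gradient values are assumptions, not the actual test fixture; only apply_gradients() and make_session_run_hook() usage is taken from the diff.

# Sketch (assumed wiring, not the actual test): build AGN-managed updates with
# apply_gradients() and run them under a MonitoredTrainingSession via the hook
# from make_session_run_hook().
import tensorflow as tf
from tensorflow.contrib.opt.python.training import agn_optimizer

global_step = tf.train.get_or_create_global_step()
var_0 = tf.get_variable("v0", initializer=0.0)
var_1 = tf.get_variable("v1", initializer=1.0)
grads_0 = tf.constant(0.1)  # stand-in gradient values for illustration
grads_1 = tf.constant(0.1)

# Constructor arguments below are assumptions for illustration; see
# agn_optimizer.AGNOptimizer for the real signature.
opt = agn_optimizer.AGNOptimizer(
    tf.train.GradientDescentOptimizer(1.0),
    num_worker=1,
    communication_period=4)

# Same call shape as in the hunk above: (grad, var) pairs plus global_step.
train_op = opt.apply_gradients(([grads_0, var_0], [grads_1, var_1]),
                               global_step)

# Positional arguments mirror the test: is_chief, then the worker index.
hook = opt.make_session_run_hook(True, 0)

with tf.train.MonitoredTrainingSession(is_chief=True, hooks=[hook]) as sess:
  sess.run(train_op)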