aboutsummaryrefslogtreecommitdiffhomepage
path: root/tensorflow/python/training
diff options
context:
space:
mode:
authorGravatar Allen Lavoie <allenl@google.com>2018-09-26 15:19:14 -0700
committerGravatar TensorFlower Gardener <gardener@tensorflow.org>2018-09-26 15:24:09 -0700
commitee9c6c17abce8450d08140750b857ad36b0508e8 (patch)
treefb5df39484ee3f854f5c7fd3391238f158290f7a /tensorflow/python/training
parentdc90d6c486f2ec1741766b0989e6f6e842d94437 (diff)
Fix Optimizer "No gradients provided" error messages to report variables instead of internal processor objects.
PiperOrigin-RevId: 214678470
Diffstat (limited to 'tensorflow/python/training')
-rw-r--r--tensorflow/python/training/optimizer.py2
1 file changed, 1 insertion, 1 deletion
diff --git a/tensorflow/python/training/optimizer.py b/tensorflow/python/training/optimizer.py
index 699162b30c..f004f3944a 100644
--- a/tensorflow/python/training/optimizer.py
+++ b/tensorflow/python/training/optimizer.py
@@ -585,7 +585,7 @@ class Optimizer(
var_list = [v for g, v, _ in converted_grads_and_vars if g is not None]
if not var_list:
raise ValueError("No gradients provided for any variable: %s." %
- ([str(v) for _, _, v in converted_grads_and_vars],))
+ ([str(v) for _, v, _ in converted_grads_and_vars],))
with ops.init_scope():
self._create_slots(var_list)
update_ops = []