aboutsummaryrefslogtreecommitdiffhomepage
path: root/tensorflow/python/training/adam.py
diff options
context:
space:
mode:
authorGravatar Vijay Vasudevan <vrv@google.com>2016-02-17 11:42:30 -0800
committerGravatar TensorFlower Gardener <gardener@tensorflow.org>2016-02-17 12:56:41 -0800
commitfe056f0b5e52db86766761f5e6446a89c1aa3938 (patch)
tree68bce0e257d181a3fa37f83c97fdff0fdad877fc /tensorflow/python/training/adam.py
parent19d632338f983e02dd0268b931e9cced03b74805 (diff)
Merge changes from github.
Change: 114882676
Diffstat (limited to 'tensorflow/python/training/adam.py')
-rw-r--r--tensorflow/python/training/adam.py3
1 file changed, 2 insertions, 1 deletion
diff --git a/tensorflow/python/training/adam.py b/tensorflow/python/training/adam.py
index e691d1b1bf..a258ee57e2 100644
--- a/tensorflow/python/training/adam.py
+++ b/tensorflow/python/training/adam.py
@@ -30,7 +30,8 @@ from tensorflow.python.training import training_ops
class AdamOptimizer(optimizer.Optimizer):
"""Optimizer that implements the Adam algorithm.
- See this [paper](http://arxiv.org/pdf/1412.6980v7.pdf).
+ See [Kingma et. al., 2014](http://arxiv.org/abs/1412.6980)
+ ([pdf](http://arxiv.org/pdf/1412.6980.pdf)).
@@__init__
"""