diff options
author | 2016-02-17 11:42:30 -0800 | |
---|---|---|
committer | 2016-02-17 12:56:41 -0800 | |
commit | fe056f0b5e52db86766761f5e6446a89c1aa3938 (patch) | |
tree | 68bce0e257d181a3fa37f83c97fdff0fdad877fc /tensorflow/python/training/adam.py | |
parent | 19d632338f983e02dd0268b931e9cced03b74805 (diff) |
Merge changes from github.
Change: 114882676
Diffstat (limited to 'tensorflow/python/training/adam.py')
-rw-r--r-- | tensorflow/python/training/adam.py | 3 |
1 file changed, 2 insertions(+), 1 deletion(-)
diff --git a/tensorflow/python/training/adam.py b/tensorflow/python/training/adam.py index e691d1b1bf..a258ee57e2 100644 --- a/tensorflow/python/training/adam.py +++ b/tensorflow/python/training/adam.py @@ -30,7 +30,8 @@ from tensorflow.python.training import training_ops class AdamOptimizer(optimizer.Optimizer): """Optimizer that implements the Adam algorithm. - See this [paper](http://arxiv.org/pdf/1412.6980v7.pdf). + See [Kingma et. al., 2014](http://arxiv.org/abs/1412.6980) + ([pdf](http://arxiv.org/pdf/1412.6980.pdf)). @@__init__ """ |