author    A. Unique TensorFlower <gardener@tensorflow.org>  2018-08-25 20:53:46 -0700
committer TensorFlower Gardener <gardener@tensorflow.org>   2018-08-25 20:57:50 -0700
commit    8a05bdf333f34603b33c0f3a029e023deb27ae04
tree      6f43d21f79f3631da2292a592357aa141cd39bf1 /tensorflow/contrib/opt
parent    0850044c3481fcea6e95122f63db760a53772b40
Expose the RegAdagradOptimizer (which allows the user to specify whether a loss should update the accumulator) through tf.contrib.opt.
PiperOrigin-RevId: 210253451
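
Not part of the commit itself: a minimal TF-1.x usage sketch of the newly exposed symbol. It assumes RegAdagradOptimizer keeps the AdagradOptimizer constructor signature and that the per-loss accumulator control is the avoid_updating_slots() context manager defined in reg_adagrad_optimizer.py; treat both as assumptions about the contrib API, not documented behavior.

    import tensorflow as tf

    x = tf.Variable([1.0, 2.0])
    main_loss = tf.reduce_sum(tf.square(x))      # should update the Adagrad accumulator
    reg_loss = 0.01 * tf.reduce_sum(tf.abs(x))   # regularizer; accumulator left alone

    opt = tf.contrib.opt.RegAdagradOptimizer(learning_rate=0.1)

    # Ordinary step: applies the gradient and updates the accumulator slot.
    train_main = opt.minimize(main_loss)

    # Assumed API: ops built inside avoid_updating_slots() still apply the
    # gradient but skip the accumulator update, so the regularizer does not
    # decay the effective learning rate.
    with opt.avoid_updating_slots():
      train_reg = opt.minimize(reg_loss)

    with tf.Session() as sess:
      sess.run(tf.global_variables_initializer())
      sess.run([train_main, train_reg])
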
Diffstat (limited to 'tensorflow/contrib/opt')
-rw-r--r--  tensorflow/contrib/opt/__init__.py | 2 ++
1 file changed, 2 insertions(+), 0 deletions(-)
diff --git a/tensorflow/contrib/opt/__init__.py b/tensorflow/contrib/opt/__init__.py
index 781621dba0..ad7d7cfa6e 100644
--- a/tensorflow/contrib/opt/__init__.py
+++ b/tensorflow/contrib/opt/__init__.py
@@ -31,6 +31,7 @@ from tensorflow.contrib.opt.python.training.model_average_optimizer import *
from tensorflow.contrib.opt.python.training.moving_average_optimizer import *
from tensorflow.contrib.opt.python.training.multitask_optimizer_wrapper import *
from tensorflow.contrib.opt.python.training.nadam_optimizer import *
+from tensorflow.contrib.opt.python.training.reg_adagrad_optimizer import *
from tensorflow.contrib.opt.python.training.shampoo import *
from tensorflow.contrib.opt.python.training.weight_decay_optimizers import *
from tensorflow.contrib.opt.python.training.powersign import *
@@ -65,6 +66,7 @@ _allowed_symbols = [
'ModelAverageCustomGetter',
'GGTOptimizer',
'ShampooOptimizer',
+ 'RegAdagradOptimizer',
]
remove_undocumented(__name__, _allowed_symbols)