author    A. Unique TensorFlower <gardener@tensorflow.org>  2018-10-01 15:25:27 -0700
committer TensorFlower Gardener <gardener@tensorflow.org>   2018-10-01 15:29:11 -0700
commit    cca204f12a5838f0ffdd4a80c27d451cf61d3636 (patch)
tree      0338708443f5acb4d3e1caa9111ffb1316a861e7 /tensorflow
parent    c7237e6070dbf4acd1ade5a40dc676418cbd889b (diff)
Added option (off by default) to enable a higher-performance variant of the
Adam optimizer's variable update formula.

PiperOrigin-RevId: 215290881
Diffstat (limited to 'tensorflow')
-rw-r--r--  tensorflow/contrib/tpu/proto/optimization_parameters.proto  6
1 file changed, 5 insertions(+), 1 deletion(-)
diff --git a/tensorflow/contrib/tpu/proto/optimization_parameters.proto b/tensorflow/contrib/tpu/proto/optimization_parameters.proto
index a43f45554f..8529b48c15 100644
--- a/tensorflow/contrib/tpu/proto/optimization_parameters.proto
+++ b/tensorflow/contrib/tpu/proto/optimization_parameters.proto
@@ -62,7 +62,10 @@ message FtrlParameters {
// (https://www.tensorflow.org/api_docs/python/tf/contrib/opt/AdamOptimizer). If
// use_non_lazy_adam is enabled, use_gradient_accumulation is also required in
// order to get correct results; a warning will be printed otherwise (which may
-// change to an error in the future).
+// change to an error in the future). If use_max_with_epsilon is set, the Adam
+// variable update formula will be changed from m / (sqrt(v) + epsilon) to
+// m / max(sqrt(v), abs(epsilon)); this option improves the performance of TPU
+// training and is not expected to harm model quality.
message AdamParameters {
float beta1 = 3;
float beta2 = 4;
@@ -70,6 +73,7 @@ message AdamParameters {
float initial_m = 6;
float initial_v = 7;
bool use_non_lazy_adam = 8;
+ bool use_max_with_epsilon = 9;
}
// https://www.tensorflow.org/api_docs/python/tf/train/MomentumOptimizer
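For context only (not part of the commit): a minimal NumPy sketch contrasting the standard Adam denominator with the max-with-epsilon variant described in the proto comment above. The helper name adam_step and its arguments are hypothetical and serve only to illustrate the formula change from m / (sqrt(v) + epsilon) to m / max(sqrt(v), abs(epsilon)).

import numpy as np

def adam_step(m, v, lr, epsilon, use_max_with_epsilon=False):
    # Illustrative Adam variable update (hypothetical helper, not a TF API).
    if use_max_with_epsilon:
        # Variant enabled by the new proto field: max(sqrt(v), abs(epsilon)).
        denom = np.maximum(np.sqrt(v), np.abs(epsilon))
    else:
        # Standard Adam denominator: sqrt(v) + epsilon.
        denom = np.sqrt(v) + epsilon
    return lr * m / denom

# The two variants agree closely whenever sqrt(v) dominates epsilon,
# which is why the option is not expected to harm model quality.
m, v = 0.1, 1e-4
print(adam_step(m, v, lr=0.001, epsilon=1e-8))                             # ~1e-5
print(adam_step(m, v, lr=0.001, epsilon=1e-8, use_max_with_epsilon=True))  # ~1e-5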