about | summary | refs | log | tree | commit | diff | homepage
diff options
context:
space:
mode:
authorGravatar A. Unique TensorFlower <gardener@tensorflow.org>2018-09-05 16:22:12 -0700
committerGravatar TensorFlower Gardener <gardener@tensorflow.org>2018-09-05 16:28:02 -0700
commit0a3036e9865672229619d1e673a8bf64a2c723d1 (patch)
tree690c393b6e102dae0c1634aac71348d203a0add0
parentdf7930083b73b91959420dc2f92463befbac5af4 (diff)
Re-added proto field for dynamic learning rate support (not usable yet).
PiperOrigin-RevId: 211719009
-rw-r--r--  tensorflow/contrib/tpu/proto/optimization_parameters.proto | 8
1 file changed, 3 insertions(+), 5 deletions(-)
diff --git a/tensorflow/contrib/tpu/proto/optimization_parameters.proto b/tensorflow/contrib/tpu/proto/optimization_parameters.proto
index cbf6809257..fc1320501b 100644
--- a/tensorflow/contrib/tpu/proto/optimization_parameters.proto
+++ b/tensorflow/contrib/tpu/proto/optimization_parameters.proto
@@ -9,8 +9,8 @@ message ClippingLimits {
google.protobuf.FloatValue upper = 2; // +inf if not set
}
-// Get the learning rate from a <yet to be determined> source that can change
-// dynamically.
+// Get the learning rate from the parameters of the SendTPUEmbeddingGradients
+// op.
message DynamicLearningRate {
}
@@ -18,10 +18,8 @@ message DynamicLearningRate {
message LearningRate {
oneof learning_rate {
float constant = 1;
- // DynamicLearningRate dynamic = 2; -- disabled while code is being
- // rewritten.
+ DynamicLearningRate dynamic = 2;
}
- reserved 2;
}
message AdagradParameters {