path: root/tensorflow/python/training/learning_rate_decay.py
author    A. Unique TensorFlower <gardener@tensorflow.org>  2016-07-31 22:07:30 -0800
committer TensorFlower Gardener <gardener@tensorflow.org>   2016-07-31 23:17:46 -0700
commit  abe9ab326625105adb3c9d46c027931aec947d1f (patch)
tree    d9fa7eb9a2fd9b37bc87f98cf353354391b9eb04 /tensorflow/python/training/learning_rate_decay.py
parent  c0637048dbc099eac1f75878b765220cd02ccfc0 (diff)
Merge changes from github.
Change: 128958134
Diffstat (limited to 'tensorflow/python/training/learning_rate_decay.py')
-rw-r--r--  tensorflow/python/training/learning_rate_decay.py | 8
1 file changed, 4 insertions(+), 4 deletions(-)
diff --git a/tensorflow/python/training/learning_rate_decay.py b/tensorflow/python/training/learning_rate_decay.py
index f24f1f4a08..ef369e9095 100644
--- a/tensorflow/python/training/learning_rate_decay.py
+++ b/tensorflow/python/training/learning_rate_decay.py
@@ -54,7 +54,7 @@ def exponential_decay(learning_rate, global_step, decay_steps, decay_rate,
                                              100000, 0.96, staircase=True)
   # Passing global_step to minimize() will increment it at each step.
   learning_step = (
-      tf.GradientDescentOptimizer(learning_rate)
+      tf.train.GradientDescentOptimizer(learning_rate)
       .minimize(...my loss..., global_step=global_step)
   )
   ```
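The fix qualifies the optimizer with its module path: in the TF 1.x API, GradientDescentOptimizer lives under tf.train, so the bare tf.GradientDescentOptimizer in the old docstring examples would fail with an AttributeError. As a sanity check, here is a minimal self-contained sketch of the corrected exponential_decay example; the weights and loss lines are hypothetical stand-ins for the docstring's `...my loss...`:

```python
import tensorflow as tf  # TF 1.x API, matching this 2016 snapshot

global_step = tf.Variable(0, trainable=False)
starter_learning_rate = 0.1
# Multiply the rate by 0.96 every 100000 steps; staircase=True decays in
# discrete intervals instead of continuously.
learning_rate = tf.train.exponential_decay(starter_learning_rate, global_step,
                                           100000, 0.96, staircase=True)
weights = tf.Variable([1.0, 2.0])          # hypothetical trainable variable
loss = tf.reduce_mean(tf.square(weights))  # hypothetical stand-in loss
# Passing global_step to minimize() increments it at each step.
learning_step = (
    tf.train.GradientDescentOptimizer(learning_rate)
    .minimize(loss, global_step=global_step)
)
```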
@@ -195,7 +195,7 @@ def polynomial_decay(learning_rate, global_step, decay_steps,
                                             power=0.5)
   # Passing global_step to minimize() will increment it at each step.
   learning_step = (
-      tf.GradientDescentOptimizer(learning_rate)
+      tf.train.GradientDescentOptimizer(learning_rate)
       .minimize(...my loss..., global_step=global_step)
   )
   ```
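polynomial_decay slots into the same optimizer chain; a sketch of just the decay call, reusing the global_step variable from the sketch above, with hyperparameter values assumed for illustration:

```python
# Anneal from 0.1 down to 0.01 over 10000 steps along a sqrt curve:
# decayed = (0.1 - 0.01) * (1 - step / 10000) ** 0.5 + 0.01
learning_rate = tf.train.polynomial_decay(0.1, global_step, 10000,
                                          end_learning_rate=0.01, power=0.5)
```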
@@ -268,7 +268,7 @@ def natural_exp_decay(learning_rate, global_step, decay_steps, decay_rate,
   # Passing global_step to minimize() will increment it at each step.
   learning_step = (
-      tf.GradientDescentOptimizer(learning_rate)
+      tf.train.GradientDescentOptimizer(learning_rate)
       .minimize(...my loss..., global_step=global_step)
   )
   ```
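Likewise for natural_exp_decay, which applies continuous exponential decay; values again assumed for illustration:

```python
# decayed = 0.1 * exp(-0.5 * step / 10000)
learning_rate = tf.train.natural_exp_decay(0.1, global_step, 10000,
                                           decay_rate=0.5)
```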
@@ -327,7 +327,7 @@ def inverse_time_decay(learning_rate, global_step, decay_steps, decay_rate,
   # Passing global_step to minimize() will increment it at each step.
   learning_step = (
-      tf.GradientDescentOptimizer(learning_rate)
+      tf.train.GradientDescentOptimizer(learning_rate)
       .minimize(...my loss..., global_step=global_step)
   )
   ```
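And the last touched docstring, inverse_time_decay, follows the same shape; values assumed for illustration:

```python
# decayed = 0.1 / (1 + 0.5 * step / 10000)
learning_rate = tf.train.inverse_time_decay(0.1, global_step, 10000,
                                            decay_rate=0.5)
```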