aboutsummaryrefslogtreecommitdiffhomepage
path: root/tensorflow/python/training/ftrl.py
diff options
context:
space:
mode:
authorGravatar A. Unique TensorFlower <gardener@tensorflow.org>2018-02-27 13:38:24 -0800
committerGravatar TensorFlower Gardener <gardener@tensorflow.org>2018-02-27 13:44:11 -0800
commit180c457563271b072b33c90bf2f2fbbea450c943 (patch)
tree3ed5dc1f76f97c0b3b75f4a7594ddc37b38c7dc8 /tensorflow/python/training/ftrl.py
parent93f5dd54dab124a9ec3b4c5dcb42d31716fe2f95 (diff)
Allow the Ftrl-proximal optimizer parameter 'initial_accumulator_value' to take zero values.
PiperOrigin-RevId: 187224701
Diffstat (limited to 'tensorflow/python/training/ftrl.py')
-rw-r--r--tensorflow/python/training/ftrl.py9
1 file changed, 5 insertions, 4 deletions
diff --git a/tensorflow/python/training/ftrl.py b/tensorflow/python/training/ftrl.py
index 9d02e694db..4fa081fab7 100644
--- a/tensorflow/python/training/ftrl.py
+++ b/tensorflow/python/training/ftrl.py
@@ -53,7 +53,7 @@ class FtrlOptimizer(optimizer.Optimizer):
learning_rate: A float value or a constant float `Tensor`.
learning_rate_power: A float value, must be less or equal to zero.
initial_accumulator_value: The starting value for accumulators.
- Only positive values are allowed.
+ Only zero or positive values are allowed.
l1_regularization_strength: A float value, must be greater than or
equal to zero.
l2_regularization_strength: A float value, must be greater than or
@@ -84,9 +84,10 @@ class FtrlOptimizer(optimizer.Optimizer):
"""
super(FtrlOptimizer, self).__init__(use_locking, name)
- if initial_accumulator_value <= 0.0:
- raise ValueError("initial_accumulator_value %f needs to be positive" %
- initial_accumulator_value)
+ if initial_accumulator_value < 0.0:
+ raise ValueError(
+          "initial_accumulator_value %f needs to be positive or zero" %
+ initial_accumulator_value)
if learning_rate_power > 0.0:
raise ValueError("learning_rate_power %f needs to be negative or zero" %
learning_rate_power)