diff options
author | A. Unique TensorFlower <gardener@tensorflow.org> | 2016-07-18 14:24:56 -0800 |
---|---|---|
committer | TensorFlower Gardener <gardener@tensorflow.org> | 2016-07-18 15:33:24 -0700 |
commit | ffd998d900532035a2cf1e6d576c844cf2458880 (patch) | |
tree | 065a808f57d8bf9da8ee1d7a32d29feed062f918 | |
parent | a33ebe0bfc0c6d0c617d48a71f6f5136be881041 (diff) |
optimizers: fix a bug involving optimize_loss and unconnected variables
* if a variable does not have a gradient, its gradient is None as returned by tf.gradients
* this would cause an error in _add_scaled_noise_to_gradients (calling gradient.get_shape())
Change: 127765890
-rw-r--r-- | tensorflow/contrib/layers/python/layers/optimizers.py | 6 | ||||
-rw-r--r-- | tensorflow/contrib/layers/python/layers/optimizers_test.py | 5 |
2 files changed, 9 insertions, 2 deletions
diff --git a/tensorflow/contrib/layers/python/layers/optimizers.py b/tensorflow/contrib/layers/python/layers/optimizers.py
index 554c2185b4..80fe56b9da 100644
--- a/tensorflow/contrib/layers/python/layers/optimizers.py
+++ b/tensorflow/contrib/layers/python/layers/optimizers.py
@@ -209,6 +209,9 @@ def _add_scaled_noise_to_gradients(grads_and_vars, gradient_noise_scale):
   gradients, variables = zip(*grads_and_vars)
   noisy_gradients = []
   for gradient in gradients:
+    if gradient is None:
+      noisy_gradients.append(None)
+      continue
     if isinstance(gradient, ops.IndexedSlices):
       gradient_shape = gradient.dense_shape
     else:
@@ -222,7 +225,8 @@ def _multiply_gradients(grads_and_vars, gradient_multipliers):
   """Multiply specified gradients."""
   multiplied_grads_and_vars = []
   for grad, var in grads_and_vars:
-    if var in gradient_multipliers or var.name in gradient_multipliers:
+    if (grad is not None and
+        (var in gradient_multipliers or var.name in gradient_multipliers)):
       key = var if var in gradient_multipliers else var.name
       grad *= constant_op.constant(
           gradient_multipliers[key], dtype=dtypes.float32)
diff --git a/tensorflow/contrib/layers/python/layers/optimizers_test.py b/tensorflow/contrib/layers/python/layers/optimizers_test.py
index 6814ccad67..a38a8fe90b 100644
--- a/tensorflow/contrib/layers/python/layers/optimizers_test.py
+++ b/tensorflow/contrib/layers/python/layers/optimizers_test.py
@@ -130,7 +130,10 @@ class OptimizersTest(tf.test.TestCase):
       unused_variable = tf.get_variable("ignore_me", [])
       tf.contrib.layers.optimize_loss(
-          loss, global_step, learning_rate=0.1, optimizer="SGD")
+          loss, global_step, learning_rate=0.1, optimizer="SGD",
+          gradient_noise_scale=10.0,
+          gradient_multipliers={unused_variable: 1.},
+          clip_gradients=10.0)

   def testUpdateOp(self):
     optimizers = ["SGD", tf.train.GradientDescentOptimizer,