diff options
author | Yong Tang <yong.tang.github@outlook.com> | 2018-09-17 01:06:54 +0000 |
---|---|---|
committer | Yong Tang <yong.tang.github@outlook.com> | 2018-09-17 01:06:54 +0000 |
commit | 8e6599d2d7b54fe8fba37ad1cc045b62bd7e50e5 (patch) | |
tree | f104ec9a6209436798079558939acbf4742d74e0 /tensorflow/contrib/layers | |
parent | 921186571f792562fa234f7f0a7516b67e867930 (diff) |
Allow the gradient multiplier to have a dtype different from the gradient Tensor by casting it to the gradient's dtype
Signed-off-by: Yong Tang <yong.tang.github@outlook.com>
Diffstat (limited to 'tensorflow/contrib/layers')
-rw-r--r-- | tensorflow/contrib/layers/python/layers/optimizers.py | 2 |
1 file changed, 1 insertion, 1 deletion
diff --git a/tensorflow/contrib/layers/python/layers/optimizers.py b/tensorflow/contrib/layers/python/layers/optimizers.py index 2ac58597c2..d92de3b58c 100644 --- a/tensorflow/contrib/layers/python/layers/optimizers.py +++ b/tensorflow/contrib/layers/python/layers/optimizers.py @@ -438,6 +438,6 @@ def _multiply_gradients(grads_and_vars, gradient_multipliers): grad_values = grad.values * multiplier grad = ops.IndexedSlices(grad_values, grad.indices, grad.dense_shape) else: - grad *= multiplier + grad *= math_ops.cast(multiplier, grad.dtype) multiplied_grads_and_vars.append((grad, var)) return multiplied_grads_and_vars |