aboutsummaryrefslogtreecommitdiffhomepage
path: root/tensorflow/contrib/layers
diff options
context:
space:
mode:
authorGravatar Martin Wicke <577277+martinwicke@users.noreply.github.com>2018-04-19 17:11:18 -0700
committerGravatar GitHub <noreply@github.com>2018-04-19 17:11:18 -0700
commit904a7426de2759a6f0b5c6c6d024483a3baae71d (patch)
treec1dad2738ef1ae91a441a02eb523a0cda74a4c46 /tensorflow/contrib/layers
parente12f2a4f3f9b45efe102da596f31f948ac81ef99 (diff)
parent3bb161433069ea5012f1f5be97fbbd8d0784213d (diff)
Merge pull request #18582 from ntenenz/master
Remove conditional scope logic now that "current_arg_scope" exists in contrib
Diffstat (limited to 'tensorflow/contrib/layers')
-rw-r--r--tensorflow/contrib/layers/python/layers/rev_block_lib.py6
1 file changed, 1 insertion, 5 deletions
diff --git a/tensorflow/contrib/layers/python/layers/rev_block_lib.py b/tensorflow/contrib/layers/python/layers/rev_block_lib.py
index 02d294c68f..c4fa3392ef 100644
--- a/tensorflow/contrib/layers/python/layers/rev_block_lib.py
+++ b/tensorflow/contrib/layers/python/layers/rev_block_lib.py
@@ -504,11 +504,7 @@ def _recompute_grad(fn, args, use_data_dep=_USE_DEFAULT, tupleize_grads=False):
@_fn_with_custom_grad(grad_fn)
def fn_with_recompute(*args):
cached_vs.append(variable_scope.get_variable_scope())
- # TODO(rsepassi): Rm conditional in TF 1.4
- if hasattr(contrib_framework_ops, "current_arg_scope"):
- cached_arg_scope.append(contrib_framework_ops.current_arg_scope())
- else:
- cached_arg_scope.append({})
+ cached_arg_scope.append(contrib_framework_ops.current_arg_scope())
return fn(*args)
return fn_with_recompute(*args)