author    | 2017-10-12 17:45:50 -0700
committer | 2017-10-12 17:50:14 -0700
commit    | dec1c9296f72e146423d5cb2fffed1c65ef4e8d6
tree      | c6cfa4027247757f8ffb248048d690fba18e1f7b /tensorflow/python
parent    | 915a8ac568f0a67d6000ab70a665817deff7888c
TFE: Adds a destructor to ResourceVariables in Python that destroys the underlying resource. This makes the lifetime of the underlying resource match that of its corresponding Python object.
PiperOrigin-RevId: 172039259
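
In eager execution this means that dropping the last Python reference to a
ResourceVariable now also frees its backing resource. A minimal sketch of the
resulting behavior, using the same internal tensorflow.python modules this
commit touches (the error text mirrors the new testDestruction below):

    from tensorflow.python.eager import context
    from tensorflow.python.ops import resource_variable_ops

    with context.eager_mode():
      var = resource_variable_ops.ResourceVariable(initial_value=1.0,
                                                   name="var8")
      handle = var._handle  # handle to the underlying resource
      del var               # the new __del__ destroys the resource
      # The resource is now gone; a second destroy with
      # ignore_lookup_error=False raises NotFoundError
      # ("Resource .../var8/... does not exist."):
      resource_variable_ops.destroy_resource_op(handle,
                                                ignore_lookup_error=False)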
Diffstat (limited to 'tensorflow/python')
-rw-r--r-- | tensorflow/python/BUILD                                      |  1
-rw-r--r-- | tensorflow/python/kernel_tests/resource_variable_ops_test.py | 10
-rw-r--r-- | tensorflow/python/ops/resource_variable_ops.py               |  5
-rw-r--r-- | tensorflow/python/training/adam_test.py                      | 82
-rw-r--r-- | tensorflow/python/training/saver_test.py                     | 97
5 files changed, 107 insertions, 88 deletions
diff --git a/tensorflow/python/BUILD b/tensorflow/python/BUILD
index ac16ca1830..9582fda88f 100644
--- a/tensorflow/python/BUILD
+++ b/tensorflow/python/BUILD
@@ -3396,6 +3396,7 @@ cuda_py_test(
         ":training",
         ":platform_test",
         ":client_testlib",
+        ":variable_scope",
         "//third_party/py/numpy",
     ],
 )
diff --git a/tensorflow/python/kernel_tests/resource_variable_ops_test.py b/tensorflow/python/kernel_tests/resource_variable_ops_test.py
index 8cf8286ed1..6f2bc2f752 100644
--- a/tensorflow/python/kernel_tests/resource_variable_ops_test.py
+++ b/tensorflow/python/kernel_tests/resource_variable_ops_test.py
@@ -422,6 +422,16 @@ class ResourceVariableOpsTest(test_util.TensorFlowTestCase):
     self.assertEqual(1, v1.read_value().numpy())
     self.assertEqual(2, v2.read_value().numpy())
 
+  def testDestruction(self):
+    with context.eager_mode():
+      var = resource_variable_ops.ResourceVariable(initial_value=1.0,
+                                                   name="var8")
+      var.__del__()
+      with self.assertRaisesRegexp(errors.NotFoundError,
+                                   r"Resource .*\/var8\/.* does not exist."):
+        resource_variable_ops.destroy_resource_op(var._handle,
+                                                  ignore_lookup_error=False)
+
 
 if __name__ == "__main__":
   test.main()
diff --git a/tensorflow/python/ops/resource_variable_ops.py b/tensorflow/python/ops/resource_variable_ops.py
index cbfa141256..99ff02873b 100644
--- a/tensorflow/python/ops/resource_variable_ops.py
+++ b/tensorflow/python/ops/resource_variable_ops.py
@@ -427,6 +427,11 @@ class ResourceVariable(variables.Variable):
       self._constraint = None
     # LINT.ThenChange(//tensorflow/python/eager/graph_callable.py)
 
+  def __del__(self):
+    if context.in_eager_mode():
+      gen_resource_variable_ops.destroy_resource_op(self._handle,
+                                                    ignore_lookup_error=False)
+
   @property
   def dtype(self):
     """The dtype of this variable."""
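
Note that the destructor added in resource_variable_ops.py is deliberately a
no-op during graph construction: a graph-mode handle is symbolic, and there is
no live resource to tear down until a session runs. Restated outside the class
for clarity (_destroy_if_eager is a hypothetical helper; context and
gen_resource_variable_ops are modules that file already imports):

    from tensorflow.python.eager import context
    from tensorflow.python.ops import gen_resource_variable_ops

    def _destroy_if_eager(var):
      # Same guard as the new ResourceVariable.__del__: only an eager
      # handle owns a live resource that needs explicit teardown.
      if context.in_eager_mode():
        gen_resource_variable_ops.destroy_resource_op(
            var._handle, ignore_lookup_error=False)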
diff --git a/tensorflow/python/training/adam_test.py b/tensorflow/python/training/adam_test.py
index defcf33714..96de9b921b 100644
--- a/tensorflow/python/training/adam_test.py
+++ b/tensorflow/python/training/adam_test.py
@@ -29,6 +29,7 @@ from tensorflow.python.framework import test_util
 from tensorflow.python.ops import array_ops
 from tensorflow.python.ops import math_ops
 from tensorflow.python.ops import resource_variable_ops
+from tensorflow.python.ops import variable_scope
 from tensorflow.python.ops import variables
 from tensorflow.python.platform import test
 from tensorflow.python.training import adam
@@ -152,53 +153,54 @@ class AdamOptimizerTest(test.TestCase):
 
   def doTestBasic(self, use_resource=False):
     for i, dtype in enumerate([dtypes.half, dtypes.float32, dtypes.float64]):
-      # Initialize variables for numpy implementation.
-      m0, v0, m1, v1 = 0.0, 0.0, 0.0, 0.0
-      var0_np = np.array([1.0, 2.0], dtype=dtype.as_numpy_dtype)
-      grads0_np = np.array([0.1, 0.1], dtype=dtype.as_numpy_dtype)
-      var1_np = np.array([3.0, 4.0], dtype=dtype.as_numpy_dtype)
-      grads1_np = np.array([0.01, 0.01], dtype=dtype.as_numpy_dtype)
-
-      if use_resource:
-        var0 = resource_variable_ops.ResourceVariable(
-            var0_np, name="var0_%d" % i)
-        var1 = resource_variable_ops.ResourceVariable(
-            var1_np, name="var1_%d" % i)
-      else:
-        var0 = variables.Variable(var0_np)
-        var1 = variables.Variable(var1_np)
-      grads0 = constant_op.constant(grads0_np)
-      grads1 = constant_op.constant(grads1_np)
-
-      opt = adam.AdamOptimizer()
-      update = opt.apply_gradients(zip([grads0, grads1], [var0, var1]))
+      with variable_scope.variable_scope("%d" % i):
+        # Initialize variables for numpy implementation.
+        m0, v0, m1, v1 = 0.0, 0.0, 0.0, 0.0
+        var0_np = np.array([1.0, 2.0], dtype=dtype.as_numpy_dtype)
+        grads0_np = np.array([0.1, 0.1], dtype=dtype.as_numpy_dtype)
+        var1_np = np.array([3.0, 4.0], dtype=dtype.as_numpy_dtype)
+        grads1_np = np.array([0.01, 0.01], dtype=dtype.as_numpy_dtype)
 
-      if context.in_graph_mode():
-        self.evaluate(variables.global_variables_initializer())
-        # Fetch params to validate initial values
-        self.assertAllClose([1.0, 2.0], self.evaluate(var0))
-        self.assertAllClose([3.0, 4.0], self.evaluate(var1))
+        if use_resource:
+          var0 = resource_variable_ops.ResourceVariable(
+              var0_np, name="var0_%d" % i)
+          var1 = resource_variable_ops.ResourceVariable(
+              var1_np, name="var1_%d" % i)
+        else:
+          var0 = variables.Variable(var0_np)
+          var1 = variables.Variable(var1_np)
+        grads0 = constant_op.constant(grads0_np)
+        grads1 = constant_op.constant(grads1_np)
 
-      beta1_power, beta2_power = opt._get_beta_accumulators()
+        opt = adam.AdamOptimizer()
+        update = opt.apply_gradients(zip([grads0, grads1], [var0, var1]))
 
-      # Run 3 steps of Adam
-      for t in range(1, 4):
         if context.in_graph_mode():
-          self.evaluate(update)
-        elif t > 1:
-          opt.apply_gradients(zip([grads0, grads1], [var0, var1]))
+          self.evaluate(variables.global_variables_initializer())
+          # Fetch params to validate initial values
+          self.assertAllClose([1.0, 2.0], self.evaluate(var0))
+          self.assertAllClose([3.0, 4.0], self.evaluate(var1))
 
-        self.assertAllCloseAccordingToType(0.9**(t + 1),
-                                           self.evaluate(beta1_power))
-        self.assertAllCloseAccordingToType(0.999**(t + 1),
-                                           self.evaluate(beta2_power))
+        beta1_power, beta2_power = opt._get_beta_accumulators()
 
-        var0_np, m0, v0 = adam_update_numpy(var0_np, grads0_np, t, m0, v0)
-        var1_np, m1, v1 = adam_update_numpy(var1_np, grads1_np, t, m1, v1)
+        # Run 3 steps of Adam
+        for t in range(1, 4):
+          if context.in_graph_mode():
+            self.evaluate(update)
+          elif t > 1:
+            opt.apply_gradients(zip([grads0, grads1], [var0, var1]))
+
+          self.assertAllCloseAccordingToType(0.9**(t + 1),
+                                             self.evaluate(beta1_power))
+          self.assertAllCloseAccordingToType(0.999**(t + 1),
+                                             self.evaluate(beta2_power))
+
+          var0_np, m0, v0 = adam_update_numpy(var0_np, grads0_np, t, m0, v0)
+          var1_np, m1, v1 = adam_update_numpy(var1_np, grads1_np, t, m1, v1)
 
-        # Validate updated params
-        self.assertAllCloseAccordingToType(var0_np, self.evaluate(var0))
-        self.assertAllCloseAccordingToType(var1_np, self.evaluate(var1))
+          # Validate updated params
+          self.assertAllCloseAccordingToType(var0_np, self.evaluate(var0))
+          self.assertAllCloseAccordingToType(var1_np, self.evaluate(var1))
 
   def testBasic(self):
     with self.test_session():
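
Most of the adam_test.py churn above is re-indentation: the body of the dtype
loop moves under a per-iteration variable scope. Presumably this keeps
resources created on successive iterations (including the optimizer's
beta-power accumulators, whose names carry no iteration suffix) from colliding
now that eager-mode resources are destroyed along with their Python objects. A
minimal sketch of the naming effect, with illustrative names:

    from tensorflow.python.ops import variable_scope
    from tensorflow.python.ops import variables

    for i in range(3):
      with variable_scope.variable_scope("%d" % i):
        # Each iteration's ops are created under a distinct prefix, so
        # this variable is named "0/v", "1/v", "2/v" rather than three
        # iterations contending for the single name "v".
        v = variables.Variable(1.0, name="v")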
diff --git a/tensorflow/python/training/saver_test.py b/tensorflow/python/training/saver_test.py
index 07cd67a4b9..a8eb8e5fcf 100644
--- a/tensorflow/python/training/saver_test.py
+++ b/tensorflow/python/training/saver_test.py
@@ -110,32 +110,32 @@ class SaverTest(test.TestCase):
     # Start a second session. In that session the parameter nodes
     # have not been initialized either.
     with self.test_session(graph=ops_lib.Graph()) as sess:
-      v0 = variable_op(-1.0, name="v0")
-      v1 = variable_op(-1.0, name="v1")
-      v2 = saver_test_utils.CheckpointedOp(name="v2")
+      v0_2 = variable_op(-1.0, name="v0")
+      v1_2 = variable_op(-1.0, name="v1")
+      v2_2 = saver_test_utils.CheckpointedOp(name="v2")
 
       # Assert that the variables are not initialized.
       if context.in_graph_mode():
         self.assertEqual(
             len(variables.report_uninitialized_variables().eval()), 2)
-        self.assertEqual(0, len(v2.keys().eval()))
-        self.assertEqual(0, len(v2.values().eval()))
+        self.assertEqual(0, len(v2_2.keys().eval()))
+        self.assertEqual(0, len(v2_2.values().eval()))
 
       # Restore the saved values in the parameter nodes.
-      save = saver_module.Saver({"v0": v0, "v1": v1, "v2": v2.saveable})
+      save = saver_module.Saver({"v0": v0_2, "v1": v1_2, "v2": v2_2.saveable})
       save.restore(sess, save_path)
       # Check that the parameter nodes have been restored.
-      self.assertEqual(10.0, self.evaluate(v0))
-      self.assertEqual(20.0, self.evaluate(v1))
-      self.assertEqual(b"k1", self.evaluate(v2.keys()))
-      self.assertEqual(30.0, self.evaluate(v2.values()))
+      self.assertEqual(10.0, self.evaluate(v0_2))
+      self.assertEqual(20.0, self.evaluate(v1_2))
+      self.assertEqual(b"k1", self.evaluate(v2_2.keys()))
+      self.assertEqual(30.0, self.evaluate(v2_2.values()))
 
     # Build another graph with 2 nodes, initialized
     # differently, and a Restore node for them.
     with self.test_session(graph=ops_lib.Graph()) as sess:
-      v0_2 = variable_op(1000.0, name="v0")
-      v1_2 = variable_op(2000.0, name="v1")
-      v2_2 = saver_test_utils.CheckpointedOp(name="v2")
-      v2_init = v2_2.insert("k1000", 3000.0)
+      v0_3 = variable_op(1000.0, name="v0")
+      v1_3 = variable_op(2000.0, name="v1")
+      v2_3 = saver_test_utils.CheckpointedOp(name="v2")
+      v2_init = v2_3.insert("k1000", 3000.0)
 
       # Check that the parameter nodes have been initialized.
       if context.in_graph_mode():
@@ -143,19 +143,19 @@ class SaverTest(test.TestCase):
         self.evaluate(init_all_op)
         # TODO(xpan): Why _mutable_hash_table_v2 doesn't create empty
         # table as it claims in eager mode?
-      self.assertEqual(b"k1000", self.evaluate(v2_2.keys()))
-      self.assertEqual(3000.0, self.evaluate(v2_2.values()))
-      self.assertEqual(1000.0, self.evaluate(v0_2))
-      self.assertEqual(2000.0, self.evaluate(v1_2))
+      self.assertEqual(b"k1000", self.evaluate(v2_3.keys()))
+      self.assertEqual(3000.0, self.evaluate(v2_3.values()))
+      self.assertEqual(1000.0, self.evaluate(v0_3))
+      self.assertEqual(2000.0, self.evaluate(v1_3))
 
       # Restore the values saved earlier in the parameter nodes.
-      save2 = saver_module.Saver({"v0": v0_2, "v1": v1_2, "v2": v2_2.saveable})
+      save2 = saver_module.Saver({"v0": v0_3, "v1": v1_3, "v2": v2_3.saveable})
       save2.restore(sess, save_path)
       # Check that the parameter nodes have been restored.
-      self.assertEqual(10.0, self.evaluate(v0_2))
-      self.assertEqual(20.0, self.evaluate(v1_2))
-      self.assertEqual(b"k1", self.evaluate(v2_2.keys()))
-      self.assertEqual(30.0, self.evaluate(v2_2.values()))
+      self.assertEqual(10.0, self.evaluate(v0_3))
+      self.assertEqual(20.0, self.evaluate(v1_3))
+      self.assertEqual(b"k1", self.evaluate(v2_3.keys()))
+      self.assertEqual(30.0, self.evaluate(v2_3.values()))
 
   def testBasic(self):
     self.basicSaveRestore(variables.Variable)
@@ -487,10 +487,10 @@ class SaverTest(test.TestCase):
       val = save.save(sess, save_path)
       self.assertEqual(save_path, val)
     with self.test_session() as sess:
-      var = resource_variable_ops.ResourceVariable(other_value, name=var_name)
-      save = saver_module.Saver({var_name: var})
+      var2 = resource_variable_ops.ResourceVariable(other_value, name=var_name)
+      save = saver_module.Saver({var_name: var2})
       save.restore(sess, save_path)
-      self.assertAllClose(var_value, self.evaluate(var))
+      self.assertAllClose(var_value, self.evaluate(var2))
 
   def testCacheRereadsFile(self):
     save_path = os.path.join(self.get_temp_dir(), "cache_rereads")
@@ -618,28 +618,29 @@ class SaverTest(test.TestCase):
     global_step_int = 5
     # Save and reload one Variable named "var0".
     self._SaveAndLoad("var0", 0.0, 1.0, save_path)
-    for use_tensor in [True, False]:
-      var = resource_variable_ops.ResourceVariable(1.0, name="var0")
-      save = saver_module.Saver(
-          {
-              var._shared_name: var
-          }, pad_step_number=pad_step_number)
-      if context.in_graph_mode():
-        self.evaluate(var.initializer)
-        sess = ops_lib.get_default_session()
-      else:
-        sess = None
-      if use_tensor:
-        global_step = constant_op.constant(global_step_int)
-        val = save.save(sess, save_path, global_step=global_step)
-      else:
-        val = save.save(sess, save_path, global_step=global_step_int)
-      if pad_step_number:
-        expected_save_path = "%s-%s" % (save_path,
-                                        "{:08d}".format(global_step_int))
-      else:
-        expected_save_path = "%s-%d" % (save_path, global_step_int)
-      self.assertEqual(expected_save_path, val)
+    for i, use_tensor in enumerate([True, False]):
+      with variable_scope.variable_scope("%d" % i):
+        var = resource_variable_ops.ResourceVariable(1.0, name="var0")
+        save = saver_module.Saver(
+            {
+                var._shared_name: var
+            }, pad_step_number=pad_step_number)
+        if context.in_graph_mode():
+          self.evaluate(var.initializer)
+          sess = ops_lib.get_default_session()
+        else:
+          sess = None
+        if use_tensor:
+          global_step = constant_op.constant(global_step_int)
+          val = save.save(sess, save_path, global_step=global_step)
+        else:
+          val = save.save(sess, save_path, global_step=global_step_int)
+        if pad_step_number:
+          expected_save_path = "%s-%s" % (save_path,
+                                          "{:08d}".format(global_step_int))
+        else:
+          expected_save_path = "%s-%d" % (save_path, global_step_int)
+        self.assertEqual(expected_save_path, val)
 
   def testSaveWithGlobalStepWithPadding(self):
     self.testSaveWithGlobalStep(pad_step_number=True)
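
As a usage note, the final saver_test.py hunk pins down the checkpoint-path
convention that Saver.save follows when given a global_step, re-checked here
for both int and tensor steps (the path below is illustrative):

    # With pad_step_number=True a step of 5 is zero-padded to 8 digits;
    # without padding the integer is appended directly.
    save_path = "/tmp/ckpt/model"
    step = 5
    padded = "%s-%s" % (save_path, "{:08d}".format(step))  # /tmp/ckpt/model-00000005
    unpadded = "%s-%d" % (save_path, step)                 # /tmp/ckpt/model-5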