diff options
author | Allen Lavoie <allenl@google.com> | 2018-09-17 14:24:17 -0700 |
---|---|---|
committer | TensorFlower Gardener <gardener@tensorflow.org> | 2018-09-17 14:28:07 -0700 |
commit | 28dd4d9fcbf8cac1008b2ccd2b4be3fa3c25afd1 (patch) | |
tree | 68e901eec6d952589b5a69f3be37d7f04dac8373 /tensorflow/python/training | |
parent | 4516558acc9763999b19d1af75ab1fcd6562e4f0 (diff) |
Keep only weak references to variables in graph functions
This enables cleanup of the variables referenced in defunned methods of objects when the object is garbage collected. Since one PolymorphicFunction is created per @defun, decorated methods before this change held on to all of the variables referenced in that method for any instance of the class (i.e. variables which should have been object-scoped were scoped to the lifetime of the class definition).
Raises an exception if variables used in the function have been deleted when it is called, which means no local variables.
PiperOrigin-RevId: 213337256
Diffstat (limited to 'tensorflow/python/training')
-rw-r--r-- | tensorflow/python/training/gradient_descent_test.py | 10 |
1 file changed, 5 insertions, 5 deletions
diff --git a/tensorflow/python/training/gradient_descent_test.py b/tensorflow/python/training/gradient_descent_test.py
index 56d82a5b88..1ddea598e5 100644
--- a/tensorflow/python/training/gradient_descent_test.py
+++ b/tensorflow/python/training/gradient_descent_test.py
@@ -252,12 +252,12 @@ class GradientDescentOptimizerTest(test.TestCase):
     optimizer = gradient_descent.GradientDescentOptimizer(1.0)

     def step():
-      v = resource_variable_ops.ResourceVariable(1.0)
+      self.v = resource_variable_ops.ResourceVariable(1.0)
       with backprop.GradientTape() as tape:
-        loss = v ** 2
-      grad = tape.gradient(loss, v)
-      optimizer.apply_gradients([(grad, v)])
-      return v.read_value()
+        loss = self.v ** 2
+      grad = tape.gradient(loss, self.v)
+      optimizer.apply_gradients([(grad, self.v)])
+      return self.v.read_value()

     compiled_step = function.defun(step)