path: root/tensorflow/python/training/adam_test.py
author    Ali Yahya <alive@google.com>    2017-10-12 17:45:50 -0700
committer TensorFlower Gardener <gardener@tensorflow.org>    2017-10-12 17:50:14 -0700
commit    dec1c9296f72e146423d5cb2fffed1c65ef4e8d6 (patch)
tree      c6cfa4027247757f8ffb248048d690fba18e1f7b /tensorflow/python/training/adam_test.py
parent    915a8ac568f0a67d6000ab70a665817deff7888c (diff)
TFE: Adds a destructor to ResourceVariables in Python that destroys the underlying resource. This makes the lifetime of the underlying resource match that of its corresponding Python object.
PiperOrigin-RevId: 172039259
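
For readers unfamiliar with the pattern the commit message describes, the sketch below illustrates the general idea of tying a native resource's lifetime to its Python wrapper via __del__. It is a minimal, hypothetical illustration, not TensorFlow's actual implementation; allocate_fn and destroy_fn are placeholders for whatever the runtime provides to create and destroy the resource.

    # Minimal sketch (hypothetical, not the actual TensorFlow code): the wrapper
    # owns a native resource and releases it when the Python object goes away,
    # so the two lifetimes match.
    class ScopedResource(object):
      """Owns an underlying resource for as long as the Python object lives."""

      def __init__(self, allocate_fn, destroy_fn):
        # allocate_fn() returns a handle to a freshly created resource;
        # destroy_fn(handle) releases it. Both are stand-ins here.
        self._destroy_fn = destroy_fn
        self._handle = allocate_fn()

      def __del__(self):
        # Runs when the object is garbage collected; destroying the handle here
        # keeps the resource from outliving its Python wrapper.
        if getattr(self, "_handle", None) is not None:
          self._destroy_fn(self._handle)
          self._handle = None

    # Example: the resource lives exactly as long as `r` does.
    r = ScopedResource(lambda: object(), lambda handle: None)
    del r  # the underlying resource is destroyed here (or when collected)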
Diffstat (limited to 'tensorflow/python/training/adam_test.py')
-rw-r--r--  tensorflow/python/training/adam_test.py | 82
1 file changed, 42 insertions(+), 40 deletions(-)
diff --git a/tensorflow/python/training/adam_test.py b/tensorflow/python/training/adam_test.py
index defcf33714..96de9b921b 100644
--- a/tensorflow/python/training/adam_test.py
+++ b/tensorflow/python/training/adam_test.py
@@ -29,6 +29,7 @@ from tensorflow.python.framework import test_util
 from tensorflow.python.ops import array_ops
 from tensorflow.python.ops import math_ops
 from tensorflow.python.ops import resource_variable_ops
+from tensorflow.python.ops import variable_scope
 from tensorflow.python.ops import variables
 from tensorflow.python.platform import test
 from tensorflow.python.training import adam
@@ -152,53 +153,54 @@ class AdamOptimizerTest(test.TestCase):
 
   def doTestBasic(self, use_resource=False):
     for i, dtype in enumerate([dtypes.half, dtypes.float32, dtypes.float64]):
-      # Initialize variables for numpy implementation.
-      m0, v0, m1, v1 = 0.0, 0.0, 0.0, 0.0
-      var0_np = np.array([1.0, 2.0], dtype=dtype.as_numpy_dtype)
-      grads0_np = np.array([0.1, 0.1], dtype=dtype.as_numpy_dtype)
-      var1_np = np.array([3.0, 4.0], dtype=dtype.as_numpy_dtype)
-      grads1_np = np.array([0.01, 0.01], dtype=dtype.as_numpy_dtype)
-
-      if use_resource:
-        var0 = resource_variable_ops.ResourceVariable(
-            var0_np, name="var0_%d" % i)
-        var1 = resource_variable_ops.ResourceVariable(
-            var1_np, name="var1_%d" % i)
-      else:
-        var0 = variables.Variable(var0_np)
-        var1 = variables.Variable(var1_np)
-      grads0 = constant_op.constant(grads0_np)
-      grads1 = constant_op.constant(grads1_np)
-
-      opt = adam.AdamOptimizer()
-      update = opt.apply_gradients(zip([grads0, grads1], [var0, var1]))
+      with variable_scope.variable_scope("%d" % i):
+        # Initialize variables for numpy implementation.
+        m0, v0, m1, v1 = 0.0, 0.0, 0.0, 0.0
+        var0_np = np.array([1.0, 2.0], dtype=dtype.as_numpy_dtype)
+        grads0_np = np.array([0.1, 0.1], dtype=dtype.as_numpy_dtype)
+        var1_np = np.array([3.0, 4.0], dtype=dtype.as_numpy_dtype)
+        grads1_np = np.array([0.01, 0.01], dtype=dtype.as_numpy_dtype)
 
-      if context.in_graph_mode():
-        self.evaluate(variables.global_variables_initializer())
-        # Fetch params to validate initial values
-        self.assertAllClose([1.0, 2.0], self.evaluate(var0))
-        self.assertAllClose([3.0, 4.0], self.evaluate(var1))
+        if use_resource:
+          var0 = resource_variable_ops.ResourceVariable(
+              var0_np, name="var0_%d" % i)
+          var1 = resource_variable_ops.ResourceVariable(
+              var1_np, name="var1_%d" % i)
+        else:
+          var0 = variables.Variable(var0_np)
+          var1 = variables.Variable(var1_np)
+        grads0 = constant_op.constant(grads0_np)
+        grads1 = constant_op.constant(grads1_np)
 
-      beta1_power, beta2_power = opt._get_beta_accumulators()
+        opt = adam.AdamOptimizer()
+        update = opt.apply_gradients(zip([grads0, grads1], [var0, var1]))
 
-      # Run 3 steps of Adam
-      for t in range(1, 4):
         if context.in_graph_mode():
-          self.evaluate(update)
-        elif t > 1:
-          opt.apply_gradients(zip([grads0, grads1], [var0, var1]))
+          self.evaluate(variables.global_variables_initializer())
+          # Fetch params to validate initial values
+          self.assertAllClose([1.0, 2.0], self.evaluate(var0))
+          self.assertAllClose([3.0, 4.0], self.evaluate(var1))
 
-        self.assertAllCloseAccordingToType(0.9**(t + 1),
-                                           self.evaluate(beta1_power))
-        self.assertAllCloseAccordingToType(0.999**(t + 1),
-                                           self.evaluate(beta2_power))
+        beta1_power, beta2_power = opt._get_beta_accumulators()
 
-        var0_np, m0, v0 = adam_update_numpy(var0_np, grads0_np, t, m0, v0)
-        var1_np, m1, v1 = adam_update_numpy(var1_np, grads1_np, t, m1, v1)
+        # Run 3 steps of Adam
+        for t in range(1, 4):
+          if context.in_graph_mode():
+            self.evaluate(update)
+          elif t > 1:
+            opt.apply_gradients(zip([grads0, grads1], [var0, var1]))
+
+          self.assertAllCloseAccordingToType(0.9**(t + 1),
+                                             self.evaluate(beta1_power))
+          self.assertAllCloseAccordingToType(0.999**(t + 1),
+                                             self.evaluate(beta2_power))
+
+          var0_np, m0, v0 = adam_update_numpy(var0_np, grads0_np, t, m0, v0)
+          var1_np, m1, v1 = adam_update_numpy(var1_np, grads1_np, t, m1, v1)
 
-        # Validate updated params
-        self.assertAllCloseAccordingToType(var0_np, self.evaluate(var0))
-        self.assertAllCloseAccordingToType(var1_np, self.evaluate(var1))
+          # Validate updated params
+          self.assertAllCloseAccordingToType(var0_np, self.evaluate(var0))
+          self.assertAllCloseAccordingToType(var1_np, self.evaluate(var1))
 
   def testBasic(self):
     with self.test_session():
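
A note on the test change above: wrapping each dtype iteration in variable_scope("%d" % i) gives every pass its own name prefix, so the variables created on each pass (and any state the optimizer creates for them) get distinct names. The standalone sketch below, plain Python with hypothetical names and no TensorFlow, shows the same per-iteration namespacing pattern, with a simple dict standing in for a graph's variable store.

    # Hypothetical illustration of per-iteration name scoping; the registry is a
    # stand-in for a variable store that rejects duplicate names.
    registry = {}

    def create_named(name, value):
      # Mirrors creating a named variable: a duplicate name is an error unless
      # reuse is explicitly requested.
      if name in registry:
        raise ValueError("name already exists: %s" % name)
      registry[name] = value
      return value

    for i, dtype in enumerate(["half", "float32", "float64"]):
      scope = "%d" % i  # per-iteration prefix, like variable_scope("%d" % i)
      create_named("%s/var0" % scope, [1.0, 2.0])
      create_named("%s/var1" % scope, [3.0, 4.0])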