about summary refs log tree commit diff homepage
diff options
context:
space:
mode:
authorGravatar Tom Hennigan <tomhennigan@google.com>2018-08-30 12:15:43 -0700
committerGravatar TensorFlower Gardener <gardener@tensorflow.org>2018-08-30 12:22:31 -0700
commiteeb7a787566b9dbbf8855525f6fbbdf65b3e536e (patch)
tree7521dcf2c481ce581037fff53649c92d54307684
parentf9bd3bc45a57d218c7fe3e970c4f953da8136dc6 (diff)
Remove explicit tape watching in backprop tests.
PiperOrigin-RevId: 210956690
-rw-r--r--tensorflow/python/eager/backprop_test.py9
1 file changed, 2 insertions, 7 deletions
diff --git a/tensorflow/python/eager/backprop_test.py b/tensorflow/python/eager/backprop_test.py
index 3d3f54b9c4..45f2d0d6ac 100644
--- a/tensorflow/python/eager/backprop_test.py
+++ b/tensorflow/python/eager/backprop_test.py
@@ -23,7 +23,6 @@ import numpy as np
from tensorflow.python import pywrap_tensorflow
from tensorflow.python.eager import backprop
from tensorflow.python.eager import context
-from tensorflow.python.eager import tape
from tensorflow.python.eager import test
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
@@ -87,7 +86,6 @@ class BackpropTest(test.TestCase):
initial_value=constant_op.constant(1.0), name='x')
def fn():
- tape.watch_variable(x)
b = constant_op.constant(2.0)
c = math_ops.add(x.value(), b)
return math_ops.add(c, constant_op.constant(3.0))
@@ -194,7 +192,6 @@ class BackpropTest(test.TestCase):
initial_value=random_init, dtype=dtypes.float32, name='embedding')
def f():
- tape.watch_variable(embedding)
embedded_x = embedding_ops.embedding_lookup(embedding, x)
return constant_op.constant(1.0, dtypes.float32) - embedded_x
@@ -404,7 +401,6 @@ class BackpropTest(test.TestCase):
def f():
with context.device('gpu:0'):
- tape.watch_variable(v)
return v.read_value()
self.assertEqual(
@@ -784,7 +780,6 @@ class BackpropTest(test.TestCase):
initial_value=array_ops.constant([1.0]), name='x')
def fn():
- tape.watch_variable(x)
a = math_ops.add(x.value(), 1.0)
# Make sure convert_to_tensor works correctly with list of TensorNodes.
b = array_ops.stack([a, a], axis=0)
@@ -931,12 +926,12 @@ class BackpropTest(test.TestCase):
with ops.Graph().as_default(), self.test_session():
t = constant_op.constant(1, dtype=dtypes.float32, shape=(10, 4))
x = constant_op.constant(2, dtype=dtypes.float32, shape=(10, 4))
- with backprop.GradientTape() as gt:
+ with backprop.GradientTape() as tape:
tape.watch(x)
x1, _ = array_ops.split(x, num_or_size_splits=2, axis=1)
y1 = x1**2
y = array_ops.concat([y1, t], axis=1)
- return self.evaluate(gt.gradient(y, x))
+ return self.evaluate(tape.gradient(y, x))
grad1 = get_grad()
grad2 = get_grad()