Diffstat (limited to 'tensorflow/python/eager/backprop_test.py')
-rw-r--r--  tensorflow/python/eager/backprop_test.py  12
1 file changed, 11 insertions(+), 1 deletion(-)
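
The change below adds a regression test asserting that GradientTape.gradient raises a ValueError when the target passed to it is a variable rather than a tensor computed under the tape. A minimal sketch of that behavior follows, assuming the public tf.GradientTape / tf.Variable aliases (standing in for the backprop.GradientTape / variables.Variable used in the test) and eager execution; the error message is the one the new test asserts and applies to builds around this revision.

# Minimal sketch, not part of the diff; tf.GradientTape and tf.Variable are
# assumed to be the public aliases of the classes used in the test below.
import tensorflow as tf

x = tf.Variable([3.0])

# Supported: the target is a tensor computed from a watched variable.
with tf.GradientTape() as tape:
  y = x * x
print(tape.gradient(y, x))  # d(x*x)/dx at x = 3.0 -> [6.0]

# Rejected at this revision: the target is itself a Variable, so the tape has
# recorded no computation that produces it.
z = tf.Variable([2.0])
with tf.GradientTape() as tape:
  pass  # nothing recorded on the tape
try:
  tape.gradient(x, z)
except ValueError as err:
  print(err)  # 'GradientTape.gradient is not supported for variable targets.'
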
diff --git a/tensorflow/python/eager/backprop_test.py b/tensorflow/python/eager/backprop_test.py
index 32731747b7..7e5c9f3cb6 100644
--- a/tensorflow/python/eager/backprop_test.py
+++ b/tensorflow/python/eager/backprop_test.py
@@ -548,6 +548,17 @@ class BackpropTest(test.TestCase):
     grad = g.gradient(y, [x])[0]
     self.assertEqual(self.evaluate(grad), 6.0)
 
+  @test_util.assert_no_new_tensors
+  @test_util.run_in_graph_and_eager_modes
+  def testGradientTapeCalledOnConstantTarget(self):
+    with backprop.GradientTape() as g:
+      x = variables.Variable([3.0])
+      y = variables.Variable([2.0])
+    with self.assertRaisesRegexp(
+        ValueError,
+        'GradientTape.gradient is not supported for variable targets.'):
+      g.gradient(x, y)
+
   @test_util.run_in_graph_and_eager_modes
   def testGradientTapeWithCond(self):
     x = constant_op.constant(3.0)
@@ -982,7 +993,6 @@ class BackpropTest(test.TestCase):
     self.assertIsNone(dy)
     self.assertEqual(self.evaluate(dz), 3.0)
-
   @test_util.run_in_graph_and_eager_modes
   def testDifferentiatingScalarCache(self):
     # In the following test, if x2 = x1 (i.e the objects are the exact same),