author	Alexandre Passos <apassos@google.com>	2018-04-30 09:29:31 -0700
committer	TensorFlower Gardener <gardener@tensorflow.org>	2018-04-30 09:32:36 -0700
commit	aa2405ee79dbcfabb8862ef3e1f8ca60e52159a0 (patch)
tree	842265cc624b9c2f201fb5bc75264c39ea92740a /tensorflow/python/eager/backprop_test.py
parent	a5a51ad3a1200e2e5ef46c140bab717422e41ca2 (diff)
Fixes tape gradient computation when output gradients are provided and when there are multiple targets.
PiperOrigin-RevId: 194796304
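
Concretely, the two behaviors this change fixes can be sketched with the public GradientTape API. The sketch below is illustrative only and is not part of the commit: it assumes the public tf.GradientTape spelling and an eager-enabled runtime (tf.enable_eager_execution() on TF 1.x, the default on TF 2.x), rather than the internal backprop/constant_op modules the tests use.

import tensorflow as tf

# Multiple targets: the gradient of each target is computed and the
# contributions are summed into each source.
with tf.GradientTape() as t:
  x = tf.constant(3.0)
  y = tf.constant(2.0)
  t.watch(x)
  t.watch(y)
  xx = 2 * x  # d(xx)/dx == 2
  yy = 3 * y  # d(yy)/dy == 3
dx, dy = t.gradient([xx, yy], [x, y])  # dx == 2.0, dy == 3.0

# output_gradients seeds each target with an upstream gradient, so with
# targets [loss, x] and seeds [1.0, 2.0]:
#   dx == 1.0 * d(loss)/dx + 2.0 * d(x)/dx == 1.0 * 2.0 + 2.0 * 1.0 == 4.0
with tf.GradientTape() as t:
  x = tf.constant(3.0)
  y = tf.constant(2.0)
  t.watch(x)
  t.watch(y)
  loss = x * y
dx, = t.gradient([loss, x], [x], output_gradients=[1.0, 2.0])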
Diffstat (limited to 'tensorflow/python/eager/backprop_test.py')
-rw-r--r--	tensorflow/python/eager/backprop_test.py	20
1 file changed, 20 insertions(+), 0 deletions(-)
diff --git a/tensorflow/python/eager/backprop_test.py b/tensorflow/python/eager/backprop_test.py
index 991b4dbe7a..8d9959fe20 100644
--- a/tensorflow/python/eager/backprop_test.py
+++ b/tensorflow/python/eager/backprop_test.py
@@ -96,6 +96,26 @@ class BackpropTest(test.TestCase):
     self.assertAllEqual(grads_and_vars[0][0], 1.0)
     self.assertAllEqual(id(grads_and_vars[0][1]), id(x))
 
+  def testTwoTargets(self):
+    with backprop.GradientTape() as t:
+      x = constant_op.constant(3.0)
+      y = constant_op.constant(2.0)
+      t.watch([x, y])
+      xx = 2 * x
+      yy = 3 * y
+    dx, dy = t.gradient([xx, yy], [x, y])
+    self.assertAllEqual(dx, 2.0)
+    self.assertAllEqual(dy, 3.0)
+
+  def testOutputGradUsedInComputation(self):
+    with backprop.GradientTape() as t:
+      x = constant_op.constant(3.0)
+      y = constant_op.constant(2.0)
+      t.watch([x, y])
+      loss = x * y
+    dx, = t.gradient([loss, x], [x], output_gradients=[1.0, 2.0])
+    self.assertAllEqual(dx, 4.0)
+
   def testDy(self):
 
     def f(x):