author    Igor Ganichev <iga@google.com>    2018-03-26 14:33:10 -0700
committer TensorFlower Gardener <gardener@tensorflow.org>    2018-03-26 14:35:49 -0700
commit 2ff8e913ad000d379405c284857e7fc81eef9fed
tree   49b43f92cc3e3a7977d72ef1f608b51e4b0e16d8
parent 72ed3c3b743e5feef99e37058dbd2f4344bcc5e3
Clarify eager gradient doc strings
PiperOrigin-RevId: 190526387
 tensorflow/python/eager/backprop.py | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)
diff --git a/tensorflow/python/eager/backprop.py b/tensorflow/python/eager/backprop.py
index a7837b8a7f..c54a5a1445 100644
--- a/tensorflow/python/eager/backprop.py
+++ b/tensorflow/python/eager/backprop.py
@@ -171,8 +171,8 @@ def implicit_val_and_grad(f):
   """Returns a function which differentiates f with respect to variables.
 
   The wrapped function returns the value and the gradient of f when called with
-  the same arguments. The gradient is with respect to all TFE variables which
-  are either trainable or have `variable.watch()` called on them by f.
+  the same arguments. The gradient is with respect to all trainable TFE
+  variables accessed by `f`.
 
   This function is useful when the exact set of variables to differentiate with
   is not known ahead of time.
@@ -249,8 +249,8 @@ def implicit_grad(f):
   """Returns a function which differentiates f with respect to variables.
 
   The wrapped function returns the gradient of f when called with the same
-  arguments. The gradient is with respect to all TFE variables which are
-  either trainable or have `variable.watch()` called on them by f.
+  arguments. The gradient is with respect to all trainable TFE variables
+  accessed by `f`.
 
   This function is useful when the exact set of variables to differentiate with
   is not known ahead of time.
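
For reference, a minimal usage sketch of the two functions touched by this change (not part of the commit). It assumes a TensorFlow 1.x build with eager execution available via tf.contrib.eager; the variables `w`, `b` and the function `loss` below are illustrative only, and the wrappers are called through the patched module itself.

import tensorflow as tf
import tensorflow.contrib.eager as tfe
from tensorflow.python.eager import backprop

tfe.enable_eager_execution()

# Trainable TFE variables: per the clarified docstrings, any of these that
# `loss` accesses will be differentiated against.
w = tfe.Variable(2.0)
b = tfe.Variable(1.0)

def loss(x):
  return (w * x + b - 5.0) ** 2

# implicit_val_and_grad: the wrapper returns the value of `loss` together
# with a list of (gradient, variable) pairs for the variables it accessed.
val_and_grad_fn = backprop.implicit_val_and_grad(loss)
value, grads_and_vars = val_and_grad_fn(tf.constant(1.0))

# implicit_grad: identical, except the wrapper returns only the pairs.
grad_fn = backprop.implicit_grad(loss)
grads_and_vars = grad_fn(tf.constant(1.0))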