aboutsummaryrefslogtreecommitdiffhomepage
path: root/tensorflow/cc/framework/gradients.h
diff options
context:
space:
mode:
authorGravatar Skye Wanderman-Milne <skyewm@google.com>2017-01-17 13:35:16 -0800
committerGravatar TensorFlower Gardener <gardener@tensorflow.org>2017-01-17 13:48:24 -0800
commit7a0d788c2697af366c6814396ce07cfed7c4bd60 (patch)
tree382e9bc1e1553f848e57aa8237a6879f69d52b2e /tensorflow/cc/framework/gradients.h
parentdbf4e01d6fbb7beb1ff2aed6d847da6605cb49e2 (diff)
Update C++ API comments to be more Doxygen friendly.
This patch: - Updates // comments to ///. I manually reverted some comments that shouldn't be docs (e.g. TODOs), but may have missed some. - Indents code blocks so they get formatted as such in the docs. - Removes /* */ comments from example code since it messes up Doxygen. - Puts a space between {{ and }} since it messes up devsite. - Adds some // START_SKIP_DOXYGEN and // END_SKIP_DOXYGEN comments for functions that aren't part of the public API (incomplete) This will likely require further small fixups, but this gets something generated. Change: 144749351
Diffstat (limited to 'tensorflow/cc/framework/gradients.h')
-rw-r--r--tensorflow/cc/framework/gradients.h24
1 file changed, 12 insertions, 12 deletions
diff --git a/tensorflow/cc/framework/gradients.h b/tensorflow/cc/framework/gradients.h
index fa5e608bd4..278ad79492 100644
--- a/tensorflow/cc/framework/gradients.h
+++ b/tensorflow/cc/framework/gradients.h
@@ -21,13 +21,13 @@ limitations under the License.
namespace tensorflow {
-// NOTE: This API is a work in progress and will likely be changing frequently.
-//
-// Given initial gradients 'grad_inputs' (which represent the symbolic partial
-// derivatives of some loss function 'L' w.r.t 'outputs'), adds gradient nodes
-// to the graph associated with 'scope', which compute (and return in
-// 'grad_outputs') the symbolic partial derivatives of 'L' w.r.t 'inputs'.
-//
+/// NOTE: This API is a work in progress and will likely be changing frequently.
+///
+/// Given initial gradients 'grad_inputs' (which represent the symbolic partial
+/// derivatives of some loss function 'L' w.r.t 'outputs'), adds gradient nodes
+/// to the graph associated with 'scope', which compute (and return in
+/// 'grad_outputs') the symbolic partial derivatives of 'L' w.r.t 'inputs'.
+///
// TODO(andydavis) Add overload of this function with no 'grad_inputs' arg.
// Implementation will fill in 'OnesLike' for all shapes in 'outputs'.
@@ -37,11 +37,11 @@ Status AddSymbolicGradients(const Scope& scope,
const std::vector<ops::Output>& grad_inputs,
std::vector<ops::Output>* grad_outputs);
-// Returns a sentinel Output that represents 'no gradient' (i.e. no gradient
-// flows along some graph edge during backpropagation).
-// Can be returned in 'grad_outputs' by an invocation of 'AddSymbolicGradients'
-// (note that gradient flow through an Output can be stopped through the use of
-// the StopGradient node).
+/// Returns a sentinel Output that represents 'no gradient' (i.e. no gradient
+/// flows along some graph edge during backpropagation).
+/// Can be returned in 'grad_outputs' by an invocation of 'AddSymbolicGradients'
+/// (note that gradient flow through an Output can be stopped through the use of
+/// the StopGradient node).
ops::Output NoGradient();
} // namespace tensorflow