// tensorflow/cc/ops/functional_grad.cc
#include "tensorflow/core/framework/function.h"
#include "tensorflow/core/lib/core/errors.h"

namespace tensorflow {

typedef FunctionDefHelper FDH;

// Builds the gradient function for the "MapAccumulate" op. `attrs` carries
// the forward op's attributes: the mapped function f, the element type T,
// and the number of state tensors K. The resulting FunctionDef recomputes
// the forward output y and feeds it, together with dy, to a companion
// "MapAccumulateGrad" op driven by the symbolic gradient of f.
Status MapAccumulateGrad(const AttrSlice& attrs, FunctionDef* ret) {
  const NameAttrList* func;
  TF_RETURN_IF_ERROR(GetNodeAttr(attrs, "f", &func));
  DataType T;
  TF_RETURN_IF_ERROR(GetNodeAttr(attrs, "T", &T));
  int k;
  TF_RETURN_IF_ERROR(GetNodeAttr(attrs, "K", &k));
  // The gradient function of f.
  //  f : (K*T, T, T) -> T
  //  g : (K*T, T, T, T) -> (K*T, T, T)
  auto grad = FDH::FunctionRef("SymbolicGradient",
                               {{"f", *func},
                                {"Tin", std::vector<DataType>(k + 3, T)},
                                {"Tout", std::vector<DataType>(k + 2, T)}});
  *ret = FDH::Define(
      // Arg defs
      {"theta: K*T", "x: T", "u: T", "dy: T"},
      // Ret val defs
      {"dtheta: K*T", "dx: T", "du: T"},
      // Attr defs
      {{"T: {float, double}"}},
      // Nodes: recompute y = MapAccumulate(theta, x, u), then run the
      // companion "MapAccumulateGrad" op with g = SymbolicGradient(f).
      {{{"y"},
        "MapAccumulate",
        {"theta", "x", "u"},
        {{"f", *func}, {"T", "$T"}, {"K", k}}},
       {{"dtheta", "dx", "du"},
        "MapAccumulateGrad",
        {"theta", "x", "u", "y", "dy"},
        {{"g", grad}, {"T", "$T"}, {"K", k}}}});
  return Status::OK();
}
REGISTER_OP_GRADIENT("MapAccumulate", MapAccumulateGrad);
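
// For illustration only (a sketch, not part of the registered code): with
// K = 1, the FunctionDef built above corresponds roughly to
//
//   (theta: T, x: T, u: T, dy: T) -> (dtheta: T, dx: T, du: T) {
//     y = MapAccumulate[f=f, T=T, K=1](theta, x, u)
//     dtheta, dx, du = MapAccumulateGrad[g=SymbolicGradient(f), T=T, K=1](
//         theta, x, u, y, dy)
//   }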

}  // end namespace tensorflow