# tensorflow/tools/api/golden/v2/tensorflow.train.-gradient-descent-optimizer.pbtxt
path: "tensorflow.train.GradientDescentOptimizer"
tf_class {
  is_instance: "<class \'tensorflow.python.training.gradient_descent.GradientDescentOptimizer\'>"
  is_instance: "<class \'tensorflow.python.training.optimizer.Optimizer\'>"
  is_instance: "<class \'tensorflow.python.training.checkpointable.base.CheckpointableBase\'>"
  is_instance: "<type \'object\'>"
  member {
    name: "GATE_GRAPH"
    mtype: "<type \'int\'>"
  }
  member {
    name: "GATE_NONE"
    mtype: "<type \'int\'>"
  }
  member {
    name: "GATE_OP"
    mtype: "<type \'int\'>"
  }
  member_method {
    name: "__init__"
    argspec: "args=[\'self\', \'learning_rate\', \'use_locking\', \'name\'], varargs=None, keywords=None, defaults=[\'False\', \'GradientDescent\'], "
  }
  member_method {
    name: "apply_gradients"
    argspec: "args=[\'self\', \'grads_and_vars\', \'global_step\', \'name\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
  }
  member_method {
    name: "compute_gradients"
    argspec: "args=[\'self\', \'loss\', \'var_list\', \'gate_gradients\', \'aggregation_method\', \'colocate_gradients_with_ops\', \'grad_loss\'], varargs=None, keywords=None, defaults=[\'None\', \'1\', \'None\', \'False\', \'None\'], "
  }
  member_method {
    name: "get_name"
    argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
  }
  member_method {
    name: "get_slot"
    argspec: "args=[\'self\', \'var\', \'name\'], varargs=None, keywords=None, defaults=None"
  }
  member_method {
    name: "get_slot_names"
    argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
  }
  member_method {
    name: "minimize"
    argspec: "args=[\'self\', \'loss\', \'global_step\', \'var_list\', \'gate_gradients\', \'aggregation_method\', \'colocate_gradients_with_ops\', \'name\', \'grad_loss\'], varargs=None, keywords=None, defaults=[\'None\', \'None\', \'1\', \'None\', \'False\', \'None\', \'None\'], "
  }
  member_method {
    name: "variables"
    argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
  }
}
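
The golden file above records the public surface of tf.train.GradientDescentOptimizer: the constructor takes (learning_rate, use_locking=False, name="GradientDescent"), and minimize() bundles the compute_gradients() and apply_gradients() steps, optionally incrementing a global_step counter. A minimal usage sketch (not part of the golden file) under the assumption of TensorFlow 1.x graph mode; the toy least-squares tensors and the learning rate of 0.1 are illustrative, not from the source:

import tensorflow as tf  # TensorFlow 1.x

# Toy least-squares data; names `x`, `y`, `w`, `b` are illustrative.
x = tf.constant([[1.0], [2.0], [3.0]])
y = tf.constant([[2.0], [4.0], [6.0]])

w = tf.Variable([[0.0]], name="w")
b = tf.Variable([0.0], name="b")

pred = tf.matmul(x, w) + b
loss = tf.reduce_mean(tf.square(pred - y))

global_step = tf.train.get_or_create_global_step()

# Constructor signature from the __init__ argspec above:
# (learning_rate, use_locking=False, name="GradientDescent").
opt = tf.train.GradientDescentOptimizer(learning_rate=0.1)

# minimize() = compute_gradients() + apply_gradients();
# passing global_step increments it once per update.
train_op = opt.minimize(loss, global_step=global_step)

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    for _ in range(100):
        sess.run(train_op)
    print(sess.run([w, b, global_step]))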
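
The two-step path is useful when gradients need to be transformed before the update. Per the compute_gradients argspec, gate_gradients defaults to 1 (GATE_OP), and the method returns (gradient, variable) pairs that apply_gradients consumes. A sketch, again assuming TensorFlow 1.x; the clipping threshold is an arbitrary example value:

import tensorflow as tf  # TensorFlow 1.x

xvar = tf.Variable(5.0, name="xvar")
loss = tf.square(xvar)

opt = tf.train.GradientDescentOptimizer(learning_rate=0.1)

# compute_gradients() returns a list of (gradient, variable) pairs.
grads_and_vars = opt.compute_gradients(loss, var_list=[xvar])

# Transform gradients before applying, e.g. clip each by norm.
clipped = [(tf.clip_by_norm(g, 1.0), v) for g, v in grads_and_vars]

train_op = opt.apply_gradients(clipped)

# Plain SGD keeps no per-variable accumulator state, so the slot
# methods listed above report nothing for this optimizer.
print(opt.get_slot_names())  # expected: []

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    for _ in range(50):
        sess.run(train_op)
    print(sess.run(xvar))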