author Gunhan Gulsoy <gunan@google.com> 2017-04-09 20:20:25 -0800
committer TensorFlower Gardener <gardener@tensorflow.org> 2017-04-09 21:30:21 -0700
commit cc97ef9c360082ba68b86328c8921e479b494b73 (patch)
tree 3116a83a837222492cbdf6136fcec45370f59784 /tensorflow/tools/api/golden/tensorflow.train.-adam-optimizer.pbtxt
parent a61cff67c783101b5b48ba56fd6d12ca5ae10ccb (diff)
Automated rollback of change 152528732
Change: 152652473
Diffstat (limited to 'tensorflow/tools/api/golden/tensorflow.train.-adam-optimizer.pbtxt')
-rw-r--r-- tensorflow/tools/api/golden/tensorflow.train.-adam-optimizer.pbtxt | 46
1 file changed, 46 insertions(+), 0 deletions(-)
diff --git a/tensorflow/tools/api/golden/tensorflow.train.-adam-optimizer.pbtxt b/tensorflow/tools/api/golden/tensorflow.train.-adam-optimizer.pbtxt
new file mode 100644
index 0000000000..c8144e2db7
--- /dev/null
+++ b/tensorflow/tools/api/golden/tensorflow.train.-adam-optimizer.pbtxt
@@ -0,0 +1,46 @@
+path: "tensorflow.train.AdamOptimizer"
+tf_class {
+ is_instance: "<class \'tensorflow.python.training.adam.AdamOptimizer\'>"
+ is_instance: "<class \'tensorflow.python.training.optimizer.Optimizer\'>"
+ is_instance: "<type \'object\'>"
+ member {
+ name: "GATE_GRAPH"
+ mtype: "<type \'int\'>"
+ }
+ member {
+ name: "GATE_NONE"
+ mtype: "<type \'int\'>"
+ }
+ member {
+ name: "GATE_OP"
+ mtype: "<type \'int\'>"
+ }
+ member_method {
+ name: "__init__"
+ argspec: "args=[\'self\', \'learning_rate\', \'beta1\', \'beta2\', \'epsilon\', \'use_locking\', \'name\'], varargs=None, keywords=None, defaults=[\'0.001\', \'0.9\', \'0.999\', \'1e-08\', \'False\', \'Adam\'], "
+ }
+ member_method {
+ name: "apply_gradients"
+ argspec: "args=[\'self\', \'grads_and_vars\', \'global_step\', \'name\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
+ }
+ member_method {
+ name: "compute_gradients"
+ argspec: "args=[\'self\', \'loss\', \'var_list\', \'gate_gradients\', \'aggregation_method\', \'colocate_gradients_with_ops\', \'grad_loss\'], varargs=None, keywords=None, defaults=[\'None\', \'1\', \'None\', \'False\', \'None\'], "
+ }
+ member_method {
+ name: "get_name"
+ argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
+ }
+ member_method {
+ name: "get_slot"
+ argspec: "args=[\'self\', \'var\', \'name\'], varargs=None, keywords=None, defaults=None"
+ }
+ member_method {
+ name: "get_slot_names"
+ argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
+ }
+ member_method {
+ name: "minimize"
+ argspec: "args=[\'self\', \'loss\', \'global_step\', \'var_list\', \'gate_gradients\', \'aggregation_method\', \'colocate_gradients_with_ops\', \'name\', \'grad_loss\'], varargs=None, keywords=None, defaults=[\'None\', \'None\', \'1\', \'None\', \'False\', \'None\', \'None\'], "
+ }
+}
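
The golden file above pins the public surface of tf.train.AdamOptimizer for API-compatibility checks. As a reading of what the recorded argspecs encode, here is a minimal usage sketch assuming the TensorFlow 1.x graph-mode API; the variable, loss, and session setup are illustrative, not part of the recorded interface:

import tensorflow as tf  # assumes TensorFlow 1.x, matching the API recorded above

# Constructor defaults mirror the __init__ argspec:
# learning_rate=0.001, beta1=0.9, beta2=0.999, epsilon=1e-08,
# use_locking=False, name='Adam'.
opt = tf.train.AdamOptimizer(learning_rate=0.001)

x = tf.Variable(3.0, name="x")                 # illustrative variable
loss = tf.square(x - 2.0)                      # illustrative loss
global_step = tf.Variable(0, trainable=False)

# minimize() bundles compute_gradients() and apply_gradients();
# passing global_step increments it once per applied update.
train_op = opt.minimize(loss, global_step=global_step)

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    sess.run(train_op)
    # Adam keeps per-variable slot accumulators for the first and
    # second moments; get_slot_names()/get_slot() expose them.
    print(opt.get_slot_names())                # ['m', 'v']
    print(sess.run(opt.get_slot(x, "m")))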
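The recorded compute_gradients/apply_gradients pair splits the same update into two steps, which lets gradients be transformed before they are applied; the gate_gradients default of 1 in the argspec corresponds to the GATE_OP class member listed above. Again a hedged sketch assuming TF 1.x, with clipping as an illustrative transformation:

import tensorflow as tf  # assumes TensorFlow 1.x

opt = tf.train.AdamOptimizer()  # all-default hyperparameters

w = tf.Variable([1.0, -1.0], name="w")         # illustrative variable
loss = tf.reduce_sum(tf.square(w))             # illustrative loss

# compute_gradients() returns (gradient, variable) pairs; the
# gate_gradients default of 1 is Optimizer.GATE_OP.
grads_and_vars = opt.compute_gradients(
    loss, var_list=[w], gate_gradients=tf.train.Optimizer.GATE_OP)

# Transform gradients before applying, e.g. norm clipping.
clipped = [(tf.clip_by_norm(g, 1.0), v) for g, v in grads_and_vars]

train_op = opt.apply_gradients(clipped, name="clipped_adam_step")

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    sess.run(train_op)
    print(sess.run(w))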