diff options
author | 2017-04-09 20:20:25 -0800 | |
---|---|---|
committer | 2017-04-09 21:30:21 -0700 | |
commit | cc97ef9c360082ba68b86328c8921e479b494b73 (patch) | |
tree | 3116a83a837222492cbdf6136fcec45370f59784 /tensorflow/tools/api/golden/tensorflow.train.-r-m-s-prop-optimizer.pbtxt | |
parent | a61cff67c783101b5b48ba56fd6d12ca5ae10ccb (diff) |
Automated rollback of change 152528732
Change: 152652473
Diffstat (limited to 'tensorflow/tools/api/golden/tensorflow.train.-r-m-s-prop-optimizer.pbtxt')
-rw-r--r-- | tensorflow/tools/api/golden/tensorflow.train.-r-m-s-prop-optimizer.pbtxt | 46 |
1 file changed, 46 insertions, 0 deletions
diff --git a/tensorflow/tools/api/golden/tensorflow.train.-r-m-s-prop-optimizer.pbtxt b/tensorflow/tools/api/golden/tensorflow.train.-r-m-s-prop-optimizer.pbtxt new file mode 100644 index 0000000000..2aa4ae6d2d --- /dev/null +++ b/tensorflow/tools/api/golden/tensorflow.train.-r-m-s-prop-optimizer.pbtxt @@ -0,0 +1,46 @@ +path: "tensorflow.train.RMSPropOptimizer" +tf_class { + is_instance: "<class \'tensorflow.python.training.rmsprop.RMSPropOptimizer\'>" + is_instance: "<class \'tensorflow.python.training.optimizer.Optimizer\'>" + is_instance: "<type \'object\'>" + member { + name: "GATE_GRAPH" + mtype: "<type \'int\'>" + } + member { + name: "GATE_NONE" + mtype: "<type \'int\'>" + } + member { + name: "GATE_OP" + mtype: "<type \'int\'>" + } + member_method { + name: "__init__" + argspec: "args=[\'self\', \'learning_rate\', \'decay\', \'momentum\', \'epsilon\', \'use_locking\', \'centered\', \'name\'], varargs=None, keywords=None, defaults=[\'0.9\', \'0.0\', \'1e-10\', \'False\', \'False\', \'RMSProp\'], " + } + member_method { + name: "apply_gradients" + argspec: "args=[\'self\', \'grads_and_vars\', \'global_step\', \'name\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], " + } + member_method { + name: "compute_gradients" + argspec: "args=[\'self\', \'loss\', \'var_list\', \'gate_gradients\', \'aggregation_method\', \'colocate_gradients_with_ops\', \'grad_loss\'], varargs=None, keywords=None, defaults=[\'None\', \'1\', \'None\', \'False\', \'None\'], " + } + member_method { + name: "get_name" + argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None" + } + member_method { + name: "get_slot" + argspec: "args=[\'self\', \'var\', \'name\'], varargs=None, keywords=None, defaults=None" + } + member_method { + name: "get_slot_names" + argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None" + } + member_method { + name: "minimize" + argspec: "args=[\'self\', \'loss\', \'global_step\', \'var_list\', \'gate_gradients\', 
\'aggregation_method\', \'colocate_gradients_with_ops\', \'name\', \'grad_loss\'], varargs=None, keywords=None, defaults=[\'None\', \'None\', \'1\', \'None\', \'False\', \'None\', \'None\'], " + } +} |