path: "tensorflow.keras.optimizers.Adamax" tf_class { is_instance: "" is_instance: "" is_instance: "" member_method { name: "__init__" argspec: "args=[\'self\', \'lr\', \'beta_1\', \'beta_2\', \'epsilon\', \'decay\'], varargs=None, keywords=kwargs, defaults=[\'0.002\', \'0.9\', \'0.999\', \'None\', \'0.0\'], " } member_method { name: "from_config" argspec: "args=[\'cls\', \'config\'], varargs=None, keywords=None, defaults=None" } member_method { name: "get_config" argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None" } member_method { name: "get_gradients" argspec: "args=[\'self\', \'loss\', \'params\'], varargs=None, keywords=None, defaults=None" } member_method { name: "get_updates" argspec: "args=[\'self\', \'loss\', \'params\'], varargs=None, keywords=None, defaults=None" } member_method { name: "get_weights" argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None" } member_method { name: "set_weights" argspec: "args=[\'self\', \'weights\'], varargs=None, keywords=None, defaults=None" } }