diff options
Diffstat (limited to 'tensorflow/python/keras/activations.py')
-rw-r--r--  tensorflow/python/keras/activations.py | 2 ++
1 file changed, 2 insertions(+), 0 deletions(-)
diff --git a/tensorflow/python/keras/activations.py b/tensorflow/python/keras/activations.py
index e487f583be..f608dea430 100644
--- a/tensorflow/python/keras/activations.py
+++ b/tensorflow/python/keras/activations.py
@@ -93,6 +93,8 @@ def selu(x):
       - To be used together with the initialization "lecun_normal".
       - To be used together with the dropout variant "AlphaDropout".
 
+  References:
+      - [Self-Normalizing Neural Networks](https://arxiv.org/abs/1706.02515)
   """
   alpha = 1.6732632423543772848170429916717
   scale = 1.0507009873554804934193349852946