diff options
Diffstat (limited to 'tensorflow/contrib/kfac/python/ops/loss_functions.py')
-rw-r--r-- | tensorflow/contrib/kfac/python/ops/loss_functions.py | 6 |
1 file changed, 3 insertions, 3 deletions
diff --git a/tensorflow/contrib/kfac/python/ops/loss_functions.py b/tensorflow/contrib/kfac/python/ops/loss_functions.py index 42d525c2c2..c8cebc42cb 100644 --- a/tensorflow/contrib/kfac/python/ops/loss_functions.py +++ b/tensorflow/contrib/kfac/python/ops/loss_functions.py @@ -214,7 +214,7 @@ class NegativeLogProbLoss(LossFunction): Here the 'Fisher' is the Fisher information matrix (i.e. expected outer- product of gradients) with respect to the parameters of the underlying - probability distribtion (whose log-prob defines the loss). Typically this + probability distribution (whose log-prob defines the loss). Typically this will be block-diagonal across different cases in the batch, since the distribution is usually (but not always) conditionally iid across different cases. @@ -238,7 +238,7 @@ class NegativeLogProbLoss(LossFunction): Here the 'Fisher' is the Fisher information matrix (i.e. expected outer- product of gradients) with respect to the parameters of the underlying - probability distribtion (whose log-prob defines the loss). Typically this + probability distribution (whose log-prob defines the loss). Typically this will be block-diagonal across different cases in the batch, since the distribution is usually (but not always) conditionally iid across different cases. @@ -262,7 +262,7 @@ class NegativeLogProbLoss(LossFunction): Here the 'Fisher' is the Fisher information matrix (i.e. expected outer- product of gradients) with respect to the parameters of the underlying - probability distribtion (whose log-prob defines the loss). Typically this + probability distribution (whose log-prob defines the loss). Typically this will be block-diagonal across different cases in the batch, since the distribution is usually (but not always) conditionally iid across different cases. |