author    Justin Lebar <jlebar@google.com>  2017-12-17 10:43:11 -0800
committer TensorFlower Gardener <gardener@tensorflow.org>  2017-12-18 11:21:02 -0800
commit    483e439c7494dbe30a660b90a3bca1349a1bf8fd (patch)
tree      2a1610815b659c89465475e8e7c1c41f032db336 /tensorflow/stream_executor/stream.cc
parent    01da208158687c575a9c459cb62e3c5f90968bd2 (diff)
[StreamExecutor] Change "variance" to "inv_var" in BatchNormalizationBackward.
This parameter is not the variance of the data, but rather 1/sqrt(variance + epsilon). Neglecting epsilon, this is the inverse standard deviation. "inv_stddev" might be a better name, but "inv_var" is certainly better than plain "variance", and it matches NVIDIA's name for this parameter, which I think may override the desire for a more precise name.

No functional change.

PiperOrigin-RevId: 179352839
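As a minimal illustration (not part of this commit, and the helper name is hypothetical), the quantity the backward pass consumes relates to the raw batch variance as follows:

#include <cmath>

// Hypothetical helper, for illustration only: the saved value is the
// reciprocal of sqrt(variance + epsilon) -- the inverse standard deviation,
// neglecting epsilon -- not the variance itself.
float InvVarFromVariance(float variance, float epsilon) {
  return 1.0f / std::sqrt(variance + epsilon);
}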
Diffstat (limited to 'tensorflow/stream_executor/stream.cc')
-rw-r--r--  tensorflow/stream_executor/stream.cc  |  8 ++++----
1 file changed, 4 insertions(+), 4 deletions(-)
diff --git a/tensorflow/stream_executor/stream.cc b/tensorflow/stream_executor/stream.cc
index e92ed14779..ba5001e273 100644
--- a/tensorflow/stream_executor/stream.cc
+++ b/tensorflow/stream_executor/stream.cc
@@ -342,7 +342,7 @@ Stream &Stream::ThenBatchNormalizationForward(
Stream &Stream::ThenBatchNormalizationBackward(
const DeviceMemory<float> &y_backprop, const DeviceMemory<float> &x,
const DeviceMemory<float> &scale, const DeviceMemory<float> &mean,
- const DeviceMemory<float> &variance, const dnn::BatchDescriptor &x_desc,
+ const DeviceMemory<float> &inv_var, const dnn::BatchDescriptor &x_desc,
const dnn::BatchDescriptor &scale_offset_desc, const double epsilon,
DeviceMemory<float> *x_backprop, DeviceMemory<float> *scale_backprop,
DeviceMemory<float> *offset_backprop) {
@@ -352,7 +352,7 @@ Stream &Stream::ThenBatchNormalizationBackward(
if (ok()) {
if (dnn::DnnSupport *dnn = parent_->AsDnn()) {
CheckError(dnn->DoBatchNormalizationBackward(
- this, y_backprop, x, scale, mean, variance, x_desc, scale_offset_desc,
+ this, y_backprop, x, scale, mean, inv_var, x_desc, scale_offset_desc,
epsilon, x_backprop, scale_backprop, offset_backprop));
} else {
SetErrorAndLogNoDnnSupport();
@@ -392,7 +392,7 @@ Stream &Stream::ThenBatchNormalizationForward(
Stream &Stream::ThenBatchNormalizationBackward(
const DeviceMemory<Eigen::half> &y_backprop,
const DeviceMemory<Eigen::half> &x, const DeviceMemory<float> &scale,
- const DeviceMemory<float> &mean, const DeviceMemory<float> &variance,
+ const DeviceMemory<float> &mean, const DeviceMemory<float> &inv_var,
const dnn::BatchDescriptor &x_desc,
const dnn::BatchDescriptor &scale_offset_desc, const double epsilon,
DeviceMemory<Eigen::half> *x_backprop, DeviceMemory<float> *scale_backprop,
@@ -403,7 +403,7 @@ Stream &Stream::ThenBatchNormalizationBackward(
if (ok()) {
if (dnn::DnnSupport *dnn = parent_->AsDnn()) {
CheckError(dnn->DoBatchNormalizationBackward(
- this, y_backprop, x, scale, mean, variance, x_desc, scale_offset_desc,
+ this, y_backprop, x, scale, mean, inv_var, x_desc, scale_offset_desc,
epsilon, x_backprop, scale_backprop, offset_backprop));
} else {
SetErrorAndLogNoDnnSupport();
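For reference, a caller-side sketch of the renamed float overload, assuming a live Stream *stream, populated dnn::BatchDescriptor objects, and already-allocated DeviceMemory<float> buffers (the variable names are hypothetical; the parameter order follows the signature shown in this diff):

stream->ThenBatchNormalizationBackward(
    y_backprop, x, scale, mean,
    inv_var,  // 1/sqrt(variance + epsilon), not the raw variance
    x_desc, scale_offset_desc, epsilon,
    &x_backprop, &scale_backprop, &offset_backprop);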