about summary refs log tree commit diff homepage
path: root/tensorflow/examples/tutorials/mnist
diff options
context:
space:
mode:
authorGravatar Yuefeng Zhou <yuefengz@google.com>2016-08-28 14:28:18 -0800
committerGravatar TensorFlower Gardener <gardener@tensorflow.org>2016-08-28 15:32:53 -0700
commitaeac274ed160618afa242512c38d1fd496466136 (patch)
tree106c47339ebe08e6b97a990b724fd665e213d944 /tensorflow/examples/tutorials/mnist
parentcf35735c11f1022cadd5a8536617276db813e4ec (diff)
Extend softmax and logsoftmax to make them work on an arbitrary dimension of a non-scalar tensor.
Change: 131540860
Diffstat (limited to 'tensorflow/examples/tutorials/mnist')
-rw-r--r--tensorflow/examples/tutorials/mnist/mnist_with_summaries.py2
1 file changed, 1 insertion, 1 deletion
diff --git a/tensorflow/examples/tutorials/mnist/mnist_with_summaries.py b/tensorflow/examples/tutorials/mnist/mnist_with_summaries.py
index 23492e5122..868cfcc3e4 100644
--- a/tensorflow/examples/tutorials/mnist/mnist_with_summaries.py
+++ b/tensorflow/examples/tutorials/mnist/mnist_with_summaries.py
@@ -100,7 +100,7 @@ def train():
with tf.name_scope('Wx_plus_b'):
preactivate = tf.matmul(input_tensor, weights) + biases
tf.histogram_summary(layer_name + '/pre_activations', preactivate)
- activations = act(preactivate, 'activation')
+ activations = act(preactivate, name='activation')
tf.histogram_summary(layer_name + '/activations', activations)
return activations