author    Vijay Vasudevan <vrv@google.com>  2016-11-03 17:07:01 -0800
committer TensorFlower Gardener <gardener@tensorflow.org>  2016-11-03 18:24:53 -0700
commit 818993c7751601527d662d2417f220e4e856e4ef (patch)
tree   a9cb33d6332f3e37d740cd6eb6984a1837714237 /tensorflow/models/image
parent a19c425536bba29997807bbbd5ed43386d3cb7bd (diff)
Merge changes from github.
Change: 138143557
Diffstat (limited to 'tensorflow/models/image')
-rw-r--r--  tensorflow/models/image/cifar10/cifar10.py      |  5
-rw-r--r--  tensorflow/models/image/mnist/convolutional.py  | 10
2 files changed, 10 insertions(+), 5 deletions(-)
diff --git a/tensorflow/models/image/cifar10/cifar10.py b/tensorflow/models/image/cifar10/cifar10.py
index fb3a42cbb1..7df2149d40 100644
--- a/tensorflow/models/image/cifar10/cifar10.py
+++ b/tensorflow/models/image/cifar10/cifar10.py
@@ -256,7 +256,10 @@ def inference(images):
local4 = tf.nn.relu(tf.matmul(local3, weights) + biases, name=scope.name)
_activation_summary(local4)
- # softmax, i.e. softmax(WX + b)
+ # linear layer (WX + b).
+ # We don't apply softmax here because
+ # tf.nn.sparse_softmax_cross_entropy_with_logits accepts the unscaled logits
+ # and performs the softmax internally for efficiency.
with tf.variable_scope('softmax_linear') as scope:
weights = _variable_with_weight_decay('weights', [192, NUM_CLASSES],
stddev=1/192.0, wd=0.0)
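
The comment added above documents a common TensorFlow pattern: keep the last layer linear and hand the unscaled logits to the loss op, which fuses softmax and cross-entropy for speed and numerical stability. A minimal, self-contained sketch of that pattern (TF 1.x-style API; the shapes and random inputs are hypothetical, not taken from the model above):

import tensorflow as tf

# Hypothetical unscaled scores and integer class ids, one row per example.
logits = tf.random_normal([128, 10])
labels = tf.random_uniform([128], maxval=10, dtype=tf.int64)

# No tf.nn.softmax(logits) beforehand -- the op applies softmax internally.
loss = tf.reduce_mean(
    tf.nn.sparse_softmax_cross_entropy_with_logits(labels=labels,
                                                   logits=logits))

with tf.Session() as sess:
    print(sess.run(loss))
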
diff --git a/tensorflow/models/image/mnist/convolutional.py b/tensorflow/models/image/mnist/convolutional.py
index 3ef1411c15..b458280379 100644
--- a/tensorflow/models/image/mnist/convolutional.py
+++ b/tensorflow/models/image/mnist/convolutional.py
@@ -296,11 +296,13 @@ def main(_):
# node in the graph it should be fed to.
feed_dict = {train_data_node: batch_data,
train_labels_node: batch_labels}
- # Run the graph and fetch some of the nodes.
- _, l, lr, predictions = sess.run(
- [optimizer, loss, learning_rate, train_prediction],
- feed_dict=feed_dict)
+ # Run the optimizer to update weights.
+ sess.run(optimizer, feed_dict=feed_dict)
+ # Print some extra information once we reach the evaluation frequency.
if step % EVAL_FREQUENCY == 0:
+ # Fetch some extra nodes' data.
+ l, lr, predictions = sess.run([loss, learning_rate, train_prediction],
+ feed_dict=feed_dict)
elapsed_time = time.time() - start_time
start_time = time.time()
print('Step %d (epoch %.2f), %.1f ms' %
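
The convolutional.py change follows the same efficiency idea: run only the training op on every step, and fetch the diagnostic tensors (loss, learning rate, predictions) only when they are actually printed, since every extra fetch adds evaluation and transfer overhead. A small self-contained sketch of the pattern (TF 1.x-style API; the toy graph, shapes, and constants below are stand-ins, not the real MNIST model):

import numpy as np
import tensorflow as tf

EVAL_FREQUENCY = 100  # stand-in for the script's constant

# Tiny hypothetical model in place of the convolutional network.
x = tf.placeholder(tf.float32, [None, 4])
y = tf.placeholder(tf.int64, [None])
w = tf.Variable(tf.zeros([4, 3]))
logits = tf.matmul(x, w)
loss = tf.reduce_mean(
    tf.nn.sparse_softmax_cross_entropy_with_logits(labels=y, logits=logits))
optimizer = tf.train.GradientDescentOptimizer(0.1).minimize(loss)

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    feed_dict = {x: np.random.rand(8, 4), y: np.random.randint(3, size=8)}
    for step in range(1000):
        # Common path: run only the update op, fetching nothing extra.
        sess.run(optimizer, feed_dict=feed_dict)
        if step % EVAL_FREQUENCY == 0:
            # Rare path: fetch diagnostics only when we print them.
            l = sess.run(loss, feed_dict=feed_dict)
            print('step %d, loss %.4f' % (step, l))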