author     Vincent Vanhoucke <vanhoucke@google.com>  2016-02-01 20:40:54 -0800
committer  Manjunath Kudlur <keveman@gmail.com>      2016-02-02 08:35:39 -0800
commit     1c167b7debf3d51e3dfdd745e59e1267e03fd02c (patch)
tree       3ecd88b1ac660f7cba9d1f0bdaec81dc2c2a0bba
parent     bc58a40a86126bf91c92cd85f7c47eb7fe4f4ca2 (diff)
Fix print formatting.
More general exclusion of files (h/t @shreyasva)
Typo (h/t @seanpavlov)
Change: 113597422
-rw-r--r--  tensorflow/examples/udacity/1_notmnist.ipynb        | 121
-rw-r--r--  tensorflow/examples/udacity/2_fullyconnected.ipynb  |   1
-rw-r--r--  tensorflow/examples/udacity/3_regularization.ipynb  |   3
-rw-r--r--  tensorflow/examples/udacity/4_convolutions.ipynb    |   3
-rw-r--r--  tensorflow/examples/udacity/5_word2vec.ipynb        |  33
-rw-r--r--  tensorflow/examples/udacity/6_lstm.ipynb            |   1
6 files changed, 85 insertions(+), 77 deletions(-)
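The core of this commit is Python 2/3 print compatibility: every notebook gains a "from __future__ import print_function" line, and the remaining Python 2 print statements (e.g. "print folder" in load_letter) become function calls. A minimal sketch of the pattern, with a hypothetical folder value:

from __future__ import print_function

folder = 'notMNIST_large/A'  # hypothetical example value
print(folder)                # valid under both Python 2 and Python 3
# Without the __future__ import, the Python 2 statement form
#   print folder
# is a SyntaxError on Python 3, which is what this commit fixes.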
diff --git a/tensorflow/examples/udacity/1_notmnist.ipynb b/tensorflow/examples/udacity/1_notmnist.ipynb
index 661ea4df92..d3f72c4fe8 100644
--- a/tensorflow/examples/udacity/1_notmnist.ipynb
+++ b/tensorflow/examples/udacity/1_notmnist.ipynb
@@ -45,6 +45,7 @@
"source": [
"# These are all the modules we'll be using later. Make sure you can import them\n",
"# before proceeding further.\n",
+ "from __future__ import print_function\n",
"import matplotlib.pyplot as plt\n",
"import numpy as np\n",
"import os\n",
@@ -191,7 +192,8 @@
" tar.extractall()\n",
" tar.close()\n",
" data_folders = [\n",
- " os.path.join(root, d) for d in sorted(os.listdir(root)) if d != '.DS_Store']\n",
+ " os.path.join(root, d) for d in sorted(os.listdir(root))\n",
+ " if os.path.isdir(os.path.join(root, d))]\n",
" if len(data_folders) != num_classes:\n",
" raise Exception(\n",
" 'Expected %d folders, one per class. Found %d instead.' % (\n",
@@ -284,33 +286,34 @@
"pixel_depth = 255.0 # Number of levels per pixel.\n",
"\n",
"def load_letter(folder, min_num_images):\n",
- " image_files = os.listdir(folder)\n",
- " dataset = np.ndarray(shape=(len(image_files), image_size, image_size),\n",
+ " \"\"\"Load the data for a single letter label.\"\"\"\n",
+ " image_files = os.listdir(folder)\n",
+ " dataset = np.ndarray(shape=(len(image_files), image_size, image_size),\n",
" dtype=np.float32)\n",
- " image_index = 0\n",
- " print folder\n",
- " for image in os.listdir(folder):\n",
- " image_file = os.path.join(folder, image)\n",
- " try:\n",
- " image_data = (ndimage.imread(image_file).astype(float) - \n",
- " pixel_depth / 2) / pixel_depth\n",
- " if image_data.shape != (image_size, image_size):\n",
- " raise Exception('Unexpected image shape: %s' % str(image_data.shape))\n",
- " dataset[image_index, :, :] = image_data\n",
- " image_index += 1\n",
- " except IOError as e:\n",
- " print('Could not read:', image_file, ':', e, '- it\\'s ok, skipping.')\n",
+ " image_index = 0\n",
+ " print(folder)\n",
+ " for image in os.listdir(folder):\n",
+ " image_file = os.path.join(folder, image)\n",
+ " try:\n",
+ " image_data = (ndimage.imread(image_file).astype(float) - \n",
+ " pixel_depth / 2) / pixel_depth\n",
+ " if image_data.shape != (image_size, image_size):\n",
+ " raise Exception('Unexpected image shape: %s' % str(image_data.shape))\n",
+ " dataset[image_index, :, :] = image_data\n",
+ " image_index += 1\n",
+ " except IOError as e:\n",
+ " print('Could not read:', image_file, ':', e, '- it\\'s ok, skipping.')\n",
" \n",
- " num_images = image_index\n",
- " dataset = dataset[0:num_images, :, :]\n",
- " if num_images < min_num_images:\n",
- " raise Exception('Many fewer images than expected: %d < %d' % \n",
- " (num_images, min_num_images))\n",
+ " num_images = image_index\n",
+ " dataset = dataset[0:num_images, :, :]\n",
+ " if num_images < min_num_images:\n",
+ " raise Exception('Many fewer images than expected: %d < %d' %\n",
+ " (num_images, min_num_images))\n",
" \n",
- " print('Full dataset tensor:', dataset.shape)\n",
- " print('Mean:', np.mean(dataset))\n",
- " print('Standard deviation:', np.std(dataset))\n",
- " return dataset\n",
+ " print('Full dataset tensor:', dataset.shape)\n",
+ " print('Mean:', np.mean(dataset))\n",
+ " print('Standard deviation:', np.std(dataset))\n",
+ " return dataset\n",
" \n",
"def load(data_folders, min_num_images_per_class):\n",
" dataset_names = []\n",
@@ -506,44 +509,44 @@
},
"source": [
"def make_arrays(nb_rows, img_size):\n",
- " if nb_rows:\n",
- " dataset = np.ndarray((nb_rows, img_size, img_size), dtype=np.float32)\n",
- " labels = np.ndarray(nb_rows, dtype=np.int32)\n",
- " else:\n",
- " dataset, labels = None, None\n",
- " return dataset, labels\n",
+ " if nb_rows:\n",
+ " dataset = np.ndarray((nb_rows, img_size, img_size), dtype=np.float32)\n",
+ " labels = np.ndarray(nb_rows, dtype=np.int32)\n",
+ " else:\n",
+ " dataset, labels = None, None\n",
+ " return dataset, labels\n",
"\n",
"def merge_datasets(pickle_files, train_size, valid_size=0):\n",
- " num_classes = len(pickle_files)\n",
- " valid_dataset, valid_labels = make_arrays(valid_size, image_size)\n",
- " train_dataset, train_labels = make_arrays(train_size, image_size)\n",
- " vsize_per_class = valid_size // num_classes\n",
- " tsize_per_class = train_size // num_classes\n",
+ " num_classes = len(pickle_files)\n",
+ " valid_dataset, valid_labels = make_arrays(valid_size, image_size)\n",
+ " train_dataset, train_labels = make_arrays(train_size, image_size)\n",
+ " vsize_per_class = valid_size // num_classes\n",
+ " tsize_per_class = train_size // num_classes\n",
" \n",
- " start_v, start_t = 0, 0\n",
- " end_v, end_t = vsize_per_class, tsize_per_class\n",
- " end_l = vsize_per_class+tsize_per_class\n",
- " for label, pickle_file in enumerate(pickle_files): \n",
- " try:\n",
- " with open(pickle_file, 'rb') as f:\n",
- " letter_set = pickle.load(f)\n",
- " if valid_dataset is not None:\n",
- " valid_letter = letter_set[:vsize_per_class, :, :]\n",
- " valid_dataset[start_v:end_v, :, :] = valid_letter\n",
- " valid_labels[start_v:end_v] = label\n",
- " start_v += vsize_per_class\n",
- " end_v += vsize_per_class\n",
+ " start_v, start_t = 0, 0\n",
+ " end_v, end_t = vsize_per_class, tsize_per_class\n",
+ " end_l = vsize_per_class+tsize_per_class\n",
+ " for label, pickle_file in enumerate(pickle_files): \n",
+ " try:\n",
+ " with open(pickle_file, 'rb') as f:\n",
+ " letter_set = pickle.load(f)\n",
+ " if valid_dataset is not None:\n",
+ " valid_letter = letter_set[:vsize_per_class, :, :]\n",
+ " valid_dataset[start_v:end_v, :, :] = valid_letter\n",
+ " valid_labels[start_v:end_v] = label\n",
+ " start_v += vsize_per_class\n",
+ " end_v += vsize_per_class\n",
" \n",
- " train_letter = letter_set[vsize_per_class:end_l, :, :]\n",
- " train_dataset[start_t:end_t, :, :] = train_letter\n",
- " train_labels[start_t:end_t] = label\n",
- " start_t += tsize_per_class\n",
- " end_t += tsize_per_class\n",
- " except Exception as e:\n",
- " print('Unable to process data from', pickle_file, ':', e)\n",
- " raise\n",
+ " train_letter = letter_set[vsize_per_class:end_l, :, :]\n",
+ " train_dataset[start_t:end_t, :, :] = train_letter\n",
+ " train_labels[start_t:end_t] = label\n",
+ " start_t += tsize_per_class\n",
+ " end_t += tsize_per_class\n",
+ " except Exception as e:\n",
+ " print('Unable to process data from', pickle_file, ':', e)\n",
+ " raise\n",
" \n",
- " return valid_dataset, valid_labels, train_dataset, train_labels\n",
+ " return valid_dataset, valid_labels, train_dataset, train_labels\n",
" \n",
" \n",
"train_size = 200000\n",
@@ -757,4 +760,4 @@
]
}
]
-}
+}
\ No newline at end of file
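The 1_notmnist.ipynb hunk above also carries the "more general exclusion of files" from the commit message: instead of excluding '.DS_Store' by name, the list comprehension now keeps only entries that are actually directories, so any stray file is skipped. A standalone sketch of the new behavior (root and num_classes are assumed to match the notebook's setup):

import os

root = 'notMNIST_large'  # assumed extraction root, as in the notebook
num_classes = 10         # one folder per letter, A through J

# Keep only real directories; unlike the old name-based check, this
# skips .DS_Store and any other stray file regardless of its name.
data_folders = [
    os.path.join(root, d) for d in sorted(os.listdir(root))
    if os.path.isdir(os.path.join(root, d))]
if len(data_folders) != num_classes:
    raise Exception(
        'Expected %d folders, one per class. Found %d instead.' % (
            num_classes, len(data_folders)))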
diff --git a/tensorflow/examples/udacity/2_fullyconnected.ipynb b/tensorflow/examples/udacity/2_fullyconnected.ipynb
index d7042e1313..2bf5a7f937 100644
--- a/tensorflow/examples/udacity/2_fullyconnected.ipynb
+++ b/tensorflow/examples/udacity/2_fullyconnected.ipynb
@@ -45,6 +45,7 @@
"source": [
"# These are all the modules we'll be using later. Make sure you can import them\n",
"# before proceeding further.\n",
+ "from __future__ import print_function\n",
"import numpy as np\n",
"import tensorflow as tf\n",
"from six.moves import cPickle as pickle\n",
diff --git a/tensorflow/examples/udacity/3_regularization.ipynb b/tensorflow/examples/udacity/3_regularization.ipynb
index c848c7c69b..7c587a6512 100644
--- a/tensorflow/examples/udacity/3_regularization.ipynb
+++ b/tensorflow/examples/udacity/3_regularization.ipynb
@@ -45,6 +45,7 @@
"source": [
"# These are all the modules we'll be using later. Make sure you can import them\n",
"# before proceeding further.\n",
+ "from __future__ import print_function\n",
"import numpy as np\n",
"import tensorflow as tf\n",
"from six.moves import cPickle as pickle"
@@ -234,7 +235,7 @@
"Problem 1\n",
"---------\n",
"\n",
- "Introduce and tune L2 regularization for both logistic and neural network models. Remember that L2 amounts to adding a penalty on the norm of the weights to the loss. In TensorFlow, you can compue the L2 loss for a tensor `t` using `nn.l2_loss(t)`. The right amount of regularization should improve your validation / test accuracy.\n",
+ "Introduce and tune L2 regularization for both logistic and neural network models. Remember that L2 amounts to adding a penalty on the norm of the weights to the loss. In TensorFlow, you can compute the L2 loss for a tensor `t` using `nn.l2_loss(t)`. The right amount of regularization should improve your validation / test accuracy.\n",
"\n",
"---"
]
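The corrected markdown cell points at nn.l2_loss. As a hedged sketch of how such a penalty is typically folded into a loss in TensorFlow of this era (beta and the weight shape are hypothetical, not taken from the notebook):

import tensorflow as tf

beta = 1e-3  # hypothetical regularization strength
weights = tf.Variable(tf.truncated_normal([784, 10]))
# tf.nn.l2_loss(t) computes sum(t ** 2) / 2; scaling it by beta and
# adding the result to the data loss applies the L2 penalty the
# problem statement asks for.
l2_penalty = beta * tf.nn.l2_loss(weights)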
diff --git a/tensorflow/examples/udacity/4_convolutions.ipynb b/tensorflow/examples/udacity/4_convolutions.ipynb
index 9ad41acb0c..680f72bff5 100644
--- a/tensorflow/examples/udacity/4_convolutions.ipynb
+++ b/tensorflow/examples/udacity/4_convolutions.ipynb
@@ -45,6 +45,7 @@
"source": [
"# These are all the modules we'll be using later. Make sure you can import them\n",
"# before proceeding further.\n",
+ "from __future__ import print_function\n",
"import numpy as np\n",
"import tensorflow as tf\n",
"from six.moves import cPickle as pickle\n",
@@ -461,4 +462,4 @@
]
}
]
-}
+}
\ No newline at end of file
diff --git a/tensorflow/examples/udacity/5_word2vec.ipynb b/tensorflow/examples/udacity/5_word2vec.ipynb
index b3a7a71e2c..ed8049388f 100644
--- a/tensorflow/examples/udacity/5_word2vec.ipynb
+++ b/tensorflow/examples/udacity/5_word2vec.ipynb
@@ -43,6 +43,7 @@
"source": [
"# These are all the modules we'll be using later. Make sure you can import them\n",
"# before proceeding further.\n",
+ "from __future__ import print_function\n",
"import collections\n",
"import math\n",
"import numpy as np\n",
@@ -364,21 +365,21 @@
" print(' labels:', [reverse_dictionary[li] for li in labels.reshape(8)])"
],
"outputs": [
- {
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "data: ['anarchism', 'originated', 'as', 'a', 'term', 'of', 'abuse', 'first']\n",
- "\n",
- "with num_skips = 2 and skip_window = 1:\n",
- " batch: ['originated', 'originated', 'as', 'as', 'a', 'a', 'term', 'term']\n",
- " labels: ['as', 'anarchism', 'a', 'originated', 'term', 'as', 'a', 'of']\n",
- "\n",
- "with num_skips = 4 and skip_window = 2:\n",
- " batch: ['as', 'as', 'as', 'as', 'a', 'a', 'a', 'a']\n",
- " labels: ['anarchism', 'originated', 'term', 'a', 'as', 'of', 'originated', 'term']\n"
- ]
- }
+ {
+ "output_type": "stream",
+ "text": [
+ "data: ['anarchism', 'originated', 'as', 'a', 'term', 'of', 'abuse', 'first']\n",
+ "\n",
+ "with num_skips = 2 and skip_window = 1:\n",
+ " batch: ['originated', 'originated', 'as', 'as', 'a', 'a', 'term', 'term']\n",
+ " labels: ['as', 'anarchism', 'a', 'originated', 'term', 'as', 'a', 'of']\n",
+ "\n",
+ "with num_skips = 4 and skip_window = 2:\n",
+ " batch: ['as', 'as', 'as', 'as', 'a', 'a', 'a', 'a']\n",
+ " labels: ['anarchism', 'originated', 'term', 'a', 'as', 'of', 'originated', 'term']\n"
+ ],
+ "name": "stdout"
+ }
],
"execution_count": 0
},
@@ -886,4 +887,4 @@
]
}
]
-}
+}
\ No newline at end of file
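The re-serialized output cell above illustrates the notebook's skip-gram batching: with skip_window = 1 each center word is paired with num_skips = 2 neighbors, and with skip_window = 2 with 4. A hedged pure-Python sketch of that pairing (the notebook's generate_batch is defined earlier in the file; skipgram_pairs here is a hypothetical stand-in, not the notebook's implementation):

import random

def skipgram_pairs(words, skip_window, num_skips):
    # Pair each center word with num_skips targets sampled from the
    # window of skip_window words on either side, as the output shows.
    pairs = []
    for i in range(skip_window, len(words) - skip_window):
        window = words[i - skip_window:i] + words[i + 1:i + skip_window + 1]
        for target in random.sample(window, num_skips):
            pairs.append((words[i], target))
    return pairs

data = ['anarchism', 'originated', 'as', 'a', 'term', 'of', 'abuse', 'first']
print(skipgram_pairs(data, skip_window=1, num_skips=2)[:4])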
diff --git a/tensorflow/examples/udacity/6_lstm.ipynb b/tensorflow/examples/udacity/6_lstm.ipynb
index c41eef2a8f..a1ef14b787 100644
--- a/tensorflow/examples/udacity/6_lstm.ipynb
+++ b/tensorflow/examples/udacity/6_lstm.ipynb
@@ -53,6 +53,7 @@
"source": [
"# These are all the modules we'll be using later. Make sure you can import them\n",
"# before proceeding further.\n",
+ "from __future__ import print_function\n",
"import os\n",
"import numpy as np\n",
"import random\n",