author    A. Unique TensorFlower <gardener@tensorflow.org>  2017-06-07 10:59:18 -0700
committer TensorFlower Gardener <gardener@tensorflow.org>   2017-06-07 11:03:49 -0700
commit    fdb8e29354ce93afa8c2335a6287a59eb37d42fc (patch)
tree      75489b1dc9b502d0cde992f3faf21fbc6ef39be5 /tensorflow/examples/ios
parent    dffea202a506d7d0f5c2fb32798aecdcb1bab32a (diff)
Update iOS examples to use CocoaPods, and moved to tensorflow/examples/ios
PiperOrigin-RevId: 158289285
Diffstat (limited to 'tensorflow/examples/ios')
-rw-r--r--  tensorflow/examples/ios/.gitignore | 4
-rw-r--r--  tensorflow/examples/ios/README.md | 194
-rw-r--r--  tensorflow/examples/ios/benchmark/AppDelegate.h | 21
-rw-r--r--  tensorflow/examples/ios/benchmark/AppDelegate.mm | 44
-rw-r--r--  tensorflow/examples/ios/benchmark/Benchmark-Info.plist | 47
-rw-r--r--  tensorflow/examples/ios/benchmark/BenchmarkViewController.h | 24
-rw-r--r--  tensorflow/examples/ios/benchmark/BenchmarkViewController.mm | 302
-rw-r--r--  tensorflow/examples/ios/benchmark/BenchmarkViewController.xib | 47
-rw-r--r--  tensorflow/examples/ios/benchmark/Podfile | 5
-rw-r--r--  tensorflow/examples/ios/benchmark/data/grace_hopper.jpg | bin 0 -> 73746 bytes
-rw-r--r--  tensorflow/examples/ios/benchmark/ios_image_load.h | 27
-rw-r--r--  tensorflow/examples/ios/benchmark/ios_image_load.mm | 87
-rw-r--r--  tensorflow/examples/ios/benchmark/main.mm | 22
-rw-r--r--  tensorflow/examples/ios/benchmark/tf_benchmark_example.xcodeproj/project.pbxproj | 388
-rw-r--r--  tensorflow/examples/ios/camera/CameraExampleAppDelegate.h | 21
-rw-r--r--  tensorflow/examples/ios/camera/CameraExampleAppDelegate.m | 44
-rw-r--r--  tensorflow/examples/ios/camera/CameraExampleViewController.h | 47
-rw-r--r--  tensorflow/examples/ios/camera/CameraExampleViewController.mm | 621
-rw-r--r--  tensorflow/examples/ios/camera/Info.plist | 44
-rw-r--r--  tensorflow/examples/ios/camera/MainStoryboard_iPhone.storyboard | 46
-rw-r--r--  tensorflow/examples/ios/camera/Podfile | 5
-rw-r--r--  tensorflow/examples/ios/camera/data/grace_hopper.jpg | bin 0 -> 73746 bytes
-rw-r--r--  tensorflow/examples/ios/camera/ios_image_load.h | 27
-rw-r--r--  tensorflow/examples/ios/camera/ios_image_load.mm | 87
-rw-r--r--  tensorflow/examples/ios/camera/main.mm | 27
-rw-r--r--  tensorflow/examples/ios/camera/tensorflow_utils.h | 52
-rw-r--r--  tensorflow/examples/ios/camera/tensorflow_utils.mm | 219
-rw-r--r--  tensorflow/examples/ios/camera/tf_camera_example.xcodeproj/project.pbxproj | 412
-rw-r--r--  tensorflow/examples/ios/simple/AppDelegate.h | 21
-rw-r--r--  tensorflow/examples/ios/simple/AppDelegate.mm | 44
-rw-r--r--  tensorflow/examples/ios/simple/Podfile | 5
-rw-r--r--  tensorflow/examples/ios/simple/RunModel-Info.plist | 47
-rw-r--r--  tensorflow/examples/ios/simple/RunModelViewController.h | 24
-rw-r--r--  tensorflow/examples/ios/simple/RunModelViewController.mm | 253
-rw-r--r--  tensorflow/examples/ios/simple/RunModelViewController.xib | 46
-rw-r--r--  tensorflow/examples/ios/simple/data/grace_hopper.jpg | bin 0 -> 73746 bytes
-rw-r--r--  tensorflow/examples/ios/simple/ios_image_load.h | 27
-rw-r--r--  tensorflow/examples/ios/simple/ios_image_load.mm | 87
-rw-r--r--  tensorflow/examples/ios/simple/main.mm | 22
-rw-r--r--  tensorflow/examples/ios/simple/tf_simple_example.xcodeproj/project.pbxproj | 404
40 files changed, 3844 insertions, 0 deletions
diff --git a/tensorflow/examples/ios/.gitignore b/tensorflow/examples/ios/.gitignore
new file mode 100644
index 0000000000..e572b3012c
--- /dev/null
+++ b/tensorflow/examples/ios/.gitignore
@@ -0,0 +1,4 @@
+project.xcworkspace
+xcuserdata
+imagenet_comp_graph_label_strings.txt
+tensorflow_inception_graph.pb
diff --git a/tensorflow/examples/ios/README.md b/tensorflow/examples/ios/README.md
new file mode 100644
index 0000000000..9832399d72
--- /dev/null
+++ b/tensorflow/examples/ios/README.md
@@ -0,0 +1,194 @@
+# TensorFlow iOS Examples
+
+This folder contains examples of how to build applications for iOS devices using TensorFlow.
+
+## Running the Samples using CocoaPods
+ - You'll need Xcode 7.3 or later.
+
+ - There are currently three examples: simple, benchmark, and camera. For now,
+ you can download the sample code by cloning the main tensorflow repository
+ (we are planning to make the samples available as a separate repository
+ later).
+
+ - From the root of the tensorflow folder, download
+   [Inception v1](https://storage.googleapis.com/download.tensorflow.org/models/inception5h.zip),
+   and extract the label and graph files into the data folders inside the
+   simple, benchmark, and camera examples:
+
+```bash
+mkdir -p ~/graphs
+curl -o ~/graphs/inception5h.zip \
+ https://storage.googleapis.com/download.tensorflow.org/models/inception5h.zip \
+ && unzip ~/graphs/inception5h.zip -d ~/graphs/inception5h
+cp ~/graphs/inception5h/* tensorflow/examples/ios/benchmark/data/
+cp ~/graphs/inception5h/* tensorflow/examples/ios/camera/data/
+cp ~/graphs/inception5h/* tensorflow/examples/ios/simple/data/
+```
+
+ - Change directory to one of the samples, download the TensorFlow-experimental
+   pod, and open the Xcode workspace. Note that installing the pod can take a
+   long time since it is large (~450MB). For example, to run the simple
+   example:
+```bash
+cd tensorflow/examples/ios/simple
+pod install
+open tf_simple_example.xcworkspace # note: open the workspace, not the .xcodeproj file
+```
+
+ - Run the simple app in the simulator. You should see a single-screen app with
+   a "Run Model" button. Tap that, and you should see some debug output appear
+   below indicating that the example Grace Hopper image in the data directory
+   has been analyzed, with a military uniform recognized.
+
+ - Run the other samples using the same process. The camera example requires a
+ real device connected. Once you build and run that, you should get a live
+ camera view that you can point at objects to get real-time recognition
+ results.
+
+### Troubleshooting
+
+ - Make sure you use the TensorFlow-experimental pod (and not TensorFlow).
+
+ - The TensorFlow-experimental pod is currently about 450MB. The reason it is
+   so big is that we are bundling multiple platforms, and the pod includes
+   all TensorFlow functionality (e.g. operations). This is convenient during
+   development, but see the section below on how you can build your own custom
+   TensorFlow library to reduce the size.
+
+### Creating Your own App
+
+ - Create your own app using Xcode, then add a file named Podfile at the
+   project's root directory with the following content:
+```bash
+target 'YourProjectName'
+ pod 'TensorFlow-experimental'
+```
+
+ - Then run `pod install` to download and install the
+   TensorFlow-experimental pod, and finally run
+   `open YourProjectName.xcworkspace` to open your project and add your code
+   (see the sketch below).
+
+ - In your app's "Build Settings", make sure to add $(inherited) to the
+   "Other Linker Flags" and "Header Search Paths" sections.
+
+ - That's it. If you want to create your own custom TensorFlow iOS library, for
+   example to reduce the binary footprint, see the section below.
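+
+Putting those steps together, here is a minimal sketch. The project name
+`MyTFApp` is only a placeholder for your own project, and the Podfile contents
+mirror the snippet above:
+
+```bash
+# Run from the directory that contains MyTFApp.xcodeproj (placeholder name).
+cat > Podfile <<'EOF'
+target 'MyTFApp'
+  pod 'TensorFlow-experimental'
+EOF
+pod install                # downloads the ~450MB TensorFlow-experimental pod
+open MyTFApp.xcworkspace   # open the workspace, not the .xcodeproj
+```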
+
+## Building the TensorFlow iOS libraries from source
+
+ - You'll need Xcode 7.3 or later, with the command-line tools installed.
+
+ - Follow the instructions at
+   [tensorflow/contrib/makefile](https://github.com/tensorflow/tensorflow/tree/master/tensorflow/contrib/makefile)
+   under "iOS" to compile a static library containing the core TensorFlow code
+   (a short sketch follows this list).
+
+ - Open the Xcode project in the `simple` subfolder, then build and run it. You
+   should see a single-screen app with a "Run Model" button. Tap that, and you
+   should see some debug output appear below indicating that the example
+   Grace Hopper image has been analyzed, with a military uniform recognized.
+
+ - Once you have success there, make sure you have a real device connected and
+ open up the Xcode project in the `camera` subfolder. Once you build and run
+ that, you should get a live camera view that you can point at objects to get
+ real-time recognition results.
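+
+For reference, the whole iOS build can usually be driven by a single script (the
+same one mentioned in the troubleshooting notes below); a minimal sketch, run
+from the root of the tensorflow repository:
+
+```bash
+# Downloads dependencies, builds protobuf for iOS, then builds TensorFlow.
+# This can take a long time.
+tensorflow/contrib/makefile/build_all_ios.sh
+
+# The resulting static library should end up here:
+ls -lah tensorflow/contrib/makefile/gen/lib/libtensorflow-core.a
+```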
+
+### Troubleshooting
+
+If you're hitting problems, here's a checklist of common things to investigate:
+
+ - Make sure that you've run the `build_all_ios.sh` script.
+   This will run `download_dependencies.sh`, `compile_ios_protobuf.sh`, and
+   `compile_ios_tensorflow.sh` (check that each of them has run successfully).
+
+ - Check that you have version 7.3 or later of Xcode.
+
+ - If there's a complaint about no Sessions registered, that means that the C++
+   global constructors that TensorFlow relies on for registration haven't been
+   linked in properly. You'll have to make sure your project uses `-force_load`,
+   as described below.
+
+### Creating your Own App from your source libraries
+
+You'll need to update various settings in your app to link against
+TensorFlow. You can view them in the example projects, but here's a full
+rundown:
+
+ - The `compile_ios_tensorflow.sh` script builds a universal static library in
+   `tensorflow/contrib/makefile/gen/lib/libtensorflow-core.a`. You'll need to add
+   this to your linking build stage, and add
+   `tensorflow/contrib/makefile/gen/lib` to the Library Search Paths setting.
+
+ - You'll also need to add `libprotobuf.a` and `libprotobuf-lite.a` from
+ `tensorflow/contrib/makefile/gen/protobuf_ios/lib` to your _Build Stages_ and
+ _Library Search Paths_.
+
+ - The _Header Search Paths_ setting needs to contain:
+   - the root folder of tensorflow,
+   - `tensorflow/contrib/makefile/downloads/protobuf/src`,
+   - `tensorflow/contrib/makefile/downloads`,
+   - `tensorflow/contrib/makefile/downloads/eigen`, and
+   - `tensorflow/contrib/makefile/gen/proto`.
+
+ - In the Linking section, you need to add `-force_load` followed by the path to
+   the TensorFlow static library in the _Other Linker Flags_ section (see the
+   sketch after this list). This ensures
+   that the global C++ objects that are used to register important classes
+   inside the library are not stripped out. To the linker, they can appear
+   unused because no other code references the variables, but in fact their
+   constructors have the important side effect of registering the class.
+
+ - You'll need to include the Accelerate framework in the "Link Binary with
+ Libraries" build phase of your project.
+
+ - C++11 support (or later) should be enabled by setting `C++ Language Dialect` to
+ `GNU++11` (or `GNU++14`), and `C++ Standard Library` to `libc++`.
+
+ - The library doesn't currently support bitcode, so you'll need to disable that
+ in your project settings.
+
+ - Remove any use of the `-all_load` flag in your project. The protocol buffers
+ libraries (full and lite versions) contain duplicate symbols, and the `-all_load`
+ flag will cause these duplicates to become link errors. If you were using
+ `-all_load` to avoid issues with Objective-C categories in static libraries,
+ you may be able to replace it with the `-ObjC` flag.
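+
+To make the list above concrete, here is a hedged sketch of what the relevant
+settings might end up looking like (the paths assume your project sits next to
+your tensorflow checkout, and `YourProjectName` is a placeholder), plus a way to
+check what a target actually resolves them to:
+
+```bash
+#   Library Search Paths: tensorflow/contrib/makefile/gen/lib
+#                         tensorflow/contrib/makefile/gen/protobuf_ios/lib
+#   Header Search Paths:  <root of your tensorflow checkout>
+#                         tensorflow/contrib/makefile/downloads/protobuf/src
+#                         tensorflow/contrib/makefile/downloads
+#                         tensorflow/contrib/makefile/downloads/eigen
+#                         tensorflow/contrib/makefile/gen/proto
+#   Other Linker Flags:   -force_load tensorflow/contrib/makefile/gen/lib/libtensorflow-core.a
+#   Enable Bitcode:       No
+
+# Inspect what your target actually resolves these settings to:
+xcodebuild -project YourProjectName.xcodeproj -target YourProjectName \
+    -showBuildSettings | grep -E 'HEADER_SEARCH_PATHS|LIBRARY_SEARCH_PATHS|OTHER_LDFLAGS|ENABLE_BITCODE'
+```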
+
+### Reducing the binary size
+
+TensorFlow is a comparatively large library for a mobile device, so it will
+increase the size of your app. Currently on iOS we see around an 11 MB binary
+footprint per CPU architecture, though we're actively working on reducing that.
+It can be tricky to set up the right configuration in your own app to keep the
+size minimized, so if you do run into this issue we recommend you start by
+looking at the simple example to examine its size. Here's how you do that:
+
+ - Open the Xcode project in tensorflow/examples/ios/simple.
+
+ - Make sure you've followed the steps above to get the data files.
+
+ - Choose "Generic iOS Device" as the build configuration.
+
+ - Select Product->Build.
+
+ - Once the build's complete, open the Report Navigator and select the logs.
+
+ - Near the bottom, you'll see a line saying "Touch tf_simple_example.app".
+
+ - Expand that line using the icon on the right, and copy the first argument to
+ the Touch command.
+
+ - Go to the terminal, type `ls -lah ` and then paste the path you copied.
+
+ - For example it might look like `ls -lah /Users/petewarden/Library/Developer/Xcode/DerivedData/tf_simple_example-etdbksqytcnzeyfgdwiihzkqpxwr/Build/Products/Debug-iphoneos/tf_simple_example.app`
+
+ - Running this command will show the size of the executable as the
+ `tf_simple_example` line.
+
+Right now you'll see a size of around 23 MB, since it includes two
+architectures (armv7 and arm64). As a first step, you should make sure the size
+increase you see in your own app is similar, and if it's larger, look at the
+"Other Linker Flags" used in the Simple Xcode project settings to strip the
+executable.
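+
+If you want to see how much each architecture contributes, `lipo` can inspect
+and split the fat binary. A small sketch, with the binary path left as a
+placeholder for the one you copied from the build log:
+
+```bash
+APP_BINARY=/path/to/tf_simple_example.app/tf_simple_example
+lipo -info "$APP_BINARY"    # lists the architectures in the fat binary
+lipo "$APP_BINARY" -thin arm64 -output /tmp/tf_simple_example_arm64
+ls -lah /tmp/tf_simple_example_arm64    # size of just the arm64 slice
+```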
+
+After that, you can manually look at modifying the list of kernels
+included in tensorflow/contrib/makefile/tf_op_files.txt to reduce the number of
+implementations to the ones you're actually using in your own model. We're
+hoping to automate this step in the future, but for now manually removing them
+is the best approach.
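+
+A hedged sketch of that manual workflow (exactly which op implementations you
+can safely remove depends on the graph your model uses):
+
+```bash
+# Edit the op list, removing kernels your model doesn't need, then rebuild.
+$EDITOR tensorflow/contrib/makefile/tf_op_files.txt
+tensorflow/contrib/makefile/build_all_ios.sh
+```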
diff --git a/tensorflow/examples/ios/benchmark/AppDelegate.h b/tensorflow/examples/ios/benchmark/AppDelegate.h
new file mode 100644
index 0000000000..94046d9728
--- /dev/null
+++ b/tensorflow/examples/ios/benchmark/AppDelegate.h
@@ -0,0 +1,21 @@
+// Copyright 2015 Google Inc. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#import <UIKit/UIKit.h>
+
+@interface AppDelegate : UIResponder<UIApplicationDelegate>
+
+@property(strong, nonatomic) UIWindow *window;
+
+@end
diff --git a/tensorflow/examples/ios/benchmark/AppDelegate.mm b/tensorflow/examples/ios/benchmark/AppDelegate.mm
new file mode 100644
index 0000000000..23ffba0f7b
--- /dev/null
+++ b/tensorflow/examples/ios/benchmark/AppDelegate.mm
@@ -0,0 +1,44 @@
+// Copyright 2015 Google Inc. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#import "AppDelegate.h"
+
+#import "BenchmarkViewController.h"
+
+@implementation AppDelegate
+
+- (BOOL)application:(UIApplication *)application
+ didFinishLaunchingWithOptions:(NSDictionary *)launchOptions {
+
+ UITabBarController *bar = [[UITabBarController alloc] init];
+ [bar setViewControllers:
+ @[[[BenchmarkViewController alloc] init]]];
+ bar.selectedIndex = 0;
+ self.window = [[UIWindow alloc] initWithFrame:[[UIScreen mainScreen] bounds]];
+ self.window.rootViewController = bar;
+ [self.window makeKeyAndVisible];
+ return YES;
+}
+
+- (void)applicationWillResignActive:(UIApplication *)application {}
+
+- (void)applicationDidEnterBackground:(UIApplication *)application {}
+
+- (void)applicationWillEnterForeground:(UIApplication *)application {}
+
+- (void)applicationDidBecomeActive:(UIApplication *)application {}
+
+- (void)applicationWillTerminate:(UIApplication *)application {}
+
+@end
diff --git a/tensorflow/examples/ios/benchmark/Benchmark-Info.plist b/tensorflow/examples/ios/benchmark/Benchmark-Info.plist
new file mode 100644
index 0000000000..0cdbf28a31
--- /dev/null
+++ b/tensorflow/examples/ios/benchmark/Benchmark-Info.plist
@@ -0,0 +1,47 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
+<plist version="1.0">
+<dict>
+ <key>CFBundleDevelopmentRegion</key>
+ <string>en</string>
+ <key>CFBundleDisplayName</key>
+ <string>tf_benchmark_example</string>
+ <key>CFBundleExecutable</key>
+ <string>tf_benchmark_example</string>
+ <key>CFBundleIdentifier</key>
+ <string>com.google.tf_benchmark_example</string>
+ <key>CFBundleInfoDictionaryVersion</key>
+ <string>6.0</string>
+ <key>CFBundleName</key>
+ <string>ios-app</string>
+ <key>CFBundlePackageType</key>
+ <string>APPL</string>
+ <key>CFBundleShortVersionString</key>
+ <string>1.0</string>
+ <key>CFBundleSignature</key>
+ <string>????</string>
+ <key>CFBundleVersion</key>
+ <string>1.0</string>
+ <key>LSRequiresIPhoneOS</key>
+ <true/>
+ <key>UILaunchStoryboardName</key>
+ <string>BenchmarkViewController</string>
+ <key>UIRequiredDeviceCapabilities</key>
+ <array>
+ <string>armv7</string>
+ </array>
+ <key>UISupportedInterfaceOrientations</key>
+ <array>
+ <string>UIInterfaceOrientationPortrait</string>
+ <string>UIInterfaceOrientationLandscapeLeft</string>
+ <string>UIInterfaceOrientationLandscapeRight</string>
+ </array>
+ <key>UISupportedInterfaceOrientations~ipad</key>
+ <array>
+ <string>UIInterfaceOrientationPortrait</string>
+ <string>UIInterfaceOrientationPortraitUpsideDown</string>
+ <string>UIInterfaceOrientationLandscapeLeft</string>
+ <string>UIInterfaceOrientationLandscapeRight</string>
+ </array>
+</dict>
+</plist>
diff --git a/tensorflow/examples/ios/benchmark/BenchmarkViewController.h b/tensorflow/examples/ios/benchmark/BenchmarkViewController.h
new file mode 100644
index 0000000000..c9cbc49280
--- /dev/null
+++ b/tensorflow/examples/ios/benchmark/BenchmarkViewController.h
@@ -0,0 +1,24 @@
+// Copyright 2015 Google Inc. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#import <UIKit/UIKit.h>
+
+@interface BenchmarkViewController : UIViewController
+
+- (IBAction)getUrl:(id)sender;
+
+@property(weak, nonatomic) IBOutlet UITextView *urlContentTextView;
+@property(weak, nonatomic) IBOutlet UITextField *urlTextField;
+
+@end
diff --git a/tensorflow/examples/ios/benchmark/BenchmarkViewController.mm b/tensorflow/examples/ios/benchmark/BenchmarkViewController.mm
new file mode 100644
index 0000000000..cab7b36f17
--- /dev/null
+++ b/tensorflow/examples/ios/benchmark/BenchmarkViewController.mm
@@ -0,0 +1,302 @@
+// Copyright 2015 Google Inc. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#import "BenchmarkViewController.h"
+
+#include <pthread.h>
+#include <sys/time.h>
+#include <unistd.h>
+#include <fstream>
+#include <queue>
+#include <sstream>
+#include <string>
+
+//#include "google/protobuf/io/coded_stream.h"
+//#include "google/protobuf/io/zero_copy_stream_impl.h"
+//#include "google/protobuf/io/zero_copy_stream_impl_lite.h"
+//#include "google/protobuf/message_lite.h"
+#include "tensorflow/core/framework/op_kernel.h"
+//#include "tensorflow/core/framework/tensor.h"
+//#include "tensorflow/core/framework/types.pb.h"
+//#include "tensorflow/core/platform/env.h"
+//#include "tensorflow/core/platform/logging.h"
+//#include "tensorflow/core/platform/mutex.h"
+//#include "tensorflow/core/platform/types.h"
+#include "tensorflow/core/public/session.h"
+#include "tensorflow/core/util/stat_summarizer.h"
+
+#include "ios_image_load.h"
+
+NSString* RunInferenceOnImage();
+
+namespace {
+class IfstreamInputStream : public ::google::protobuf::io::CopyingInputStream {
+ public:
+ explicit IfstreamInputStream(const std::string& file_name)
+ : ifs_(file_name.c_str(), std::ios::in | std::ios::binary) {}
+ ~IfstreamInputStream() { ifs_.close(); }
+
+ int Read(void* buffer, int size) {
+ if (!ifs_) {
+ return -1;
+ }
+ ifs_.read(static_cast<char*>(buffer), size);
+ return (int)ifs_.gcount();
+ }
+
+ private:
+ std::ifstream ifs_;
+};
+} // namespace
+
+@interface BenchmarkViewController ()
+@end
+
+@implementation BenchmarkViewController {
+}
+
+- (IBAction)getUrl:(id)sender {
+ NSString* inference_result = RunInferenceOnImage();
+ self.urlContentTextView.text = inference_result;
+}
+
+@end
+
+// Returns the top N confidence values over threshold in the provided vector,
+// sorted by confidence in descending order.
+static void GetTopN(
+ const Eigen::TensorMap<Eigen::Tensor<float, 1, Eigen::RowMajor>,
+ Eigen::Aligned>& prediction,
+ const int num_results, const float threshold,
+ std::vector<std::pair<float, int>>* top_results) {
+ // Will contain top N results in ascending order.
+ std::priority_queue<std::pair<float, int>, std::vector<std::pair<float, int>>,
+ std::greater<std::pair<float, int>>>
+ top_result_pq;
+
+ long count = prediction.size();
+ for (int i = 0; i < count; ++i) {
+ const float value = prediction(i);
+
+ // Only add it if it beats the threshold and has a chance at being in
+ // the top N.
+ if (value < threshold) {
+ continue;
+ }
+
+ top_result_pq.push(std::pair<float, int>(value, i));
+
+ // If at capacity, kick the smallest value out.
+ if (top_result_pq.size() > num_results) {
+ top_result_pq.pop();
+ }
+ }
+
+ // Copy to output vector and reverse into descending order.
+ while (!top_result_pq.empty()) {
+ top_results->push_back(top_result_pq.top());
+ top_result_pq.pop();
+ }
+ std::reverse(top_results->begin(), top_results->end());
+}
+
+bool PortableReadFileToProto(const std::string& file_name,
+ ::google::protobuf::MessageLite* proto) {
+ ::google::protobuf::io::CopyingInputStreamAdaptor stream(
+ new IfstreamInputStream(file_name));
+ stream.SetOwnsCopyingStream(true);
+ // TODO(jiayq): the following coded stream is for debugging purposes to allow
+ // one to parse arbitrarily large messages for MessageLite. One most likely
+ // doesn't want to put protobufs larger than 64MB on Android, so we should
+ // eventually remove this and quit loud when a large protobuf is passed in.
+ ::google::protobuf::io::CodedInputStream coded_stream(&stream);
+ // Total bytes hard limit / warning limit are set to 1GB and 512MB
+ // respectively.
+ coded_stream.SetTotalBytesLimit(1024LL << 20, 512LL << 20);
+ return proto->ParseFromCodedStream(&coded_stream);
+}
+
+NSString* FilePathForResourceName(NSString* name, NSString* extension) {
+ NSString* file_path =
+ [[NSBundle mainBundle] pathForResource:name ofType:extension];
+ if (file_path == NULL) {
+ LOG(FATAL) << "Couldn't find '" << [name UTF8String] << "."
+ << [extension UTF8String] << "' in bundle.";
+ }
+ return file_path;
+}
+
+// A utility function to get the current time in seconds, for simple profiling.
+double time() {
+ timeval t;
+ gettimeofday(&t, nullptr);
+ return t.tv_sec + 1e-6 * t.tv_usec;
+}
+
+// Runs the session with profiling enabled, and prints out details of the time
+// that each node in the graph takes to the debug log.
+tensorflow::Status BenchmarkInference(
+ tensorflow::Session* session,
+ const std::vector<std::pair<tensorflow::string, tensorflow::Tensor>> inputs,
+ const std::vector<tensorflow::string>& output_layer_names,
+ std::vector<tensorflow::Tensor>* output_layers,
+ tensorflow::StatSummarizer* stat_summarizer, double* average_time) {
+ tensorflow::Status run_status;
+ const int iterations_count = 20;
+ double total_time = 0.0;
+ tensorflow::RunOptions run_options;
+ run_options.set_trace_level(tensorflow::RunOptions::FULL_TRACE);
+ tensorflow::RunMetadata run_metadata;
+ for (int iteration = 0; iteration < (iterations_count + 1); ++iteration) {
+ const double start_time = time();
+ run_status = session->Run(run_options, inputs, output_layer_names, {},
+ output_layers, &run_metadata);
+ const double end_time = time();
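+    // Treat the first iteration as a warm-up run: it is excluded from the
+    // accumulated time used to compute the average below.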
+ if (iteration != 0) {
+ total_time += end_time - start_time;
+ }
+ if (!run_status.ok()) {
+ LOG(ERROR) << "Running model failed: " << run_status;
+ tensorflow::LogAllRegisteredKernels();
+ return run_status;
+ }
+ }
+ assert(run_metadata.has_step_stats());
+ const tensorflow::StepStats& step_stats = run_metadata.step_stats();
+ stat_summarizer->ProcessStepStats(step_stats);
+ stat_summarizer->PrintStepStats();
+
+ *average_time = total_time / iterations_count;
+ NSLog(@"Took %f seconds", *average_time);
+
+ return tensorflow::Status::OK();
+}
+
+NSString* RunInferenceOnImage() {
+ tensorflow::SessionOptions options;
+
+ tensorflow::Session* session_pointer = nullptr;
+ tensorflow::Status session_status =
+ tensorflow::NewSession(options, &session_pointer);
+ if (!session_status.ok()) {
+ std::string status_string = session_status.ToString();
+ return [NSString
+ stringWithFormat:@"Session create failed - %s", status_string.c_str()];
+ }
+ std::unique_ptr<tensorflow::Session> session(session_pointer);
+ LOG(INFO) << "Session created.";
+
+ tensorflow::GraphDef tensorflow_graph;
+ LOG(INFO) << "Graph created.";
+
+ NSString* network_path =
+ FilePathForResourceName(@"tensorflow_inception_graph", @"pb");
+ PortableReadFileToProto([network_path UTF8String], &tensorflow_graph);
+
+ LOG(INFO) << "Creating session.";
+ tensorflow::Status s = session->Create(tensorflow_graph);
+ if (!s.ok()) {
+ LOG(ERROR) << "Could not create TensorFlow Graph: " << s;
+ return @"";
+ }
+
+ // Read the label list
+ NSString* labels_path =
+ FilePathForResourceName(@"imagenet_comp_graph_label_strings", @"txt");
+ std::vector<std::string> label_strings;
+ std::ifstream t;
+ t.open([labels_path UTF8String]);
+ std::string line;
+ while (t) {
+ std::getline(t, line);
+ label_strings.push_back(line);
+ }
+ t.close();
+
+ // Read the Grace Hopper image.
+ NSString* image_path = FilePathForResourceName(@"grace_hopper", @"jpg");
+ int image_width;
+ int image_height;
+ int image_channels;
+ std::vector<tensorflow::uint8> image_data = LoadImageFromFile(
+ [image_path UTF8String], &image_width, &image_height, &image_channels);
+ const int wanted_width = 224;
+ const int wanted_height = 224;
+ const int wanted_channels = 3;
+ const float input_mean = 117.0f;
+ const float input_std = 1.0f;
+ assert(image_channels >= wanted_channels);
+ tensorflow::Tensor image_tensor(
+ tensorflow::DT_FLOAT,
+ tensorflow::TensorShape(
+ {1, wanted_height, wanted_width, wanted_channels}));
+ auto image_tensor_mapped = image_tensor.tensor<float, 4>();
+ tensorflow::uint8* in = image_data.data();
+ float* out = image_tensor_mapped.data();
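+  // Scale the decoded image to the 224x224 input size with nearest-neighbor
+  // sampling, and normalize each channel as (value - input_mean) / input_std.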
+ for (int y = 0; y < wanted_height; ++y) {
+ const int in_y = (y * image_height) / wanted_height;
+ tensorflow::uint8* in_row = in + (in_y * image_width * image_channels);
+ float* out_row = out + (y * wanted_width * wanted_channels);
+ for (int x = 0; x < wanted_width; ++x) {
+ const int in_x = (x * image_width) / wanted_width;
+ tensorflow::uint8* in_pixel = in_row + (in_x * image_channels);
+ float* out_pixel = out_row + (x * wanted_channels);
+ for (int c = 0; c < wanted_channels; ++c) {
+ out_pixel[c] = (in_pixel[c] - input_mean) / input_std;
+ }
+ }
+ }
+ tensorflow::string input_layer = "input";
+ tensorflow::string output_layer = "output";
+ std::vector<tensorflow::Tensor> outputs;
+ tensorflow::StatSummarizer stat_summarizer(tensorflow_graph);
+ double average_time = 0.0;
+ BenchmarkInference(session.get(), {{input_layer, image_tensor}},
+ {output_layer}, &outputs, &stat_summarizer, &average_time);
+ NSString* result =
+ [NSString stringWithFormat:@"Average time: %.4f seconds \n\n", average_time];
+
+ tensorflow::Tensor* output = &outputs[0];
+ const int kNumResults = 5;
+ const float kThreshold = 0.1f;
+ std::vector<std::pair<float, int>> top_results;
+ GetTopN(output->flat<float>(), kNumResults, kThreshold, &top_results);
+
+ std::stringstream ss;
+ ss.precision(3);
+ for (const auto& result : top_results) {
+ const float confidence = result.first;
+ const int index = result.second;
+
+ ss << index << " " << confidence << " ";
+
+ // Write out the result as a string
+ if (index < label_strings.size()) {
+ // just for safety: theoretically, the output is under 1000 unless there
+ // is some numerical issues leading to a wrong prediction.
+ ss << label_strings[index];
+ } else {
+ ss << "Prediction: " << index;
+ }
+
+ ss << "\n";
+ }
+
+ LOG(INFO) << "Predictions: " << ss.str();
+
+ tensorflow::string predictions = ss.str();
+ result = [NSString stringWithFormat:@"%@ - %s", result, predictions.c_str()];
+
+ return result;
+}
diff --git a/tensorflow/examples/ios/benchmark/BenchmarkViewController.xib b/tensorflow/examples/ios/benchmark/BenchmarkViewController.xib
new file mode 100644
index 0000000000..56c3708062
--- /dev/null
+++ b/tensorflow/examples/ios/benchmark/BenchmarkViewController.xib
@@ -0,0 +1,47 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<document type="com.apple.InterfaceBuilder3.CocoaTouch.XIB" version="3.0" toolsVersion="10117" systemVersion="15G31" targetRuntime="iOS.CocoaTouch" propertyAccessControl="none" useAutolayout="YES">
+ <dependencies>
+ <deployment identifier="iOS"/>
+ <plugIn identifier="com.apple.InterfaceBuilder.IBCocoaTouchPlugin" version="10085"/>
+ </dependencies>
+ <objects>
+ <placeholder placeholderIdentifier="IBFilesOwner" id="-1" userLabel="File's Owner" customClass="BenchmarkViewController">
+ <connections>
+ <outlet property="urlContentTextView" destination="quY-AK-ZCn" id="YjW-BO-1Ta"/>
+ <outlet property="urlTextField" destination="hPw-q5-vh5" id="wmc-b6-2CV"/>
+ <outlet property="view" destination="1" id="iHm-Rr-4wj"/>
+ </connections>
+ </placeholder>
+ <placeholder placeholderIdentifier="IBFirstResponder" id="-2" customClass="UIResponder"/>
+ <view contentMode="scaleToFill" id="1">
+ <rect key="frame" x="0.0" y="0.0" width="320" height="568"/>
+ <autoresizingMask key="autoresizingMask" widthSizable="YES" heightSizable="YES"/>
+ <subviews>
+ <textView clipsSubviews="YES" contentMode="scaleToFill" fixedFrame="YES" editable="NO" text="The results of running the model will appear here." selectable="NO" translatesAutoresizingMaskIntoConstraints="NO" id="quY-AK-ZCn">
+ <rect key="frame" x="40" y="99" width="240" height="168"/>
+ <color key="backgroundColor" white="1" alpha="1" colorSpace="calibratedWhite"/>
+ <fontDescription key="fontDescription" type="system" pointSize="14"/>
+ <textInputTraits key="textInputTraits" autocapitalizationType="sentences"/>
+ </textView>
+ <button opaque="NO" contentMode="scaleToFill" fixedFrame="YES" contentHorizontalAlignment="center" contentVerticalAlignment="center" buttonType="roundedRect" lineBreakMode="middleTruncation" translatesAutoresizingMaskIntoConstraints="NO" id="AAC-Bk-PCC">
+ <rect key="frame" x="76" y="37" width="168" height="30"/>
+ <color key="backgroundColor" white="0.33333333333333331" alpha="1" colorSpace="calibratedWhite"/>
+ <state key="normal" title="Benchmark Model">
+ <color key="titleShadowColor" white="0.5" alpha="1" colorSpace="calibratedWhite"/>
+ </state>
+ <connections>
+ <action selector="getUrl:" destination="-1" eventType="touchUpInside" id="mdP-nK-k9T"/>
+ </connections>
+ </button>
+ </subviews>
+ <color key="backgroundColor" red="0.78314738357315861" green="0.79869981749999996" blue="0.56305065858222869" alpha="1" colorSpace="calibratedRGB"/>
+ </view>
+ <textField opaque="NO" clipsSubviews="YES" contentMode="scaleToFill" contentHorizontalAlignment="left" contentVerticalAlignment="center" text="http://localhost:8080" borderStyle="roundedRect" placeholder="Enter URL" minimumFontSize="17" id="hPw-q5-vh5">
+ <rect key="frame" x="0.0" y="0.0" width="280" height="30"/>
+ <autoresizingMask key="autoresizingMask" flexibleMaxX="YES" flexibleMaxY="YES"/>
+ <fontDescription key="fontDescription" type="system" pointSize="14"/>
+ <textInputTraits key="textInputTraits"/>
+ <point key="canvasLocation" x="795" y="44"/>
+ </textField>
+ </objects>
+</document>
diff --git a/tensorflow/examples/ios/benchmark/Podfile b/tensorflow/examples/ios/benchmark/Podfile
new file mode 100644
index 0000000000..e163d56e8d
--- /dev/null
+++ b/tensorflow/examples/ios/benchmark/Podfile
@@ -0,0 +1,5 @@
+platform :ios, '8.0'
+inhibit_all_warnings!
+
+target 'tf_benchmark_example'
+ pod 'TensorFlow-experimental'
diff --git a/tensorflow/examples/ios/benchmark/data/grace_hopper.jpg b/tensorflow/examples/ios/benchmark/data/grace_hopper.jpg
new file mode 100644
index 0000000000..d2a427810f
--- /dev/null
+++ b/tensorflow/examples/ios/benchmark/data/grace_hopper.jpg
Binary files differ
diff --git a/tensorflow/examples/ios/benchmark/ios_image_load.h b/tensorflow/examples/ios/benchmark/ios_image_load.h
new file mode 100644
index 0000000000..78eaded8d7
--- /dev/null
+++ b/tensorflow/examples/ios/benchmark/ios_image_load.h
@@ -0,0 +1,27 @@
+// Copyright 2015 Google Inc. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#ifndef TENSORFLOW_EXAMPLES_IOS_IOS_IMAGE_LOAD_H_
+#define TENSORFLOW_EXAMPLES_IOS_IOS_IMAGE_LOAD_H_
+
+#include <vector>
+
+#include "tensorflow/core/framework/types.h"
+
+std::vector<tensorflow::uint8> LoadImageFromFile(const char* file_name,
+ int* out_width,
+ int* out_height,
+ int* out_channels);
+
+#endif // TENSORFLOW_EXAMPLES_IOS_IOS_IMAGE_LOAD_H_
diff --git a/tensorflow/examples/ios/benchmark/ios_image_load.mm b/tensorflow/examples/ios/benchmark/ios_image_load.mm
new file mode 100644
index 0000000000..64d1ea21cf
--- /dev/null
+++ b/tensorflow/examples/ios/benchmark/ios_image_load.mm
@@ -0,0 +1,87 @@
+// Copyright 2015 Google Inc. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "ios_image_load.h"
+
+#include <stdlib.h>
+#include <string.h>
+#include <assert.h>
+#include <stdio.h>
+
+#import <CoreImage/CoreImage.h>
+#import <ImageIO/ImageIO.h>
+
+using tensorflow::uint8;
+
+std::vector<uint8> LoadImageFromFile(const char* file_name,
+ int* out_width, int* out_height,
+ int* out_channels) {
+ FILE* file_handle = fopen(file_name, "rb");
+ fseek(file_handle, 0, SEEK_END);
+ const size_t bytes_in_file = ftell(file_handle);
+ fseek(file_handle, 0, SEEK_SET);
+ std::vector<uint8> file_data(bytes_in_file);
+ fread(file_data.data(), 1, bytes_in_file, file_handle);
+ fclose(file_handle);
+ CFDataRef file_data_ref = CFDataCreateWithBytesNoCopy(NULL, file_data.data(),
+ bytes_in_file,
+ kCFAllocatorNull);
+ CGDataProviderRef image_provider =
+ CGDataProviderCreateWithCFData(file_data_ref);
+
+ const char* suffix = strrchr(file_name, '.');
+ if (!suffix || suffix == file_name) {
+ suffix = "";
+ }
+ CGImageRef image;
+ if (strcasecmp(suffix, ".png") == 0) {
+ image = CGImageCreateWithPNGDataProvider(image_provider, NULL, true,
+ kCGRenderingIntentDefault);
+ } else if ((strcasecmp(suffix, ".jpg") == 0) ||
+ (strcasecmp(suffix, ".jpeg") == 0)) {
+ image = CGImageCreateWithJPEGDataProvider(image_provider, NULL, true,
+ kCGRenderingIntentDefault);
+ } else {
+ CFRelease(image_provider);
+ CFRelease(file_data_ref);
+ fprintf(stderr, "Unknown suffix for file '%s'\n", file_name);
+ *out_width = 0;
+ *out_height = 0;
+ *out_channels = 0;
+ return std::vector<uint8>();
+ }
+
+ const int width = (int)CGImageGetWidth(image);
+ const int height = (int)CGImageGetHeight(image);
+ const int channels = 4;
+ CGColorSpaceRef color_space = CGColorSpaceCreateDeviceRGB();
+ const int bytes_per_row = (width * channels);
+ const int bytes_in_image = (bytes_per_row * height);
+ std::vector<uint8> result(bytes_in_image);
+ const int bits_per_component = 8;
+ CGContextRef context = CGBitmapContextCreate(result.data(), width, height,
+ bits_per_component, bytes_per_row, color_space,
+ kCGImageAlphaPremultipliedLast | kCGBitmapByteOrder32Big);
+ CGColorSpaceRelease(color_space);
+ CGContextDrawImage(context, CGRectMake(0, 0, width, height), image);
+ CGContextRelease(context);
+ CFRelease(image);
+ CFRelease(image_provider);
+ CFRelease(file_data_ref);
+
+ *out_width = width;
+ *out_height = height;
+ *out_channels = channels;
+ return result;
+}
diff --git a/tensorflow/examples/ios/benchmark/main.mm b/tensorflow/examples/ios/benchmark/main.mm
new file mode 100644
index 0000000000..d70550a730
--- /dev/null
+++ b/tensorflow/examples/ios/benchmark/main.mm
@@ -0,0 +1,22 @@
+// Copyright 2015 Google Inc. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#import <UIKit/UIKit.h>
+
+int main(int argc, char * argv[]) {
+ @autoreleasepool {
+ NSString *delegateClassName = @"AppDelegate";
+ return UIApplicationMain(argc, argv, nil, delegateClassName);
+ }
+}
diff --git a/tensorflow/examples/ios/benchmark/tf_benchmark_example.xcodeproj/project.pbxproj b/tensorflow/examples/ios/benchmark/tf_benchmark_example.xcodeproj/project.pbxproj
new file mode 100644
index 0000000000..d61b65ba61
--- /dev/null
+++ b/tensorflow/examples/ios/benchmark/tf_benchmark_example.xcodeproj/project.pbxproj
@@ -0,0 +1,388 @@
+// !$*UTF8*$!
+{
+ archiveVersion = 1;
+ classes = {
+ };
+ objectVersion = 46;
+ objects = {
+
+/* Begin PBXBuildFile section */
+ 1C8BA8FD1EC682E700CCCC8C /* main.mm in Sources */ = {isa = PBXBuildFile; fileRef = 59A3CFFC1CF4E68100C4259F /* main.mm */; };
+ 1C8BA8FE1EC682E700CCCC8C /* AppDelegate.mm in Sources */ = {isa = PBXBuildFile; fileRef = 59A3CFF21CF4E68100C4259F /* AppDelegate.mm */; };
+ 1C8BA8FF1EC682E700CCCC8C /* BenchmarkViewController.mm in Sources */ = {isa = PBXBuildFile; fileRef = 59A3CFFF1CF4E68100C4259F /* BenchmarkViewController.mm */; };
+ 1C8BA9001EC682E700CCCC8C /* ios_image_load.mm in Sources */ = {isa = PBXBuildFile; fileRef = 59A3CFFB1CF4E68100C4259F /* ios_image_load.mm */; };
+ 1C8BA9051EC682E700CCCC8C /* BenchmarkViewController.xib in Resources */ = {isa = PBXBuildFile; fileRef = 59A3D0001CF4E68100C4259F /* BenchmarkViewController.xib */; };
+ 1C8BA9061EC682E700CCCC8C /* imagenet_comp_graph_label_strings.txt in Resources */ = {isa = PBXBuildFile; fileRef = 59A3CFF71CF4E68100C4259F /* imagenet_comp_graph_label_strings.txt */; };
+ 1C8BA9071EC682E700CCCC8C /* tensorflow_inception_graph.pb in Resources */ = {isa = PBXBuildFile; fileRef = 59A3CFF91CF4E68100C4259F /* tensorflow_inception_graph.pb */; };
+ 1C8BA9081EC682E700CCCC8C /* grace_hopper.jpg in Resources */ = {isa = PBXBuildFile; fileRef = 59A3CFF51CF4E68100C4259F /* grace_hopper.jpg */; };
+ 1CB1883E1ECCC0DC00C93EF7 /* CoreGraphics.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 1CB1883D1ECCC0DC00C93EF7 /* CoreGraphics.framework */; };
+ 1CB1883F1ECCC10D00C93EF7 /* UIKit.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 1C7AC7FC1ECCBFE400EAE588 /* UIKit.framework */; };
+ 1E0EBA4DF4C722C63814B257 /* libPods-tf_benchmark_example.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 8C4FE48552EFB73D066C66E9 /* libPods-tf_benchmark_example.a */; };
+/* End PBXBuildFile section */
+
+/* Begin PBXFileReference section */
+ 1C7AC7FC1ECCBFE400EAE588 /* UIKit.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = UIKit.framework; path = System/Library/Frameworks/UIKit.framework; sourceTree = SDKROOT; };
+ 1C8BA90C1EC682E700CCCC8C /* tf_benchmark_example.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = tf_benchmark_example.app; sourceTree = BUILT_PRODUCTS_DIR; };
+ 1CB1883B1ECCC09A00C93EF7 /* CoreFoundation.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = CoreFoundation.framework; path = System/Library/Frameworks/CoreFoundation.framework; sourceTree = SDKROOT; };
+ 1CB1883D1ECCC0DC00C93EF7 /* CoreGraphics.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = CoreGraphics.framework; path = System/Library/Frameworks/CoreGraphics.framework; sourceTree = SDKROOT; };
+ 59A3CFF11CF4E68100C4259F /* AppDelegate.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = AppDelegate.h; sourceTree = "<group>"; };
+ 59A3CFF21CF4E68100C4259F /* AppDelegate.mm */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.objcpp; path = AppDelegate.mm; sourceTree = "<group>"; };
+ 59A3CFF51CF4E68100C4259F /* grace_hopper.jpg */ = {isa = PBXFileReference; lastKnownFileType = image.jpeg; path = grace_hopper.jpg; sourceTree = "<group>"; };
+ 59A3CFF71CF4E68100C4259F /* imagenet_comp_graph_label_strings.txt */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text; path = imagenet_comp_graph_label_strings.txt; sourceTree = "<group>"; };
+ 59A3CFF91CF4E68100C4259F /* tensorflow_inception_graph.pb */ = {isa = PBXFileReference; lastKnownFileType = file; path = tensorflow_inception_graph.pb; sourceTree = "<group>"; };
+ 59A3CFFA1CF4E68100C4259F /* ios_image_load.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = ios_image_load.h; sourceTree = "<group>"; };
+ 59A3CFFB1CF4E68100C4259F /* ios_image_load.mm */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.objcpp; path = ios_image_load.mm; sourceTree = "<group>"; };
+ 59A3CFFC1CF4E68100C4259F /* main.mm */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.objcpp; path = main.mm; sourceTree = "<group>"; };
+ 59A3CFFD1CF4E68100C4259F /* Benchmark-Info.plist */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.plist.xml; path = "Benchmark-Info.plist"; sourceTree = "<group>"; };
+ 59A3CFFE1CF4E68100C4259F /* BenchmarkViewController.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = BenchmarkViewController.h; sourceTree = "<group>"; };
+ 59A3CFFF1CF4E68100C4259F /* BenchmarkViewController.mm */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.objcpp; path = BenchmarkViewController.mm; sourceTree = "<group>"; };
+ 59A3D0001CF4E68100C4259F /* BenchmarkViewController.xib */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = file.xib; path = BenchmarkViewController.xib; sourceTree = "<group>"; };
+ 5FD1623E64FC0154A67E8DD5 /* Pods-tf_benchmark_example.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-tf_benchmark_example.debug.xcconfig"; path = "Pods/Target Support Files/Pods-tf_benchmark_example/Pods-tf_benchmark_example.debug.xcconfig"; sourceTree = "<group>"; };
+ 8C4FE48552EFB73D066C66E9 /* libPods-tf_benchmark_example.a */ = {isa = PBXFileReference; explicitFileType = archive.ar; includeInIndex = 0; path = "libPods-tf_benchmark_example.a"; sourceTree = BUILT_PRODUCTS_DIR; };
+ DB6B3E596779C98202E84711 /* Pods-tf_benchmark_example.release.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-tf_benchmark_example.release.xcconfig"; path = "Pods/Target Support Files/Pods-tf_benchmark_example/Pods-tf_benchmark_example.release.xcconfig"; sourceTree = "<group>"; };
+/* End PBXFileReference section */
+
+/* Begin PBXFrameworksBuildPhase section */
+ 1C8BA9011EC682E700CCCC8C /* Frameworks */ = {
+ isa = PBXFrameworksBuildPhase;
+ buildActionMask = 2147483647;
+ files = (
+ 1CB1883F1ECCC10D00C93EF7 /* UIKit.framework in Frameworks */,
+ 1CB1883E1ECCC0DC00C93EF7 /* CoreGraphics.framework in Frameworks */,
+ 1E0EBA4DF4C722C63814B257 /* libPods-tf_benchmark_example.a in Frameworks */,
+ );
+ runOnlyForDeploymentPostprocessing = 0;
+ };
+/* End PBXFrameworksBuildPhase section */
+
+/* Begin PBXGroup section */
+ 2BD56010B574F539C2070A57 /* Pods */ = {
+ isa = PBXGroup;
+ children = (
+ 5FD1623E64FC0154A67E8DD5 /* Pods-tf_benchmark_example.debug.xcconfig */,
+ DB6B3E596779C98202E84711 /* Pods-tf_benchmark_example.release.xcconfig */,
+ );
+ name = Pods;
+ sourceTree = "<group>";
+ };
+ 591157921CF4011C00C31E3A = {
+ isa = PBXGroup;
+ children = (
+ 59A3CFF11CF4E68100C4259F /* AppDelegate.h */,
+ 59A3CFF21CF4E68100C4259F /* AppDelegate.mm */,
+ 59A3CFF31CF4E68100C4259F /* data */,
+ 59A3CFFA1CF4E68100C4259F /* ios_image_load.h */,
+ 59A3CFFB1CF4E68100C4259F /* ios_image_load.mm */,
+ 59A3CFFC1CF4E68100C4259F /* main.mm */,
+ 59A3CFFD1CF4E68100C4259F /* Benchmark-Info.plist */,
+ 59A3CFFE1CF4E68100C4259F /* BenchmarkViewController.h */,
+ 59A3CFFF1CF4E68100C4259F /* BenchmarkViewController.mm */,
+ 59A3D0001CF4E68100C4259F /* BenchmarkViewController.xib */,
+ 5911579C1CF4011C00C31E3A /* Products */,
+ 2BD56010B574F539C2070A57 /* Pods */,
+ 76A25A27041EB307BDFF0DD1 /* Frameworks */,
+ );
+ sourceTree = "<group>";
+ };
+ 5911579C1CF4011C00C31E3A /* Products */ = {
+ isa = PBXGroup;
+ children = (
+ 1C8BA90C1EC682E700CCCC8C /* tf_benchmark_example.app */,
+ );
+ name = Products;
+ sourceTree = "<group>";
+ };
+ 59A3CFF31CF4E68100C4259F /* data */ = {
+ isa = PBXGroup;
+ children = (
+ 59A3CFF51CF4E68100C4259F /* grace_hopper.jpg */,
+ 59A3CFF71CF4E68100C4259F /* imagenet_comp_graph_label_strings.txt */,
+ 59A3CFF91CF4E68100C4259F /* tensorflow_inception_graph.pb */,
+ );
+ path = data;
+ sourceTree = "<group>";
+ };
+ 76A25A27041EB307BDFF0DD1 /* Frameworks */ = {
+ isa = PBXGroup;
+ children = (
+ 1CB1883D1ECCC0DC00C93EF7 /* CoreGraphics.framework */,
+ 1CB1883B1ECCC09A00C93EF7 /* CoreFoundation.framework */,
+ 1C7AC7FC1ECCBFE400EAE588 /* UIKit.framework */,
+ 8C4FE48552EFB73D066C66E9 /* libPods-tf_benchmark_example.a */,
+ );
+ name = Frameworks;
+ sourceTree = "<group>";
+ };
+/* End PBXGroup section */
+
+/* Begin PBXNativeTarget section */
+ 1C8BA8FB1EC682E700CCCC8C /* tf_benchmark_example */ = {
+ isa = PBXNativeTarget;
+ buildConfigurationList = 1C8BA9091EC682E700CCCC8C /* Build configuration list for PBXNativeTarget "tf_benchmark_example" */;
+ buildPhases = (
+ 0388D751057A257A12848245 /* [CP] Check Pods Manifest.lock */,
+ 1C8BA8FC1EC682E700CCCC8C /* Sources */,
+ 1C8BA9011EC682E700CCCC8C /* Frameworks */,
+ 1C8BA9041EC682E700CCCC8C /* Resources */,
+ 8999A303091D4E86202C2F64 /* [CP] Embed Pods Frameworks */,
+ A7B4B278BCC417B76A47ABB0 /* [CP] Copy Pods Resources */,
+ );
+ buildRules = (
+ );
+ dependencies = (
+ );
+ name = tf_benchmark_example;
+ productName = benchmark;
+ productReference = 1C8BA90C1EC682E700CCCC8C /* tf_benchmark_example.app */;
+ productType = "com.apple.product-type.application";
+ };
+/* End PBXNativeTarget section */
+
+/* Begin PBXProject section */
+ 591157931CF4011C00C31E3A /* Project object */ = {
+ isa = PBXProject;
+ attributes = {
+ LastUpgradeCheck = 0830;
+ ORGANIZATIONNAME = Google;
+ };
+ buildConfigurationList = 591157961CF4011C00C31E3A /* Build configuration list for PBXProject "tf_benchmark_example" */;
+ compatibilityVersion = "Xcode 3.2";
+ developmentRegion = English;
+ hasScannedForEncodings = 0;
+ knownRegions = (
+ en,
+ Base,
+ );
+ mainGroup = 591157921CF4011C00C31E3A;
+ productRefGroup = 5911579C1CF4011C00C31E3A /* Products */;
+ projectDirPath = "";
+ projectRoot = "";
+ targets = (
+ 1C8BA8FB1EC682E700CCCC8C /* tf_benchmark_example */,
+ );
+ };
+/* End PBXProject section */
+
+/* Begin PBXResourcesBuildPhase section */
+ 1C8BA9041EC682E700CCCC8C /* Resources */ = {
+ isa = PBXResourcesBuildPhase;
+ buildActionMask = 2147483647;
+ files = (
+ 1C8BA9051EC682E700CCCC8C /* BenchmarkViewController.xib in Resources */,
+ 1C8BA9061EC682E700CCCC8C /* imagenet_comp_graph_label_strings.txt in Resources */,
+ 1C8BA9071EC682E700CCCC8C /* tensorflow_inception_graph.pb in Resources */,
+ 1C8BA9081EC682E700CCCC8C /* grace_hopper.jpg in Resources */,
+ );
+ runOnlyForDeploymentPostprocessing = 0;
+ };
+/* End PBXResourcesBuildPhase section */
+
+/* Begin PBXShellScriptBuildPhase section */
+ 0388D751057A257A12848245 /* [CP] Check Pods Manifest.lock */ = {
+ isa = PBXShellScriptBuildPhase;
+ buildActionMask = 2147483647;
+ files = (
+ );
+ inputPaths = (
+ );
+ name = "[CP] Check Pods Manifest.lock";
+ outputPaths = (
+ );
+ runOnlyForDeploymentPostprocessing = 0;
+ shellPath = /bin/sh;
+ shellScript = "diff \"${PODS_PODFILE_DIR_PATH}/Podfile.lock\" \"${PODS_ROOT}/Manifest.lock\" > /dev/null\nif [ $? != 0 ] ; then\n # print error to STDERR\n echo \"error: The sandbox is not in sync with the Podfile.lock. Run 'pod install' or update your CocoaPods installation.\" >&2\n exit 1\nfi\n";
+ showEnvVarsInLog = 0;
+ };
+ 8999A303091D4E86202C2F64 /* [CP] Embed Pods Frameworks */ = {
+ isa = PBXShellScriptBuildPhase;
+ buildActionMask = 2147483647;
+ files = (
+ );
+ inputPaths = (
+ );
+ name = "[CP] Embed Pods Frameworks";
+ outputPaths = (
+ );
+ runOnlyForDeploymentPostprocessing = 0;
+ shellPath = /bin/sh;
+ shellScript = "\"${SRCROOT}/Pods/Target Support Files/Pods-tf_benchmark_example/Pods-tf_benchmark_example-frameworks.sh\"\n";
+ showEnvVarsInLog = 0;
+ };
+ A7B4B278BCC417B76A47ABB0 /* [CP] Copy Pods Resources */ = {
+ isa = PBXShellScriptBuildPhase;
+ buildActionMask = 2147483647;
+ files = (
+ );
+ inputPaths = (
+ );
+ name = "[CP] Copy Pods Resources";
+ outputPaths = (
+ );
+ runOnlyForDeploymentPostprocessing = 0;
+ shellPath = /bin/sh;
+ shellScript = "\"${SRCROOT}/Pods/Target Support Files/Pods-tf_benchmark_example/Pods-tf_benchmark_example-resources.sh\"\n";
+ showEnvVarsInLog = 0;
+ };
+/* End PBXShellScriptBuildPhase section */
+
+/* Begin PBXSourcesBuildPhase section */
+ 1C8BA8FC1EC682E700CCCC8C /* Sources */ = {
+ isa = PBXSourcesBuildPhase;
+ buildActionMask = 2147483647;
+ files = (
+ 1C8BA8FD1EC682E700CCCC8C /* main.mm in Sources */,
+ 1C8BA8FE1EC682E700CCCC8C /* AppDelegate.mm in Sources */,
+ 1C8BA8FF1EC682E700CCCC8C /* BenchmarkViewController.mm in Sources */,
+ 1C8BA9001EC682E700CCCC8C /* ios_image_load.mm in Sources */,
+ );
+ runOnlyForDeploymentPostprocessing = 0;
+ };
+/* End PBXSourcesBuildPhase section */
+
+/* Begin XCBuildConfiguration section */
+ 1C8BA90A1EC682E700CCCC8C /* Debug */ = {
+ isa = XCBuildConfiguration;
+ baseConfigurationReference = 5FD1623E64FC0154A67E8DD5 /* Pods-tf_benchmark_example.debug.xcconfig */;
+ buildSettings = {
+ CODE_SIGN_IDENTITY = "iPhone Developer";
+ ENABLE_BITCODE = NO;
+ HEADER_SEARCH_PATHS = "$(inherited)";
+ INFOPLIST_FILE = "$(SRCROOT)/Benchmark-Info.plist";
+ IPHONEOS_DEPLOYMENT_TARGET = 8.0;
+ LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks";
+ LIBRARY_SEARCH_PATHS = "";
+ OTHER_LDFLAGS = "$(inherited)";
+ PRODUCT_BUNDLE_IDENTIFIER = "com.google.tf-benchmark-example";
+ PRODUCT_NAME = "$(TARGET_NAME)";
+ };
+ name = Debug;
+ };
+ 1C8BA90B1EC682E700CCCC8C /* Release */ = {
+ isa = XCBuildConfiguration;
+ baseConfigurationReference = DB6B3E596779C98202E84711 /* Pods-tf_benchmark_example.release.xcconfig */;
+ buildSettings = {
+ CODE_SIGN_IDENTITY = "iPhone Developer";
+ ENABLE_BITCODE = NO;
+ HEADER_SEARCH_PATHS = "$(inherited)";
+ INFOPLIST_FILE = "$(SRCROOT)/Benchmark-Info.plist";
+ IPHONEOS_DEPLOYMENT_TARGET = 8.0;
+ LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks";
+ LIBRARY_SEARCH_PATHS = "";
+ ONLY_ACTIVE_ARCH = YES;
+ OTHER_LDFLAGS = "$(inherited)";
+ PRODUCT_BUNDLE_IDENTIFIER = "com.google.tf-benchmark-example";
+ PRODUCT_NAME = "$(TARGET_NAME)";
+ };
+ name = Release;
+ };
+ 591157B01CF4011D00C31E3A /* Debug */ = {
+ isa = XCBuildConfiguration;
+ buildSettings = {
+ ALWAYS_SEARCH_USER_PATHS = NO;
+ CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x";
+ CLANG_CXX_LIBRARY = "libc++";
+ CLANG_ENABLE_MODULES = YES;
+ CLANG_ENABLE_OBJC_ARC = YES;
+ CLANG_WARN_BOOL_CONVERSION = YES;
+ CLANG_WARN_CONSTANT_CONVERSION = YES;
+ CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
+ CLANG_WARN_EMPTY_BODY = YES;
+ CLANG_WARN_ENUM_CONVERSION = YES;
+ CLANG_WARN_INFINITE_RECURSION = YES;
+ CLANG_WARN_INT_CONVERSION = YES;
+ CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
+ CLANG_WARN_SUSPICIOUS_MOVE = YES;
+ CLANG_WARN_UNREACHABLE_CODE = YES;
+ CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
+ "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer";
+ COPY_PHASE_STRIP = NO;
+ DEBUG_INFORMATION_FORMAT = dwarf;
+ ENABLE_STRICT_OBJC_MSGSEND = YES;
+ ENABLE_TESTABILITY = YES;
+ GCC_C_LANGUAGE_STANDARD = gnu99;
+ GCC_DYNAMIC_NO_PIC = NO;
+ GCC_NO_COMMON_BLOCKS = YES;
+ GCC_OPTIMIZATION_LEVEL = 0;
+ GCC_PREPROCESSOR_DEFINITIONS = (
+ "DEBUG=1",
+ "$(inherited)",
+ );
+ GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
+ GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
+ GCC_WARN_UNDECLARED_SELECTOR = YES;
+ GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
+ GCC_WARN_UNUSED_FUNCTION = YES;
+ GCC_WARN_UNUSED_VARIABLE = YES;
+ IPHONEOS_DEPLOYMENT_TARGET = 8.0;
+ MTL_ENABLE_DEBUG_INFO = YES;
+ ONLY_ACTIVE_ARCH = YES;
+ SDKROOT = iphoneos;
+ TARGETED_DEVICE_FAMILY = "1,2";
+ };
+ name = Debug;
+ };
+ 591157B11CF4011D00C31E3A /* Release */ = {
+ isa = XCBuildConfiguration;
+ buildSettings = {
+ ALWAYS_SEARCH_USER_PATHS = NO;
+ CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x";
+ CLANG_CXX_LIBRARY = "libc++";
+ CLANG_ENABLE_MODULES = YES;
+ CLANG_ENABLE_OBJC_ARC = YES;
+ CLANG_WARN_BOOL_CONVERSION = YES;
+ CLANG_WARN_CONSTANT_CONVERSION = YES;
+ CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
+ CLANG_WARN_EMPTY_BODY = YES;
+ CLANG_WARN_ENUM_CONVERSION = YES;
+ CLANG_WARN_INFINITE_RECURSION = YES;
+ CLANG_WARN_INT_CONVERSION = YES;
+ CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
+ CLANG_WARN_SUSPICIOUS_MOVE = YES;
+ CLANG_WARN_UNREACHABLE_CODE = YES;
+ CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
+ "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer";
+ COPY_PHASE_STRIP = NO;
+ DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym";
+ ENABLE_NS_ASSERTIONS = NO;
+ ENABLE_STRICT_OBJC_MSGSEND = YES;
+ GCC_C_LANGUAGE_STANDARD = gnu99;
+ GCC_NO_COMMON_BLOCKS = YES;
+ GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
+ GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
+ GCC_WARN_UNDECLARED_SELECTOR = YES;
+ GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
+ GCC_WARN_UNUSED_FUNCTION = YES;
+ GCC_WARN_UNUSED_VARIABLE = YES;
+ IPHONEOS_DEPLOYMENT_TARGET = 8.0;
+ MTL_ENABLE_DEBUG_INFO = NO;
+ SDKROOT = iphoneos;
+ TARGETED_DEVICE_FAMILY = "1,2";
+ VALIDATE_PRODUCT = YES;
+ };
+ name = Release;
+ };
+/* End XCBuildConfiguration section */
+
+/* Begin XCConfigurationList section */
+ 1C8BA9091EC682E700CCCC8C /* Build configuration list for PBXNativeTarget "tf_benchmark_example" */ = {
+ isa = XCConfigurationList;
+ buildConfigurations = (
+ 1C8BA90A1EC682E700CCCC8C /* Debug */,
+ 1C8BA90B1EC682E700CCCC8C /* Release */,
+ );
+ defaultConfigurationIsVisible = 0;
+ defaultConfigurationName = Release;
+ };
+ 591157961CF4011C00C31E3A /* Build configuration list for PBXProject "tf_benchmark_example" */ = {
+ isa = XCConfigurationList;
+ buildConfigurations = (
+ 591157B01CF4011D00C31E3A /* Debug */,
+ 591157B11CF4011D00C31E3A /* Release */,
+ );
+ defaultConfigurationIsVisible = 0;
+ defaultConfigurationName = Release;
+ };
+/* End XCConfigurationList section */
+ };
+ rootObject = 591157931CF4011C00C31E3A /* Project object */;
+}
diff --git a/tensorflow/examples/ios/camera/CameraExampleAppDelegate.h b/tensorflow/examples/ios/camera/CameraExampleAppDelegate.h
new file mode 100644
index 0000000000..0039d5e7ca
--- /dev/null
+++ b/tensorflow/examples/ios/camera/CameraExampleAppDelegate.h
@@ -0,0 +1,21 @@
+// Copyright 2015 Google Inc. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#import <UIKit/UIKit.h>
+
+@interface CameraExampleAppDelegate : UIResponder<UIApplicationDelegate>
+
+@property(strong, nonatomic) UIWindow *window;
+
+@end
diff --git a/tensorflow/examples/ios/camera/CameraExampleAppDelegate.m b/tensorflow/examples/ios/camera/CameraExampleAppDelegate.m
new file mode 100644
index 0000000000..d134c2b591
--- /dev/null
+++ b/tensorflow/examples/ios/camera/CameraExampleAppDelegate.m
@@ -0,0 +1,44 @@
+// Copyright 2015 Google Inc. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#import "CameraExampleAppDelegate.h"
+
+@implementation CameraExampleAppDelegate
+
+@synthesize window = _window;
+
+- (BOOL)application:(UIApplication *)application
+ didFinishLaunchingWithOptions:(NSDictionary *)launchOptions {
+ [self.window makeKeyAndVisible];
+ return YES;
+}
+
+- (void)applicationWillResignActive:(UIApplication *)application {
+ [[UIApplication sharedApplication] setIdleTimerDisabled:NO];
+}
+
+- (void)applicationDidEnterBackground:(UIApplication *)application {
+}
+
+- (void)applicationWillEnterForeground:(UIApplication *)application {
+}
+
+- (void)applicationDidBecomeActive:(UIApplication *)application {
+ [[UIApplication sharedApplication] setIdleTimerDisabled:YES];
+}
+
+- (void)applicationWillTerminate:(UIApplication *)application {
+}
+
+@end
diff --git a/tensorflow/examples/ios/camera/CameraExampleViewController.h b/tensorflow/examples/ios/camera/CameraExampleViewController.h
new file mode 100644
index 0000000000..0aefbc6eed
--- /dev/null
+++ b/tensorflow/examples/ios/camera/CameraExampleViewController.h
@@ -0,0 +1,47 @@
+// Copyright 2015 Google Inc. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#import <AVFoundation/AVFoundation.h>
+#import <UIKit/UIKit.h>
+
+#include <memory>
+#include "tensorflow/core/public/session.h"
+#include "tensorflow/core/util/memmapped_file_system.h"
+
+@interface CameraExampleViewController
+ : UIViewController<UIGestureRecognizerDelegate,
+ AVCaptureVideoDataOutputSampleBufferDelegate> {
+ IBOutlet UIView *previewView;
+ IBOutlet UISegmentedControl *camerasControl;
+ AVCaptureVideoPreviewLayer *previewLayer;
+ AVCaptureVideoDataOutput *videoDataOutput;
+ dispatch_queue_t videoDataOutputQueue;
+ AVCaptureStillImageOutput *stillImageOutput;
+ UIView *flashView;
+ UIImage *square;
+ BOOL isUsingFrontFacingCamera;
+ AVSpeechSynthesizer *synth;
+ NSMutableDictionary *oldPredictionValues;
+ NSMutableArray *labelLayers;
+ AVCaptureSession *session;
+ std::unique_ptr<tensorflow::Session> tf_session;
+ std::unique_ptr<tensorflow::MemmappedEnv> tf_memmapped_env;
+ std::vector<std::string> labels;
+}
+@property(strong, nonatomic) CATextLayer *predictionTextLayer;
+
+- (IBAction)takePicture:(id)sender;
+- (IBAction)switchCameras:(id)sender;
+
+@end
diff --git a/tensorflow/examples/ios/camera/CameraExampleViewController.mm b/tensorflow/examples/ios/camera/CameraExampleViewController.mm
new file mode 100644
index 0000000000..d113d50ff8
--- /dev/null
+++ b/tensorflow/examples/ios/camera/CameraExampleViewController.mm
@@ -0,0 +1,621 @@
+// Copyright 2015 Google Inc. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#import <AssertMacros.h>
+#import <AssetsLibrary/AssetsLibrary.h>
+#import <CoreImage/CoreImage.h>
+#import <ImageIO/ImageIO.h>
+#import "CameraExampleViewController.h"
+
+#include <sys/time.h>
+
+#include "tensorflow_utils.h"
+
+// If you have your own model, modify this to the file name, and make sure
+// you've added the file to your app resources too.
+static NSString* model_file_name = @"tensorflow_inception_graph";
+static NSString* model_file_type = @"pb";
+// This controls whether we'll be loading a plain GraphDef proto, or a
+// file created by the convert_graphdef_memmapped_format utility that wraps a
+// GraphDef and parameter file that can be mapped into memory from file to
+// reduce overall memory usage.
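+// A file like that can usually be produced with the
+// convert_graphdef_memmapped_format tool, roughly as follows (the bazel
+// target and flags shown are the documented ones and may differ between
+// TensorFlow releases; the output name is just an example):
+//   bazel build tensorflow/contrib/util:convert_graphdef_memmapped_format
+//   bazel-bin/tensorflow/contrib/util/convert_graphdef_memmapped_format \
+//       --in_graph=tensorflow_inception_graph.pb \
+//       --out_graph=memmapped_graph.pb
+// If you use such a file, set the constant below to true and add the
+// converted file to the app's resources.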
+const bool model_uses_memory_mapping = false;
+// If you have your own model, point this to the labels file.
+static NSString* labels_file_name = @"imagenet_comp_graph_label_strings";
+static NSString* labels_file_type = @"txt";
+// These dimensions need to match those the model was trained with.
+const int wanted_input_width = 224;
+const int wanted_input_height = 224;
+const int wanted_input_channels = 3;
+const float input_mean = 117.0f;
+const float input_std = 1.0f;
+const std::string input_layer_name = "input";
+const std::string output_layer_name = "softmax1";
+
+static void *AVCaptureStillImageIsCapturingStillImageContext =
+ &AVCaptureStillImageIsCapturingStillImageContext;
+
+@interface CameraExampleViewController (InternalMethods)
+- (void)setupAVCapture;
+- (void)teardownAVCapture;
+@end
+
+@implementation CameraExampleViewController
+
+- (void)setupAVCapture {
+ NSError *error = nil;
+
+ session = [AVCaptureSession new];
+ if ([[UIDevice currentDevice] userInterfaceIdiom] ==
+ UIUserInterfaceIdiomPhone)
+ [session setSessionPreset:AVCaptureSessionPreset640x480];
+ else
+ [session setSessionPreset:AVCaptureSessionPresetPhoto];
+
+ AVCaptureDevice *device =
+ [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
+ AVCaptureDeviceInput *deviceInput =
+ [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
+ assert(error == nil);
+
+ isUsingFrontFacingCamera = NO;
+ if ([session canAddInput:deviceInput]) [session addInput:deviceInput];
+
+ stillImageOutput = [AVCaptureStillImageOutput new];
+ [stillImageOutput
+ addObserver:self
+ forKeyPath:@"capturingStillImage"
+ options:NSKeyValueObservingOptionNew
+ context:(void *)(AVCaptureStillImageIsCapturingStillImageContext)];
+ if ([session canAddOutput:stillImageOutput])
+ [session addOutput:stillImageOutput];
+
+ videoDataOutput = [AVCaptureVideoDataOutput new];
+
+ NSDictionary *rgbOutputSettings = [NSDictionary
+ dictionaryWithObject:[NSNumber numberWithInt:kCMPixelFormat_32BGRA]
+ forKey:(id)kCVPixelBufferPixelFormatTypeKey];
+ [videoDataOutput setVideoSettings:rgbOutputSettings];
+ [videoDataOutput setAlwaysDiscardsLateVideoFrames:YES];
+ videoDataOutputQueue =
+ dispatch_queue_create("VideoDataOutputQueue", DISPATCH_QUEUE_SERIAL);
+ [videoDataOutput setSampleBufferDelegate:self queue:videoDataOutputQueue];
+
+ if ([session canAddOutput:videoDataOutput])
+ [session addOutput:videoDataOutput];
+ [[videoDataOutput connectionWithMediaType:AVMediaTypeVideo] setEnabled:YES];
+
+ previewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:session];
+ [previewLayer setBackgroundColor:[[UIColor blackColor] CGColor]];
+ [previewLayer setVideoGravity:AVLayerVideoGravityResizeAspect];
+ CALayer *rootLayer = [previewView layer];
+ [rootLayer setMasksToBounds:YES];
+ [previewLayer setFrame:[rootLayer bounds]];
+ [rootLayer addSublayer:previewLayer];
+ [session startRunning];
+
+ if (error) {
+ NSString *title = [NSString stringWithFormat:@"Failed with error %d", (int)[error code]];
+ UIAlertController *alertController =
+ [UIAlertController alertControllerWithTitle:title
+ message:[error localizedDescription]
+ preferredStyle:UIAlertControllerStyleAlert];
+ UIAlertAction *dismiss =
+ [UIAlertAction actionWithTitle:@"Dismiss" style:UIAlertActionStyleDefault handler:nil];
+ [alertController addAction:dismiss];
+ [self presentViewController:alertController animated:YES completion:nil];
+ [self teardownAVCapture];
+ }
+}
+
+- (void)teardownAVCapture {
+  [stillImageOutput removeObserver:self forKeyPath:@"capturingStillImage"];
+ [previewLayer removeFromSuperlayer];
+}
+
+- (void)observeValueForKeyPath:(NSString *)keyPath
+ ofObject:(id)object
+ change:(NSDictionary *)change
+ context:(void *)context {
+ if (context == AVCaptureStillImageIsCapturingStillImageContext) {
+ BOOL isCapturingStillImage =
+ [[change objectForKey:NSKeyValueChangeNewKey] boolValue];
+
+ if (isCapturingStillImage) {
+ // do flash bulb like animation
+ flashView = [[UIView alloc] initWithFrame:[previewView frame]];
+ [flashView setBackgroundColor:[UIColor whiteColor]];
+ [flashView setAlpha:0.f];
+ [[[self view] window] addSubview:flashView];
+
+ [UIView animateWithDuration:.4f
+ animations:^{
+ [flashView setAlpha:1.f];
+ }];
+ } else {
+ [UIView animateWithDuration:.4f
+ animations:^{
+ [flashView setAlpha:0.f];
+ }
+ completion:^(BOOL finished) {
+ [flashView removeFromSuperview];
+ flashView = nil;
+ }];
+ }
+ }
+}
+
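+// Translates a UIDeviceOrientation into the matching
+// AVCaptureVideoOrientation. The landscape cases are swapped because the two
+// enumerations define left and right from opposite perspectives.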
+- (AVCaptureVideoOrientation)avOrientationForDeviceOrientation:
+ (UIDeviceOrientation)deviceOrientation {
+ AVCaptureVideoOrientation result =
+ (AVCaptureVideoOrientation)(deviceOrientation);
+ if (deviceOrientation == UIDeviceOrientationLandscapeLeft)
+ result = AVCaptureVideoOrientationLandscapeRight;
+ else if (deviceOrientation == UIDeviceOrientationLandscapeRight)
+ result = AVCaptureVideoOrientationLandscapeLeft;
+ return result;
+}
+
+- (IBAction)takePicture:(id)sender {
+ if ([session isRunning]) {
+ [session stopRunning];
+ [sender setTitle:@"Continue" forState:UIControlStateNormal];
+
+ flashView = [[UIView alloc] initWithFrame:[previewView frame]];
+ [flashView setBackgroundColor:[UIColor whiteColor]];
+ [flashView setAlpha:0.f];
+ [[[self view] window] addSubview:flashView];
+
+ [UIView animateWithDuration:.2f
+ animations:^{
+ [flashView setAlpha:1.f];
+ }
+ completion:^(BOOL finished) {
+ [UIView animateWithDuration:.2f
+ animations:^{
+ [flashView setAlpha:0.f];
+ }
+ completion:^(BOOL finished) {
+ [flashView removeFromSuperview];
+ flashView = nil;
+ }];
+ }];
+
+ } else {
+ [session startRunning];
+ [sender setTitle:@"Freeze Frame" forState:UIControlStateNormal];
+ }
+}
+
++ (CGRect)videoPreviewBoxForGravity:(NSString *)gravity
+ frameSize:(CGSize)frameSize
+ apertureSize:(CGSize)apertureSize {
+ CGFloat apertureRatio = apertureSize.height / apertureSize.width;
+ CGFloat viewRatio = frameSize.width / frameSize.height;
+
+ CGSize size = CGSizeZero;
+ if ([gravity isEqualToString:AVLayerVideoGravityResizeAspectFill]) {
+ if (viewRatio > apertureRatio) {
+ size.width = frameSize.width;
+ size.height =
+ apertureSize.width * (frameSize.width / apertureSize.height);
+ } else {
+ size.width =
+ apertureSize.height * (frameSize.height / apertureSize.width);
+ size.height = frameSize.height;
+ }
+ } else if ([gravity isEqualToString:AVLayerVideoGravityResizeAspect]) {
+ if (viewRatio > apertureRatio) {
+ size.width =
+ apertureSize.height * (frameSize.height / apertureSize.width);
+ size.height = frameSize.height;
+ } else {
+ size.width = frameSize.width;
+ size.height =
+ apertureSize.width * (frameSize.width / apertureSize.height);
+ }
+ } else if ([gravity isEqualToString:AVLayerVideoGravityResize]) {
+ size.width = frameSize.width;
+ size.height = frameSize.height;
+ }
+
+ CGRect videoBox;
+ videoBox.size = size;
+ if (size.width < frameSize.width)
+ videoBox.origin.x = (frameSize.width - size.width) / 2;
+ else
+ videoBox.origin.x = (size.width - frameSize.width) / 2;
+
+ if (size.height < frameSize.height)
+ videoBox.origin.y = (frameSize.height - size.height) / 2;
+ else
+ videoBox.origin.y = (size.height - frameSize.height) / 2;
+
+ return videoBox;
+}
+
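+// Sample-buffer delegate callback, invoked on videoDataOutputQueue for each
+// camera frame; the pixel buffer is retained while the model runs on it.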
+- (void)captureOutput:(AVCaptureOutput *)captureOutput
+didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
+ fromConnection:(AVCaptureConnection *)connection {
+ CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
+ CFRetain(pixelBuffer);
+ [self runCNNOnFrame:pixelBuffer];
+ CFRelease(pixelBuffer);
+}
+
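+// Converts one camera frame into the model's input tensor and runs inference:
+// the BGRA frame is cropped/scaled down to wanted_input_width x
+// wanted_input_height, each channel is normalized with input_mean and
+// input_std, and the result is run through the TensorFlow session.
+// Predictions above 5% confidence are forwarded to the main queue for display.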
+- (void)runCNNOnFrame:(CVPixelBufferRef)pixelBuffer {
+ assert(pixelBuffer != NULL);
+
+ OSType sourcePixelFormat = CVPixelBufferGetPixelFormatType(pixelBuffer);
+ int doReverseChannels;
+ if (kCVPixelFormatType_32ARGB == sourcePixelFormat) {
+ doReverseChannels = 1;
+ } else if (kCVPixelFormatType_32BGRA == sourcePixelFormat) {
+ doReverseChannels = 0;
+ } else {
+ assert(false); // Unknown source format
+ }
+
+ const int sourceRowBytes = (int)CVPixelBufferGetBytesPerRow(pixelBuffer);
+ const int image_width = (int)CVPixelBufferGetWidth(pixelBuffer);
+ const int fullHeight = (int)CVPixelBufferGetHeight(pixelBuffer);
+
+ CVPixelBufferLockFlags unlockFlags = kNilOptions;
+ CVPixelBufferLockBaseAddress(pixelBuffer, unlockFlags);
+
+ unsigned char *sourceBaseAddr =
+ (unsigned char *)(CVPixelBufferGetBaseAddress(pixelBuffer));
+ int image_height;
+ unsigned char *sourceStartAddr;
+ if (fullHeight <= image_width) {
+ image_height = fullHeight;
+ sourceStartAddr = sourceBaseAddr;
+ } else {
+ image_height = image_width;
+ const int marginY = ((fullHeight - image_width) / 2);
+ sourceStartAddr = (sourceBaseAddr + (marginY * sourceRowBytes));
+ }
+ const int image_channels = 4;
+
+ assert(image_channels >= wanted_input_channels);
+ tensorflow::Tensor image_tensor(
+ tensorflow::DT_FLOAT,
+ tensorflow::TensorShape(
+ {1, wanted_input_height, wanted_input_width, wanted_input_channels}));
+ auto image_tensor_mapped = image_tensor.tensor<float, 4>();
+ tensorflow::uint8 *in = sourceStartAddr;
+ float *out = image_tensor_mapped.data();
+ for (int y = 0; y < wanted_input_height; ++y) {
+ float *out_row = out + (y * wanted_input_width * wanted_input_channels);
+ for (int x = 0; x < wanted_input_width; ++x) {
+ const int in_x = (y * image_width) / wanted_input_width;
+ const int in_y = (x * image_height) / wanted_input_height;
+ tensorflow::uint8 *in_pixel =
+ in + (in_y * image_width * image_channels) + (in_x * image_channels);
+ float *out_pixel = out_row + (x * wanted_input_channels);
+ for (int c = 0; c < wanted_input_channels; ++c) {
+ out_pixel[c] = (in_pixel[c] - input_mean) / input_std;
+ }
+ }
+ }
+
+ CVPixelBufferUnlockBaseAddress(pixelBuffer, unlockFlags);
+
+ if (tf_session.get()) {
+ std::vector<tensorflow::Tensor> outputs;
+ tensorflow::Status run_status = tf_session->Run(
+ {{input_layer_name, image_tensor}}, {output_layer_name}, {}, &outputs);
+ if (!run_status.ok()) {
+ LOG(ERROR) << "Running model failed:" << run_status;
+ } else {
+ tensorflow::Tensor *output = &outputs[0];
+ auto predictions = output->flat<float>();
+
+ NSMutableDictionary *newValues = [NSMutableDictionary dictionary];
+ for (int index = 0; index < predictions.size(); index += 1) {
+ const float predictionValue = predictions(index);
+ if (predictionValue > 0.05f) {
+          std::string label = labels[index % labels.size()];
+ NSString *labelObject = [NSString stringWithUTF8String:label.c_str()];
+ NSNumber *valueObject = [NSNumber numberWithFloat:predictionValue];
+ [newValues setObject:valueObject forKey:labelObject];
+ }
+ }
+ dispatch_async(dispatch_get_main_queue(), ^(void) {
+ [self setPredictionValues:newValues];
+ });
+ }
+ }
+}
+
+- (void)dealloc {
+ [self teardownAVCapture];
+}
+
+// use front/back camera
+- (IBAction)switchCameras:(id)sender {
+ AVCaptureDevicePosition desiredPosition;
+ if (isUsingFrontFacingCamera)
+ desiredPosition = AVCaptureDevicePositionBack;
+ else
+ desiredPosition = AVCaptureDevicePositionFront;
+
+ for (AVCaptureDevice *d in
+ [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) {
+ if ([d position] == desiredPosition) {
+ [[previewLayer session] beginConfiguration];
+ AVCaptureDeviceInput *input =
+ [AVCaptureDeviceInput deviceInputWithDevice:d error:nil];
+ for (AVCaptureInput *oldInput in [[previewLayer session] inputs]) {
+ [[previewLayer session] removeInput:oldInput];
+ }
+ [[previewLayer session] addInput:input];
+ [[previewLayer session] commitConfiguration];
+ break;
+ }
+ }
+ isUsingFrontFacingCamera = !isUsingFrontFacingCamera;
+}
+
+- (void)didReceiveMemoryWarning {
+ [super didReceiveMemoryWarning];
+}
+
+- (void)viewDidLoad {
+ [super viewDidLoad];
+ square = [UIImage imageNamed:@"squarePNG"];
+ synth = [[AVSpeechSynthesizer alloc] init];
+ labelLayers = [[NSMutableArray alloc] init];
+ oldPredictionValues = [[NSMutableDictionary alloc] init];
+
+ tensorflow::Status load_status;
+ if (model_uses_memory_mapping) {
+ load_status = LoadMemoryMappedModel(
+ model_file_name, model_file_type, &tf_session, &tf_memmapped_env);
+ } else {
+ load_status = LoadModel(model_file_name, model_file_type, &tf_session);
+ }
+ if (!load_status.ok()) {
+ LOG(FATAL) << "Couldn't load model: " << load_status;
+ }
+
+ tensorflow::Status labels_status =
+ LoadLabels(labels_file_name, labels_file_type, &labels);
+ if (!labels_status.ok()) {
+ LOG(FATAL) << "Couldn't load labels: " << labels_status;
+ }
+ [self setupAVCapture];
+}
+
+- (void)viewDidUnload {
+ [super viewDidUnload];
+}
+
+- (void)viewWillAppear:(BOOL)animated {
+ [super viewWillAppear:animated];
+}
+
+- (void)viewDidAppear:(BOOL)animated {
+ [super viewDidAppear:animated];
+}
+
+- (void)viewWillDisappear:(BOOL)animated {
+ [super viewWillDisappear:animated];
+}
+
+- (void)viewDidDisappear:(BOOL)animated {
+ [super viewDidDisappear:animated];
+}
+
+- (BOOL)shouldAutorotateToInterfaceOrientation:
+ (UIInterfaceOrientation)interfaceOrientation {
+ return (interfaceOrientation == UIInterfaceOrientationPortrait);
+}
+
+- (BOOL)prefersStatusBarHidden {
+ return YES;
+}
+
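+// Blends new predictions with the previous ones so the on-screen labels decay
+// smoothly instead of flickering: old values are multiplied by 0.75, new ones
+// are mixed in at 0.25, entries above 5% are drawn as overlay layers (at most
+// five), and the top label is spoken once it exceeds 50% confidence.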
+- (void)setPredictionValues:(NSDictionary *)newValues {
+ const float decayValue = 0.75f;
+ const float updateValue = 0.25f;
+ const float minimumThreshold = 0.01f;
+
+ NSMutableDictionary *decayedPredictionValues =
+ [[NSMutableDictionary alloc] init];
+ for (NSString *label in oldPredictionValues) {
+ NSNumber *oldPredictionValueObject =
+ [oldPredictionValues objectForKey:label];
+ const float oldPredictionValue = [oldPredictionValueObject floatValue];
+ const float decayedPredictionValue = (oldPredictionValue * decayValue);
+ if (decayedPredictionValue > minimumThreshold) {
+ NSNumber *decayedPredictionValueObject =
+ [NSNumber numberWithFloat:decayedPredictionValue];
+ [decayedPredictionValues setObject:decayedPredictionValueObject
+ forKey:label];
+ }
+ }
+ oldPredictionValues = decayedPredictionValues;
+
+ for (NSString *label in newValues) {
+ NSNumber *newPredictionValueObject = [newValues objectForKey:label];
+ NSNumber *oldPredictionValueObject =
+ [oldPredictionValues objectForKey:label];
+ if (!oldPredictionValueObject) {
+ oldPredictionValueObject = [NSNumber numberWithFloat:0.0f];
+ }
+ const float newPredictionValue = [newPredictionValueObject floatValue];
+ const float oldPredictionValue = [oldPredictionValueObject floatValue];
+ const float updatedPredictionValue =
+ (oldPredictionValue + (newPredictionValue * updateValue));
+ NSNumber *updatedPredictionValueObject =
+ [NSNumber numberWithFloat:updatedPredictionValue];
+ [oldPredictionValues setObject:updatedPredictionValueObject forKey:label];
+ }
+  NSArray *candidateLabels = [NSArray array];
+ for (NSString *label in oldPredictionValues) {
+ NSNumber *oldPredictionValueObject =
+ [oldPredictionValues objectForKey:label];
+ const float oldPredictionValue = [oldPredictionValueObject floatValue];
+ if (oldPredictionValue > 0.05f) {
+ NSDictionary *entry = @{
+ @"label" : label,
+ @"value" : oldPredictionValueObject
+ };
+ candidateLabels = [candidateLabels arrayByAddingObject:entry];
+ }
+ }
+ NSSortDescriptor *sort =
+ [NSSortDescriptor sortDescriptorWithKey:@"value" ascending:NO];
+ NSArray *sortedLabels = [candidateLabels
+ sortedArrayUsingDescriptors:[NSArray arrayWithObject:sort]];
+
+ const float leftMargin = 10.0f;
+ const float topMargin = 10.0f;
+
+ const float valueWidth = 48.0f;
+ const float valueHeight = 26.0f;
+
+ const float labelWidth = 246.0f;
+ const float labelHeight = 26.0f;
+
+ const float labelMarginX = 5.0f;
+ const float labelMarginY = 5.0f;
+
+ [self removeAllLabelLayers];
+
+ int labelCount = 0;
+ for (NSDictionary *entry in sortedLabels) {
+ NSString *label = [entry objectForKey:@"label"];
+ NSNumber *valueObject = [entry objectForKey:@"value"];
+ const float value = [valueObject floatValue];
+
+ const float originY =
+ (topMargin + ((labelHeight + labelMarginY) * labelCount));
+
+ const int valuePercentage = (int)roundf(value * 100.0f);
+
+ const float valueOriginX = leftMargin;
+ NSString *valueText = [NSString stringWithFormat:@"%d%%", valuePercentage];
+
+ [self addLabelLayerWithText:valueText
+ originX:valueOriginX
+ originY:originY
+ width:valueWidth
+ height:valueHeight
+ alignment:kCAAlignmentRight];
+
+ const float labelOriginX = (leftMargin + valueWidth + labelMarginX);
+
+ [self addLabelLayerWithText:[label capitalizedString]
+ originX:labelOriginX
+ originY:originY
+ width:labelWidth
+ height:labelHeight
+ alignment:kCAAlignmentLeft];
+
+ if ((labelCount == 0) && (value > 0.5f)) {
+ [self speak:[label capitalizedString]];
+ }
+
+ labelCount += 1;
+ if (labelCount > 4) {
+ break;
+ }
+ }
+}
+
+- (void)removeAllLabelLayers {
+ for (CATextLayer *layer in labelLayers) {
+ [layer removeFromSuperlayer];
+ }
+ [labelLayers removeAllObjects];
+}
+
+- (void)addLabelLayerWithText:(NSString *)text
+ originX:(float)originX
+ originY:(float)originY
+ width:(float)width
+ height:(float)height
+ alignment:(NSString *)alignment {
+ CFTypeRef font = (CFTypeRef) @"Menlo-Regular";
+ const float fontSize = 20.0f;
+
+ const float marginSizeX = 5.0f;
+ const float marginSizeY = 2.0f;
+
+ const CGRect backgroundBounds = CGRectMake(originX, originY, width, height);
+
+ const CGRect textBounds =
+ CGRectMake((originX + marginSizeX), (originY + marginSizeY),
+ (width - (marginSizeX * 2)), (height - (marginSizeY * 2)));
+
+ CATextLayer *background = [CATextLayer layer];
+ [background setBackgroundColor:[UIColor blackColor].CGColor];
+ [background setOpacity:0.5f];
+ [background setFrame:backgroundBounds];
+ background.cornerRadius = 5.0f;
+
+ [[self.view layer] addSublayer:background];
+ [labelLayers addObject:background];
+
+ CATextLayer *layer = [CATextLayer layer];
+ [layer setForegroundColor:[UIColor whiteColor].CGColor];
+ [layer setFrame:textBounds];
+ [layer setAlignmentMode:alignment];
+ [layer setWrapped:YES];
+ [layer setFont:font];
+ [layer setFontSize:fontSize];
+ layer.contentsScale = [[UIScreen mainScreen] scale];
+ [layer setString:text];
+
+ [[self.view layer] addSublayer:layer];
+ [labelLayers addObject:layer];
+}
+
+- (void)setPredictionText:(NSString *)text withDuration:(float)duration {
+ if (duration > 0.0) {
+ CABasicAnimation *colorAnimation =
+ [CABasicAnimation animationWithKeyPath:@"foregroundColor"];
+ colorAnimation.duration = duration;
+ colorAnimation.fillMode = kCAFillModeForwards;
+ colorAnimation.removedOnCompletion = NO;
+ colorAnimation.fromValue = (id)[UIColor darkGrayColor].CGColor;
+ colorAnimation.toValue = (id)[UIColor whiteColor].CGColor;
+ colorAnimation.timingFunction =
+ [CAMediaTimingFunction functionWithName:kCAMediaTimingFunctionLinear];
+ [self.predictionTextLayer addAnimation:colorAnimation
+ forKey:@"colorAnimation"];
+ } else {
+ self.predictionTextLayer.foregroundColor = [UIColor whiteColor].CGColor;
+ }
+
+ [self.predictionTextLayer removeFromSuperlayer];
+ [[self.view layer] addSublayer:self.predictionTextLayer];
+ [self.predictionTextLayer setString:text];
+}
+
+- (void)speak:(NSString *)words {
+ if ([synth isSpeaking]) {
+ return;
+ }
+ AVSpeechUtterance *utterance =
+ [AVSpeechUtterance speechUtteranceWithString:words];
+ utterance.voice = [AVSpeechSynthesisVoice voiceWithLanguage:@"en-US"];
+ utterance.rate = 0.75 * AVSpeechUtteranceDefaultSpeechRate;
+ [synth speakUtterance:utterance];
+}
+
+@end
diff --git a/tensorflow/examples/ios/camera/Info.plist b/tensorflow/examples/ios/camera/Info.plist
new file mode 100644
index 0000000000..772fb38dcc
--- /dev/null
+++ b/tensorflow/examples/ios/camera/Info.plist
@@ -0,0 +1,44 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
+<plist version="1.0">
+<dict>
+ <key>CFBundleDevelopmentRegion</key>
+ <string>en</string>
+ <key>CFBundleDisplayName</key>
+ <string>tf_camera_example</string>
+ <key>CFBundleExecutable</key>
+ <string>${EXECUTABLE_NAME}</string>
+ <key>CFBundleIdentifier</key>
+ <string>$(PRODUCT_BUNDLE_IDENTIFIER)</string>
+ <key>CFBundleInfoDictionaryVersion</key>
+ <string>6.0</string>
+ <key>CFBundleName</key>
+ <string>${PRODUCT_NAME}</string>
+ <key>CFBundlePackageType</key>
+ <string>APPL</string>
+ <key>CFBundleShortVersionString</key>
+ <string>1.0</string>
+ <key>CFBundleSignature</key>
+ <string>????</string>
+ <key>CFBundleVersion</key>
+ <string>1.0</string>
+ <key>LSRequiresIPhoneOS</key>
+ <true/>
+ <key>NSCameraUsageDescription</key>
+ <string>Capture images to detect object</string>
+ <key>UIMainStoryboardFile</key>
+ <string>MainStoryboard_iPhone</string>
+ <key>UIRequiresFullScreen</key>
+ <true/>
+ <key>UIStatusBarHidden</key>
+ <true/>
+ <key>UISupportedInterfaceOrientations</key>
+ <array>
+ <string>UIInterfaceOrientationPortrait</string>
+ </array>
+ <key>UISupportedInterfaceOrientations~ipad</key>
+ <array>
+ <string>UIInterfaceOrientationPortrait</string>
+ </array>
+</dict>
+</plist>
diff --git a/tensorflow/examples/ios/camera/MainStoryboard_iPhone.storyboard b/tensorflow/examples/ios/camera/MainStoryboard_iPhone.storyboard
new file mode 100644
index 0000000000..0f10a22e41
--- /dev/null
+++ b/tensorflow/examples/ios/camera/MainStoryboard_iPhone.storyboard
@@ -0,0 +1,46 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<document type="com.apple.InterfaceBuilder3.CocoaTouch.Storyboard.XIB" version="3.0" toolsVersion="9531" systemVersion="15E65" targetRuntime="iOS.CocoaTouch" propertyAccessControl="none" initialViewController="2">
+ <dependencies>
+ <deployment identifier="iOS"/>
+ <plugIn identifier="com.apple.InterfaceBuilder.IBCocoaTouchPlugin" version="9529"/>
+ </dependencies>
+ <scenes>
+ <!--Camera Example View Controller-->
+ <scene sceneID="5">
+ <objects>
+ <viewController id="2" customClass="CameraExampleViewController" sceneMemberID="viewController">
+ <view key="view" contentMode="scaleToFill" id="3">
+ <rect key="frame" x="0.0" y="0.0" width="320" height="568"/>
+ <autoresizingMask key="autoresizingMask" flexibleMaxX="YES" flexibleMaxY="YES"/>
+ <subviews>
+ <view contentMode="scaleToFill" id="12">
+ <rect key="frame" x="0.0" y="0.0" width="320" height="522"/>
+ <autoresizingMask key="autoresizingMask" widthSizable="YES" heightSizable="YES"/>
+ <color key="backgroundColor" white="1" alpha="1" colorSpace="custom" customColorSpace="calibratedWhite"/>
+ <gestureRecognizers/>
+ </view>
+ <button opaque="NO" contentMode="scaleToFill" contentHorizontalAlignment="center" contentVerticalAlignment="center" buttonType="roundedRect" lineBreakMode="middleTruncation" id="iD8-yH-eWH">
+ <rect key="frame" x="0.0" y="454" width="320" height="33"/>
+ <autoresizingMask key="autoresizingMask" flexibleMaxX="YES" flexibleMaxY="YES"/>
+ <color key="backgroundColor" red="0.0" green="0.0" blue="0.0" alpha="1" colorSpace="calibratedRGB"/>
+ <fontDescription key="fontDescription" name="Menlo-Regular" family="Menlo" pointSize="20"/>
+ <state key="normal" title="Freeze Frame">
+ <color key="titleColor" white="1" alpha="1" colorSpace="calibratedWhite"/>
+ <color key="titleShadowColor" white="0.5" alpha="1" colorSpace="calibratedWhite"/>
+ </state>
+ <connections>
+ <action selector="takePicture:" destination="2" eventType="touchUpInside" id="BTy-7E-XUS"/>
+ </connections>
+ </button>
+ </subviews>
+ <color key="backgroundColor" red="0.0" green="0.0" blue="0.0" alpha="1" colorSpace="calibratedRGB"/>
+ </view>
+ <connections>
+ <outlet property="previewView" destination="12" id="13"/>
+ </connections>
+ </viewController>
+ <placeholder placeholderIdentifier="IBFirstResponder" id="4" sceneMemberID="firstResponder"/>
+ </objects>
+ </scene>
+ </scenes>
+</document>
diff --git a/tensorflow/examples/ios/camera/Podfile b/tensorflow/examples/ios/camera/Podfile
new file mode 100644
index 0000000000..117828f071
--- /dev/null
+++ b/tensorflow/examples/ios/camera/Podfile
@@ -0,0 +1,5 @@
+platform :ios, '8.0'
+inhibit_all_warnings!
+
+target 'tf_camera_example'
+ pod 'TensorFlow-experimental'
diff --git a/tensorflow/examples/ios/camera/data/grace_hopper.jpg b/tensorflow/examples/ios/camera/data/grace_hopper.jpg
new file mode 100644
index 0000000000..d2a427810f
--- /dev/null
+++ b/tensorflow/examples/ios/camera/data/grace_hopper.jpg
Binary files differ
diff --git a/tensorflow/examples/ios/camera/ios_image_load.h b/tensorflow/examples/ios/camera/ios_image_load.h
new file mode 100644
index 0000000000..87a847e145
--- /dev/null
+++ b/tensorflow/examples/ios/camera/ios_image_load.h
@@ -0,0 +1,27 @@
+// Copyright 2015 Google Inc. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#ifndef TENSORFLOW_CONTRIB_IOS_EXAMPLES_CAMERA_IMAGE_LOAD_H_
+#define TENSORFLOW_CONTRIB_IOS_EXAMPLES_CAMERA_IMAGE_LOAD_H_
+
+#include <vector>
+
+#include "tensorflow/core/framework/types.h"
+
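+// Decodes a PNG or JPEG image from disk into an RGBA byte buffer (four 8-bit
+// channels) and reports its dimensions through the out parameters. If the
+// file suffix is not recognized, the returned vector is empty and the
+// dimensions are set to zero.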
+std::vector<tensorflow::uint8> LoadImageFromFile(const char* file_name,
+ int* out_width,
+ int* out_height,
+ int* out_channels);
+
+#endif // TENSORFLOW_CONTRIB_IOS_EXAMPLES_CAMERA_IMAGE_LOAD_H_
diff --git a/tensorflow/examples/ios/camera/ios_image_load.mm b/tensorflow/examples/ios/camera/ios_image_load.mm
new file mode 100644
index 0000000000..64d1ea21cf
--- /dev/null
+++ b/tensorflow/examples/ios/camera/ios_image_load.mm
@@ -0,0 +1,87 @@
+// Copyright 2015 Google Inc. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "ios_image_load.h"
+
+#include <stdlib.h>
+#include <string.h>
+#include <assert.h>
+#include <stdio.h>
+
+#import <CoreImage/CoreImage.h>
+#import <ImageIO/ImageIO.h>
+
+using tensorflow::uint8;
+
+std::vector<uint8> LoadImageFromFile(const char* file_name,
+ int* out_width, int* out_height,
+ int* out_channels) {
+ FILE* file_handle = fopen(file_name, "rb");
+ fseek(file_handle, 0, SEEK_END);
+ const size_t bytes_in_file = ftell(file_handle);
+ fseek(file_handle, 0, SEEK_SET);
+ std::vector<uint8> file_data(bytes_in_file);
+ fread(file_data.data(), 1, bytes_in_file, file_handle);
+ fclose(file_handle);
+ CFDataRef file_data_ref = CFDataCreateWithBytesNoCopy(NULL, file_data.data(),
+ bytes_in_file,
+ kCFAllocatorNull);
+ CGDataProviderRef image_provider =
+ CGDataProviderCreateWithCFData(file_data_ref);
+
+ const char* suffix = strrchr(file_name, '.');
+ if (!suffix || suffix == file_name) {
+ suffix = "";
+ }
+ CGImageRef image;
+ if (strcasecmp(suffix, ".png") == 0) {
+ image = CGImageCreateWithPNGDataProvider(image_provider, NULL, true,
+ kCGRenderingIntentDefault);
+ } else if ((strcasecmp(suffix, ".jpg") == 0) ||
+ (strcasecmp(suffix, ".jpeg") == 0)) {
+ image = CGImageCreateWithJPEGDataProvider(image_provider, NULL, true,
+ kCGRenderingIntentDefault);
+ } else {
+ CFRelease(image_provider);
+ CFRelease(file_data_ref);
+ fprintf(stderr, "Unknown suffix for file '%s'\n", file_name);
+ *out_width = 0;
+ *out_height = 0;
+ *out_channels = 0;
+ return std::vector<uint8>();
+ }
+
+ const int width = (int)CGImageGetWidth(image);
+ const int height = (int)CGImageGetHeight(image);
+ const int channels = 4;
+ CGColorSpaceRef color_space = CGColorSpaceCreateDeviceRGB();
+ const int bytes_per_row = (width * channels);
+ const int bytes_in_image = (bytes_per_row * height);
+ std::vector<uint8> result(bytes_in_image);
+ const int bits_per_component = 8;
+ CGContextRef context = CGBitmapContextCreate(result.data(), width, height,
+ bits_per_component, bytes_per_row, color_space,
+ kCGImageAlphaPremultipliedLast | kCGBitmapByteOrder32Big);
+ CGColorSpaceRelease(color_space);
+ CGContextDrawImage(context, CGRectMake(0, 0, width, height), image);
+ CGContextRelease(context);
+ CFRelease(image);
+ CFRelease(image_provider);
+ CFRelease(file_data_ref);
+
+ *out_width = width;
+ *out_height = height;
+ *out_channels = channels;
+ return result;
+}
diff --git a/tensorflow/examples/ios/camera/main.mm b/tensorflow/examples/ios/camera/main.mm
new file mode 100644
index 0000000000..42eff697ef
--- /dev/null
+++ b/tensorflow/examples/ios/camera/main.mm
@@ -0,0 +1,27 @@
+// Copyright 2015 Google Inc. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#import <UIKit/UIKit.h>
+
+#import "CameraExampleAppDelegate.h"
+
+int main(int argc, char *argv[]) {
+ int retVal = 0;
+
+ @autoreleasepool {
+ retVal = UIApplicationMain(
+ argc, argv, nil, NSStringFromClass([CameraExampleAppDelegate class]));
+ }
+ return retVal;
+}
diff --git a/tensorflow/examples/ios/camera/tensorflow_utils.h b/tensorflow/examples/ios/camera/tensorflow_utils.h
new file mode 100644
index 0000000000..78bdb82aae
--- /dev/null
+++ b/tensorflow/examples/ios/camera/tensorflow_utils.h
@@ -0,0 +1,52 @@
+// Copyright 2015 Google Inc. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#ifndef TENSORFLOW_CONTRIB_IOS_EXAMPLES_CAMERA_TENSORFLOW_UTILS_H_
+#define TENSORFLOW_CONTRIB_IOS_EXAMPLES_CAMERA_TENSORFLOW_UTILS_H_
+
+#include <memory>
+#include <vector>
+
+#include "tensorflow/core/public/session.h"
+#include "tensorflow/core/util/memmapped_file_system.h"
+#include "third_party/eigen3/unsupported/Eigen/CXX11/Tensor"
+
+// Reads a serialized GraphDef protobuf file from the bundle, typically
+// created with the freeze_graph script. Populates the session argument with a
+// Session object that has the model loaded.
+tensorflow::Status LoadModel(NSString* file_name, NSString* file_type,
+ std::unique_ptr<tensorflow::Session>* session);
+
+// Loads a model from a file that has been created using the
+// convert_graphdef_memmapped_format tool. This bundles together a GraphDef
+// proto together with a file that can be memory-mapped, containing the weight
+// parameters for the model. This is useful because it reduces the overall
+// memory pressure, since the read-only parameter regions can be easily paged
+// out and don't count toward memory limits on iOS.
+tensorflow::Status LoadMemoryMappedModel(
+ NSString* file_name, NSString* file_type,
+ std::unique_ptr<tensorflow::Session>* session,
+ std::unique_ptr<tensorflow::MemmappedEnv>* memmapped_env);
+
+// Takes a text file with a single label on each line, and returns a list.
+tensorflow::Status LoadLabels(NSString* file_name, NSString* file_type,
+ std::vector<std::string>* label_strings);
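+
+// A usage sketch mirroring viewDidLoad in the camera example (the file names
+// are the ones bundled with this sample):
+//   std::unique_ptr<tensorflow::Session> session;
+//   tensorflow::Status s =
+//       LoadModel(@"tensorflow_inception_graph", @"pb", &session);
+//   std::vector<std::string> labels;
+//   if (s.ok()) {
+//     s = LoadLabels(@"imagenet_comp_graph_label_strings", @"txt", &labels);
+//   }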
+
+// Sorts the results from a model execution, and returns the highest scoring.
+void GetTopN(const Eigen::TensorMap<Eigen::Tensor<float, 1, Eigen::RowMajor>,
+ Eigen::Aligned>& prediction,
+ const int num_results, const float threshold,
+ std::vector<std::pair<float, int> >* top_results);
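+
+// A usage sketch for GetTopN (not part of the sample itself): keep the five
+// best scores above a 10% threshold, where `predictions` is a flattened
+// output tensor of scores.
+//   std::vector<std::pair<float, int> > top_results;
+//   GetTopN(predictions, 5, 0.1f, &top_results);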
+
+#endif // TENSORFLOW_CONTRIB_IOS_EXAMPLES_CAMERA_TENSORFLOW_UTILS_H_
diff --git a/tensorflow/examples/ios/camera/tensorflow_utils.mm b/tensorflow/examples/ios/camera/tensorflow_utils.mm
new file mode 100644
index 0000000000..56d1e53081
--- /dev/null
+++ b/tensorflow/examples/ios/camera/tensorflow_utils.mm
@@ -0,0 +1,219 @@
+// Copyright 2015 Google Inc. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#import <Foundation/Foundation.h>
+
+#include "tensorflow_utils.h"
+
+#include <pthread.h>
+#include <unistd.h>
+#include <fstream>
+#include <queue>
+#include <sstream>
+#include <string>
+
+namespace {
+
+// Helper class used to load protobufs efficiently.
+class IfstreamInputStream : public ::google::protobuf::io::CopyingInputStream {
+ public:
+ explicit IfstreamInputStream(const std::string& file_name)
+ : ifs_(file_name.c_str(), std::ios::in | std::ios::binary) {}
+ ~IfstreamInputStream() { ifs_.close(); }
+
+ int Read(void* buffer, int size) {
+ if (!ifs_) {
+ return -1;
+ }
+ ifs_.read(static_cast<char*>(buffer), size);
+ return ifs_.gcount();
+ }
+
+ private:
+ std::ifstream ifs_;
+};
+} // namespace
+
+// Returns the top N confidence values over threshold in the provided vector,
+// sorted by confidence in descending order.
+void GetTopN(const Eigen::TensorMap<Eigen::Tensor<float, 1, Eigen::RowMajor>,
+ Eigen::Aligned>& prediction,
+ const int num_results, const float threshold,
+ std::vector<std::pair<float, int> >* top_results) {
+ // Will contain top N results in ascending order.
+ std::priority_queue<std::pair<float, int>,
+ std::vector<std::pair<float, int> >,
+ std::greater<std::pair<float, int> > >
+ top_result_pq;
+
+ const int count = prediction.size();
+ for (int i = 0; i < count; ++i) {
+ const float value = prediction(i);
+
+ // Only add it if it beats the threshold and has a chance at being in
+ // the top N.
+ if (value < threshold) {
+ continue;
+ }
+
+ top_result_pq.push(std::pair<float, int>(value, i));
+
+ // If at capacity, kick the smallest value out.
+ if (top_result_pq.size() > num_results) {
+ top_result_pq.pop();
+ }
+ }
+
+ // Copy to output vector and reverse into descending order.
+ while (!top_result_pq.empty()) {
+ top_results->push_back(top_result_pq.top());
+ top_result_pq.pop();
+ }
+ std::reverse(top_results->begin(), top_results->end());
+}
+
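+// Parses a binary protobuf from |file_name| into |proto|, streaming it
+// through IfstreamInputStream and raising protobuf's default size limits so
+// large GraphDef files can be read.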
+bool PortableReadFileToProto(const std::string& file_name,
+ ::google::protobuf::MessageLite* proto) {
+ ::google::protobuf::io::CopyingInputStreamAdaptor stream(
+ new IfstreamInputStream(file_name));
+ stream.SetOwnsCopyingStream(true);
+ ::google::protobuf::io::CodedInputStream coded_stream(&stream);
+ // Total bytes hard limit / warning limit are set to 1GB and 512MB
+ // respectively.
+ coded_stream.SetTotalBytesLimit(1024LL << 20, 512LL << 20);
+ return proto->ParseFromCodedStream(&coded_stream);
+}
+
+NSString* FilePathForResourceName(NSString* name, NSString* extension) {
+ NSString* file_path =
+ [[NSBundle mainBundle] pathForResource:name ofType:extension];
+ if (file_path == NULL) {
+ LOG(FATAL) << "Couldn't find '" << [name UTF8String] << "."
+ << [extension UTF8String] << "' in bundle.";
+ return nullptr;
+ }
+ return file_path;
+}
+
+tensorflow::Status LoadModel(NSString* file_name, NSString* file_type,
+ std::unique_ptr<tensorflow::Session>* session) {
+ tensorflow::SessionOptions options;
+
+ tensorflow::Session* session_pointer = nullptr;
+ tensorflow::Status session_status =
+ tensorflow::NewSession(options, &session_pointer);
+ if (!session_status.ok()) {
+ LOG(ERROR) << "Could not create TensorFlow Session: " << session_status;
+ return session_status;
+ }
+ session->reset(session_pointer);
+
+ tensorflow::GraphDef tensorflow_graph;
+
+ NSString* model_path = FilePathForResourceName(file_name, file_type);
+ if (!model_path) {
+    LOG(ERROR) << "Failed to find model proto at " << [file_name UTF8String]
+               << "." << [file_type UTF8String];
+ return tensorflow::errors::NotFound([file_name UTF8String],
+ [file_type UTF8String]);
+ }
+ const bool read_proto_succeeded =
+ PortableReadFileToProto([model_path UTF8String], &tensorflow_graph);
+ if (!read_proto_succeeded) {
+    LOG(ERROR) << "Failed to load model proto from " << [model_path UTF8String];
+ return tensorflow::errors::NotFound([model_path UTF8String]);
+ }
+
+ tensorflow::Status create_status = (*session)->Create(tensorflow_graph);
+ if (!create_status.ok()) {
+ LOG(ERROR) << "Could not create TensorFlow Graph: " << create_status;
+ return create_status;
+ }
+
+ return tensorflow::Status::OK();
+}
+
+tensorflow::Status LoadMemoryMappedModel(
+ NSString* file_name, NSString* file_type,
+ std::unique_ptr<tensorflow::Session>* session,
+ std::unique_ptr<tensorflow::MemmappedEnv>* memmapped_env) {
+ NSString* network_path = FilePathForResourceName(file_name, file_type);
+ memmapped_env->reset(
+ new tensorflow::MemmappedEnv(tensorflow::Env::Default()));
+ tensorflow::Status mmap_status =
+ (memmapped_env->get())->InitializeFromFile([network_path UTF8String]);
+ if (!mmap_status.ok()) {
+ LOG(ERROR) << "MMap failed with " << mmap_status.error_message();
+ return mmap_status;
+ }
+
+ tensorflow::GraphDef tensorflow_graph;
+ tensorflow::Status load_graph_status = ReadBinaryProto(
+ memmapped_env->get(),
+ tensorflow::MemmappedFileSystem::kMemmappedPackageDefaultGraphDef,
+ &tensorflow_graph);
+ if (!load_graph_status.ok()) {
+ LOG(ERROR) << "MMap load graph failed with "
+ << load_graph_status.error_message();
+ return load_graph_status;
+ }
+
+ tensorflow::SessionOptions options;
+ // Disable optimizations on this graph so that constant folding doesn't
+ // increase the memory footprint by creating new constant copies of the weight
+ // parameters.
+ options.config.mutable_graph_options()
+ ->mutable_optimizer_options()
+ ->set_opt_level(::tensorflow::OptimizerOptions::L0);
+ options.env = memmapped_env->get();
+
+ tensorflow::Session* session_pointer = nullptr;
+ tensorflow::Status session_status =
+ tensorflow::NewSession(options, &session_pointer);
+ if (!session_status.ok()) {
+ LOG(ERROR) << "Could not create TensorFlow Session: " << session_status;
+ return session_status;
+ }
+
+ tensorflow::Status create_status = session_pointer->Create(tensorflow_graph);
+ if (!create_status.ok()) {
+ LOG(ERROR) << "Could not create TensorFlow Graph: " << create_status;
+ return create_status;
+ }
+
+ session->reset(session_pointer);
+
+ return tensorflow::Status::OK();
+}
+
+tensorflow::Status LoadLabels(NSString* file_name, NSString* file_type,
+ std::vector<std::string>* label_strings) {
+ // Read the label list
+ NSString* labels_path = FilePathForResourceName(file_name, file_type);
+ if (!labels_path) {
+    LOG(ERROR) << "Failed to find labels file at " << [file_name UTF8String]
+               << "." << [file_type UTF8String];
+ return tensorflow::errors::NotFound([file_name UTF8String],
+ [file_type UTF8String]);
+ }
+ std::ifstream t;
+ t.open([labels_path UTF8String]);
+  std::string line;
+  while (std::getline(t, line)) {
+    label_strings->push_back(line);
+  }
+ t.close();
+ return tensorflow::Status::OK();
+}
diff --git a/tensorflow/examples/ios/camera/tf_camera_example.xcodeproj/project.pbxproj b/tensorflow/examples/ios/camera/tf_camera_example.xcodeproj/project.pbxproj
new file mode 100644
index 0000000000..ee9fe57c79
--- /dev/null
+++ b/tensorflow/examples/ios/camera/tf_camera_example.xcodeproj/project.pbxproj
@@ -0,0 +1,412 @@
+// !$*UTF8*$!
+{
+ archiveVersion = 1;
+ classes = {
+ };
+ objectVersion = 46;
+ objects = {
+
+/* Begin PBXBuildFile section */
+ 1C3C9DCB1ED3AB4200B8B5FA /* ios_image_load.mm in Sources */ = {isa = PBXBuildFile; fileRef = 1C3C9DC91ED3AB4200B8B5FA /* ios_image_load.mm */; };
+ 1C3C9DCC1ED3AB4200B8B5FA /* main.mm in Sources */ = {isa = PBXBuildFile; fileRef = 1C3C9DCA1ED3AB4200B8B5FA /* main.mm */; };
+ 1C968D171ED3B8F20054F5C3 /* grace_hopper.jpg in Resources */ = {isa = PBXBuildFile; fileRef = 59A3CFF51CF4E68100C4259F /* grace_hopper.jpg */; };
+ 1C968D181ED3B8F20054F5C3 /* imagenet_comp_graph_label_strings.txt in Resources */ = {isa = PBXBuildFile; fileRef = 59A3CFF71CF4E68100C4259F /* imagenet_comp_graph_label_strings.txt */; };
+ 1C968D191ED3B8F20054F5C3 /* tensorflow_inception_graph.pb in Resources */ = {isa = PBXBuildFile; fileRef = 59A3CFF91CF4E68100C4259F /* tensorflow_inception_graph.pb */; };
+ 1C99111C1ED3B0E600A6BFB9 /* MainStoryboard_iPhone.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 1C99111B1ED3B0E600A6BFB9 /* MainStoryboard_iPhone.storyboard */; };
+ 1CA5EB931ED3ABFB00247A34 /* CoreMedia.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 1CA5EB921ED3ABFB00247A34 /* CoreMedia.framework */; };
+ 1CB47D491ED3AD1700DF7666 /* AVFoundation.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 1CB47D481ED3AD1700DF7666 /* AVFoundation.framework */; };
+ 1CDB2D491ED3A9CD007929E9 /* CameraExampleAppDelegate.m in Sources */ = {isa = PBXBuildFile; fileRef = 1CDB2D431ED3A9CD007929E9 /* CameraExampleAppDelegate.m */; };
+ 1CDB2D4A1ED3A9CD007929E9 /* CameraExampleViewController.mm in Sources */ = {isa = PBXBuildFile; fileRef = 1CDB2D451ED3A9CD007929E9 /* CameraExampleViewController.mm */; };
+ 1CDB2D4C1ED3A9CD007929E9 /* tensorflow_utils.mm in Sources */ = {isa = PBXBuildFile; fileRef = 1CDB2D481ED3A9CD007929E9 /* tensorflow_utils.mm */; };
+ 1CDB2D4E1ED3AA35007929E9 /* Info.plist in Resources */ = {isa = PBXBuildFile; fileRef = 1CDB2D4D1ED3AA35007929E9 /* Info.plist */; };
+ 54DC6C3C5F734F3A58069F0C /* libPods-tf_camera_example.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 3BA8BF92C84895BFE59D8236 /* libPods-tf_camera_example.a */; };
+/* End PBXBuildFile section */
+
+/* Begin PBXFileReference section */
+ 1C0D73481ECCC41B008C1DAB /* CoreImage.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = CoreImage.framework; path = System/Library/Frameworks/CoreImage.framework; sourceTree = SDKROOT; };
+ 1C0D734A1ECCC460008C1DAB /* CoreGraphics.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = CoreGraphics.framework; path = System/Library/Frameworks/CoreGraphics.framework; sourceTree = SDKROOT; };
+ 1C3C9DC81ED3AB4200B8B5FA /* ios_image_load.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = ios_image_load.h; sourceTree = "<group>"; };
+ 1C3C9DC91ED3AB4200B8B5FA /* ios_image_load.mm */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.objcpp; path = ios_image_load.mm; sourceTree = "<group>"; };
+ 1C3C9DCA1ED3AB4200B8B5FA /* main.mm */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.objcpp; path = main.mm; sourceTree = "<group>"; };
+ 1C564C0D1ED3A92E00087306 /* tf_camera_example.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = tf_camera_example.app; sourceTree = BUILT_PRODUCTS_DIR; };
+ 1C99111B1ED3B0E600A6BFB9 /* MainStoryboard_iPhone.storyboard */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = file.storyboard; path = MainStoryboard_iPhone.storyboard; sourceTree = "<group>"; };
+ 1CA45FFE1ECCC356002FA6A4 /* UIKit.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = UIKit.framework; path = System/Library/Frameworks/UIKit.framework; sourceTree = SDKROOT; };
+ 1CA5EB921ED3ABFB00247A34 /* CoreMedia.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = CoreMedia.framework; path = System/Library/Frameworks/CoreMedia.framework; sourceTree = SDKROOT; };
+ 1CB47D481ED3AD1700DF7666 /* AVFoundation.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = AVFoundation.framework; path = System/Library/Frameworks/AVFoundation.framework; sourceTree = SDKROOT; };
+ 1CDB2D421ED3A9CD007929E9 /* CameraExampleAppDelegate.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = CameraExampleAppDelegate.h; sourceTree = "<group>"; };
+ 1CDB2D431ED3A9CD007929E9 /* CameraExampleAppDelegate.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = CameraExampleAppDelegate.m; sourceTree = "<group>"; };
+ 1CDB2D441ED3A9CD007929E9 /* CameraExampleViewController.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = CameraExampleViewController.h; sourceTree = "<group>"; };
+ 1CDB2D451ED3A9CD007929E9 /* CameraExampleViewController.mm */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.objcpp; path = CameraExampleViewController.mm; sourceTree = "<group>"; };
+ 1CDB2D471ED3A9CD007929E9 /* tensorflow_utils.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = tensorflow_utils.h; sourceTree = "<group>"; };
+ 1CDB2D481ED3A9CD007929E9 /* tensorflow_utils.mm */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.objcpp; path = tensorflow_utils.mm; sourceTree = "<group>"; };
+ 1CDB2D4D1ED3AA35007929E9 /* Info.plist */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = "<group>"; };
+ 3BA8BF92C84895BFE59D8236 /* libPods-tf_camera_example.a */ = {isa = PBXFileReference; explicitFileType = archive.ar; includeInIndex = 0; path = "libPods-tf_camera_example.a"; sourceTree = BUILT_PRODUCTS_DIR; };
+ 3BC5BE4BBD09374D3E98F082 /* Pods-tf_camera_example.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-tf_camera_example.debug.xcconfig"; path = "Pods/Target Support Files/Pods-tf_camera_example/Pods-tf_camera_example.debug.xcconfig"; sourceTree = "<group>"; };
+ 55ED318E8D29C8AFEF03DF1E /* Pods-tf_camera_example.release.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-tf_camera_example.release.xcconfig"; path = "Pods/Target Support Files/Pods-tf_camera_example/Pods-tf_camera_example.release.xcconfig"; sourceTree = "<group>"; };
+ 59A3CFF51CF4E68100C4259F /* grace_hopper.jpg */ = {isa = PBXFileReference; lastKnownFileType = image.jpeg; path = grace_hopper.jpg; sourceTree = "<group>"; };
+ 59A3CFF71CF4E68100C4259F /* imagenet_comp_graph_label_strings.txt */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text; path = imagenet_comp_graph_label_strings.txt; sourceTree = "<group>"; };
+ 59A3CFF91CF4E68100C4259F /* tensorflow_inception_graph.pb */ = {isa = PBXFileReference; lastKnownFileType = file; path = tensorflow_inception_graph.pb; sourceTree = "<group>"; };
+/* End PBXFileReference section */
+
+/* Begin PBXFrameworksBuildPhase section */
+ 1C564C0A1ED3A92E00087306 /* Frameworks */ = {
+ isa = PBXFrameworksBuildPhase;
+ buildActionMask = 2147483647;
+ files = (
+ 1CB47D491ED3AD1700DF7666 /* AVFoundation.framework in Frameworks */,
+ 1CA5EB931ED3ABFB00247A34 /* CoreMedia.framework in Frameworks */,
+ 54DC6C3C5F734F3A58069F0C /* libPods-tf_camera_example.a in Frameworks */,
+ );
+ runOnlyForDeploymentPostprocessing = 0;
+ };
+/* End PBXFrameworksBuildPhase section */
+
+/* Begin PBXGroup section */
+ 24D7686C331131624F4454A0 /* Frameworks */ = {
+ isa = PBXGroup;
+ children = (
+ 1CB47D481ED3AD1700DF7666 /* AVFoundation.framework */,
+ 1CA5EB921ED3ABFB00247A34 /* CoreMedia.framework */,
+ 1C0D734A1ECCC460008C1DAB /* CoreGraphics.framework */,
+ 1C0D73481ECCC41B008C1DAB /* CoreImage.framework */,
+ 1CA45FFE1ECCC356002FA6A4 /* UIKit.framework */,
+ 3BA8BF92C84895BFE59D8236 /* libPods-tf_camera_example.a */,
+ );
+ name = Frameworks;
+ sourceTree = "<group>";
+ };
+ 3E9FC355632FB928EA23BEED /* Pods */ = {
+ isa = PBXGroup;
+ children = (
+ 3BC5BE4BBD09374D3E98F082 /* Pods-tf_camera_example.debug.xcconfig */,
+ 55ED318E8D29C8AFEF03DF1E /* Pods-tf_camera_example.release.xcconfig */,
+ );
+ name = Pods;
+ sourceTree = "<group>";
+ };
+ 591157921CF4011C00C31E3A = {
+ isa = PBXGroup;
+ children = (
+ 1C99111B1ED3B0E600A6BFB9 /* MainStoryboard_iPhone.storyboard */,
+ 1C3C9DC81ED3AB4200B8B5FA /* ios_image_load.h */,
+ 1C3C9DC91ED3AB4200B8B5FA /* ios_image_load.mm */,
+ 1C3C9DCA1ED3AB4200B8B5FA /* main.mm */,
+ 1CDB2D4D1ED3AA35007929E9 /* Info.plist */,
+ 1CDB2D421ED3A9CD007929E9 /* CameraExampleAppDelegate.h */,
+ 1CDB2D431ED3A9CD007929E9 /* CameraExampleAppDelegate.m */,
+ 1CDB2D441ED3A9CD007929E9 /* CameraExampleViewController.h */,
+ 1CDB2D451ED3A9CD007929E9 /* CameraExampleViewController.mm */,
+ 1CDB2D471ED3A9CD007929E9 /* tensorflow_utils.h */,
+ 1CDB2D481ED3A9CD007929E9 /* tensorflow_utils.mm */,
+ 59A3CFF31CF4E68100C4259F /* data */,
+ 5911579C1CF4011C00C31E3A /* Products */,
+ 3E9FC355632FB928EA23BEED /* Pods */,
+ 24D7686C331131624F4454A0 /* Frameworks */,
+ );
+ sourceTree = "<group>";
+ };
+ 5911579C1CF4011C00C31E3A /* Products */ = {
+ isa = PBXGroup;
+ children = (
+ 1C564C0D1ED3A92E00087306 /* tf_camera_example.app */,
+ );
+ name = Products;
+ sourceTree = "<group>";
+ };
+ 59A3CFF31CF4E68100C4259F /* data */ = {
+ isa = PBXGroup;
+ children = (
+ 59A3CFF51CF4E68100C4259F /* grace_hopper.jpg */,
+ 59A3CFF71CF4E68100C4259F /* imagenet_comp_graph_label_strings.txt */,
+ 59A3CFF91CF4E68100C4259F /* tensorflow_inception_graph.pb */,
+ );
+ path = data;
+ sourceTree = "<group>";
+ };
+/* End PBXGroup section */
+
+/* Begin PBXNativeTarget section */
+ 1C564C0C1ED3A92E00087306 /* tf_camera_example */ = {
+ isa = PBXNativeTarget;
+ buildConfigurationList = 1C564C351ED3A92E00087306 /* Build configuration list for PBXNativeTarget "tf_camera_example" */;
+ buildPhases = (
+ 66DAEAAEE9EF6550C3A061E0 /* [CP] Check Pods Manifest.lock */,
+ 1C564C091ED3A92E00087306 /* Sources */,
+ 1C564C0A1ED3A92E00087306 /* Frameworks */,
+ 1C564C0B1ED3A92E00087306 /* Resources */,
+ 00E875C3B066535AE6B77101 /* [CP] Embed Pods Frameworks */,
+ 5C2D02120E3E5E09567AA946 /* [CP] Copy Pods Resources */,
+ );
+ buildRules = (
+ );
+ dependencies = (
+ );
+ name = tf_camera_example;
+ productName = tf_camera_example;
+ productReference = 1C564C0D1ED3A92E00087306 /* tf_camera_example.app */;
+ productType = "com.apple.product-type.application";
+ };
+/* End PBXNativeTarget section */
+
+/* Begin PBXProject section */
+ 591157931CF4011C00C31E3A /* Project object */ = {
+ isa = PBXProject;
+ attributes = {
+ LastSwiftUpdateCheck = 0830;
+ LastUpgradeCheck = 0830;
+ ORGANIZATIONNAME = Google;
+ TargetAttributes = {
+ 1C564C0C1ED3A92E00087306 = {
+ CreatedOnToolsVersion = 8.3.2;
+ DevelopmentTeam = 5DRPWFQSHP;
+ ProvisioningStyle = Automatic;
+ };
+ };
+ };
+ buildConfigurationList = 591157961CF4011C00C31E3A /* Build configuration list for PBXProject "tf_camera_example" */;
+ compatibilityVersion = "Xcode 3.2";
+ developmentRegion = English;
+ hasScannedForEncodings = 0;
+ knownRegions = (
+ en,
+ Base,
+ );
+ mainGroup = 591157921CF4011C00C31E3A;
+ productRefGroup = 5911579C1CF4011C00C31E3A /* Products */;
+ projectDirPath = "";
+ projectRoot = "";
+ targets = (
+ 1C564C0C1ED3A92E00087306 /* tf_camera_example */,
+ );
+ };
+/* End PBXProject section */
+
+/* Begin PBXResourcesBuildPhase section */
+ 1C564C0B1ED3A92E00087306 /* Resources */ = {
+ isa = PBXResourcesBuildPhase;
+ buildActionMask = 2147483647;
+ files = (
+ 1C968D171ED3B8F20054F5C3 /* grace_hopper.jpg in Resources */,
+ 1C968D181ED3B8F20054F5C3 /* imagenet_comp_graph_label_strings.txt in Resources */,
+ 1C968D191ED3B8F20054F5C3 /* tensorflow_inception_graph.pb in Resources */,
+ 1C99111C1ED3B0E600A6BFB9 /* MainStoryboard_iPhone.storyboard in Resources */,
+ 1CDB2D4E1ED3AA35007929E9 /* Info.plist in Resources */,
+ );
+ runOnlyForDeploymentPostprocessing = 0;
+ };
+/* End PBXResourcesBuildPhase section */
+
+/* Begin PBXShellScriptBuildPhase section */
+ 00E875C3B066535AE6B77101 /* [CP] Embed Pods Frameworks */ = {
+ isa = PBXShellScriptBuildPhase;
+ buildActionMask = 2147483647;
+ files = (
+ );
+ inputPaths = (
+ );
+ name = "[CP] Embed Pods Frameworks";
+ outputPaths = (
+ );
+ runOnlyForDeploymentPostprocessing = 0;
+ shellPath = /bin/sh;
+ shellScript = "\"${SRCROOT}/Pods/Target Support Files/Pods-tf_camera_example/Pods-tf_camera_example-frameworks.sh\"\n";
+ showEnvVarsInLog = 0;
+ };
+ 5C2D02120E3E5E09567AA946 /* [CP] Copy Pods Resources */ = {
+ isa = PBXShellScriptBuildPhase;
+ buildActionMask = 2147483647;
+ files = (
+ );
+ inputPaths = (
+ );
+ name = "[CP] Copy Pods Resources";
+ outputPaths = (
+ );
+ runOnlyForDeploymentPostprocessing = 0;
+ shellPath = /bin/sh;
+ shellScript = "\"${SRCROOT}/Pods/Target Support Files/Pods-tf_camera_example/Pods-tf_camera_example-resources.sh\"\n";
+ showEnvVarsInLog = 0;
+ };
+ 66DAEAAEE9EF6550C3A061E0 /* [CP] Check Pods Manifest.lock */ = {
+ isa = PBXShellScriptBuildPhase;
+ buildActionMask = 2147483647;
+ files = (
+ );
+ inputPaths = (
+ );
+ name = "[CP] Check Pods Manifest.lock";
+ outputPaths = (
+ );
+ runOnlyForDeploymentPostprocessing = 0;
+ shellPath = /bin/sh;
+ shellScript = "diff \"${PODS_PODFILE_DIR_PATH}/Podfile.lock\" \"${PODS_ROOT}/Manifest.lock\" > /dev/null\nif [ $? != 0 ] ; then\n # print error to STDERR\n echo \"error: The sandbox is not in sync with the Podfile.lock. Run 'pod install' or update your CocoaPods installation.\" >&2\n exit 1\nfi\n";
+ showEnvVarsInLog = 0;
+ };
+/* End PBXShellScriptBuildPhase section */
+
+/* Begin PBXSourcesBuildPhase section */
+ 1C564C091ED3A92E00087306 /* Sources */ = {
+ isa = PBXSourcesBuildPhase;
+ buildActionMask = 2147483647;
+ files = (
+ 1CDB2D4C1ED3A9CD007929E9 /* tensorflow_utils.mm in Sources */,
+ 1C3C9DCB1ED3AB4200B8B5FA /* ios_image_load.mm in Sources */,
+ 1CDB2D4A1ED3A9CD007929E9 /* CameraExampleViewController.mm in Sources */,
+ 1CDB2D491ED3A9CD007929E9 /* CameraExampleAppDelegate.m in Sources */,
+ 1C3C9DCC1ED3AB4200B8B5FA /* main.mm in Sources */,
+ );
+ runOnlyForDeploymentPostprocessing = 0;
+ };
+/* End PBXSourcesBuildPhase section */
+
+/* Begin XCBuildConfiguration section */
+ 1C564C361ED3A92E00087306 /* Debug */ = {
+ isa = XCBuildConfiguration;
+ baseConfigurationReference = 3BC5BE4BBD09374D3E98F082 /* Pods-tf_camera_example.debug.xcconfig */;
+ buildSettings = {
+ ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
+ CLANG_ANALYZER_NONNULL = YES;
+ CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE;
+ CLANG_WARN_DOCUMENTATION_COMMENTS = YES;
+ DEVELOPMENT_TEAM = 5DRPWFQSHP;
+ INFOPLIST_FILE = Info.plist;
+ IPHONEOS_DEPLOYMENT_TARGET = 10.3;
+ LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks";
+ PRODUCT_BUNDLE_IDENTIFIER = "com.pf.tf-camera-example";
+ PRODUCT_NAME = "$(TARGET_NAME)";
+ SWIFT_ACTIVE_COMPILATION_CONDITIONS = DEBUG;
+ SWIFT_OPTIMIZATION_LEVEL = "-Onone";
+ SWIFT_VERSION = 3.0;
+ };
+ name = Debug;
+ };
+ 1C564C371ED3A92E00087306 /* Release */ = {
+ isa = XCBuildConfiguration;
+ baseConfigurationReference = 55ED318E8D29C8AFEF03DF1E /* Pods-tf_camera_example.release.xcconfig */;
+ buildSettings = {
+ ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
+ CLANG_ANALYZER_NONNULL = YES;
+ CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE;
+ CLANG_WARN_DOCUMENTATION_COMMENTS = YES;
+ DEVELOPMENT_TEAM = 5DRPWFQSHP;
+ INFOPLIST_FILE = Info.plist;
+ IPHONEOS_DEPLOYMENT_TARGET = 10.3;
+ LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks";
+ PRODUCT_BUNDLE_IDENTIFIER = "com.pf.tf-camera-example";
+ PRODUCT_NAME = "$(TARGET_NAME)";
+ SWIFT_OPTIMIZATION_LEVEL = "-Owholemodule";
+ SWIFT_VERSION = 3.0;
+ };
+ name = Release;
+ };
+ 591157B01CF4011D00C31E3A /* Debug */ = {
+ isa = XCBuildConfiguration;
+ buildSettings = {
+ ALWAYS_SEARCH_USER_PATHS = NO;
+ CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x";
+ CLANG_CXX_LIBRARY = "libc++";
+ CLANG_ENABLE_MODULES = YES;
+ CLANG_ENABLE_OBJC_ARC = YES;
+ CLANG_WARN_BOOL_CONVERSION = YES;
+ CLANG_WARN_CONSTANT_CONVERSION = YES;
+ CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
+ CLANG_WARN_EMPTY_BODY = YES;
+ CLANG_WARN_ENUM_CONVERSION = YES;
+ CLANG_WARN_INFINITE_RECURSION = YES;
+ CLANG_WARN_INT_CONVERSION = YES;
+ CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
+ CLANG_WARN_SUSPICIOUS_MOVE = YES;
+ CLANG_WARN_UNREACHABLE_CODE = YES;
+ CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
+ "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer";
+ COPY_PHASE_STRIP = NO;
+ DEBUG_INFORMATION_FORMAT = dwarf;
+ ENABLE_STRICT_OBJC_MSGSEND = YES;
+ ENABLE_TESTABILITY = YES;
+ GCC_C_LANGUAGE_STANDARD = gnu99;
+ GCC_DYNAMIC_NO_PIC = NO;
+ GCC_NO_COMMON_BLOCKS = YES;
+ GCC_OPTIMIZATION_LEVEL = 0;
+ GCC_PREPROCESSOR_DEFINITIONS = (
+ "DEBUG=1",
+ "$(inherited)",
+ );
+ GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
+ GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
+ GCC_WARN_UNDECLARED_SELECTOR = YES;
+ GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
+ GCC_WARN_UNUSED_FUNCTION = YES;
+ GCC_WARN_UNUSED_VARIABLE = YES;
+ IPHONEOS_DEPLOYMENT_TARGET = 8.0;
+ MTL_ENABLE_DEBUG_INFO = YES;
+ ONLY_ACTIVE_ARCH = YES;
+ SDKROOT = iphoneos;
+ TARGETED_DEVICE_FAMILY = "1,2";
+ };
+ name = Debug;
+ };
+ 591157B11CF4011D00C31E3A /* Release */ = {
+ isa = XCBuildConfiguration;
+ buildSettings = {
+ ALWAYS_SEARCH_USER_PATHS = NO;
+ CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x";
+ CLANG_CXX_LIBRARY = "libc++";
+ CLANG_ENABLE_MODULES = YES;
+ CLANG_ENABLE_OBJC_ARC = YES;
+ CLANG_WARN_BOOL_CONVERSION = YES;
+ CLANG_WARN_CONSTANT_CONVERSION = YES;
+ CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
+ CLANG_WARN_EMPTY_BODY = YES;
+ CLANG_WARN_ENUM_CONVERSION = YES;
+ CLANG_WARN_INFINITE_RECURSION = YES;
+ CLANG_WARN_INT_CONVERSION = YES;
+ CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
+ CLANG_WARN_SUSPICIOUS_MOVE = YES;
+ CLANG_WARN_UNREACHABLE_CODE = YES;
+ CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
+ "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer";
+ COPY_PHASE_STRIP = NO;
+ DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym";
+ ENABLE_NS_ASSERTIONS = NO;
+ ENABLE_STRICT_OBJC_MSGSEND = YES;
+ GCC_C_LANGUAGE_STANDARD = gnu99;
+ GCC_NO_COMMON_BLOCKS = YES;
+ GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
+ GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
+ GCC_WARN_UNDECLARED_SELECTOR = YES;
+ GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
+ GCC_WARN_UNUSED_FUNCTION = YES;
+ GCC_WARN_UNUSED_VARIABLE = YES;
+ IPHONEOS_DEPLOYMENT_TARGET = 8.0;
+ MTL_ENABLE_DEBUG_INFO = NO;
+ SDKROOT = iphoneos;
+ TARGETED_DEVICE_FAMILY = "1,2";
+ VALIDATE_PRODUCT = YES;
+ };
+ name = Release;
+ };
+/* End XCBuildConfiguration section */
+
+/* Begin XCConfigurationList section */
+ 1C564C351ED3A92E00087306 /* Build configuration list for PBXNativeTarget "tf_camera_example" */ = {
+ isa = XCConfigurationList;
+ buildConfigurations = (
+ 1C564C361ED3A92E00087306 /* Debug */,
+ 1C564C371ED3A92E00087306 /* Release */,
+ );
+ defaultConfigurationIsVisible = 0;
+ defaultConfigurationName = Release;
+ };
+ 591157961CF4011C00C31E3A /* Build configuration list for PBXProject "tf_camera_example" */ = {
+ isa = XCConfigurationList;
+ buildConfigurations = (
+ 591157B01CF4011D00C31E3A /* Debug */,
+ 591157B11CF4011D00C31E3A /* Release */,
+ );
+ defaultConfigurationIsVisible = 0;
+ defaultConfigurationName = Release;
+ };
+/* End XCConfigurationList section */
+ };
+ rootObject = 591157931CF4011C00C31E3A /* Project object */;
+}
diff --git a/tensorflow/examples/ios/simple/AppDelegate.h b/tensorflow/examples/ios/simple/AppDelegate.h
new file mode 100644
index 0000000000..75b1f1da38
--- /dev/null
+++ b/tensorflow/examples/ios/simple/AppDelegate.h
@@ -0,0 +1,21 @@
+// Copyright 2015 Google Inc. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#import <UIKit/UIKit.h>
+
+@interface AppDelegate : UIResponder <UIApplicationDelegate>
+
+@property (strong, nonatomic) UIWindow *window;
+
+@end
diff --git a/tensorflow/examples/ios/simple/AppDelegate.mm b/tensorflow/examples/ios/simple/AppDelegate.mm
new file mode 100644
index 0000000000..1e808eb976
--- /dev/null
+++ b/tensorflow/examples/ios/simple/AppDelegate.mm
@@ -0,0 +1,44 @@
+// Copyright 2015 Google Inc. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#import "AppDelegate.h"
+
+#import "RunModelViewController.h"
+
+@implementation AppDelegate
+
+- (BOOL)application:(UIApplication *)application
+ didFinishLaunchingWithOptions:(NSDictionary *)launchOptions {
+
+ UITabBarController *bar = [[UITabBarController alloc] init];
+ [bar setViewControllers:
+ @[[[RunModelViewController alloc] init]]];
+ bar.selectedIndex = 0;
+ self.window = [[UIWindow alloc] initWithFrame:[[UIScreen mainScreen] bounds]];
+ self.window.rootViewController = bar;
+ [self.window makeKeyAndVisible];
+ return YES;
+}
+
+- (void)applicationWillResignActive:(UIApplication *)application {}
+
+- (void)applicationDidEnterBackground:(UIApplication *)application {}
+
+- (void)applicationWillEnterForeground:(UIApplication *)application {}
+
+- (void)applicationDidBecomeActive:(UIApplication *)application {}
+
+- (void)applicationWillTerminate:(UIApplication *)application {}
+
+@end
diff --git a/tensorflow/examples/ios/simple/Podfile b/tensorflow/examples/ios/simple/Podfile
new file mode 100644
index 0000000000..1740ad6457
--- /dev/null
+++ b/tensorflow/examples/ios/simple/Podfile
@@ -0,0 +1,5 @@
+platform :ios, '8.0'
+inhibit_all_warnings!
+
+target 'tf_simple_example'
+ pod 'TensorFlow-experimental'
diff --git a/tensorflow/examples/ios/simple/RunModel-Info.plist b/tensorflow/examples/ios/simple/RunModel-Info.plist
new file mode 100644
index 0000000000..d0a8742456
--- /dev/null
+++ b/tensorflow/examples/ios/simple/RunModel-Info.plist
@@ -0,0 +1,47 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
+<plist version="1.0">
+<dict>
+ <key>CFBundleDevelopmentRegion</key>
+ <string>en</string>
+ <key>CFBundleDisplayName</key>
+ <string>tf_simple_example</string>
+ <key>CFBundleExecutable</key>
+ <string>tf_simple_example</string>
+ <key>CFBundleIdentifier</key>
+ <string>$(PRODUCT_BUNDLE_IDENTIFIER)</string>
+ <key>CFBundleInfoDictionaryVersion</key>
+ <string>6.0</string>
+ <key>CFBundleName</key>
+ <string>ios-app</string>
+ <key>CFBundlePackageType</key>
+ <string>APPL</string>
+ <key>CFBundleShortVersionString</key>
+ <string>1.0</string>
+ <key>CFBundleSignature</key>
+ <string>????</string>
+ <key>CFBundleVersion</key>
+ <string>1.0</string>
+ <key>LSRequiresIPhoneOS</key>
+ <true/>
+ <key>UILaunchStoryboardName</key>
+ <string>RunModelViewController</string>
+ <key>UIRequiredDeviceCapabilities</key>
+ <array>
+ <string>armv7</string>
+ </array>
+ <key>UISupportedInterfaceOrientations</key>
+ <array>
+ <string>UIInterfaceOrientationPortrait</string>
+ <string>UIInterfaceOrientationLandscapeLeft</string>
+ <string>UIInterfaceOrientationLandscapeRight</string>
+ </array>
+ <key>UISupportedInterfaceOrientations~ipad</key>
+ <array>
+ <string>UIInterfaceOrientationPortrait</string>
+ <string>UIInterfaceOrientationPortraitUpsideDown</string>
+ <string>UIInterfaceOrientationLandscapeLeft</string>
+ <string>UIInterfaceOrientationLandscapeRight</string>
+ </array>
+</dict>
+</plist>
diff --git a/tensorflow/examples/ios/simple/RunModelViewController.h b/tensorflow/examples/ios/simple/RunModelViewController.h
new file mode 100644
index 0000000000..4e1a83ccf5
--- /dev/null
+++ b/tensorflow/examples/ios/simple/RunModelViewController.h
@@ -0,0 +1,24 @@
+// Copyright 2015 Google Inc. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#import <UIKit/UIKit.h>
+
+@interface RunModelViewController : UIViewController
+
+- (IBAction)getUrl:(id)sender;
+
+@property (weak, nonatomic) IBOutlet UITextView *urlContentTextView;
+@property (weak, nonatomic) IBOutlet UITextField *urlTextField;
+
+@end
diff --git a/tensorflow/examples/ios/simple/RunModelViewController.mm b/tensorflow/examples/ios/simple/RunModelViewController.mm
new file mode 100644
index 0000000000..c8ccb5c77b
--- /dev/null
+++ b/tensorflow/examples/ios/simple/RunModelViewController.mm
@@ -0,0 +1,253 @@
+// Copyright 2015 Google Inc. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#import "RunModelViewController.h"
+
+#include <fstream>
+#include <pthread.h>
+#include <unistd.h>
+#include <queue>
+#include <sstream>
+#include <string>
+
+#include "tensorflow/core/framework/op_kernel.h"
+#include "tensorflow/core/public/session.h"
+
+#include "ios_image_load.h"
+
+NSString* RunInferenceOnImage();
+
+namespace {
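+// Adapts a std::ifstream to protobuf's CopyingInputStream interface so a
+// CopyingInputStreamAdaptor can stream the GraphDef straight from disk
+// instead of requiring the whole file in one contiguous buffer.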
+class IfstreamInputStream : public ::google::protobuf::io::CopyingInputStream {
+ public:
+ explicit IfstreamInputStream(const std::string& file_name)
+ : ifs_(file_name.c_str(), std::ios::in | std::ios::binary) {}
+ ~IfstreamInputStream() { ifs_.close(); }
+
+ int Read(void* buffer, int size) {
+ if (!ifs_) {
+ return -1;
+ }
+ ifs_.read(static_cast<char*>(buffer), size);
+ return (int)ifs_.gcount();
+ }
+
+ private:
+ std::ifstream ifs_;
+};
+} // namespace
+
+@interface RunModelViewController ()
+@end
+
+@implementation RunModelViewController {
+}
+
+- (IBAction)getUrl:(id)sender {
+ NSString* inference_result = RunInferenceOnImage();
+ self.urlContentTextView.text = inference_result;
+}
+
+@end
+
+// Returns the top N confidence values over threshold in the provided vector,
+// sorted by confidence in descending order.
+static void GetTopN(
+ const Eigen::TensorMap<Eigen::Tensor<float, 1, Eigen::RowMajor>,
+ Eigen::Aligned>& prediction,
+ const int num_results, const float threshold,
+ std::vector<std::pair<float, int> >* top_results) {
+ // Will contain top N results in ascending order.
+ std::priority_queue<std::pair<float, int>,
+ std::vector<std::pair<float, int> >,
+ std::greater<std::pair<float, int> > > top_result_pq;
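+  // std::greater turns the queue into a min-heap: the weakest of the current
+  // top results sits on top, so it is cheap to evict when a better one arrives.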
+
+ const long count = prediction.size();
+ for (int i = 0; i < count; ++i) {
+ const float value = prediction(i);
+
+ // Only add it if it beats the threshold and has a chance at being in
+ // the top N.
+ if (value < threshold) {
+ continue;
+ }
+
+ top_result_pq.push(std::pair<float, int>(value, i));
+
+ // If at capacity, kick the smallest value out.
+ if (top_result_pq.size() > num_results) {
+ top_result_pq.pop();
+ }
+ }
+
+ // Copy to output vector and reverse into descending order.
+ while (!top_result_pq.empty()) {
+ top_results->push_back(top_result_pq.top());
+ top_result_pq.pop();
+ }
+ std::reverse(top_results->begin(), top_results->end());
+}
+
+
+bool PortableReadFileToProto(const std::string& file_name,
+ ::google::protobuf::MessageLite* proto) {
+ ::google::protobuf::io::CopyingInputStreamAdaptor stream(
+ new IfstreamInputStream(file_name));
+ stream.SetOwnsCopyingStream(true);
+  // TODO(jiayq): the following coded stream is for debugging purposes to allow
+  // one to parse arbitrarily large messages for MessageLite. One most likely
+  // doesn't want to ship protobufs larger than 64MB in a mobile app, so we
+  // should eventually remove this and fail loudly when a large protobuf is
+  // passed in.
+ ::google::protobuf::io::CodedInputStream coded_stream(&stream);
+ // Total bytes hard limit / warning limit are set to 1GB and 512MB
+ // respectively.
+ coded_stream.SetTotalBytesLimit(1024LL << 20, 512LL << 20);
+ return proto->ParseFromCodedStream(&coded_stream);
+}
+
+NSString* FilePathForResourceName(NSString* name, NSString* extension) {
+ NSString* file_path = [[NSBundle mainBundle] pathForResource:name ofType:extension];
+  if (file_path == nil) {
+ LOG(FATAL) << "Couldn't find '" << [name UTF8String] << "."
+ << [extension UTF8String] << "' in bundle.";
+ }
+ return file_path;
+}
+
+NSString* RunInferenceOnImage() {
+ tensorflow::SessionOptions options;
+
+ tensorflow::Session* session_pointer = nullptr;
+ tensorflow::Status session_status = tensorflow::NewSession(options, &session_pointer);
+ if (!session_status.ok()) {
+ std::string status_string = session_status.ToString();
+ return [NSString stringWithFormat: @"Session create failed - %s",
+ status_string.c_str()];
+ }
+ std::unique_ptr<tensorflow::Session> session(session_pointer);
+ LOG(INFO) << "Session created.";
+
+ tensorflow::GraphDef tensorflow_graph;
+ LOG(INFO) << "Graph created.";
+
+ NSString* network_path = FilePathForResourceName(@"tensorflow_inception_graph", @"pb");
+ PortableReadFileToProto([network_path UTF8String], &tensorflow_graph);
+
+ LOG(INFO) << "Creating session.";
+ tensorflow::Status s = session->Create(tensorflow_graph);
+ if (!s.ok()) {
+ LOG(ERROR) << "Could not create TensorFlow Graph: " << s;
+ return @"";
+ }
+
+ // Read the label list
+ NSString* labels_path = FilePathForResourceName(@"imagenet_comp_graph_label_strings", @"txt");
+ std::vector<std::string> label_strings;
+ std::ifstream t;
+ t.open([labels_path UTF8String]);
+ std::string line;
+  while (std::getline(t, line)) {
+    label_strings.push_back(line);
+  }
+ t.close();
+
+ // Read the Grace Hopper image.
+ NSString* image_path = FilePathForResourceName(@"grace_hopper", @"jpg");
+ int image_width;
+ int image_height;
+ int image_channels;
+ std::vector<tensorflow::uint8> image_data = LoadImageFromFile(
+ [image_path UTF8String], &image_width, &image_height, &image_channels);
+ const int wanted_width = 224;
+ const int wanted_height = 224;
+ const int wanted_channels = 3;
+ const float input_mean = 117.0f;
+ const float input_std = 1.0f;
+ assert(image_channels >= wanted_channels);
+ tensorflow::Tensor image_tensor(
+ tensorflow::DT_FLOAT,
+ tensorflow::TensorShape({
+ 1, wanted_height, wanted_width, wanted_channels}));
+ auto image_tensor_mapped = image_tensor.tensor<float, 4>();
+ tensorflow::uint8* in = image_data.data();
+ float* out = image_tensor_mapped.data();
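+  // Shrink the decoded image to the 224x224 input the Inception graph expects,
+  // picking the nearest source pixel for every output pixel and normalizing
+  // each channel with (value - input_mean) / input_std.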
+ for (int y = 0; y < wanted_height; ++y) {
+ const int in_y = (y * image_height) / wanted_height;
+ tensorflow::uint8* in_row = in + (in_y * image_width * image_channels);
+ float* out_row = out + (y * wanted_width * wanted_channels);
+ for (int x = 0; x < wanted_width; ++x) {
+ const int in_x = (x * image_width) / wanted_width;
+ tensorflow::uint8* in_pixel = in_row + (in_x * image_channels);
+ float* out_pixel = out_row + (x * wanted_channels);
+ for (int c = 0; c < wanted_channels; ++c) {
+ out_pixel[c] = (in_pixel[c] - input_mean) / input_std;
+ }
+ }
+ }
+
+ NSString* result = [network_path stringByAppendingString: @" - loaded!"];
+ result = [NSString stringWithFormat: @"%@ - %lu, %s - %dx%d", result,
+ label_strings.size(), label_strings[0].c_str(), image_width, image_height];
+
+ std::string input_layer = "input";
+ std::string output_layer = "output";
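+  // These names must match the input placeholder and output node of the
+  // bundled tensorflow_inception_graph.pb; update them if you swap in a
+  // different model.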
+ std::vector<tensorflow::Tensor> outputs;
+ tensorflow::Status run_status = session->Run({{input_layer, image_tensor}},
+ {output_layer}, {}, &outputs);
+ if (!run_status.ok()) {
+ LOG(ERROR) << "Running model failed: " << run_status;
+ tensorflow::LogAllRegisteredKernels();
+ result = @"Error running model";
+ return result;
+ }
+ tensorflow::string status_string = run_status.ToString();
+ result = [NSString stringWithFormat: @"%@ - %s", result,
+ status_string.c_str()];
+
+ tensorflow::Tensor* output = &outputs[0];
+ const int kNumResults = 5;
+ const float kThreshold = 0.1f;
+ std::vector<std::pair<float, int> > top_results;
+ GetTopN(output->flat<float>(), kNumResults, kThreshold, &top_results);
+
+ std::stringstream ss;
+ ss.precision(3);
+  for (const auto& top_result : top_results) {
+    const float confidence = top_result.first;
+    const int index = top_result.second;
+
+ ss << index << " " << confidence << " ";
+
+ // Write out the result as a string
+ if (index < label_strings.size()) {
+      // Just for safety: theoretically the index stays below the number of
+      // labels (1000) unless numerical issues lead to a wrong prediction.
+ ss << label_strings[index];
+ } else {
+ ss << "Prediction: " << index;
+ }
+
+ ss << "\n";
+ }
+
+ LOG(INFO) << "Predictions: " << ss.str();
+
+ tensorflow::string predictions = ss.str();
+ result = [NSString stringWithFormat: @"%@ - %s", result,
+ predictions.c_str()];
+
+ return result;
+}
diff --git a/tensorflow/examples/ios/simple/RunModelViewController.xib b/tensorflow/examples/ios/simple/RunModelViewController.xib
new file mode 100644
index 0000000000..93f334b985
--- /dev/null
+++ b/tensorflow/examples/ios/simple/RunModelViewController.xib
@@ -0,0 +1,46 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<document type="com.apple.InterfaceBuilder3.CocoaTouch.XIB" version="3.0" toolsVersion="9531" systemVersion="15D21" targetRuntime="iOS.CocoaTouch" propertyAccessControl="none" useAutolayout="YES">
+ <dependencies>
+ <plugIn identifier="com.apple.InterfaceBuilder.IBCocoaTouchPlugin" version="9529"/>
+ </dependencies>
+ <objects>
+ <placeholder placeholderIdentifier="IBFilesOwner" id="-1" userLabel="File's Owner" customClass="RunModelViewController">
+ <connections>
+ <outlet property="urlContentTextView" destination="quY-AK-ZCn" id="YjW-BO-1Ta"/>
+ <outlet property="urlTextField" destination="hPw-q5-vh5" id="wmc-b6-2CV"/>
+ <outlet property="view" destination="1" id="iHm-Rr-4wj"/>
+ </connections>
+ </placeholder>
+ <placeholder placeholderIdentifier="IBFirstResponder" id="-2" customClass="UIResponder"/>
+ <view contentMode="scaleToFill" id="1">
+ <rect key="frame" x="0.0" y="0.0" width="320" height="568"/>
+ <autoresizingMask key="autoresizingMask" widthSizable="YES" heightSizable="YES"/>
+ <subviews>
+ <textView clipsSubviews="YES" contentMode="scaleToFill" fixedFrame="YES" editable="NO" text="The results of running the model will appear here." selectable="NO" translatesAutoresizingMaskIntoConstraints="NO" id="quY-AK-ZCn">
+ <rect key="frame" x="40" y="99" width="240" height="168"/>
+ <color key="backgroundColor" white="1" alpha="1" colorSpace="calibratedWhite"/>
+ <fontDescription key="fontDescription" type="system" pointSize="14"/>
+ <textInputTraits key="textInputTraits" autocapitalizationType="sentences"/>
+ </textView>
+ <button opaque="NO" contentMode="scaleToFill" fixedFrame="YES" contentHorizontalAlignment="center" contentVerticalAlignment="center" buttonType="roundedRect" lineBreakMode="middleTruncation" translatesAutoresizingMaskIntoConstraints="NO" id="AAC-Bk-PCC">
+ <rect key="frame" x="76" y="37" width="168" height="30"/>
+ <color key="backgroundColor" white="0.33333333333333331" alpha="1" colorSpace="calibratedWhite"/>
+ <state key="normal" title="Run Model">
+ <color key="titleShadowColor" white="0.5" alpha="1" colorSpace="calibratedWhite"/>
+ </state>
+ <connections>
+ <action selector="getUrl:" destination="-1" eventType="touchUpInside" id="mdP-nK-k9T"/>
+ </connections>
+ </button>
+ </subviews>
+ <color key="backgroundColor" red="0.78314738357315861" green="0.79869981749999996" blue="0.56305065858222869" alpha="1" colorSpace="calibratedRGB"/>
+ </view>
+ <textField opaque="NO" clipsSubviews="YES" contentMode="scaleToFill" contentHorizontalAlignment="left" contentVerticalAlignment="center" text="http://localhost:8080" borderStyle="roundedRect" placeholder="Enter URL" minimumFontSize="17" id="hPw-q5-vh5">
+ <rect key="frame" x="0.0" y="0.0" width="280" height="30"/>
+ <autoresizingMask key="autoresizingMask" flexibleMaxX="YES" flexibleMaxY="YES"/>
+ <fontDescription key="fontDescription" type="system" pointSize="14"/>
+ <textInputTraits key="textInputTraits"/>
+ <point key="canvasLocation" x="795" y="44"/>
+ </textField>
+ </objects>
+</document>
diff --git a/tensorflow/examples/ios/simple/data/grace_hopper.jpg b/tensorflow/examples/ios/simple/data/grace_hopper.jpg
new file mode 100644
index 0000000000..d2a427810f
--- /dev/null
+++ b/tensorflow/examples/ios/simple/data/grace_hopper.jpg
Binary files differ
diff --git a/tensorflow/examples/ios/simple/ios_image_load.h b/tensorflow/examples/ios/simple/ios_image_load.h
new file mode 100644
index 0000000000..0e0b771118
--- /dev/null
+++ b/tensorflow/examples/ios/simple/ios_image_load.h
@@ -0,0 +1,27 @@
+// Copyright 2015 Google Inc. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#ifndef TENSORFLOW_EXAMPLES_IOS_IOS_IMAGE_LOAD_H_
+#define TENSORFLOW_EXAMPLES_IOS_IOS_IMAGE_LOAD_H_
+
+#include <vector>
+
+#include "tensorflow/core/framework/types.h"
+
+std::vector<tensorflow::uint8> LoadImageFromFile(const char* file_name,
+ int* out_width,
+ int* out_height,
+ int* out_channels);
+
+#endif // TENSORFLOW_EXAMPLES_IOS_IOS_IMAGE_LOAD_H_
diff --git a/tensorflow/examples/ios/simple/ios_image_load.mm b/tensorflow/examples/ios/simple/ios_image_load.mm
new file mode 100644
index 0000000000..64d1ea21cf
--- /dev/null
+++ b/tensorflow/examples/ios/simple/ios_image_load.mm
@@ -0,0 +1,87 @@
+// Copyright 2015 Google Inc. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "ios_image_load.h"
+
+#include <stdlib.h>
+#include <string.h>
+#include <assert.h>
+#include <stdio.h>
+
+#import <CoreImage/CoreImage.h>
+#import <ImageIO/ImageIO.h>
+
+using tensorflow::uint8;
+
+std::vector<uint8> LoadImageFromFile(const char* file_name,
+ int* out_width, int* out_height,
+ int* out_channels) {
+  FILE* file_handle = fopen(file_name, "rb");
+  if (file_handle == nullptr) {
+    fprintf(stderr, "Couldn't open file '%s'\n", file_name);
+    *out_width = 0;
+    *out_height = 0;
+    *out_channels = 0;
+    return std::vector<uint8>();
+  }
+ fseek(file_handle, 0, SEEK_END);
+ const size_t bytes_in_file = ftell(file_handle);
+ fseek(file_handle, 0, SEEK_SET);
+ std::vector<uint8> file_data(bytes_in_file);
+ fread(file_data.data(), 1, bytes_in_file, file_handle);
+ fclose(file_handle);
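+  // kCFAllocatorNull means CoreFoundation neither copies nor frees the bytes,
+  // so file_data must outlive file_data_ref.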
+ CFDataRef file_data_ref = CFDataCreateWithBytesNoCopy(NULL, file_data.data(),
+ bytes_in_file,
+ kCFAllocatorNull);
+ CGDataProviderRef image_provider =
+ CGDataProviderCreateWithCFData(file_data_ref);
+
+ const char* suffix = strrchr(file_name, '.');
+ if (!suffix || suffix == file_name) {
+ suffix = "";
+ }
+ CGImageRef image;
+ if (strcasecmp(suffix, ".png") == 0) {
+ image = CGImageCreateWithPNGDataProvider(image_provider, NULL, true,
+ kCGRenderingIntentDefault);
+ } else if ((strcasecmp(suffix, ".jpg") == 0) ||
+ (strcasecmp(suffix, ".jpeg") == 0)) {
+ image = CGImageCreateWithJPEGDataProvider(image_provider, NULL, true,
+ kCGRenderingIntentDefault);
+ } else {
+ CFRelease(image_provider);
+ CFRelease(file_data_ref);
+ fprintf(stderr, "Unknown suffix for file '%s'\n", file_name);
+ *out_width = 0;
+ *out_height = 0;
+ *out_channels = 0;
+ return std::vector<uint8>();
+ }
+
+ const int width = (int)CGImageGetWidth(image);
+ const int height = (int)CGImageGetHeight(image);
+ const int channels = 4;
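+  // The image is drawn into an RGBA bitmap context below, so callers always
+  // get four channels per pixel regardless of the source format.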
+ CGColorSpaceRef color_space = CGColorSpaceCreateDeviceRGB();
+ const int bytes_per_row = (width * channels);
+ const int bytes_in_image = (bytes_per_row * height);
+ std::vector<uint8> result(bytes_in_image);
+ const int bits_per_component = 8;
+ CGContextRef context = CGBitmapContextCreate(result.data(), width, height,
+ bits_per_component, bytes_per_row, color_space,
+ kCGImageAlphaPremultipliedLast | kCGBitmapByteOrder32Big);
+ CGColorSpaceRelease(color_space);
+ CGContextDrawImage(context, CGRectMake(0, 0, width, height), image);
+ CGContextRelease(context);
+ CFRelease(image);
+ CFRelease(image_provider);
+ CFRelease(file_data_ref);
+
+ *out_width = width;
+ *out_height = height;
+ *out_channels = channels;
+ return result;
+}
diff --git a/tensorflow/examples/ios/simple/main.mm b/tensorflow/examples/ios/simple/main.mm
new file mode 100644
index 0000000000..d70550a730
--- /dev/null
+++ b/tensorflow/examples/ios/simple/main.mm
@@ -0,0 +1,22 @@
+// Copyright 2015 Google Inc. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#import <UIKit/UIKit.h>
+
+int main(int argc, char * argv[]) {
+ @autoreleasepool {
+ NSString *delegateClassName = @"AppDelegate";
+ return UIApplicationMain(argc, argv, nil, delegateClassName);
+ }
+}
diff --git a/tensorflow/examples/ios/simple/tf_simple_example.xcodeproj/project.pbxproj b/tensorflow/examples/ios/simple/tf_simple_example.xcodeproj/project.pbxproj
new file mode 100644
index 0000000000..55c06e28fb
--- /dev/null
+++ b/tensorflow/examples/ios/simple/tf_simple_example.xcodeproj/project.pbxproj
@@ -0,0 +1,404 @@
+// !$*UTF8*$!
+{
+ archiveVersion = 1;
+ classes = {
+ };
+ objectVersion = 46;
+ objects = {
+
+/* Begin PBXBuildFile section */
+ 1C0D734B1ECCC460008C1DAB /* CoreGraphics.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 1C0D734A1ECCC460008C1DAB /* CoreGraphics.framework */; };
+ 1CA45FFF1ECCC356002FA6A4 /* UIKit.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 1CA45FFE1ECCC356002FA6A4 /* UIKit.framework */; };
+ 2530463E3C9A9D5FB9299C0E /* libPods-tf_simple_example.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 73DBC33C5DD9A526EE6D1EF2 /* libPods-tf_simple_example.a */; };
+ 59A3D0011CF4E68100C4259F /* AppDelegate.mm in Sources */ = {isa = PBXBuildFile; fileRef = 59A3CFF21CF4E68100C4259F /* AppDelegate.mm */; };
+ 59A3D0031CF4E68100C4259F /* grace_hopper.jpg in Resources */ = {isa = PBXBuildFile; fileRef = 59A3CFF51CF4E68100C4259F /* grace_hopper.jpg */; };
+ 59A3D0051CF4E68100C4259F /* imagenet_comp_graph_label_strings.txt in Resources */ = {isa = PBXBuildFile; fileRef = 59A3CFF71CF4E68100C4259F /* imagenet_comp_graph_label_strings.txt */; };
+ 59A3D0071CF4E68100C4259F /* tensorflow_inception_graph.pb in Resources */ = {isa = PBXBuildFile; fileRef = 59A3CFF91CF4E68100C4259F /* tensorflow_inception_graph.pb */; };
+ 59A3D0081CF4E68100C4259F /* ios_image_load.mm in Sources */ = {isa = PBXBuildFile; fileRef = 59A3CFFB1CF4E68100C4259F /* ios_image_load.mm */; };
+ 59A3D0091CF4E68100C4259F /* main.mm in Sources */ = {isa = PBXBuildFile; fileRef = 59A3CFFC1CF4E68100C4259F /* main.mm */; };
+ 59A3D00B1CF4E68100C4259F /* RunModelViewController.mm in Sources */ = {isa = PBXBuildFile; fileRef = 59A3CFFF1CF4E68100C4259F /* RunModelViewController.mm */; };
+ 59A3D00C1CF4E68100C4259F /* RunModelViewController.xib in Resources */ = {isa = PBXBuildFile; fileRef = 59A3D0001CF4E68100C4259F /* RunModelViewController.xib */; };
+/* End PBXBuildFile section */
+
+/* Begin PBXFileReference section */
+ 1C0D73481ECCC41B008C1DAB /* CoreImage.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = CoreImage.framework; path = System/Library/Frameworks/CoreImage.framework; sourceTree = SDKROOT; };
+ 1C0D734A1ECCC460008C1DAB /* CoreGraphics.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = CoreGraphics.framework; path = System/Library/Frameworks/CoreGraphics.framework; sourceTree = SDKROOT; };
+ 1CA45FFE1ECCC356002FA6A4 /* UIKit.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = UIKit.framework; path = System/Library/Frameworks/UIKit.framework; sourceTree = SDKROOT; };
+ 5911579B1CF4011C00C31E3A /* tf_simple_example.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = tf_simple_example.app; sourceTree = BUILT_PRODUCTS_DIR; };
+ 59A3CFF11CF4E68100C4259F /* AppDelegate.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = AppDelegate.h; sourceTree = "<group>"; };
+ 59A3CFF21CF4E68100C4259F /* AppDelegate.mm */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.objcpp; path = AppDelegate.mm; sourceTree = "<group>"; };
+ 59A3CFF51CF4E68100C4259F /* grace_hopper.jpg */ = {isa = PBXFileReference; lastKnownFileType = image.jpeg; path = grace_hopper.jpg; sourceTree = "<group>"; };
+ 59A3CFF71CF4E68100C4259F /* imagenet_comp_graph_label_strings.txt */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text; path = imagenet_comp_graph_label_strings.txt; sourceTree = "<group>"; };
+ 59A3CFF91CF4E68100C4259F /* tensorflow_inception_graph.pb */ = {isa = PBXFileReference; lastKnownFileType = file; path = tensorflow_inception_graph.pb; sourceTree = "<group>"; };
+ 59A3CFFA1CF4E68100C4259F /* ios_image_load.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = ios_image_load.h; sourceTree = "<group>"; };
+ 59A3CFFB1CF4E68100C4259F /* ios_image_load.mm */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.objcpp; path = ios_image_load.mm; sourceTree = "<group>"; };
+ 59A3CFFC1CF4E68100C4259F /* main.mm */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.objcpp; path = main.mm; sourceTree = "<group>"; };
+ 59A3CFFD1CF4E68100C4259F /* RunModel-Info.plist */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.plist.xml; path = "RunModel-Info.plist"; sourceTree = "<group>"; };
+ 59A3CFFE1CF4E68100C4259F /* RunModelViewController.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = RunModelViewController.h; sourceTree = "<group>"; };
+ 59A3CFFF1CF4E68100C4259F /* RunModelViewController.mm */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.objcpp; path = RunModelViewController.mm; sourceTree = "<group>"; };
+ 59A3D0001CF4E68100C4259F /* RunModelViewController.xib */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = file.xib; path = RunModelViewController.xib; sourceTree = "<group>"; };
+ 73DBC33C5DD9A526EE6D1EF2 /* libPods-tf_simple_example.a */ = {isa = PBXFileReference; explicitFileType = archive.ar; includeInIndex = 0; path = "libPods-tf_simple_example.a"; sourceTree = BUILT_PRODUCTS_DIR; };
+ 87ABECA6543FF90E81111A6D /* Pods-tf_simple_example.release.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-tf_simple_example.release.xcconfig"; path = "Pods/Target Support Files/Pods-tf_simple_example/Pods-tf_simple_example.release.xcconfig"; sourceTree = "<group>"; };
+ 8C94FEE43FD467468C5B75AA /* Pods-tf_simple_example.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-tf_simple_example.debug.xcconfig"; path = "Pods/Target Support Files/Pods-tf_simple_example/Pods-tf_simple_example.debug.xcconfig"; sourceTree = "<group>"; };
+/* End PBXFileReference section */
+
+/* Begin PBXFrameworksBuildPhase section */
+ 591157981CF4011C00C31E3A /* Frameworks */ = {
+ isa = PBXFrameworksBuildPhase;
+ buildActionMask = 2147483647;
+ files = (
+ 1C0D734B1ECCC460008C1DAB /* CoreGraphics.framework in Frameworks */,
+ 1CA45FFF1ECCC356002FA6A4 /* UIKit.framework in Frameworks */,
+ 2530463E3C9A9D5FB9299C0E /* libPods-tf_simple_example.a in Frameworks */,
+ );
+ runOnlyForDeploymentPostprocessing = 0;
+ };
+/* End PBXFrameworksBuildPhase section */
+
+/* Begin PBXGroup section */
+ 24D7686C331131624F4454A0 /* Frameworks */ = {
+ isa = PBXGroup;
+ children = (
+ 1C0D734A1ECCC460008C1DAB /* CoreGraphics.framework */,
+ 1C0D73481ECCC41B008C1DAB /* CoreImage.framework */,
+ 1CA45FFE1ECCC356002FA6A4 /* UIKit.framework */,
+ 73DBC33C5DD9A526EE6D1EF2 /* libPods-tf_simple_example.a */,
+ );
+ name = Frameworks;
+ sourceTree = "<group>";
+ };
+ 3E9FC355632FB928EA23BEED /* Pods */ = {
+ isa = PBXGroup;
+ children = (
+ 8C94FEE43FD467468C5B75AA /* Pods-tf_simple_example.debug.xcconfig */,
+ 87ABECA6543FF90E81111A6D /* Pods-tf_simple_example.release.xcconfig */,
+ );
+ name = Pods;
+ sourceTree = "<group>";
+ };
+ 591157921CF4011C00C31E3A = {
+ isa = PBXGroup;
+ children = (
+ 59A3CFF11CF4E68100C4259F /* AppDelegate.h */,
+ 59A3CFF21CF4E68100C4259F /* AppDelegate.mm */,
+ 59A3CFF31CF4E68100C4259F /* data */,
+ 59A3CFFA1CF4E68100C4259F /* ios_image_load.h */,
+ 59A3CFFB1CF4E68100C4259F /* ios_image_load.mm */,
+ 59A3CFFC1CF4E68100C4259F /* main.mm */,
+ 59A3CFFD1CF4E68100C4259F /* RunModel-Info.plist */,
+ 59A3CFFE1CF4E68100C4259F /* RunModelViewController.h */,
+ 59A3CFFF1CF4E68100C4259F /* RunModelViewController.mm */,
+ 59A3D0001CF4E68100C4259F /* RunModelViewController.xib */,
+ 5911579C1CF4011C00C31E3A /* Products */,
+ 3E9FC355632FB928EA23BEED /* Pods */,
+ 24D7686C331131624F4454A0 /* Frameworks */,
+ );
+ sourceTree = "<group>";
+ };
+ 5911579C1CF4011C00C31E3A /* Products */ = {
+ isa = PBXGroup;
+ children = (
+ 5911579B1CF4011C00C31E3A /* tf_simple_example.app */,
+ );
+ name = Products;
+ sourceTree = "<group>";
+ };
+ 59A3CFF31CF4E68100C4259F /* data */ = {
+ isa = PBXGroup;
+ children = (
+ 59A3CFF51CF4E68100C4259F /* grace_hopper.jpg */,
+ 59A3CFF71CF4E68100C4259F /* imagenet_comp_graph_label_strings.txt */,
+ 59A3CFF91CF4E68100C4259F /* tensorflow_inception_graph.pb */,
+ );
+ path = data;
+ sourceTree = "<group>";
+ };
+/* End PBXGroup section */
+
+/* Begin PBXNativeTarget section */
+ 5911579A1CF4011C00C31E3A /* tf_simple_example */ = {
+ isa = PBXNativeTarget;
+ buildConfigurationList = 591157B21CF4011D00C31E3A /* Build configuration list for PBXNativeTarget "tf_simple_example" */;
+ buildPhases = (
+ 1CD07C1CEB04E50C5975C7BB /* [CP] Check Pods Manifest.lock */,
+ 591157971CF4011C00C31E3A /* Sources */,
+ 591157981CF4011C00C31E3A /* Frameworks */,
+ 591157991CF4011C00C31E3A /* Resources */,
+ 0EABEF9F31578BDA8CA9D2A7 /* [CP] Embed Pods Frameworks */,
+ 96DDF9E6E35958387A215092 /* [CP] Copy Pods Resources */,
+ );
+ buildRules = (
+ );
+ dependencies = (
+ );
+ name = tf_simple_example;
+ productName = tf_ios_makefile_example;
+ productReference = 5911579B1CF4011C00C31E3A /* tf_simple_example.app */;
+ productType = "com.apple.product-type.application";
+ };
+/* End PBXNativeTarget section */
+
+/* Begin PBXProject section */
+ 591157931CF4011C00C31E3A /* Project object */ = {
+ isa = PBXProject;
+ attributes = {
+ LastUpgradeCheck = 0830;
+ ORGANIZATIONNAME = Google;
+ TargetAttributes = {
+ 5911579A1CF4011C00C31E3A = {
+ CreatedOnToolsVersion = 7.2;
+ DevelopmentTeam = 85Z3VXS37U;
+ };
+ };
+ };
+ buildConfigurationList = 591157961CF4011C00C31E3A /* Build configuration list for PBXProject "tf_simple_example" */;
+ compatibilityVersion = "Xcode 3.2";
+ developmentRegion = English;
+ hasScannedForEncodings = 0;
+ knownRegions = (
+ en,
+ Base,
+ );
+ mainGroup = 591157921CF4011C00C31E3A;
+ productRefGroup = 5911579C1CF4011C00C31E3A /* Products */;
+ projectDirPath = "";
+ projectRoot = "";
+ targets = (
+ 5911579A1CF4011C00C31E3A /* tf_simple_example */,
+ );
+ };
+/* End PBXProject section */
+
+/* Begin PBXResourcesBuildPhase section */
+ 591157991CF4011C00C31E3A /* Resources */ = {
+ isa = PBXResourcesBuildPhase;
+ buildActionMask = 2147483647;
+ files = (
+ 59A3D00C1CF4E68100C4259F /* RunModelViewController.xib in Resources */,
+ 59A3D0051CF4E68100C4259F /* imagenet_comp_graph_label_strings.txt in Resources */,
+ 59A3D0071CF4E68100C4259F /* tensorflow_inception_graph.pb in Resources */,
+ 59A3D0031CF4E68100C4259F /* grace_hopper.jpg in Resources */,
+ );
+ runOnlyForDeploymentPostprocessing = 0;
+ };
+/* End PBXResourcesBuildPhase section */
+
+/* Begin PBXShellScriptBuildPhase section */
+ 0EABEF9F31578BDA8CA9D2A7 /* [CP] Embed Pods Frameworks */ = {
+ isa = PBXShellScriptBuildPhase;
+ buildActionMask = 2147483647;
+ files = (
+ );
+ inputPaths = (
+ );
+ name = "[CP] Embed Pods Frameworks";
+ outputPaths = (
+ );
+ runOnlyForDeploymentPostprocessing = 0;
+ shellPath = /bin/sh;
+ shellScript = "\"${SRCROOT}/Pods/Target Support Files/Pods-tf_simple_example/Pods-tf_simple_example-frameworks.sh\"\n";
+ showEnvVarsInLog = 0;
+ };
+ 1CD07C1CEB04E50C5975C7BB /* [CP] Check Pods Manifest.lock */ = {
+ isa = PBXShellScriptBuildPhase;
+ buildActionMask = 2147483647;
+ files = (
+ );
+ inputPaths = (
+ );
+ name = "[CP] Check Pods Manifest.lock";
+ outputPaths = (
+ );
+ runOnlyForDeploymentPostprocessing = 0;
+ shellPath = /bin/sh;
+ shellScript = "diff \"${PODS_PODFILE_DIR_PATH}/Podfile.lock\" \"${PODS_ROOT}/Manifest.lock\" > /dev/null\nif [ $? != 0 ] ; then\n # print error to STDERR\n echo \"error: The sandbox is not in sync with the Podfile.lock. Run 'pod install' or update your CocoaPods installation.\" >&2\n exit 1\nfi\n";
+ showEnvVarsInLog = 0;
+ };
+ 96DDF9E6E35958387A215092 /* [CP] Copy Pods Resources */ = {
+ isa = PBXShellScriptBuildPhase;
+ buildActionMask = 2147483647;
+ files = (
+ );
+ inputPaths = (
+ );
+ name = "[CP] Copy Pods Resources";
+ outputPaths = (
+ );
+ runOnlyForDeploymentPostprocessing = 0;
+ shellPath = /bin/sh;
+ shellScript = "\"${SRCROOT}/Pods/Target Support Files/Pods-tf_simple_example/Pods-tf_simple_example-resources.sh\"\n";
+ showEnvVarsInLog = 0;
+ };
+/* End PBXShellScriptBuildPhase section */
+
+/* Begin PBXSourcesBuildPhase section */
+ 591157971CF4011C00C31E3A /* Sources */ = {
+ isa = PBXSourcesBuildPhase;
+ buildActionMask = 2147483647;
+ files = (
+ 59A3D0091CF4E68100C4259F /* main.mm in Sources */,
+ 59A3D0011CF4E68100C4259F /* AppDelegate.mm in Sources */,
+ 59A3D00B1CF4E68100C4259F /* RunModelViewController.mm in Sources */,
+ 59A3D0081CF4E68100C4259F /* ios_image_load.mm in Sources */,
+ );
+ runOnlyForDeploymentPostprocessing = 0;
+ };
+/* End PBXSourcesBuildPhase section */
+
+/* Begin XCBuildConfiguration section */
+ 591157B01CF4011D00C31E3A /* Debug */ = {
+ isa = XCBuildConfiguration;
+ buildSettings = {
+ ALWAYS_SEARCH_USER_PATHS = NO;
+ CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x";
+ CLANG_CXX_LIBRARY = "libc++";
+ CLANG_ENABLE_MODULES = YES;
+ CLANG_ENABLE_OBJC_ARC = YES;
+ CLANG_WARN_BOOL_CONVERSION = YES;
+ CLANG_WARN_CONSTANT_CONVERSION = YES;
+ CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
+ CLANG_WARN_EMPTY_BODY = YES;
+ CLANG_WARN_ENUM_CONVERSION = YES;
+ CLANG_WARN_INFINITE_RECURSION = YES;
+ CLANG_WARN_INT_CONVERSION = YES;
+ CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
+ CLANG_WARN_SUSPICIOUS_MOVE = YES;
+ CLANG_WARN_UNREACHABLE_CODE = YES;
+ CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
+ "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer";
+ COPY_PHASE_STRIP = NO;
+ DEBUG_INFORMATION_FORMAT = dwarf;
+ ENABLE_STRICT_OBJC_MSGSEND = YES;
+ ENABLE_TESTABILITY = YES;
+ GCC_C_LANGUAGE_STANDARD = gnu99;
+ GCC_DYNAMIC_NO_PIC = NO;
+ GCC_NO_COMMON_BLOCKS = YES;
+ GCC_OPTIMIZATION_LEVEL = 0;
+ GCC_PREPROCESSOR_DEFINITIONS = (
+ "DEBUG=1",
+ "$(inherited)",
+ );
+ GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
+ GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
+ GCC_WARN_UNDECLARED_SELECTOR = YES;
+ GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
+ GCC_WARN_UNUSED_FUNCTION = YES;
+ GCC_WARN_UNUSED_VARIABLE = YES;
+ IPHONEOS_DEPLOYMENT_TARGET = 8.0;
+ MTL_ENABLE_DEBUG_INFO = YES;
+ ONLY_ACTIVE_ARCH = YES;
+ SDKROOT = iphoneos;
+ TARGETED_DEVICE_FAMILY = "1,2";
+ };
+ name = Debug;
+ };
+ 591157B11CF4011D00C31E3A /* Release */ = {
+ isa = XCBuildConfiguration;
+ buildSettings = {
+ ALWAYS_SEARCH_USER_PATHS = NO;
+ CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x";
+ CLANG_CXX_LIBRARY = "libc++";
+ CLANG_ENABLE_MODULES = YES;
+ CLANG_ENABLE_OBJC_ARC = YES;
+ CLANG_WARN_BOOL_CONVERSION = YES;
+ CLANG_WARN_CONSTANT_CONVERSION = YES;
+ CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
+ CLANG_WARN_EMPTY_BODY = YES;
+ CLANG_WARN_ENUM_CONVERSION = YES;
+ CLANG_WARN_INFINITE_RECURSION = YES;
+ CLANG_WARN_INT_CONVERSION = YES;
+ CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
+ CLANG_WARN_SUSPICIOUS_MOVE = YES;
+ CLANG_WARN_UNREACHABLE_CODE = YES;
+ CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
+ "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer";
+ COPY_PHASE_STRIP = NO;
+ DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym";
+ ENABLE_NS_ASSERTIONS = NO;
+ ENABLE_STRICT_OBJC_MSGSEND = YES;
+ GCC_C_LANGUAGE_STANDARD = gnu99;
+ GCC_NO_COMMON_BLOCKS = YES;
+ GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
+ GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
+ GCC_WARN_UNDECLARED_SELECTOR = YES;
+ GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
+ GCC_WARN_UNUSED_FUNCTION = YES;
+ GCC_WARN_UNUSED_VARIABLE = YES;
+ IPHONEOS_DEPLOYMENT_TARGET = 8.0;
+ MTL_ENABLE_DEBUG_INFO = NO;
+ SDKROOT = iphoneos;
+ TARGETED_DEVICE_FAMILY = "1,2";
+ VALIDATE_PRODUCT = YES;
+ };
+ name = Release;
+ };
+ 591157B31CF4011D00C31E3A /* Debug */ = {
+ isa = XCBuildConfiguration;
+ baseConfigurationReference = 8C94FEE43FD467468C5B75AA /* Pods-tf_simple_example.debug.xcconfig */;
+ buildSettings = {
+ CLANG_DEBUG_INFORMATION_LEVEL = default;
+ CODE_SIGN_IDENTITY = "iPhone Developer";
+ ENABLE_BITCODE = NO;
+ GCC_ENABLE_CPP_EXCEPTIONS = YES;
+ GCC_ENABLE_CPP_RTTI = YES;
+ HEADER_SEARCH_PATHS = "$(inherited)";
+ INFOPLIST_FILE = "$(SRCROOT)/RunModel-Info.plist";
+ IPHONEOS_DEPLOYMENT_TARGET = 9.2;
+ LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks";
+ LIBRARY_SEARCH_PATHS = "";
+ OTHER_CPLUSPLUSFLAGS = "$(OTHER_CFLAGS)";
+ OTHER_LDFLAGS = "$(inherited)";
+ PRODUCT_BUNDLE_IDENTIFIER = "com.google.tf-simple-example";
+ PRODUCT_NAME = "$(TARGET_NAME)";
+ SEPARATE_STRIP = NO;
+ };
+ name = Debug;
+ };
+ 591157B41CF4011D00C31E3A /* Release */ = {
+ isa = XCBuildConfiguration;
+ baseConfigurationReference = 87ABECA6543FF90E81111A6D /* Pods-tf_simple_example.release.xcconfig */;
+ buildSettings = {
+ CLANG_DEBUG_INFORMATION_LEVEL = default;
+ CODE_SIGN_IDENTITY = "iPhone Developer";
+ ENABLE_BITCODE = NO;
+ GCC_ENABLE_CPP_EXCEPTIONS = YES;
+ GCC_ENABLE_CPP_RTTI = YES;
+ HEADER_SEARCH_PATHS = "$(inherited)";
+ INFOPLIST_FILE = "$(SRCROOT)/RunModel-Info.plist";
+ IPHONEOS_DEPLOYMENT_TARGET = 9.2;
+ LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks";
+ LIBRARY_SEARCH_PATHS = "";
+ ONLY_ACTIVE_ARCH = YES;
+ OTHER_CPLUSPLUSFLAGS = "$(OTHER_CFLAGS)";
+ OTHER_LDFLAGS = "$(inherited)";
+ PRODUCT_BUNDLE_IDENTIFIER = "com.google.tf-simple-example";
+ PRODUCT_NAME = "$(TARGET_NAME)";
+ SEPARATE_STRIP = NO;
+ };
+ name = Release;
+ };
+/* End XCBuildConfiguration section */
+
+/* Begin XCConfigurationList section */
+ 591157961CF4011C00C31E3A /* Build configuration list for PBXProject "tf_simple_example" */ = {
+ isa = XCConfigurationList;
+ buildConfigurations = (
+ 591157B01CF4011D00C31E3A /* Debug */,
+ 591157B11CF4011D00C31E3A /* Release */,
+ );
+ defaultConfigurationIsVisible = 0;
+ defaultConfigurationName = Release;
+ };
+ 591157B21CF4011D00C31E3A /* Build configuration list for PBXNativeTarget "tf_simple_example" */ = {
+ isa = XCConfigurationList;
+ buildConfigurations = (
+ 591157B31CF4011D00C31E3A /* Debug */,
+ 591157B41CF4011D00C31E3A /* Release */,
+ );
+ defaultConfigurationIsVisible = 0;
+ defaultConfigurationName = Release;
+ };
+/* End XCConfigurationList section */
+ };
+ rootObject = 591157931CF4011C00C31E3A /* Project object */;
+}