author     Pete Warden <pete@petewarden.com>  2016-06-02 15:11:08 -0700
committer  Pete Warden <pete@petewarden.com>  2016-06-02 15:11:08 -0700
commit     5508516010b539709da6a75bfbe5a3fd1a944b75 (patch)
tree       77b5bb8a058286990db9e6ceeee04978ecdeed75
parent     79174afa30046ecdc437b531812f2cb41a32695e (diff)
Added iOS example using the camera (#2613)
* Added Raspberry Pi cross-compilation support to makefile
* Fixed makefile linking problem with Pi
* Added documentation for Pi compilation
* Added live camera iOS example
-rw-r--r--  tensorflow/contrib/ios_examples/.gitignore  4
-rw-r--r--  tensorflow/contrib/ios_examples/camera/CameraExampleAppDelegate.h  21
-rw-r--r--  tensorflow/contrib/ios_examples/camera/CameraExampleAppDelegate.m  44
-rw-r--r--  tensorflow/contrib/ios_examples/camera/CameraExampleViewController.h  45
-rw-r--r--  tensorflow/contrib/ios_examples/camera/CameraExampleViewController.mm  604
-rw-r--r--  tensorflow/contrib/ios_examples/camera/Info.plist  40
-rw-r--r--  tensorflow/contrib/ios_examples/camera/camera_example.xcodeproj/project.pbxproj  397
-rw-r--r--  tensorflow/contrib/ios_examples/camera/data/grace_hopper.jpg (renamed from tensorflow/contrib/ios_example/data/grace_hopper.jpg)  bin 73746 -> 73746 bytes
-rw-r--r--  tensorflow/contrib/ios_examples/camera/en.lproj/MainStoryboard_iPhone.storyboard  46
-rw-r--r--  tensorflow/contrib/ios_examples/camera/ios_image_load.h  27
-rw-r--r--  tensorflow/contrib/ios_examples/camera/ios_image_load.mm (renamed from tensorflow/contrib/ios_example/ios_image_load.mm)  0
-rw-r--r--  tensorflow/contrib/ios_examples/camera/main.mm  27
-rw-r--r--  tensorflow/contrib/ios_examples/camera/squarePNG.png  bin 0 -> 9432 bytes
-rw-r--r--  tensorflow/contrib/ios_examples/camera/tensorflow_utils.h  33
-rw-r--r--  tensorflow/contrib/ios_examples/camera/tensorflow_utils.mm  178
-rw-r--r--  tensorflow/contrib/ios_examples/simple/AppDelegate.h (renamed from tensorflow/contrib/ios_example/AppDelegate.h)  0
-rw-r--r--  tensorflow/contrib/ios_examples/simple/AppDelegate.mm (renamed from tensorflow/contrib/ios_example/AppDelegate.mm)  0
-rw-r--r--  tensorflow/contrib/ios_examples/simple/RunModel-Info.plist (renamed from tensorflow/contrib/ios_example/RunModel-Info.plist)  0
-rw-r--r--  tensorflow/contrib/ios_examples/simple/RunModelViewController.h (renamed from tensorflow/contrib/ios_example/RunModelViewController.h)  0
-rw-r--r--  tensorflow/contrib/ios_examples/simple/RunModelViewController.mm (renamed from tensorflow/contrib/ios_example/RunModelViewController.mm)  1
-rw-r--r--  tensorflow/contrib/ios_examples/simple/RunModelViewController.xib (renamed from tensorflow/contrib/ios_example/RunModelViewController.xib)  0
-rw-r--r--  tensorflow/contrib/ios_examples/simple/data/grace_hopper.jpg  bin 0 -> 73746 bytes
-rw-r--r--  tensorflow/contrib/ios_examples/simple/ios_image_load.h (renamed from tensorflow/contrib/ios_example/ios_image_load.h)  0
-rw-r--r--  tensorflow/contrib/ios_examples/simple/ios_image_load.mm  87
-rw-r--r--  tensorflow/contrib/ios_examples/simple/main.mm (renamed from tensorflow/contrib/ios_example/main.mm)  0
-rw-r--r--  tensorflow/contrib/ios_examples/simple/tf_ios_makefile_example.xcodeproj/project.pbxproj (renamed from tensorflow/contrib/ios_example/tf_ios_makefile_example.xcodeproj/project.pbxproj)  44
-rwxr-xr-x  tensorflow/contrib/makefile/compile_ios_tensorflow.sh  10
27 files changed, 1574 insertions(+), 34 deletions(-)
diff --git a/tensorflow/contrib/ios_examples/.gitignore b/tensorflow/contrib/ios_examples/.gitignore
new file mode 100644
index 0000000000..e572b3012c
--- /dev/null
+++ b/tensorflow/contrib/ios_examples/.gitignore
@@ -0,0 +1,4 @@
+project.xcworkspace
+xcuserdata
+imagenet_comp_graph_label_strings.txt
+tensorflow_inception_graph.pb
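
Note: the two ignored model files are the Inception graph and its ImageNet label list. They are not checked into the repository and must be copied into the camera/data folder before building, or the app will fail at launch. As a hedged sketch (the helper name modelFilesArePresent is hypothetical, not part of this commit), a launch-time check could verify the bundle before LoadModel runs:

    static BOOL modelFilesArePresent(void) {
      // Both resources are bundled from the data/ group in the Xcode project.
      NSString *graphPath =
          [[NSBundle mainBundle] pathForResource:@"tensorflow_inception_graph"
                                          ofType:@"pb"];
      NSString *labelsPath = [[NSBundle mainBundle]
          pathForResource:@"imagenet_comp_graph_label_strings"
                   ofType:@"txt"];
      return (graphPath != nil) && (labelsPath != nil);
    }
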
diff --git a/tensorflow/contrib/ios_examples/camera/CameraExampleAppDelegate.h b/tensorflow/contrib/ios_examples/camera/CameraExampleAppDelegate.h
new file mode 100644
index 0000000000..0039d5e7ca
--- /dev/null
+++ b/tensorflow/contrib/ios_examples/camera/CameraExampleAppDelegate.h
@@ -0,0 +1,21 @@
+// Copyright 2015 Google Inc. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#import <UIKit/UIKit.h>
+
+@interface CameraExampleAppDelegate : UIResponder<UIApplicationDelegate>
+
+@property(strong, nonatomic) UIWindow *window;
+
+@end
diff --git a/tensorflow/contrib/ios_examples/camera/CameraExampleAppDelegate.m b/tensorflow/contrib/ios_examples/camera/CameraExampleAppDelegate.m
new file mode 100644
index 0000000000..d134c2b591
--- /dev/null
+++ b/tensorflow/contrib/ios_examples/camera/CameraExampleAppDelegate.m
@@ -0,0 +1,44 @@
+// Copyright 2015 Google Inc. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#import "CameraExampleAppDelegate.h"
+
+@implementation CameraExampleAppDelegate
+
+@synthesize window = _window;
+
+- (BOOL)application:(UIApplication *)application
+ didFinishLaunchingWithOptions:(NSDictionary *)launchOptions {
+ [self.window makeKeyAndVisible];
+ return YES;
+}
+
+- (void)applicationWillResignActive:(UIApplication *)application {
+ [[UIApplication sharedApplication] setIdleTimerDisabled:NO];
+}
+
+- (void)applicationDidEnterBackground:(UIApplication *)application {
+}
+
+- (void)applicationWillEnterForeground:(UIApplication *)application {
+}
+
+- (void)applicationDidBecomeActive:(UIApplication *)application {
+ [[UIApplication sharedApplication] setIdleTimerDisabled:YES];
+}
+
+- (void)applicationWillTerminate:(UIApplication *)application {
+}
+
+@end
diff --git a/tensorflow/contrib/ios_examples/camera/CameraExampleViewController.h b/tensorflow/contrib/ios_examples/camera/CameraExampleViewController.h
new file mode 100644
index 0000000000..1e730a456d
--- /dev/null
+++ b/tensorflow/contrib/ios_examples/camera/CameraExampleViewController.h
@@ -0,0 +1,45 @@
+// Copyright 2015 Google Inc. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#import <AVFoundation/AVFoundation.h>
+#import <UIKit/UIKit.h>
+
+#include <memory>
+#include "tensorflow/core/public/session.h"
+
+@interface CameraExampleViewController
+ : UIViewController<UIGestureRecognizerDelegate,
+ AVCaptureVideoDataOutputSampleBufferDelegate> {
+ IBOutlet UIView *previewView;
+ IBOutlet UISegmentedControl *camerasControl;
+ AVCaptureVideoPreviewLayer *previewLayer;
+ AVCaptureVideoDataOutput *videoDataOutput;
+ dispatch_queue_t videoDataOutputQueue;
+ AVCaptureStillImageOutput *stillImageOutput;
+ UIView *flashView;
+ UIImage *square;
+ BOOL isUsingFrontFacingCamera;
+ AVSpeechSynthesizer *synth;
+ NSMutableDictionary *oldPredictionValues;
+ NSMutableArray *labelLayers;
+ AVCaptureSession *session;
+ std::unique_ptr<tensorflow::Session> tf_session;
+ std::vector<std::string> labels;
+}
+@property(retain, nonatomic) CATextLayer *predictionTextLayer;
+
+- (IBAction)takePicture:(id)sender;
+- (IBAction)switchCameras:(id)sender;
+
+@end
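
Note: the ivar block above freely mixes Objective-C pointers with C++ types (std::unique_ptr, std::vector), which is only legal in Objective-C++; that is why the implementation files in this commit use the .mm extension. The unique_ptr holds a session produced by LoadModel (defined later in this diff), which wraps tensorflow::NewSession roughly like this hedged sketch (MakeSession is a hypothetical name):

    #include <memory>
    #include "tensorflow/core/platform/logging.h"
    #include "tensorflow/core/public/session.h"

    // Creates the session type held by the tf_session ivar above.
    std::unique_ptr<tensorflow::Session> MakeSession() {
      tensorflow::SessionOptions options;
      tensorflow::Session* session_pointer = nullptr;
      tensorflow::Status status =
          tensorflow::NewSession(options, &session_pointer);
      if (!status.ok()) {
        LOG(ERROR) << "Could not create TensorFlow Session: " << status;
        return nullptr;
      }
      // unique_ptr takes ownership; the Session is destroyed automatically
      // when the owning object goes away.
      return std::unique_ptr<tensorflow::Session>(session_pointer);
    }
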
diff --git a/tensorflow/contrib/ios_examples/camera/CameraExampleViewController.mm b/tensorflow/contrib/ios_examples/camera/CameraExampleViewController.mm
new file mode 100644
index 0000000000..c529a2e171
--- /dev/null
+++ b/tensorflow/contrib/ios_examples/camera/CameraExampleViewController.mm
@@ -0,0 +1,604 @@
+// Copyright 2015 Google Inc. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#import <AssertMacros.h>
+#import <AssetsLibrary/AssetsLibrary.h>
+#import <CoreImage/CoreImage.h>
+#import <ImageIO/ImageIO.h>
+#import "CameraExampleViewController.h"
+
+#include <sys/time.h>
+
+#include "tensorflow_utils.h"
+
+static NSString *const AVCaptureStillImageIsCapturingStillImageContext =
+    @"AVCaptureStillImageIsCapturingStillImageContext";
+
+@interface CameraExampleViewController (InternalMethods)
+- (void)setupAVCapture;
+- (void)teardownAVCapture;
+@end
+
+@implementation CameraExampleViewController
+
+- (void)setupAVCapture {
+ NSError *error = nil;
+
+ session = [AVCaptureSession new];
+ if ([[UIDevice currentDevice] userInterfaceIdiom] ==
+ UIUserInterfaceIdiomPhone)
+ [session setSessionPreset:AVCaptureSessionPreset640x480];
+ else
+ [session setSessionPreset:AVCaptureSessionPresetPhoto];
+
+ AVCaptureDevice *device =
+ [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
+ AVCaptureDeviceInput *deviceInput =
+ [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
+ assert(error == nil);
+
+ isUsingFrontFacingCamera = NO;
+ if ([session canAddInput:deviceInput]) [session addInput:deviceInput];
+
+ stillImageOutput = [AVCaptureStillImageOutput new];
+ [stillImageOutput
+ addObserver:self
+ forKeyPath:@"capturingStillImage"
+ options:NSKeyValueObservingOptionNew
+ context:(void *)(AVCaptureStillImageIsCapturingStillImageContext)];
+ if ([session canAddOutput:stillImageOutput])
+ [session addOutput:stillImageOutput];
+
+ videoDataOutput = [AVCaptureVideoDataOutput new];
+
+ NSDictionary *rgbOutputSettings = [NSDictionary
+ dictionaryWithObject:[NSNumber numberWithInt:kCMPixelFormat_32BGRA]
+ forKey:(id)kCVPixelBufferPixelFormatTypeKey];
+ [videoDataOutput setVideoSettings:rgbOutputSettings];
+ [videoDataOutput setAlwaysDiscardsLateVideoFrames:YES];
+ videoDataOutputQueue =
+ dispatch_queue_create("VideoDataOutputQueue", DISPATCH_QUEUE_SERIAL);
+ [videoDataOutput setSampleBufferDelegate:self queue:videoDataOutputQueue];
+
+ if ([session canAddOutput:videoDataOutput])
+ [session addOutput:videoDataOutput];
+ [[videoDataOutput connectionWithMediaType:AVMediaTypeVideo] setEnabled:YES];
+
+ previewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:session];
+ [previewLayer setBackgroundColor:[[UIColor blackColor] CGColor]];
+ [previewLayer setVideoGravity:AVLayerVideoGravityResizeAspect];
+ CALayer *rootLayer = [previewView layer];
+ [rootLayer setMasksToBounds:YES];
+ [previewLayer setFrame:[rootLayer bounds]];
+ [rootLayer addSublayer:previewLayer];
+ [session startRunning];
+
+ [session release];
+ if (error) {
+ UIAlertView *alertView = [[UIAlertView alloc]
+ initWithTitle:[NSString stringWithFormat:@"Failed with error %d",
+ (int)[error code]]
+ message:[error localizedDescription]
+ delegate:nil
+ cancelButtonTitle:@"Dismiss"
+ otherButtonTitles:nil];
+ [alertView show];
+ [alertView release];
+ [self teardownAVCapture];
+ }
+}
+
+- (void)teardownAVCapture {
+ [videoDataOutput release];
+ if (videoDataOutputQueue) dispatch_release(videoDataOutputQueue);
+  [stillImageOutput removeObserver:self forKeyPath:@"capturingStillImage"];
+ [stillImageOutput release];
+ [previewLayer removeFromSuperlayer];
+ [previewLayer release];
+}
+
+- (void)observeValueForKeyPath:(NSString *)keyPath
+ ofObject:(id)object
+ change:(NSDictionary *)change
+ context:(void *)context {
+ if (context == AVCaptureStillImageIsCapturingStillImageContext) {
+ BOOL isCapturingStillImage =
+ [[change objectForKey:NSKeyValueChangeNewKey] boolValue];
+
+ if (isCapturingStillImage) {
+ // do flash bulb like animation
+ flashView = [[UIView alloc] initWithFrame:[previewView frame]];
+ [flashView setBackgroundColor:[UIColor whiteColor]];
+ [flashView setAlpha:0.f];
+ [[[self view] window] addSubview:flashView];
+
+ [UIView animateWithDuration:.4f
+ animations:^{
+ [flashView setAlpha:1.f];
+ }];
+ } else {
+ [UIView animateWithDuration:.4f
+ animations:^{
+ [flashView setAlpha:0.f];
+ }
+ completion:^(BOOL finished) {
+ [flashView removeFromSuperview];
+ [flashView release];
+ flashView = nil;
+ }];
+ }
+ }
+}
+
+- (AVCaptureVideoOrientation)avOrientationForDeviceOrientation:
+ (UIDeviceOrientation)deviceOrientation {
+ AVCaptureVideoOrientation result =
+ (AVCaptureVideoOrientation)(deviceOrientation);
+ if (deviceOrientation == UIDeviceOrientationLandscapeLeft)
+ result = AVCaptureVideoOrientationLandscapeRight;
+ else if (deviceOrientation == UIDeviceOrientationLandscapeRight)
+ result = AVCaptureVideoOrientationLandscapeLeft;
+ return result;
+}
+
+- (IBAction)takePicture:(id)sender {
+ if ([session isRunning]) {
+ [session stopRunning];
+ [sender setTitle:@"Continue" forState:UIControlStateNormal];
+
+ flashView = [[UIView alloc] initWithFrame:[previewView frame]];
+ [flashView setBackgroundColor:[UIColor whiteColor]];
+ [flashView setAlpha:0.f];
+ [[[self view] window] addSubview:flashView];
+
+ [UIView animateWithDuration:.2f
+ animations:^{
+ [flashView setAlpha:1.f];
+ }
+ completion:^(BOOL finished) {
+ [UIView animateWithDuration:.2f
+ animations:^{
+ [flashView setAlpha:0.f];
+ }
+ completion:^(BOOL finished) {
+ [flashView removeFromSuperview];
+ [flashView release];
+ flashView = nil;
+ }];
+ }];
+
+ } else {
+ [session startRunning];
+ [sender setTitle:@"Freeze Frame" forState:UIControlStateNormal];
+ }
+}
+
++ (CGRect)videoPreviewBoxForGravity:(NSString *)gravity
+ frameSize:(CGSize)frameSize
+ apertureSize:(CGSize)apertureSize {
+ CGFloat apertureRatio = apertureSize.height / apertureSize.width;
+ CGFloat viewRatio = frameSize.width / frameSize.height;
+
+ CGSize size = CGSizeZero;
+ if ([gravity isEqualToString:AVLayerVideoGravityResizeAspectFill]) {
+ if (viewRatio > apertureRatio) {
+ size.width = frameSize.width;
+ size.height =
+ apertureSize.width * (frameSize.width / apertureSize.height);
+ } else {
+ size.width =
+ apertureSize.height * (frameSize.height / apertureSize.width);
+ size.height = frameSize.height;
+ }
+ } else if ([gravity isEqualToString:AVLayerVideoGravityResizeAspect]) {
+ if (viewRatio > apertureRatio) {
+ size.width =
+ apertureSize.height * (frameSize.height / apertureSize.width);
+ size.height = frameSize.height;
+ } else {
+ size.width = frameSize.width;
+ size.height =
+ apertureSize.width * (frameSize.width / apertureSize.height);
+ }
+ } else if ([gravity isEqualToString:AVLayerVideoGravityResize]) {
+ size.width = frameSize.width;
+ size.height = frameSize.height;
+ }
+
+ CGRect videoBox;
+ videoBox.size = size;
+ if (size.width < frameSize.width)
+ videoBox.origin.x = (frameSize.width - size.width) / 2;
+ else
+ videoBox.origin.x = (size.width - frameSize.width) / 2;
+
+ if (size.height < frameSize.height)
+ videoBox.origin.y = (frameSize.height - size.height) / 2;
+ else
+ videoBox.origin.y = (size.height - frameSize.height) / 2;
+
+ return videoBox;
+}
+
+- (void)captureOutput:(AVCaptureOutput *)captureOutput
+didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
+ fromConnection:(AVCaptureConnection *)connection {
+ CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
+ [self runCNNOnFrame:pixelBuffer];
+}
+
+- (void)runCNNOnFrame:(CVPixelBufferRef)pixelBuffer {
+ assert(pixelBuffer != NULL);
+
+ OSType sourcePixelFormat = CVPixelBufferGetPixelFormatType(pixelBuffer);
+ int doReverseChannels;
+ if (kCVPixelFormatType_32ARGB == sourcePixelFormat) {
+ doReverseChannels = 1;
+ } else if (kCVPixelFormatType_32BGRA == sourcePixelFormat) {
+ doReverseChannels = 0;
+ } else {
+ assert(false); // Unknown source format
+ }
+
+ const int sourceRowBytes = (int)CVPixelBufferGetBytesPerRow(pixelBuffer);
+ const int image_width = (int)CVPixelBufferGetWidth(pixelBuffer);
+ const int fullHeight = (int)CVPixelBufferGetHeight(pixelBuffer);
+ CVPixelBufferLockBaseAddress(pixelBuffer, 0);
+ unsigned char *sourceBaseAddr =
+ (unsigned char *)(CVPixelBufferGetBaseAddress(pixelBuffer));
+ int image_height;
+ unsigned char *sourceStartAddr;
+ if (fullHeight <= image_width) {
+ image_height = fullHeight;
+ sourceStartAddr = sourceBaseAddr;
+ } else {
+ image_height = image_width;
+ const int marginY = ((fullHeight - image_width) / 2);
+ sourceStartAddr = (sourceBaseAddr + (marginY * sourceRowBytes));
+ }
+ const int image_channels = 4;
+
+ const int wanted_width = 224;
+ const int wanted_height = 224;
+ const int wanted_channels = 3;
+ const float input_mean = 117.0f;
+ const float input_std = 1.0f;
+ assert(image_channels >= wanted_channels);
+ tensorflow::Tensor image_tensor(
+ tensorflow::DT_FLOAT,
+ tensorflow::TensorShape(
+ {1, wanted_height, wanted_width, wanted_channels}));
+ auto image_tensor_mapped = image_tensor.tensor<float, 4>();
+ tensorflow::uint8 *in = sourceStartAddr;
+ float *out = image_tensor_mapped.data();
+ for (int y = 0; y < wanted_height; ++y) {
+ float *out_row = out + (y * wanted_width * wanted_channels);
+ for (int x = 0; x < wanted_width; ++x) {
+ const int in_x = (y * image_width) / wanted_width;
+ const int in_y = (x * image_height) / wanted_height;
+ tensorflow::uint8 *in_pixel =
+ in + (in_y * image_width * image_channels) + (in_x * image_channels);
+ float *out_pixel = out_row + (x * wanted_channels);
+ for (int c = 0; c < wanted_channels; ++c) {
+ out_pixel[c] = (in_pixel[c] / input_std) - input_mean;
+ }
+ }
+ }
+
+ if (tf_session.get()) {
+ std::string input_layer = "input";
+ std::string output_layer = "output";
+ std::vector<tensorflow::Tensor> outputs;
+ tensorflow::Status run_status = tf_session->Run(
+ {{input_layer, image_tensor}}, {output_layer}, {}, &outputs);
+ if (!run_status.ok()) {
+ LOG(ERROR) << "Running model failed:" << run_status;
+ } else {
+ tensorflow::Tensor *output = &outputs[0];
+ auto predictions = output->flat<float>();
+
+ NSMutableDictionary *newValues = [NSMutableDictionary dictionary];
+ for (int index = 0; index < predictions.size(); index += 1) {
+ const float predictionValue = predictions(index);
+ if (predictionValue > 0.05f) {
+ std::string label = labels[index % predictions.size()];
+          NSString *labelObject = [NSString stringWithUTF8String:label.c_str()];
+ NSNumber *valueObject = [NSNumber numberWithFloat:predictionValue];
+ [newValues setObject:valueObject forKey:labelObject];
+ }
+ }
+ dispatch_async(dispatch_get_main_queue(), ^(void) {
+ [self setPredictionValues:newValues];
+ });
+ }
+ }
+}
+
+- (void)dealloc {
+ [self teardownAVCapture];
+ [square release];
+ [super dealloc];
+}
+
+// use front/back camera
+- (IBAction)switchCameras:(id)sender {
+ AVCaptureDevicePosition desiredPosition;
+ if (isUsingFrontFacingCamera)
+ desiredPosition = AVCaptureDevicePositionBack;
+ else
+ desiredPosition = AVCaptureDevicePositionFront;
+
+ for (AVCaptureDevice *d in
+ [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) {
+ if ([d position] == desiredPosition) {
+ [[previewLayer session] beginConfiguration];
+ AVCaptureDeviceInput *input =
+ [AVCaptureDeviceInput deviceInputWithDevice:d error:nil];
+ for (AVCaptureInput *oldInput in [[previewLayer session] inputs]) {
+ [[previewLayer session] removeInput:oldInput];
+ }
+ [[previewLayer session] addInput:input];
+ [[previewLayer session] commitConfiguration];
+ break;
+ }
+ }
+ isUsingFrontFacingCamera = !isUsingFrontFacingCamera;
+}
+
+- (void)didReceiveMemoryWarning {
+ [super didReceiveMemoryWarning];
+}
+
+- (void)viewDidLoad {
+ [super viewDidLoad];
+ [self setupAVCapture];
+ square = [[UIImage imageNamed:@"squarePNG"] retain];
+ synth = [[AVSpeechSynthesizer alloc] init];
+ labelLayers = [[NSMutableArray alloc] init];
+ oldPredictionValues = [[NSMutableDictionary alloc] init];
+ tensorflow::Status load_status =
+ LoadModel(@"tensorflow_inception_graph", @"pb", &tf_session);
+ if (!load_status.ok()) {
+ LOG(FATAL) << "Couldn't load model: " << load_status;
+ }
+
+ tensorflow::Status labels_status =
+ LoadLabels(@"imagenet_comp_graph_label_strings", @"txt", &labels);
+ if (!labels_status.ok()) {
+ LOG(FATAL) << "Couldn't load labels: " << labels_status;
+ }
+}
+
+- (void)viewDidUnload {
+ [super viewDidUnload];
+ [oldPredictionValues release];
+}
+
+- (void)viewWillAppear:(BOOL)animated {
+ [super viewWillAppear:animated];
+}
+
+- (void)viewDidAppear:(BOOL)animated {
+ [super viewDidAppear:animated];
+}
+
+- (void)viewWillDisappear:(BOOL)animated {
+ [super viewWillDisappear:animated];
+}
+
+- (void)viewDidDisappear:(BOOL)animated {
+ [super viewDidDisappear:animated];
+}
+
+- (BOOL)shouldAutorotateToInterfaceOrientation:
+ (UIInterfaceOrientation)interfaceOrientation {
+ return (interfaceOrientation == UIInterfaceOrientationPortrait);
+}
+
+- (BOOL)prefersStatusBarHidden {
+ return YES;
+}
+
+- (void)setPredictionValues:(NSDictionary *)newValues {
+ const float decayValue = 0.75f;
+ const float updateValue = 0.25f;
+ const float minimumThreshold = 0.01f;
+
+ NSMutableDictionary *decayedPredictionValues =
+ [[NSMutableDictionary alloc] init];
+ for (NSString *label in oldPredictionValues) {
+ NSNumber *oldPredictionValueObject =
+ [oldPredictionValues objectForKey:label];
+ const float oldPredictionValue = [oldPredictionValueObject floatValue];
+ const float decayedPredictionValue = (oldPredictionValue * decayValue);
+ if (decayedPredictionValue > minimumThreshold) {
+ NSNumber *decayedPredictionValueObject =
+ [NSNumber numberWithFloat:decayedPredictionValue];
+ [decayedPredictionValues setObject:decayedPredictionValueObject
+ forKey:label];
+ }
+ }
+ [oldPredictionValues release];
+ oldPredictionValues = decayedPredictionValues;
+
+ for (NSString *label in newValues) {
+ NSNumber *newPredictionValueObject = [newValues objectForKey:label];
+ NSNumber *oldPredictionValueObject =
+ [oldPredictionValues objectForKey:label];
+ if (!oldPredictionValueObject) {
+ oldPredictionValueObject = [NSNumber numberWithFloat:0.0f];
+ }
+ const float newPredictionValue = [newPredictionValueObject floatValue];
+ const float oldPredictionValue = [oldPredictionValueObject floatValue];
+ const float updatedPredictionValue =
+ (oldPredictionValue + (newPredictionValue * updateValue));
+ NSNumber *updatedPredictionValueObject =
+ [NSNumber numberWithFloat:updatedPredictionValue];
+ [oldPredictionValues setObject:updatedPredictionValueObject forKey:label];
+ }
+  NSMutableArray *candidateLabels = [NSMutableArray array];
+ for (NSString *label in oldPredictionValues) {
+ NSNumber *oldPredictionValueObject =
+ [oldPredictionValues objectForKey:label];
+ const float oldPredictionValue = [oldPredictionValueObject floatValue];
+ if (oldPredictionValue > 0.05f) {
+ NSDictionary *entry = @{
+ @"label" : label,
+ @"value" : oldPredictionValueObject
+ };
+      [candidateLabels addObject:entry];
+ }
+ }
+ NSSortDescriptor *sort =
+ [NSSortDescriptor sortDescriptorWithKey:@"value" ascending:NO];
+ NSArray *sortedLabels = [candidateLabels
+ sortedArrayUsingDescriptors:[NSArray arrayWithObject:sort]];
+
+ const float leftMargin = 10.0f;
+ const float topMargin = 10.0f;
+
+ const float valueWidth = 48.0f;
+ const float valueHeight = 26.0f;
+
+ const float labelWidth = 246.0f;
+ const float labelHeight = 26.0f;
+
+ const float labelMarginX = 5.0f;
+ const float labelMarginY = 5.0f;
+
+ [self removeAllLabelLayers];
+
+ int labelCount = 0;
+ for (NSDictionary *entry in sortedLabels) {
+ NSString *label = [entry objectForKey:@"label"];
+ NSNumber *valueObject = [entry objectForKey:@"value"];
+ const float value = [valueObject floatValue];
+
+ const float originY =
+ (topMargin + ((labelHeight + labelMarginY) * labelCount));
+
+ const int valuePercentage = (int)roundf(value * 100.0f);
+
+ const float valueOriginX = leftMargin;
+ NSString *valueText = [NSString stringWithFormat:@"%d%%", valuePercentage];
+
+ [self addLabelLayerWithText:valueText
+ originX:valueOriginX
+ originY:originY
+ width:valueWidth
+ height:valueHeight
+ alignment:kCAAlignmentRight];
+
+ const float labelOriginX = (leftMargin + valueWidth + labelMarginX);
+
+ [self addLabelLayerWithText:[label capitalizedString]
+ originX:labelOriginX
+ originY:originY
+ width:labelWidth
+ height:labelHeight
+ alignment:kCAAlignmentLeft];
+
+ if ((labelCount == 0) && (value > 0.5f)) {
+ [self speak:[label capitalizedString]];
+ }
+
+ labelCount += 1;
+ if (labelCount > 4) {
+ break;
+ }
+ }
+}
+
+- (void)removeAllLabelLayers {
+ for (CATextLayer *layer in labelLayers) {
+ [layer removeFromSuperlayer];
+ }
+ [labelLayers removeAllObjects];
+}
+
+- (void)addLabelLayerWithText:(NSString *)text
+ originX:(float)originX
+ originY:(float)originY
+ width:(float)width
+ height:(float)height
+ alignment:(NSString *)alignment {
+ NSString *const font = @"Menlo-Regular";
+ const float fontSize = 20.0f;
+
+ const float marginSizeX = 5.0f;
+ const float marginSizeY = 2.0f;
+
+ const CGRect backgroundBounds = CGRectMake(originX, originY, width, height);
+
+ const CGRect textBounds =
+ CGRectMake((originX + marginSizeX), (originY + marginSizeY),
+ (width - (marginSizeX * 2)), (height - (marginSizeY * 2)));
+
+ CATextLayer *background = [CATextLayer layer];
+ [background setBackgroundColor:[UIColor blackColor].CGColor];
+ [background setOpacity:0.5f];
+ [background setFrame:backgroundBounds];
+ background.cornerRadius = 5.0f;
+
+ [[self.view layer] addSublayer:background];
+ [labelLayers addObject:background];
+
+ CATextLayer *layer = [CATextLayer layer];
+ [layer setForegroundColor:[UIColor whiteColor].CGColor];
+ [layer setFrame:textBounds];
+ [layer setAlignmentMode:alignment];
+ [layer setWrapped:YES];
+ [layer setFont:font];
+ [layer setFontSize:fontSize];
+ layer.contentsScale = [[UIScreen mainScreen] scale];
+ [layer setString:text];
+
+ [[self.view layer] addSublayer:layer];
+ [labelLayers addObject:layer];
+}
+
+- (void)setPredictionText:(NSString *)text withDuration:(float)duration {
+ if (duration > 0.0) {
+ CABasicAnimation *colorAnimation =
+ [CABasicAnimation animationWithKeyPath:@"foregroundColor"];
+ colorAnimation.duration = duration;
+ colorAnimation.fillMode = kCAFillModeForwards;
+ colorAnimation.removedOnCompletion = NO;
+ colorAnimation.fromValue = (id)[UIColor darkGrayColor].CGColor;
+ colorAnimation.toValue = (id)[UIColor whiteColor].CGColor;
+ colorAnimation.timingFunction =
+ [CAMediaTimingFunction functionWithName:kCAMediaTimingFunctionLinear];
+ [self.predictionTextLayer addAnimation:colorAnimation
+ forKey:@"colorAnimation"];
+ } else {
+ self.predictionTextLayer.foregroundColor = [UIColor whiteColor].CGColor;
+ }
+
+ [self.predictionTextLayer removeFromSuperlayer];
+ [[self.view layer] addSublayer:self.predictionTextLayer];
+ [self.predictionTextLayer setString:text];
+}
+
+- (void)speak:(NSString *)words {
+ if ([synth isSpeaking]) {
+ return;
+ }
+ AVSpeechUtterance *utterance =
+ [AVSpeechUtterance speechUtteranceWithString:words];
+ utterance.voice = [AVSpeechSynthesisVoice voiceWithLanguage:@"en-US"];
+ utterance.rate = 0.75 * AVSpeechUtteranceDefaultSpeechRate;
+ [synth speakUtterance:utterance];
+}
+
+@end
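
Note: runCNNOnFrame above makes three quiet choices worth calling out. It center-crops the frame vertically to a square (the marginY computation); its sampling loop reads input (y, x) for output (x, y), which rotates the landscape camera buffer into the portrait orientation the preview uses; and its normalization is written as (value / std) - mean, which with input_std == 1.0f is numerically identical to the conventional (value - mean) / std. A hedged standalone restatement of just the sampling loop (FillImageTensor is a hypothetical name, not part of this commit):

    #include "tensorflow/core/framework/tensor.h"

    // Fills a {1, 224, 224, 3} float tensor from a cropped BGRA/ARGB buffer.
    void FillImageTensor(const tensorflow::uint8* src, int image_width,
                         int image_height, int image_channels,
                         tensorflow::Tensor* image_tensor) {
      const int wanted_width = 224;
      const int wanted_height = 224;
      const int wanted_channels = 3;
      const float input_mean = 117.0f;
      const float input_std = 1.0f;
      float* out = image_tensor->tensor<float, 4>().data();
      for (int y = 0; y < wanted_height; ++y) {
        float* out_row = out + (y * wanted_width * wanted_channels);
        for (int x = 0; x < wanted_width; ++x) {
          // x/y are swapped on the input side: output row y walks across the
          // input's width, transposing the landscape frame to portrait.
          const int in_x = (y * image_width) / wanted_width;
          const int in_y = (x * image_height) / wanted_height;
          const tensorflow::uint8* in_pixel =
              src + (in_y * image_width * image_channels) +
              (in_x * image_channels);
          float* out_pixel = out_row + (x * wanted_channels);
          for (int c = 0; c < wanted_channels; ++c) {
            // Equivalent to (in_pixel[c] - input_mean) / input_std here,
            // because input_std is 1.0f.
            out_pixel[c] = (in_pixel[c] / input_std) - input_mean;
          }
        }
      }
    }
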
diff --git a/tensorflow/contrib/ios_examples/camera/Info.plist b/tensorflow/contrib/ios_examples/camera/Info.plist
new file mode 100644
index 0000000000..d374f914b7
--- /dev/null
+++ b/tensorflow/contrib/ios_examples/camera/Info.plist
@@ -0,0 +1,40 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
+<plist version="1.0">
+<dict>
+ <key>CFBundleDevelopmentRegion</key>
+ <string>en</string>
+ <key>CFBundleDisplayName</key>
+ <string>${PRODUCT_NAME}</string>
+ <key>CFBundleExecutable</key>
+ <string>${EXECUTABLE_NAME}</string>
+ <key>CFBundleIdentifier</key>
+ <string>$(PRODUCT_BUNDLE_IDENTIFIER)</string>
+ <key>CFBundleInfoDictionaryVersion</key>
+ <string>6.0</string>
+ <key>CFBundleName</key>
+ <string>${PRODUCT_NAME}</string>
+ <key>CFBundlePackageType</key>
+ <string>APPL</string>
+ <key>CFBundleShortVersionString</key>
+ <string>1.0</string>
+ <key>CFBundleSignature</key>
+ <string>????</string>
+ <key>CFBundleVersion</key>
+ <string>1.0</string>
+ <key>LSRequiresIPhoneOS</key>
+ <true/>
+ <key>UIMainStoryboardFile</key>
+ <string>MainStoryboard_iPhone</string>
+ <key>UIStatusBarHidden</key>
+ <true/>
+ <key>UISupportedInterfaceOrientations</key>
+ <array>
+ <string>UIInterfaceOrientationPortrait</string>
+ </array>
+ <key>UISupportedInterfaceOrientations~ipad</key>
+ <array>
+ <string>UIInterfaceOrientationPortrait</string>
+ </array>
+</dict>
+</plist>
diff --git a/tensorflow/contrib/ios_examples/camera/camera_example.xcodeproj/project.pbxproj b/tensorflow/contrib/ios_examples/camera/camera_example.xcodeproj/project.pbxproj
new file mode 100644
index 0000000000..7e17c644bc
--- /dev/null
+++ b/tensorflow/contrib/ios_examples/camera/camera_example.xcodeproj/project.pbxproj
@@ -0,0 +1,397 @@
+// !$*UTF8*$!
+{
+ archiveVersion = 1;
+ classes = {
+ };
+ objectVersion = 46;
+ objects = {
+
+/* Begin PBXBuildFile section */
+ 591D3EC51CFF7F130059011C /* AVFoundation.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 591D3EC41CFF7F120059011C /* AVFoundation.framework */; };
+ 591D3ECB1CFF7F5F0059011C /* CoreMedia.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 591D3ECA1CFF7F5F0059011C /* CoreMedia.framework */; };
+ 591D3ECD1CFF7F9F0059011C /* AssetsLibrary.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 591D3ECC1CFF7F9F0059011C /* AssetsLibrary.framework */; };
+ 591D3ECF1CFF7FCE0059011C /* ImageIO.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 591D3ECE1CFF7FCE0059011C /* ImageIO.framework */; };
+ 591D3ED21CFF85C30059011C /* ios_image_load.mm in Sources */ = {isa = PBXBuildFile; fileRef = 591D3ED11CFF85C30059011C /* ios_image_load.mm */; };
+ 591D3ED51CFF85FD0059011C /* tensorflow_utils.mm in Sources */ = {isa = PBXBuildFile; fileRef = 591D3ED31CFF85FD0059011C /* tensorflow_utils.mm */; };
+ 591D3EDA1CFFA83A0059011C /* grace_hopper.jpg in Resources */ = {isa = PBXBuildFile; fileRef = 591D3ED71CFFA83A0059011C /* grace_hopper.jpg */; };
+ 591D3EDB1CFFA83A0059011C /* imagenet_comp_graph_label_strings.txt in Resources */ = {isa = PBXBuildFile; fileRef = 591D3ED81CFFA83A0059011C /* imagenet_comp_graph_label_strings.txt */; };
+ 591D3EDC1CFFA83A0059011C /* tensorflow_inception_graph.pb in Resources */ = {isa = PBXBuildFile; fileRef = 591D3ED91CFFA83A0059011C /* tensorflow_inception_graph.pb */; };
+ 591D3EDF1CFFAD230059011C /* libprotobuf-lite.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 591D3EDD1CFFAD230059011C /* libprotobuf-lite.a */; };
+ 591D3EE01CFFAD230059011C /* libprotobuf.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 591D3EDE1CFFAD230059011C /* libprotobuf.a */; };
+ 592FF8B918ECBD7600C164F8 /* Foundation.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 592FF8B818ECBD7600C164F8 /* Foundation.framework */; };
+ 592FF8BB18ECBD7600C164F8 /* CoreGraphics.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 592FF8BA18ECBD7600C164F8 /* CoreGraphics.framework */; };
+ 592FF90218ECC66200C164F8 /* main.mm in Sources */ = {isa = PBXBuildFile; fileRef = 592FF90118ECC66200C164F8 /* main.mm */; };
+ 592FF90D18EDD0DA00C164F8 /* MainStoryboard_iPhone.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 592FF90A18EDD0DA00C164F8 /* MainStoryboard_iPhone.storyboard */; };
+ 592FF92518EE240200C164F8 /* CameraExampleAppDelegate.m in Sources */ = {isa = PBXBuildFile; fileRef = 592FF92218EE240200C164F8 /* CameraExampleAppDelegate.m */; };
+ 592FF92618EE240200C164F8 /* CameraExampleViewController.mm in Sources */ = {isa = PBXBuildFile; fileRef = 592FF92418EE240200C164F8 /* CameraExampleViewController.mm */; };
+/* End PBXBuildFile section */
+
+/* Begin PBXFileReference section */
+ 591D3EC41CFF7F120059011C /* AVFoundation.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = AVFoundation.framework; path = Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS9.2.sdk/System/Library/Frameworks/AVFoundation.framework; sourceTree = DEVELOPER_DIR; };
+ 591D3EC61CFF7F370059011C /* CoreFoundation.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = CoreFoundation.framework; path = Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS9.2.sdk/System/Library/Frameworks/CoreFoundation.framework; sourceTree = DEVELOPER_DIR; };
+ 591D3EC81CFF7F500059011C /* CoreImage.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = CoreImage.framework; path = Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS9.2.sdk/System/Library/Frameworks/CoreImage.framework; sourceTree = DEVELOPER_DIR; };
+ 591D3ECA1CFF7F5F0059011C /* CoreMedia.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = CoreMedia.framework; path = Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS9.2.sdk/System/Library/Frameworks/CoreMedia.framework; sourceTree = DEVELOPER_DIR; };
+ 591D3ECC1CFF7F9F0059011C /* AssetsLibrary.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = AssetsLibrary.framework; path = Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS9.2.sdk/System/Library/Frameworks/AssetsLibrary.framework; sourceTree = DEVELOPER_DIR; };
+ 591D3ECE1CFF7FCE0059011C /* ImageIO.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = ImageIO.framework; path = Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS9.2.sdk/System/Library/Frameworks/ImageIO.framework; sourceTree = DEVELOPER_DIR; };
+ 591D3ED01CFF85C30059011C /* ios_image_load.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = ios_image_load.h; sourceTree = SOURCE_ROOT; };
+ 591D3ED11CFF85C30059011C /* ios_image_load.mm */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.objcpp; path = ios_image_load.mm; sourceTree = SOURCE_ROOT; };
+ 591D3ED31CFF85FD0059011C /* tensorflow_utils.mm */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.objcpp; path = tensorflow_utils.mm; sourceTree = SOURCE_ROOT; };
+ 591D3ED41CFF85FD0059011C /* tensorflow_utils.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = tensorflow_utils.h; sourceTree = SOURCE_ROOT; };
+ 591D3ED71CFFA83A0059011C /* grace_hopper.jpg */ = {isa = PBXFileReference; lastKnownFileType = image.jpeg; path = grace_hopper.jpg; sourceTree = "<group>"; };
+ 591D3ED81CFFA83A0059011C /* imagenet_comp_graph_label_strings.txt */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text; path = imagenet_comp_graph_label_strings.txt; sourceTree = "<group>"; };
+ 591D3ED91CFFA83A0059011C /* tensorflow_inception_graph.pb */ = {isa = PBXFileReference; lastKnownFileType = file; path = tensorflow_inception_graph.pb; sourceTree = "<group>"; };
+ 591D3EDD1CFFAD230059011C /* libprotobuf-lite.a */ = {isa = PBXFileReference; lastKnownFileType = archive.ar; name = "libprotobuf-lite.a"; path = "../../makefile/gen/protobuf_ios/lib/libprotobuf-lite.a"; sourceTree = "<group>"; };
+ 591D3EDE1CFFAD230059011C /* libprotobuf.a */ = {isa = PBXFileReference; lastKnownFileType = archive.ar; name = libprotobuf.a; path = ../../makefile/gen/protobuf_ios/lib/libprotobuf.a; sourceTree = "<group>"; };
+ 592FF8B518ECBD7600C164F8 /* CameraExample.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = CameraExample.app; sourceTree = BUILT_PRODUCTS_DIR; };
+ 592FF8B818ECBD7600C164F8 /* Foundation.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = Foundation.framework; path = System/Library/Frameworks/Foundation.framework; sourceTree = SDKROOT; };
+ 592FF8BA18ECBD7600C164F8 /* CoreGraphics.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = CoreGraphics.framework; path = System/Library/Frameworks/CoreGraphics.framework; sourceTree = SDKROOT; };
+ 592FF90118ECC66200C164F8 /* main.mm */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.objcpp; path = main.mm; sourceTree = SOURCE_ROOT; };
+ 592FF90318ECCB8300C164F8 /* Info.plist */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = SOURCE_ROOT; };
+ 592FF90B18EDD0DA00C164F8 /* en */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = en; path = MainStoryboard_iPhone.storyboard; sourceTree = "<group>"; };
+ 592FF92118EE240200C164F8 /* CameraExampleAppDelegate.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = CameraExampleAppDelegate.h; sourceTree = SOURCE_ROOT; };
+ 592FF92218EE240200C164F8 /* CameraExampleAppDelegate.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = CameraExampleAppDelegate.m; sourceTree = SOURCE_ROOT; };
+ 592FF92318EE240200C164F8 /* CameraExampleViewController.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = CameraExampleViewController.h; sourceTree = SOURCE_ROOT; };
+ 592FF92418EE240200C164F8 /* CameraExampleViewController.mm */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.objcpp; path = CameraExampleViewController.mm; sourceTree = SOURCE_ROOT; };
+/* End PBXFileReference section */
+
+/* Begin PBXFrameworksBuildPhase section */
+ 592FF8B218ECBD7600C164F8 /* Frameworks */ = {
+ isa = PBXFrameworksBuildPhase;
+ buildActionMask = 2147483647;
+ files = (
+ 591D3EDF1CFFAD230059011C /* libprotobuf-lite.a in Frameworks */,
+ 591D3EE01CFFAD230059011C /* libprotobuf.a in Frameworks */,
+ 591D3ECF1CFF7FCE0059011C /* ImageIO.framework in Frameworks */,
+ 591D3ECD1CFF7F9F0059011C /* AssetsLibrary.framework in Frameworks */,
+ 591D3ECB1CFF7F5F0059011C /* CoreMedia.framework in Frameworks */,
+ 591D3EC51CFF7F130059011C /* AVFoundation.framework in Frameworks */,
+ 592FF8BB18ECBD7600C164F8 /* CoreGraphics.framework in Frameworks */,
+ 592FF8B918ECBD7600C164F8 /* Foundation.framework in Frameworks */,
+ );
+ runOnlyForDeploymentPostprocessing = 0;
+ };
+/* End PBXFrameworksBuildPhase section */
+
+/* Begin PBXGroup section */
+ 591D3ED61CFFA83A0059011C /* data */ = {
+ isa = PBXGroup;
+ children = (
+ 591D3ED71CFFA83A0059011C /* grace_hopper.jpg */,
+ 591D3ED81CFFA83A0059011C /* imagenet_comp_graph_label_strings.txt */,
+ 591D3ED91CFFA83A0059011C /* tensorflow_inception_graph.pb */,
+ );
+ path = data;
+ sourceTree = SOURCE_ROOT;
+ };
+ 592FF8AA18ECBD3600C164F8 = {
+ isa = PBXGroup;
+ children = (
+ 592FF8BE18ECBD7600C164F8 /* CameraExample */,
+ 592FF8B718ECBD7600C164F8 /* Frameworks */,
+ 592FF8B618ECBD7600C164F8 /* Products */,
+ );
+ sourceTree = "<group>";
+ };
+ 592FF8B618ECBD7600C164F8 /* Products */ = {
+ isa = PBXGroup;
+ children = (
+ 592FF8B518ECBD7600C164F8 /* CameraExample.app */,
+ );
+ name = Products;
+ sourceTree = "<group>";
+ };
+ 592FF8B718ECBD7600C164F8 /* Frameworks */ = {
+ isa = PBXGroup;
+ children = (
+ 591D3EDD1CFFAD230059011C /* libprotobuf-lite.a */,
+ 591D3EDE1CFFAD230059011C /* libprotobuf.a */,
+ 591D3ECE1CFF7FCE0059011C /* ImageIO.framework */,
+ 591D3ECC1CFF7F9F0059011C /* AssetsLibrary.framework */,
+ 591D3ECA1CFF7F5F0059011C /* CoreMedia.framework */,
+ 591D3EC81CFF7F500059011C /* CoreImage.framework */,
+ 591D3EC61CFF7F370059011C /* CoreFoundation.framework */,
+ 591D3EC41CFF7F120059011C /* AVFoundation.framework */,
+ 592FF8B818ECBD7600C164F8 /* Foundation.framework */,
+ 592FF8BA18ECBD7600C164F8 /* CoreGraphics.framework */,
+ );
+ name = Frameworks;
+ sourceTree = "<group>";
+ };
+ 592FF8BE18ECBD7600C164F8 /* CameraExample */ = {
+ isa = PBXGroup;
+ children = (
+ 591D3ED61CFFA83A0059011C /* data */,
+ 592FF90718EDD0DA00C164F8 /* en.lproj */,
+ 592FF92118EE240200C164F8 /* CameraExampleAppDelegate.h */,
+ 592FF92218EE240200C164F8 /* CameraExampleAppDelegate.m */,
+ 592FF92318EE240200C164F8 /* CameraExampleViewController.h */,
+ 592FF92418EE240200C164F8 /* CameraExampleViewController.mm */,
+ 592FF90318ECCB8300C164F8 /* Info.plist */,
+ 591D3ED01CFF85C30059011C /* ios_image_load.h */,
+ 591D3ED11CFF85C30059011C /* ios_image_load.mm */,
+ 592FF90118ECC66200C164F8 /* main.mm */,
+ 591D3ED31CFF85FD0059011C /* tensorflow_utils.mm */,
+ 591D3ED41CFF85FD0059011C /* tensorflow_utils.h */,
+ );
+ name = CameraExample;
+ path = SimpleExample;
+ sourceTree = "<group>";
+ };
+ 592FF90718EDD0DA00C164F8 /* en.lproj */ = {
+ isa = PBXGroup;
+ children = (
+ 592FF90A18EDD0DA00C164F8 /* MainStoryboard_iPhone.storyboard */,
+ );
+ path = en.lproj;
+ sourceTree = SOURCE_ROOT;
+ };
+/* End PBXGroup section */
+
+/* Begin PBXNativeTarget section */
+ 592FF8B418ECBD7600C164F8 /* CameraExample */ = {
+ isa = PBXNativeTarget;
+ buildConfigurationList = 592FF8E318ECBD7600C164F8 /* Build configuration list for PBXNativeTarget "CameraExample" */;
+ buildPhases = (
+ 592FF8B118ECBD7600C164F8 /* Sources */,
+ 592FF8B218ECBD7600C164F8 /* Frameworks */,
+ 592FF8B318ECBD7600C164F8 /* Resources */,
+ );
+ buildRules = (
+ );
+ dependencies = (
+ );
+ name = CameraExample;
+ productName = SimpleExample;
+ productReference = 592FF8B518ECBD7600C164F8 /* CameraExample.app */;
+ productType = "com.apple.product-type.application";
+ };
+/* End PBXNativeTarget section */
+
+/* Begin PBXProject section */
+ 592FF8AB18ECBD3600C164F8 /* Project object */ = {
+ isa = PBXProject;
+ attributes = {
+ LastUpgradeCheck = 0720;
+ };
+ buildConfigurationList = 592FF8AE18ECBD3600C164F8 /* Build configuration list for PBXProject "camera_example" */;
+ compatibilityVersion = "Xcode 3.2";
+ developmentRegion = English;
+ hasScannedForEncodings = 0;
+ knownRegions = (
+ en,
+ );
+ mainGroup = 592FF8AA18ECBD3600C164F8;
+ productRefGroup = 592FF8B618ECBD7600C164F8 /* Products */;
+ projectDirPath = "";
+ projectRoot = "";
+ targets = (
+ 592FF8B418ECBD7600C164F8 /* CameraExample */,
+ );
+ };
+/* End PBXProject section */
+
+/* Begin PBXResourcesBuildPhase section */
+ 592FF8B318ECBD7600C164F8 /* Resources */ = {
+ isa = PBXResourcesBuildPhase;
+ buildActionMask = 2147483647;
+ files = (
+ 591D3EDA1CFFA83A0059011C /* grace_hopper.jpg in Resources */,
+ 591D3EDC1CFFA83A0059011C /* tensorflow_inception_graph.pb in Resources */,
+ 592FF90D18EDD0DA00C164F8 /* MainStoryboard_iPhone.storyboard in Resources */,
+ 591D3EDB1CFFA83A0059011C /* imagenet_comp_graph_label_strings.txt in Resources */,
+ );
+ runOnlyForDeploymentPostprocessing = 0;
+ };
+/* End PBXResourcesBuildPhase section */
+
+/* Begin PBXSourcesBuildPhase section */
+ 592FF8B118ECBD7600C164F8 /* Sources */ = {
+ isa = PBXSourcesBuildPhase;
+ buildActionMask = 2147483647;
+ files = (
+ 592FF90218ECC66200C164F8 /* main.mm in Sources */,
+ 591D3ED21CFF85C30059011C /* ios_image_load.mm in Sources */,
+ 592FF92618EE240200C164F8 /* CameraExampleViewController.mm in Sources */,
+ 592FF92518EE240200C164F8 /* CameraExampleAppDelegate.m in Sources */,
+ 591D3ED51CFF85FD0059011C /* tensorflow_utils.mm in Sources */,
+ );
+ runOnlyForDeploymentPostprocessing = 0;
+ };
+/* End PBXSourcesBuildPhase section */
+
+/* Begin PBXVariantGroup section */
+ 592FF90A18EDD0DA00C164F8 /* MainStoryboard_iPhone.storyboard */ = {
+ isa = PBXVariantGroup;
+ children = (
+ 592FF90B18EDD0DA00C164F8 /* en */,
+ );
+ name = MainStoryboard_iPhone.storyboard;
+ sourceTree = "<group>";
+ };
+/* End PBXVariantGroup section */
+
+/* Begin XCBuildConfiguration section */
+ 592FF8AF18ECBD3600C164F8 /* Debug */ = {
+ isa = XCBuildConfiguration;
+ buildSettings = {
+ ENABLE_TESTABILITY = YES;
+ ONLY_ACTIVE_ARCH = YES;
+ };
+ name = Debug;
+ };
+ 592FF8B018ECBD3600C164F8 /* Release */ = {
+ isa = XCBuildConfiguration;
+ buildSettings = {
+ };
+ name = Release;
+ };
+ 592FF8DF18ECBD7600C164F8 /* Debug */ = {
+ isa = XCBuildConfiguration;
+ buildSettings = {
+ ALWAYS_SEARCH_USER_PATHS = NO;
+ ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
+ ASSETCATALOG_COMPILER_LAUNCHIMAGE_NAME = LaunchImage;
+ CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x";
+ CLANG_CXX_LIBRARY = "compiler-default";
+ CLANG_ENABLE_MODULES = YES;
+ CLANG_ENABLE_OBJC_ARC = NO;
+ CLANG_WARN_BOOL_CONVERSION = YES;
+ CLANG_WARN_CONSTANT_CONVERSION = YES;
+ CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
+ CLANG_WARN_EMPTY_BODY = YES;
+ CLANG_WARN_ENUM_CONVERSION = YES;
+ CLANG_WARN_INT_CONVERSION = YES;
+ CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
+ CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
+ "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer";
+ COPY_PHASE_STRIP = NO;
+ ENABLE_BITCODE = NO;
+ FRAMEWORK_SEARCH_PATHS = "$(inherited)";
+ GCC_C_LANGUAGE_STANDARD = gnu99;
+ GCC_DYNAMIC_NO_PIC = NO;
+ GCC_OPTIMIZATION_LEVEL = 0;
+ GCC_PRECOMPILE_PREFIX_HEADER = YES;
+ GCC_PREFIX_HEADER = "";
+ GCC_PREPROCESSOR_DEFINITIONS = (
+ "DEBUG=1",
+ "$(inherited)",
+ );
+ GCC_SYMBOLS_PRIVATE_EXTERN = NO;
+ GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
+ GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
+ GCC_WARN_UNDECLARED_SELECTOR = YES;
+ GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
+ GCC_WARN_UNUSED_FUNCTION = YES;
+ GCC_WARN_UNUSED_VARIABLE = YES;
+ HEADER_SEARCH_PATHS = (
+ "$(SRCROOT)/../../makefile/gen/proto",
+ "$(SRCROOT)/../../makefile/downloads/eigen-eigen-f3a13643ac1f",
+ "$(SRCROOT)/../../makefile/downloads",
+ "$(SRCROOT)/../../makefile/downloads/protobuf/src/",
+ "$(SRCROOT)/../../../..",
+ );
+ INFOPLIST_FILE = "$(SRCROOT)/Info.plist";
+ IPHONEOS_DEPLOYMENT_TARGET = 7.0;
+ LIBRARY_SEARCH_PATHS = (
+ "$(SRCROOT)/../../makefile/gen/lib",
+ "$(SRCROOT)/../../makefile/gen/protobuf_ios/lib",
+ );
+ ONLY_ACTIVE_ARCH = NO;
+ OTHER_LDFLAGS = (
+ "-force_load",
+ "$(SRCROOT)/../../makefile/gen/lib/libtensorflow-core.a",
+ );
+ PRODUCT_BUNDLE_IDENTIFIER = com.google.CameraExample;
+ PRODUCT_NAME = "$(TARGET_NAME)";
+ SDKROOT = iphoneos;
+ TARGETED_DEVICE_FAMILY = "1,2";
+ VALID_ARCHS = "arm64 armv7 armv7s";
+ WRAPPER_EXTENSION = app;
+ };
+ name = Debug;
+ };
+ 592FF8E018ECBD7600C164F8 /* Release */ = {
+ isa = XCBuildConfiguration;
+ buildSettings = {
+ ALWAYS_SEARCH_USER_PATHS = NO;
+ ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
+ ASSETCATALOG_COMPILER_LAUNCHIMAGE_NAME = LaunchImage;
+ CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x";
+ CLANG_CXX_LIBRARY = "compiler-default";
+ CLANG_ENABLE_MODULES = YES;
+ CLANG_ENABLE_OBJC_ARC = NO;
+ CLANG_WARN_BOOL_CONVERSION = YES;
+ CLANG_WARN_CONSTANT_CONVERSION = YES;
+ CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
+ CLANG_WARN_EMPTY_BODY = YES;
+ CLANG_WARN_ENUM_CONVERSION = YES;
+ CLANG_WARN_INT_CONVERSION = YES;
+ CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
+ CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
+ "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer";
+ COPY_PHASE_STRIP = YES;
+ ENABLE_BITCODE = NO;
+ ENABLE_NS_ASSERTIONS = NO;
+ FRAMEWORK_SEARCH_PATHS = "$(inherited)";
+ GCC_C_LANGUAGE_STANDARD = gnu99;
+ GCC_PRECOMPILE_PREFIX_HEADER = YES;
+ GCC_PREFIX_HEADER = "";
+ GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
+ GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
+ GCC_WARN_UNDECLARED_SELECTOR = YES;
+ GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
+ GCC_WARN_UNUSED_FUNCTION = YES;
+ GCC_WARN_UNUSED_VARIABLE = YES;
+ HEADER_SEARCH_PATHS = (
+ "$(SRCROOT)/../../makefile/gen/proto",
+ "$(SRCROOT)/../../makefile/downloads/eigen-eigen-f3a13643ac1f",
+ "$(SRCROOT)/../../makefile/downloads",
+ "$(SRCROOT)/../../makefile/downloads/protobuf/src/",
+ "$(SRCROOT)/../../../..",
+ );
+ INFOPLIST_FILE = "$(SRCROOT)/Info.plist";
+ IPHONEOS_DEPLOYMENT_TARGET = 7.0;
+ LIBRARY_SEARCH_PATHS = (
+ "$(SRCROOT)/../../makefile/gen/lib",
+ "$(SRCROOT)/../../makefile/gen/protobuf_ios/lib",
+ );
+ ONLY_ACTIVE_ARCH = NO;
+ OTHER_LDFLAGS = (
+ "-force_load",
+ "$(SRCROOT)/../../makefile/gen/lib/libtensorflow-core.a",
+ );
+ PRODUCT_BUNDLE_IDENTIFIER = com.google.CameraExample;
+ PRODUCT_NAME = "$(TARGET_NAME)";
+ SDKROOT = iphoneos;
+ TARGETED_DEVICE_FAMILY = "1,2";
+ VALIDATE_PRODUCT = YES;
+ VALID_ARCHS = "arm64 armv7 armv7s";
+ WRAPPER_EXTENSION = app;
+ };
+ name = Release;
+ };
+/* End XCBuildConfiguration section */
+
+/* Begin XCConfigurationList section */
+ 592FF8AE18ECBD3600C164F8 /* Build configuration list for PBXProject "camera_example" */ = {
+ isa = XCConfigurationList;
+ buildConfigurations = (
+ 592FF8AF18ECBD3600C164F8 /* Debug */,
+ 592FF8B018ECBD3600C164F8 /* Release */,
+ );
+ defaultConfigurationIsVisible = 0;
+ defaultConfigurationName = Release;
+ };
+ 592FF8E318ECBD7600C164F8 /* Build configuration list for PBXNativeTarget "CameraExample" */ = {
+ isa = XCConfigurationList;
+ buildConfigurations = (
+ 592FF8DF18ECBD7600C164F8 /* Debug */,
+ 592FF8E018ECBD7600C164F8 /* Release */,
+ );
+ defaultConfigurationIsVisible = 0;
+ defaultConfigurationName = Release;
+ };
+/* End XCConfigurationList section */
+ };
+ rootObject = 592FF8AB18ECBD3600C164F8 /* Project object */;
+}
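
Note: two build settings above matter more than the rest. ENABLE_BITCODE is NO because the makefile-built static libraries are compiled without bitcode, and OTHER_LDFLAGS force-loads libtensorflow-core.a. TensorFlow ops and kernels register themselves through static initializers, and without -force_load the linker drops those otherwise-unreferenced object files, producing "No OpKernel was registered" errors at Session::Run time. An illustrative sketch of the registration pattern being preserved (the shape of the macro call, not a verbatim line from the library):

    #include "tensorflow/core/framework/op_kernel.h"

    // The macro expands to a global object whose constructor registers the
    // kernel; -force_load keeps the containing object file from being
    // dead-stripped. MyConv2DOp is a hypothetical kernel class.
    REGISTER_KERNEL_BUILDER(Name("Conv2D").Device(tensorflow::DEVICE_CPU),
                            MyConv2DOp);
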
diff --git a/tensorflow/contrib/ios_example/data/grace_hopper.jpg b/tensorflow/contrib/ios_examples/camera/data/grace_hopper.jpg
index d2a427810f..d2a427810f 100644
--- a/tensorflow/contrib/ios_example/data/grace_hopper.jpg
+++ b/tensorflow/contrib/ios_examples/camera/data/grace_hopper.jpg
Binary files differ
diff --git a/tensorflow/contrib/ios_examples/camera/en.lproj/MainStoryboard_iPhone.storyboard b/tensorflow/contrib/ios_examples/camera/en.lproj/MainStoryboard_iPhone.storyboard
new file mode 100644
index 0000000000..0f10a22e41
--- /dev/null
+++ b/tensorflow/contrib/ios_examples/camera/en.lproj/MainStoryboard_iPhone.storyboard
@@ -0,0 +1,46 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<document type="com.apple.InterfaceBuilder3.CocoaTouch.Storyboard.XIB" version="3.0" toolsVersion="9531" systemVersion="15E65" targetRuntime="iOS.CocoaTouch" propertyAccessControl="none" initialViewController="2">
+ <dependencies>
+ <deployment identifier="iOS"/>
+ <plugIn identifier="com.apple.InterfaceBuilder.IBCocoaTouchPlugin" version="9529"/>
+ </dependencies>
+ <scenes>
+ <!--Camera Example View Controller-->
+ <scene sceneID="5">
+ <objects>
+ <viewController id="2" customClass="CameraExampleViewController" sceneMemberID="viewController">
+ <view key="view" contentMode="scaleToFill" id="3">
+ <rect key="frame" x="0.0" y="0.0" width="320" height="568"/>
+ <autoresizingMask key="autoresizingMask" flexibleMaxX="YES" flexibleMaxY="YES"/>
+ <subviews>
+ <view contentMode="scaleToFill" id="12">
+ <rect key="frame" x="0.0" y="0.0" width="320" height="522"/>
+ <autoresizingMask key="autoresizingMask" widthSizable="YES" heightSizable="YES"/>
+ <color key="backgroundColor" white="1" alpha="1" colorSpace="custom" customColorSpace="calibratedWhite"/>
+ <gestureRecognizers/>
+ </view>
+ <button opaque="NO" contentMode="scaleToFill" contentHorizontalAlignment="center" contentVerticalAlignment="center" buttonType="roundedRect" lineBreakMode="middleTruncation" id="iD8-yH-eWH">
+ <rect key="frame" x="0.0" y="454" width="320" height="33"/>
+ <autoresizingMask key="autoresizingMask" flexibleMaxX="YES" flexibleMaxY="YES"/>
+ <color key="backgroundColor" red="0.0" green="0.0" blue="0.0" alpha="1" colorSpace="calibratedRGB"/>
+ <fontDescription key="fontDescription" name="Menlo-Regular" family="Menlo" pointSize="20"/>
+ <state key="normal" title="Freeze Frame">
+ <color key="titleColor" white="1" alpha="1" colorSpace="calibratedWhite"/>
+ <color key="titleShadowColor" white="0.5" alpha="1" colorSpace="calibratedWhite"/>
+ </state>
+ <connections>
+ <action selector="takePicture:" destination="2" eventType="touchUpInside" id="BTy-7E-XUS"/>
+ </connections>
+ </button>
+ </subviews>
+ <color key="backgroundColor" red="0.0" green="0.0" blue="0.0" alpha="1" colorSpace="calibratedRGB"/>
+ </view>
+ <connections>
+ <outlet property="previewView" destination="12" id="13"/>
+ </connections>
+ </viewController>
+ <placeholder placeholderIdentifier="IBFirstResponder" id="4" sceneMemberID="firstResponder"/>
+ </objects>
+ </scene>
+ </scenes>
+</document>
diff --git a/tensorflow/contrib/ios_examples/camera/ios_image_load.h b/tensorflow/contrib/ios_examples/camera/ios_image_load.h
new file mode 100644
index 0000000000..87a847e145
--- /dev/null
+++ b/tensorflow/contrib/ios_examples/camera/ios_image_load.h
@@ -0,0 +1,27 @@
+// Copyright 2015 Google Inc. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#ifndef TENSORFLOW_CONTRIB_IOS_EXAMPLES_CAMERA_IMAGE_LOAD_H_
+#define TENSORFLOW_CONTRIB_IOS_EXAMPLES_CAMERA_IMAGE_LOAD_H_
+
+#include <vector>
+
+#include "tensorflow/core/framework/types.h"
+
+std::vector<tensorflow::uint8> LoadImageFromFile(const char* file_name,
+ int* out_width,
+ int* out_height,
+ int* out_channels);
+
+#endif // TENSORFLOW_CONTRIB_IOS_EXAMPLES_CAMERA_IMAGE_LOAD_H_
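
Note: LoadImageFromFile decodes a JPEG or PNG from disk into raw bytes plus its dimensions and channel count. A hedged usage sketch against the bundled grace_hopper.jpg (LoadSampleImage is a hypothetical name):

    #import <Foundation/Foundation.h>
    #include "ios_image_load.h"
    #include "tensorflow/core/platform/logging.h"

    void LoadSampleImage() {
      NSString* path = [[NSBundle mainBundle] pathForResource:@"grace_hopper"
                                                       ofType:@"jpg"];
      int width;
      int height;
      int channels;
      std::vector<tensorflow::uint8> image_data =
          LoadImageFromFile([path UTF8String], &width, &height, &channels);
      LOG(INFO) << "Loaded " << width << "x" << height << " image with "
                << channels << " channels (" << image_data.size() << " bytes)";
    }
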
diff --git a/tensorflow/contrib/ios_example/ios_image_load.mm b/tensorflow/contrib/ios_examples/camera/ios_image_load.mm
index 64d1ea21cf..64d1ea21cf 100644
--- a/tensorflow/contrib/ios_example/ios_image_load.mm
+++ b/tensorflow/contrib/ios_examples/camera/ios_image_load.mm
diff --git a/tensorflow/contrib/ios_examples/camera/main.mm b/tensorflow/contrib/ios_examples/camera/main.mm
new file mode 100644
index 0000000000..42eff697ef
--- /dev/null
+++ b/tensorflow/contrib/ios_examples/camera/main.mm
@@ -0,0 +1,27 @@
+// Copyright 2015 Google Inc. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#import <UIKit/UIKit.h>
+
+#import "CameraExampleAppDelegate.h"
+
+int main(int argc, char *argv[]) {
+ int retVal = 0;
+
+ @autoreleasepool {
+ retVal = UIApplicationMain(
+ argc, argv, nil, NSStringFromClass([CameraExampleAppDelegate class]));
+ }
+ return retVal;
+}
diff --git a/tensorflow/contrib/ios_examples/camera/squarePNG.png b/tensorflow/contrib/ios_examples/camera/squarePNG.png
new file mode 100644
index 0000000000..e26ff840ed
--- /dev/null
+++ b/tensorflow/contrib/ios_examples/camera/squarePNG.png
Binary files differ
diff --git a/tensorflow/contrib/ios_examples/camera/tensorflow_utils.h b/tensorflow/contrib/ios_examples/camera/tensorflow_utils.h
new file mode 100644
index 0000000000..9110a106fe
--- /dev/null
+++ b/tensorflow/contrib/ios_examples/camera/tensorflow_utils.h
@@ -0,0 +1,33 @@
+// Copyright 2015 Google Inc. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#ifndef TENSORFLOW_CONTRIB_IOS_EXAMPLES_CAMERA_TENSORFLOW_UTILS_H_
+#define TENSORFLOW_CONTRIB_IOS_EXAMPLES_CAMERA_TENSORFLOW_UTILS_H_
+
+#include <memory>
+#include <vector>
+
+#include "tensorflow/core/public/session.h"
+#include "third_party/eigen3/unsupported/Eigen/CXX11/Tensor"
+
+tensorflow::Status LoadModel(NSString* file_name, NSString* file_type,
+ std::unique_ptr<tensorflow::Session>* session);
+tensorflow::Status LoadLabels(NSString* file_name, NSString* file_type,
+ std::vector<std::string>* label_strings);
+void GetTopN(const Eigen::TensorMap<Eigen::Tensor<float, 1, Eigen::RowMajor>,
+ Eigen::Aligned>& prediction, const int num_results,
+ const float threshold,
+ std::vector<std::pair<float, int> >* top_results);
+
+#endif // TENSORFLOW_CONTRIB_IOS_EXAMPLES_CAMERA_TENSORFLOW_UTILS_H_
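A minimal sketch of how these helpers compose in a caller; the bundle resource names here are hypothetical placeholders, not part of this change:

// Sketch (Objective-C++): load the graph and its labels once at startup.
std::unique_ptr<tensorflow::Session> session;
std::vector<std::string> labels;
tensorflow::Status load_status =
    LoadModel(@"tensorflow_inception_graph", @"pb", &session);
if (load_status.ok()) {
  load_status = LoadLabels(@"imagenet_comp_graph_label_strings", @"txt",
                           &labels);
}
if (!load_status.ok()) {
  LOG(ERROR) << "Couldn't load model or labels: " << load_status;
}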
diff --git a/tensorflow/contrib/ios_examples/camera/tensorflow_utils.mm b/tensorflow/contrib/ios_examples/camera/tensorflow_utils.mm
new file mode 100644
index 0000000000..1df912f6e2
--- /dev/null
+++ b/tensorflow/contrib/ios_examples/camera/tensorflow_utils.mm
@@ -0,0 +1,178 @@
+// Copyright 2015 Google Inc. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#import <Foundation/Foundation.h>
+
+#include "tensorflow_utils.h"
+
+#include <fstream>
+#include <pthread.h>
+#include <unistd.h>
+#include <queue>
+#include <sstream>
+#include <string>
+
+#include "google/protobuf/io/coded_stream.h"
+#include "google/protobuf/io/zero_copy_stream_impl.h"
+#include "google/protobuf/io/zero_copy_stream_impl_lite.h"
+#include "google/protobuf/message_lite.h"
+#include "tensorflow/core/framework/tensor.h"
+#include "tensorflow/core/framework/types.pb.h"
+#include "tensorflow/core/platform/env.h"
+#include "tensorflow/core/platform/logging.h"
+#include "tensorflow/core/platform/mutex.h"
+#include "tensorflow/core/platform/types.h"
+#include "tensorflow/core/public/session.h"
+
+
+namespace {
+ class IfstreamInputStream : public ::google::protobuf::io::CopyingInputStream {
+ public:
+ explicit IfstreamInputStream(const std::string& file_name)
+ : ifs_(file_name.c_str(), std::ios::in | std::ios::binary) {}
+ ~IfstreamInputStream() { ifs_.close(); }
+
+ int Read(void* buffer, int size) {
+ if (!ifs_) {
+ return -1;
+ }
+ ifs_.read(static_cast<char*>(buffer), size);
+ return ifs_.gcount();
+ }
+
+ private:
+ std::ifstream ifs_;
+ };
+} // namespace
+
+// Fills top_results with the top N confidence values at or above threshold,
+// sorted by confidence in descending order.
+void GetTopN(const Eigen::TensorMap<Eigen::Tensor<float, 1, Eigen::RowMajor>,
+ Eigen::Aligned>& prediction, const int num_results,
+ const float threshold,
+ std::vector<std::pair<float, int> >* top_results) {
+ // Will contain top N results in ascending order.
+ std::priority_queue<std::pair<float, int>,
+ std::vector<std::pair<float, int> >,
+ std::greater<std::pair<float, int> > > top_result_pq;
+
+ const int count = prediction.size();
+ for (int i = 0; i < count; ++i) {
+ const float value = prediction(i);
+
+ // Only add it if it beats the threshold and has a chance at being in
+ // the top N.
+ if (value < threshold) {
+ continue;
+ }
+
+ top_result_pq.push(std::pair<float, int>(value, i));
+
+ // If at capacity, kick the smallest value out.
+    if (top_result_pq.size() > static_cast<size_t>(num_results)) {
+ top_result_pq.pop();
+ }
+ }
+
+ // Copy to output vector and reverse into descending order.
+ while (!top_result_pq.empty()) {
+ top_results->push_back(top_result_pq.top());
+ top_result_pq.pop();
+ }
+ std::reverse(top_results->begin(), top_results->end());
+}
+
+
+bool PortableReadFileToProto(const std::string& file_name,
+ ::google::protobuf::MessageLite* proto) {
+ ::google::protobuf::io::CopyingInputStreamAdaptor stream(
+ new IfstreamInputStream(file_name));
+ stream.SetOwnsCopyingStream(true);
+ ::google::protobuf::io::CodedInputStream coded_stream(&stream);
+ // Total bytes hard limit / warning limit are set to 1GB and 512MB
+ // respectively.
+ coded_stream.SetTotalBytesLimit(1024LL << 20, 512LL << 20);
+ return proto->ParseFromCodedStream(&coded_stream);
+}
+
+NSString* FilePathForResourceName(NSString* name, NSString* extension) {
+ NSString* file_path = [[NSBundle mainBundle] pathForResource:name ofType:extension];
+ if (file_path == NULL) {
+    LOG(ERROR) << "Couldn't find '" << [name UTF8String] << "."
+ << [extension UTF8String] << "' in bundle.";
+ return nullptr;
+ }
+ return file_path;
+}
+
+tensorflow::Status LoadModel(NSString* file_name, NSString* file_type,
+ std::unique_ptr<tensorflow::Session>* session) {
+ tensorflow::SessionOptions options;
+
+ tensorflow::Session* session_pointer = nullptr;
+ tensorflow::Status session_status = tensorflow::NewSession(options, &session_pointer);
+ if (!session_status.ok()) {
+ LOG(ERROR) << "Could not create Tensorflow Session: " << session_status;
+ return session_status;
+ }
+ session->reset(session_pointer);
+ LOG(INFO) << "Session created.";
+
+ tensorflow::GraphDef tensorflow_graph;
+ LOG(INFO) << "Graph created.";
+
+ NSString* model_path = FilePathForResourceName(file_name, file_type);
+ if (!model_path) {
+ LOG(ERROR) << "Failed to find model proto at" << [file_name UTF8String]
+ << [file_type UTF8String];
+ return tensorflow::errors::NotFound([file_name UTF8String],
+ [file_type UTF8String]);
+ }
+ const bool read_proto_succeeded = PortableReadFileToProto(
+ [model_path UTF8String], &tensorflow_graph);
+ if (!read_proto_succeeded) {
+ LOG(ERROR) << "Failed to load model proto from" << [model_path UTF8String];
+ return tensorflow::errors::NotFound([model_path UTF8String]);
+ }
+
+ LOG(INFO) << "Creating session.";
+ tensorflow::Status create_status = (*session)->Create(tensorflow_graph);
+ if (!create_status.ok()) {
+ LOG(ERROR) << "Could not create Tensorflow Graph: " << create_status;
+ return create_status;
+ }
+
+ return tensorflow::Status::OK();
+}
+
+tensorflow::Status LoadLabels(NSString* file_name, NSString* file_type,
+ std::vector<std::string>* label_strings) {
+ // Read the label list
+ NSString* labels_path = FilePathForResourceName(file_name, file_type);
+ if (!labels_path) {
+ LOG(ERROR) << "Failed to find model proto at" << [file_name UTF8String]
+ << [file_type UTF8String];
+ return tensorflow::errors::NotFound([file_name UTF8String],
+ [file_type UTF8String]);
+ }
+  std::ifstream t;
+  t.open([labels_path UTF8String]);
+  std::string line;
+  // Test the stream after getline so a trailing blank entry isn't added.
+  while (std::getline(t, line)) {
+    label_strings->push_back(line);
+  }
+  t.close();
+ return tensorflow::Status::OK();
+}
\ No newline at end of file
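To make the GetTopN contract concrete, a small self-contained exercise; the scores are made up, and alignas satisfies the Eigen::Aligned requirement on the mapped buffer:

// Sketch: top three scores at or above a 0.1 threshold, best first.
alignas(16) float scores[5] = {0.1f, 0.6f, 0.05f, 0.2f, 0.05f};
Eigen::TensorMap<Eigen::Tensor<float, 1, Eigen::RowMajor>, Eigen::Aligned>
    prediction(scores, 5);
std::vector<std::pair<float, int> > top_results;
GetTopN(prediction, 3, 0.1f, &top_results);
// top_results now holds {0.6f, 1}, {0.2f, 3}, {0.1f, 0}; values below the
// threshold are skipped, while values equal to it pass the strictly-less check.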
diff --git a/tensorflow/contrib/ios_example/AppDelegate.h b/tensorflow/contrib/ios_examples/simple/AppDelegate.h
index 75b1f1da38..75b1f1da38 100644
--- a/tensorflow/contrib/ios_example/AppDelegate.h
+++ b/tensorflow/contrib/ios_examples/simple/AppDelegate.h
diff --git a/tensorflow/contrib/ios_example/AppDelegate.mm b/tensorflow/contrib/ios_examples/simple/AppDelegate.mm
index 1e808eb976..1e808eb976 100644
--- a/tensorflow/contrib/ios_example/AppDelegate.mm
+++ b/tensorflow/contrib/ios_examples/simple/AppDelegate.mm
diff --git a/tensorflow/contrib/ios_example/RunModel-Info.plist b/tensorflow/contrib/ios_examples/simple/RunModel-Info.plist
index ca80e68091..ca80e68091 100644
--- a/tensorflow/contrib/ios_example/RunModel-Info.plist
+++ b/tensorflow/contrib/ios_examples/simple/RunModel-Info.plist
diff --git a/tensorflow/contrib/ios_example/RunModelViewController.h b/tensorflow/contrib/ios_examples/simple/RunModelViewController.h
index 4e1a83ccf5..4e1a83ccf5 100644
--- a/tensorflow/contrib/ios_example/RunModelViewController.h
+++ b/tensorflow/contrib/ios_examples/simple/RunModelViewController.h
diff --git a/tensorflow/contrib/ios_example/RunModelViewController.mm b/tensorflow/contrib/ios_examples/simple/RunModelViewController.mm
index cbd7fa509b..19f00ad479 100644
--- a/tensorflow/contrib/ios_example/RunModelViewController.mm
+++ b/tensorflow/contrib/ios_examples/simple/RunModelViewController.mm
@@ -204,7 +204,6 @@ NSString* RunInferenceOnImage() {
for (int c = 0; c < wanted_channels; ++c) {
out_pixel[c] = (in_pixel[c] / input_std) - input_mean;
}
-
}
}
diff --git a/tensorflow/contrib/ios_example/RunModelViewController.xib b/tensorflow/contrib/ios_examples/simple/RunModelViewController.xib
index 93f334b985..93f334b985 100644
--- a/tensorflow/contrib/ios_example/RunModelViewController.xib
+++ b/tensorflow/contrib/ios_examples/simple/RunModelViewController.xib
diff --git a/tensorflow/contrib/ios_examples/simple/data/grace_hopper.jpg b/tensorflow/contrib/ios_examples/simple/data/grace_hopper.jpg
new file mode 100644
index 0000000000..d2a427810f
--- /dev/null
+++ b/tensorflow/contrib/ios_examples/simple/data/grace_hopper.jpg
Binary files differ
diff --git a/tensorflow/contrib/ios_example/ios_image_load.h b/tensorflow/contrib/ios_examples/simple/ios_image_load.h
index 0e0b771118..0e0b771118 100644
--- a/tensorflow/contrib/ios_example/ios_image_load.h
+++ b/tensorflow/contrib/ios_examples/simple/ios_image_load.h
diff --git a/tensorflow/contrib/ios_examples/simple/ios_image_load.mm b/tensorflow/contrib/ios_examples/simple/ios_image_load.mm
new file mode 100644
index 0000000000..64d1ea21cf
--- /dev/null
+++ b/tensorflow/contrib/ios_examples/simple/ios_image_load.mm
@@ -0,0 +1,94 @@
+// Copyright 2015 Google Inc. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "ios_image_load.h"
+
+#include <stdlib.h>
+#include <string.h>
+#include <assert.h>
+#include <stdio.h>
+
+#import <CoreImage/CoreImage.h>
+#import <ImageIO/ImageIO.h>
+
+using tensorflow::uint8;
+
+std::vector<uint8> LoadImageFromFile(const char* file_name,
+ int* out_width, int* out_height,
+ int* out_channels) {
+  FILE* file_handle = fopen(file_name, "rb");
+  if (file_handle == NULL) {
+    fprintf(stderr, "Couldn't open file '%s'\n", file_name);
+    *out_width = 0;
+    *out_height = 0;
+    *out_channels = 0;
+    return std::vector<uint8>();
+  }
+  fseek(file_handle, 0, SEEK_END);
+  const size_t bytes_in_file = ftell(file_handle);
+  fseek(file_handle, 0, SEEK_SET);
+  std::vector<uint8> file_data(bytes_in_file);
+  fread(file_data.data(), 1, bytes_in_file, file_handle);
+  fclose(file_handle);
+ CFDataRef file_data_ref = CFDataCreateWithBytesNoCopy(NULL, file_data.data(),
+ bytes_in_file,
+ kCFAllocatorNull);
+ CGDataProviderRef image_provider =
+ CGDataProviderCreateWithCFData(file_data_ref);
+
+ const char* suffix = strrchr(file_name, '.');
+ if (!suffix || suffix == file_name) {
+ suffix = "";
+ }
+ CGImageRef image;
+ if (strcasecmp(suffix, ".png") == 0) {
+ image = CGImageCreateWithPNGDataProvider(image_provider, NULL, true,
+ kCGRenderingIntentDefault);
+ } else if ((strcasecmp(suffix, ".jpg") == 0) ||
+ (strcasecmp(suffix, ".jpeg") == 0)) {
+ image = CGImageCreateWithJPEGDataProvider(image_provider, NULL, true,
+ kCGRenderingIntentDefault);
+ } else {
+ CFRelease(image_provider);
+ CFRelease(file_data_ref);
+ fprintf(stderr, "Unknown suffix for file '%s'\n", file_name);
+ *out_width = 0;
+ *out_height = 0;
+ *out_channels = 0;
+ return std::vector<uint8>();
+ }
+
+ const int width = (int)CGImageGetWidth(image);
+ const int height = (int)CGImageGetHeight(image);
+ const int channels = 4;
+ CGColorSpaceRef color_space = CGColorSpaceCreateDeviceRGB();
+ const int bytes_per_row = (width * channels);
+ const int bytes_in_image = (bytes_per_row * height);
+ std::vector<uint8> result(bytes_in_image);
+ const int bits_per_component = 8;
+ CGContextRef context = CGBitmapContextCreate(result.data(), width, height,
+ bits_per_component, bytes_per_row, color_space,
+ kCGImageAlphaPremultipliedLast | kCGBitmapByteOrder32Big);
+ CGColorSpaceRelease(color_space);
+ CGContextDrawImage(context, CGRectMake(0, 0, width, height), image);
+ CGContextRelease(context);
+ CFRelease(image);
+ CFRelease(image_provider);
+ CFRelease(file_data_ref);
+
+ *out_width = width;
+ *out_height = height;
+ *out_channels = channels;
+ return result;
+}
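Because the loader always produces four channels, a caller feeding a three-channel model needs a repacking pass. A sketch, assuming placeholder input_mean and input_std preprocessing constants:

// Sketch: drop the alpha channel and normalize the pixels to floats.
int width = 0, height = 0, channels = 0;
const std::vector<tensorflow::uint8> rgba =
    LoadImageFromFile("grace_hopper.jpg", &width, &height, &channels);
const float input_mean = 117.0f;  // placeholder value
const float input_std = 1.0f;     // placeholder value
std::vector<float> rgb(width * height * 3);
for (int i = 0; i < width * height; ++i) {
  for (int c = 0; c < 3; ++c) {
    rgb[(i * 3) + c] = (rgba[(i * channels) + c] - input_mean) / input_std;
  }
}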
diff --git a/tensorflow/contrib/ios_example/main.mm b/tensorflow/contrib/ios_examples/simple/main.mm
index d70550a730..d70550a730 100644
--- a/tensorflow/contrib/ios_example/main.mm
+++ b/tensorflow/contrib/ios_examples/simple/main.mm
diff --git a/tensorflow/contrib/ios_example/tf_ios_makefile_example.xcodeproj/project.pbxproj b/tensorflow/contrib/ios_examples/simple/tf_ios_makefile_example.xcodeproj/project.pbxproj
index 3af2048eaa..0c41e0ea34 100644
--- a/tensorflow/contrib/ios_example/tf_ios_makefile_example.xcodeproj/project.pbxproj
+++ b/tensorflow/contrib/ios_examples/simple/tf_ios_makefile_example.xcodeproj/project.pbxproj
@@ -17,9 +17,6 @@
59A3D00C1CF4E68100C4259F /* RunModelViewController.xib in Resources */ = {isa = PBXBuildFile; fileRef = 59A3D0001CF4E68100C4259F /* RunModelViewController.xib */; };
59A3D0141CF4E82500C4259F /* CoreGraphics.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 59A3D0131CF4E82500C4259F /* CoreGraphics.framework */; };
59A3D0181CF4E86100C4259F /* UIKit.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 59A3D0171CF4E86100C4259F /* UIKit.framework */; };
- 59A3D01A1CF7B0DE00C4259F /* libtensorflow-core.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 59A3D0191CF7B0DE00C4259F /* libtensorflow-core.a */; };
- 59A3D01B1CF7C16600C4259F /* libprotobuf-lite.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 59A3D00F1CF4E7F200C4259F /* libprotobuf-lite.a */; };
- 59A3D01C1CF7C16600C4259F /* libprotobuf.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 59A3D0101CF4E7F200C4259F /* libprotobuf.a */; };
/* End PBXBuildFile section */
/* Begin PBXFileReference section */
@@ -39,12 +36,9 @@
59A3CFFE1CF4E68100C4259F /* RunModelViewController.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = RunModelViewController.h; sourceTree = "<group>"; };
59A3CFFF1CF4E68100C4259F /* RunModelViewController.mm */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.objcpp; path = RunModelViewController.mm; sourceTree = "<group>"; };
59A3D0001CF4E68100C4259F /* RunModelViewController.xib */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = file.xib; path = RunModelViewController.xib; sourceTree = "<group>"; };
- 59A3D00F1CF4E7F200C4259F /* libprotobuf-lite.a */ = {isa = PBXFileReference; lastKnownFileType = archive.ar; name = "libprotobuf-lite.a"; path = "../tensorflow4prs/tensorflow/contrib/makefile/gen/protobuf_ios/lib/libprotobuf-lite.a"; sourceTree = "<group>"; };
- 59A3D0101CF4E7F200C4259F /* libprotobuf.a */ = {isa = PBXFileReference; lastKnownFileType = archive.ar; name = libprotobuf.a; path = ../tensorflow4prs/tensorflow/contrib/makefile/gen/protobuf_ios/lib/libprotobuf.a; sourceTree = "<group>"; };
59A3D0131CF4E82500C4259F /* CoreGraphics.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = CoreGraphics.framework; path = System/Library/Frameworks/CoreGraphics.framework; sourceTree = SDKROOT; };
59A3D0151CF4E83D00C4259F /* Foundation.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = Foundation.framework; path = System/Library/Frameworks/Foundation.framework; sourceTree = SDKROOT; };
59A3D0171CF4E86100C4259F /* UIKit.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = UIKit.framework; path = System/Library/Frameworks/UIKit.framework; sourceTree = SDKROOT; };
- 59A3D0191CF7B0DE00C4259F /* libtensorflow-core.a */ = {isa = PBXFileReference; lastKnownFileType = archive.ar; name = "libtensorflow-core.a"; path = "../tensorflow4prs/tensorflow/contrib/makefile/gen/lib/libtensorflow-core.a"; sourceTree = "<group>"; };
/* End PBXFileReference section */
/* Begin PBXFrameworksBuildPhase section */
@@ -52,9 +46,6 @@
isa = PBXFrameworksBuildPhase;
buildActionMask = 2147483647;
files = (
- 59A3D01B1CF7C16600C4259F /* libprotobuf-lite.a in Frameworks */,
- 59A3D01C1CF7C16600C4259F /* libprotobuf.a in Frameworks */,
- 59A3D01A1CF7B0DE00C4259F /* libtensorflow-core.a in Frameworks */,
59A3D0181CF4E86100C4259F /* UIKit.framework in Frameworks */,
59A3D0141CF4E82500C4259F /* CoreGraphics.framework in Frameworks */,
);
@@ -69,9 +60,6 @@
59A3D0171CF4E86100C4259F /* UIKit.framework */,
59A3D0151CF4E83D00C4259F /* Foundation.framework */,
59A3D0131CF4E82500C4259F /* CoreGraphics.framework */,
- 59A3D00F1CF4E7F200C4259F /* libprotobuf-lite.a */,
- 59A3D0101CF4E7F200C4259F /* libprotobuf.a */,
- 59A3D0191CF7B0DE00C4259F /* libtensorflow-core.a */,
59A3CFF11CF4E68100C4259F /* AppDelegate.h */,
59A3CFF21CF4E68100C4259F /* AppDelegate.mm */,
59A3CFF31CF4E68100C4259F /* data */,
@@ -277,21 +265,21 @@
CODE_SIGN_IDENTITY = "iPhone Developer";
ENABLE_BITCODE = NO;
HEADER_SEARCH_PATHS = (
- "$(SRCROOT)/../../..",
- "$(SRCROOT)/../makefile/downloads/protobuf/src/",
- "$(SRCROOT)/../makefile/downloads",
- "$(SRCROOT)/../makefile/downloads/eigen-eigen-f3a13643ac1f",
- "$(SRCROOT)/../makefile/gen/proto",
+ "$(SRCROOT)/../../../..",
+ "$(SRCROOT)/../../makefile/downloads/protobuf/src/",
+ "$(SRCROOT)/../../makefile/downloads",
+ "$(SRCROOT)/../../makefile/downloads/eigen-eigen-f3a13643ac1f",
+ "$(SRCROOT)/../../makefile/gen/proto",
);
INFOPLIST_FILE = "$(SRCROOT)/RunModel-Info.plist";
LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks";
LIBRARY_SEARCH_PATHS = (
- "$(SRCROOT)/../makefile/gen/protobuf_ios/lib",
- "$(SRCROOT)/../makefile/gen/lib",
+ "$(SRCROOT)/../../makefile/gen/protobuf_ios/lib",
+ "$(SRCROOT)/../../makefile/gen/lib",
);
OTHER_LDFLAGS = (
"-force_load",
- "$(SRCROOT)/../makefile/gen/lib/libtensorflow-core.a",
+ "$(SRCROOT)/../../makefile/gen/lib/libtensorflow-core.a",
);
PRODUCT_BUNDLE_IDENTIFIER = "com.google.TF-Test";
PRODUCT_NAME = "$(TARGET_NAME)";
@@ -304,21 +292,21 @@
CODE_SIGN_IDENTITY = "iPhone Developer";
ENABLE_BITCODE = NO;
HEADER_SEARCH_PATHS = (
- "$(SRCROOT)/../../..",
- "$(SRCROOT)/../makefile/downloads/protobuf/src/",
- "$(SRCROOT)/../makefile/downloads",
- "$(SRCROOT)/../makefile/downloads/eigen-eigen-f3a13643ac1f",
- "$(SRCROOT)/../makefile/gen/proto",
+ "$(SRCROOT)/../../../..",
+ "$(SRCROOT)/../../makefile/downloads/protobuf/src/",
+ "$(SRCROOT)/../../makefile/downloads",
+ "$(SRCROOT)/../../makefile/downloads/eigen-eigen-f3a13643ac1f",
+ "$(SRCROOT)/../../makefile/gen/proto",
);
INFOPLIST_FILE = "$(SRCROOT)/RunModel-Info.plist";
LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks";
LIBRARY_SEARCH_PATHS = (
- "$(SRCROOT)/../makefile/gen/protobuf_ios/lib",
- "$(SRCROOT)/../makefile/gen/lib",
+ "$(SRCROOT)/../../makefile/gen/protobuf_ios/lib",
+ "$(SRCROOT)/../../makefile/gen/lib",
);
OTHER_LDFLAGS = (
"-force_load",
- "$(SRCROOT)/../makefile/gen/lib/libtensorflow-core.a",
+ "$(SRCROOT)/../../makefile/gen/lib/libtensorflow-core.a",
);
PRODUCT_BUNDLE_IDENTIFIER = "com.google.TF-Test";
PRODUCT_NAME = "$(TARGET_NAME)";
diff --git a/tensorflow/contrib/makefile/compile_ios_tensorflow.sh b/tensorflow/contrib/makefile/compile_ios_tensorflow.sh
index 2fc2ba8331..84fded9b85 100755
--- a/tensorflow/contrib/makefile/compile_ios_tensorflow.sh
+++ b/tensorflow/contrib/makefile/compile_ios_tensorflow.sh
@@ -8,7 +8,7 @@ LIB_PREFIX=libtensorflow-core
make -f tensorflow/contrib/makefile/Makefile cleantarget
make -f tensorflow/contrib/makefile/Makefile \
-TARGET=IOS IOS_ARCH=ARMV7 LIB_NAME=${LIB_PREFIX}-armv7.a OPTFLAGS="$1"
+TARGET=IOS IOS_ARCH=ARMV7 LIB_NAME=${LIB_PREFIX}-armv7.a OPTFLAGS="$1" $2 $3
if [ $? -ne 0 ]
then
echo "armv7 compilation failed."
@@ -17,7 +17,7 @@ fi
make -f tensorflow/contrib/makefile/Makefile cleantarget
make -f tensorflow/contrib/makefile/Makefile \
-TARGET=IOS IOS_ARCH=ARMV7S LIB_NAME=${LIB_PREFIX}-armv7s.a OPTFLAGS="$1"
+TARGET=IOS IOS_ARCH=ARMV7S LIB_NAME=${LIB_PREFIX}-armv7s.a OPTFLAGS="$1" $2 $3
if [ $? -ne 0 ]
then
echo "arm7vs compilation failed."
@@ -26,7 +26,7 @@ fi
make -f tensorflow/contrib/makefile/Makefile cleantarget
make -f tensorflow/contrib/makefile/Makefile \
-TARGET=IOS IOS_ARCH=ARM64 LIB_NAME=${LIB_PREFIX}-arm64.a OPTFLAGS="$1"
+TARGET=IOS IOS_ARCH=ARM64 LIB_NAME=${LIB_PREFIX}-arm64.a OPTFLAGS="$1" $2 $3
if [ $? -ne 0 ]
then
echo "arm64 compilation failed."
@@ -35,7 +35,7 @@ fi
make -f tensorflow/contrib/makefile/Makefile cleantarget
make -f tensorflow/contrib/makefile/Makefile \
-TARGET=IOS IOS_ARCH=I386 LIB_NAME=${LIB_PREFIX}-i386.a OPTFLAGS="$1"
+TARGET=IOS IOS_ARCH=I386 LIB_NAME=${LIB_PREFIX}-i386.a OPTFLAGS="$1" $2 $3
if [ $? -ne 0 ]
then
echo "i386 compilation failed."
@@ -44,7 +44,7 @@ fi
make -f tensorflow/contrib/makefile/Makefile cleantarget
make -f tensorflow/contrib/makefile/Makefile \
-TARGET=IOS IOS_ARCH=X86_64 LIB_NAME=${LIB_PREFIX}-x86_64.a OPTFLAGS="$1"
+TARGET=IOS IOS_ARCH=X86_64 LIB_NAME=${LIB_PREFIX}-x86_64.a OPTFLAGS="$1" $2 $3
if [ $? -ne 0 ]
then
echo "x86_64 compilation failed."