diff options
Diffstat (limited to 'tensorflow/examples')
51 files changed, 3795 insertions, 0 deletions
diff --git a/tensorflow/examples/android/AndroidManifest.xml b/tensorflow/examples/android/AndroidManifest.xml new file mode 100644 index 0000000000..fbbc74a678 --- /dev/null +++ b/tensorflow/examples/android/AndroidManifest.xml @@ -0,0 +1,46 @@ +<?xml version="1.0" encoding="UTF-8"?> +<!-- + Copyright 2014 The Android Open Source Project + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +--> + +<manifest xmlns:android="http://schemas.android.com/apk/res/android" + package="org.tensorflow.demo"> + + <uses-permission android:name="android.permission.CAMERA" /> + <uses-feature android:name="android.hardware.camera" /> + <uses-feature android:name="android.hardware.camera.autofocus" /> + <uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE"/> + + <uses-sdk + android:minSdkVersion="21" + android:targetSdkVersion="23" /> + + <application android:allowBackup="true" + android:debuggable="true" + android:label="@string/app_name" + android:icon="@drawable/ic_launcher" + android:theme="@style/MaterialTheme"> + + <activity android:name="org.tensorflow.demo.CameraActivity" + android:screenOrientation="portrait" + android:label="@string/app_name"> + <intent-filter> + <action android:name="android.intent.action.MAIN" /> + <category android:name="android.intent.category.LAUNCHER" /> + </intent-filter> + </activity> + </application> + +</manifest> diff --git a/tensorflow/examples/android/BUILD b/tensorflow/examples/android/BUILD new file mode 100644 index 0000000000..fb5bc8da71 
--- /dev/null +++ b/tensorflow/examples/android/BUILD @@ -0,0 +1,70 @@ +# Description: +# Tensorflow camera demo app for Android. + +package(default_visibility = ["//visibility:public"]) + +licenses(["notice"]) # Apache 2.0 + +exports_files(["LICENSE"]) + +cc_library( + name = "tensorflow_native_libs", + srcs = glob(["jni/**/*.cc"]), + hdrs = glob(["jni/**/*.h"]), + copts = [ + "-std=c++11", + "-mfpu=neon", + ], + linkopts = ["-llog -landroid -lm -ljnigraphics"], + tags = [ + "manual", + "notap", + ], + deps = [ + ":dummy_pthread", + "//tensorflow/core:android_tensorflow_lib", + ], +) + +# This library only exists as a workaround to satisfy dependencies +# that declare -lpthread in their linkopts. Although Android supports +# pthreads, it does not provide it as a separate library. +cc_library( + name = "dummy_pthread", + srcs = ["jni/libpthread.so"], +) + +android_binary( + name = "tensorflow_demo", + srcs = glob([ + "src/**/*.java", + ]), + assets = glob(["assets/**"]), + assets_dir = "assets", + custom_package = "org.tensorflow.demo", + inline_constants = 1, + legacy_native_support = 0, + manifest = "AndroidManifest.xml", + resource_files = glob(["res/**"]), + tags = [ + "manual", + "notap", + ], + deps = [ + ":tensorflow_native_libs", + ], +) + +filegroup( + name = "all_files", + srcs = glob( + ["**/*"], + exclude = [ + "**/METADATA", + "**/OWNERS", + "bin/**", + "gen/**", + ], + ), + visibility = ["//tensorflow:__subpackages__"], +) diff --git a/tensorflow/examples/android/README.md b/tensorflow/examples/android/README.md new file mode 100644 index 0000000000..8a6441581a --- /dev/null +++ b/tensorflow/examples/android/README.md @@ -0,0 +1,39 @@ +# Tensorflow Android Camera Demo + +This folder contains a simple camera-based demo application utilizing Tensorflow. + +## Description + +This demo uses a Google Inception model to classify camera frames in real-time, +displaying the top results in an overlay on the camera image. 
See +assets/imagenet_comp_graph_label_strings.txt for the possible classifications. + +## To build/install/run + +As a prerequisite, Bazel, the Android NDK, and the Android SDK must all be +installed on your system. The Android build tools may be obtained from: +https://developer.android.com/tools/revisions/build-tools.html + +The Android entries in [<workspace_root>/WORKSPACE](../../WORKSPACE) must be +uncommented with the paths filled in appropriately depending on where you +installed the NDK and SDK. Otherwise an error such as: +"The external label '//external:android/sdk' is not bound to anything" will +be reported. + + +To build the APK, run this from your workspace root: +``` +bazel build //tensorflow/examples/android:tensorflow_demo -c opt --copt=-mfpu=neon +``` +Note that "-c opt" is currently required; if not set, an assert (for an +otherwise non-problematic issue) in Eigen will halt the application during +execution. This issue will be corrected in an upcoming release. + +If adb debugging is enabled on your device, you may instead use the following +command from your workspace root to automatically build and install: +``` +bazel mobile-install //tensorflow/examples/android:tensorflow_demo -c opt --copt=-mfpu=neon +``` + +Add the "--start_app" flag if you wish to automatically start the app after +installing. Otherwise, find the application icon labeled "Tensorflow Demo". 
diff --git a/tensorflow/examples/android/__init__.py b/tensorflow/examples/android/__init__.py new file mode 100755 index 0000000000..e69de29bb2 --- /dev/null +++ b/tensorflow/examples/android/__init__.py diff --git a/tensorflow/examples/android/assets/imagenet_comp_graph_label_strings.txt b/tensorflow/examples/android/assets/imagenet_comp_graph_label_strings.txt new file mode 100644 index 0000000000..0ac5a169d9 --- /dev/null +++ b/tensorflow/examples/android/assets/imagenet_comp_graph_label_strings.txt @@ -0,0 +1,1001 @@ +dummy +kit fox +English setter +Siberian husky +Australian terrier +English springer +grey whale +lesser panda +Egyptian cat +ibex +Persian cat +cougar +gazelle +porcupine +sea lion +malamute +badger +Great Dane +Walker hound +Welsh springer spaniel +whippet +Scottish deerhound +killer whale +mink +African elephant +Weimaraner +soft-coated wheaten terrier +Dandie Dinmont +red wolf +Old English sheepdog +jaguar +otterhound +bloodhound +Airedale +hyena +meerkat +giant schnauzer +titi +three-toed sloth +sorrel +black-footed ferret +dalmatian +black-and-tan coonhound +papillon +skunk +Staffordshire bullterrier +Mexican hairless +Bouvier des Flandres +weasel +miniature poodle +Cardigan +malinois +bighorn +fox squirrel +colobus +tiger cat +Lhasa +impala +coyote +Yorkshire terrier +Newfoundland +brown bear +red fox +Norwegian elkhound +Rottweiler +hartebeest +Saluki +grey fox +schipperke +Pekinese +Brabancon griffon +West Highland white terrier +Sealyham terrier +guenon +mongoose +indri +tiger +Irish wolfhound +wild boar +EntleBucher +zebra +ram +French bulldog +orangutan +basenji +leopard +Bernese mountain dog +Maltese dog +Norfolk terrier +toy terrier +vizsla +cairn +squirrel monkey +groenendael +clumber +Siamese cat +chimpanzee +komondor +Afghan hound +Japanese spaniel +proboscis monkey +guinea pig +white wolf +ice bear +gorilla +borzoi +toy poodle +Kerry blue terrier +ox +Scotch terrier +Tibetan mastiff +spider monkey +Doberman +Boston bull +Greater 
Swiss Mountain dog +Appenzeller +Shih-Tzu +Irish water spaniel +Pomeranian +Bedlington terrier +warthog +Arabian camel +siamang +miniature schnauzer +collie +golden retriever +Irish terrier +affenpinscher +Border collie +hare +boxer +silky terrier +beagle +Leonberg +German short-haired pointer +patas +dhole +baboon +macaque +Chesapeake Bay retriever +bull mastiff +kuvasz +capuchin +pug +curly-coated retriever +Norwich terrier +flat-coated retriever +hog +keeshond +Eskimo dog +Brittany spaniel +standard poodle +Lakeland terrier +snow leopard +Gordon setter +dingo +standard schnauzer +hamster +Tibetan terrier +Arctic fox +wire-haired fox terrier +basset +water buffalo +American black bear +Angora +bison +howler monkey +hippopotamus +chow +giant panda +American Staffordshire terrier +Shetland sheepdog +Great Pyrenees +Chihuahua +tabby +marmoset +Labrador retriever +Saint Bernard +armadillo +Samoyed +bluetick +redbone +polecat +marmot +kelpie +gibbon +llama +miniature pinscher +wood rabbit +Italian greyhound +lion +cocker spaniel +Irish setter +dugong +Indian elephant +beaver +Sussex spaniel +Pembroke +Blenheim spaniel +Madagascar cat +Rhodesian ridgeback +lynx +African hunting dog +langur +Ibizan hound +timber wolf +cheetah +English foxhound +briard +sloth bear +Border terrier +German shepherd +otter +koala +tusker +echidna +wallaby +platypus +wombat +revolver +umbrella +schooner +soccer ball +accordion +ant +starfish +chambered nautilus +grand piano +laptop +strawberry +airliner +warplane +airship +balloon +space shuttle +fireboat +gondola +speedboat +lifeboat +canoe +yawl +catamaran +trimaran +container ship +liner +pirate +aircraft carrier +submarine +wreck +half track +tank +missile +bobsled +dogsled +bicycle-built-for-two +mountain bike +freight car +passenger car +barrow +shopping cart +motor scooter +forklift +electric locomotive +steam locomotive +amphibian +ambulance +beach wagon +cab +convertible +jeep +limousine +minivan +Model T +racer +sports car +go-kart 
+golfcart +moped +snowplow +fire engine +garbage truck +pickup +tow truck +trailer truck +moving van +police van +recreational vehicle +streetcar +snowmobile +tractor +mobile home +tricycle +unicycle +horse cart +jinrikisha +oxcart +bassinet +cradle +crib +four-poster +bookcase +china cabinet +medicine chest +chiffonier +table lamp +file +park bench +barber chair +throne +folding chair +rocking chair +studio couch +toilet seat +desk +pool table +dining table +entertainment center +wardrobe +Granny Smith +orange +lemon +fig +pineapple +banana +jackfruit +custard apple +pomegranate +acorn +hip +ear +rapeseed +corn +buckeye +organ +upright +chime +drum +gong +maraca +marimba +steel drum +banjo +cello +violin +harp +acoustic guitar +electric guitar +cornet +French horn +trombone +harmonica +ocarina +panpipe +bassoon +oboe +sax +flute +daisy +yellow lady's slipper +cliff +valley +alp +volcano +promontory +sandbar +coral reef +lakeside +seashore +geyser +hatchet +cleaver +letter opener +plane +power drill +lawn mower +hammer +corkscrew +can opener +plunger +screwdriver +shovel +plow +chain saw +cock +hen +ostrich +brambling +goldfinch +house finch +junco +indigo bunting +robin +bulbul +jay +magpie +chickadee +water ouzel +kite +bald eagle +vulture +great grey owl +black grouse +ptarmigan +ruffed grouse +prairie chicken +peacock +quail +partridge +African grey +macaw +sulphur-crested cockatoo +lorikeet +coucal +bee eater +hornbill +hummingbird +jacamar +toucan +drake +red-breasted merganser +goose +black swan +white stork +black stork +spoonbill +flamingo +American egret +little blue heron +bittern +crane +limpkin +American coot +bustard +ruddy turnstone +red-backed sandpiper +redshank +dowitcher +oystercatcher +European gallinule +pelican +king penguin +albatross +great white shark +tiger shark +hammerhead +electric ray +stingray +barracouta +coho +tench +goldfish +eel +rock beauty +anemone fish +lionfish +puffer +sturgeon +gar +loggerhead +leatherback turtle +mud turtle 
+terrapin +box turtle +banded gecko +common iguana +American chameleon +whiptail +agama +frilled lizard +alligator lizard +Gila monster +green lizard +African chameleon +Komodo dragon +triceratops +African crocodile +American alligator +thunder snake +ringneck snake +hognose snake +green snake +king snake +garter snake +water snake +vine snake +night snake +boa constrictor +rock python +Indian cobra +green mamba +sea snake +horned viper +diamondback +sidewinder +European fire salamander +common newt +eft +spotted salamander +axolotl +bullfrog +tree frog +tailed frog +whistle +wing +paintbrush +hand blower +oxygen mask +snorkel +loudspeaker +microphone +screen +mouse +electric fan +oil filter +strainer +space heater +stove +guillotine +barometer +rule +odometer +scale +analog clock +digital clock +wall clock +hourglass +sundial +parking meter +stopwatch +digital watch +stethoscope +syringe +magnetic compass +binoculars +projector +sunglasses +loupe +radio telescope +bow +cannon [ground] +assault rifle +rifle +projectile +computer keyboard +typewriter keyboard +crane +lighter +abacus +cash machine +slide rule +desktop computer +hand-held computer +notebook +web site +harvester +thresher +printer +slot +vending machine +sewing machine +joystick +switch +hook +car wheel +paddlewheel +pinwheel +potter's wheel +gas pump +carousel +swing +reel +radiator +puck +hard disc +sunglass +pick +car mirror +solar dish +remote control +disk brake +buckle +hair slide +knot +combination lock +padlock +nail +safety pin +screw +muzzle +seat belt +ski +candle +jack-o'-lantern +spotlight +torch +neck brace +pier +tripod +maypole +mousetrap +spider web +trilobite +harvestman +scorpion +black and gold garden spider +barn spider +garden spider +black widow +tarantula +wolf spider +tick +centipede +isopod +Dungeness crab +rock crab +fiddler crab +king crab +American lobster +spiny lobster +crayfish +hermit crab +tiger beetle +ladybug +ground beetle +long-horned beetle +leaf beetle +dung 
beetle +rhinoceros beetle +weevil +fly +bee +grasshopper +cricket +walking stick +cockroach +mantis +cicada +leafhopper +lacewing +dragonfly +damselfly +admiral +ringlet +monarch +cabbage butterfly +sulphur butterfly +lycaenid +jellyfish +sea anemone +brain coral +flatworm +nematode +conch +snail +slug +sea slug +chiton +sea urchin +sea cucumber +iron +espresso maker +microwave +Dutch oven +rotisserie +toaster +waffle iron +vacuum +dishwasher +refrigerator +washer +Crock Pot +frying pan +wok +caldron +coffeepot +teapot +spatula +altar +triumphal arch +patio +steel arch bridge +suspension bridge +viaduct +barn +greenhouse +palace +monastery +library +apiary +boathouse +church +mosque +stupa +planetarium +restaurant +cinema +home theater +lumbermill +coil +obelisk +totem pole +castle +prison +grocery store +bakery +barbershop +bookshop +butcher shop +confectionery +shoe shop +tobacco shop +toyshop +fountain +cliff dwelling +yurt +dock +brass +megalith +bannister +breakwater +dam +chainlink fence +picket fence +worm fence +stone wall +grille +sliding door +turnstile +mountain tent +scoreboard +honeycomb +plate rack +pedestal +beacon +mashed potato +bell pepper +head cabbage +broccoli +cauliflower +zucchini +spaghetti squash +acorn squash +butternut squash +cucumber +artichoke +cardoon +mushroom +shower curtain +jean +carton +handkerchief +sandal +ashcan +safe +plate +necklace +croquet ball +fur coat +thimble +pajama +running shoe +cocktail shaker +chest +manhole cover +modem +tub +tray +balance beam +bagel +prayer rug +kimono +hot pot +whiskey jug +knee pad +book jacket +spindle +ski mask +beer bottle +crash helmet +bottlecap +tile roof +mask +maillot +Petri dish +football helmet +bathing cap +teddy bear +holster +pop bottle +photocopier +vestment +crossword puzzle +golf ball +trifle +suit +water tower +feather boa +cloak +red wine +drumstick +shield +Christmas stocking +hoopskirt +menu +stage +bonnet +meat loaf +baseball +face powder +scabbard +sunscreen +beer glass 
+hen-of-the-woods +guacamole +lampshade +wool +hay +bow tie +mailbag +water jug +bucket +dishrag +soup bowl +eggnog +mortar +trench coat +paddle +chain +swab +mixing bowl +potpie +wine bottle +shoji +bulletproof vest +drilling platform +binder +cardigan +sweatshirt +pot +birdhouse +hamper +ping-pong ball +pencil box +pay-phone +consomme +apron +punching bag +backpack +groom +bearskin +pencil sharpener +broom +mosquito net +abaya +mortarboard +poncho +crutch +Polaroid camera +space bar +cup +racket +traffic light +quill +radio +dough +cuirass +military uniform +lipstick +shower cap +monitor +oscilloscope +mitten +brassiere +French loaf +vase +milk can +rugby ball +paper towel +earthstar +envelope +miniskirt +cowboy hat +trolleybus +perfume +bathtub +hotdog +coral fungus +bullet train +pillow +toilet tissue +cassette +carpenter's kit +ladle +stinkhorn +lotion +hair spray +academic gown +dome +crate +wig +burrito +pill bottle +chain mail +theater curtain +window shade +barrel +washbasin +ballpoint +basketball +bath towel +cowboy boot +gown +window screen +agaric +cellular telephone +nipple +barbell +mailbox +lab coat +fire screen +minibus +packet +maze +pole +horizontal bar +sombrero +pickelhaube +rain barrel +wallet +cassette player +comic book +piggy bank +street sign +bell cote +fountain pen +Windsor tie +volleyball +overskirt +sarong +purse +bolo tie +bib +parachute +sleeping bag +television +swimming trunks +measuring cup +espresso +pizza +breastplate +shopping basket +wooden spoon +saltshaker +chocolate sauce +ballplayer +goblet +gyromitra +stretcher +water bottle +dial telephone +soap dispenser +jersey +school bus +jigsaw puzzle +plastic bag +reflex camera +diaper +Band Aid +ice lolly +velvet +tennis ball +gasmask +doormat +Loafer +ice cream +pretzel +quilt +maillot +tape player +clog +iPod +bolete +scuba diver +pitcher +matchstick +bikini +sock +CD player +lens cap +thatch +vault +beaker +bubble +cheeseburger +parallel bars +flagpole +coffee mug +rubber eraser 
+stole +carbonara +dumbbell
\ No newline at end of file diff --git a/tensorflow/examples/android/jni/__init__.py b/tensorflow/examples/android/jni/__init__.py new file mode 100755 index 0000000000..e69de29bb2 --- /dev/null +++ b/tensorflow/examples/android/jni/__init__.py diff --git a/tensorflow/examples/android/jni/imageutils_jni.cc b/tensorflow/examples/android/jni/imageutils_jni.cc new file mode 100644 index 0000000000..a1f88fb867 --- /dev/null +++ b/tensorflow/examples/android/jni/imageutils_jni.cc @@ -0,0 +1,122 @@ +// This file binds the native image utility code to the Java class +// which exposes them. + +#include <jni.h> +#include <stdio.h> +#include <stdlib.h> + +#include "tensorflow/core/platform/port.h" +#include "tensorflow/examples/android/jni/rgb2yuv.h" +#include "tensorflow/examples/android/jni/yuv2rgb.h" + +#define IMAGEUTILS_METHOD(METHOD_NAME) \ + Java_org_tensorflow_demo_env_ImageUtils_##METHOD_NAME // NOLINT + +using namespace tensorflow; + +#ifdef __cplusplus +extern "C" { +#endif + +JNIEXPORT void JNICALL +IMAGEUTILS_METHOD(convertYUV420SPToARGB8888)( + JNIEnv* env, jclass clazz, jbyteArray input, jintArray output, + jint width, jint height, jboolean halfSize); + +JNIEXPORT void JNICALL +IMAGEUTILS_METHOD(convertYUV420SPToRGB565)( + JNIEnv* env, jclass clazz, jbyteArray input, jbyteArray output, + jint width, jint height); + +JNIEXPORT void JNICALL +IMAGEUTILS_METHOD(convertARGB8888ToYUV420SP)( + JNIEnv* env, jclass clazz, jintArray input, jbyteArray output, + jint width, jint height); + +JNIEXPORT void JNICALL +IMAGEUTILS_METHOD(convertRGB565ToYUV420SP)( + JNIEnv* env, jclass clazz, jbyteArray input, jbyteArray output, + jint width, jint height); + +#ifdef __cplusplus +} +#endif + +JNIEXPORT void JNICALL +IMAGEUTILS_METHOD(convertYUV420SPToARGB8888)( + JNIEnv* env, jclass clazz, jbyteArray input, jintArray output, + jint width, jint height, jboolean halfSize) { + jboolean inputCopy = JNI_FALSE; + jbyte* const i = env->GetByteArrayElements(input, &inputCopy); + + 
jboolean outputCopy = JNI_FALSE; + jint* const o = env->GetIntArrayElements(output, &outputCopy); + + if (halfSize) { + ConvertYUV420SPToARGB8888HalfSize(reinterpret_cast<uint8*>(i), + reinterpret_cast<uint32*>(o), + width, height); + } else { + ConvertYUV420SPToARGB8888(reinterpret_cast<uint8*>(i), + reinterpret_cast<uint8*>(i) + width * height, + reinterpret_cast<uint32*>(o), + width, height); + } + + env->ReleaseByteArrayElements(input, i, JNI_ABORT); + env->ReleaseIntArrayElements(output, o, 0); +} + +JNIEXPORT void JNICALL +IMAGEUTILS_METHOD(convertYUV420SPToRGB565)( + JNIEnv* env, jclass clazz, jbyteArray input, jbyteArray output, + jint width, jint height) { + jboolean inputCopy = JNI_FALSE; + jbyte* const i = env->GetByteArrayElements(input, &inputCopy); + + jboolean outputCopy = JNI_FALSE; + jbyte* const o = env->GetByteArrayElements(output, &outputCopy); + + ConvertYUV420SPToRGB565(reinterpret_cast<uint8*>(i), + reinterpret_cast<uint16*>(o), + width, height); + + env->ReleaseByteArrayElements(input, i, JNI_ABORT); + env->ReleaseByteArrayElements(output, o, 0); +} + +JNIEXPORT void JNICALL +IMAGEUTILS_METHOD(convertARGB8888ToYUV420SP)( + JNIEnv* env, jclass clazz, jintArray input, jbyteArray output, + jint width, jint height) { + jboolean inputCopy = JNI_FALSE; + jint* const i = env->GetIntArrayElements(input, &inputCopy); + + jboolean outputCopy = JNI_FALSE; + jbyte* const o = env->GetByteArrayElements(output, &outputCopy); + + ConvertARGB8888ToYUV420SP(reinterpret_cast<uint32*>(i), + reinterpret_cast<uint8*>(o), + width, height); + + env->ReleaseIntArrayElements(input, i, JNI_ABORT); + env->ReleaseByteArrayElements(output, o, 0); +} + +JNIEXPORT void JNICALL +IMAGEUTILS_METHOD(convertRGB565ToYUV420SP)( + JNIEnv* env, jclass clazz, jbyteArray input, jbyteArray output, + jint width, jint height) { + jboolean inputCopy = JNI_FALSE; + jbyte* const i = env->GetByteArrayElements(input, &inputCopy); + + jboolean outputCopy = JNI_FALSE; + jbyte* const o = 
env->GetByteArrayElements(output, &outputCopy); + + ConvertRGB565ToYUV420SP(reinterpret_cast<uint16*>(i), + reinterpret_cast<uint8*>(o), + width, height); + + env->ReleaseByteArrayElements(input, i, JNI_ABORT); + env->ReleaseByteArrayElements(output, o, 0); +} diff --git a/tensorflow/examples/android/jni/jni_utils.cc b/tensorflow/examples/android/jni/jni_utils.cc new file mode 100644 index 0000000000..3fffc19cb6 --- /dev/null +++ b/tensorflow/examples/android/jni/jni_utils.cc @@ -0,0 +1,144 @@ +#include "tensorflow/examples/android/jni/jni_utils.h" + +#include <android/asset_manager.h> +#include <android/asset_manager_jni.h> +#include <jni.h> +#include <stdlib.h> + +#include <string> +#include <vector> +#include <fstream> +#include <sstream> + +#include "tensorflow/core/platform/logging.h" +#include "google/protobuf/src/google/protobuf/io/zero_copy_stream_impl.h" +#include "google/protobuf/src/google/protobuf/io/zero_copy_stream_impl_lite.h" +#include "google/protobuf/src/google/protobuf/io/coded_stream.h" +#include "google/protobuf/src/google/protobuf/message_lite.h" + +static const char* const ASSET_PREFIX = "file:///android_asset/"; + +namespace { +class IfstreamInputStream : public ::google::protobuf::io::CopyingInputStream { + public: + explicit IfstreamInputStream(const std::string& file_name) + : ifs_(file_name.c_str(), std::ios::in | std::ios::binary) {} + ~IfstreamInputStream() { ifs_.close(); } + + int Read(void* buffer, int size) { + if (!ifs_) { + return -1; + } + ifs_.read(static_cast<char*>(buffer), size); + return ifs_.gcount(); + } + + private: + std::ifstream ifs_; +}; +} // namespace + +bool PortableReadFileToProto(const std::string& file_name, + ::google::protobuf::MessageLite* proto) { + ::google::protobuf::io::CopyingInputStreamAdaptor stream( + new IfstreamInputStream(file_name)); + stream.SetOwnsCopyingStream(true); + // TODO(jiayq): the following coded stream is for debugging purposes to allow + // one to parse arbitrarily large messages for 
MessageLite. One most likely + // doesn't want to put protobufs larger than 64MB on Android, so we should + // eventually remove this and quit loud when a large protobuf is passed in. + ::google::protobuf::io::CodedInputStream coded_stream(&stream); + // Total bytes hard limit / warning limit are set to 1GB and 512MB + // respectively. + coded_stream.SetTotalBytesLimit(1024LL << 20, 512LL << 20); + return proto->ParseFromCodedStream(&coded_stream); +} + +bool IsAsset(const char* const filename) { + return strstr(filename, ASSET_PREFIX) == filename; +} + +void ReadFileToProto(AAssetManager* const asset_manager, + const char* const filename, + google::protobuf::MessageLite* message) { + if (!IsAsset(filename)) { + VLOG(0) << "Opening file: " << filename; + CHECK(PortableReadFileToProto(filename, message)); + return; + } + + CHECK_NOTNULL(asset_manager); + + const char* const asset_filename = filename + strlen(ASSET_PREFIX); + AAsset* asset = AAssetManager_open(asset_manager, + asset_filename, + AASSET_MODE_STREAMING); + CHECK_NOTNULL(asset); + + off_t start; + off_t length; + const int fd = AAsset_openFileDescriptor(asset, &start, &length); + + if (fd >= 0) { + // If it has a file descriptor that means it can be memmapped directly + // from the APK. + VLOG(0) << "Opening asset " << asset_filename + << " from disk with zero-copy."; + google::protobuf::io::FileInputStream is(fd); + google::protobuf::io::LimitingInputStream lis(&is, start + length); + lis.Skip(start); + CHECK(message->ParseFromZeroCopyStream(&lis)); + is.Close(); + } else { + // It may be compressed, in which case we have to uncompress + // it to memory first. 
+ VLOG(0) << "Opening asset " << asset_filename + << " from disk with copy."; + const off_t data_size = AAsset_getLength(asset); + const void* const memory = AAsset_getBuffer(asset); + CHECK(message->ParseFromArray(memory, data_size)); + } + AAsset_close(asset); +} + +void ReadFileToString(AAssetManager* const asset_manager, + const char* const filename, std::string* str) { + if (!IsAsset(filename)) { + VLOG(0) << "Opening file: " << filename; + std::ifstream t(filename); + std::string tmp((std::istreambuf_iterator<char>(t)), + std::istreambuf_iterator<char>()); + tmp.swap(*str); + t.close(); + return; + } + + CHECK_NOTNULL(asset_manager); + const char* const asset_filename = filename + strlen(ASSET_PREFIX); + AAsset* asset = AAssetManager_open(asset_manager, + asset_filename, + AASSET_MODE_STREAMING); + CHECK_NOTNULL(asset); + VLOG(0) << "Opening asset " << asset_filename << " from disk with copy."; + const off_t data_size = AAsset_getLength(asset); + const char* memory = reinterpret_cast<const char*>(AAsset_getBuffer(asset)); + + std::string tmp(memory, memory + data_size); + tmp.swap(*str); + AAsset_close(asset); +} + +void ReadFileToVector(AAssetManager* const asset_manager, + const char* const filename, + std::vector<std::string>* str_vector) { + std::string labels_string; + ReadFileToString(asset_manager, filename, &labels_string); + std::istringstream ifs(labels_string); + str_vector->clear(); + std::string label; + while (std::getline(ifs, label)) { + str_vector->push_back(label); + } + VLOG(0) << "Read " << str_vector->size() << " values from " << filename; +} + diff --git a/tensorflow/examples/android/jni/jni_utils.h b/tensorflow/examples/android/jni/jni_utils.h new file mode 100644 index 0000000000..9bd8d2c21f --- /dev/null +++ b/tensorflow/examples/android/jni/jni_utils.h @@ -0,0 +1,30 @@ +#ifndef ORG_TENSORFLOW_JNI_JNI_UTILS_H_ // NOLINT +#define ORG_TENSORFLOW_JNI_JNI_UTILS_H_ // NOLINT + +#include <jni.h> +#include <string> +#include <vector> + 
+#include "tensorflow/core/platform/port.h" + +namespace google { +namespace protobuf { +class MessageLite; +} // google +} // protobuf + +class AAssetManager; + +bool PortableReadFileToProto(const std::string& file_name, + ::google::protobuf::MessageLite* proto); + +void ReadFileToProto(AAssetManager* const asset_manager, + const char* const filename, google::protobuf::MessageLite* message); + +void ReadFileToString(AAssetManager* const asset_manager, + const char* const filename, std::string* str); + +void ReadFileToVector(AAssetManager* const asset_manager, + const char* const filename, std::vector<std::string>* str_vector); + +#endif // ORG_TENSORFLOW_JNI_JNI_UTILS_H_ diff --git a/tensorflow/examples/android/jni/libpthread.so b/tensorflow/examples/android/jni/libpthread.so Binary files differnew file mode 100755 index 0000000000..7992d0de4c --- /dev/null +++ b/tensorflow/examples/android/jni/libpthread.so diff --git a/tensorflow/examples/android/jni/rgb2yuv.cc b/tensorflow/examples/android/jni/rgb2yuv.cc new file mode 100755 index 0000000000..428f311eb8 --- /dev/null +++ b/tensorflow/examples/android/jni/rgb2yuv.cc @@ -0,0 +1,89 @@ +// These utility functions allow for the conversion of RGB data to YUV data. + +#include "tensorflow/examples/android/jni/rgb2yuv.h" + +#include "tensorflow/core/platform/port.h" + +using namespace tensorflow; + +static inline void WriteYUV(const int x, const int y, const int width, + const int r8, const int g8, const int b8, + uint8* const pY, + uint8* const pUV) { + // Using formulas from http://msdn.microsoft.com/en-us/library/ms893078 + *pY = ((66 * r8 + 129 * g8 + 25 * b8 + 128) >> 8) + 16; + + // Odd widths get rounded up so that UV blocks on the side don't get cut off. + const int blocks_per_row = (width + 1) / 2; + + // 2 bytes per UV block + const int offset = 2 * (((y / 2) * blocks_per_row + (x / 2))); + + // U and V are the average values of all 4 pixels in the block. 
+ if (!(x & 1) && !(y & 1)) { + // Explicitly clear the block if this is the first pixel in it. + pUV[offset] = 0; + pUV[offset + 1] = 0; + } + + // V (with divide by 4 factored in) +#ifdef __APPLE__ + const int u_offset = 0; + const int v_offset = 1; +#else + const int u_offset = 1; + const int v_offset = 0; +#endif + pUV[offset + v_offset] += ((112 * r8 - 94 * g8 - 18 * b8 + 128) >> 10) + 32; + + // U (with divide by 4 factored in) + pUV[offset + u_offset] += ((-38 * r8 - 74 * g8 + 112 * b8 + 128) >> 10) + 32; +} + +void ConvertARGB8888ToYUV420SP(const uint32* const input, uint8* const output, + int width, int height) { + uint8* pY = output; + uint8* pUV = output + (width * height); + const uint32* in = input; + + for (int y = 0; y < height; y++) { + for (int x = 0; x < width; x++) { + const uint32 rgb = *in++; +#ifdef __APPLE__ + const int nB = (rgb >> 8) & 0xFF; + const int nG = (rgb >> 16) & 0xFF; + const int nR = (rgb >> 24) & 0xFF; +#else + const int nR = (rgb >> 16) & 0xFF; + const int nG = (rgb >> 8) & 0xFF; + const int nB = rgb & 0xFF; +#endif + WriteYUV(x, y, width, nR, nG, nB, pY++, pUV); + } + } +} + +void ConvertRGB565ToYUV420SP(const uint16* const input, uint8* const output, + const int width, const int height) { + uint8* pY = output; + uint8* pUV = output + (width * height); + const uint16* in = input; + + for (int y = 0; y < height; y++) { + for (int x = 0; x < width; x++) { + const uint32 rgb = *in++; + + const int r5 = ((rgb >> 11) & 0x1F); + const int g6 = ((rgb >> 5) & 0x3F); + const int b5 = (rgb & 0x1F); + + // Shift left, then fill in the empty low bits with a copy of the high + // bits so we can stretch across the entire 0 - 255 range. 
+ const int r8 = r5 << 3 | r5 >> 2; + const int g8 = g6 << 2 | g6 >> 4; + const int b8 = b5 << 3 | b5 >> 2; + + WriteYUV(x, y, width, r8, g8, b8, pY++, pUV); + } + } +} diff --git a/tensorflow/examples/android/jni/rgb2yuv.h b/tensorflow/examples/android/jni/rgb2yuv.h new file mode 100755 index 0000000000..e5eb5aa419 --- /dev/null +++ b/tensorflow/examples/android/jni/rgb2yuv.h @@ -0,0 +1,23 @@ +#ifndef ORG_TENSORFLOW_JNI_IMAGEUTILS_RGB2YUV_H_ +#define ORG_TENSORFLOW_JNI_IMAGEUTILS_RGB2YUV_H_ + +#include "tensorflow/core/platform/port.h" + +using namespace tensorflow; + +#ifdef __cplusplus +extern "C" { +#endif + +void ConvertARGB8888ToYUV420SP(const uint32* const input, uint8* const output, + int width, int height); + +void ConvertRGB565ToYUV420SP(const uint16* const input, + uint8* const output, + const int width, const int height); + +#ifdef __cplusplus +} +#endif + +#endif // ORG_TENSORFLOW_JNI_IMAGEUTILS_RGB2YUV_H_ diff --git a/tensorflow/examples/android/jni/tensorflow_jni.cc b/tensorflow/examples/android/jni/tensorflow_jni.cc new file mode 100644 index 0000000000..39d0bb1249 --- /dev/null +++ b/tensorflow/examples/android/jni/tensorflow_jni.cc @@ -0,0 +1,253 @@ +#include "tensorflow/examples/android/jni/tensorflow_jni.h" + +#include <android/asset_manager.h> +#include <android/asset_manager_jni.h> +#include <android/bitmap.h> + +#include <jni.h> +#include <pthread.h> +#include <unistd.h> +#include <queue> +#include <sstream> +#include <string> + +#include "tensorflow/core/framework/types.pb.h" +#include "tensorflow/core/platform/logging.h" +#include "tensorflow/core/platform/port.h" +#include "tensorflow/core/public/env.h" +#include "tensorflow/core/public/session.h" +#include "tensorflow/core/public/tensor.h" +#include "tensorflow/examples/android/jni/jni_utils.h" + +// Global variables that holds the Tensorflow classifier. 
+// Owned globally; created once in initializeTensorflow() and reused by every
+// subsequent classify call.
+static std::unique_ptr<tensorflow::Session> session;
+
+static std::vector<std::string> g_label_strings;
+static bool g_compute_graph_initialized = false;
+//static mutex g_compute_graph_mutex(base::LINKER_INITIALIZED);
+
+static int g_tensorflow_input_size;  // The image size for the mognet input.
+static int g_image_mean;             // The image mean.
+
+using namespace tensorflow;
+
+// Loads the model graph and the label list from APK assets and creates the
+// global TensorFlow session.  Returns 0 on success (including when the graph
+// was already initialized) and -1 if the session could not be created.
+JNIEXPORT jint JNICALL
+TENSORFLOW_METHOD(initializeTensorflow)(
+    JNIEnv* env, jobject thiz, jobject java_asset_manager,
+    jstring model, jstring labels,
+    jint num_classes, jint mognet_input_size, jint image_mean) {
+  //MutexLock input_lock(&g_compute_graph_mutex);
+  if (g_compute_graph_initialized) {
+    LOG(INFO) << "Compute graph already loaded. skipping.";
+    return 0;
+  }
+
+  // NOTE(review): these UTF strings are obtained with GetStringUTFChars but
+  // never released via ReleaseStringUTFChars on any path below — confirm.
+  const char* const model_cstr = env->GetStringUTFChars(model, NULL);
+  const char* const labels_cstr = env->GetStringUTFChars(labels, NULL);
+
+  g_tensorflow_input_size = mognet_input_size;
+  g_image_mean = image_mean;
+
+  LOG(INFO) << "Loading Tensorflow.";
+
+  LOG(INFO) << "Making new SessionOptions.";
+  tensorflow::SessionOptions options;
+  tensorflow::ConfigProto& config = options.config;
+  LOG(INFO) << "Got config, " << config.device_count_size() << " devices";
+
+  session.reset(tensorflow::NewSession(options));
+  LOG(INFO) << "Session created.";
+
+  tensorflow::GraphDef tensorflow_graph;
+  LOG(INFO) << "Graph created.";
+
+  AAssetManager* const asset_manager =
+      AAssetManager_fromJava(env, java_asset_manager);
+  LOG(INFO) << "Acquired AssetManager.";
+
+  LOG(INFO) << "Reading file to proto: " << model_cstr;
+  ReadFileToProto(asset_manager, model_cstr, &tensorflow_graph);
+
+  LOG(INFO) << "Creating session.";
+  tensorflow::Status s = session->Create(tensorflow_graph);
+  if (!s.ok()) {
+    LOG(ERROR) << "Could not create Tensorflow Graph: " << s;
+    return -1;
+  }
+
+  // Clear the proto to save memory space.
+  tensorflow_graph.Clear();
+  LOG(INFO) << "Tensorflow graph loaded from: " << model_cstr;
+
+  // Read the label list
+  ReadFileToVector(asset_manager, labels_cstr, &g_label_strings);
+  LOG(INFO) << g_label_strings.size() << " label strings loaded from: "
+            << labels_cstr;
+  g_compute_graph_initialized = true;
+
+  return 0;
+}
+
+namespace {
+// One pixel as laid out in memory by classifyImageRgb/classifyImageBmp input.
+// NOTE(review): field order assumes a little-endian packing of the incoming
+// int pixels — confirm against the Java-side pixel format.
+typedef struct {
+  uint8 red;
+  uint8 green;
+  uint8 blue;
+  uint8 alpha;
+} RGBA;
+}  // namespace
+
+// Returns the top N confidence values over threshold in the provided vector,
+// sorted by confidence in descending order.
+static void GetTopN(
+    const Eigen::TensorMap<Eigen::Tensor<float, 1, Eigen::RowMajor>,
+                           Eigen::Aligned>& prediction,
+    const int num_results, const float threshold,
+    std::vector<std::pair<float, int> >* top_results) {
+  // Will contain top N results in ascending order.
+  std::priority_queue<std::pair<float, int>,
+                      std::vector<std::pair<float, int> >,
+                      std::greater<std::pair<float, int> > > top_result_pq;
+
+  const int count = prediction.size();
+  for (int i = 0; i < count; ++i) {
+    const float value = prediction(i);
+
+    // Only add it if it beats the threshold and has a chance at being in
+    // the top N.
+    if (value < threshold) {
+      continue;
+    }
+
+    top_result_pq.push(std::pair<float, int>(value, i));
+
+    // If at capacity, kick the smallest value out.
+    // (size() is unsigned vs. the int num_results; safe as long as
+    // num_results is positive, which the sole caller guarantees.)
+    if (top_result_pq.size() > num_results) {
+      top_result_pq.pop();
+    }
+  }
+
+  // Copy to output vector and reverse into descending order.
+  while (!top_result_pq.empty()) {
+    top_results->push_back(top_result_pq.top());
+    top_result_pq.pop();
+  }
+  std::reverse(top_results->begin(), top_results->end());
+}
+
+// Runs one inference over an input_size x input_size RGBA image and returns
+// the top predictions formatted as "<index> <confidence> <label>\n" lines.
+static std::string ClassifyImage(const RGBA* const bitmap_src,
+                                 const int in_stride,
+                                 const int width, const int height) {
+  // Create input tensor
+  tensorflow::Tensor input_tensor(
+      tensorflow::DT_FLOAT,
+      tensorflow::TensorShape({
+          1, g_tensorflow_input_size, g_tensorflow_input_size, 3}));
+
+  auto input_tensor_mapped = input_tensor.tensor<float, 4>();
+
+  LOG(INFO) << "Tensorflow: Copying Data.";
+  for (int i = 0; i < g_tensorflow_input_size; ++i) {
+    // NOTE(review): in_stride (and width/height) are never used below; the
+    // row pitch is assumed to be exactly g_tensorflow_input_size pixels, so
+    // bitmaps whose stride or width differs will be sampled incorrectly.
+    const RGBA* src = bitmap_src + i * g_tensorflow_input_size;
+    for (int j = 0; j < g_tensorflow_input_size; ++j) {
+      // Copy 3 values, normalizing each channel by the dataset mean.
+      input_tensor_mapped(0, i, j, 0) =
+          static_cast<float>(src->red) - g_image_mean;
+      input_tensor_mapped(0, i, j, 1) =
+          static_cast<float>(src->green) - g_image_mean;
+      input_tensor_mapped(0, i, j, 2) =
+          static_cast<float>(src->blue) - g_image_mean;
+      ++src;
+    }
+  }
+
+  std::vector<std::pair<std::string, tensorflow::Tensor> > input_tensors(
+      {{"input:0", input_tensor}});
+
+  VLOG(0) << "Start computing.";
+  std::vector<tensorflow::Tensor> output_tensors;
+  std::vector<std::string> output_names({"output:0"});
+
+  tensorflow::Status s =
+      session->Run(input_tensors, output_names, {}, &output_tensors);
+  VLOG(0) << "End computing.";
+
+  if (!s.ok()) {
+    LOG(ERROR) << "Error during inference: " << s;
+    return "";
+  }
+
+  VLOG(0) << "Reading from layer " << output_names[0];
+  tensorflow::Tensor* output = &output_tensors[0];
+  const int kNumResults = 5;
+  const float kThreshold = 0.1f;
+  std::vector<std::pair<float, int> > top_results;
+  GetTopN(output->flat<float>(), kNumResults, kThreshold, &top_results);
+
+  std::stringstream ss;
+  ss.precision(3);
+  for (const auto& result : top_results) {
+    const float confidence = result.first;
+    const int index = result.second;
+
+    ss << index << " " << confidence << " ";
+
+    // Write out the result as a string
+    if (index < g_label_strings.size()) {
+      // just for safety: theoretically, the output is under 1000 unless there
+      // is some numerical issues leading to a wrong prediction.
+      ss << g_label_strings[index];
+    } else {
+      ss << "Prediction: " << index;
+    }
+
+    ss << "\n";
+  }
+
+  LOG(INFO) << "Predictions: " << ss.str();
+  return ss.str();
+}
+
+// JNI entry point: classifies a packed ARGB int[] of the given dimensions.
+JNIEXPORT jstring JNICALL
+TENSORFLOW_METHOD(classifyImageRgb)(
+    JNIEnv* env, jobject thiz, jintArray image, jint width, jint height) {
+  // Copy image into currFrame.
+  jboolean iCopied = JNI_FALSE;
+  jint* pixels = env->GetIntArrayElements(image, &iCopied);
+
+  std::string result = ClassifyImage(
+      reinterpret_cast<const RGBA*>(pixels), width * 4, width, height);
+
+  // JNI_ABORT: the pixel data was only read, so skip the copy-back.
+  env->ReleaseIntArrayElements(image, pixels, JNI_ABORT);
+
+  return env->NewStringUTF(result.c_str());
+}
+
+// JNI entry point: classifies an Android Bitmap (must be RGBA_8888).
+JNIEXPORT jstring JNICALL
+TENSORFLOW_METHOD(classifyImageBmp)(
+    JNIEnv* env, jobject thiz, jobject bitmap) {
+  // Obtains the bitmap information.
+  AndroidBitmapInfo info;
+  CHECK_EQ(AndroidBitmap_getInfo(env, bitmap, &info),
+           ANDROID_BITMAP_RESULT_SUCCESS);
+  void* pixels;
+  CHECK_EQ(AndroidBitmap_lockPixels(env, bitmap, &pixels),
+           ANDROID_BITMAP_RESULT_SUCCESS);
+  LOG(INFO) << "Height: " << info.height;
+  LOG(INFO) << "Width: " << info.width;
+  LOG(INFO) << "Stride: " << info.stride;
+  // TODO(jiayq): deal with other formats if necessary.
+  if (info.format != ANDROID_BITMAP_FORMAT_RGBA_8888) {
+    // NOTE(review): this early return skips AndroidBitmap_unlockPixels, so
+    // the bitmap stays locked on the error path — confirm and fix upstream.
+    return env->NewStringUTF(
+        "Error: Android system is not using RGBA_8888 in default.");
+  }
+
+  std::string result = ClassifyImage(
+      static_cast<const RGBA*>(pixels), info.stride, info.width, info.height);
+
+  // Finally, unlock the pixels
+  CHECK_EQ(AndroidBitmap_unlockPixels(env, bitmap),
+           ANDROID_BITMAP_RESULT_SUCCESS);
+
+  return env->NewStringUTF(result.c_str());
+}
diff --git a/tensorflow/examples/android/jni/tensorflow_jni.h b/tensorflow/examples/android/jni/tensorflow_jni.h
new file mode 100644
index 0000000000..2de353bac8
--- /dev/null
+++ b/tensorflow/examples/android/jni/tensorflow_jni.h
@@ -0,0 +1,36 @@
+// The methods are exposed to Java to allow for interaction with the native
+// Tensorflow code. See
+// tensorflow/examples/android/src/org/tensorflow/TensorflowClassifier.java
+// for the Java counterparts.
+
+#ifndef ORG_TENSORFLOW_JNI_TENSORFLOW_JNI_H_  // NOLINT
+#define ORG_TENSORFLOW_JNI_TENSORFLOW_JNI_H_  // NOLINT
+
+#include <jni.h>
+
+#ifdef __cplusplus
+extern "C" {
+#endif  // __cplusplus
+
+// Expands to the fully-qualified JNI symbol for a method of
+// org.tensorflow.demo.TensorflowClassifier.
+#define TENSORFLOW_METHOD(METHOD_NAME) \
+  Java_org_tensorflow_demo_TensorflowClassifier_##METHOD_NAME  // NOLINT
+
+JNIEXPORT jint JNICALL
+TENSORFLOW_METHOD(initializeTensorflow)(
+    JNIEnv* env, jobject thiz, jobject java_asset_manager,
+    jstring model, jstring labels,
+    jint num_classes, jint mognet_input_size, jint image_mean);
+
+JNIEXPORT jstring JNICALL
+TENSORFLOW_METHOD(classifyImageBmp)(
+    JNIEnv* env, jobject thiz, jobject bitmap);
+
+JNIEXPORT jstring JNICALL
+TENSORFLOW_METHOD(classifyImageRgb)(
+    JNIEnv* env, jobject thiz, jintArray image, jint width, jint height);
+
+#ifdef __cplusplus
+}  // extern "C"
+#endif  // __cplusplus
+
+#endif  // ORG_TENSORFLOW_JNI_TENSORFLOW_JNI_H_  // NOLINT
diff --git a/tensorflow/examples/android/jni/yuv2rgb.cc b/tensorflow/examples/android/jni/yuv2rgb.cc
new file mode 100644
index 0000000000..93694e492d
--- /dev/null
+++
b/tensorflow/examples/android/jni/yuv2rgb.cc
@@ -0,0 +1,161 @@
+// This is a collection of routines which converts various YUV image formats
+// to ARGB.
+
+#include "tensorflow/examples/android/jni/yuv2rgb.h"
+
+#ifndef MAX
+// Statement-expression forms (GCC/Clang extension) so each argument is
+// evaluated exactly once.
+#define MAX(a, b) ({__typeof__(a) _a = (a); __typeof__(b) _b = (b); _a > _b ? _a : _b; })
+#define MIN(a, b) ({__typeof__(a) _a = (a); __typeof__(b) _b = (b); _a < _b ? _a : _b; })
+#endif
+
+// This value is 2 ^ 18 - 1, and is used to clamp the RGB values before their ranges
+// are normalized to eight bits.
+static const int kMaxChannelValue = 262143;
+
+// Accepts a YUV 4:2:0 image with a plane of 8 bit Y samples followed by an
+// interleaved U/V plane containing 8 bit 2x2 subsampled chroma samples,
+// except the interleave order of U and V is reversed. Converts to a packed
+// ARGB 32 bit output of the same pixel dimensions.
+void ConvertYUV420SPToARGB8888(const uint8* const yData,
+                               const uint8* const uvData,
+                               uint32* const output, const int width,
+                               const int height) {
+  const uint8* pY = yData;
+  const uint8* pUV = uvData;
+  uint32* out = output;
+
+  for (int y = 0; y < height; y++) {
+    for (int x = 0; x < width; x++) {
+      int nY = *pY++;
+      // One interleaved chroma pair is shared by each 2x2 block of Y samples.
+      int offset = (y >> 1) * width + 2 * (x >> 1);
+#ifdef __APPLE__
+      // NOTE(review): chroma byte order is swapped on Apple builds — see the
+      // matching #ifdef in rgb2yuv; confirm the platform layouts agree.
+      int nU = pUV[offset];
+      int nV = pUV[offset + 1];
+#else
+      int nV = pUV[offset];
+      int nU = pUV[offset + 1];
+#endif
+
+      nY -= 16;
+      nU -= 128;
+      nV -= 128;
+      if (nY < 0) nY = 0;
+
+      // This is the floating point equivalent. We do the conversion in integer
+      // because some Android devices do not have floating point in hardware.
+      // nR = (int)(1.164 * nY + 2.018 * nU);
+      // nG = (int)(1.164 * nY - 0.813 * nV - 0.391 * nU);
+      // nB = (int)(1.164 * nY + 1.596 * nV);
+
+      int nR = (int)(1192 * nY + 1634 * nV);
+      int nG = (int)(1192 * nY - 833 * nV - 400 * nU);
+      int nB = (int)(1192 * nY + 2066 * nU);
+
+      // Clamp to [0, kMaxChannelValue] before normalizing down to 8 bits.
+      nR = MIN(kMaxChannelValue, MAX(0, nR));
+      nG = MIN(kMaxChannelValue, MAX(0, nG));
+      nB = MIN(kMaxChannelValue, MAX(0, nB));
+
+      nR = (nR >> 10) & 0xff;
+      nG = (nG >> 10) & 0xff;
+      nB = (nB >> 10) & 0xff;
+      *out++ = 0xff000000 | (nR << 16) | (nG << 8) | nB;
+    }
+  }
+}
+
+// The same as above, but downsamples each dimension to half size.
+// Y is averaged over each 2x2 input block, and one chroma pair is consumed
+// per output pixel.
+void ConvertYUV420SPToARGB8888HalfSize(const uint8* const input,
+                                       uint32* const output,
+                                       int width, int height) {
+  const uint8* pY = input;
+  const uint8* pUV = input + (width * height);
+  uint32* out = output;
+  int stride = width;
+  width >>= 1;
+  height >>= 1;
+
+  for (int y = 0; y < height; y++) {
+    for (int x = 0; x < width; x++) {
+      int nY = (pY[0] + pY[1] + pY[stride] + pY[stride + 1]) >> 2;
+      pY += 2;
+#ifdef __APPLE__
+      int nU = *pUV++;
+      int nV = *pUV++;
+#else
+      int nV = *pUV++;
+      int nU = *pUV++;
+#endif
+
+      nY -= 16;
+      nU -= 128;
+      nV -= 128;
+      if (nY < 0) nY = 0;
+
+      int nR = (int)(1192 * nY + 1634 * nV);
+      int nG = (int)(1192 * nY - 833 * nV - 400 * nU);
+      int nB = (int)(1192 * nY + 2066 * nU);
+
+      nR = MIN(kMaxChannelValue, MAX(0, nR));
+      nG = MIN(kMaxChannelValue, MAX(0, nG));
+      nB = MIN(kMaxChannelValue, MAX(0, nB));
+
+      nR = (nR >> 10) & 0xff;
+      nG = (nG >> 10) & 0xff;
+      nB = (nB >> 10) & 0xff;
+      *out++ = 0xff000000 | (nR << 16) | (nG << 8) | nB;
+    }
+    pY += stride;  // skip the second row of the 2x2 blocks just consumed
+  }
+}
+
+// Accepts a YUV 4:2:0 image with a plane of 8 bit Y samples followed by an
+// interleaved U/V plane containing 8 bit 2x2 subsampled chroma samples,
+// except the interleave order of U and V is reversed. Converts to a packed
+// RGB 565 bit output of the same pixel dimensions.
+// Same conversion math as ConvertYUV420SPToARGB8888 above, but packs the
+// result into 5:6:5 bits instead of 8:8:8 with alpha.
+void ConvertYUV420SPToRGB565(const uint8* const input, uint16* const output,
+                             const int width, const int height) {
+  const uint8* pY = input;
+  const uint8* pUV = input + (width * height);
+  uint16 *out = output;
+
+  for (int y = 0; y < height; y++) {
+    for (int x = 0; x < width; x++) {
+      int nY = *pY++;
+      // One interleaved chroma pair is shared by each 2x2 block of Y samples.
+      int offset = (y >> 1) * width + 2 * (x >> 1);
+#ifdef __APPLE__
+      int nU = pUV[offset];
+      int nV = pUV[offset + 1];
+#else
+      int nV = pUV[offset];
+      int nU = pUV[offset + 1];
+#endif
+
+      nY -= 16;
+      nU -= 128;
+      nV -= 128;
+      if (nY < 0) nY = 0;
+
+      // This is the floating point equivalent. We do the conversion in integer
+      // because some Android devices do not have floating point in hardware.
+      // nR = (int)(1.164 * nY + 2.018 * nU);
+      // nG = (int)(1.164 * nY - 0.813 * nV - 0.391 * nU);
+      // nB = (int)(1.164 * nY + 1.596 * nV);
+
+      int nR = (int)(1192 * nY + 1634 * nV);
+      int nG = (int)(1192 * nY - 833 * nV - 400 * nU);
+      int nB = (int)(1192 * nY + 2066 * nU);
+
+      nR = MIN(kMaxChannelValue, MAX(0, nR));
+      nG = MIN(kMaxChannelValue, MAX(0, nG));
+      nB = MIN(kMaxChannelValue, MAX(0, nB));
+
+      // Shift more than for ARGB8888 and apply appropriate bitmask.
+      nR = (nR >> 13) & 0x1f;
+      nG = (nG >> 12) & 0x3f;
+      nB = (nB >> 13) & 0x1f;
+
+      // R is high 5 bits, G is middle 6 bits, and B is low 5 bits.
+      *out++ = (nR << 11) | (nG << 5) | nB;
+    }
+  }
+}
diff --git a/tensorflow/examples/android/jni/yuv2rgb.h b/tensorflow/examples/android/jni/yuv2rgb.h
new file mode 100644
index 0000000000..698da415f5
--- /dev/null
+++ b/tensorflow/examples/android/jni/yuv2rgb.h
@@ -0,0 +1,37 @@
+// This is a collection of routines which converts various YUV image formats
+// to (A)RGB.
+
+#ifndef ORG_TENSORFLOW_JNI_IMAGEUTILS_YUV2RGB_H_
+#define ORG_TENSORFLOW_JNI_IMAGEUTILS_YUV2RGB_H_
+
+#include "tensorflow/core/platform/port.h"
+
+using namespace tensorflow;  // brings in the uint8/uint16/uint32 typedefs below
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+// Converts YUV420 semi-planar data to ARGB 8888 data using the supplied width
+// and height. The input and output must already be allocated and non-null.
+// For efficiency, no error checking is performed.
+void ConvertYUV420SPToARGB8888(const uint8* const pY, const uint8* const pUV,
+                               uint32* const output, const int width,
+                               const int height);
+
+// The same as above, but downsamples each dimension to half size.
+void ConvertYUV420SPToARGB8888HalfSize(const uint8* const input,
+                                       uint32* const output,
+                                       int width, int height);
+
+// Converts YUV420 semi-planar data to RGB 565 data using the supplied width
+// and height. The input and output must already be allocated and non-null.
+// For efficiency, no error checking is performed.
+void ConvertYUV420SPToRGB565(const uint8* const input, uint16* const output,
+                             const int width, const int height);
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif  // ORG_TENSORFLOW_JNI_IMAGEUTILS_YUV2RGB_H_
diff --git a/tensorflow/examples/android/res/drawable-hdpi/ic_action_info.png b/tensorflow/examples/android/res/drawable-hdpi/ic_action_info.png
Binary files differnew file mode 100644
index 0000000000..32bd1aabca
--- /dev/null
+++ b/tensorflow/examples/android/res/drawable-hdpi/ic_action_info.png
diff --git a/tensorflow/examples/android/res/drawable-hdpi/ic_launcher.png b/tensorflow/examples/android/res/drawable-hdpi/ic_launcher.png
Binary files differnew file mode 100644
index 0000000000..b3113cd15c
--- /dev/null
+++ b/tensorflow/examples/android/res/drawable-hdpi/ic_launcher.png
diff --git a/tensorflow/examples/android/res/drawable-hdpi/tile.9.png b/tensorflow/examples/android/res/drawable-hdpi/tile.9.png
Binary files differnew file mode 100644
index 0000000000..135862883e
--- /dev/null
+++
b/tensorflow/examples/android/res/drawable-hdpi/tile.9.png diff --git a/tensorflow/examples/android/res/drawable-mdpi/ic_action_info.png b/tensorflow/examples/android/res/drawable-mdpi/ic_action_info.png Binary files differnew file mode 100644 index 0000000000..8efbbf8b3c --- /dev/null +++ b/tensorflow/examples/android/res/drawable-mdpi/ic_action_info.png diff --git a/tensorflow/examples/android/res/drawable-mdpi/ic_launcher.png b/tensorflow/examples/android/res/drawable-mdpi/ic_launcher.png Binary files differnew file mode 100644 index 0000000000..51f87ee650 --- /dev/null +++ b/tensorflow/examples/android/res/drawable-mdpi/ic_launcher.png diff --git a/tensorflow/examples/android/res/drawable-xhdpi/ic_action_info.png b/tensorflow/examples/android/res/drawable-xhdpi/ic_action_info.png Binary files differnew file mode 100644 index 0000000000..ba143ea7a8 --- /dev/null +++ b/tensorflow/examples/android/res/drawable-xhdpi/ic_action_info.png diff --git a/tensorflow/examples/android/res/drawable-xhdpi/ic_launcher.png b/tensorflow/examples/android/res/drawable-xhdpi/ic_launcher.png Binary files differnew file mode 100644 index 0000000000..6361d792da --- /dev/null +++ b/tensorflow/examples/android/res/drawable-xhdpi/ic_launcher.png diff --git a/tensorflow/examples/android/res/drawable-xxhdpi/ic_action_info.png b/tensorflow/examples/android/res/drawable-xxhdpi/ic_action_info.png Binary files differnew file mode 100644 index 0000000000..394eb7e534 --- /dev/null +++ b/tensorflow/examples/android/res/drawable-xxhdpi/ic_action_info.png diff --git a/tensorflow/examples/android/res/drawable-xxhdpi/ic_launcher.png b/tensorflow/examples/android/res/drawable-xxhdpi/ic_launcher.png Binary files differnew file mode 100644 index 0000000000..2e27bec978 --- /dev/null +++ b/tensorflow/examples/android/res/drawable-xxhdpi/ic_launcher.png diff --git a/tensorflow/examples/android/res/layout-land/camera_connection_fragment.xml 
b/tensorflow/examples/android/res/layout-land/camera_connection_fragment.xml new file mode 100644 index 0000000000..56b526c84b --- /dev/null +++ b/tensorflow/examples/android/res/layout-land/camera_connection_fragment.xml @@ -0,0 +1,34 @@ +<?xml version="1.0" encoding="utf-8"?><!-- + Copyright 2014 The Android Open Source Project + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +--> +<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android" + android:layout_width="match_parent" + android:layout_height="match_parent"> + + <org.tensorflow.demo.AutoFitTextureView + android:id="@+id/texture" + android:layout_width="wrap_content" + android:layout_height="wrap_content" + android:layout_alignParentBottom="true" + android:layout_alignParentStart="true" + android:layout_alignParentTop="true" /> + + <org.tensorflow.demo.RecognitionScoreView + android:id="@+id/results" + android:layout_width="match_parent" + android:layout_height="112dp" + android:layout_alignParentTop="true" /> + +</RelativeLayout> diff --git a/tensorflow/examples/android/res/layout/activity_camera.xml b/tensorflow/examples/android/res/layout/activity_camera.xml new file mode 100644 index 0000000000..d21be9fc37 --- /dev/null +++ b/tensorflow/examples/android/res/layout/activity_camera.xml @@ -0,0 +1,22 @@ +<?xml version="1.0" encoding="utf-8"?><!-- + Copyright 2014 The Android Open Source Project + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in 
compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +--> +<FrameLayout xmlns:android="http://schemas.android.com/apk/res/android" + xmlns:tools="http://schemas.android.com/tools" + android:id="@+id/container" + android:layout_width="match_parent" + android:layout_height="match_parent" + android:background="#000" + tools:context="org.tensorflow.demo.CameraActivity" /> diff --git a/tensorflow/examples/android/res/layout/camera_connection_fragment.xml b/tensorflow/examples/android/res/layout/camera_connection_fragment.xml new file mode 100644 index 0000000000..0e8e52a138 --- /dev/null +++ b/tensorflow/examples/android/res/layout/camera_connection_fragment.xml @@ -0,0 +1,32 @@ +<?xml version="1.0" encoding="utf-8"?><!-- + Copyright 2014 The Android Open Source Project + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+--> +<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android" + android:layout_width="match_parent" + android:layout_height="match_parent"> + + <org.tensorflow.demo.AutoFitTextureView + android:id="@+id/texture" + android:layout_width="wrap_content" + android:layout_height="wrap_content" + android:layout_alignParentBottom="true" /> + + <org.tensorflow.demo.RecognitionScoreView + android:id="@+id/results" + android:layout_width="match_parent" + android:layout_height="112dp" + android:layout_alignParentTop="true" /> + +</RelativeLayout> diff --git a/tensorflow/examples/android/res/values-sw600dp/template-dimens.xml b/tensorflow/examples/android/res/values-sw600dp/template-dimens.xml new file mode 100644 index 0000000000..22074a2bdb --- /dev/null +++ b/tensorflow/examples/android/res/values-sw600dp/template-dimens.xml @@ -0,0 +1,24 @@ +<!-- + Copyright 2013 The Android Open Source Project + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+ --> + +<resources> + + <!-- Semantic definitions --> + + <dimen name="horizontal_page_margin">@dimen/margin_huge</dimen> + <dimen name="vertical_page_margin">@dimen/margin_medium</dimen> + +</resources> diff --git a/tensorflow/examples/android/res/values-sw600dp/template-styles.xml b/tensorflow/examples/android/res/values-sw600dp/template-styles.xml new file mode 100644 index 0000000000..03d1974183 --- /dev/null +++ b/tensorflow/examples/android/res/values-sw600dp/template-styles.xml @@ -0,0 +1,25 @@ +<!-- + Copyright 2013 The Android Open Source Project + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + --> + +<resources> + + <style name="Widget.SampleMessage"> + <item name="android:textAppearance">?android:textAppearanceLarge</item> + <item name="android:lineSpacingMultiplier">1.2</item> + <item name="android:shadowDy">-6.5</item> + </style> + +</resources> diff --git a/tensorflow/examples/android/res/values-v11/styles.xml b/tensorflow/examples/android/res/values-v11/styles.xml new file mode 100644 index 0000000000..c2d1babc12 --- /dev/null +++ b/tensorflow/examples/android/res/values-v11/styles.xml @@ -0,0 +1,24 @@ +<?xml version="1.0" encoding="utf-8"?> +<resources> + + <!-- + Base application theme for API 11+. This theme completely replaces + AppBaseTheme from res/values/styles.xml on API 11+ devices. + --> + <style name="AppBaseTheme" parent="android:Theme.Holo.Light"> + <!-- API 11 theme customizations can go here. 
--> + </style> + + <style name="FullscreenTheme" parent="android:Theme.Holo"> + <item name="android:actionBarStyle">@style/FullscreenActionBarStyle</item> + <item name="android:windowActionBarOverlay">true</item> + <item name="android:windowBackground">@null</item> + <item name="metaButtonBarStyle">?android:attr/buttonBarStyle</item> + <item name="metaButtonBarButtonStyle">?android:attr/buttonBarButtonStyle</item> + </style> + + <style name="FullscreenActionBarStyle" parent="android:Widget.Holo.ActionBar"> + <!-- <item name="android:background">@color/black_overlay</item> --> + </style> + +</resources> diff --git a/tensorflow/examples/android/res/values-v11/template-styles.xml b/tensorflow/examples/android/res/values-v11/template-styles.xml new file mode 100644 index 0000000000..8c1ea66f28 --- /dev/null +++ b/tensorflow/examples/android/res/values-v11/template-styles.xml @@ -0,0 +1,22 @@ +<!-- + Copyright 2013 The Android Open Source Project + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + --> + +<resources> + + <!-- Activity themes --> + <style name="Theme.Base" parent="android:Theme.Holo.Light" /> + +</resources> diff --git a/tensorflow/examples/android/res/values-v14/styles.xml b/tensorflow/examples/android/res/values-v14/styles.xml new file mode 100644 index 0000000000..cc370849c0 --- /dev/null +++ b/tensorflow/examples/android/res/values-v14/styles.xml @@ -0,0 +1,12 @@ +<resources> + + <!-- + Base application theme for API 14+. 
This theme completely replaces + AppBaseTheme from BOTH res/values/styles.xml and + res/values-v11/styles.xml on API 14+ devices. + --> + <style name="AppBaseTheme" parent="android:Theme.Holo.Light.DarkActionBar"> + <!-- API 14 theme customizations can go here. --> + </style> + +</resources> diff --git a/tensorflow/examples/android/res/values-v21/base-colors.xml b/tensorflow/examples/android/res/values-v21/base-colors.xml new file mode 100644 index 0000000000..8b6ec3f85d --- /dev/null +++ b/tensorflow/examples/android/res/values-v21/base-colors.xml @@ -0,0 +1,21 @@ +<?xml version="1.0" encoding="UTF-8"?> +<!-- + Copyright 2013 The Android Open Source Project + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +--> + +<resources> + + +</resources> diff --git a/tensorflow/examples/android/res/values-v21/base-template-styles.xml b/tensorflow/examples/android/res/values-v21/base-template-styles.xml new file mode 100644 index 0000000000..c778e4f98a --- /dev/null +++ b/tensorflow/examples/android/res/values-v21/base-template-styles.xml @@ -0,0 +1,24 @@ +<?xml version="1.0" encoding="UTF-8"?> +<!-- + Copyright 2013 The Android Open Source Project + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
+ You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +--> + +<resources> + + <!-- Activity themes --> + <style name="Theme.Base" parent="android:Theme.Material.Light"> + </style> + +</resources> diff --git a/tensorflow/examples/android/res/values/attrs.xml b/tensorflow/examples/android/res/values/attrs.xml new file mode 100644 index 0000000000..56e5beae76 --- /dev/null +++ b/tensorflow/examples/android/res/values/attrs.xml @@ -0,0 +1,14 @@ +<resources> + + <!-- + Declare custom theme attributes that allow changing which styles are + used for button bars depending on the API level. + ?android:attr/buttonBarStyle is new as of API 11 so this is + necessary to support previous API levels. + --> + <declare-styleable name="ButtonBarContainerTheme"> + <attr name="metaButtonBarStyle" format="reference" /> + <attr name="metaButtonBarButtonStyle" format="reference" /> + </declare-styleable> + +</resources> diff --git a/tensorflow/examples/android/res/values/base-strings.xml b/tensorflow/examples/android/res/values/base-strings.xml new file mode 100644 index 0000000000..e6c3bc7fa0 --- /dev/null +++ b/tensorflow/examples/android/res/values/base-strings.xml @@ -0,0 +1,20 @@ +<?xml version="1.0" encoding="UTF-8"?> +<!-- + Copyright 2013 The Android Open Source Project + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
+ You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +--> + +<resources> + <string name="app_name">Tensorflow Demo</string> +</resources> diff --git a/tensorflow/examples/android/res/values/colors.xml b/tensorflow/examples/android/res/values/colors.xml new file mode 100644 index 0000000000..4b75d2b2bd --- /dev/null +++ b/tensorflow/examples/android/res/values/colors.xml @@ -0,0 +1,19 @@ +<?xml version="1.0" encoding="utf-8"?> +<!-- + Copyright 2015 The Android Open Source Project + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +--> +<resources> + <color name="control_background">#cc4285f4</color> +</resources> diff --git a/tensorflow/examples/android/res/values/strings.xml b/tensorflow/examples/android/res/values/strings.xml new file mode 100644 index 0000000000..038c73b3d9 --- /dev/null +++ b/tensorflow/examples/android/res/values/strings.xml @@ -0,0 +1,20 @@ +<?xml version="1.0" encoding="utf-8"?><!-- + Copyright 2014 The Android Open Source Project + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
+ You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +--> +<resources> + <string name="description_info">Info</string> + <string name="request_permission">This sample needs camera permission.</string> + <string name="camera_error">This device doesn\'t support Camera2 API.</string> +</resources> diff --git a/tensorflow/examples/android/res/values/styles.xml b/tensorflow/examples/android/res/values/styles.xml new file mode 100644 index 0000000000..3f3bdfb494 --- /dev/null +++ b/tensorflow/examples/android/res/values/styles.xml @@ -0,0 +1,18 @@ +<?xml version="1.0" encoding="utf-8"?><!-- + Copyright 2014 The Android Open Source Project + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+--> +<resources> + <style name="MaterialTheme" parent="android:Theme.Material.Light.NoActionBar.Fullscreen" /> +</resources> diff --git a/tensorflow/examples/android/res/values/template-dimens.xml b/tensorflow/examples/android/res/values/template-dimens.xml new file mode 100644 index 0000000000..39e710b5ca --- /dev/null +++ b/tensorflow/examples/android/res/values/template-dimens.xml @@ -0,0 +1,32 @@ +<!-- + Copyright 2013 The Android Open Source Project + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + --> + +<resources> + + <!-- Define standard dimensions to comply with Holo-style grids and rhythm. --> + + <dimen name="margin_tiny">4dp</dimen> + <dimen name="margin_small">8dp</dimen> + <dimen name="margin_medium">16dp</dimen> + <dimen name="margin_large">32dp</dimen> + <dimen name="margin_huge">64dp</dimen> + + <!-- Semantic definitions --> + + <dimen name="horizontal_page_margin">@dimen/margin_medium</dimen> + <dimen name="vertical_page_margin">@dimen/margin_medium</dimen> + +</resources> diff --git a/tensorflow/examples/android/res/values/template-styles.xml b/tensorflow/examples/android/res/values/template-styles.xml new file mode 100644 index 0000000000..6e7d593dd8 --- /dev/null +++ b/tensorflow/examples/android/res/values/template-styles.xml @@ -0,0 +1,42 @@ +<!-- + Copyright 2013 The Android Open Source Project + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
+ You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + --> + +<resources> + + <!-- Activity themes --> + + <style name="Theme.Base" parent="android:Theme.Light" /> + + <style name="Theme.Sample" parent="Theme.Base" /> + + <style name="AppTheme" parent="Theme.Sample" /> + <!-- Widget styling --> + + <style name="Widget" /> + + <style name="Widget.SampleMessage"> + <item name="android:textAppearance">?android:textAppearanceMedium</item> + <item name="android:lineSpacingMultiplier">1.1</item> + </style> + + <style name="Widget.SampleMessageTile"> + <item name="android:background">@drawable/tile</item> + <item name="android:shadowColor">#7F000000</item> + <item name="android:shadowDy">-3.5</item> + <item name="android:shadowRadius">2</item> + </style> + +</resources> diff --git a/tensorflow/examples/android/src/org/tensorflow/demo/AutoFitTextureView.java b/tensorflow/examples/android/src/org/tensorflow/demo/AutoFitTextureView.java new file mode 100644 index 0000000000..011dc64d16 --- /dev/null +++ b/tensorflow/examples/android/src/org/tensorflow/demo/AutoFitTextureView.java @@ -0,0 +1,74 @@ +/* + * Copyright 2014 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.tensorflow.demo; + +import android.content.Context; +import android.util.AttributeSet; +import android.view.TextureView; + +/** + * A {@link TextureView} that can be adjusted to a specified aspect ratio. + */ +public class AutoFitTextureView extends TextureView { + private int ratioWidth = 0; + private int ratioHeight = 0; + + public AutoFitTextureView(final Context context) { + this(context, null); + } + + public AutoFitTextureView(final Context context, final AttributeSet attrs) { + this(context, attrs, 0); + } + + public AutoFitTextureView(final Context context, final AttributeSet attrs, final int defStyle) { + super(context, attrs, defStyle); + } + + /** + * Sets the aspect ratio for this view. The size of the view will be measured based on the ratio + * calculated from the parameters. Note that the actual sizes of parameters don't matter, that + * is, calling setAspectRatio(2, 3) and setAspectRatio(4, 6) make the same result. 
+ * + * @param width Relative horizontal size + * @param height Relative vertical size + */ + public void setAspectRatio(final int width, final int height) { + if (width < 0 || height < 0) { + throw new IllegalArgumentException("Size cannot be negative."); + } + ratioWidth = width; + ratioHeight = height; + requestLayout(); + } + + @Override + protected void onMeasure(final int widthMeasureSpec, final int heightMeasureSpec) { + super.onMeasure(widthMeasureSpec, heightMeasureSpec); + final int width = MeasureSpec.getSize(widthMeasureSpec); + final int height = MeasureSpec.getSize(heightMeasureSpec); + if (0 == ratioWidth || 0 == ratioHeight) { + setMeasuredDimension(width, height); + } else { + if (width < height * ratioWidth / ratioHeight) { + setMeasuredDimension(width, width * ratioHeight / ratioWidth); + } else { + setMeasuredDimension(height * ratioWidth / ratioHeight, height); + } + } + } +} diff --git a/tensorflow/examples/android/src/org/tensorflow/demo/CameraActivity.java b/tensorflow/examples/android/src/org/tensorflow/demo/CameraActivity.java new file mode 100644 index 0000000000..943dddd254 --- /dev/null +++ b/tensorflow/examples/android/src/org/tensorflow/demo/CameraActivity.java @@ -0,0 +1,34 @@ +/* + * Copyright 2014 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.tensorflow.demo; + +import android.app.Activity; +import android.os.Bundle; + +public class CameraActivity extends Activity { + @Override + protected void onCreate(final Bundle savedInstanceState) { + super.onCreate(savedInstanceState); + setContentView(R.layout.activity_camera); + if (null == savedInstanceState) { + getFragmentManager() + .beginTransaction() + .replace(R.id.container, CameraConnectionFragment.newInstance()) + .commit(); + } + } +} diff --git a/tensorflow/examples/android/src/org/tensorflow/demo/CameraConnectionFragment.java b/tensorflow/examples/android/src/org/tensorflow/demo/CameraConnectionFragment.java new file mode 100644 index 0000000000..d9a696d9bb --- /dev/null +++ b/tensorflow/examples/android/src/org/tensorflow/demo/CameraConnectionFragment.java @@ -0,0 +1,593 @@ +/* + * Copyright 2014 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
package org.tensorflow.demo;

import android.app.Activity;
import android.app.AlertDialog;
import android.app.Dialog;
import android.app.DialogFragment;
import android.app.Fragment;
import android.content.Context;
import android.content.DialogInterface;
import android.content.res.Configuration;
import android.graphics.ImageFormat;
import android.graphics.Matrix;
import android.graphics.RectF;
import android.graphics.SurfaceTexture;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.cam2.CameraCharacteristics;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.CaptureResult;
import android.hardware.camera2.TotalCaptureResult;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.media.ImageReader;
import android.os.Bundle;
import android.os.Handler;
import android.os.HandlerThread;
import android.util.Size;
import android.util.SparseIntArray;
import android.view.LayoutInflater;
import android.view.Surface;
import android.view.TextureView;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Toast;

import org.tensorflow.demo.env.Logger;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;

/**
 * Fragment that owns the full Camera2 preview lifecycle for the demo: it selects a
 * back-facing camera, opens it, configures a preview capture session, and feeds preview
 * frames (via a second YUV {@link ImageReader}) into a {@link TensorflowImageListener}
 * whose recognition results are rendered into a {@link RecognitionScoreView}.
 */
public class CameraConnectionFragment extends Fragment {
  private static final Logger LOGGER = new Logger();

  // View that displays recognition results; wired up in onViewCreated().
  private RecognitionScoreView scoreView;

  /**
   * Conversion from screen rotation to JPEG orientation.
   */
  private static final SparseIntArray ORIENTATIONS = new SparseIntArray();
  private static final String FRAGMENT_DIALOG = "dialog";

  static {
    ORIENTATIONS.append(Surface.ROTATION_0, 90);
    ORIENTATIONS.append(Surface.ROTATION_90, 0);
    ORIENTATIONS.append(Surface.ROTATION_180, 270);
    ORIENTATIONS.append(Surface.ROTATION_270, 180);
  }

  /**
   * {@link android.view.TextureView.SurfaceTextureListener} handles several lifecycle events on a
   * {@link TextureView}. Opens the camera once the surface exists, and re-applies the
   * display transform whenever the surface size changes.
   */
  private final TextureView.SurfaceTextureListener surfaceTextureListener =
      new TextureView.SurfaceTextureListener() {
        @Override
        public void onSurfaceTextureAvailable(
            final SurfaceTexture texture, final int width, final int height) {
          openCamera(width, height);
        }

        @Override
        public void onSurfaceTextureSizeChanged(
            final SurfaceTexture texture, final int width, final int height) {
          configureTransform(width, height);
        }

        @Override
        public boolean onSurfaceTextureDestroyed(final SurfaceTexture texture) {
          return true;
        }

        @Override
        public void onSurfaceTextureUpdated(final SurfaceTexture texture) {}
      };

  /**
   * ID of the current {@link CameraDevice}.
   */
  private String cameraId;

  /**
   * An {@link AutoFitTextureView} for camera preview.
   */
  private AutoFitTextureView textureView;

  /**
   * A {@link CameraCaptureSession } for camera preview.
   */
  private CameraCaptureSession captureSession;

  /**
   * A reference to the opened {@link CameraDevice}.
   */
  private CameraDevice cameraDevice;

  /**
   * The {@link android.util.Size} of camera preview.
   */
  private Size previewSize;

  /**
   * {@link android.hardware.camera2.CameraDevice.StateCallback}
   * is called when {@link CameraDevice} changes its state.
   * Every callback releases cameraOpenCloseLock, which is acquired in openCamera().
   */
  private final CameraDevice.StateCallback stateCallback =
      new CameraDevice.StateCallback() {
        @Override
        public void onOpened(final CameraDevice cd) {
          // This method is called when the camera is opened.  We start camera preview here.
          cameraOpenCloseLock.release();
          cameraDevice = cd;
          createCameraPreviewSession();
        }

        @Override
        public void onDisconnected(final CameraDevice cd) {
          cameraOpenCloseLock.release();
          cd.close();
          cameraDevice = null;
        }

        @Override
        public void onError(final CameraDevice cd, final int error) {
          // An unrecoverable camera error ends the whole activity.
          cameraOpenCloseLock.release();
          cd.close();
          cameraDevice = null;
          final Activity activity = getActivity();
          if (null != activity) {
            activity.finish();
          }
        }
      };

  /**
   * An additional thread for running tasks that shouldn't block the UI.
   */
  private HandlerThread backgroundThread;

  /**
   * A {@link Handler} for running tasks in the background.
   */
  private Handler backgroundHandler;

  /**
   * An {@link ImageReader} that handles still image capture.
   */
  private ImageReader imageReader;

  /**
   * {@link android.hardware.camera2.CaptureRequest.Builder} for the camera preview
   */
  private CaptureRequest.Builder previewRequestBuilder;

  /**
   * {@link CaptureRequest} generated by {@link #previewRequestBuilder}
   */
  private CaptureRequest previewRequest;

  /**
   * A {@link Semaphore} to prevent the app from exiting before closing the camera.
   */
  private final Semaphore cameraOpenCloseLock = new Semaphore(1);

  /**
   * Shows a {@link Toast} on the UI thread.
   *
   * @param text The message to show
   */
  private void showToast(final String text) {
    final Activity activity = getActivity();
    if (activity != null) {
      activity.runOnUiThread(
          new Runnable() {
            @Override
            public void run() {
              Toast.makeText(activity, text, Toast.LENGTH_SHORT).show();
            }
          });
    }
  }

  /**
   * Given {@code choices} of {@code Size}s supported by a camera, chooses the smallest one whose
   * width and height are at least as large as the respective requested values, and whose aspect
   * ratio matches with the specified value.
   *
   * NOTE(review): despite the javadoc above, the current implementation ignores
   * width/height/aspectRatio and hard-codes 480x320 (see TODO below); if no such size
   * exists it falls back to choices[0].
   *
   * @param choices The list of sizes that the camera supports for the intended output class
   * @param width The minimum desired width
   * @param height The minimum desired height
   * @param aspectRatio The aspect ratio
   * @return The optimal {@code Size}, or an arbitrary one if none were big enough
   */
  private static Size chooseOptimalSize(
      final Size[] choices, final int width, final int height, final Size aspectRatio) {
    // Collect the supported resolutions that are at least as big as the preview Surface
    final List<Size> bigEnough = new ArrayList<>();
    for (final Size option : choices) {
      // TODO(andrewharp): Choose size intelligently.
      if (option.getHeight() == 320 && option.getWidth() == 480) {
        LOGGER.i("Adding size: " + option.getWidth() + "x" + option.getHeight());
        bigEnough.add(option);
      } else {
        LOGGER.i("Not adding size: " + option.getWidth() + "x" + option.getHeight());
      }
    }

    // Pick the smallest of those, assuming we found any
    if (bigEnough.size() > 0) {
      final Size chosenSize = Collections.min(bigEnough, new CompareSizesByArea());
      LOGGER.i("Chosen size: " + chosenSize.getWidth() + "x" + chosenSize.getHeight());
      return chosenSize;
    } else {
      LOGGER.e("Couldn't find any suitable preview size");
      return choices[0];
    }
  }

  /** Factory method used by CameraActivity; no arguments are required. */
  public static CameraConnectionFragment newInstance() {
    return new CameraConnectionFragment();
  }

  @Override
  public View onCreateView(
      final LayoutInflater inflater, final ViewGroup container, final Bundle savedInstanceState) {
    return inflater.inflate(R.layout.camera_connection_fragment, container, false);
  }

  @Override
  public void onViewCreated(final View view, final Bundle savedInstanceState) {
    // Grab the preview surface and the results overlay out of the inflated layout.
    textureView = (AutoFitTextureView) view.findViewById(R.id.texture);
    scoreView = (RecognitionScoreView) view.findViewById(R.id.results);
  }

  @Override
  public void onActivityCreated(final Bundle savedInstanceState) {
    super.onActivityCreated(savedInstanceState);
  }

  @Override
  public void onResume() {
    super.onResume();
    startBackgroundThread();

    // When the screen is turned off and turned back on, the SurfaceTexture is already
    // available, and "onSurfaceTextureAvailable" will not be called. In that case, we can open
    // a camera and start preview from here (otherwise, we wait until the surface is ready in
    // the SurfaceTextureListener).
    if (textureView.isAvailable()) {
      openCamera(textureView.getWidth(), textureView.getHeight());
    } else {
      textureView.setSurfaceTextureListener(surfaceTextureListener);
    }
  }

  @Override
  public void onPause() {
    // Release the camera before tearing down the background thread so no callback
    // targets a dead handler.
    closeCamera();
    stopBackgroundThread();
    super.onPause();
  }

  /**
   * Sets up member variables related to camera.
   *
   * @param width The width of available size for camera preview
   * @param height The height of available size for camera preview
   */
  private void setUpCameraOutputs(final int width, final int height) {
    final Activity activity = getActivity();
    final CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);
    try {
      for (final String cameraId : manager.getCameraIdList()) {
        final CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);

        // We don't use a front facing camera in this sample.
        final Integer facing = characteristics.get(CameraCharacteristics.LENS_FACING);
        if (facing != null && facing == CameraCharacteristics.LENS_FACING_FRONT) {
          continue;
        }

        final StreamConfigurationMap map =
            characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);

        if (map == null) {
          continue;
        }

        // For still image captures, we use the largest available size.
        final Size largest =
            Collections.max(
                Arrays.asList(map.getOutputSizes(ImageFormat.YUV_420_888)),
                new CompareSizesByArea());

        imageReader =
            ImageReader.newInstance(
                largest.getWidth(), largest.getHeight(), ImageFormat.YUV_420_888, /*maxImages*/ 2);

        // Danger, W.R.!  Attempting to use too large a preview size could exceed the camera
        // bus' bandwidth limitation, resulting in gorgeous previews but the storage of
        // garbage capture data.
        previewSize =
            chooseOptimalSize(map.getOutputSizes(SurfaceTexture.class), width, height, largest);

        // We fit the aspect ratio of TextureView to the size of preview we picked.
        final int orientation = getResources().getConfiguration().orientation;
        if (orientation == Configuration.ORIENTATION_LANDSCAPE) {
          textureView.setAspectRatio(previewSize.getWidth(), previewSize.getHeight());
        } else {
          textureView.setAspectRatio(previewSize.getHeight(), previewSize.getWidth());
        }

        // First suitable back-facing camera wins.
        CameraConnectionFragment.this.cameraId = cameraId;
        return;
      }
    } catch (final CameraAccessException e) {
      LOGGER.e(e, "Exception!");
    } catch (final NullPointerException e) {
      // Currently an NPE is thrown when the Camera2API is used but not supported on the
      // device this code runs.
      ErrorDialog.newInstance(getString(R.string.camera_error))
          .show(getChildFragmentManager(), FRAGMENT_DIALOG);
    }
  }

  /**
   * Opens the camera specified by {@link CameraConnectionFragment#cameraId}.
   */
  private void openCamera(final int width, final int height) {
    setUpCameraOutputs(width, height);
    configureTransform(width, height);
    final Activity activity = getActivity();
    final CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);
    try {
      // The lock is released by stateCallback once the open attempt resolves.
      if (!cameraOpenCloseLock.tryAcquire(2500, TimeUnit.MILLISECONDS)) {
        throw new RuntimeException("Time out waiting to lock camera opening.");
      }
      manager.openCamera(cameraId, stateCallback, backgroundHandler);
    } catch (final CameraAccessException e) {
      LOGGER.e(e, "Exception!");
    } catch (final InterruptedException e) {
      throw new RuntimeException("Interrupted while trying to lock camera opening.", e);
    }
  }

  /**
   * Closes the current {@link CameraDevice}.
   */
  private void closeCamera() {
    try {
      cameraOpenCloseLock.acquire();
      if (null != captureSession) {
        captureSession.close();
        captureSession = null;
      }
      if (null != cameraDevice) {
        cameraDevice.close();
        cameraDevice = null;
      }
      if (null != imageReader) {
        imageReader.close();
        imageReader = null;
      }
    } catch (final InterruptedException e) {
      throw new RuntimeException("Interrupted while trying to lock camera closing.", e);
    } finally {
      cameraOpenCloseLock.release();
    }
  }

  /**
   * Starts a background thread and its {@link Handler}.
   */
  private void startBackgroundThread() {
    backgroundThread = new HandlerThread("CameraBackground");
    backgroundThread.start();
    backgroundHandler = new Handler(backgroundThread.getLooper());
  }

  /**
   * Stops the background thread and its {@link Handler}.
   */
  private void stopBackgroundThread() {
    backgroundThread.quitSafely();
    try {
      backgroundThread.join();
      backgroundThread = null;
      backgroundHandler = null;
    } catch (final InterruptedException e) {
      LOGGER.e(e, "Exception!");
    }
  }

  // Receives preview frames from the dedicated preview ImageReader created in
  // createCameraPreviewSession() and runs Tensorflow inference on them.
  private final TensorflowImageListener tfPreviewListener = new TensorflowImageListener();

  // No per-frame capture metadata is needed, so both callbacks are intentionally empty.
  private final CameraCaptureSession.CaptureCallback captureCallback =
      new CameraCaptureSession.CaptureCallback() {
        @Override
        public void onCaptureProgressed(
            final CameraCaptureSession session,
            final CaptureRequest request,
            final CaptureResult partialResult) {}

        @Override
        public void onCaptureCompleted(
            final CameraCaptureSession session,
            final CaptureRequest request,
            final TotalCaptureResult result) {}
      };

  /**
   * Creates a new {@link CameraCaptureSession} for camera preview.
   */
  private void createCameraPreviewSession() {
    try {
      final SurfaceTexture texture = textureView.getSurfaceTexture();
      assert texture != null;

      // We configure the size of default buffer to be the size of camera preview we want.
      texture.setDefaultBufferSize(previewSize.getWidth(), previewSize.getHeight());

      // This is the output Surface we need to start preview.
      final Surface surface = new Surface(texture);

      // We set up a CaptureRequest.Builder with the output Surface.
      previewRequestBuilder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
      previewRequestBuilder.addTarget(surface);

      LOGGER.i("Opening camera preview: " + previewSize.getWidth() + "x" + previewSize.getHeight());

      // Create the reader for the preview frames.
      final ImageReader previewReader =
          ImageReader.newInstance(
              previewSize.getWidth(), previewSize.getHeight(), ImageFormat.YUV_420_888, 2);

      previewReader.setOnImageAvailableListener(tfPreviewListener, backgroundHandler);
      previewRequestBuilder.addTarget(previewReader.getSurface());

      // Here, we create a CameraCaptureSession for camera preview.
      cameraDevice.createCaptureSession(
          Arrays.asList(surface, imageReader.getSurface(), previewReader.getSurface()),
          new CameraCaptureSession.StateCallback() {

            @Override
            public void onConfigured(final CameraCaptureSession cameraCaptureSession) {
              // The camera is already closed
              if (null == cameraDevice) {
                return;
              }

              // When the session is ready, we start displaying the preview.
              captureSession = cameraCaptureSession;
              try {
                // Auto focus should be continuous for camera preview.
                previewRequestBuilder.set(
                    CaptureRequest.CONTROL_AF_MODE,
                    CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
                // Flash is automatically enabled when necessary.
                previewRequestBuilder.set(
                    CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH);

                // Finally, we start displaying the camera preview.
                previewRequest = previewRequestBuilder.build();
                captureSession.setRepeatingRequest(
                    previewRequest, captureCallback, backgroundHandler);
              } catch (final CameraAccessException e) {
                LOGGER.e(e, "Exception!");
              }
            }

            @Override
            public void onConfigureFailed(final CameraCaptureSession cameraCaptureSession) {
              showToast("Failed");
            }
          },
          null);
    } catch (final CameraAccessException e) {
      LOGGER.e(e, "Exception!");
    }

    LOGGER.i("Getting assets.");
    tfPreviewListener.initialize(getActivity().getAssets(), scoreView);
    LOGGER.i("Tensorflow initialized.");
  }

  /**
   * Configures the necessary {@link android.graphics.Matrix} transformation to `mTextureView`.
   * This method should be called after the camera preview size is determined in
   * setUpCameraOutputs and also the size of `mTextureView` is fixed.
   *
   * @param viewWidth The width of `mTextureView`
   * @param viewHeight The height of `mTextureView`
   */
  private void configureTransform(final int viewWidth, final int viewHeight) {
    final Activity activity = getActivity();
    if (null == textureView || null == previewSize || null == activity) {
      return;
    }
    final int rotation = activity.getWindowManager().getDefaultDisplay().getRotation();
    final Matrix matrix = new Matrix();
    final RectF viewRect = new RectF(0, 0, viewWidth, viewHeight);
    final RectF bufferRect = new RectF(0, 0, previewSize.getHeight(), previewSize.getWidth());
    final float centerX = viewRect.centerX();
    final float centerY = viewRect.centerY();
    if (Surface.ROTATION_90 == rotation || Surface.ROTATION_270 == rotation) {
      // Landscape: scale the (rotated) buffer to fill the view, then rotate it upright.
      bufferRect.offset(centerX - bufferRect.centerX(), centerY - bufferRect.centerY());
      matrix.setRectToRect(viewRect, bufferRect, Matrix.ScaleToFit.FILL);
      final float scale =
          Math.max(
              (float) viewHeight / previewSize.getHeight(),
              (float) viewWidth / previewSize.getWidth());
      matrix.postScale(scale, scale, centerX, centerY);
      matrix.postRotate(90 * (rotation - 2), centerX, centerY);
    } else if (Surface.ROTATION_180 == rotation) {
      matrix.postRotate(180, centerX, centerY);
    }
    textureView.setTransform(matrix);
  }

  /**
   * Compares two {@code Size}s based on their areas.
   */
  static class CompareSizesByArea implements Comparator<Size> {
    @Override
    public int compare(final Size lhs, final Size rhs) {
      // We cast here to ensure the multiplications won't overflow
      return Long.signum(
          (long) lhs.getWidth() * lhs.getHeight() - (long) rhs.getWidth() * rhs.getHeight());
    }
  }

  /**
   * Shows an error message dialog.
   */
  public static class ErrorDialog extends DialogFragment {
    private static final String ARG_MESSAGE = "message";

    public static ErrorDialog newInstance(final String message) {
      final ErrorDialog dialog = new ErrorDialog();
      final Bundle args = new Bundle();
      args.putString(ARG_MESSAGE, message);
      dialog.setArguments(args);
      return dialog;
    }

    @Override
    public Dialog onCreateDialog(final Bundle savedInstanceState) {
      final Activity activity = getActivity();
      return new AlertDialog.Builder(activity)
          .setMessage(getArguments().getString(ARG_MESSAGE))
          .setPositiveButton(
              android.R.string.ok,
              new DialogInterface.OnClickListener() {
                @Override
                public void onClick(final DialogInterface dialogInterface, final int i) {
                  // The error is unrecoverable, so dismissing the dialog ends the activity.
                  activity.finish();
                }
              })
          .create();
    }
  }
}
+ */ + public class Recognition { + /** + * A unique identifier for what has been recognized. Specific to the class, not the instance of + * the object. + */ + private final String id; + + /** + * Display name for the recognition. + */ + private final String title; + + /** + * A sortable score for how good the recognition is relative to others. Higher should be better. + */ + private final Float confidence; + + /** + * Optional location within the source image for the location of the recognized object. + */ + private final RectF location; + + public Recognition( + final String id, final String title, final Float confidence, final RectF location) { + this.id = id; + this.title = title; + this.confidence = confidence; + this.location = location; + } + + public String getId() { + return id; + } + + public String getTitle() { + return title; + } + + public Float getConfidence() { + return confidence; + } + + public RectF getLocation() { + return new RectF(location); + } + + @Override + public String toString() { + String resultString = ""; + if (id != null) { + resultString += "[" + id + "] "; + } + + if (title != null) { + resultString += title + " "; + } + + if (confidence != null) { + resultString += String.format("(%.1f%%) ", confidence * 100.0f); + } + + if (location != null) { + resultString += location + " "; + } + + return resultString.trim(); + } + } + + List<Recognition> recognizeImage(Bitmap bitmap); + + void close(); +} diff --git a/tensorflow/examples/android/src/org/tensorflow/demo/RecognitionScoreView.java b/tensorflow/examples/android/src/org/tensorflow/demo/RecognitionScoreView.java new file mode 100644 index 0000000000..961b492a8d --- /dev/null +++ b/tensorflow/examples/android/src/org/tensorflow/demo/RecognitionScoreView.java @@ -0,0 +1,53 @@ +package org.tensorflow.demo; + +import android.content.Context; +import android.graphics.Canvas; +import android.graphics.Paint; +import android.util.AttributeSet; +import android.util.TypedValue; +import 
android.view.View; + +import org.tensorflow.demo.Classifier.Recognition; + +import java.util.List; + +public class RecognitionScoreView extends View { + private static final float TEXT_SIZE_DIP = 24; + private List<Recognition> results; + private final float textSizePx; + private final Paint fgPaint; + private final Paint bgPaint; + + public RecognitionScoreView(final Context context, final AttributeSet set) { + super(context, set); + + textSizePx = + TypedValue.applyDimension( + TypedValue.COMPLEX_UNIT_DIP, TEXT_SIZE_DIP, getResources().getDisplayMetrics()); + fgPaint = new Paint(); + fgPaint.setTextSize(textSizePx); + + bgPaint = new Paint(); + bgPaint.setColor(0xcc4285f4); + } + + public void setResults(final List<Recognition> results) { + this.results = results; + postInvalidate(); + } + + @Override + public void onDraw(final Canvas canvas) { + final int x = 10; + int y = (int) (fgPaint.getTextSize() * 1.5f); + + canvas.drawPaint(bgPaint); + + if (results != null) { + for (final Recognition recog : results) { + canvas.drawText(recog.getTitle() + ": " + recog.getConfidence(), x, y, fgPaint); + y += fgPaint.getTextSize() * 1.5f; + } + } + } +} diff --git a/tensorflow/examples/android/src/org/tensorflow/demo/TensorflowClassifier.java b/tensorflow/examples/android/src/org/tensorflow/demo/TensorflowClassifier.java new file mode 100644 index 0000000000..84a7596ecb --- /dev/null +++ b/tensorflow/examples/android/src/org/tensorflow/demo/TensorflowClassifier.java @@ -0,0 +1,62 @@ +package org.tensorflow.demo; + +import android.content.res.AssetManager; +import android.graphics.Bitmap; +import android.util.Log; + +import java.util.ArrayList; +import java.util.List; +import java.util.StringTokenizer; + +/** + * JNI wrapper class for the Tensorflow native code. + */ +public class TensorflowClassifier implements Classifier { + private static final String TAG = "TensorflowClassifier"; + + // jni native methods. 
+ public native int initializeTensorflow( + AssetManager assetManager, + String model, + String labels, + int numClasses, + int inputSize, + int imageMean); + + private native String classifyImageBmp(Bitmap bitmap); + + private native String classifyImageRgb(int[] output, int width, int height); + + static { + System.loadLibrary("tensorflow_demo"); + } + + @Override + public List<Recognition> recognizeImage(final Bitmap bitmap) { + final ArrayList<Recognition> recognitions = new ArrayList<Recognition>(); + for (final String result : classifyImageBmp(bitmap).split("\n")) { + Log.i(TAG, "Parsing [" + result + "]"); + + // Clean up the string as needed + final StringTokenizer st = new StringTokenizer(result); + if (!st.hasMoreTokens()) { + continue; + } + + final String id = st.nextToken(); + final String confidenceString = st.nextToken(); + final float confidence = Float.parseFloat(confidenceString); + + final String title = + result.substring(id.length() + confidenceString.length() + 2, result.length()); + + if (!title.isEmpty()) { + recognitions.add(new Recognition(id, title, confidence, null)); + } + } + return recognitions; + } + + @Override + public void close() {} +} diff --git a/tensorflow/examples/android/src/org/tensorflow/demo/TensorflowImageListener.java b/tensorflow/examples/android/src/org/tensorflow/demo/TensorflowImageListener.java new file mode 100644 index 0000000000..940fbc6771 --- /dev/null +++ b/tensorflow/examples/android/src/org/tensorflow/demo/TensorflowImageListener.java @@ -0,0 +1,147 @@ +package org.tensorflow.demo; + +import android.content.res.AssetManager; +import android.graphics.Bitmap; +import android.graphics.Bitmap.Config; +import android.graphics.Canvas; +import android.graphics.Matrix; +import android.media.Image; +import android.media.Image.Plane; +import android.media.ImageReader; +import android.media.ImageReader.OnImageAvailableListener; + +import junit.framework.Assert; + +import org.tensorflow.demo.env.ImageUtils; +import 
org.tensorflow.demo.env.Logger; + +import java.nio.ByteBuffer; +import java.util.List; + +/** + * Class that takes in preview frames and converts the image to Bitmaps to process with Tensorflow. + */ +public class TensorflowImageListener implements OnImageAvailableListener { + private static final Logger LOGGER = new Logger(); + + private static final boolean SAVE_PREVIEW_BITMAP = false; + + private static final String MODEL_FILE = "file:///android_asset/tensorflow_inception_graph.pb"; + private static final String LABEL_FILE = + "file:///android_asset/imagenet_comp_graph_label_strings.txt"; + + private static final int NUM_CLASSES = 1001; + private static final int INPUT_SIZE = 224; + private static final int IMAGE_MEAN = 117; + + // TODO(andrewharp): Get orientation programmatically. + private final int screenRotation = 90; + + private final TensorflowClassifier tensorflow = new TensorflowClassifier(); + + private int previewWidth = 0; + private int previewHeight = 0; + private byte[] yuvBytes = null; + private int[] rgbBytes = null; + private Bitmap rgbFrameBitmap = null; + private Bitmap croppedBitmap = null; + + private RecognitionScoreView scoreView; + + public void initialize(final AssetManager assetManager, final RecognitionScoreView scoreView) { + tensorflow.initializeTensorflow( + assetManager, MODEL_FILE, LABEL_FILE, NUM_CLASSES, INPUT_SIZE, IMAGE_MEAN); + this.scoreView = scoreView; + } + + private void drawResizedBitmap(final Bitmap src, final Bitmap dst) { + Assert.assertEquals(dst.getWidth(), dst.getHeight()); + final float minDim = Math.min(src.getWidth(), src.getHeight()); + + final Matrix matrix = new Matrix(); + + // We only want the center square out of the original rectangle.
+ final float translateX = -Math.max(0, (src.getWidth() - minDim) / 2); + final float translateY = -Math.max(0, (src.getHeight() - minDim) / 2); + matrix.preTranslate(translateX, translateY); + + final float scaleFactor = dst.getHeight() / minDim; + matrix.postScale(scaleFactor, scaleFactor); + + // Rotate around the center if necessary. + if (screenRotation != 0) { + matrix.postTranslate(-dst.getWidth() / 2.0f, -dst.getHeight() / 2.0f); + matrix.postRotate(screenRotation); + matrix.postTranslate(dst.getWidth() / 2.0f, dst.getHeight() / 2.0f); + } + + final Canvas canvas = new Canvas(dst); + canvas.drawBitmap(src, matrix, null); + } + + @Override + public void onImageAvailable(final ImageReader reader) { + Image image = null; + try { + image = reader.acquireLatestImage(); + + if (image == null) { + return; + } + + // Initialize the storage bitmaps once when the resolution is known. + if (previewWidth != image.getWidth() || previewHeight != image.getHeight()) { + LOGGER.i("Initializing at size %dx%d", previewWidth, previewHeight); + previewWidth = image.getWidth(); + previewHeight = image.getHeight(); + rgbBytes = new int[previewWidth * previewHeight]; + yuvBytes = new byte[ImageUtils.getYUVByteSize(previewWidth, previewHeight)]; + rgbFrameBitmap = Bitmap.createBitmap(previewWidth, previewHeight, Config.ARGB_8888); + croppedBitmap = Bitmap.createBitmap(INPUT_SIZE, INPUT_SIZE, Config.ARGB_8888); + } + + final Plane[] planes = image.getPlanes(); + int position = 0; + + // Copy the bytes from the Image into a buffer for easier conversion to RGB. + // TODO(andrewharp): It may not be correct to do it this way. 
+ final int[] planeOrder = {0, 2}; + for (int i = 0; i < planeOrder.length; ++i) { + final Plane plane = planes[planeOrder[i]]; + final ByteBuffer buffer = plane.getBuffer(); + + buffer.rewind(); + final int readAmount = buffer.remaining(); + + buffer.get(yuvBytes, position, readAmount); + position += readAmount; + } + + image.close(); + + ImageUtils.convertYUV420SPToARGB8888(yuvBytes, rgbBytes, previewWidth, previewHeight, false); + } catch (final Exception e) { + if (image != null) { + image.close(); + } + LOGGER.e(e, "Exception!"); + return; + } + + rgbFrameBitmap.setPixels(rgbBytes, 0, previewWidth, 0, 0, previewWidth, previewHeight); + drawResizedBitmap(rgbFrameBitmap, croppedBitmap); + + // For examining the actual TF input. + if (SAVE_PREVIEW_BITMAP) { + ImageUtils.saveBitmap(croppedBitmap); + } + + final List<Classifier.Recognition> results = tensorflow.recognizeImage(croppedBitmap); + + LOGGER.v("%d results", results.size()); + for (final Classifier.Recognition result : results) { + LOGGER.v("Result: " + result.getTitle()); + } + scoreView.setResults(results); + } +} diff --git a/tensorflow/examples/android/src/org/tensorflow/demo/env/ImageUtils.java b/tensorflow/examples/android/src/org/tensorflow/demo/env/ImageUtils.java new file mode 100644 index 0000000000..78f818f734 --- /dev/null +++ b/tensorflow/examples/android/src/org/tensorflow/demo/env/ImageUtils.java @@ -0,0 +1,113 @@ +package org.tensorflow.demo.env; + +import android.graphics.Bitmap; +import android.os.Environment; + +import java.io.File; +import java.io.FileOutputStream; + +/** + * Utility class for manipulating images. + **/ +public class ImageUtils { + @SuppressWarnings("unused") + private static final Logger LOGGER = new Logger(); + + /** + * Utility method to compute the allocated size in bytes of a YUV420SP image + * of the given dimensions. + */ + public static int getYUVByteSize(final int width, final int height) { + // The luminance plane requires 1 byte per pixel. 
+ final int ySize = width * height; + + // The UV plane works on 2x2 blocks, so dimensions with odd size must be rounded up. + // Each 2x2 block takes 2 bytes to encode, one each for U and V. + final int uvSize = ((width + 1) / 2) * ((height + 1) / 2) * 2; + + return ySize + uvSize; + } + + /** + * Saves a Bitmap object to disk for analysis. + * + * @param bitmap The bitmap to save. + */ + public static void saveBitmap(final Bitmap bitmap) { + final String root = + Environment.getExternalStorageDirectory().getAbsolutePath() + File.separator + "tensorflow"; + LOGGER.i("Saving %dx%d bitmap to %s.", bitmap.getWidth(), bitmap.getHeight(), root); + final File myDir = new File(root); + + if (!myDir.mkdirs()) { + LOGGER.i("Make dir failed"); + } + + final String fname = "preview.png"; + final File file = new File(myDir, fname); + if (file.exists()) { + file.delete(); + } + try { + final FileOutputStream out = new FileOutputStream(file); + bitmap.compress(Bitmap.CompressFormat.PNG, 99, out); + out.flush(); + out.close(); + } catch (final Exception e) { + LOGGER.e(e, "Exception!"); + } + } + + /** + * Converts YUV420 semi-planar data to ARGB 8888 data using the supplied width + * and height. The input and output must already be allocated and non-null. + * For efficiency, no error checking is performed. + * + * @param input The array of YUV 4:2:0 input data. + * @param output A pre-allocated array for the ARGB 8:8:8:8 output data. + * @param width The width of the input image. + * @param height The height of the input image. + * @param halfSize If true, downsample to 50% in each dimension, otherwise not. + */ + public static native void convertYUV420SPToARGB8888( + byte[] input, int[] output, int width, int height, boolean halfSize); + + /** + * Converts YUV420 semi-planar data to RGB 565 data using the supplied width + * and height. The input and output must already be allocated and non-null. + * For efficiency, no error checking is performed. 
+ * + * @param input The array of YUV 4:2:0 input data. + * @param output A pre-allocated array for the RGB 5:6:5 output data. + * @param width The width of the input image. + * @param height The height of the input image. + */ + public static native void convertYUV420SPToRGB565( + byte[] input, byte[] output, int width, int height); + + /** + * Converts 32-bit ARGB8888 image data to YUV420SP data. This is useful, for + * instance, in creating data to feed the classes that rely on raw camera + * preview frames. + * + * @param input An array of input pixels in ARGB8888 format. + * @param output A pre-allocated array for the YUV420SP output data. + * @param width The width of the input image. + * @param height The height of the input image. + */ + public static native void convertARGB8888ToYUV420SP( + int[] input, byte[] output, int width, int height); + + /** + * Converts 16-bit RGB565 image data to YUV420SP data. This is useful, for + * instance, in creating data to feed the classes that rely on raw camera + * preview frames. + * + * @param input An array of input pixels in RGB565 format. + * @param output A pre-allocated array for the YUV420SP output data. + * @param width The width of the input image. + * @param height The height of the input image. + */ + public static native void convertRGB565ToYUV420SP( + byte[] input, byte[] output, int width, int height); +} diff --git a/tensorflow/examples/android/src/org/tensorflow/demo/env/Logger.java b/tensorflow/examples/android/src/org/tensorflow/demo/env/Logger.java new file mode 100644 index 0000000000..697c231176 --- /dev/null +++ b/tensorflow/examples/android/src/org/tensorflow/demo/env/Logger.java @@ -0,0 +1,176 @@ +package org.tensorflow.demo.env; + +import android.util.Log; + +import java.util.HashSet; +import java.util.Set; + +/** + * Wrapper for the platform log function, allows convenient message prefixing and log disabling. 
 + */ +public final class Logger { + private static final String DEFAULT_TAG = "tensorflow"; + private static final int DEFAULT_MIN_LOG_LEVEL = Log.DEBUG; + + // Classes to be ignored when examining the stack trace + private static final Set<String> IGNORED_CLASS_NAMES; + + static { + IGNORED_CLASS_NAMES = new HashSet<String>(3); + IGNORED_CLASS_NAMES.add("dalvik.system.VMStack"); + IGNORED_CLASS_NAMES.add("java.lang.Thread"); + IGNORED_CLASS_NAMES.add(Logger.class.getCanonicalName()); + } + + private final String tag; + private final String messagePrefix; + private int minLogLevel = DEFAULT_MIN_LOG_LEVEL; + + /** + * Creates a Logger using the class name as the message prefix. + * + * @param clazz the simple name of this class is used as the message prefix. + */ + public Logger(final Class<?> clazz) { + this(clazz.getSimpleName()); + } + + /** + * Creates a Logger using the specified message prefix. + * + * @param messagePrefix is prepended to the text of every message. + */ + public Logger(final String messagePrefix) { + this(DEFAULT_TAG, messagePrefix); + } + + /** + * Creates a Logger with a custom tag and a custom message prefix. If the message prefix + * is set to <pre>null</pre>, the caller's class name is used as the prefix. + * + * @param tag identifies the source of a log message. + * @param messagePrefix prepended to every message if non-null. If null, the name of the caller is + * being used + */ + public Logger(final String tag, final String messagePrefix) { + this.tag = tag; + final String prefix = messagePrefix == null ? getCallerSimpleName() : messagePrefix; + this.messagePrefix = (prefix.length() > 0) ? prefix + ": " : prefix; + } + + /** + * Creates a Logger using the caller's class name as the message prefix. + */ + public Logger() { + this(DEFAULT_TAG, null); + } + + /** + * Creates a Logger using the caller's class name as the message prefix, with a custom minimum log level.
+ */ + public Logger(final int minLogLevel) { + this(DEFAULT_TAG, null); + this.minLogLevel = minLogLevel; + } + + public void setMinLogLevel(final int minLogLevel) { + this.minLogLevel = minLogLevel; + } + + public boolean isLoggable(final int logLevel) { + return logLevel >= minLogLevel || Log.isLoggable(tag, logLevel); + } + + /** + * Return caller's simple name. + * + * Android getStackTrace() returns an array that looks like this: + * stackTrace[0]: dalvik.system.VMStack + * stackTrace[1]: java.lang.Thread + * stackTrace[2]: com.google.android.apps.unveil.env.UnveilLogger + * stackTrace[3]: com.google.android.apps.unveil.BaseApplication + * + * This function returns the simple version of the first non-filtered name. + * + * @return caller's simple name + */ + private static String getCallerSimpleName() { + // Get the current callstack so we can pull the class of the caller off of it. + final StackTraceElement[] stackTrace = Thread.currentThread().getStackTrace(); + + for (final StackTraceElement elem : stackTrace) { + final String className = elem.getClassName(); + if (!IGNORED_CLASS_NAMES.contains(className)) { + // We're only interested in the simple name of the class, not the complete package. + final String[] classParts = className.split("\\."); + return classParts[classParts.length - 1]; + } + } + + return Logger.class.getSimpleName(); + } + + private String toMessage(final String format, final Object... args) { + return messagePrefix + (args.length > 0 ? String.format(format, args) : format); + } + + public void v(final String format, final Object... args) { + if (isLoggable(Log.VERBOSE)) { + Log.v(tag, toMessage(format, args)); + } + } + + public void v(final Throwable t, final String format, final Object... args) { + if (isLoggable(Log.VERBOSE)) { + Log.v(tag, toMessage(format, args), t); + } + } + + public void d(final String format, final Object... 
args) { + if (isLoggable(Log.DEBUG)) { + Log.d(tag, toMessage(format, args)); + } + } + + public void d(final Throwable t, final String format, final Object... args) { + if (isLoggable(Log.DEBUG)) { + Log.d(tag, toMessage(format, args), t); + } + } + + public void i(final String format, final Object... args) { + if (isLoggable(Log.INFO)) { + Log.i(tag, toMessage(format, args)); + } + } + + public void i(final Throwable t, final String format, final Object... args) { + if (isLoggable(Log.INFO)) { + Log.i(tag, toMessage(format, args), t); + } + } + + public void w(final String format, final Object... args) { + if (isLoggable(Log.WARN)) { + Log.w(tag, toMessage(format, args)); + } + } + + public void w(final Throwable t, final String format, final Object... args) { + if (isLoggable(Log.WARN)) { + Log.w(tag, toMessage(format, args), t); + } + } + + public void e(final String format, final Object... args) { + if (isLoggable(Log.ERROR)) { + Log.e(tag, toMessage(format, args)); + } + } + + public void e(final Throwable t, final String format, final Object... args) { + if (isLoggable(Log.ERROR)) { + Log.e(tag, toMessage(format, args), t); + } + } +} |