aboutsummaryrefslogtreecommitdiffhomepage
path: root/tensorflow/contrib/lite/java
diff options
context:
space:
mode:
authorGravatar A. Unique TensorFlower <gardener@tensorflow.org>2018-10-01 13:08:10 -0700
committerGravatar TensorFlower Gardener <gardener@tensorflow.org>2018-10-01 13:16:46 -0700
commit3c6e6885f32e7638ece306dad3a5081b06137bdc (patch)
tree1c2d2f767a2f91dc0d41e9d6af79b5974c834df2 /tensorflow/contrib/lite/java
parent3648cb0198690d551ea5c8eefcf706c8fa67f4f0 (diff)
Check in and refactor the OVIC detector benchmarker.
PiperOrigin-RevId: 215266415
Diffstat (limited to 'tensorflow/contrib/lite/java')
-rw-r--r--tensorflow/contrib/lite/java/ovic/BUILD61
-rw-r--r--tensorflow/contrib/lite/java/ovic/demo/app/BUILD5
-rw-r--r--tensorflow/contrib/lite/java/ovic/demo/app/OvicBenchmarkerActivity.java77
-rw-r--r--tensorflow/contrib/lite/java/ovic/demo/app/res/layout/activity_main.xml27
-rw-r--r--tensorflow/contrib/lite/java/ovic/demo/app/res/values/strings.xml3
-rw-r--r--tensorflow/contrib/lite/java/ovic/src/main/java/org/tensorflow/ovic/BoundingBox.java68
-rw-r--r--tensorflow/contrib/lite/java/ovic/src/main/java/org/tensorflow/ovic/OvicBenchmarker.java152
-rw-r--r--tensorflow/contrib/lite/java/ovic/src/main/java/org/tensorflow/ovic/OvicClassificationResult.java (renamed from tensorflow/contrib/lite/java/ovic/src/main/java/org/tensorflow/ovic/OvicSingleImageResult.java)12
-rw-r--r--tensorflow/contrib/lite/java/ovic/src/main/java/org/tensorflow/ovic/OvicClassifier.java10
-rw-r--r--tensorflow/contrib/lite/java/ovic/src/main/java/org/tensorflow/ovic/OvicClassifierBenchmarker.java142
-rw-r--r--tensorflow/contrib/lite/java/ovic/src/main/java/org/tensorflow/ovic/OvicDetectionResult.java91
-rw-r--r--tensorflow/contrib/lite/java/ovic/src/main/java/org/tensorflow/ovic/OvicDetector.java184
-rw-r--r--tensorflow/contrib/lite/java/ovic/src/main/java/org/tensorflow/ovic/OvicDetectorBenchmarker.java160
-rw-r--r--tensorflow/contrib/lite/java/ovic/src/main/java/org/tensorflow/ovic/OvicValidator.java2
-rw-r--r--tensorflow/contrib/lite/java/ovic/src/test/java/org/tensorflow/ovic/OvicClassifierTest.java6
-rw-r--r--tensorflow/contrib/lite/java/ovic/src/test/java/org/tensorflow/ovic/OvicDetectorTest.java149
-rw-r--r--tensorflow/contrib/lite/java/ovic/src/testdata/BUILD5
-rw-r--r--tensorflow/contrib/lite/java/ovic/src/testdata/coco_labels.txt91
18 files changed, 1101 insertions, 144 deletions
diff --git a/tensorflow/contrib/lite/java/ovic/BUILD b/tensorflow/contrib/lite/java/ovic/BUILD
index bb0be04ca2..ea9b9ed4b6 100644
--- a/tensorflow/contrib/lite/java/ovic/BUILD
+++ b/tensorflow/contrib/lite/java/ovic/BUILD
@@ -9,6 +9,7 @@ licenses(["notice"]) # Apache 2.0
load("//tensorflow/java:build_defs.bzl", "JAVACOPTS")
+# Build targets for OVIC classification.
java_test(
name = "OvicClassifierTest",
size = "medium",
@@ -45,8 +46,9 @@ android_library(
name = "ovicbenchmarkerlib",
srcs = [
"src/main/java/org/tensorflow/ovic/OvicBenchmarker.java",
+ "src/main/java/org/tensorflow/ovic/OvicClassificationResult.java",
"src/main/java/org/tensorflow/ovic/OvicClassifier.java",
- "src/main/java/org/tensorflow/ovic/OvicSingleImageResult.java",
+ "src/main/java/org/tensorflow/ovic/OvicClassifierBenchmarker.java",
],
manifest = "//tensorflow/contrib/lite/java:AndroidManifest.xml",
tags = ["no_oss"],
@@ -60,8 +62,8 @@ android_library(
java_library(
name = "ovicbenchmarkerlib_java",
srcs = [
+ "src/main/java/org/tensorflow/ovic/OvicClassificationResult.java",
"src/main/java/org/tensorflow/ovic/OvicClassifier.java",
- "src/main/java/org/tensorflow/ovic/OvicSingleImageResult.java",
],
javacopts = JAVACOPTS,
tags = ["no_oss"],
@@ -73,3 +75,58 @@ java_library(
"@org_checkerframework_qual",
],
)
+
+# Build targets for OVIC detection.
+java_test(
+ name = "OvicDetectorTest",
+ size = "medium",
+ srcs = ["src/test/java/org/tensorflow/ovic/OvicDetectorTest.java"],
+ data = [
+ "//tensorflow/contrib/lite/java/ovic/src/testdata:coco_labels.txt",
+ "//tensorflow/contrib/lite/java/ovic/src/testdata:ovic_testdata",
+ "@tflite_mobilenet_ssd_quant//:detect.tflite",
+ ],
+ javacopts = JAVACOPTS,
+ tags = ["no_oss"],
+ test_class = "org.tensorflow.ovic.OvicDetectorTest",
+ visibility = ["//visibility:public"],
+ deps = [
+ "//tensorflow/contrib/lite/java/ovic:ovicdetectionbenchmarkerlib_java",
+ "@com_google_truth",
+ "@junit",
+ ],
+)
+
+android_library(
+ name = "ovicdetectionbenchmarkerlib",
+ srcs = [
+ "src/main/java/org/tensorflow/ovic/BoundingBox.java",
+ "src/main/java/org/tensorflow/ovic/OvicBenchmarker.java",
+ "src/main/java/org/tensorflow/ovic/OvicDetectionResult.java",
+ "src/main/java/org/tensorflow/ovic/OvicDetector.java",
+ "src/main/java/org/tensorflow/ovic/OvicDetectorBenchmarker.java",
+ ],
+ manifest = "//tensorflow/contrib/lite/java:AndroidManifest.xml",
+ deps = [
+ "//tensorflow/contrib/lite/java:tensorflowlite",
+ "//tensorflow/contrib/lite/java/src/testhelper/java/org/tensorflow/lite:testhelper",
+ "@org_checkerframework_qual",
+ ],
+)
+
+java_library(
+ name = "ovicdetectionbenchmarkerlib_java",
+ srcs = [
+ "src/main/java/org/tensorflow/ovic/BoundingBox.java",
+ "src/main/java/org/tensorflow/ovic/OvicDetectionResult.java",
+ "src/main/java/org/tensorflow/ovic/OvicDetector.java",
+ ],
+ javacopts = JAVACOPTS,
+ deps = [
+ "//tensorflow/contrib/lite/java:libtensorflowlite_jni.so",
+ "//tensorflow/contrib/lite/java:tensorflowlite_java",
+ "//tensorflow/contrib/lite/java/src/main/native",
+ "//tensorflow/contrib/lite/java/src/testhelper/java/org/tensorflow/lite:testhelper",
+ "@org_checkerframework_qual",
+ ],
+)
diff --git a/tensorflow/contrib/lite/java/ovic/demo/app/BUILD b/tensorflow/contrib/lite/java/ovic/demo/app/BUILD
index 058240aada..f567358ea3 100644
--- a/tensorflow/contrib/lite/java/ovic/demo/app/BUILD
+++ b/tensorflow/contrib/lite/java/ovic/demo/app/BUILD
@@ -10,8 +10,10 @@ android_binary(
],
aapt_version = "aapt",
assets = [
- "//tensorflow/contrib/lite/java/ovic/src/testdata:ovic_testdata",
+ "//tensorflow/contrib/lite/java/ovic/src/testdata:coco_labels.txt",
"//tensorflow/contrib/lite/java/ovic/src/testdata:labels.txt",
+ "//tensorflow/contrib/lite/java/ovic/src/testdata:ovic_testdata",
+ "@tflite_mobilenet_ssd_quant//:detect.tflite",
],
assets_dir = "",
custom_package = "ovic.demo.app",
@@ -25,6 +27,7 @@ android_binary(
deps = [
"//tensorflow/contrib/lite/java:tensorflowlite",
"//tensorflow/contrib/lite/java/ovic:ovicbenchmarkerlib",
+ "//tensorflow/contrib/lite/java/ovic:ovicdetectionbenchmarkerlib",
"@androidsdk//com.android.support:support-v13-25.2.0",
"@androidsdk//com.android.support:support-v4-25.2.0",
],
diff --git a/tensorflow/contrib/lite/java/ovic/demo/app/OvicBenchmarkerActivity.java b/tensorflow/contrib/lite/java/ovic/demo/app/OvicBenchmarkerActivity.java
index 4adf94aeb6..48c29ecebe 100644
--- a/tensorflow/contrib/lite/java/ovic/demo/app/OvicBenchmarkerActivity.java
+++ b/tensorflow/contrib/lite/java/ovic/demo/app/OvicBenchmarkerActivity.java
@@ -35,19 +35,18 @@ import java.nio.MappedByteBuffer;
import java.nio.channels.FileChannel;
import java.text.DecimalFormat;
import org.tensorflow.ovic.OvicBenchmarker;
-import org.tensorflow.ovic.OvicSingleImageResult;
-
+import org.tensorflow.ovic.OvicClassifierBenchmarker;
+import org.tensorflow.ovic.OvicDetectorBenchmarker;
/** Class that benchmark image classifier models. */
public class OvicBenchmarkerActivity extends Activity {
/** Tag for the {@link Log}. */
private static final String TAG = "OvicBenchmarkerActivity";
- /** Name of the label file stored in Assets. */
- private static final String LABEL_PATH = "labels.txt";
-
- private static final String TEST_IMAGE_PATH = "test_image_224.jpg";
- private static final String MODEL_PATH = "float_model.lite";
+ /** Name of the task-dependent data files stored in Assets. */
+ private static String labelPath = null;
+ private static String testImagePath = null;
+ private static String modelPath = null;
/**
* Each button press will launch a benchmarking experiment. The experiment stops when either the
* total native latency reaches WALL_TIME or the number of iterations reaches MAX_ITERATIONS,
@@ -66,8 +65,6 @@ public class OvicBenchmarkerActivity extends Activity {
private MappedByteBuffer model = null;
private InputStream labelInputStream = null;
private OvicBenchmarker benchmarker;
- /** Inference result of each iteration. */
- OvicSingleImageResult iterResult = null;
private TextView textView = null;
// private Button startButton = null;
@@ -83,21 +80,31 @@ public class OvicBenchmarkerActivity extends Activity {
}
private Bitmap loadTestBitmap() throws IOException {
- InputStream imageStream = getAssets().open(TEST_IMAGE_PATH);
+ InputStream imageStream = getAssets().open(testImagePath);
return BitmapFactory.decodeStream(imageStream);
}
- public void initializeTest() throws IOException {
+ public void initializeTest(boolean benchmarkClassification) throws IOException {
Log.i(TAG, "Initializing benchmarker.");
- benchmarker = new OvicBenchmarker(WALL_TIME);
+ if (benchmarkClassification) {
+ benchmarker = new OvicClassifierBenchmarker(WALL_TIME);
+ labelPath = "labels.txt";
+ testImagePath = "test_image_224.jpg";
+ modelPath = "quantized_model.lite";
+ } else { // Benchmarking detection.
+ benchmarker = new OvicDetectorBenchmarker(WALL_TIME);
+ labelPath = "coco_labels.txt";
+ testImagePath = "test_image_224.jpg";
+ modelPath = "detect.tflite";
+ }
AssetManager am = getAssets();
- AssetFileDescriptor fileDescriptor = am.openFd(MODEL_PATH);
+ AssetFileDescriptor fileDescriptor = am.openFd(modelPath);
FileInputStream modelInputStream = new FileInputStream(fileDescriptor.getFileDescriptor());
FileChannel fileChannel = modelInputStream.getChannel();
long startOffset = fileDescriptor.getStartOffset();
long declaredLength = fileDescriptor.getDeclaredLength();
model = fileChannel.map(FileChannel.MapMode.READ_ONLY, startOffset, declaredLength);
- labelInputStream = am.open(LABEL_PATH);
+ labelInputStream = am.open(labelPath);
}
public Boolean doTestIteration() throws IOException, InterruptedException {
@@ -117,24 +124,44 @@ public class OvicBenchmarkerActivity extends Activity {
Log.i(TAG, "Going to do test iter.");
// Start testing.
Bitmap testImageBitmap = loadTestBitmap();
- iterResult = benchmarker.doTestIteration(testImageBitmap);
- testImageBitmap.recycle();
- if (iterResult == null) {
+ try {
+ if (!benchmarker.processBitmap(testImageBitmap)) {
+ throw new RuntimeException("Failed to run test.");
+ }
+ } catch (Exception e) {
+ e.printStackTrace();
+ throw e;
+ } finally {
+ testImageBitmap.recycle();
+ }
+ String iterResultString = benchmarker.getLastResultString();
+ if (iterResultString == null) {
throw new RuntimeException("Inference failed to produce a result.");
}
- Log.i(TAG, iterResult.toString());
+ Log.i(TAG, iterResultString);
return true;
}
- public void startPressed(View view) throws IOException {
- Log.i(TAG, "Start pressed");
+ public void detectPressed(View view) throws IOException {
+ benchmarkSession(false);
+ }
+ public void classifyPressed(View view) throws IOException {
+ benchmarkSession(true);
+ }
+
+ private void benchmarkSession(boolean benchmarkClassification) throws IOException {
try {
- initializeTest();
+ initializeTest(benchmarkClassification);
} catch (IOException e) {
Log.e(TAG, "Can't initialize benchmarker.", e);
throw e;
}
String displayText = "";
+ if (benchmarkClassification) {
+ displayText = "Classification benchmark: ";
+ } else {
+ displayText = "Detection benchmark: ";
+ }
try {
setProcessorAffinity(BIG_CORE_MASK);
} catch (IOException e) {
@@ -144,7 +171,6 @@ public class OvicBenchmarkerActivity extends Activity {
Log.i(TAG, "Successfully initialized benchmarker.");
int testIter = 0;
Boolean iterSuccess = false;
- double totalLatency = 0.0f;
while (testIter < MAX_ITERATIONS) {
try {
iterSuccess = doTestIteration();
@@ -153,23 +179,22 @@ public class OvicBenchmarkerActivity extends Activity {
throw e;
} catch (InterruptedException e) {
Log.e(TAG, "Interrupted at iteration " + testIter);
+ displayText += e.getMessage() + "\n";
}
if (!iterSuccess) {
break;
}
testIter++;
- totalLatency += (double) iterResult.latency;
}
- ;
Log.i(TAG, "Benchmarking finished");
if (textView != null) {
if (testIter > 0) {
textView.setText(
displayText
- + MODEL_PATH
+ + modelPath
+ ": Average latency="
- + df2.format(totalLatency / testIter)
+ + df2.format(benchmarker.getTotalRunTime() / testIter)
+ "ms after "
+ testIter
+ " runs.");
diff --git a/tensorflow/contrib/lite/java/ovic/demo/app/res/layout/activity_main.xml b/tensorflow/contrib/lite/java/ovic/demo/app/res/layout/activity_main.xml
index e9d83bae54..1bce60ff7d 100644
--- a/tensorflow/contrib/lite/java/ovic/demo/app/res/layout/activity_main.xml
+++ b/tensorflow/contrib/lite/java/ovic/demo/app/res/layout/activity_main.xml
@@ -30,14 +30,14 @@
android:layout_height="wrap_content"
android:text="@string/initial_status_msg"
android:id="@+id/textView"
- android:layout_above="@+id/button_start"
+ android:layout_above="@+id/button_clf_start"
android:layout_alignParentTop="true"/>
<Button
android:layout_width="wrap_content"
android:layout_height="wrap_content"
- android:text="@string/start_label"
- android:id="@id/button_start"
+ android:text="@string/start_clf_label"
+ android:id="@id/button_clf_start"
android:layout_alignParentBottom="true"
android:layout_alignParentLeft="true"
android:background="@drawable/start_button_color"
@@ -49,6 +49,25 @@
android:textColor="#ffffff"
android:enabled="true"
style="?android:attr/buttonBarButtonStyle"
- android:onClick="startPressed"/>
+ android:onClick="classifyPressed"/>
+
+ <Button
+ android:layout_width="wrap_content"
+ android:layout_height="wrap_content"
+ android:text="@string/start_det_label"
+ android:id="@+id/button_det_start"
+ android:layout_alignParentBottom="true"
+ android:layout_alignParentRight="true"
+ android:layout_toRightOf="@id/button_clf_start"
+ android:background="@drawable/start_button_color"
+ android:padding="10dp"
+ android:layout_marginRight="100dp"
+ android:layout_marginLeft="30dp"
+ android:layout_marginTop="10dp"
+ android:foreground="#000000"
+ android:textColor="#ffffff"
+ android:enabled="true"
+ style="?android:attr/buttonBarButtonStyle"
+ android:onClick="detectPressed"/>
</RelativeLayout>
diff --git a/tensorflow/contrib/lite/java/ovic/demo/app/res/values/strings.xml b/tensorflow/contrib/lite/java/ovic/demo/app/res/values/strings.xml
index d26beb1d27..53525908d3 100644
--- a/tensorflow/contrib/lite/java/ovic/demo/app/res/values/strings.xml
+++ b/tensorflow/contrib/lite/java/ovic/demo/app/res/values/strings.xml
@@ -17,6 +17,7 @@
<resources>
<string name="app_name" translatable="false">Benchmarker</string>
- <string name="start_label" translatable="false">Start</string>
+ <string name="start_clf_label" translatable="false">Clf</string>
+ <string name="start_det_label" translatable="false">Det</string>
<string name="initial_status_msg" translatable="false"> Press start to run the benchmarks.</string>
</resources>
diff --git a/tensorflow/contrib/lite/java/ovic/src/main/java/org/tensorflow/ovic/BoundingBox.java b/tensorflow/contrib/lite/java/ovic/src/main/java/org/tensorflow/ovic/BoundingBox.java
new file mode 100644
index 0000000000..9bf7d005d2
--- /dev/null
+++ b/tensorflow/contrib/lite/java/ovic/src/main/java/org/tensorflow/ovic/BoundingBox.java
@@ -0,0 +1,68 @@
+/* Copyright 2018 The TensorFlow Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ https://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+package org.tensorflow.ovic;
+
+/** Class for holding a detection bounding box with category and confidence. */
+public class BoundingBox {
+ // Upper left point.
+ public float x1;
+ public float y1;
+
+ // Lower right point.
+ public float x2;
+ public float y2;
+
+ // The area of the box
+ public float area;
+
+ // The object category
+ public int category;
+
+ // The confidence of the detection
+ public float score;
+
+ public BoundingBox(float x1, float y1, float x2, float y2, int category, float score) {
+ this.x1 = x1;
+ this.y1 = y1;
+ this.x2 = x2;
+ this.y2 = y2;
+ this.category = category;
+ this.score = score;
+ // -1 stands for area not initialized
+ this.area = -1;
+ }
+
+ // The intersection area of two bounding boxes
+ public float intersect(BoundingBox bbx) {
+ return Math.max(0, Math.min(x2, bbx.x2) - Math.max(x1, bbx.x1))
+ * Math.max(0, Math.min(y2, bbx.y2) - Math.max(y1, bbx.y1));
+ }
+
+ // The union area of two bounding boxes
+ public float union(BoundingBox bbx) {
+ return bbx.getArea() + this.getArea() - this.intersect(bbx);
+ }
+
+ public float getArea() {
+ if (area < 0) {
+ area = (x2 - x1) * (y2 - y1);
+ }
+ return area;
+ }
+
+ public float computeIoU(BoundingBox bbx) {
+ return (float) (this.intersect(bbx) * 1.0 / this.union(bbx));
+ }
+}
diff --git a/tensorflow/contrib/lite/java/ovic/src/main/java/org/tensorflow/ovic/OvicBenchmarker.java b/tensorflow/contrib/lite/java/ovic/src/main/java/org/tensorflow/ovic/OvicBenchmarker.java
index 4cda258bee..15d9511f50 100644
--- a/tensorflow/contrib/lite/java/ovic/src/main/java/org/tensorflow/ovic/OvicBenchmarker.java
+++ b/tensorflow/contrib/lite/java/ovic/src/main/java/org/tensorflow/ovic/OvicBenchmarker.java
@@ -20,11 +20,10 @@ import android.util.Log;
import java.io.IOException;
import java.io.InputStream;
import java.nio.ByteBuffer;
-import java.nio.ByteOrder;
import java.nio.MappedByteBuffer;
/**
- * Class that benchmarks image classifier models.
+ * Base class that benchmarks image models.
*
* <p>===================== General workflow =======================
*
@@ -33,37 +32,40 @@ import java.nio.MappedByteBuffer;
* benchmarker.getReadyToTest(labelInputStream, model);
* while (!benchmarker.shouldStop()) {
* Bitmap bitmap = ...
- * benchmarker.doTestIteration(bitmap);
+ * imgId = ...
+ * benchmarker.processBitmap(bitmap, imgId);
* }
* }</pre>
*/
-public class OvicBenchmarker {
+public abstract class OvicBenchmarker {
/** Tag for the {@link Log}. */
private static final String TAG = "OvicBenchmarker";
- /** Evaluation transformation parameters. */
- private static final float CENTRAL_FRACTION = 0.875f;
-
/** Dimensions of inputs. */
- private static final int DIM_BATCH_SIZE = 1;
- private static final int DIM_PIXEL_SIZE = 3;
- private int imgHeight = 224;
- private int imgWidth = 224;
+ protected static final int DIM_BATCH_SIZE = 1;
+ protected static final int DIM_PIXEL_SIZE = 3;
+ protected int imgHeight = 224;
+ protected int imgWidth = 224;
+
+ /** Preprocess parameters (only used when input is float). */
+ protected static final float IMAGE_MEAN = 127.5f;
+ protected static final float IMAGE_STD = 127.5f;
+
+ /** Whether input is float or quantized. */
+ protected Boolean quantizedInput = null;
/* Preallocated buffers for storing image data in. */
- private int[] intValues = null;
+ protected int[] intValues = null;
/** A ByteBuffer to hold image data, to be feed into classifier as inputs. */
- private ByteBuffer imgData = null;
-
- private OvicClassifier classifier;
+ protected ByteBuffer imgData = null;
/** Total runtime in ms. */
- private double totalRuntime = 0.0;
+ protected double totalRuntime = 0.0;
/** Total allowed runtime in ms. */
- private double wallTime = 20000 * 30.0;
-
- private Boolean benchmarkStarted = null;
+ protected double wallTime = 20000 * 30.0;
+ /** Record whether benchmark has started (used to skip the first image). */
+ protected boolean benchmarkStarted = false;
/**
* Initializes an {@link OvicBenchmarker}
@@ -76,6 +78,11 @@ public class OvicBenchmarker {
this.wallTime = wallTime;
}
+ /** Return the cumulative latency of all runs so far. */
+ public double getTotalRunTime() {
+ return totalRuntime;
+ }
+
/** Check whether the benchmarker should stop. */
public Boolean shouldStop() {
if (totalRuntime >= wallTime) {
@@ -90,105 +97,62 @@ public class OvicBenchmarker {
return false;
}
- /** Check whether the benchmarker is ready to start classifying images. */
- public Boolean readyToTest() {
- return (classifier != null);
- }
+ /** Abstract method for checking whether the benchmarker is ready to start processing images. */
+ public abstract boolean readyToTest();
/**
- * Getting the benchmarker ready for classifying images.
* Abstract method for getting the benchmarker ready.
*
* @param labelInputStream: an {@link InputStream} specifying where the list of labels should be
* read from.
* @param model: a {@link MappedByteBuffer} model to benchmark.
*/
- public void getReadyToTest(InputStream labelInputStream, MappedByteBuffer model) {
- try {
- Log.i(TAG, "Creating classifier.");
- classifier = new OvicClassifier(labelInputStream, model);
- int [] inputDims = classifier.getInputDims();
- imgHeight = inputDims[1];
- imgWidth = inputDims[2];
- // Only accept QUANTIZED_UINT8 input.
- imgData = ByteBuffer.allocateDirect(DIM_BATCH_SIZE * imgHeight * imgWidth * DIM_PIXEL_SIZE);
- imgData.order(ByteOrder.nativeOrder());
- intValues = new int[imgHeight * imgWidth];
- } catch (Exception e) {
- Log.e(TAG, e.getMessage());
- Log.e(TAG, "Failed to initialize ImageNet classifier for the benchmarker.");
- }
- }
-
- /** Return how many classes are predicted per image. */
- public int getNumPredictions() {
- return classifier.getNumPredictions();
- }
+ public abstract void getReadyToTest(InputStream labelInputStream, MappedByteBuffer model);
/**
* Perform test on a single bitmap image.
*
- * @param bitmap: a {@link Bitmap} image to classify.
+ * @param bitmap: a {@link Bitmap} image to process.
+ * @param imageId: an ID uniquely representing the image.
*/
- public OvicSingleImageResult doTestIteration(Bitmap bitmap)
- throws IOException, InterruptedException {
- if (shouldStop() || !readyToTest()) {
- return null;
- }
- OvicSingleImageResult iterResult = null;
- try {
- Log.i(TAG, "Converting bitmap.");
- convertBitmapToInput(bitmap);
- Log.i(TAG, "Classifying image.");
- iterResult = classifier.classifyByteBuffer(imgData);
- } catch (RuntimeException e) {
- Log.e(TAG, e.getMessage());
- Log.e(TAG, "Failed to classify image.");
- }
- if (iterResult == null || iterResult.latency == null) {
- throw new RuntimeException("Classification result or timing is invalid.");
- }
- Log.d(TAG, "Native inference latency: " + iterResult.latency);
- Log.i(TAG, iterResult.toString());
+ public abstract boolean processBitmap(Bitmap bitmap, int imageId)
+ throws IOException, InterruptedException;
- if (!benchmarkStarted) { // Skip the first image to discount warming-up time.
- benchmarkStarted = true;
- } else {
- totalRuntime += (double) iterResult.latency;
- }
- return iterResult;
+ /** Perform test on a single bitmap image without an image ID. */
+ public boolean processBitmap(Bitmap bitmap) throws IOException, InterruptedException {
+ return processBitmap(bitmap, /* imageId = */ 0);
}
+ /** Returns the last inference results as string. */
+ public abstract String getLastResultString();
+
/**
- * Writes Image data into a {@link ByteBuffer}.
- *
- * @param bitmap: a {@link Bitmap} source image.
- */
- private void convertBitmapToInput(Bitmap bitmap) throws RuntimeException {
- if (imgData == null) {
+ * Loads input buffer from intValues into ByteBuffer for the interpreter.
+ * Input buffer must be loaded in intValues and output will be placed in imgData.
+ */
+ protected void loadsInputToByteBuffer() {
+ if (imgData == null || intValues == null || quantizedInput == null) {
throw new RuntimeException("Benchmarker is not yet ready to test.");
}
- imgData.rewind();
- // Perform transformations corresponding to evaluation mode.
- float width = (float) bitmap.getWidth();
- float height = (float) bitmap.getHeight();
- int stWidth = Math.round((width - width * CENTRAL_FRACTION) / 2);
- int stHeight = Math.round((height - height * CENTRAL_FRACTION) / 2);
- int newWidth = Math.round(width - stWidth * 2);
- int newHeight = Math.round(height - stHeight * 2);
- bitmap = Bitmap.createBitmap(bitmap, stWidth, stHeight, newWidth, newHeight);
- bitmap = Bitmap.createScaledBitmap(bitmap, imgWidth, imgHeight, true);
- bitmap.getPixels(intValues, 0, bitmap.getWidth(), 0, 0, bitmap.getWidth(), bitmap.getHeight());
-
// Convert the image to ByteBuffer.
+ imgData.rewind();
int pixel = 0;
long startTime = SystemClock.uptimeMillis();
for (int i = 0; i < imgHeight; ++i) {
for (int j = 0; j < imgWidth; ++j) {
- final int val = intValues[pixel++];
- imgData.put((byte) ((val >> 16) & 0xFF));
- imgData.put((byte) ((val >> 8) & 0xFF));
- imgData.put((byte) (val & 0xFF));
+ final int pixelValue = intValues[pixel++];
+ if (quantizedInput) {
+ // Quantized model
+ imgData.put((byte) ((pixelValue >> 16) & 0xFF));
+ imgData.put((byte) ((pixelValue >> 8) & 0xFF));
+ imgData.put((byte) (pixelValue & 0xFF));
+ } else {
+ // Float model
+ imgData.putFloat((((pixelValue >> 16) & 0xFF) - IMAGE_MEAN) / IMAGE_STD);
+ imgData.putFloat((((pixelValue >> 8) & 0xFF) - IMAGE_MEAN) / IMAGE_STD);
+ imgData.putFloat(((pixelValue & 0xFF) - IMAGE_MEAN) / IMAGE_STD);
+ }
}
}
long endTime = SystemClock.uptimeMillis();
diff --git a/tensorflow/contrib/lite/java/ovic/src/main/java/org/tensorflow/ovic/OvicSingleImageResult.java b/tensorflow/contrib/lite/java/ovic/src/main/java/org/tensorflow/ovic/OvicClassificationResult.java
index 4af9a65c2f..5ab804e6ee 100644
--- a/tensorflow/contrib/lite/java/ovic/src/main/java/org/tensorflow/ovic/OvicSingleImageResult.java
+++ b/tensorflow/contrib/lite/java/ovic/src/main/java/org/tensorflow/ovic/OvicClassificationResult.java
@@ -1,4 +1,4 @@
-/*Copyright 2018 Google LLC
+/* Copyright 2018 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
@@ -17,17 +17,17 @@ package org.tensorflow.ovic;
import java.util.ArrayList;
/** Result class for inference run on a single image. */
-public class OvicSingleImageResult {
+public class OvicClassificationResult {
/** Top K classes and probabilities. */
- public ArrayList<String> topKClasses;
- public ArrayList<Float> topKProbs;
- public ArrayList<Integer> topKIndices;
+ public final ArrayList<String> topKClasses;
+ public final ArrayList<Float> topKProbs;
+ public final ArrayList<Integer> topKIndices;
/** Latency (ms). */
public Long latency;
- OvicSingleImageResult() {
+ OvicClassificationResult() {
topKClasses = new ArrayList<>();
topKProbs = new ArrayList<>();
topKIndices = new ArrayList<>();
diff --git a/tensorflow/contrib/lite/java/ovic/src/main/java/org/tensorflow/ovic/OvicClassifier.java b/tensorflow/contrib/lite/java/ovic/src/main/java/org/tensorflow/ovic/OvicClassifier.java
index fd610b054f..d8a54c1f3b 100644
--- a/tensorflow/contrib/lite/java/ovic/src/main/java/org/tensorflow/ovic/OvicClassifier.java
+++ b/tensorflow/contrib/lite/java/ovic/src/main/java/org/tensorflow/ovic/OvicClassifier.java
@@ -31,7 +31,7 @@ import java.util.PriorityQueue;
import org.tensorflow.lite.Interpreter;
import org.tensorflow.lite.TestHelper;
-/** Benchmark ImageNet Classifier with Tensorflow Lite. */
+/** Class for running ImageNet classification with a TfLite model. */
public class OvicClassifier {
/** Tag for the {@link Log}. */
@@ -106,7 +106,7 @@ public class OvicClassifier {
/** Classifies a {@link ByteBuffer} image. */
// @throws RuntimeException if model is uninitialized.
- public OvicSingleImageResult classifyByteBuffer(ByteBuffer imgData) {
+ public OvicClassificationResult classifyByteBuffer(ByteBuffer imgData) {
if (tflite == null) {
throw new RuntimeException(TAG + ": ImageNet classifier has not been initialized; Failed.");
}
@@ -122,7 +122,7 @@ public class OvicClassifier {
labelProbArray[0][i] = (inferenceOutputArray[0][i] & 0xff) / 255.0f;
}
}
- OvicSingleImageResult iterResult = computeTopKLabels();
+ OvicClassificationResult iterResult = computeTopKLabels();
iterResult.latency = getLastNativeInferenceLatencyMilliseconds();
return iterResult;
}
@@ -174,7 +174,7 @@ public class OvicClassifier {
}
/** Computes top-K labels. */
- private OvicSingleImageResult computeTopKLabels() {
+ private OvicClassificationResult computeTopKLabels() {
if (labelList == null) {
throw new RuntimeException("Label file has not been loaded.");
}
@@ -184,7 +184,7 @@ public class OvicClassifier {
sortedLabels.poll();
}
}
- OvicSingleImageResult singleImageResult = new OvicSingleImageResult();
+ OvicClassificationResult singleImageResult = new OvicClassificationResult();
if (sortedLabels.size() != RESULTS_TO_SHOW) {
throw new RuntimeException(
"Number of returned labels does not match requirement: "
diff --git a/tensorflow/contrib/lite/java/ovic/src/main/java/org/tensorflow/ovic/OvicClassifierBenchmarker.java b/tensorflow/contrib/lite/java/ovic/src/main/java/org/tensorflow/ovic/OvicClassifierBenchmarker.java
new file mode 100644
index 0000000000..0cdd0f7bec
--- /dev/null
+++ b/tensorflow/contrib/lite/java/ovic/src/main/java/org/tensorflow/ovic/OvicClassifierBenchmarker.java
@@ -0,0 +1,142 @@
+/* Copyright 2018 The TensorFlow Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ https://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+package org.tensorflow.ovic;
+
+import android.graphics.Bitmap;
+import android.util.Log;
+import java.io.IOException;
+import java.io.InputStream;
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+import java.nio.MappedByteBuffer;
+
+/** Class that benchmarks image classifier models. */
+public final class OvicClassifierBenchmarker extends OvicBenchmarker {
+ /** Tag for the {@link Log}. */
+ private static final String TAG = "OvicClassifierBenchmarker";
+
+ /** ImageNet preprocessing parameters. */
+ private static final float CENTRAL_FRACTION = 0.875f;
+ private OvicClassifier classifier;
+ private OvicClassificationResult iterResult = null;
+
+ public OvicClassifierBenchmarker(double wallTime) {
+ super(wallTime);
+ }
+
+ /** Test if the classifier is ready for benchmarking. */
+ @Override
+ public boolean readyToTest() {
+ return (classifier != null);
+ }
+
+ /**
+ * Getting the benchmarker ready for classifying images.
+ *
+ * @param labelInputStream: an {@link InputStream} specifying where the list of labels should be
+ * read from.
+ * @param model: a {@link MappedByteBuffer} model to benchmark.
+ */
+ @Override
+ public void getReadyToTest(InputStream labelInputStream, MappedByteBuffer model) {
+ try {
+ Log.i(TAG, "Creating classifier.");
+ classifier = new OvicClassifier(labelInputStream, model);
+ int [] inputDims = classifier.getInputDims();
+ imgHeight = inputDims[1];
+ imgWidth = inputDims[2];
+ quantizedInput = true;
+ // Only accept QUANTIZED_UINT8 input.
+ imgData = ByteBuffer.allocateDirect(DIM_BATCH_SIZE * imgHeight * imgWidth * DIM_PIXEL_SIZE);
+ imgData.order(ByteOrder.nativeOrder());
+ intValues = new int[imgHeight * imgWidth];
+ } catch (Exception e) {
+ Log.e(TAG, e.getMessage());
+ Log.e(TAG, "Failed to initialize ImageNet classifier for the benchmarker.");
+ }
+ }
+
+ /**
+ * Perform classification on a single bitmap image.
+ *
+ * @param bitmap: a {@link Bitmap} image to process.
+ * @param imageId: an ID uniquely representing the image.
+ */
+ @Override
+ public boolean processBitmap(Bitmap bitmap, int imageId)
+ throws IOException, InterruptedException {
+ if (shouldStop() || !readyToTest()) {
+ return false;
+ }
+ try {
+ Log.i(TAG, "Converting bitmap.");
+ convertBitmapToInput(bitmap);
+ Log.i(TAG, "Classifying image: " + imageId);
+ iterResult = classifier.classifyByteBuffer(imgData);
+ } catch (RuntimeException e) {
+ Log.e(TAG, e.getMessage());
+ Log.e(TAG, "Failed to classify image.");
+ }
+ if (iterResult == null || iterResult.latency == null) {
+ throw new RuntimeException("Classification result or timing is invalid.");
+ }
+ Log.d(TAG, "Native inference latency: " + iterResult.latency);
+ Log.i(TAG, iterResult.toString());
+
+ if (!benchmarkStarted) { // Skip the first image to discount warming-up time.
+ benchmarkStarted = true;
+ } else {
+ totalRuntime += ((double) iterResult.latency);
+ }
+ return true;
+ }
+
+ /** Return how many classes are predicted per image. */
+ public int getNumPredictions() {
+ return classifier.getNumPredictions();
+ }
+
+ public OvicClassificationResult getLastClassificationResult() {
+ return iterResult;
+ }
+
+ @Override
+ public String getLastResultString() {
+ if (iterResult == null) {
+ return null;
+ } else {
+ return iterResult.toString();
+ }
+ }
+
+ /**
+ * Preprocess bitmap according to ImageNet protocol then writes result into a {@link ByteBuffer}.
+ *
+ * @param bitmap: a {@link Bitmap} source image.
+ */
+ private void convertBitmapToInput(Bitmap bitmap) {
+ // Perform transformations corresponding to evaluation mode.
+ float width = (float) bitmap.getWidth();
+ float height = (float) bitmap.getHeight();
+ int stWidth = Math.round((width - width * CENTRAL_FRACTION) / 2);
+ int stHeight = Math.round((height - height * CENTRAL_FRACTION) / 2);
+ int newWidth = Math.round(width - stWidth * 2);
+ int newHeight = Math.round(height - stHeight * 2);
+ bitmap = Bitmap.createBitmap(bitmap, stWidth, stHeight, newWidth, newHeight);
+ bitmap = Bitmap.createScaledBitmap(bitmap, imgWidth, imgHeight, true);
+ bitmap.getPixels(intValues, 0, bitmap.getWidth(), 0, 0, bitmap.getWidth(), bitmap.getHeight());
+ loadsInputToByteBuffer();
+ }
+}
diff --git a/tensorflow/contrib/lite/java/ovic/src/main/java/org/tensorflow/ovic/OvicDetectionResult.java b/tensorflow/contrib/lite/java/ovic/src/main/java/org/tensorflow/ovic/OvicDetectionResult.java
new file mode 100644
index 0000000000..cf2902a5cb
--- /dev/null
+++ b/tensorflow/contrib/lite/java/ovic/src/main/java/org/tensorflow/ovic/OvicDetectionResult.java
@@ -0,0 +1,91 @@
+/* Copyright 2018 The TensorFlow Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ https://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+package org.tensorflow.ovic;
+
+import java.util.ArrayList;
+
+/** Result class for inference run on a single image. */
+public class OvicDetectionResult {
+
+  // Detected bounding boxes (pre-allocated; only the first `count` entries are valid).
+ public final ArrayList<BoundingBox> detections;
+ // Latency (ms).
+ public Long latency = -1L;
+ // id of the image.
+ public int id = -1;
+ // Number of valid detections (separately maintained, maybe different from detections.size()).
+ public int count = 0;
+
+ // Create OvicDetectionResult object with pre-filled capacity. Note that detections.size() will
+ // be equal to capacity after this call.
+ OvicDetectionResult(int capacity) {
+ detections = new ArrayList<BoundingBox>(capacity);
+ for (int i = 0; i < capacity; i++) {
+ detections.add(new BoundingBox(-1.0f, -1.0f, -1.0f, -1.0f, -1, -1.0f));
+ }
+ }
+
+ public void resetTo(Long latency, int id) {
+ count = 0;
+ this.latency = latency;
+ this.id = id;
+ }
+
+ public void addBox(float x1, float y1, float x2, float y2, int category, float score) {
+ detections.get(count).x1 = x1;
+ detections.get(count).y1 = y1;
+ detections.get(count).x2 = x2;
+ detections.get(count).y2 = y2;
+ detections.get(count).category = category;
+ detections.get(count).score = score;
+ count += 1;
+ }
+
+ public void scaleUp(double scaleFactorWidth, double scaleFactorHeight) {
+ for (BoundingBox box : detections) {
+ box.x1 = (float) (box.x1 * scaleFactorWidth);
+ box.y1 = (float) (box.y1 * scaleFactorHeight);
+ box.x2 = (float) (box.x2 * scaleFactorWidth);
+ box.y2 = (float) (box.y2 * scaleFactorHeight);
+ }
+ }
+
+ @Override
+ public String toString() {
+ String textToShow = latency + "ms";
+ int k = 0;
+ for (BoundingBox box : detections) {
+ textToShow +=
+ "\nPrediction ["
+ + k
+ + "] = Class "
+ + box.category
+ + " ("
+ + box.x1
+ + ", "
+ + box.y1
+ + ", "
+ + box.x2
+ + ", "
+ + box.y2
+ + ") : "
+ + box.score;
+ k++;
+ }
+
+
+ return textToShow;
+ }
+}
diff --git a/tensorflow/contrib/lite/java/ovic/src/main/java/org/tensorflow/ovic/OvicDetector.java b/tensorflow/contrib/lite/java/ovic/src/main/java/org/tensorflow/ovic/OvicDetector.java
new file mode 100644
index 0000000000..56836a79e5
--- /dev/null
+++ b/tensorflow/contrib/lite/java/ovic/src/main/java/org/tensorflow/ovic/OvicDetector.java
@@ -0,0 +1,184 @@
+/* Copyright 2018 The TensorFlow Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ https://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+package org.tensorflow.ovic;
+
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.nio.ByteBuffer;
+import java.nio.MappedByteBuffer;
+import java.nio.charset.StandardCharsets;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import org.tensorflow.lite.Interpreter;
+import org.tensorflow.lite.TestHelper;
+
+/** Class for running COCO detection with a TfLite model. */
+public class OvicDetector implements AutoCloseable {
+
+ /** Tag for the {@link Log}. */
+ private static final String TAG = "OvicDetector";
+
+ /** An instance of the driver class to run model inference with Tensorflow Lite. */
+ private Interpreter tflite;
+
+ /** Labels corresponding to the output of the vision model. */
+ private final List<String> labelList;
+
+ /** Define the output format. */
+ private final Boolean inputIsFloat;
+
+ /** Number of detections per image. 10 for demo, 100 for the actual competition. */
+ private static final int NUM_RESULTS = 10;
+
+ /** The output arrays for the mobilenet SSD. */
+ private float[][][] outputLocations;
+ private float[][] outputClasses;
+ private float[][] outputScores;
+ private float[] numDetections;
+ private Map<Integer, Object> outputMap;
+
+ /** Input resolution. */
+ private final int[] inputDims;
+
+ /** Final result. */
+ public OvicDetectionResult result = null;
+
+ OvicDetector(InputStream labelInputStream, MappedByteBuffer model) throws IOException {
+ // Load the label list.
+ labelList = loadLabelList(labelInputStream);
+
+ // Create the TfLite interpreter.
+ tflite = new Interpreter(model, new Interpreter.Options().setNumThreads(1));
+ inputDims = TestHelper.getInputDims(tflite, 0);
+ inputIsFloat = TestHelper.getInputDataType(tflite, 0).equals("float");
+ if (inputDims.length != 4) {
+ throw new RuntimeException("The model's input dimensions must be 4 (BWHC).");
+ }
+ if (inputDims[0] != 1) {
+ throw new RuntimeException(
+ "The model must have a batch size of 1, got " + inputDims[0] + " instead.");
+ }
+ if (inputDims[3] != 3) {
+ throw new RuntimeException(
+ "The model must have three color channels, got " + inputDims[3] + " instead.");
+ }
+ // Check the resolution.
+ int minSide = Math.min(inputDims[1], inputDims[2]);
+ int maxSide = Math.max(inputDims[1], inputDims[2]);
+ if (minSide <= 0 || maxSide > 1000) {
+ throw new RuntimeException("The model's resolution must be between (0, 1000].");
+ }
+
+ // Initialize the input array and result arrays. The input images are stored in a list of
+    // Object. Since this function analyzes one image at a time, there is only 1 item.
+    // The output is formulated as a map of int -> Object. The output arrays are added to the map.
+ outputLocations = new float[1][NUM_RESULTS][4];
+ outputClasses = new float[1][NUM_RESULTS];
+ outputScores = new float[1][NUM_RESULTS];
+ numDetections = new float[1];
+ outputMap = new HashMap<>();
+ outputMap.put(0, outputLocations);
+ outputMap.put(1, outputClasses);
+ outputMap.put(2, outputScores);
+ outputMap.put(3, numDetections);
+ // Preallocate the result. This will be where inference result is stored after each
+ // detectByteBuffer call.
+ result = new OvicDetectionResult(NUM_RESULTS);
+ }
+
+ public Boolean quantizedInput() {
+ return !inputIsFloat;
+ }
+
+ /** Reads label list from Assets. */
+ private static List<String> loadLabelList(InputStream labelInputStream) throws IOException {
+ List<String> labelList = new ArrayList<>();
+ try (BufferedReader reader =
+ new BufferedReader(new InputStreamReader(labelInputStream, StandardCharsets.UTF_8))) {
+ String line;
+ while ((line = reader.readLine()) != null) {
+ labelList.add(line);
+ }
+ }
+ return labelList;
+ }
+
+ /**
+   * The interface to run the detection. This method currently only supports the float mobilenet_ssd
+ * model. The quantized models will be added in the future.
+ *
+ * @param imgData The image buffer in ByteBuffer format.
+ * @return boolean indicator of whether detection was a success. If success, the detection results
+ * is available in the result member variable.
+ * See OvicDetectionResult.java for details.
+ */
+ boolean detectByteBuffer(ByteBuffer imgData, int imageId) {
+ if (tflite == null) {
+ throw new RuntimeException(TAG + ": Detector has not been initialized; Failed.");
+ }
+ if (inputIsFloat == null) {
+ throw new RuntimeException(TAG + ": Detector input type has not been resolved.");
+ }
+
+ Object[] inputArray = {imgData};
+ tflite.runForMultipleInputsOutputs(inputArray, outputMap);
+
+ Long latency = getLastNativeInferenceLatencyMilliseconds();
+
+ // Update the results.
+ result.resetTo(latency, imageId);
+ for (int i = 0; i < NUM_RESULTS; i++) {
+ result.addBox(outputLocations[0][i][1] * inputDims[1],
+ outputLocations[0][i][0] * inputDims[1],
+ outputLocations[0][i][3] * inputDims[2],
+ outputLocations[0][i][2] * inputDims[2],
+ Math.round(outputClasses[0][i] + 1 /* Label offset */),
+ outputScores[0][i]);
+ }
+ return true; // Marks that the result is available.
+ }
+
+  /**
+ * Get native inference latency of last image detection run.
+ * @throws RuntimeException if model is uninitialized.
+ * @return The inference latency in millisecond.
+ */
+ public Long getLastNativeInferenceLatencyMilliseconds() {
+ if (tflite == null) {
+ throw new RuntimeException(TAG + ": ImageNet classifier has not been initialized; Failed.");
+ }
+ Long latency = tflite.getLastNativeInferenceDurationNanoseconds();
+ return (latency == null) ? null : (Long) (latency / 1000000);
+ }
+
+ public int[] getInputDims() {
+ return inputDims;
+ }
+
+ public List<String> getLabels() {
+ return labelList;
+ }
+
+ /** Closes tflite to release resources. */
+ @Override
+ public void close() {
+ tflite.close();
+ tflite = null;
+ }
+}
diff --git a/tensorflow/contrib/lite/java/ovic/src/main/java/org/tensorflow/ovic/OvicDetectorBenchmarker.java b/tensorflow/contrib/lite/java/ovic/src/main/java/org/tensorflow/ovic/OvicDetectorBenchmarker.java
new file mode 100644
index 0000000000..1a4e193ff2
--- /dev/null
+++ b/tensorflow/contrib/lite/java/ovic/src/main/java/org/tensorflow/ovic/OvicDetectorBenchmarker.java
@@ -0,0 +1,160 @@
+/* Copyright 2018 The TensorFlow Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ https://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+package org.tensorflow.ovic;
+
+import android.graphics.Bitmap;
+import android.util.Log;
+import java.io.IOException;
+import java.io.InputStream;
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+import java.nio.MappedByteBuffer;
+
+/**
+ * Class that benchmarks object detection models.
+ */
+public final class OvicDetectorBenchmarker extends OvicBenchmarker {
+ /** Tag for the {@link Log}. */
+ private static final String TAG = "OvicDetectorBenchmarker";
+
+ public double scaleFactorWidth = 1.0f;
+ public double scaleFactorHeight = 1.0f;
+ private Bitmap scaledBitmap = null; // Preallocate bitmap for scaling.
+
+ private OvicDetector detector;
+
+ /**
+   * Initializes an {@link OvicDetectorBenchmarker}.
+ *
+ * @param wallTime: a double number specifying the total amount of time to benchmark.
+ */
+ public OvicDetectorBenchmarker(double wallTime) {
+ super(wallTime);
+ }
+
+ /** Check to see if the detector is ready to test. */
+ @Override
+ public boolean readyToTest() {
+ return (detector != null);
+ }
+
+ /**
+ * Getting the benchmarker ready for detecting images.
+ *
+ * @param labelInputStream: an {@link InputStream} specifying where the list of labels should be
+ * read from.
+ * @param model: a {@link MappedByteBuffer} model to benchmark.
+ */
+ @Override
+ public void getReadyToTest(InputStream labelInputStream, MappedByteBuffer model) {
+ try {
+ Log.i(TAG, "Creating detector.");
+ detector = new OvicDetector(labelInputStream, model);
+ quantizedInput = detector.quantizedInput();
+ int[] inputDims = detector.getInputDims();
+ imgHeight = inputDims[1];
+ imgWidth = inputDims[2];
+ if (quantizedInput) {
+ imgData = ByteBuffer.allocateDirect(DIM_BATCH_SIZE * imgHeight * imgWidth * DIM_PIXEL_SIZE);
+ } else {
+ imgData =
+ ByteBuffer.allocateDirect(DIM_BATCH_SIZE * imgHeight * imgWidth * DIM_PIXEL_SIZE * 4);
+ }
+ imgData.order(ByteOrder.nativeOrder());
+ intValues = new int[imgHeight * imgWidth];
+ benchmarkStarted = false;
+ } catch (Exception e) {
+ Log.e(TAG, e.getMessage());
+ Log.e(TAG, "Failed to initialize COCO detector for the benchmarker.", e);
+ }
+ }
+
+ /**
+   * Perform detection on a single {@link ByteBuffer} image. The image must have the
+ * same dimension that the model expects.
+ *
+ * @param image: a {@link ByteBuffer} image to process.
+ * @param imageId: an ID uniquely representing the image.
+ */
+ public boolean processBuffer(ByteBuffer image, int imageId) {
+ if (!readyToTest()) {
+ return false;
+ }
+ try {
+ if (!detector.detectByteBuffer(image, imageId)) {
+ return false;
+ }
+ } catch (RuntimeException e) {
+ Log.e(TAG, e.getMessage());
+ return false;
+ }
+
+ if (!benchmarkStarted) { // Skip the first image to discount warming-up time.
+ benchmarkStarted = true;
+ } else {
+ totalRuntime += ((double) detector.result.latency);
+ }
+ return true; // Indicating that result is ready.
+ }
+
+ /**
+ * Perform detection on a single bitmap image.
+ *
+ * @param bitmap: a {@link Bitmap} image to process.
+ * @param imageId: an ID uniquely representing the image.
+ */
+ @Override
+ public boolean processBitmap(Bitmap bitmap, int imageId)
+ throws IOException, InterruptedException {
+ if (shouldStop() || !readyToTest()) {
+ return false;
+ }
+ convertBitmapToInput(bitmap); // Scale bitmap if needed, store result in imgData.
+ if (!processBuffer(imgData, imageId)) {
+ return false;
+ }
+ // Scale results back to original image coordinates.
+ detector.result.scaleUp(scaleFactorWidth, scaleFactorHeight);
+ return true; // Indicating that result is ready.
+ }
+
+ public OvicDetectionResult getLastDetectionResult() {
+ return detector.result;
+ }
+
+ @Override
+ public String getLastResultString() {
+ if (detector.result == null) {
+ return null;
+ }
+ return detector.result.toString();
+ }
+
+ /**
+ * Preprocess bitmap image into {@link ByteBuffer} format for the detector.
+ *
+ * @param bitmap: a {@link Bitmap} source image.
+ */
+ private void convertBitmapToInput(Bitmap bitmap) {
+ int originalWidth = bitmap.getWidth();
+ int originalHeight = bitmap.getHeight();
+ scaledBitmap = Bitmap.createScaledBitmap(bitmap, imgWidth, imgHeight, true);
+ scaleFactorWidth = originalWidth * 1.0 / imgWidth;
+ scaleFactorHeight = originalHeight * 1.0 / imgHeight;
+ scaledBitmap.getPixels(intValues, 0, imgWidth, 0, 0, imgWidth, imgHeight);
+ scaledBitmap.recycle();
+ loadsInputToByteBuffer();
+ }
+}
diff --git a/tensorflow/contrib/lite/java/ovic/src/main/java/org/tensorflow/ovic/OvicValidator.java b/tensorflow/contrib/lite/java/ovic/src/main/java/org/tensorflow/ovic/OvicValidator.java
index a504ec74a9..baa14baf92 100644
--- a/tensorflow/contrib/lite/java/ovic/src/main/java/org/tensorflow/ovic/OvicValidator.java
+++ b/tensorflow/contrib/lite/java/ovic/src/main/java/org/tensorflow/ovic/OvicValidator.java
@@ -51,7 +51,7 @@ public class OvicValidator {
MappedByteBuffer model = loadModelFile(modelFile);
OvicClassifier classifier = new OvicClassifier(labelsInputStream, model);
ByteBuffer imgData = createByteBufferForClassifier(classifier);
- OvicSingleImageResult testResult = classifier.classifyByteBuffer(imgData);
+ OvicClassificationResult testResult = classifier.classifyByteBuffer(imgData);
if (testResult.topKClasses.isEmpty()) {
throw new RuntimeException("Failed to return top K predictions.");
}
diff --git a/tensorflow/contrib/lite/java/ovic/src/test/java/org/tensorflow/ovic/OvicClassifierTest.java b/tensorflow/contrib/lite/java/ovic/src/test/java/org/tensorflow/ovic/OvicClassifierTest.java
index 1587c3c56f..99e874ca78 100644
--- a/tensorflow/contrib/lite/java/ovic/src/test/java/org/tensorflow/ovic/OvicClassifierTest.java
+++ b/tensorflow/contrib/lite/java/ovic/src/test/java/org/tensorflow/ovic/OvicClassifierTest.java
@@ -1,4 +1,4 @@
-/*Copyright 2018 Google LLC
+/* Copyright 2018 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
@@ -43,7 +43,7 @@ public final class OvicClassifierTest {
private MappedByteBuffer lowResModel = null;
private ByteBuffer testImage = null;
private ByteBuffer lowResTestImage = null;
- private OvicSingleImageResult testResult = null;
+ private OvicClassificationResult testResult = null;
private static final String LABELS_PATH =
"tensorflow/contrib/lite/java/ovic/src/testdata/labels.txt";
private static final String QUANTIZED_MODEL_PATH =
@@ -147,7 +147,7 @@ public final class OvicClassifierTest {
return imgData;
}
- private static void assertCorrectTopK(OvicSingleImageResult testResult) {
+ private static void assertCorrectTopK(OvicClassificationResult testResult) {
assertThat(testResult.topKClasses.size() > 0).isTrue();
Boolean topKAccurate = false;
// Assert that the correct class is in the top K.
diff --git a/tensorflow/contrib/lite/java/ovic/src/test/java/org/tensorflow/ovic/OvicDetectorTest.java b/tensorflow/contrib/lite/java/ovic/src/test/java/org/tensorflow/ovic/OvicDetectorTest.java
new file mode 100644
index 0000000000..4681e26052
--- /dev/null
+++ b/tensorflow/contrib/lite/java/ovic/src/test/java/org/tensorflow/ovic/OvicDetectorTest.java
@@ -0,0 +1,149 @@
+/* Copyright 2018 The TensorFlow Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ https://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+package org.tensorflow.ovic;
+
+import static com.google.common.truth.Truth.assertThat;
+
+import java.awt.Graphics2D;
+import java.awt.image.BufferedImage;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+import java.nio.MappedByteBuffer;
+import java.nio.channels.FileChannel;
+import javax.imageio.ImageIO;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
+
+/** Unit test for {@link org.tensorflow.ovic.OvicDetector}. */
+@RunWith(JUnit4.class)
+public final class OvicDetectorTest {
+ private OvicDetector detector = null;
+ private InputStream labelsInputStream = null;
+ private MappedByteBuffer model = null;
+ private ByteBuffer testImage = null;
+
+ private static final float IMAGE_MEAN = 128f;
+ private static final float IMAGE_STD = 128f;
+
+ private Boolean quantizedInput = null;
+ private static final String LABELS_PATH =
+ "tensorflow/contrib/lite/java/ovic/src/testdata/coco_labels.txt";
+ private static final String MODEL_PATH =
+ "external/tflite_mobilenet_ssd_quant/detect.tflite";
+ private static final String TEST_IMAGE_PATH =
+ "external/tflite_ovic_testdata/test_image_224.jpg";
+ private static final int GROUNDTRUTH = 1 /* Person */;
+
+ @Before
+ public void setUp() {
+ try {
+ // load models.
+ model = loadModelFile(MODEL_PATH);
+
+ // Load label files;
+ File labelsfile = new File(LABELS_PATH);
+ labelsInputStream = new FileInputStream(labelsfile);
+
+ // Create detector.
+ detector = new OvicDetector(labelsInputStream, model);
+ quantizedInput = detector.quantizedInput();
+
+ // Load test image and convert into byte buffer.
+ File imageFile = new File(TEST_IMAGE_PATH);
+ BufferedImage rawimg = ImageIO.read(imageFile);
+ int[] inputDims = detector.getInputDims();
+ BufferedImage img = new BufferedImage(inputDims[1], inputDims[2], rawimg.getType());
+ Graphics2D g = img.createGraphics();
+ g.drawImage(rawimg, 0, 0, inputDims[1], inputDims[2], null);
+ g.dispose();
+ testImage = toByteBuffer(img);
+ } catch (IOException e) {
+ System.out.println(e.getMessage());
+ }
+
+ System.out.println("Successfully setup");
+ }
+
+ private static MappedByteBuffer loadModelFile(String modelFilePath) throws IOException {
+ File modelfile = new File(modelFilePath);
+ FileInputStream inputStream = new FileInputStream(modelfile);
+ FileChannel fileChannel = inputStream.getChannel();
+ long startOffset = 0L;
+ long declaredLength = fileChannel.size();
+ return fileChannel.map(FileChannel.MapMode.READ_ONLY, startOffset, declaredLength);
+ }
+
+ private ByteBuffer toByteBuffer(BufferedImage image) {
+ ByteBuffer imgData;
+ if (quantizedInput) {
+ imgData = ByteBuffer.allocateDirect(image.getHeight() * image.getWidth() * 3);
+ } else {
+ imgData = ByteBuffer.allocateDirect(image.getHeight() * image.getWidth() * 12);
+ }
+ imgData.order(ByteOrder.nativeOrder());
+ for (int y = 0; y < image.getHeight(); y++) {
+ for (int x = 0; x < image.getWidth(); x++) {
+ int pixelValue = image.getRGB(x, y);
+ if (quantizedInput) {
+ // Quantized model
+ imgData.put((byte) ((pixelValue >> 16) & 0xFF));
+ imgData.put((byte) ((pixelValue >> 8) & 0xFF));
+ imgData.put((byte) (pixelValue & 0xFF));
+ } else {
+ // Float model
+ imgData.putFloat((((pixelValue >> 16) & 0xFF) - IMAGE_MEAN) / IMAGE_STD);
+ imgData.putFloat((((pixelValue >> 8) & 0xFF) - IMAGE_MEAN) / IMAGE_STD);
+ imgData.putFloat(((pixelValue & 0xFF) - IMAGE_MEAN) / IMAGE_STD);
+ }
+ }
+ }
+ return imgData;
+ }
+
+ @Test
+ public void ovicDetector_detectSuccess() throws Exception {
+ assertThat(detector.detectByteBuffer(testImage, 1)).isTrue();
+ assertThat(detector.result != null).isTrue();
+ }
+
+ @Test
+ public void ovicDetector_simpleBatchTest() throws Exception {
+ final int numRepeats = 5;
+ for (int i = 0; i < numRepeats; i++) {
+ assertThat(detector.detectByteBuffer(testImage, 1)).isTrue();
+ OvicDetectionResult result = detector.result;
+ Boolean detectWithinTop5 = false;
+ for (int j = 0; j < Math.min(5, result.count); j++) {
+ if (result.detections.get(j).category == GROUNDTRUTH) {
+ detectWithinTop5 = true;
+ break;
+ }
+ }
+ if (!detectWithinTop5) {
+ System.out.println("---------------- Image " + i + " ---------------------");
+ System.out.println("Expect category " + GROUNDTRUTH);
+ System.out.println("Detection results: ");
+ System.out.println(result.toString());
+ }
+ assertThat(detectWithinTop5).isTrue();
+ }
+ }
+}
diff --git a/tensorflow/contrib/lite/java/ovic/src/testdata/BUILD b/tensorflow/contrib/lite/java/ovic/src/testdata/BUILD
index 1021ea30dd..051aa2204e 100644
--- a/tensorflow/contrib/lite/java/ovic/src/testdata/BUILD
+++ b/tensorflow/contrib/lite/java/ovic/src/testdata/BUILD
@@ -14,6 +14,9 @@ filegroup(
)
exports_files(
- ["labels.txt"],
+ [
+ "labels.txt",
+ "coco_labels.txt",
+ ],
visibility = ["//visibility:public"],
)
diff --git a/tensorflow/contrib/lite/java/ovic/src/testdata/coco_labels.txt b/tensorflow/contrib/lite/java/ovic/src/testdata/coco_labels.txt
new file mode 100644
index 0000000000..d91f535b1a
--- /dev/null
+++ b/tensorflow/contrib/lite/java/ovic/src/testdata/coco_labels.txt
@@ -0,0 +1,91 @@
+person
+bicycle
+car
+motorcycle
+airplane
+bus
+train
+truck
+boat
+traffic light
+fire hydrant
+empty
+stop sign
+parking meter
+bench
+bird
+cat
+dog
+horse
+sheep
+cow
+elephant
+bear
+zebra
+giraffe
+empty
+backpack
+umbrella
+empty
+empty
+handbag
+tie
+suitcase
+frisbee
+skis
+snowboard
+sports ball
+kite
+baseball bat
+baseball glove
+skateboard
+surfboard
+tennis racket
+bottle
+empty
+wine glasses
+cup
+fork
+knife
+spoon
+bowl
+banana
+apple
+sandwich
+orange
+broccoli
+carrot
+hot dog
+pizza
+donut
+cake
+chair
+couch
+potted plant
+bed
+empty
+dining table
+empty
+empty
+toilet
+empty
+tv
+laptop
+mouse
+remote
+keyboard
+cell phone
+microwave
+oven
+toaster
+sink
+refrigerator
+empty
+book
+clock
+vase
+scissors
+teddy bear
+hair drier
+toothbrush
+empty