author    Andrew Harp <andrewharp@google.com>  2017-09-20 14:23:41 -0700
committer TensorFlower Gardener <gardener@tensorflow.org>  2017-09-20 14:27:36 -0700
commit    94b3d7fd8ac71f8475b2d53e78f89cf68ff05819 (patch)
tree      0cadf7b76196762adb25fd1ae21fab42bd3e27be /tensorflow/examples/android
parent    1ad7cb6f05c221ff0df5532e4101e99250dec33f (diff)
Android demo: finish implementing support for the legacy camera API so that luminance data is available for tracking.

Resolves #13013. Also makes image access lazy to improve performance, and cleans up member variable declarations between CameraActivity.java and its subclasses.

PiperOrigin-RevId: 169450498
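For readers skimming the diff below: the performance change is that YUV-to-RGB conversion no longer happens eagerly in every camera callback. The callback now only stores a conversion Runnable, and the conversion runs when a subclass actually asks for RGB data. A minimal sketch of the pattern, using the names from the diff with everything else elided:

// Sketch of the deferred-conversion pattern introduced in CameraActivity.
// Only the mechanism is shown; camera setup, threading, and the actual
// ImageUtils conversion calls are elided.
class LazyFrameAccess {
  private Runnable imageConverter;        // deferred YUV420 -> ARGB8888 work
  private Runnable postInferenceCallback; // returns the buffer to the camera
  private int[] rgbBytes;
  private byte[][] yuvBytes = new byte[3][];

  // A camera callback stores the work instead of performing it.
  void onFrame(final Runnable converter, final Runnable onDone) {
    imageConverter = converter;
    postInferenceCallback = onDone;
  }

  // Subclasses that need RGB pay for the conversion when they ask for it;
  // the tracker, which only needs luminance, never triggers it.
  protected int[] getRgbBytes() {
    imageConverter.run();
    return rgbBytes;
  }

  protected byte[] getLuminance() {
    return yuvBytes[0]; // the Y plane is available without any conversion
  }

  protected void readyForNextImage() {
    if (postInferenceCallback != null) {
      postInferenceCallback.run();
    }
  }
}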
Diffstat (limited to 'tensorflow/examples/android')
-rw-r--r--  tensorflow/examples/android/src/org/tensorflow/demo/CameraActivity.java                 | 163
-rw-r--r--  tensorflow/examples/android/src/org/tensorflow/demo/CameraConnectionFragment.java       |   4
-rw-r--r--  tensorflow/examples/android/src/org/tensorflow/demo/ClassifierActivity.java             |  30
-rw-r--r--  tensorflow/examples/android/src/org/tensorflow/demo/DetectorActivity.java               | 126
-rw-r--r--  tensorflow/examples/android/src/org/tensorflow/demo/LegacyCameraConnectionFragment.java |  41
-rw-r--r--  tensorflow/examples/android/src/org/tensorflow/demo/StylizeActivity.java                |  54
6 files changed, 207 insertions(+), 211 deletions(-)
diff --git a/tensorflow/examples/android/src/org/tensorflow/demo/CameraActivity.java b/tensorflow/examples/android/src/org/tensorflow/demo/CameraActivity.java
index 83cf9f0a2a..7e57c17467 100644
--- a/tensorflow/examples/android/src/org/tensorflow/demo/CameraActivity.java
+++ b/tensorflow/examples/android/src/org/tensorflow/demo/CameraActivity.java
@@ -21,7 +21,6 @@ import android.app.Activity;
import android.app.Fragment;
import android.content.Context;
import android.content.pm.PackageManager;
-import android.graphics.Bitmap;
import android.hardware.Camera;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCharacteristics;
@@ -41,15 +40,12 @@ import android.view.KeyEvent;
import android.view.WindowManager;
import android.widget.Toast;
import java.nio.ByteBuffer;
-
import org.tensorflow.demo.env.ImageUtils;
import org.tensorflow.demo.env.Logger;
+import org.tensorflow.demo.R; // Explicit import needed for internal Google builds.
-// Explicit import needed for internal Google builds.
-import org.tensorflow.demo.R;
-
-public abstract class CameraActivity extends Activity implements OnImageAvailableListener, Camera.
- PreviewCallback {
+public abstract class CameraActivity extends Activity
+ implements OnImageAvailableListener, Camera.PreviewCallback {
private static final Logger LOGGER = new Logger();
private static final int PERMISSIONS_REQUEST = 1;
@@ -62,19 +58,16 @@ public abstract class CameraActivity extends Activity implements OnImageAvailabl
private Handler handler;
private HandlerThread handlerThread;
private boolean useCamera2API;
- protected Bitmap rgbFrameBitmap = null;
+ private boolean isProcessingFrame = false;
+ private byte[][] yuvBytes = new byte[3][];
private int[] rgbBytes = null;
+ private int yRowStride;
+
protected int previewWidth = 0;
protected int previewHeight = 0;
- protected Bitmap croppedBitmap = null;
- protected static final boolean SAVE_PREVIEW_BITMAP = false;
- protected long lastProcessingTimeMs;
- protected Bitmap cropCopyBitmap;
- protected ResultsView resultsView;
- protected boolean computing = false;
- protected Runnable postInferenceCallback;
- protected byte[][] yuvBytes=new byte[3][];
- protected int yRowStride;
+
+ private Runnable postInferenceCallback;
+ private Runnable imageConverter;
@Override
protected void onCreate(final Bundle savedInstanceState) {
@@ -91,16 +84,31 @@ public abstract class CameraActivity extends Activity implements OnImageAvailabl
}
}
+ private byte[] lastPreviewFrame;
+
+ protected int[] getRgbBytes() {
+ imageConverter.run();
+ return rgbBytes;
+ }
+
+ protected int getLuminanceStride() {
+ return yRowStride;
+ }
+
+ protected byte[] getLuminance() {
+ return yuvBytes[0];
+ }
+
/**
* Callback for android.hardware.Camera API
*/
@Override
public void onPreviewFrame(final byte[] bytes, final Camera camera) {
- if (computing) {
+ if (isProcessingFrame) {
+ LOGGER.w("Dropping frame!");
return;
}
- computing = true;
- yuvBytes[0] = bytes;
+
try {
// Initialize the storage bitmaps once when the resolution is known.
if (rgbBytes == null) {
@@ -110,18 +118,33 @@ public abstract class CameraActivity extends Activity implements OnImageAvailabl
rgbBytes = new int[previewWidth * previewHeight];
onPreviewSizeChosen(new Size(previewSize.width, previewSize.height), 90);
}
- ImageUtils.convertYUV420SPToARGB8888(bytes, previewWidth, previewHeight, rgbBytes);
} catch (final Exception e) {
LOGGER.e(e, "Exception!");
return;
}
- postInferenceCallback = new Runnable() {
- @Override
- public void run() {
- camera.addCallbackBuffer(bytes);
- }
- };
- processImageRGBbytes(rgbBytes);
+
+ isProcessingFrame = true;
+ lastPreviewFrame = bytes;
+ yuvBytes[0] = bytes;
+ yRowStride = previewWidth;
+
+ imageConverter =
+ new Runnable() {
+ @Override
+ public void run() {
+ ImageUtils.convertYUV420SPToARGB8888(bytes, previewWidth, previewHeight, rgbBytes);
+ }
+ };
+
+ postInferenceCallback =
+ new Runnable() {
+ @Override
+ public void run() {
+ camera.addCallbackBuffer(bytes);
+ isProcessingFrame = false;
+ }
+ };
+ processImage();
}
/**
@@ -129,51 +152,64 @@ public abstract class CameraActivity extends Activity implements OnImageAvailabl
*/
@Override
public void onImageAvailable(final ImageReader reader) {
- Image image = null;
//We need wait until we have some size from onPreviewSizeChosen
if (previewWidth == 0 || previewHeight == 0) {
return;
}
- rgbBytes = new int[previewWidth * previewHeight];
+ if (rgbBytes == null) {
+ rgbBytes = new int[previewWidth * previewHeight];
+ }
try {
- image = reader.acquireLatestImage();
+ final Image image = reader.acquireLatestImage();
if (image == null) {
return;
}
- if (computing) {
+ if (isProcessingFrame) {
image.close();
return;
}
- computing = true;
+ isProcessingFrame = true;
Trace.beginSection("imageAvailable");
final Plane[] planes = image.getPlanes();
fillBytes(planes, yuvBytes);
yRowStride = planes[0].getRowStride();
final int uvRowStride = planes[1].getRowStride();
final int uvPixelStride = planes[1].getPixelStride();
- ImageUtils.convertYUV420ToARGB8888(
- yuvBytes[0],
- yuvBytes[1],
- yuvBytes[2],
- previewWidth,
- previewHeight,
- yRowStride,
- uvRowStride,
- uvPixelStride,
- rgbBytes);
- image.close();
+ imageConverter =
+ new Runnable() {
+ @Override
+ public void run() {
+ ImageUtils.convertYUV420ToARGB8888(
+ yuvBytes[0],
+ yuvBytes[1],
+ yuvBytes[2],
+ previewWidth,
+ previewHeight,
+ yRowStride,
+ uvRowStride,
+ uvPixelStride,
+ rgbBytes);
+ }
+ };
+
+ postInferenceCallback =
+ new Runnable() {
+ @Override
+ public void run() {
+ image.close();
+ isProcessingFrame = false;
+ }
+ };
+
+ processImage();
} catch (final Exception e) {
- if (image != null) {
- image.close();
- }
LOGGER.e(e, "Exception!");
Trace.endSection();
return;
}
- processImageRGBbytes(rgbBytes);
Trace.endSection();
}
@@ -235,15 +271,13 @@ public abstract class CameraActivity extends Activity implements OnImageAvailabl
@Override
public void onRequestPermissionsResult(
final int requestCode, final String[] permissions, final int[] grantResults) {
- switch (requestCode) {
- case PERMISSIONS_REQUEST: {
- if (grantResults.length > 0
- && grantResults[0] == PackageManager.PERMISSION_GRANTED
- && grantResults[1] == PackageManager.PERMISSION_GRANTED) {
- setFragment();
- } else {
- requestPermission();
- }
+ if (requestCode == PERMISSIONS_REQUEST) {
+ if (grantResults.length > 0
+ && grantResults[0] == PackageManager.PERMISSION_GRANTED
+ && grantResults[1] == PackageManager.PERMISSION_GRANTED) {
+ setFragment();
+ } else {
+ requestPermission();
}
}
}
@@ -269,7 +303,8 @@ public abstract class CameraActivity extends Activity implements OnImageAvailabl
}
// Returns true if the device supports the required hardware level, or better.
- boolean isHardwareLevelSupported(CameraCharacteristics characteristics, int requiredLevel) {
+ private boolean isHardwareLevelSupported(
+ CameraCharacteristics characteristics, int requiredLevel) {
int deviceLevel = characteristics.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL);
if (deviceLevel == CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY) {
return requiredLevel == deviceLevel;
@@ -331,7 +366,8 @@ public abstract class CameraActivity extends Activity implements OnImageAvailabl
camera2Fragment.setCamera(cameraId);
fragment = camera2Fragment;
} else {
- fragment = new LegacyCameraConnectionFragment(this, getLayoutId());
+ fragment =
+ new LegacyCameraConnectionFragment(this, getLayoutId(), getDesiredPreviewFrameSize());
}
getFragmentManager()
@@ -384,7 +420,14 @@ public abstract class CameraActivity extends Activity implements OnImageAvailabl
return super.onKeyDown(keyCode, event);
}
- protected abstract void processImageRGBbytes(int[] rgbBytes ) ;
+ protected void readyForNextImage() {
+ if (postInferenceCallback != null) {
+ postInferenceCallback.run();
+ }
+ }
+
+ protected abstract void processImage();
+
protected abstract void onPreviewSizeChosen(final Size size, final int rotation);
protected abstract int getLayoutId();
protected abstract Size getDesiredPreviewFrameSize();
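The resulting subclass contract is visible in the ClassifierActivity, DetectorActivity, and StylizeActivity hunks below. For orientation, a hypothetical minimal subclass (the layout id and preview size here are placeholders, not from this commit):

import android.util.Size;

// Hypothetical minimal CameraActivity subclass showing the new contract:
// processImage() replaces processImageRGBbytes(int[]), frame data is pulled
// lazily via the accessors, and readyForNextImage() re-arms the camera.
public class MinimalDemoActivity extends CameraActivity {
  @Override
  protected void processImage() {
    final byte[] luminance = getLuminance(); // cheap: no conversion needed
    final int[] rgb = getRgbBytes();         // runs the deferred YUV->RGB step
    // ... hand rgb/luminance to a Classifier or tracker here ...
    readyForNextImage(); // releases the frame; without this the feed stalls
  }

  @Override
  protected void onPreviewSizeChosen(final Size size, final int rotation) {}

  @Override
  protected int getLayoutId() {
    return R.layout.camera_connection_fragment; // placeholder layout id
  }

  @Override
  protected Size getDesiredPreviewFrameSize() {
    return new Size(640, 480); // placeholder
  }
}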
diff --git a/tensorflow/examples/android/src/org/tensorflow/demo/CameraConnectionFragment.java b/tensorflow/examples/android/src/org/tensorflow/demo/CameraConnectionFragment.java
index 986f2777b2..361cf0e87c 100644
--- a/tensorflow/examples/android/src/org/tensorflow/demo/CameraConnectionFragment.java
+++ b/tensorflow/examples/android/src/org/tensorflow/demo/CameraConnectionFragment.java
@@ -59,7 +59,7 @@ import java.util.List;
import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;
import org.tensorflow.demo.env.Logger;
-import org.tensorflow.demo.R;
+import org.tensorflow.demo.R; // Explicit import needed for internal Google builds.
public class CameraConnectionFragment extends Fragment {
private static final Logger LOGGER = new Logger();
@@ -265,7 +265,7 @@ public class CameraConnectionFragment extends Fragment {
* @param height The minimum desired height
* @return The optimal {@code Size}, or an arbitrary one if none were big enough
*/
- private static Size chooseOptimalSize(final Size[] choices, final int width, final int height) {
+ protected static Size chooseOptimalSize(final Size[] choices, final int width, final int height) {
final int minSize = Math.max(Math.min(width, height), MINIMUM_PREVIEW_SIZE);
final Size desiredSize = new Size(width, height);
diff --git a/tensorflow/examples/android/src/org/tensorflow/demo/ClassifierActivity.java b/tensorflow/examples/android/src/org/tensorflow/demo/ClassifierActivity.java
index ab48e2265b..b29fa1546c 100644
--- a/tensorflow/examples/android/src/org/tensorflow/demo/ClassifierActivity.java
+++ b/tensorflow/examples/android/src/org/tensorflow/demo/ClassifierActivity.java
@@ -22,26 +22,32 @@ import android.graphics.Canvas;
import android.graphics.Matrix;
import android.graphics.Paint;
import android.graphics.Typeface;
-
import android.media.ImageReader.OnImageAvailableListener;
import android.os.SystemClock;
import android.util.Size;
import android.util.TypedValue;
import android.view.Display;
-
import java.util.List;
import java.util.Vector;
import org.tensorflow.demo.OverlayView.DrawCallback;
import org.tensorflow.demo.env.BorderedText;
import org.tensorflow.demo.env.ImageUtils;
import org.tensorflow.demo.env.Logger;
-
-// Explicit import needed for internal Google builds.
-import org.tensorflow.demo.R;
+import org.tensorflow.demo.R; // Explicit import needed for internal Google builds.
public class ClassifierActivity extends CameraActivity implements OnImageAvailableListener {
private static final Logger LOGGER = new Logger();
+ protected static final boolean SAVE_PREVIEW_BITMAP = false;
+
+ private ResultsView resultsView;
+
+ private Bitmap rgbFrameBitmap = null;
+ private Bitmap croppedBitmap = null;
+ private Bitmap cropCopyBitmap = null;
+
+ private long lastProcessingTimeMs;
+
// These are the settings for the original v1 Inception model. If you want to
// use a model that's been produced from the TensorFlow for Poets codelab,
// you'll need to set IMAGE_SIZE = 299, IMAGE_MEAN = 128, IMAGE_STD = 128,
@@ -136,8 +142,6 @@ public class ClassifierActivity extends CameraActivity implements OnImageAvailab
cropToFrameTransform = new Matrix();
frameToCropTransform.invert(cropToFrameTransform);
- yuvBytes = new byte[3][];
-
addCallback(
new DrawCallback() {
@Override
@@ -147,8 +151,9 @@ public class ClassifierActivity extends CameraActivity implements OnImageAvailab
});
}
- protected void processImageRGBbytes(int[] rgbBytes ) {
- rgbFrameBitmap.setPixels(rgbBytes, 0, previewWidth, 0, 0, previewWidth, previewHeight);
+ @Override
+ protected void processImage() {
+ rgbFrameBitmap.setPixels(getRgbBytes(), 0, previewWidth, 0, 0, previewWidth, previewHeight);
final Canvas canvas = new Canvas(croppedBitmap);
canvas.drawBitmap(rgbFrameBitmap, frameToCropTransform, null);
@@ -165,15 +170,12 @@ public class ClassifierActivity extends CameraActivity implements OnImageAvailab
lastProcessingTimeMs = SystemClock.uptimeMillis() - startTime;
LOGGER.i("Detect: %s", results);
cropCopyBitmap = Bitmap.createBitmap(croppedBitmap);
- if (resultsView==null) {
+ if (resultsView == null) {
resultsView = (ResultsView) findViewById(R.id.results);
}
resultsView.setResults(results);
requestRender();
- computing = false;
- if (postInferenceCallback != null) {
- postInferenceCallback.run();
- }
+ readyForNextImage();
}
});
}
diff --git a/tensorflow/examples/android/src/org/tensorflow/demo/DetectorActivity.java b/tensorflow/examples/android/src/org/tensorflow/demo/DetectorActivity.java
index 91b2831306..3c80a2ae3c 100644
--- a/tensorflow/examples/android/src/org/tensorflow/demo/DetectorActivity.java
+++ b/tensorflow/examples/android/src/org/tensorflow/demo/DetectorActivity.java
@@ -25,12 +25,8 @@ import android.graphics.Paint;
import android.graphics.Paint.Style;
import android.graphics.RectF;
import android.graphics.Typeface;
-import android.media.Image;
-import android.media.Image.Plane;
-import android.media.ImageReader;
import android.media.ImageReader.OnImageAvailableListener;
import android.os.SystemClock;
-import android.os.Trace;
import android.util.Size;
import android.util.TypedValue;
import android.view.Display;
@@ -44,7 +40,7 @@ import org.tensorflow.demo.env.BorderedText;
import org.tensorflow.demo.env.ImageUtils;
import org.tensorflow.demo.env.Logger;
import org.tensorflow.demo.tracking.MultiBoxTracker;
-import org.tensorflow.demo.R;
+import org.tensorflow.demo.R; // Explicit import needed for internal Google builds.
/**
* An activity that uses a TensorFlowMultiBoxDetector and ObjectTracker to detect and then track
@@ -104,30 +100,23 @@ public class DetectorActivity extends CameraActivity implements OnImageAvailable
private Classifier detector;
- private int previewWidth = 0;
- private int previewHeight = 0;
- private byte[][] yuvBytes;
- private int[] rgbBytes = null;
+ private long lastProcessingTimeMs;
private Bitmap rgbFrameBitmap = null;
private Bitmap croppedBitmap = null;
+ private Bitmap cropCopyBitmap = null;
- private boolean computing = false;
+ private boolean computingDetection = false;
private long timestamp = 0;
private Matrix frameToCropTransform;
private Matrix cropToFrameTransform;
- private Bitmap cropCopyBitmap;
-
private MultiBoxTracker tracker;
- private byte[] luminance;
+ private byte[] luminanceCopy;
private BorderedText borderedText;
-
- private long lastProcessingTimeMs;
-
@Override
public void onPreviewSizeChosen(final Size size, final int rotation) {
final float textSizePx =
@@ -187,7 +176,6 @@ public class DetectorActivity extends CameraActivity implements OnImageAvailable
sensorOrientation = rotation + screenOrientation;
LOGGER.i("Initializing at size %dx%d", previewWidth, previewHeight);
- rgbBytes = new int[previewWidth * previewHeight];
rgbFrameBitmap = Bitmap.createBitmap(previewWidth, previewHeight, Config.ARGB_8888);
croppedBitmap = Bitmap.createBitmap(cropSize, cropSize, Config.ARGB_8888);
@@ -199,7 +187,6 @@ public class DetectorActivity extends CameraActivity implements OnImageAvailable
cropToFrameTransform = new Matrix();
frameToCropTransform.invert(cropToFrameTransform);
- yuvBytes = new byte[3][];
trackingOverlay = (OverlayView) findViewById(R.id.tracking_overlay);
trackingOverlay.addCallback(
@@ -260,82 +247,47 @@ public class DetectorActivity extends CameraActivity implements OnImageAvailable
OverlayView trackingOverlay;
@Override
- public void onImageAvailable(final ImageReader reader) {
- Image image = null;
-
+ protected void processImage() {
++timestamp;
final long currTimestamp = timestamp;
+ byte[] originalLuminance = getLuminance();
+ tracker.onFrame(
+ previewWidth,
+ previewHeight,
+ getLuminanceStride(),
+ sensorOrientation,
+ originalLuminance,
+ timestamp);
+ trackingOverlay.postInvalidate();
+
+ // No mutex needed as this method is not reentrant.
+ if (computingDetection) {
+ readyForNextImage();
+ return;
+ }
+ computingDetection = true;
+ LOGGER.i("Preparing image " + currTimestamp + " for detection in bg thread.");
- try {
- image = reader.acquireLatestImage();
-
- if (image == null) {
- return;
- }
-
- Trace.beginSection("imageAvailable");
-
- final Plane[] planes = image.getPlanes();
- fillBytes(planes, yuvBytes);
-
- tracker.onFrame(
- previewWidth,
- previewHeight,
- planes[0].getRowStride(),
- sensorOrientation,
- yuvBytes[0],
- timestamp);
- trackingOverlay.postInvalidate();
+ rgbFrameBitmap.setPixels(getRgbBytes(), 0, previewWidth, 0, 0, previewWidth, previewHeight);
- // No mutex needed as this method is not reentrant.
- if (computing) {
- image.close();
- return;
- }
- computing = true;
-
- final int yRowStride = planes[0].getRowStride();
- final int uvRowStride = planes[1].getRowStride();
- final int uvPixelStride = planes[1].getPixelStride();
- ImageUtils.convertYUV420ToARGB8888(
- yuvBytes[0],
- yuvBytes[1],
- yuvBytes[2],
- previewWidth,
- previewHeight,
- yRowStride,
- uvRowStride,
- uvPixelStride,
- rgbBytes);
-
- image.close();
- } catch (final Exception e) {
- if (image != null) {
- image.close();
- }
- LOGGER.e(e, "Exception!");
- Trace.endSection();
- return;
+ if (luminanceCopy == null) {
+ luminanceCopy = new byte[originalLuminance.length];
}
+ System.arraycopy(originalLuminance, 0, luminanceCopy, 0, originalLuminance.length);
+ readyForNextImage();
- rgbFrameBitmap.setPixels(rgbBytes, 0, previewWidth, 0, 0, previewWidth, previewHeight);
final Canvas canvas = new Canvas(croppedBitmap);
canvas.drawBitmap(rgbFrameBitmap, frameToCropTransform, null);
-
// For examining the actual TF input.
if (SAVE_PREVIEW_BITMAP) {
ImageUtils.saveBitmap(croppedBitmap);
}
- if (luminance == null) {
- luminance = new byte[yuvBytes[0].length];
- }
- System.arraycopy(yuvBytes[0], 0, luminance, 0, luminance.length);
-
runInBackground(
new Runnable() {
@Override
public void run() {
+ LOGGER.i("Running detection on image " + currTimestamp);
final long startTime = SystemClock.uptimeMillis();
final List<Classifier.Recognition> results = detector.recognizeImage(croppedBitmap);
lastProcessingTimeMs = SystemClock.uptimeMillis() - startTime;
@@ -349,9 +301,15 @@ public class DetectorActivity extends CameraActivity implements OnImageAvailable
float minimumConfidence = MINIMUM_CONFIDENCE_TF_OD_API;
switch (MODE) {
- case TF_OD_API: minimumConfidence = MINIMUM_CONFIDENCE_TF_OD_API; break;
- case MULTIBOX: minimumConfidence = MINIMUM_CONFIDENCE_MULTIBOX; break;
- case YOLO: minimumConfidence = MINIMUM_CONFIDENCE_YOLO; break;
+ case TF_OD_API:
+ minimumConfidence = MINIMUM_CONFIDENCE_TF_OD_API;
+ break;
+ case MULTIBOX:
+ minimumConfidence = MINIMUM_CONFIDENCE_MULTIBOX;
+ break;
+ case YOLO:
+ minimumConfidence = MINIMUM_CONFIDENCE_YOLO;
+ break;
}
final List<Classifier.Recognition> mappedRecognitions =
@@ -368,19 +326,15 @@ public class DetectorActivity extends CameraActivity implements OnImageAvailable
}
}
- tracker.trackResults(mappedRecognitions, luminance, currTimestamp);
+ tracker.trackResults(mappedRecognitions, luminanceCopy, currTimestamp);
trackingOverlay.postInvalidate();
requestRender();
- computing = false;
+ computingDetection = false;
}
});
-
- Trace.endSection();
}
- protected void processImageRGBbytes(int[] rgbBytes ) {}
-
@Override
protected int getLayoutId() {
return R.layout.camera_connection_fragment_tracking;
diff --git a/tensorflow/examples/android/src/org/tensorflow/demo/LegacyCameraConnectionFragment.java b/tensorflow/examples/android/src/org/tensorflow/demo/LegacyCameraConnectionFragment.java
index e5b3eeeceb..a317273acd 100644
--- a/tensorflow/examples/android/src/org/tensorflow/demo/LegacyCameraConnectionFragment.java
+++ b/tensorflow/examples/android/src/org/tensorflow/demo/LegacyCameraConnectionFragment.java
@@ -1,7 +1,7 @@
package org.tensorflow.demo;
/*
- * Copyright 2014 The Android Open Source Project
+ * Copyright 2017 The TensorFlow Authors. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -18,31 +18,29 @@ package org.tensorflow.demo;
import android.app.Fragment;
import android.graphics.SurfaceTexture;
+import android.hardware.Camera;
+import android.hardware.Camera.CameraInfo;
import android.os.Bundle;
import android.os.Handler;
import android.os.HandlerThread;
+import android.util.Size;
import android.util.SparseIntArray;
import android.view.LayoutInflater;
import android.view.Surface;
import android.view.TextureView;
import android.view.View;
import android.view.ViewGroup;
-
import java.io.IOException;
-
-import android.hardware.Camera;
-import android.hardware.Camera.CameraInfo;
-
+import java.util.List;
+import org.tensorflow.demo.env.ImageUtils;
import org.tensorflow.demo.env.Logger;
-
-// Explicit import needed for internal Google builds.
-import org.tensorflow.demo.R;
+import org.tensorflow.demo.R; // Explicit import needed for internal Google builds.
public class LegacyCameraConnectionFragment extends Fragment {
-
private Camera camera;
private static final Logger LOGGER = new Logger();
private Camera.PreviewCallback imageListener;
+ private Size desiredSize;
/**
* The layout identifier to inflate for this Fragment.
@@ -50,10 +48,10 @@ public class LegacyCameraConnectionFragment extends Fragment {
private int layout;
public LegacyCameraConnectionFragment(
- final Camera.PreviewCallback imageListener,
- final int layout) {
+ final Camera.PreviewCallback imageListener, final int layout, final Size desiredSize) {
this.imageListener = imageListener;
this.layout = layout;
+ this.desiredSize = desiredSize;
}
/**
@@ -85,6 +83,16 @@ public class LegacyCameraConnectionFragment extends Fragment {
Camera.Parameters parameters = camera.getParameters();
parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE);
+ List<Camera.Size> cameraSizes = parameters.getSupportedPreviewSizes();
+ Size[] sizes = new Size[cameraSizes.size()];
+ int i = 0;
+ for (Camera.Size size : cameraSizes) {
+ sizes[i++] = new Size(size.width, size.height);
+ }
+ Size previewSize =
+ CameraConnectionFragment.chooseOptimalSize(
+ sizes, desiredSize.getWidth(), desiredSize.getHeight());
+ parameters.setPreviewSize(previewSize.getWidth(), previewSize.getHeight());
camera.setDisplayOrientation(90);
camera.setParameters(parameters);
camera.setPreviewTexture(texture);
@@ -94,8 +102,7 @@ public class LegacyCameraConnectionFragment extends Fragment {
camera.setPreviewCallbackWithBuffer(imageListener);
Camera.Size s = camera.getParameters().getPreviewSize();
- int bufferSize = s.height * s.width * 3 / 2;
- camera.addCallbackBuffer(new byte[bufferSize]);
+ camera.addCallbackBuffer(new byte[ImageUtils.getYUVByteSize(s.height, s.width)]);
textureView.setAspectRatio(s.height, s.width);
@@ -104,8 +111,7 @@ public class LegacyCameraConnectionFragment extends Fragment {
@Override
public void onSurfaceTextureSizeChanged(
- final SurfaceTexture texture, final int width, final int height) {
- }
+ final SurfaceTexture texture, final int width, final int height) {}
@Override
public boolean onSurfaceTextureDestroyed(final SurfaceTexture texture) {
@@ -113,8 +119,7 @@ public class LegacyCameraConnectionFragment extends Fragment {
}
@Override
- public void onSurfaceTextureUpdated(final SurfaceTexture texture) {
- }
+ public void onSurfaceTextureUpdated(final SurfaceTexture texture) {}
};
/**
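One detail worth noting in the hunk above: the callback buffer is now sized with ImageUtils.getYUVByteSize(s.height, s.width) instead of the inline s.height * s.width * 3 / 2, which under-allocates for odd preview dimensions because NV21 rounds each chroma plane up. A sketch of what such a helper presumably computes (an assumption, not copied from the repo):

final class YuvSizes {
  // Presumable YUV420SP (NV21) buffer-size computation: the Y plane is one
  // byte per pixel, and the interleaved U/V planes are half resolution in
  // each dimension, rounded up for odd sizes.
  static int yuvByteSize(final int height, final int width) {
    final int ySize = width * height;
    final int uvSize = 2 * ((width + 1) / 2) * ((height + 1) / 2);
    return ySize + uvSize;
  }
}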
diff --git a/tensorflow/examples/android/src/org/tensorflow/demo/StylizeActivity.java b/tensorflow/examples/android/src/org/tensorflow/demo/StylizeActivity.java
index 58dd5c6069..6a66ec3927 100644
--- a/tensorflow/examples/android/src/org/tensorflow/demo/StylizeActivity.java
+++ b/tensorflow/examples/android/src/org/tensorflow/demo/StylizeActivity.java
@@ -28,14 +28,9 @@ import android.graphics.Paint;
import android.graphics.Paint.Style;
import android.graphics.Rect;
import android.graphics.Typeface;
-import android.hardware.Camera;
-import android.media.Image;
-import android.media.Image.Plane;
-import android.media.ImageReader;
import android.media.ImageReader.OnImageAvailableListener;
import android.os.Bundle;
import android.os.SystemClock;
-import android.os.Trace;
import android.util.Size;
import android.util.TypedValue;
import android.view.Display;
@@ -59,9 +54,7 @@ import org.tensorflow.demo.OverlayView.DrawCallback;
import org.tensorflow.demo.env.BorderedText;
import org.tensorflow.demo.env.ImageUtils;
import org.tensorflow.demo.env.Logger;
-
-// Explicit import needed for internal Google builds.
-import org.tensorflow.demo.R;
+import org.tensorflow.demo.R; // Explicit import needed for internal Google builds.
/**
* Sample activity that stylizes the camera preview according to "A Learned Representation For
@@ -98,8 +91,10 @@ public class StylizeActivity extends CameraActivity implements OnImageAvailableL
private Integer sensorOrientation;
- private int previewWidth = 0;
- private int previewHeight = 0;
+ private long lastProcessingTimeMs;
+ private Bitmap rgbFrameBitmap = null;
+ private Bitmap croppedBitmap = null;
+ private Bitmap cropCopyBitmap = null;
private final float[] styleVals = new float[NUM_STYLES];
private int[] intValues;
@@ -457,18 +452,18 @@ public class StylizeActivity extends CameraActivity implements OnImageAvailableL
cropToFrameTransform = new Matrix();
frameToCropTransform.invert(cropToFrameTransform);
- yuvBytes = new byte[3][];
intValues = new int[desiredSize * desiredSize];
floatValues = new float[desiredSize * desiredSize * 3];
initializedSize = desiredSize;
}
- protected void processImageRGBbytes(int[] rgbBytes ) {
+ @Override
+ protected void processImage() {
if (desiredSize != initializedSize) {
LOGGER.i(
"Initializing at size preview size %dx%d, stylize size %d",
previewWidth, previewHeight, desiredSize);
-
+
rgbFrameBitmap = Bitmap.createBitmap(previewWidth, previewHeight, Config.ARGB_8888);
croppedBitmap = Bitmap.createBitmap(desiredSize, desiredSize, Config.ARGB_8888);
frameToCropTransform = ImageUtils.getTransformationMatrix(
@@ -478,12 +473,11 @@ public class StylizeActivity extends CameraActivity implements OnImageAvailableL
cropToFrameTransform = new Matrix();
frameToCropTransform.invert(cropToFrameTransform);
- yuvBytes = new byte[3][];
intValues = new int[desiredSize * desiredSize];
floatValues = new float[desiredSize * desiredSize * 3];
initializedSize = desiredSize;
}
- rgbFrameBitmap.setPixels(rgbBytes, 0, previewWidth, 0, 0, previewWidth, previewHeight);
+ rgbFrameBitmap.setPixels(getRgbBytes(), 0, previewWidth, 0, 0, previewWidth, previewHeight);
final Canvas canvas = new Canvas(croppedBitmap);
canvas.drawBitmap(rgbFrameBitmap, frameToCropTransform, null);
@@ -492,21 +486,19 @@ public class StylizeActivity extends CameraActivity implements OnImageAvailableL
ImageUtils.saveBitmap(croppedBitmap);
}
- runInBackground(new Runnable() {
- @Override
- public void run() {
- cropCopyBitmap = Bitmap.createBitmap(croppedBitmap);
- final long startTime = SystemClock.uptimeMillis();
- stylizeImage(croppedBitmap);
- lastProcessingTimeMs = SystemClock.uptimeMillis() - startTime;
- textureCopyBitmap = Bitmap.createBitmap(croppedBitmap);
- requestRender();
- computing = false;
- if (postInferenceCallback != null) {
- postInferenceCallback.run();
- }
- }
- });
+ runInBackground(
+ new Runnable() {
+ @Override
+ public void run() {
+ cropCopyBitmap = Bitmap.createBitmap(croppedBitmap);
+ final long startTime = SystemClock.uptimeMillis();
+ stylizeImage(croppedBitmap);
+ lastProcessingTimeMs = SystemClock.uptimeMillis() - startTime;
+ textureCopyBitmap = Bitmap.createBitmap(croppedBitmap);
+ requestRender();
+ readyForNextImage();
+ }
+ });
if (desiredSize != initializedSize) {
resetPreviewBuffers();
}
@@ -540,7 +532,7 @@ public class StylizeActivity extends CameraActivity implements OnImageAvailableL
}
// Copy the input data into TensorFlow.
- LOGGER.i("Width: %s , Height: %s",bitmap.getWidth(),bitmap.getHeight());
+ LOGGER.i("Width: %s , Height: %s", bitmap.getWidth(), bitmap.getHeight());
inferenceInterface.feed(
INPUT_NODE, floatValues, 1, bitmap.getWidth(), bitmap.getHeight(), 3);
inferenceInterface.feed(STYLE_NODE, styleVals, NUM_STYLES);