Camera2 support for frame processing

pull/493/head
Mattia Iavarone 5 years ago
parent 06d805cb72
commit e96c9f0846
  1. cameraview/src/androidTest/java/com/otaliastudios/cameraview/engine/MockCameraEngine.java (5 changed lines)
  2. cameraview/src/main/java/com/otaliastudios/cameraview/CameraView.java (8 changed lines)
  3. cameraview/src/main/java/com/otaliastudios/cameraview/engine/Camera1Engine.java (12 changed lines)
  4. cameraview/src/main/java/com/otaliastudios/cameraview/engine/Camera2Engine.java (132 changed lines)
  5. cameraview/src/main/java/com/otaliastudios/cameraview/engine/CameraEngine.java (28 changed lines)
  6. cameraview/src/main/java/com/otaliastudios/cameraview/frame/Frame.java (8 changed lines)
  7. cameraview/src/main/java/com/otaliastudios/cameraview/frame/FrameManager.java (143 changed lines)
  8. cameraview/src/main/java/com/otaliastudios/cameraview/internal/utils/ImageHelper.java (96 changed lines)
  9. demo/src/main/java/com/otaliastudios/cameraview/demo/CameraActivity.java (13 changed lines)
  10. demo/src/main/res/layout/activity_camera.xml (2 changed lines)

cameraview/src/androidTest/java/com/otaliastudios/cameraview/engine/MockCameraEngine.java

@@ -12,6 +12,7 @@ import com.otaliastudios.cameraview.VideoResult;
 import com.otaliastudios.cameraview.controls.Audio;
 import com.otaliastudios.cameraview.controls.Facing;
 import com.otaliastudios.cameraview.controls.Flash;
+import com.otaliastudios.cameraview.frame.FrameManager;
 import com.otaliastudios.cameraview.gesture.Gesture;
 import com.otaliastudios.cameraview.controls.Hdr;
 import com.otaliastudios.cameraview.controls.Mode;
@@ -176,8 +177,10 @@ public class MockCameraEngine extends CameraEngine {
         mFocusStarted = true;
     }
 
+    @NonNull
     @Override
-    public void onBufferAvailable(@NonNull byte[] buffer) {
+    protected FrameManager instantiateFrameManager() {
+        return new FrameManager(2, null);
     }
 
     @Override

cameraview/src/main/java/com/otaliastudios/cameraview/CameraView.java

@@ -1911,7 +1911,13 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
             @Override
             public void run() {
                 for (FrameProcessor processor : mFrameProcessors) {
-                    processor.process(frame);
+                    try {
+                        processor.process(frame);
+                    } catch (Exception e) {
+                        mLogger.w("dispatchFrame:", "Error during processor implementation.",
+                                "Can happen when camera is closed while processors are running.",
+                                e);
+                    }
                 }
                 frame.release();
             }

cameraview/src/main/java/com/otaliastudios/cameraview/engine/Camera1Engine.java

@@ -26,6 +26,7 @@ import com.otaliastudios.cameraview.PictureResult;
 import com.otaliastudios.cameraview.VideoResult;
 import com.otaliastudios.cameraview.controls.Facing;
 import com.otaliastudios.cameraview.controls.Flash;
+import com.otaliastudios.cameraview.frame.FrameManager;
 import com.otaliastudios.cameraview.gesture.Gesture;
 import com.otaliastudios.cameraview.controls.Hdr;
 import com.otaliastudios.cameraview.controls.Mode;
@@ -47,7 +48,10 @@ import java.util.List;
 @SuppressWarnings("deprecation")
-public class Camera1Engine extends CameraEngine implements Camera.PreviewCallback, Camera.ErrorCallback {
+public class Camera1Engine extends CameraEngine implements
+        Camera.PreviewCallback,
+        Camera.ErrorCallback,
+        FrameManager.BufferCallback {
 
     private static final String TAG = Camera1Engine.class.getSimpleName();
     private static final CameraLogger LOG = CameraLogger.create(TAG);
@@ -284,6 +288,12 @@ public class Camera1Engine extends CameraEngine implements Camera.PreviewCallbac
         mHasFrameProcessors = hasFrameProcessors;
     }
 
+    @NonNull
+    @Override
+    protected FrameManager instantiateFrameManager() {
+        return new FrameManager(2, this);
+    }
+
     @Override
     public void onBufferAvailable(@NonNull byte[] buffer) {
         // TODO: sync with handler?

cameraview/src/main/java/com/otaliastudios/cameraview/engine/Camera2Engine.java

@@ -3,6 +3,8 @@ package com.otaliastudios.cameraview.engine;
 import android.annotation.SuppressLint;
 import android.content.Context;
 import android.graphics.Camera;
+import android.graphics.ImageFormat;
+import android.graphics.PixelFormat;
 import android.graphics.PointF;
 import android.graphics.Rect;
 import android.graphics.SurfaceTexture;
@@ -14,6 +16,8 @@ import android.hardware.camera2.CameraManager;
 import android.hardware.camera2.CaptureRequest;
 import android.hardware.camera2.params.StreamConfigurationMap;
 import android.location.Location;
+import android.media.Image;
+import android.media.ImageReader;
 import android.media.MediaCodec;
 import android.os.Build;
 import android.view.Surface;
@@ -33,14 +37,20 @@ import com.otaliastudios.cameraview.controls.Flash;
 import com.otaliastudios.cameraview.controls.Hdr;
 import com.otaliastudios.cameraview.controls.Mode;
 import com.otaliastudios.cameraview.controls.WhiteBalance;
+import com.otaliastudios.cameraview.frame.Frame;
+import com.otaliastudios.cameraview.frame.FrameManager;
 import com.otaliastudios.cameraview.gesture.Gesture;
 import com.otaliastudios.cameraview.internal.utils.CropHelper;
+import com.otaliastudios.cameraview.internal.utils.ImageHelper;
+import com.otaliastudios.cameraview.internal.utils.WorkerHandler;
 import com.otaliastudios.cameraview.picture.SnapshotGlPictureRecorder;
 import com.otaliastudios.cameraview.preview.GlCameraPreview;
 import com.otaliastudios.cameraview.size.AspectRatio;
 import com.otaliastudios.cameraview.size.Size;
+import com.otaliastudios.cameraview.size.SizeSelectors;
 import com.otaliastudios.cameraview.video.Full2VideoRecorder;
 import com.otaliastudios.cameraview.video.SnapshotVideoRecorder;
+import com.otaliastudios.cameraview.video.VideoRecorder;
 
 import java.util.ArrayList;
 import java.util.List;
@@ -57,13 +67,15 @@ import androidx.annotation.WorkerThread;
 // TODO exposure correction
 // TODO autofocus
 // TODO pictures
-// TODO frame processor
 @RequiresApi(Build.VERSION_CODES.LOLLIPOP)
-public class Camera2Engine extends CameraEngine {
+public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAvailableListener {
 
     private static final String TAG = Camera2Engine.class.getSimpleName();
     private static final CameraLogger LOG = CameraLogger.create(TAG);
+    private static final int FRAME_PROCESSING_FORMAT = ImageFormat.NV21;
+    private static final int FRAME_PROCESSING_INPUT_FORMAT = ImageFormat.YUV_420_888;
 
     private final CameraManager mManager;
     private String mCameraId;
     private CameraDevice mCamera;
@@ -72,6 +84,11 @@
     private CaptureRequest.Builder mRepeatingRequestBuilder;
     private CaptureRequest mRepeatingRequest;
 
+    // Frame processing
+    private Size mFrameProcessingSize;
+    private ImageReader mFrameProcessingReader; // need this or the reader surface is collected
+    private final WorkerHandler mFrameConversionHandler;
+
     private Surface mPreviewStreamSurface;
     private Surface mFrameProcessingSurface;
@@ -85,6 +102,7 @@
         super(callback);
         mMapper = Mapper.get(Engine.CAMERA2);
         mManager = (CameraManager) mCallback.getContext().getSystemService(Context.CAMERA_SERVICE);
+        mFrameConversionHandler = WorkerHandler.get("CameraViewFrameConversion");
     }
 
     //region Utilities
@@ -374,9 +392,29 @@
         // 4. FRAME PROCESSING
         if (mHasFrameProcessors) {
-            // TODO
+            // Choose the size.
+            StreamConfigurationMap streamMap = mCameraCharacteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
+            if (streamMap == null) throw new RuntimeException("StreamConfigurationMap is null. Should not happen.");
+            android.util.Size[] aSizes = streamMap.getOutputSizes(FRAME_PROCESSING_INPUT_FORMAT);
+            List<Size> sizes = new ArrayList<>();
+            for (android.util.Size aSize : aSizes) {
+                sizes.add(new Size(aSize.getWidth(), aSize.getHeight()));
+            }
+            mFrameProcessingSize = SizeSelectors.and(
+                    SizeSelectors.maxWidth(Math.min(700, mPreviewStreamSize.getWidth())),
+                    SizeSelectors.maxHeight(Math.min(700, mPreviewStreamSize.getHeight())),
+                    SizeSelectors.biggest()).select(sizes).get(0);
+            mFrameProcessingReader = ImageReader.newInstance(
+                    mFrameProcessingSize.getWidth(),
+                    mFrameProcessingSize.getHeight(),
+                    FRAME_PROCESSING_INPUT_FORMAT,
+                    2);
+            mFrameProcessingReader.setOnImageAvailableListener(this, mFrameConversionHandler.getHandler());
+            mFrameProcessingSurface = mFrameProcessingReader.getSurface();
             outputSurfaces.add(mFrameProcessingSurface);
         } else {
+            mFrameProcessingReader = null;
+            mFrameProcessingSize = null;
             mFrameProcessingSurface = null;
         }
@@ -414,23 +452,28 @@
             throw new IllegalStateException("previewStreamSize should not be null at this point.");
         }
         mPreview.setStreamSize(previewSizeForView.getWidth(), previewSizeForView.getHeight());
-        // Set the preview rotation.
         mPreview.setDrawRotation(mDisplayOffset);
 
-        // TODO mCamera.setPreviewCallbackWithBuffer(null); // Release anything left
-        // TODO mCamera.setPreviewCallbackWithBuffer(this); // Add ourselves
-        // TODO mFrameManager.setUp(ImageFormat.getBitsPerPixel(mPreviewStreamFormat), mPreviewStreamSize);
-        LOG.i("onStartPreview", "Starting preview with startPreview().");
+        if (mHasFrameProcessors) {
+            getFrameManager().setUp(ImageFormat.getBitsPerPixel(FRAME_PROCESSING_FORMAT), mFrameProcessingSize);
+        }
+
+        LOG.i("onStartPreview", "Starting preview.");
         addRepeatingRequestBuilderSurfaces();
         applyRepeatingRequestBuilder(false, CameraException.REASON_FAILED_TO_START_PREVIEW, null);
         LOG.i("onStartPreview", "Started preview.");
 
         // Start delayed video if needed.
         if (mFullVideoPendingStub != null) {
-            // Do not call takeVideo. It will reset some stub parameters that the recorder sets.
-            onTakeVideo(mFullVideoPendingStub);
+            // Do not call takeVideo/onTakeVideo. It will reset some stub parameters that the recorder sets.
+            // Also we are posting this so that doTakeVideo sees a started preview.
+            final VideoResult.Stub stub = mFullVideoPendingStub;
+            mFullVideoPendingStub = null;
+            mHandler.post(new Runnable() {
+                @Override
+                public void run() {
+                    doTakeVideo(stub);
+                }
+            });
         }
         return Tasks.forResult(null);
     }
@@ -449,8 +492,9 @@
             mVideoRecorder = null;
         }
         mPictureRecorder = null;
-        getFrameManager().release();
-        // TODO mCamera.setPreviewCallbackWithBuffer(null); // Release anything left
+        if (mHasFrameProcessors) {
+            getFrameManager().release();
+        }
         try {
             // NOTE: should we wait for onReady() like docs say?
             // Leaving this synchronous for now.
@@ -478,6 +522,11 @@
         mPreviewStreamSurface = null;
         mPreviewStreamSize = null;
         mCaptureSize = null;
+        mFrameProcessingSize = null;
+        if (mFrameProcessingReader != null) {
+            mFrameProcessingReader.close();
+            mFrameProcessingReader = null;
+        }
         mSession.close();
         mSession = null;
         return Tasks.forResult(null);
@@ -537,15 +586,9 @@
         stub.size = flip(REF_SENSOR, REF_OUTPUT) ? mCaptureSize.flip() : mCaptureSize;
         if (!Full2VideoRecorder.SUPPORTS_PERSISTENT_SURFACE) {
             // On API 21 and 22, we must restart the session at each time.
-            if (stub == mFullVideoPendingStub) {
-                // We have restarted and are ready to take the video.
-                doTakeVideo(mFullVideoPendingStub);
-                mFullVideoPendingStub = null;
-            } else {
-                // Save the pending data and restart the session.
-                mFullVideoPendingStub = stub;
-                restartBind();
-            }
+            // Save the pending data and restart the session.
+            mFullVideoPendingStub = stub;
+            restartBind();
         } else {
             doTakeVideo(stub);
         }
@@ -820,9 +863,48 @@
 
     //region FrameProcessing
 
+    @NonNull
     @Override
-    public void onBufferAvailable(@NonNull byte[] buffer) {
-        // TODO
+    protected FrameManager instantiateFrameManager() {
+        return new FrameManager(2, null);
+    }
+
+    @Override
+    public void onImageAvailable(ImageReader reader) {
+        byte[] data = getFrameManager().getBuffer();
+        if (data == null) {
+            LOG.w("onImageAvailable", "no byte buffer!");
+            return;
+        }
+        Image image = null;
+        try {
+            image = reader.acquireLatestImage();
+        } catch (IllegalStateException ignore) { }
+        if (image == null) {
+            LOG.w("onImageAvailable", "we have a byte buffer but no Image!");
+            getFrameManager().onBufferUnused(data);
+            return;
+        }
+        LOG.i("onImageAvailable", "we have both a byte buffer and an Image.");
+        try {
+            ImageHelper.convertToNV21(image, data);
+        } catch (Exception e) {
+            LOG.w("onImageAvailable", "error while converting.");
+            getFrameManager().onBufferUnused(data);
+            image.close();
+            return;
+        }
+        image.close();
+        if (getEngineState() == STATE_STARTED) {
+            Frame frame = getFrameManager().getFrame(data,
+                    System.currentTimeMillis(),
+                    offset(REF_SENSOR, REF_OUTPUT),
+                    mFrameProcessingSize,
+                    FRAME_PROCESSING_FORMAT);
+            mCallback.dispatchFrame(frame);
+        } else {
+            getFrameManager().onBufferUnused(data);
+        }
     }
 
     @Override

cameraview/src/main/java/com/otaliastudios/cameraview/engine/CameraEngine.java

@@ -115,7 +115,6 @@ import java.util.concurrent.Executor;
  */
 public abstract class CameraEngine implements
         CameraPreview.SurfaceCallback,
-        FrameManager.BufferCallback,
         PictureRecorder.PictureResultListener,
         VideoRecorder.VideoResultListener {
@@ -224,7 +223,7 @@
         mCrashHandler = new Handler(Looper.getMainLooper());
         mHandler = WorkerHandler.get("CameraViewEngine");
         mHandler.getThread().setUncaughtExceptionHandler(new CrashExceptionHandler());
-        mFrameManager = new FrameManager(2, this);
+        mFrameManager = instantiateFrameManager();
     }
 
     public void setPreview(@NonNull CameraPreview cameraPreview) {
@@ -474,11 +473,11 @@
     @SuppressWarnings("WeakerAccess")
     protected void restartBind() {
-        LOG.i("restartPreviewAndBind", "posting.");
+        LOG.i("restartBind", "posting.");
         mHandler.run(new Runnable() {
             @Override
             public void run() {
-                LOG.i("restartPreviewAndBind", "executing.");
+                LOG.i("restartBind", "executing.");
                 stopPreview(false).continueWithTask(mHandler.getExecutor(), new Continuation<Void, Task<Void>>() {
                     @Override
                     public Task<Void> then(@NonNull Task<Void> task) {
@@ -605,10 +604,13 @@
                 // Compute a new camera preview size and apply.
                 Size newSize = computePreviewStreamSize();
-                if (newSize.equals(mPreviewStreamSize)) return;
-                LOG.i("onSurfaceChanged:", "Computed a new preview size. Going on.");
-                mPreviewStreamSize = newSize;
-                onPreviewStreamSizeChanged();
+                if (newSize.equals(mPreviewStreamSize)) {
+                    LOG.i("onSurfaceChanged:", "The computed preview size is identical. No op.");
+                } else {
+                    LOG.i("onSurfaceChanged:", "Computed a new preview size. Calling onPreviewStreamSizeChanged().");
+                    mPreviewStreamSize = newSize;
+                    onPreviewStreamSizeChanged();
+                }
             }
         });
     }
@@ -895,6 +897,14 @@
      */
     protected abstract boolean collectCameraInfo(@NonNull Facing facing);
 
+    /**
+     * Called at construction time to get a frame manager that can later be
+     * accessed through {@link #getFrameManager()}.
+     * @return a frame manager
+     */
+    @NonNull
+    protected abstract FrameManager instantiateFrameManager();
+
     // If closed, no-op. If opened, check supported and apply.
     public abstract void setZoom(float zoom, @Nullable PointF[] points, boolean notify);
@@ -1353,7 +1363,7 @@
         if (targetMinSize == null) throw new IllegalStateException("targetMinSize should not be null here.");
         AspectRatio targetRatio = AspectRatio.of(mCaptureSize.getWidth(), mCaptureSize.getHeight());
         if (flip) targetRatio = targetRatio.flip();
-        LOG.i("size:", "computePreviewStreamSize:", "targetRatio:", targetRatio, "targetMinSize:", targetMinSize);
+        LOG.i("computePreviewStreamSize:", "targetRatio:", targetRatio, "targetMinSize:", targetMinSize);
         SizeSelector matchRatio = SizeSelectors.and( // Match this aspect ratio and sort by biggest
                 SizeSelectors.aspectRatio(targetRatio, 0),
                 SizeSelectors.biggest());

cameraview/src/main/java/com/otaliastudios/cameraview/frame/Frame.java

@@ -1,5 +1,6 @@
 package com.otaliastudios.cameraview.frame;
 
+import com.otaliastudios.cameraview.CameraLogger;
 import com.otaliastudios.cameraview.size.Size;
 
 import androidx.annotation.NonNull;
@@ -10,10 +11,14 @@
  */
 public class Frame {
 
+    private final static String TAG = Frame.class.getSimpleName();
+    private final static CameraLogger LOG = CameraLogger.create(TAG);
+
     @VisibleForTesting FrameManager mManager;
 
     private byte[] mData = null;
     private long mTime = -1;
+    private long mLastTime = -1;
     private int mRotation = 0;
     private Size mSize = null;
     private int mFormat = -1;
@@ -29,6 +34,7 @@
     private void ensureAlive() {
         if (!isAlive()) {
+            LOG.e("Frame is dead! time:", mTime, "lastTime:", mLastTime);
             throw new RuntimeException("You should not access a released frame. " +
                     "If this frame was passed to a FrameProcessor, you can only use its contents synchronously," +
                     "for the duration of the process() method.");
@@ -38,6 +44,7 @@
     void set(@NonNull byte[] data, long time, int rotation, @NonNull Size size, int format) {
         this.mData = data;
         this.mTime = time;
+        this.mLastTime = time;
        this.mRotation = rotation;
         this.mSize = size;
         this.mFormat = format;
@@ -73,6 +80,7 @@
      */
     public void release() {
         if (!isAlive()) return;
+        LOG.v("Frame with time", mTime, "is being released. Has manager:", mManager != null);
 
         if (mManager != null) {
             // If needed, the manager will call releaseManager on us.
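
Frames are pooled: once a FrameProcessor returns from process(), CameraView releases the frame and recycles its buffer, which is the situation the new mLastTime field and the ensureAlive() log above help to debug. Below is a minimal sketch of a processor that respects this contract by copying the data before doing any asynchronous work; the class name is illustrative and only the Frame/FrameProcessor API visible in this commit is assumed.

    import androidx.annotation.NonNull;

    import com.otaliastudios.cameraview.frame.Frame;
    import com.otaliastudios.cameraview.frame.FrameProcessor;

    class CopyingFrameProcessor implements FrameProcessor {

        @Override
        public void process(@NonNull Frame frame) {
            // The frame and its byte[] are only valid for the duration of this call:
            // after process() returns, CameraView calls frame.release() and the buffer is reused.
            byte[] copy = new byte[frame.getData().length];
            System.arraycopy(frame.getData(), 0, copy, 0, copy.length);
            int rotation = frame.getRotation();
            long time = frame.getTime();
            // Hand 'copy' (not frame.getData()) to a background thread or detector here.
        }
    }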

cameraview/src/main/java/com/otaliastudios/cameraview/frame/FrameManager.java

@@ -1,6 +1,7 @@
 package com.otaliastudios.cameraview.frame;
 
+import com.otaliastudios.cameraview.CameraLogger;
 import com.otaliastudios.cameraview.size.Size;
 
 import androidx.annotation.NonNull;
@@ -25,6 +26,9 @@ import java.util.concurrent.LinkedBlockingQueue;
  */
 public class FrameManager {
 
+    private static final String TAG = FrameManager.class.getSimpleName();
+    private static final CameraLogger LOG = CameraLogger.create(TAG);
+
     /**
      * Receives callbacks on buffer availability
      * (when a Frame is released, we reuse its buffer).
@@ -33,10 +37,24 @@
         void onBufferAvailable(@NonNull byte[] buffer);
     }
 
-    private int mPoolSize;
-    private int mBufferSize;
-    private BufferCallback mCallback;
-    private LinkedBlockingQueue<Frame> mQueue;
+    private final int mPoolSize;
+    private int mBufferSize = -1;
+    private LinkedBlockingQueue<Frame> mFrameQueue;
+    private LinkedBlockingQueue<byte[]> mBufferQueue;
+    private BufferCallback mBufferCallback;
+    private final int mBufferMode;
+
+    /**
+     * In this mode, we have a {@link #mBufferCallback} and dispatch
+     * new buffers to the callback.
+     */
+    private final static int BUFFER_MODE_DISPATCH = 0;
+
+    /**
+     * In this mode, we have a {@link #mBufferQueue} where we store
+     * buffers and only dispatch when requested.
+     */
+    private final static int BUFFER_MODE_ENQUEUE = 1;
 
     /**
      * Construct a new frame manager.
@@ -48,9 +66,14 @@
      */
     public FrameManager(int poolSize, @Nullable BufferCallback callback) {
         mPoolSize = poolSize;
-        mCallback = callback;
-        mQueue = new LinkedBlockingQueue<>(mPoolSize);
-        mBufferSize = -1;
+        mFrameQueue = new LinkedBlockingQueue<>(mPoolSize);
+        if (callback != null) {
+            mBufferCallback = callback;
+            mBufferMode = BUFFER_MODE_DISPATCH;
+        } else {
+            mBufferQueue = new LinkedBlockingQueue<>(mPoolSize);
+            mBufferMode = BUFFER_MODE_ENQUEUE;
+        }
     }
 
     /**
@@ -65,41 +88,46 @@
      */
     public int setUp(int bitsPerPixel, @NonNull Size previewSize) {
         // TODO throw if called twice without release?
-        mBufferSize = getBufferSize(bitsPerPixel, previewSize);
+        long sizeInBits = previewSize.getHeight() * previewSize.getWidth() * bitsPerPixel;
+        mBufferSize = (int) Math.ceil(sizeInBits / 8.0d);
         for (int i = 0; i < mPoolSize; i++) {
-            mCallback.onBufferAvailable(new byte[mBufferSize]);
+            if (mBufferMode == BUFFER_MODE_DISPATCH) {
+                mBufferCallback.onBufferAvailable(new byte[mBufferSize]);
+            } else {
+                mBufferQueue.offer(new byte[mBufferSize]);
+            }
         }
         return mBufferSize;
     }
 
     /**
-     * Releases all frames controlled by this manager and
-     * clears the pool.
+     * Returns a new byte buffer that can be filled.
+     * This can only be called in {@link #BUFFER_MODE_ENQUEUE} mode, where the frame
+     * manager also holds a queue of the byte buffers.
+     *
+     * If not null, the buffer returned by this method can be filled and used to get
+     * a new frame through {@link #getFrame(byte[], long, int, Size, int)}.
+     *
+     * @return a buffer, or null
      */
-    public void release() {
-        for (Frame frame : mQueue) {
-            frame.releaseManager();
-            frame.release();
-        }
-        mQueue.clear();
-        mBufferSize = -1;
+    @Nullable
+    public byte[] getBuffer() {
+        if (mBufferMode != BUFFER_MODE_ENQUEUE) {
+            throw new IllegalStateException("Can't call getBuffer() when not in BUFFER_MODE_ENQUEUE.");
+        }
+        return mBufferQueue.poll();
     }
 
-    void onFrameReleased(Frame frame) {
-        byte[] buffer = frame.getData();
-        boolean willRecycle = mQueue.offer(frame);
-        if (!willRecycle) {
-            // If frame queue is full, let's drop everything.
-            frame.releaseManager();
-        } else {
-            // If frame will be recycled, let's recycle the buffer as well.
-            int currSize = buffer.length;
-            int reqSize = mBufferSize;
-            if (currSize == reqSize && mCallback != null) {
-                mCallback.onBufferAvailable(buffer);
-            }
-        }
+    /**
+     * Can be called if the buffer obtained by {@link #getBuffer()}
+     * was not used to construct a frame, so it can be put back into the queue.
+     * @param buffer a buffer
+     */
+    public void onBufferUnused(@NonNull byte[] buffer) {
+        if (mBufferMode != BUFFER_MODE_ENQUEUE) {
+            throw new IllegalStateException("Can't call onBufferUnused() when not in BUFFER_MODE_ENQUEUE.");
+        }
+        mBufferQueue.offer(buffer);
     }
 
     /**
@@ -113,15 +141,58 @@
      *
      * @return a new frame
      */
+    @NonNull
     public Frame getFrame(@NonNull byte[] data, long time, int rotation, @NonNull Size previewSize, int previewFormat) {
-        Frame frame = mQueue.poll();
-        if (frame == null) frame = new Frame(this);
+        Frame frame = mFrameQueue.poll();
+        if (frame != null) {
+            LOG.v("getFrame for time:", time, "RECYCLING.", "Data:", data != null);
+        } else {
+            LOG.v("getFrame for time:", time, "CREATING.", "Data:", data != null);
+            frame = new Frame(this);
+        }
         frame.set(data, time, rotation, previewSize, previewFormat);
         return frame;
     }
 
-    private int getBufferSize(int bitsPerPixel, @NonNull Size previewSize) {
-        long sizeInBits = previewSize.getHeight() * previewSize.getWidth() * bitsPerPixel;
-        return (int) Math.ceil(sizeInBits / 8.0d);
+    /**
+     * Releases all frames controlled by this manager and
+     * clears the pool.
+     * In BUFFER_MODE_ENQUEUE, also releases all the buffers.
+     */
+    public void release() {
+        LOG.w("Releasing all frames!");
+        for (Frame frame : mFrameQueue) {
+            frame.releaseManager();
+            frame.release();
+        }
+        mFrameQueue.clear();
+        if (mBufferMode == BUFFER_MODE_ENQUEUE) {
+            mBufferQueue.clear();
+        }
+        mBufferSize = -1;
+    }
+
+    /**
+     * Called by child frames when they are released.
+     * @param frame the released frame
+     */
+    void onFrameReleased(@NonNull Frame frame) {
+        byte[] buffer = frame.getData();
+        boolean willRecycle = mFrameQueue.offer(frame);
+        if (!willRecycle) {
+            // If frame queue is full, let's drop everything.
+            frame.releaseManager();
+        } else {
+            // If frame will be recycled, let's recycle the buffer as well.
+            int currSize = buffer.length;
+            int reqSize = mBufferSize;
+            if (currSize == reqSize) {
+                if (mBufferMode == BUFFER_MODE_DISPATCH) {
+                    mBufferCallback.onBufferAvailable(buffer);
+                } else {
+                    mBufferQueue.offer(buffer);
+                }
+            }
+        }
     }
 }
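
A rough sketch of how the two buffer modes introduced above are used, assuming only the FrameManager API visible in this diff; the class name and the 640x480 size are illustrative. In dispatch mode (what Camera1Engine does) the manager pushes buffers to a BufferCallback; in enqueue mode (what Camera2Engine does with a null callback) the caller pulls buffers with getBuffer() and returns unused ones with onBufferUnused().

    import android.graphics.ImageFormat;

    import androidx.annotation.NonNull;

    import com.otaliastudios.cameraview.frame.Frame;
    import com.otaliastudios.cameraview.frame.FrameManager;
    import com.otaliastudios.cameraview.size.Size;

    class FrameManagerModesSketch {

        // Dispatch mode: the manager pushes buffers to the callback at setUp() time and on recycle.
        static FrameManager dispatchMode() {
            FrameManager manager = new FrameManager(2, new FrameManager.BufferCallback() {
                @Override
                public void onBufferAvailable(@NonNull byte[] buffer) {
                    // Camera1Engine would hand this to Camera.addCallbackBuffer(buffer).
                }
            });
            manager.setUp(ImageFormat.getBitsPerPixel(ImageFormat.NV21), new Size(640, 480));
            return manager;
        }

        // Enqueue mode: null callback, buffers are pulled on demand.
        static void enqueueMode() {
            FrameManager manager = new FrameManager(2, null);
            manager.setUp(ImageFormat.getBitsPerPixel(ImageFormat.NV21), new Size(640, 480));
            byte[] buffer = manager.getBuffer(); // may be null if all buffers are in use
            if (buffer == null) return;
            // Fill the buffer (e.g. via ImageHelper.convertToNV21), then build a frame...
            Frame frame = manager.getFrame(buffer, System.currentTimeMillis(), 0, new Size(640, 480), ImageFormat.NV21);
            frame.release(); // releasing the frame puts the buffer back into the queue
            // ...or, if the buffer was not used after all:
            // manager.onBufferUnused(buffer);
        }
    }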

cameraview/src/main/java/com/otaliastudios/cameraview/internal/utils/ImageHelper.java (new file)

@@ -0,0 +1,96 @@
+package com.otaliastudios.cameraview.internal.utils;
+
+import android.graphics.ImageFormat;
+import android.graphics.Rect;
+import android.media.Image;
+
+import com.otaliastudios.cameraview.size.AspectRatio;
+import com.otaliastudios.cameraview.size.Size;
+
+import java.nio.ByteBuffer;
+
+import androidx.annotation.NonNull;
+import androidx.annotation.RequiresApi;
+
+/**
+ * Conversions for {@link android.media.Image}s into byte arrays.
+ */
+@RequiresApi(19)
+public class ImageHelper {
+
+    // https://stackoverflow.com/a/52740776/4288782
+    public static void convertToNV21(@NonNull Image image, @NonNull byte[] result) {
+        if (image.getFormat() != ImageFormat.YUV_420_888) {
+            throw new IllegalStateException("Can only convert from YUV_420_888.");
+        }
+        int width = image.getWidth();
+        int height = image.getHeight();
+        int ySize = width * height;
+        int uvSize = width * height / 4;
+
+        ByteBuffer yBuffer = image.getPlanes()[0].getBuffer(); // Y
+        ByteBuffer uBuffer = image.getPlanes()[1].getBuffer(); // U
+        ByteBuffer vBuffer = image.getPlanes()[2].getBuffer(); // V
+
+        int rowStride = image.getPlanes()[0].getRowStride();
+        if (image.getPlanes()[0].getPixelStride() != 1) {
+            throw new AssertionError("Something wrong in convertToNV21");
+        }
+
+        int pos = 0;
+        if (rowStride == width) { // likely
+            yBuffer.get(result, 0, ySize);
+            pos += ySize;
+        } else {
+            int yBufferPos = width - rowStride; // not an actual position
+            for (; pos < ySize; pos += width) {
+                yBufferPos += rowStride - width;
+                yBuffer.position(yBufferPos);
+                yBuffer.get(result, pos, width);
+            }
+        }
+
+        rowStride = image.getPlanes()[2].getRowStride();
+        int pixelStride = image.getPlanes()[2].getPixelStride();
+        if (rowStride != image.getPlanes()[1].getRowStride()) {
+            throw new AssertionError("Something wrong in convertToNV21");
+        }
+        if (pixelStride != image.getPlanes()[1].getPixelStride()) {
+            throw new AssertionError("Something wrong in convertToNV21");
+        }
+
+        if (pixelStride == 2 && rowStride == width && uBuffer.get(0) == vBuffer.get(1)) {
+            // maybe V and U planes overlap as per NV21, which means vBuffer[1] is alias of uBuffer[0]
+            byte savePixel = vBuffer.get(1);
+            vBuffer.put(1, (byte) 0);
+            if (uBuffer.get(0) == 0) {
+                vBuffer.put(1, (byte) 255);
+                //noinspection ConstantConditions
+                if (uBuffer.get(0) == 255) {
+                    vBuffer.put(1, savePixel);
+                    vBuffer.get(result, ySize, uvSize);
+                    return; // shortcut
+                }
+            }
+            // unfortunately, the check failed. We must save U and V pixel by pixel
+            vBuffer.put(1, savePixel);
+        }
+
+        // other optimizations could check if (pixelStride == 1) or (pixelStride == 2),
+        // but performance gain would be less significant
+        for (int row = 0; row < height / 2; row++) {
+            for (int col = 0; col < width / 2; col++) {
+                int vuPos = col * pixelStride + row * rowStride;
+                result[pos++] = vBuffer.get(vuPos);
+                result[pos++] = uBuffer.get(vuPos);
+            }
+        }
+    }
+}
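
A minimal sketch of how this internal helper is meant to be called, mirroring the Camera2Engine.onImageAvailable implementation above: the ImageReader must produce YUV_420_888 images and the destination array must be sized for NV21 at 12 bits per pixel. The listener class name and the fixed 640x480 size are assumptions for the example, not part of the commit.

    import android.graphics.ImageFormat;
    import android.media.Image;
    import android.media.ImageReader;

    import com.otaliastudios.cameraview.internal.utils.ImageHelper;

    class Nv21ConversionSketch implements ImageReader.OnImageAvailableListener {

        // NV21 needs 12 bits per pixel: a full-resolution Y plane plus interleaved V/U at quarter resolution.
        private final byte[] nv21 = new byte[640 * 480 * ImageFormat.getBitsPerPixel(ImageFormat.NV21) / 8];

        @Override
        public void onImageAvailable(ImageReader reader) {
            Image image = reader.acquireLatestImage(); // reader must be configured with ImageFormat.YUV_420_888
            if (image == null) return;
            try {
                ImageHelper.convertToNV21(image, nv21); // fills the preallocated array, no per-frame allocation
            } finally {
                image.close(); // always close: the reader only holds a small number of images
            }
            // nv21 now holds the frame in the same layout Camera1 delivers, ready for existing processors.
        }
    }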

demo/src/main/java/com/otaliastudios/cameraview/demo/CameraActivity.java

@@ -7,6 +7,7 @@ import androidx.annotation.NonNull;
 import com.google.android.material.bottomsheet.BottomSheetBehavior;
 import androidx.appcompat.app.AppCompatActivity;
 
+import android.util.Log;
 import android.view.View;
 import android.view.ViewGroup;
 import android.view.ViewTreeObserver;
@@ -20,6 +21,8 @@ import com.otaliastudios.cameraview.CameraView;
 import com.otaliastudios.cameraview.PictureResult;
 import com.otaliastudios.cameraview.controls.Mode;
 import com.otaliastudios.cameraview.VideoResult;
+import com.otaliastudios.cameraview.frame.Frame;
+import com.otaliastudios.cameraview.frame.FrameProcessor;
 import com.otaliastudios.cameraview.size.SizeSelectors;
 
 import java.io.File;
@@ -42,6 +45,16 @@ public class CameraActivity extends AppCompatActivity implements View.OnClickLis
         camera = findViewById(R.id.camera);
         camera.setLifecycleOwner(this);
         camera.addCameraListener(new Listener());
+        camera.addFrameProcessor(new FrameProcessor() {
+            private long lastTime = System.currentTimeMillis();
+            @Override
+            public void process(@NonNull Frame frame) {
+                long newTime = frame.getTime();
+                long delay = newTime - lastTime;
+                lastTime = newTime;
+                Log.e("Frames", "Delay: " + delay + " millis, FPS: " + 1000 / delay);
+            }
+        });
 
         findViewById(R.id.edit).setOnClickListener(this);
         findViewById(R.id.capturePicture).setOnClickListener(this);

demo/src/main/res/layout/activity_camera.xml

@@ -17,7 +17,7 @@
         android:layout_marginBottom="88dp"
         android:keepScreenOn="true"
         app:cameraExperimental="true"
-        app:cameraEngine="camera2"
+        app:cameraEngine="camera1"
         app:cameraPreview="glSurface"
         app:cameraPlaySounds="true"
         app:cameraGrid="off"
