diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index f4543c7a..7f9ce02e 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -13,7 +13,7 @@ jobs: name: Base Checks runs-on: ubuntu-latest steps: - - uses: actions/checkout@v1 + - uses: actions/checkout@v2 - uses: actions/setup-java@v1 with: java-version: 1.8 @@ -23,7 +23,7 @@ jobs: name: Unit Tests runs-on: ubuntu-latest steps: - - uses: actions/checkout@v1 + - uses: actions/checkout@v2 - uses: actions/setup-java@v1 with: java-version: 1.8 @@ -59,7 +59,7 @@ jobs: - EMULATOR_API: 22 EMULATOR_ARCH: x86 steps: - - uses: actions/checkout@v1 + - uses: actions/checkout@v2 - uses: actions/setup-java@v1 with: java-version: 1.8 @@ -84,7 +84,7 @@ jobs: runs-on: ubuntu-latest needs: [ANDROID_UNIT_TESTS, ANDROID_EMULATOR_TESTS] steps: - - uses: actions/checkout@v1 + - uses: actions/checkout@v2 - uses: actions/setup-java@v1 with: java-version: 1.8 diff --git a/.github/workflows/deploy.yml b/.github/workflows/deploy.yml index a0cfa3b4..775f259d 100644 --- a/.github/workflows/deploy.yml +++ b/.github/workflows/deploy.yml @@ -1,6 +1,8 @@ # https://help.github.com/en/actions/automating-your-workflow-with-github-actions/workflow-syntax-for-github-actions name: Deploy -on: [release] +on: + release: + types: [published] jobs: BINTRAY_UPLOAD: name: Bintray Upload @@ -10,7 +12,7 @@ jobs: BINTRAY_USER: ${{ secrets.BINTRAY_USER }} BINTRAY_KEY: ${{ secrets.BINTRAY_KEY }} steps: - - uses: actions/checkout@v1 + - uses: actions/checkout@v2 - uses: actions/setup-java@v1 with: java-version: 1.8 diff --git a/cameraview/src/androidTest/java/com/otaliastudios/cameraview/CameraViewTest.java b/cameraview/src/androidTest/java/com/otaliastudios/cameraview/CameraViewTest.java index 1bc19bed..4564ad5a 100644 --- a/cameraview/src/androidTest/java/com/otaliastudios/cameraview/CameraViewTest.java +++ b/cameraview/src/androidTest/java/com/otaliastudios/cameraview/CameraViewTest.java @@ -184,6 +184,7 @@ public class CameraViewTest extends BaseTest { assertEquals(cameraView.getUseDeviceOrientation(), CameraView.DEFAULT_USE_DEVICE_ORIENTATION); assertEquals(cameraView.getPictureMetering(), CameraView.DEFAULT_PICTURE_METERING); assertEquals(cameraView.getPictureSnapshotMetering(), CameraView.DEFAULT_PICTURE_SNAPSHOT_METERING); + assertEquals(cameraView.getFrameProcessingPoolSize(), CameraView.DEFAULT_FRAME_PROCESSING_POOL_SIZE); assertEquals(cameraView.getGestureAction(Gesture.TAP), gestures.getTapAction()); assertEquals(cameraView.getGestureAction(Gesture.LONG_TAP), gestures.getLongTapAction()); assertEquals(cameraView.getGestureAction(Gesture.PINCH), gestures.getPinchAction()); @@ -831,6 +832,27 @@ public class CameraViewTest extends BaseTest { assertEquals(ImageFormat.YUV_422_888, cameraView.getFrameProcessingFormat()); } + @Test + public void testFrameProcessingPoolSize() { + cameraView.setFrameProcessingPoolSize(4); + assertEquals(4, cameraView.getFrameProcessingPoolSize()); + cameraView.setFrameProcessingPoolSize(6); + assertEquals(6, cameraView.getFrameProcessingPoolSize()); + } + + @Test + public void testFrameProcessingExecutors() { + cameraView.setFrameProcessingExecutors(5); + assertEquals(5, cameraView.getFrameProcessingExecutors()); + cameraView.setFrameProcessingExecutors(2); + assertEquals(2, cameraView.getFrameProcessingExecutors()); + } + + @Test(expected = RuntimeException.class) + public void testFrameProcessingExecutors_throws() { + cameraView.setFrameProcessingExecutors(0); + } + //endregion //region Lists of listeners 
and processors diff --git a/cameraview/src/androidTest/java/com/otaliastudios/cameraview/engine/Camera2IntegrationTest.java b/cameraview/src/androidTest/java/com/otaliastudios/cameraview/engine/Camera2IntegrationTest.java index 19d16c02..ecf3fd8d 100644 --- a/cameraview/src/androidTest/java/com/otaliastudios/cameraview/engine/Camera2IntegrationTest.java +++ b/cameraview/src/androidTest/java/com/otaliastudios/cameraview/engine/Camera2IntegrationTest.java @@ -36,7 +36,7 @@ public class Camera2IntegrationTest extends CameraIntegrationTest return Engine.CAMERA2; } - @Override + /* @Override protected void onOpenSync() { super.onOpenSync(); // Extra wait for the first frame to be dispatched. @@ -54,7 +54,7 @@ public class Camera2IntegrationTest extends CameraIntegrationTest } }.start(controller); try { latch.await(); } catch (InterruptedException ignore) {} - } + } */ @Override protected long getMeteringTimeoutMillis() { diff --git a/cameraview/src/androidTest/java/com/otaliastudios/cameraview/engine/CameraIntegrationTest.java b/cameraview/src/androidTest/java/com/otaliastudios/cameraview/engine/CameraIntegrationTest.java index 57500c1f..3866dd70 100644 --- a/cameraview/src/androidTest/java/com/otaliastudios/cameraview/engine/CameraIntegrationTest.java +++ b/cameraview/src/androidTest/java/com/otaliastudios/cameraview/engine/CameraIntegrationTest.java @@ -1135,12 +1135,17 @@ public abstract class CameraIntegrationTest extends @Retry(emulatorOnly = true) @SdkExclude(maxSdkVersion = 22, emulatorOnly = true) public void testFrameProcessing_format() { + // We wouldn't need to open/close for each format, but we do because legacy devices can + // crash due to their bad internal implementation when we perform restartBind(). + // And setFrameProcessorFormat can trigger such restart. 
CameraOptions o = openSync(true); Collection formats = o.getSupportedFrameProcessingFormats(); + closeSync(true); for (int format : formats) { LOG.i("[TEST FRAME FORMAT]", "Testing", format, "..."); Op op = testFrameProcessorFormat(format); assertNotNull(op.await(DELAY)); + closeSync(true); } } @@ -1157,6 +1162,7 @@ public abstract class CameraIntegrationTest extends } } }); + openSync(true); return op; } diff --git a/cameraview/src/androidTest/java/com/otaliastudios/cameraview/engine/MockCameraEngine.java b/cameraview/src/androidTest/java/com/otaliastudios/cameraview/engine/MockCameraEngine.java index aa556810..5b3486c4 100644 --- a/cameraview/src/androidTest/java/com/otaliastudios/cameraview/engine/MockCameraEngine.java +++ b/cameraview/src/androidTest/java/com/otaliastudios/cameraview/engine/MockCameraEngine.java @@ -195,8 +195,8 @@ public class MockCameraEngine extends CameraBaseEngine { @NonNull @Override - protected FrameManager instantiateFrameManager() { - return new ByteBufferFrameManager(2, null); + protected FrameManager instantiateFrameManager(int poolSize) { + return new ByteBufferFrameManager(poolSize, null); } @Override diff --git a/cameraview/src/androidTest/java/com/otaliastudios/cameraview/frame/ByteBufferFrameManagerTest.java b/cameraview/src/androidTest/java/com/otaliastudios/cameraview/frame/ByteBufferFrameManagerTest.java index d3f1242f..78c6321d 100644 --- a/cameraview/src/androidTest/java/com/otaliastudios/cameraview/frame/ByteBufferFrameManagerTest.java +++ b/cameraview/src/androidTest/java/com/otaliastudios/cameraview/frame/ByteBufferFrameManagerTest.java @@ -14,6 +14,7 @@ import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; +import static org.junit.Assert.assertNotNull; import static org.mockito.Matchers.any; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.never; @@ -56,9 +57,11 @@ public class ByteBufferFrameManagerTest extends BaseTest { int length = manager.getFrameBytes(); Frame frame1 = manager.getFrame(new byte[length], 0, 0); - // Since frame1 is already taken and poolSize = 1, a new Frame is created. - Frame frame2 = manager.getFrame(new byte[length], 0, 0); - // Release the first frame so it goes back into the pool. + assertNotNull(frame1); + // Since frame1 is already taken and poolSize = 1, getFrame() would return null. + // To create a new frame, freeze the first one. + Frame frame2 = frame1.freeze(); + // Now release the first frame so it goes back into the pool. manager.onFrameReleased(frame1, (byte[]) frame1.getData()); reset(callback); // Release the second. The pool is already full, so onBufferAvailable should not be called @@ -76,6 +79,7 @@ public class ByteBufferFrameManagerTest extends BaseTest { // A camera preview frame comes. Request a frame. byte[] picture = new byte[length]; Frame frame = manager.getFrame(picture, 0, 0); + assertNotNull(frame); // Release the frame and ensure that onBufferAvailable is called. reset(callback); @@ -92,6 +96,7 @@ public class ByteBufferFrameManagerTest extends BaseTest { // A camera preview frame comes. Request a frame. byte[] picture = new byte[length]; Frame frame = manager.getFrame(picture, 0, 0); + assertNotNull(frame); // Don't release the frame. Change the allocation size. 
manager.setUp(ImageFormat.NV16, new Size(15, 15)); diff --git a/cameraview/src/androidTest/java/com/otaliastudios/cameraview/frame/FrameManagerTest.java b/cameraview/src/androidTest/java/com/otaliastudios/cameraview/frame/FrameManagerTest.java index b553e145..de8aa6a7 100644 --- a/cameraview/src/androidTest/java/com/otaliastudios/cameraview/frame/FrameManagerTest.java +++ b/cameraview/src/androidTest/java/com/otaliastudios/cameraview/frame/FrameManagerTest.java @@ -14,8 +14,12 @@ import org.junit.Test; import org.junit.runner.RunWith; import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; import static org.mockito.Matchers.any; import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.never; +import static org.mockito.Mockito.reset; import static org.mockito.Mockito.verify; @RunWith(AndroidJUnit4.class) @@ -38,9 +42,31 @@ public class FrameManagerTest extends BaseTest { manager.setUp(ImageFormat.NV21, new Size(50, 50)); Frame first = manager.getFrame("foo", 0, 0); + assertNotNull(first); first.release(); Frame second = manager.getFrame("bar", 0, 0); + assertNotNull(second); second.release(); assertEquals(first, second); } + + @Test + public void testGetFrame() { + FrameManager manager = new FrameManager(1, String.class) { + @Override + protected void onFrameDataReleased(@NonNull String data, boolean recycled) { } + + @NonNull + @Override + protected String onCloneFrameData(@NonNull String data) { + return data; + } + }; + manager.setUp(ImageFormat.NV21, new Size(50, 50)); + + Frame first = manager.getFrame("foo", 0, 0); + assertNotNull(first); + Frame second = manager.getFrame("bar", 0, 0); + assertNull(second); + } } diff --git a/cameraview/src/main/java/com/otaliastudios/cameraview/CameraView.java b/cameraview/src/main/java/com/otaliastudios/cameraview/CameraView.java index 636bc98a..bbf9d636 100644 --- a/cameraview/src/main/java/com/otaliastudios/cameraview/CameraView.java +++ b/cameraview/src/main/java/com/otaliastudios/cameraview/CameraView.java @@ -67,7 +67,6 @@ import com.otaliastudios.cameraview.gesture.TapGestureFinder; import com.otaliastudios.cameraview.internal.GridLinesLayout; import com.otaliastudios.cameraview.internal.utils.CropHelper; import com.otaliastudios.cameraview.internal.utils.OrientationHelper; -import com.otaliastudios.cameraview.internal.utils.WorkerHandler; import com.otaliastudios.cameraview.markers.AutoFocusMarker; import com.otaliastudios.cameraview.markers.AutoFocusTrigger; import com.otaliastudios.cameraview.markers.MarkerLayout; @@ -89,6 +88,12 @@ import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.concurrent.CopyOnWriteArrayList; +import java.util.concurrent.Executor; +import java.util.concurrent.LinkedBlockingQueue; +import java.util.concurrent.ThreadFactory; +import java.util.concurrent.ThreadPoolExecutor; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicInteger; import static android.view.View.MeasureSpec.AT_MOST; import static android.view.View.MeasureSpec.EXACTLY; @@ -111,6 +116,8 @@ public class CameraView extends FrameLayout implements LifecycleObserver { final static boolean DEFAULT_USE_DEVICE_ORIENTATION = true; final static boolean DEFAULT_PICTURE_METERING = true; final static boolean DEFAULT_PICTURE_SNAPSHOT_METERING = false; + final static int DEFAULT_FRAME_PROCESSING_POOL_SIZE = 2; + final static int DEFAULT_FRAME_PROCESSING_EXECUTORS = 1; // Self managed parameters 
private boolean mPlaySounds; @@ -119,8 +126,11 @@ public class CameraView extends FrameLayout implements LifecycleObserver { private Preview mPreview; private Engine mEngine; private Filter mPendingFilter; + private int mFrameProcessingExecutors; // Components + private Handler mUiHandler; + private Executor mFrameProcessingExecutor; @VisibleForTesting CameraCallbacks mCameraCallbacks; private CameraPreview mCameraPreview; private OrientationHelper mOrientationHelper; @@ -148,10 +158,6 @@ public class CameraView extends FrameLayout implements LifecycleObserver { // Overlays @VisibleForTesting OverlayLayout mOverlayLayout; - // Threading - private Handler mUiHandler; - private WorkerHandler mFrameProcessorsHandler; - public CameraView(@NonNull Context context) { super(context, null); initialize(context, null); @@ -206,6 +212,10 @@ public class CameraView extends FrameLayout implements LifecycleObserver { int frameMaxWidth = a.getInteger(R.styleable.CameraView_cameraFrameProcessingMaxWidth, 0); int frameMaxHeight = a.getInteger(R.styleable.CameraView_cameraFrameProcessingMaxHeight, 0); int frameFormat = a.getInteger(R.styleable.CameraView_cameraFrameProcessingFormat, 0); + int framePoolSize = a.getInteger(R.styleable.CameraView_cameraFrameProcessingPoolSize, + DEFAULT_FRAME_PROCESSING_POOL_SIZE); + int frameExecutors = a.getInteger(R.styleable.CameraView_cameraFrameProcessingExecutors, + DEFAULT_FRAME_PROCESSING_EXECUTORS); // Size selectors and gestures SizeSelectorParser sizeSelectors = new SizeSelectorParser(a); @@ -218,7 +228,6 @@ public class CameraView extends FrameLayout implements LifecycleObserver { // Components mCameraCallbacks = new CameraCallbacks(); mUiHandler = new Handler(Looper.getMainLooper()); - mFrameProcessorsHandler = WorkerHandler.get("FrameProcessorsWorker"); // Gestures mPinchGestureFinder = new PinchGestureFinder(mCameraCallbacks); @@ -267,7 +276,8 @@ public class CameraView extends FrameLayout implements LifecycleObserver { setFrameProcessingMaxWidth(frameMaxWidth); setFrameProcessingMaxHeight(frameMaxHeight); setFrameProcessingFormat(frameFormat); - mCameraEngine.setHasFrameProcessors(!mFrameProcessors.isEmpty()); + setFrameProcessingPoolSize(framePoolSize); + setFrameProcessingExecutors(frameExecutors); // Apply gestures mapGesture(Gesture.TAP, gestures.getTapAction()); @@ -982,6 +992,8 @@ public class CameraView extends FrameLayout implements LifecycleObserver { setFrameProcessingMaxWidth(oldEngine.getFrameProcessingMaxWidth()); setFrameProcessingMaxHeight(oldEngine.getFrameProcessingMaxHeight()); setFrameProcessingFormat(0 /* this is very engine specific, so do not pass */); + setFrameProcessingPoolSize(oldEngine.getFrameProcessingPoolSize()); + mCameraEngine.setHasFrameProcessors(!mFrameProcessors.isEmpty()); } /** @@ -1984,7 +1996,8 @@ public class CameraView extends FrameLayout implements LifecycleObserver { OrientationHelper.Callback, GestureFinder.Controller { - private CameraLogger mLogger = CameraLogger.create(CameraCallbacks.class.getSimpleName()); + private final String TAG = CameraCallbacks.class.getSimpleName(); + private final CameraLogger LOG = CameraLogger.create(TAG); @NonNull @Override @@ -2004,7 +2017,7 @@ public class CameraView extends FrameLayout implements LifecycleObserver { @Override public void dispatchOnCameraOpened(@NonNull final CameraOptions options) { - mLogger.i("dispatchOnCameraOpened", options); + LOG.i("dispatchOnCameraOpened", options); mUiHandler.post(new Runnable() { @Override public void run() { @@ -2017,7 +2030,7 @@ public 
class CameraView extends FrameLayout implements LifecycleObserver { @Override public void dispatchOnCameraClosed() { - mLogger.i("dispatchOnCameraClosed"); + LOG.i("dispatchOnCameraClosed"); mUiHandler.post(new Runnable() { @Override public void run() { @@ -2038,10 +2051,10 @@ public class CameraView extends FrameLayout implements LifecycleObserver { if (previewSize == null) { throw new RuntimeException("Preview stream size should not be null here."); } else if (previewSize.equals(mLastPreviewStreamSize)) { - mLogger.i("onCameraPreviewStreamSizeChanged:", + LOG.i("onCameraPreviewStreamSizeChanged:", "swallowing because the preview size has not changed.", previewSize); } else { - mLogger.i("onCameraPreviewStreamSizeChanged: posting a requestLayout call.", + LOG.i("onCameraPreviewStreamSizeChanged: posting a requestLayout call.", "Preview stream size:", previewSize); mUiHandler.post(new Runnable() { @Override @@ -2061,7 +2074,7 @@ public class CameraView extends FrameLayout implements LifecycleObserver { @Override public void dispatchOnPictureTaken(@NonNull final PictureResult.Stub stub) { - mLogger.i("dispatchOnPictureTaken", stub); + LOG.i("dispatchOnPictureTaken", stub); mUiHandler.post(new Runnable() { @Override public void run() { @@ -2075,7 +2088,7 @@ public class CameraView extends FrameLayout implements LifecycleObserver { @Override public void dispatchOnVideoTaken(@NonNull final VideoResult.Stub stub) { - mLogger.i("dispatchOnVideoTaken", stub); + LOG.i("dispatchOnVideoTaken", stub); mUiHandler.post(new Runnable() { @Override public void run() { @@ -2090,7 +2103,7 @@ public class CameraView extends FrameLayout implements LifecycleObserver { @Override public void dispatchOnFocusStart(@Nullable final Gesture gesture, @NonNull final PointF point) { - mLogger.i("dispatchOnFocusStart", gesture, point); + LOG.i("dispatchOnFocusStart", gesture, point); mUiHandler.post(new Runnable() { @Override public void run() { @@ -2112,7 +2125,7 @@ public class CameraView extends FrameLayout implements LifecycleObserver { public void dispatchOnFocusEnd(@Nullable final Gesture gesture, final boolean success, @NonNull final PointF point) { - mLogger.i("dispatchOnFocusEnd", gesture, success, point); + LOG.i("dispatchOnFocusEnd", gesture, success, point); mUiHandler.post(new Runnable() { @Override public void run() { @@ -2135,7 +2148,7 @@ public class CameraView extends FrameLayout implements LifecycleObserver { @Override public void onDeviceOrientationChanged(int deviceOrientation) { - mLogger.i("onDeviceOrientationChanged", deviceOrientation); + LOG.i("onDeviceOrientationChanged", deviceOrientation); int displayOffset = mOrientationHelper.getLastDisplayOffset(); if (!mUseDeviceOrientation) { // To fool the engine to return outputs in the VIEW reference system, @@ -2158,12 +2171,12 @@ public class CameraView extends FrameLayout implements LifecycleObserver { @Override public void onDisplayOffsetChanged(int displayOffset, boolean willRecreate) { - mLogger.i("onDisplayOffsetChanged", displayOffset, "recreate:", willRecreate); + LOG.i("onDisplayOffsetChanged", displayOffset, "recreate:", willRecreate); if (isOpened() && !willRecreate) { // Display offset changes when the device rotation lock is off and the activity // is free to rotate. However, some changes will NOT recreate the activity, namely // 180 degrees flips. In this case, we must restart the camera manually. 
- mLogger.w("onDisplayOffsetChanged", "restarting the camera."); + LOG.w("onDisplayOffsetChanged", "restarting the camera."); close(); open(); } @@ -2171,7 +2184,7 @@ public class CameraView extends FrameLayout implements LifecycleObserver { @Override public void dispatchOnZoomChanged(final float newValue, @Nullable final PointF[] fingers) { - mLogger.i("dispatchOnZoomChanged", newValue); + LOG.i("dispatchOnZoomChanged", newValue); mUiHandler.post(new Runnable() { @Override public void run() { @@ -2186,7 +2199,7 @@ public class CameraView extends FrameLayout implements LifecycleObserver { public void dispatchOnExposureCorrectionChanged(final float newValue, @NonNull final float[] bounds, @Nullable final PointF[] fingers) { - mLogger.i("dispatchOnExposureCorrectionChanged", newValue); + LOG.i("dispatchOnExposureCorrectionChanged", newValue); mUiHandler.post(new Runnable() { @Override public void run() { @@ -2201,23 +2214,22 @@ public class CameraView extends FrameLayout implements LifecycleObserver { public void dispatchFrame(@NonNull final Frame frame) { // The getTime() below might crash if developers incorrectly release // frames asynchronously. - mLogger.v("dispatchFrame:", frame.getTime(), "processors:", mFrameProcessors.size()); + LOG.v("dispatchFrame:", frame.getTime(), "processors:", mFrameProcessors.size()); if (mFrameProcessors.isEmpty()) { // Mark as released. This instance will be reused. frame.release(); } else { // Dispatch this frame to frame processors. - mFrameProcessorsHandler.run(new Runnable() { + mFrameProcessingExecutor.execute(new Runnable() { @Override public void run() { - mLogger.v("dispatchFrame: dispatching", frame.getTime(), + LOG.v("dispatchFrame: executing. Passing", frame.getTime(), "to processors."); for (FrameProcessor processor : mFrameProcessors) { try { processor.process(frame); } catch (Exception e) { - // Don't let a single processor crash the processor thread. - mLogger.w("Frame processor crashed:", e); + LOG.w("Frame processor crashed:", e); } } frame.release(); @@ -2228,7 +2240,7 @@ public class CameraView extends FrameLayout implements LifecycleObserver { @Override public void dispatchError(final CameraException exception) { - mLogger.i("dispatchError", exception); + LOG.i("dispatchError", exception); mUiHandler.post(new Runnable() { @Override public void run() { @@ -2241,7 +2253,7 @@ public class CameraView extends FrameLayout implements LifecycleObserver { @Override public void dispatchOnVideoRecordingStart() { - mLogger.i("dispatchOnVideoRecordingStart"); + LOG.i("dispatchOnVideoRecordingStart"); mUiHandler.post(new Runnable() { @Override public void run() { @@ -2254,7 +2266,7 @@ public class CameraView extends FrameLayout implements LifecycleObserver { @Override public void dispatchOnVideoRecordingEnd() { - mLogger.i("dispatchOnVideoRecordingEnd"); + LOG.i("dispatchOnVideoRecordingEnd"); mUiHandler.post(new Runnable() { @Override public void run() { @@ -2370,6 +2382,75 @@ public class CameraView extends FrameLayout implements LifecycleObserver { return mCameraEngine.getFrameProcessingFormat(); } + /** + * Sets the frame processing pool size. This is (roughly) the max number of + * {@link Frame} instances that can exist at a given moment in the frame pipeline, + * excluding frozen frames. + * + * Defaults to 2 - higher values will increase the memory usage with little benefit. + * Can be higher than 2 if {@link #setFrameProcessingExecutors(int)} is used. + * These values should be tuned together. 
We recommend setting a pool size that's equal to + * the number of executors plus 1, so that there's always a free Frame for the camera engine. + * + * Changing this value after camera initialization will have no effect. + * @param poolSize pool size + */ + public void setFrameProcessingPoolSize(int poolSize) { + mCameraEngine.setFrameProcessingPoolSize(poolSize); + } + + /** + * Returns the current frame processing pool size. + * @see #setFrameProcessingPoolSize(int) + * @return pool size + */ + public int getFrameProcessingPoolSize() { + return mCameraEngine.getFrameProcessingPoolSize(); + } + + /** + * Sets the thread pool size for frame processing. This means that if the processing rate + * is slower than the preview rate, you can set this value to something bigger than 1 + * to avoid losing frames. + * Defaults to 1 and this should be OK for most applications. + * + * Should be tuned depending on the task, the processor implementation, and along with + * {@link #setFrameProcessingPoolSize(int)}. We recommend choosing a pool size that is + * equal to the executors plus 1. + * @param executors thread count + */ + public void setFrameProcessingExecutors(int executors) { + if (executors < 1) { + throw new IllegalArgumentException("Need at least 1 executor, got " + executors); + } + mFrameProcessingExecutors = executors; + ThreadPoolExecutor executor = new ThreadPoolExecutor( + executors, + executors, + 4, + TimeUnit.SECONDS, + new LinkedBlockingQueue(), + new ThreadFactory() { + private final AtomicInteger mCount = new AtomicInteger(1); + @Override + public Thread newThread(@NonNull Runnable r) { + return new Thread(r, "FrameExecutor #" + mCount.getAndIncrement()); + } + } + ); + executor.allowCoreThreadTimeOut(true); + mFrameProcessingExecutor = executor; + } + + /** + * Returns the current executors count. + * @see #setFrameProcessingExecutors(int) + * @return thread count + */ + public int getFrameProcessingExecutors() { + return mFrameProcessingExecutors; + } + //endregion //region Overlays diff --git a/cameraview/src/main/java/com/otaliastudios/cameraview/engine/Camera1Engine.java b/cameraview/src/main/java/com/otaliastudios/cameraview/engine/Camera1Engine.java index cc668bce..02553017 100644 --- a/cameraview/src/main/java/com/otaliastudios/cameraview/engine/Camera1Engine.java +++ b/cameraview/src/main/java/com/otaliastudios/cameraview/engine/Camera1Engine.java @@ -757,8 +757,8 @@ public class Camera1Engine extends CameraBaseEngine implements @NonNull @Override - protected FrameManager instantiateFrameManager() { - return new ByteBufferFrameManager(2, this); + protected FrameManager instantiateFrameManager(int poolSize) { + return new ByteBufferFrameManager(poolSize, this); } @NonNull @@ -793,10 +793,12 @@ public class Camera1Engine extends CameraBaseEngine implements // Seen this happen in logs. 
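For context on how the two settings above are meant to be combined from application code, here is a minimal, illustrative sketch; the view id and the processor body are assumptions, not part of this change:

```java
// Configure frame processing before opening the camera: the javadoc above notes that
// changing the pool size after camera initialization has no effect.
CameraView camera = findViewById(R.id.camera); // hypothetical view id
camera.setFrameProcessingExecutors(2);         // two background processing threads
camera.setFrameProcessingPoolSize(3);          // recommended value: executors + 1
camera.addFrameProcessor(new FrameProcessor() {
    @Override
    public void process(@NonNull Frame frame) {
        // Do the work synchronously here. If all pooled frames are still in use
        // because processing is slow, new camera frames are dropped before dispatch.
    }
});
```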
return; } - Frame frame = getFrameManager().getFrame(data, - System.currentTimeMillis(), - getAngles().offset(Reference.SENSOR, Reference.OUTPUT, Axis.RELATIVE_TO_SENSOR)); - getCallback().dispatchFrame(frame); + int rotation = getAngles().offset(Reference.SENSOR, Reference.OUTPUT, + Axis.RELATIVE_TO_SENSOR); + Frame frame = getFrameManager().getFrame(data, System.currentTimeMillis(), rotation); + if (frame != null) { + getCallback().dispatchFrame(frame); + } } //endregion diff --git a/cameraview/src/main/java/com/otaliastudios/cameraview/engine/Camera2Engine.java b/cameraview/src/main/java/com/otaliastudios/cameraview/engine/Camera2Engine.java index bf64da89..a315d9e3 100644 --- a/cameraview/src/main/java/com/otaliastudios/cameraview/engine/Camera2Engine.java +++ b/cameraview/src/main/java/com/otaliastudios/cameraview/engine/Camera2Engine.java @@ -61,7 +61,6 @@ import com.otaliastudios.cameraview.frame.FrameManager; import com.otaliastudios.cameraview.frame.ImageFrameManager; import com.otaliastudios.cameraview.gesture.Gesture; import com.otaliastudios.cameraview.internal.utils.CropHelper; -import com.otaliastudios.cameraview.internal.utils.WorkerHandler; import com.otaliastudios.cameraview.picture.Full2PictureRecorder; import com.otaliastudios.cameraview.picture.Snapshot2PictureRecorder; import com.otaliastudios.cameraview.preview.GlCameraPreview; @@ -81,7 +80,6 @@ public class Camera2Engine extends CameraBaseEngine implements ImageReader.OnImageAvailableListener, ActionHolder { - private static final int FRAME_PROCESSING_POOL_SIZE = 2; private static final int FRAME_PROCESSING_FORMAT = ImageFormat.YUV_420_888; @VisibleForTesting static final long METER_TIMEOUT = 2500; @@ -541,12 +539,21 @@ public class Camera2Engine extends CameraBaseEngine implements // 4. FRAME PROCESSING if (hasFrameProcessors()) { mFrameProcessingSize = computeFrameProcessingSize(); + // Hard to write down why, but in Camera2 we need a number of Frames that's one less + // than the number of Images. If we let all Images be part of Frames, thus letting all + // Images be used by processor at any given moment, the Camera2 output breaks. + // In fact, if there are no Images available, the sensor BLOCKS until it finds one, + // which is a big issue because processor times become a bottleneck for the preview. + // This is a design flaw in the ImageReader / sensor implementation, as they should + // simply DROP frames written to the surface if there are no Images available. + // Since this is not how things work, we ensure that one Image is always available here. mFrameProcessingReader = ImageReader.newInstance( mFrameProcessingSize.getWidth(), mFrameProcessingSize.getHeight(), mFrameProcessingFormat, - getFrameProcessingPoolSize()); - mFrameProcessingReader.setOnImageAvailableListener(this, null); + getFrameProcessingPoolSize() + 1); + mFrameProcessingReader.setOnImageAvailableListener(this, + null); mFrameProcessingSurface = mFrameProcessingReader.getSurface(); outputSurfaces.add(mFrameProcessingSurface); } else { @@ -621,7 +628,20 @@ public class Camera2Engine extends CameraBaseEngine implements } }); } - return Tasks.forResult(null); + + // Wait for the first frame. 
+ final TaskCompletionSource task = new TaskCompletionSource<>(); + new BaseAction() { + @Override + public void onCaptureCompleted(@NonNull ActionHolder holder, + @NonNull CaptureRequest request, + @NonNull TotalCaptureResult result) { + super.onCaptureCompleted(holder, request, result); + setState(STATE_COMPLETED); + task.trySetResult(null); + } + }.start(this); + return task.getTask(); } //endregion @@ -1399,25 +1419,22 @@ public class Camera2Engine extends CameraBaseEngine implements //region Frame Processing - protected int getFrameProcessingPoolSize() { - return FRAME_PROCESSING_POOL_SIZE; - } - @NonNull @Override - protected FrameManager instantiateFrameManager() { - return new ImageFrameManager(getFrameProcessingPoolSize()); + protected FrameManager instantiateFrameManager(int poolSize) { + return new ImageFrameManager(poolSize); } + @EngineThread @Override public void onImageAvailable(ImageReader reader) { - LOG.v("onImageAvailable", "trying to acquire Image."); + LOG.v("onImageAvailable:", "trying to acquire Image."); Image image = null; try { image = reader.acquireLatestImage(); } catch (Exception ignore) { } if (image == null) { - LOG.w("onImageAvailable", "failed to acquire Image!"); + LOG.w("onImageAvailable:", "failed to acquire Image!"); } else if (getState() == CameraState.PREVIEW && !isChangingState()) { // After preview, the frame manager is correctly set up //noinspection unchecked @@ -1426,8 +1443,14 @@ public class Camera2Engine extends CameraBaseEngine implements getAngles().offset(Reference.SENSOR, Reference.OUTPUT, Axis.RELATIVE_TO_SENSOR)); - getCallback().dispatchFrame(frame); + if (frame != null) { + LOG.v("onImageAvailable:", "Image acquired, dispatching."); + getCallback().dispatchFrame(frame); + } else { + LOG.i("onImageAvailable:", "Image acquired, but no free frames. DROPPING."); + } } else { + LOG.i("onImageAvailable:", "Image acquired in wrong state. Closing it now."); image.close(); } } diff --git a/cameraview/src/main/java/com/otaliastudios/cameraview/engine/CameraBaseEngine.java b/cameraview/src/main/java/com/otaliastudios/cameraview/engine/CameraBaseEngine.java index e2cd5ae9..486fd51a 100644 --- a/cameraview/src/main/java/com/otaliastudios/cameraview/engine/CameraBaseEngine.java +++ b/cameraview/src/main/java/com/otaliastudios/cameraview/engine/CameraBaseEngine.java @@ -67,8 +67,8 @@ public abstract class CameraBaseEngine extends CameraEngine { @SuppressWarnings("WeakerAccess") protected boolean mPictureSnapshotMetering; @SuppressWarnings("WeakerAccess") protected float mPreviewFrameRate; - private final FrameManager mFrameManager; - private final Angles mAngles; + private FrameManager mFrameManager; + private final Angles mAngles = new Angles(); @Nullable private SizeSelector mPreviewStreamSizeSelector; private SizeSelector mPictureSizeSelector; private SizeSelector mVideoSizeSelector; @@ -84,6 +84,7 @@ public abstract class CameraBaseEngine extends CameraEngine { private int mSnapshotMaxHeight; // in REF_VIEW like SizeSelectors private int mFrameProcessingMaxWidth; // in REF_VIEW like SizeSelectors private int mFrameProcessingMaxHeight; // in REF_VIEW like SizeSelectors + private int mFrameProcessingPoolSize; private Overlay mOverlay; // Ops used for testing. 
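As a rough aid for the memory note in the pool size javadoc (higher values increase memory usage), the cost of a byte buffer pool can be estimated with the same math that `FrameManager.setUp()` uses further below; the preview size and pool value here are example numbers only:

```java
// Approximate memory held by a Camera1-style NV21 buffer pool: frameBytes * poolSize.
Size previewSize = new Size(1920, 1080);                          // example preview stream size
int bitsPerPixel = ImageFormat.getBitsPerPixel(ImageFormat.NV21); // 12 for NV21
long sizeInBits = previewSize.getWidth() * previewSize.getHeight() * (long) bitsPerPixel;
int frameBytes = (int) Math.ceil(sizeInBits / 8.0d);              // ~3.1 MB per buffer
int poolSize = 3;                                                 // e.g. 2 executors + 1
long poolBytes = (long) frameBytes * poolSize;                    // ~9.3 MB of reusable buffers
```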
@@ -107,17 +108,16 @@ public abstract class CameraBaseEngine extends CameraEngine { @SuppressWarnings("WeakerAccess") protected CameraBaseEngine(@NonNull Callback callback) { super(callback); - mFrameManager = instantiateFrameManager(); - mAngles = new Angles(); } /** * Called at construction time to get a frame manager that can later be * accessed through {@link #getFrameManager()}. + * @param poolSize pool size * @return a frame manager */ @NonNull - protected abstract FrameManager instantiateFrameManager(); + protected abstract FrameManager instantiateFrameManager(int poolSize); @NonNull @Override @@ -128,6 +128,9 @@ public abstract class CameraBaseEngine extends CameraEngine { @NonNull @Override public FrameManager getFrameManager() { + if (mFrameManager == null) { + mFrameManager = instantiateFrameManager(mFrameProcessingPoolSize); + } return mFrameManager; } @@ -290,6 +293,16 @@ public abstract class CameraBaseEngine extends CameraEngine { return mFrameProcessingFormat; } + @Override + public final void setFrameProcessingPoolSize(int poolSize) { + mFrameProcessingPoolSize = poolSize; + } + + @Override + public final int getFrameProcessingPoolSize() { + return mFrameProcessingPoolSize; + } + @Override public final void setAutoFocusResetDelay(long delayMillis) { mAutoFocusResetDelayMillis = delayMillis; diff --git a/cameraview/src/main/java/com/otaliastudios/cameraview/engine/CameraEngine.java b/cameraview/src/main/java/com/otaliastudios/cameraview/engine/CameraEngine.java index 837c7331..7fd3a062 100644 --- a/cameraview/src/main/java/com/otaliastudios/cameraview/engine/CameraEngine.java +++ b/cameraview/src/main/java/com/otaliastudios/cameraview/engine/CameraEngine.java @@ -650,6 +650,9 @@ public abstract class CameraEngine implements public abstract void setFrameProcessingFormat(int format); public abstract int getFrameProcessingFormat(); + public abstract void setFrameProcessingPoolSize(int poolSize); + public abstract int getFrameProcessingPoolSize(); + public abstract void setAutoFocusResetDelay(long delayMillis); public abstract long getAutoFocusResetDelay(); diff --git a/cameraview/src/main/java/com/otaliastudios/cameraview/engine/action/LogAction.java b/cameraview/src/main/java/com/otaliastudios/cameraview/engine/action/LogAction.java index 1cb53316..86f36c38 100644 --- a/cameraview/src/main/java/com/otaliastudios/cameraview/engine/action/LogAction.java +++ b/cameraview/src/main/java/com/otaliastudios/cameraview/engine/action/LogAction.java @@ -9,15 +9,13 @@ import androidx.annotation.NonNull; import androidx.annotation.RequiresApi; import com.otaliastudios.cameraview.CameraLogger; -import com.otaliastudios.cameraview.engine.Camera2Engine; -import com.otaliastudios.cameraview.engine.action.ActionHolder; -import com.otaliastudios.cameraview.engine.action.BaseAction; +import com.otaliastudios.cameraview.engine.CameraEngine; @RequiresApi(Build.VERSION_CODES.LOLLIPOP) public class LogAction extends BaseAction { private final static CameraLogger LOG - = CameraLogger.create(Camera2Engine.class.getSimpleName()); + = CameraLogger.create(CameraEngine.class.getSimpleName()); private String lastLog; diff --git a/cameraview/src/main/java/com/otaliastudios/cameraview/frame/FrameManager.java b/cameraview/src/main/java/com/otaliastudios/cameraview/frame/FrameManager.java index 65c095fe..31973b10 100644 --- a/cameraview/src/main/java/com/otaliastudios/cameraview/frame/FrameManager.java +++ b/cameraview/src/main/java/com/otaliastudios/cameraview/frame/FrameManager.java @@ -7,6 +7,7 @@ import 
com.otaliastudios.cameraview.CameraLogger; import com.otaliastudios.cameraview.size.Size; import androidx.annotation.NonNull; +import androidx.annotation.Nullable; import java.util.concurrent.LinkedBlockingQueue; @@ -92,6 +93,9 @@ public abstract class FrameManager { int bitsPerPixel = ImageFormat.getBitsPerPixel(format); long sizeInBits = size.getHeight() * size.getWidth() * bitsPerPixel; mFrameBytes = (int) Math.ceil(sizeInBits / 8.0d); + for (int i = 0; i < getPoolSize(); i++) { + mFrameQueue.offer(new Frame(this)); + } } /** @@ -108,18 +112,14 @@ public abstract class FrameManager { /** * Returns a new Frame for the given data. This must be called * - after {@link #setUp(int, Size)}, which sets the buffer size - * - after the byte buffer given by setUp() has been filled. - * If this is called X times in a row without releasing frames, it will allocate - * X frames and that's bad. Callers must wait for the preview buffer to be available. - * - * In Camera1, this is always respected thanks to its internals. + * - after the T data has been filled * * @param data data * @param time timestamp * @param rotation rotation * @return a new frame */ - @NonNull + @Nullable public Frame getFrame(@NonNull T data, long time, int rotation) { if (!isSetUp()) { throw new IllegalStateException("Can't call getFrame() after releasing " + @@ -129,12 +129,13 @@ public abstract class FrameManager { Frame frame = mFrameQueue.poll(); if (frame != null) { LOG.v("getFrame for time:", time, "RECYCLING."); + frame.setContent(data, time, rotation, mFrameSize, mFrameFormat); + return frame; } else { - LOG.v("getFrame for time:", time, "CREATING."); - frame = new Frame(this); + LOG.i("getFrame for time:", time, "NOT AVAILABLE."); + onFrameDataReleased(data, false); + return null; } - frame.setContent(data, time, rotation, mFrameSize, mFrameFormat); - return frame; } /** diff --git a/cameraview/src/main/res/values/attrs.xml b/cameraview/src/main/res/values/attrs.xml index ba905f08..1b92128d 100644 --- a/cameraview/src/main/res/values/attrs.xml +++ b/cameraview/src/main/res/values/attrs.xml @@ -28,6 +28,8 @@ + + diff --git a/docs/_posts/2018-12-20-frame-processing.md b/docs/_posts/2018-12-20-frame-processing.md index 6b221d26..3a8f89b3 100644 --- a/docs/_posts/2018-12-20-frame-processing.md +++ b/docs/_posts/2018-12-20-frame-processing.md @@ -41,6 +41,7 @@ apply new data to it. So: - you can do your job synchronously in the `process()` method. This is **recommended**. - if you must hold the `Frame` instance longer, use `frame = frame.freeze()` to get a frozen instance that will not be affected. This is **discouraged** because it requires copying the whole array. + Also, starting from `v2.5.0`, this is not allowed when Camera2 is used. ### Process synchronously @@ -48,8 +49,9 @@ Processing synchronously, for the duration of the `process()` method, is the rec processors, because it solves different issues: - avoids the need of calling `frame = frame.freeze()` which is a very expensive operation -- the engine will **automatically drop frames** if the `process()` method is busy, so you'll only receive frames that you can handle -- we have already allocated a thread for you, so there's no need to create another +- the engine will **automatically drop frames** if the `process()` method is busy, so you'll only + receive frames that you can handle +- we have already allocated background threads for you, so there's no need to create another Some frame consumers might have a built-in asynchronous behavior. 
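For reference, a minimal synchronous processor that handles both engines through `Frame.getDataClass()`; only the `Frame` and `FrameProcessor` APIs described on this page are assumed:

```java
cameraView.addFrameProcessor(new FrameProcessor() {
    @Override
    public void process(@NonNull Frame frame) {
        if (frame.getDataClass() == byte[].class) {
            // Camera1 engine: NV21 byte array.
            byte[] nv21 = frame.getData();
            // ... analyze nv21 using frame.getSize() and frame.getRotation() ...
        } else {
            // Camera2 engine: android.media.Image in YUV_420_888.
            Image image = frame.getData();
            // ... read the YUV planes; do not close the Image, the engine owns it ...
        }
        // No release() needed here: frames dispatched to processors are released
        // by the engine once all processors have run.
    }
});
```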
But you can still block the `process()` thread until the consumer has returned.
@@ -117,13 +119,41 @@ cameraView.setFrameProcessingFormat(ImageFormat.YUV_422_888);
With the Camera1 engine, the incoming format will always be `ImageFormat.NV21`.
You can check which formats are available for use through `CameraOptions.getSupportedFrameProcessingFormats()`.
+### Advanced: Thread Control
+
+Starting from `v2.5.1`, you can control the number of background threads that are allocated
+for frame processing work. This should further push you into performing processing actions
+synchronously, and can be useful if processing is very slow with respect to the preview frame
+rate, in order to avoid dropping too many frames.
+
+You can change the number of threads by calling `setFrameProcessingExecutors()`. Whenever you do,
+we recommend that you also change the frame processing pool size to a compatible value.
+The frame processing pool size is roughly the number of `Frame` instances that can exist at any
+given moment. We recommend that this value be set to the number of executors plus 1. For example:
+
+- Single threaded (default):
+
+```java
+cameraView.setFrameProcessingExecutors(1);
+cameraView.setFrameProcessingPoolSize(2);
+```
+
+- Two threads:
+
+```java
+cameraView.setFrameProcessingExecutors(2);
+cameraView.setFrameProcessingPoolSize(3);
+```
+
### XML Attributes
```xml
+    app:cameraFrameProcessingFormat="0x23"
+    app:cameraFrameProcessingPoolSize="2"
+    app:cameraFrameProcessingExecutors="1"/>
```
### Related APIs
@@ -135,16 +165,20 @@ You can check which formats are available for use through `CameraOptions.getSupp
|`camera.clearFrameProcessors()`|`-`|Removes all `FrameProcessor`s.|
|`camera.setFrameProcessingMaxWidth(int)`|`-`|Sets the max width for incoming frames.|
|`camera.setFrameProcessingMaxHeight(int)`|`-`|Sets the max height for incoming frames.|
-|`camera.getFrameProcessingMaxWidth()`|`int`|Gets the max width for incoming frames.|
-|`camera.getFrameProcessingMaxHeight()`|`int`|Gets the max height for incoming frames.|
+|`camera.getFrameProcessingMaxWidth()`|`int`|Returns the max width for incoming frames.|
+|`camera.getFrameProcessingMaxHeight()`|`int`|Returns the max height for incoming frames.|
|`camera.setFrameProcessingFormat(int)`|`-`|Sets the desired format for incoming frames. Should be one of the ImageFormat constants.|
-|`camera.getFrameProcessingFormat()`|`-`|Gets the format for incoming frames. One of the ImageFormat constants.|
+|`camera.getFrameProcessingFormat()`|`int`|Returns the format for incoming frames. One of the ImageFormat constants.|
+|`camera.setFrameProcessingPoolSize(int)`|`-`|Sets the frame pool size, roughly the number of Frames that can exist at any given moment. Defaults to 2, which fits all use cases unless you change the executors.|
+|`camera.getFrameProcessingPoolSize()`|`int`|Returns the frame pool size.|
+|`camera.setFrameProcessingExecutors(int)`|`-`|Sets the number of processing threads. Defaults to 1, but can be increased if your processing is slow and you are dropping too many frames. This should always be tuned together with the frame pool size.|
+|`camera.getFrameProcessingExecutors()`|`int`|Returns the number of processing threads.|
|`frame.getDataClass()`|`Class`|The class of the data returned by `getData()`.
Either `byte[]` or `android.media.Image`.| |`frame.getData()`|`T`|The current preview frame, in its original orientation.| |`frame.getTime()`|`long`|The preview timestamp, in `System.currentTimeMillis()` reference.| |`frame.getRotation()`|`int`|The rotation that should be applied to the byte array in order to see what the user sees.| |`frame.getSize()`|`Size`|The frame size, before any rotation is applied, to access data.| -|`frame.getFormat()`|`int`|The frame `ImageFormat`. This will always be `ImageFormat.NV21` for now.| +|`frame.getFormat()`|`int`|The frame `ImageFormat`. Defaults to `ImageFormat.NV21` for Camera1 and `ImageFormat.YUV_420_888` for Camera2.| |`frame.freeze()`|`Frame`|Clones this frame and makes it immutable. Can be expensive because requires copying the byte array.| |`frame.release()`|`-`|Disposes the content of this frame. Should be used on frozen frames to release memory.| diff --git a/docs/css/main.css b/docs/css/main.css index 328adbb7..fbb1438f 100644 --- a/docs/css/main.css +++ b/docs/css/main.css @@ -6,7 +6,7 @@ pre, code, pre code { border: none; border-radius: 0; background-color: #f9f9f9; - font-size: 0.85em; + font-size: 1em; } .highlight {
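Finally, as a companion to the `frame.freeze()` and `frame.release()` rows documented above, a hedged sketch of holding a frame past `process()`; per the earlier note this is only possible with Camera1 (`byte[]`) frames from `v2.5.0` on, and the single-thread executor here is just an example of a long-lived, app-owned consumer:

```java
final ExecutorService consumer = Executors.newSingleThreadExecutor(); // java.util.concurrent
cameraView.addFrameProcessor(new FrameProcessor() {
    @Override
    public void process(@NonNull Frame frame) {
        if (frame.getDataClass() != byte[].class) return; // freezing Camera2 frames is not allowed
        final Frame frozen = frame.freeze();              // expensive: copies the byte array
        consumer.execute(new Runnable() {
            @Override
            public void run() {
                // ... long-running work on frozen.getData() ...
                frozen.release();                         // free the copied data when done
            }
        });
    }
});
```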