diff --git a/README.md b/README.md index c3150bdc..d5190846 100644 --- a/README.md +++ b/README.md @@ -136,6 +136,8 @@ Using CameraView is extremely simple: app:cameraAutoFocusMarker="@string/cameraview_default_autofocus_marker" app:cameraUseDeviceOrientation="true|false" app:cameraFilter="@string/real_time_filter" + app:cameraPictureMetering="true|false" + app:cameraPictureSnapshotMetering="false|true" app:cameraExperimental="false|true"> diff --git a/cameraview/build.gradle b/cameraview/build.gradle index 93e82cf4..4d47c6cf 100644 --- a/cameraview/build.gradle +++ b/cameraview/build.gradle @@ -240,9 +240,12 @@ task mergedCoverageReport(type: JacocoReport) { classFilter.add('**/com/otaliastudios/cameraview/engine/CameraEngine**.*') classFilter.add('**/com/otaliastudios/cameraview/engine/Camera1Engine**.*') classFilter.add('**/com/otaliastudios/cameraview/engine/Camera2Engine**.*') + classFilter.add('**/com/otaliastudios/cameraview/engine/action/**.*') + classFilter.add('**/com/otaliastudios/cameraview/engine/lock/**.*') + classFilter.add('**/com/otaliastudios/cameraview/engine/meter/**.*') classFilter.add('**/com/otaliastudios/cameraview/picture/**.*') classFilter.add('**/com/otaliastudios/cameraview/video/**.*') - // TODO these below could be testable ALSO outside of the integration tests + // TODO these below could be easily testable ALSO outside of the integration tests classFilter.add('**/com/otaliastudios/cameraview/video/encoding/**.*') } // We don't test OpenGL filters. diff --git a/cameraview/src/androidTest/java/com/otaliastudios/cameraview/CameraViewTest.java b/cameraview/src/androidTest/java/com/otaliastudios/cameraview/CameraViewTest.java index 8bf95be9..3d7207dd 100644 --- a/cameraview/src/androidTest/java/com/otaliastudios/cameraview/CameraViewTest.java +++ b/cameraview/src/androidTest/java/com/otaliastudios/cameraview/CameraViewTest.java @@ -171,7 +171,10 @@ public class CameraViewTest extends BaseTest { // Self managed GestureParser gestures = new GestureParser(empty); assertEquals(cameraView.getPlaySounds(), CameraView.DEFAULT_PLAY_SOUNDS); + assertEquals(cameraView.getAutoFocusResetDelay(), CameraView.DEFAULT_AUTOFOCUS_RESET_DELAY_MILLIS); assertEquals(cameraView.getUseDeviceOrientation(), CameraView.DEFAULT_USE_DEVICE_ORIENTATION); + assertEquals(cameraView.getPictureMetering(), CameraView.DEFAULT_PICTURE_METERING); + assertEquals(cameraView.getPictureSnapshotMetering(), CameraView.DEFAULT_PICTURE_SNAPSHOT_METERING); assertEquals(cameraView.getGestureAction(Gesture.TAP), gestures.getTapAction()); assertEquals(cameraView.getGestureAction(Gesture.LONG_TAP), gestures.getLongTapAction()); assertEquals(cameraView.getGestureAction(Gesture.PINCH), gestures.getPinchAction()); @@ -649,6 +652,22 @@ public class CameraViewTest extends BaseTest { assertFalse(cameraView.getUseDeviceOrientation()); } + @Test + public void testSetPictureMetering() { + cameraView.setPictureMetering(true); + assertTrue(cameraView.getPictureMetering()); + cameraView.setPictureMetering(false); + assertFalse(cameraView.getPictureMetering()); + } + + @Test + public void testSetPictureSnapshotMetering() { + cameraView.setPictureSnapshotMetering(true); + assertTrue(cameraView.getPictureSnapshotMetering()); + cameraView.setPictureSnapshotMetering(false); + assertFalse(cameraView.getPictureSnapshotMetering()); + } + @Test public void testSetFlash() { cameraView.set(Flash.TORCH); diff --git a/cameraview/src/androidTest/java/com/otaliastudios/cameraview/engine/CameraIntegration2Test.java 
b/cameraview/src/androidTest/java/com/otaliastudios/cameraview/engine/CameraIntegration2Test.java index 1995eeee..53eb9aa9 100644 --- a/cameraview/src/androidTest/java/com/otaliastudios/cameraview/engine/CameraIntegration2Test.java +++ b/cameraview/src/androidTest/java/com/otaliastudios/cameraview/engine/CameraIntegration2Test.java @@ -1,7 +1,12 @@ package com.otaliastudios.cameraview.engine; +import android.hardware.camera2.CaptureRequest; +import android.hardware.camera2.TotalCaptureResult; + import com.otaliastudios.cameraview.DoNotRunOnTravis; import com.otaliastudios.cameraview.controls.Engine; +import com.otaliastudios.cameraview.engine.action.ActionHolder; +import com.otaliastudios.cameraview.engine.action.BaseAction; import org.junit.Ignore; import org.junit.Test; @@ -11,6 +16,8 @@ import androidx.annotation.NonNull; import androidx.test.ext.junit.runners.AndroidJUnit4; import androidx.test.filters.LargeTest; +import java.util.concurrent.CountDownLatch; + /** * These tests work great on real devices, and are the only way to test actual CameraEngine * implementation - we really need to open the camera device. @@ -29,7 +36,23 @@ public class CameraIntegration2Test extends CameraIntegrationTest { } @Override - public void testFrameProcessing_afterVideo() throws Exception { - super.testFrameProcessing_afterVideo(); + protected void onOpenSync() { + super.onOpenSync(); + // Extra wait for the first frame to be dispatched. + // This is because various classes require getLastResult to be non-null + // and that's typically the case in a real app. + Camera2Engine engine = (Camera2Engine) controller; + final CountDownLatch latch = new CountDownLatch(1); + new BaseAction() { + @Override + public void onCaptureCompleted(@NonNull ActionHolder holder, + @NonNull CaptureRequest request, + @NonNull TotalCaptureResult result) { + super.onCaptureCompleted(holder, request, result); + latch.countDown(); + setState(STATE_COMPLETED); + } + }.start(engine); + try { latch.await(); } catch (InterruptedException ignore) {} } } diff --git a/cameraview/src/androidTest/java/com/otaliastudios/cameraview/engine/CameraIntegrationTest.java b/cameraview/src/androidTest/java/com/otaliastudios/cameraview/engine/CameraIntegrationTest.java index 9f95dfe4..f6f80e6d 100644 --- a/cameraview/src/androidTest/java/com/otaliastudios/cameraview/engine/CameraIntegrationTest.java +++ b/cameraview/src/androidTest/java/com/otaliastudios/cameraview/engine/CameraIntegrationTest.java @@ -71,7 +71,7 @@ public abstract class CameraIntegrationTest extends BaseTest { public ActivityTestRule rule = new ActivityTestRule<>(TestActivity.class); private CameraView camera; - private CameraEngine controller; + protected CameraEngine controller; private CameraListener listener; private Op uiExceptionOp; @@ -136,7 +136,6 @@ public abstract class CameraIntegrationTest extends BaseTest { } } - @SuppressWarnings("StatementWithEmptyBody") private CameraOptions openSync(boolean expectSuccess) { camera.open(); final Op open = new Op<>(true); @@ -144,17 +143,22 @@ public abstract class CameraIntegrationTest extends BaseTest { CameraOptions result = open.await(DELAY); if (expectSuccess) { assertNotNull("Can open", result); - // Extra wait for the bind and preview state, so we run tests in a fully operational - // state. If we didn't do so, we could have null values, for example, in getPictureSize - // or in getSnapshotSize. 
- while (controller.getBindState() != CameraEngine.STATE_STARTED) {} - while (controller.getPreviewState() != CameraEngine.STATE_STARTED) {} + onOpenSync(); } else { assertNull("Should not open", result); } return result; } + @SuppressWarnings("StatementWithEmptyBody") + protected void onOpenSync() { + // Extra wait for the bind and preview state, so we run tests in a fully operational + // state. If we didn't do so, we could have null values, for example, in getPictureSize + // or in getSnapshotSize. + while (controller.getBindState() != CameraEngine.STATE_STARTED) {} + while (controller.getPreviewState() != CameraEngine.STATE_STARTED) {} + } + private void closeSync(boolean expectSuccess) { camera.close(); final Op close = new Op<>(true); diff --git a/cameraview/src/androidTest/java/com/otaliastudios/cameraview/engine/MockCameraEngine.java b/cameraview/src/androidTest/java/com/otaliastudios/cameraview/engine/MockCameraEngine.java index 3b320da7..12dcd5e4 100644 --- a/cameraview/src/androidTest/java/com/otaliastudios/cameraview/engine/MockCameraEngine.java +++ b/cameraview/src/androidTest/java/com/otaliastudios/cameraview/engine/MockCameraEngine.java @@ -123,12 +123,12 @@ public class MockCameraEngine extends CameraEngine { } @Override - protected void onTakePicture(@NonNull PictureResult.Stub stub) { + protected void onTakePicture(@NonNull PictureResult.Stub stub, boolean doMetering) { } @Override - protected void onTakePictureSnapshot(@NonNull PictureResult.Stub stub, @NonNull AspectRatio outputRatio) { + protected void onTakePictureSnapshot(@NonNull PictureResult.Stub stub, @NonNull AspectRatio outputRatio, boolean doMetering) { } diff --git a/cameraview/src/main/java/com/otaliastudios/cameraview/CameraListener.java b/cameraview/src/main/java/com/otaliastudios/cameraview/CameraListener.java index 0a1f774d..fcc76515 100644 --- a/cameraview/src/main/java/com/otaliastudios/cameraview/CameraListener.java +++ b/cameraview/src/main/java/com/otaliastudios/cameraview/CameraListener.java @@ -81,8 +81,9 @@ public abstract class CameraListener { /** - * Notifies that user interacted with the screen and started focus with a gesture, - * and the autofocus is trying to focus around that area. This can be used to draw things on screen. + * Notifies that user interacted with the screen and started metering with a gesture, + * and touch metering routine is trying to focus around that area. + * This callback can be used to draw things on screen. * Can also be triggered by {@link CameraView#startAutoFocus(float, float)}. * * @param point coordinates with respect to CameraView.getWidth() and CameraView.getHeight() @@ -92,12 +93,12 @@ public abstract class CameraListener { /** - * Notifies that a gesture focus event just ended, and the camera converged - * to a new focus (and possibly exposure and white balance). + * Notifies that a touch metering event just ended, and the camera converged + * to a new focus, exposure and possibly white balance. * This might succeed or not. * Can also be triggered by {@link CameraView#startAutoFocus(float, float)}. * - * @param successful whether camera succeeded + * @param successful whether metering succeeded * @param point coordinates with respect to CameraView.getWidth() and CameraView.getHeight() */ @UiThread @@ -105,7 +106,7 @@ public abstract class CameraListener { /** - * Noitifies that a finger gesture just caused the camera zoom + * Notifies that a finger gesture just caused the camera zoom * to be changed. This can be used to draw, for example, a seek bar. 
* * @param newValue the new zoom value diff --git a/cameraview/src/main/java/com/otaliastudios/cameraview/CameraOptions.java b/cameraview/src/main/java/com/otaliastudios/cameraview/CameraOptions.java index 1e50ae4b..cde2f9b9 100644 --- a/cameraview/src/main/java/com/otaliastudios/cameraview/CameraOptions.java +++ b/cameraview/src/main/java/com/otaliastudios/cameraview/CameraOptions.java @@ -427,7 +427,7 @@ public class CameraOptions { /** - * Whether auto focus (metering with respect to a specific region of the screen) is + * Whether touch metering (metering with respect to a specific region of the screen) is * supported. If it is, you can map gestures to {@link GestureAction#AUTO_FOCUS} * and metering will change on tap. * diff --git a/cameraview/src/main/java/com/otaliastudios/cameraview/CameraView.java b/cameraview/src/main/java/com/otaliastudios/cameraview/CameraView.java index adbda849..b21cfd58 100644 --- a/cameraview/src/main/java/com/otaliastudios/cameraview/CameraView.java +++ b/cameraview/src/main/java/com/otaliastudios/cameraview/CameraView.java @@ -107,6 +107,8 @@ public class CameraView extends FrameLayout implements LifecycleObserver { final static long DEFAULT_AUTOFOCUS_RESET_DELAY_MILLIS = 3000; final static boolean DEFAULT_PLAY_SOUNDS = true; final static boolean DEFAULT_USE_DEVICE_ORIENTATION = true; + final static boolean DEFAULT_PICTURE_METERING = true; + final static boolean DEFAULT_PICTURE_SNAPSHOT_METERING = false; // Self managed parameters private boolean mPlaySounds; @@ -182,6 +184,8 @@ public class CameraView extends FrameLayout implements LifecycleObserver { int videoBitRate = a.getInteger(R.styleable.CameraView_cameraVideoBitRate, 0); int audioBitRate = a.getInteger(R.styleable.CameraView_cameraAudioBitRate, 0); long autoFocusResetDelay = (long) a.getInteger(R.styleable.CameraView_cameraAutoFocusResetDelay, (int) DEFAULT_AUTOFOCUS_RESET_DELAY_MILLIS); + boolean pictureMetering = a.getBoolean(R.styleable.CameraView_cameraPictureMetering, DEFAULT_PICTURE_METERING); + boolean pictureSnapshotMetering = a.getBoolean(R.styleable.CameraView_cameraPictureSnapshotMetering, DEFAULT_PICTURE_SNAPSHOT_METERING); // Size selectors and gestures SizeSelectorParser sizeSelectors = new SizeSelectorParser(a); @@ -228,6 +232,8 @@ public class CameraView extends FrameLayout implements LifecycleObserver { setAudio(controls.getAudio()); setAudioBitRate(audioBitRate); setPictureSize(sizeSelectors.getPictureSizeSelector()); + setPictureMetering(pictureMetering); + setPictureSnapshotMetering(pictureSnapshotMetering); setVideoSize(sizeSelectors.getVideoSizeSelector()); setVideoCodec(controls.getVideoCodec()); setVideoMaxSize(videoMaxSize); @@ -280,7 +286,6 @@ public class CameraView extends FrameLayout implements LifecycleObserver { } } - /** * Instantiates the camera engine. * @@ -547,7 +552,6 @@ public class CameraView extends FrameLayout implements LifecycleObserver { return false; } - /** * Clears any action mapped to the given gesture. * @param gesture which gesture to clear @@ -556,7 +560,6 @@ public class CameraView extends FrameLayout implements LifecycleObserver { mapGesture(gesture, GestureAction.NONE); } - /** * Returns the action currently mapped to the given gesture. * @@ -569,13 +572,11 @@ public class CameraView extends FrameLayout implements LifecycleObserver { return mGestureMap.get(gesture); } - @Override public boolean onInterceptTouchEvent(MotionEvent ev) { return true; // Steal our own events. 
} - @SuppressLint("ClickableViewAccessibility") @Override public boolean onTouchEvent(MotionEvent event) { @@ -598,7 +599,6 @@ public class CameraView extends FrameLayout implements LifecycleObserver { return true; } - // Some gesture layout detected a gesture. It's not known at this moment: // (1) if it was mapped to some action (we check here) // (2) if it's supported by the camera (CameraEngine checks) @@ -691,7 +691,6 @@ public class CameraView extends FrameLayout implements LifecycleObserver { mLifecycle.addObserver(this); } - /** * Starts the camera preview, if not started already. * This should be called onResume(), or when you are ready with permissions. @@ -708,7 +707,6 @@ public class CameraView extends FrameLayout implements LifecycleObserver { } } - /** * Checks that we have appropriate permissions. * This means checking that we have audio permissions if audio = Audio.ON. @@ -736,7 +734,6 @@ public class CameraView extends FrameLayout implements LifecycleObserver { return true; } - /** * If audio is on we will ask for RECORD_AUDIO permission. * If the developer did not add this to its manifest, throw and fire warnings. @@ -760,7 +757,6 @@ public class CameraView extends FrameLayout implements LifecycleObserver { } } - /** * Stops the current preview, if any was started. * This should be called onPause(). @@ -772,7 +768,6 @@ public class CameraView extends FrameLayout implements LifecycleObserver { if (mCameraPreview != null) mCameraPreview.onPause(); } - /** * Destroys this instance, releasing immediately * the camera resource. @@ -865,7 +860,6 @@ public class CameraView extends FrameLayout implements LifecycleObserver { } } - /** * Controls the preview engine. Should only be called * if this CameraView was never added to any window @@ -959,7 +953,6 @@ public class CameraView extends FrameLayout implements LifecycleObserver { return mCameraEngine.getCameraOptions(); } - /** * Sets exposure adjustment, in EV stops. A positive value will mean brighter picture. * @@ -985,7 +978,6 @@ public class CameraView extends FrameLayout implements LifecycleObserver { } } - /** * Returns the current exposure correction value, typically 0 * at start-up. @@ -995,7 +987,6 @@ public class CameraView extends FrameLayout implements LifecycleObserver { return mCameraEngine.getExposureCorrectionValue(); } - /** * Sets a zoom value. This is not guaranteed to be supported by the current device, * but you can take a look at {@link CameraOptions#isZoomSupported()}. @@ -1012,7 +1003,6 @@ public class CameraView extends FrameLayout implements LifecycleObserver { mCameraEngine.setZoom(zoom, null, false); } - /** * Returns the current zoom value, something between 0 and 1. * @return the current zoom value @@ -1021,7 +1011,6 @@ public class CameraView extends FrameLayout implements LifecycleObserver { return mCameraEngine.getZoomValue(); } - /** * Controls the grids to be drawn over the current layout. * @@ -1036,7 +1025,6 @@ public class CameraView extends FrameLayout implements LifecycleObserver { mGridLinesLayout.setGridMode(gridMode); } - /** * Gets the current grid mode. * @return the current grid mode @@ -1046,7 +1034,6 @@ public class CameraView extends FrameLayout implements LifecycleObserver { return mGridLinesLayout.getGridMode(); } - /** * Controls the color of the grid lines that will be drawn * over the current layout. @@ -1077,7 +1064,6 @@ public class CameraView extends FrameLayout implements LifecycleObserver { mCameraEngine.setHdr(hdr); } - /** * Gets the current hdr value. 
* @return the current hdr value @@ -1087,7 +1073,6 @@ public class CameraView extends FrameLayout implements LifecycleObserver { return mCameraEngine.getHdr(); } - /** * Set location coordinates to be found later in the EXIF header * @@ -1103,7 +1088,6 @@ public class CameraView extends FrameLayout implements LifecycleObserver { mCameraEngine.setLocation(location); } - /** * Set location values to be found later in the EXIF header * @@ -1113,7 +1097,6 @@ public class CameraView extends FrameLayout implements LifecycleObserver { mCameraEngine.setLocation(location); } - /** * Retrieves the location previously applied with setLocation(). * @@ -1124,7 +1107,6 @@ public class CameraView extends FrameLayout implements LifecycleObserver { return mCameraEngine.getLocation(); } - /** * Sets desired white balance to current camera session. * @@ -1140,7 +1122,6 @@ public class CameraView extends FrameLayout implements LifecycleObserver { mCameraEngine.setWhiteBalance(whiteBalance); } - /** * Returns the current white balance behavior. * @return white balance value. @@ -1150,7 +1131,6 @@ public class CameraView extends FrameLayout implements LifecycleObserver { return mCameraEngine.getWhiteBalance(); } - /** * Sets which camera sensor should be used. * @@ -1163,7 +1143,6 @@ public class CameraView extends FrameLayout implements LifecycleObserver { mCameraEngine.setFacing(facing); } - /** * Gets the facing camera currently being used. * @return a facing value. @@ -1173,7 +1152,6 @@ public class CameraView extends FrameLayout implements LifecycleObserver { return mCameraEngine.getFacing(); } - /** * Toggles the facing value between {@link Facing#BACK} * and {@link Facing#FRONT}. @@ -1195,7 +1173,6 @@ public class CameraView extends FrameLayout implements LifecycleObserver { return mCameraEngine.getFacing(); } - /** * Sets the flash mode. * @@ -1210,7 +1187,6 @@ public class CameraView extends FrameLayout implements LifecycleObserver { mCameraEngine.setFlash(flash); } - /** * Gets the current flash mode. * @return a flash mode @@ -1220,7 +1196,6 @@ public class CameraView extends FrameLayout implements LifecycleObserver { return mCameraEngine.getFlash(); } - /** * Controls the audio mode. * @@ -1250,7 +1225,6 @@ public class CameraView extends FrameLayout implements LifecycleObserver { } } - /** * Gets the current audio value. * @return the current audio value @@ -1260,9 +1234,8 @@ public class CameraView extends FrameLayout implements LifecycleObserver { return mCameraEngine.getAudio(); } - /** - * Sets an {@link AutoFocusMarker} to be notified of autofocus start, end and fail events + * Sets an {@link AutoFocusMarker} to be notified of metering start, end and fail events * so that it can draw elements on screen. * * @param autoFocusMarker the marker, or null @@ -1272,29 +1245,27 @@ public class CameraView extends FrameLayout implements LifecycleObserver { mMarkerLayout.onMarker(MarkerLayout.TYPE_AUTOFOCUS, autoFocusMarker); } - /** - * Sets the current delay in milliseconds to reset the focus after an autofocus process. + * Sets the current delay in milliseconds to reset the focus after a metering event. * - * @param delayMillis desired delay (in milliseconds). If the delay + * @param delayMillis desired delay (in milliseconds). If the delay * is less than or equal to 0 or equal to Long.MAX_VALUE, - * the autofocus will not be reset. + * the values will not be reset. 
*/ public void setAutoFocusResetDelay(long delayMillis) { mCameraEngine.setAutoFocusResetDelay(delayMillis); } - /** - * Returns the current delay in milliseconds to reset the focus after an autofocus process. - * @return the current autofocus reset delay in milliseconds. + * Returns the current delay in milliseconds to reset the focus after a metering event. + * + * @return the current reset delay in milliseconds */ @SuppressWarnings("unused") public long getAutoFocusResetDelay() { return mCameraEngine.getAutoFocusResetDelay(); } - /** - * Starts an autofocus process at the given coordinates, with respect + * Starts a 3A touch metering process at the given coordinates, with respect * to the view width and height. * * @param x should be between 0 and getWidth() @@ -1306,7 +1277,6 @@ public class CameraView extends FrameLayout implements LifecycleObserver { mCameraEngine.startAutoFocus(null, new PointF(x, y)); } - /** * ADVANCED FEATURE - sets a size selector for the preview stream. * The {@link SizeSelector} will be invoked with the list of available sizes, and the first @@ -1328,7 +1298,6 @@ public class CameraView extends FrameLayout implements LifecycleObserver { mCameraEngine.setPreviewStreamSizeSelector(selector); } - /** * Set the current session type to either picture or video. * @@ -1341,7 +1310,6 @@ public class CameraView extends FrameLayout implements LifecycleObserver { mCameraEngine.setMode(mode); } - /** * Gets the current mode. * @return the current mode @@ -1351,7 +1319,6 @@ public class CameraView extends FrameLayout implements LifecycleObserver { return mCameraEngine.getMode(); } - /** * Sets a capture size selector for picture mode. * The {@link SizeSelector} will be invoked with the list of available sizes, and the first @@ -1364,6 +1331,61 @@ public class CameraView extends FrameLayout implements LifecycleObserver { mCameraEngine.setPictureSizeSelector(selector); } + /** + * Whether the engine should perform a metering sequence before taking pictures requested + * with {@link #takePicture()}. A metering sequence includes adjusting focus, exposure + * and white balance to ensure a good quality of the result. + * + * When this parameter is true, the quality of the picture increases, but the latency + * increases as well. Defaults to true. + * + * This is a CAMERA2 only API. On CAMERA1, picture metering is always enabled. + * + * @see #setPictureSnapshotMetering(boolean) + * @param enable true to enable + */ + public void setPictureMetering(boolean enable) { + mCameraEngine.setPictureMetering(enable); + } + + /** + * Whether the engine should perform a metering sequence before taking pictures requested + * with {@link #takePicture()}. See {@link #setPictureMetering(boolean)}. + * + * @see #setPictureMetering(boolean) + * @return true if picture metering is enabled + */ + public boolean getPictureMetering() { + return mCameraEngine.getPictureMetering(); + } + + /** + * Whether the engine should perform a metering sequence before taking pictures requested + * with {@link #takePictureSnapshot()}. A metering sequence includes adjusting focus, + * exposure and white balance to ensure a good quality of the result. + * + * When this parameter is true, the quality of the picture increases, but the latency + * increases as well. To keep snapshots fast, this defaults to false. + * + * This is a CAMERA2 only API. On CAMERA1, picture snapshot metering is always disabled. 
+ * + * @see #setPictureMetering(boolean) + * @param enable true to enable + */ + public void setPictureSnapshotMetering(boolean enable) { + mCameraEngine.setPictureSnapshotMetering(enable); + } + + /** + * Whether the engine should perform a metering sequence before taking pictures requested + * with {@link #takePictureSnapshot()}. See {@link #setPictureSnapshotMetering(boolean)}. + * + * @see #setPictureSnapshotMetering(boolean) + * @return true if picture metering is enabled + */ + public boolean getPictureSnapshotMetering() { + return mCameraEngine.getPictureSnapshotMetering(); + } /** * Sets a capture size selector for video mode. @@ -1425,7 +1447,6 @@ public class CameraView extends FrameLayout implements LifecycleObserver { mListeners.add(cameraListener); } - /** * Remove a {@link CameraListener} that was previously registered. * @@ -1435,7 +1456,6 @@ public class CameraView extends FrameLayout implements LifecycleObserver { mListeners.remove(cameraListener); } - /** * Clears the list of {@link CameraListener} that are registered * to camera events. @@ -1444,7 +1464,6 @@ public class CameraView extends FrameLayout implements LifecycleObserver { mListeners.clear(); } - /** * Adds a {@link FrameProcessor} instance to be notified of * new frames in the preview stream. @@ -1460,7 +1479,6 @@ public class CameraView extends FrameLayout implements LifecycleObserver { } } - /** * Remove a {@link FrameProcessor} that was previously registered. * @@ -1475,7 +1493,6 @@ public class CameraView extends FrameLayout implements LifecycleObserver { } } - /** * Clears the list of {@link FrameProcessor} that have been registered * to preview frames. @@ -1488,7 +1505,6 @@ public class CameraView extends FrameLayout implements LifecycleObserver { } } - /** * Asks the camera to capture an image of the current scene. * This will trigger {@link CameraListener#onPictureTaken(PictureResult)} if a listener @@ -1504,7 +1520,6 @@ public class CameraView extends FrameLayout implements LifecycleObserver { mCameraEngine.takePicture(stub); } - /** * Asks the camera to capture a snapshot of the current preview. * This eventually triggers {@link CameraListener#onPictureTaken(PictureResult)} if a listener @@ -1520,7 +1535,6 @@ public class CameraView extends FrameLayout implements LifecycleObserver { mCameraEngine.takePictureSnapshot(stub); } - /** * Starts recording a video. Video will be written to the given file, * so callers should ensure they have appropriate permissions to write to the file. @@ -1560,7 +1574,6 @@ public class CameraView extends FrameLayout implements LifecycleObserver { }); } - /** * Starts recording a video. Video will be written to the given file, * so callers should ensure they have appropriate permissions to write to the file. @@ -1628,10 +1641,8 @@ public class CameraView extends FrameLayout implements LifecycleObserver { takeVideoSnapshot(file); } - // TODO: pauseVideo and resumeVideo? There is mediarecorder.pause(), but API 24... - /** * Stops capturing video or video snapshots being recorded, if there was any. * This will fire {@link CameraListener#onVideoTaken(VideoResult)}. 
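For reference, a minimal usage sketch of the new picture metering controls added above. The Activity, layout and view id are illustrative assumptions, not part of this patch; only the CameraView setters, the defaults and the XML attributes come from the change itself.

    import android.os.Bundle;
    import androidx.appcompat.app.AppCompatActivity;
    import com.otaliastudios.cameraview.CameraView;

    public class PictureMeteringDemoActivity extends AppCompatActivity {
        @Override
        protected void onCreate(Bundle savedInstanceState) {
            super.onCreate(savedInstanceState);
            // Assumed layout declaring a <com.otaliastudios.cameraview.CameraView>,
            // optionally using app:cameraPictureMetering / app:cameraPictureSnapshotMetering.
            setContentView(R.layout.activity_picture_metering_demo);
            CameraView camera = findViewById(R.id.camera);
            camera.setLifecycleOwner(this);

            // Full pictures meter by default (DEFAULT_PICTURE_METERING = true).
            // Disabling trades some quality for lower shutter lag on Camera2.
            camera.setPictureMetering(false);

            // Snapshots skip metering by default (DEFAULT_PICTURE_SNAPSHOT_METERING = false).
            // Enabling runs the 3A sequence first, at the cost of extra latency.
            camera.setPictureSnapshotMetering(true);
        }
    }

On Camera1 both flags are ignored: full pictures always meter and snapshots never do, as the javadocs above note.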
@@ -1699,7 +1710,6 @@ public class CameraView extends FrameLayout implements LifecycleObserver { } } - /** * Returns the size used for pictures taken with {@link #takePicture()}, * or null if it hasn't been computed (for example if the surface is not ready), @@ -1714,7 +1724,6 @@ public class CameraView extends FrameLayout implements LifecycleObserver { return mCameraEngine.getPictureSize(Reference.OUTPUT); } - /** * Returns the size used for videos taken with {@link #takeVideo(File)}, * or null if it hasn't been computed (for example if the surface is not ready), @@ -1729,7 +1738,6 @@ public class CameraView extends FrameLayout implements LifecycleObserver { return mCameraEngine.getVideoSize(Reference.OUTPUT); } - // If we end up here, we're in M. @TargetApi(Build.VERSION_CODES.M) private void requestPermissions(boolean requestCamera, boolean requestAudio) { @@ -1751,7 +1759,6 @@ public class CameraView extends FrameLayout implements LifecycleObserver { } } - @SuppressLint("NewApi") private void playSound(int soundType) { if (mPlaySounds) { @@ -1760,7 +1767,6 @@ public class CameraView extends FrameLayout implements LifecycleObserver { } } - /** * Controls whether CameraView should play sound effects on certain * events (picture taken, focus complete). Note that: @@ -1774,7 +1780,6 @@ public class CameraView extends FrameLayout implements LifecycleObserver { mCameraEngine.setPlaySounds(playSounds); } - /** * Gets the current sound effect behavior. * @@ -1821,7 +1826,6 @@ public class CameraView extends FrameLayout implements LifecycleObserver { mCameraEngine.setVideoCodec(codec); } - /** * Gets the current encoder for video recordings. * @return the current video codec @@ -1831,7 +1835,6 @@ public class CameraView extends FrameLayout implements LifecycleObserver { return mCameraEngine.getVideoCodec(); } - /** * Sets the maximum size in bytes for recorded video files. * Once this size is reached, the recording will automatically stop. @@ -1843,7 +1846,6 @@ public class CameraView extends FrameLayout implements LifecycleObserver { mCameraEngine.setVideoMaxSize(videoMaxSizeInBytes); } - /** * Returns the maximum size in bytes for recorded video files, or 0 * if no size was set. @@ -1855,7 +1857,6 @@ public class CameraView extends FrameLayout implements LifecycleObserver { return mCameraEngine.getVideoMaxSize(); } - /** * Sets the maximum duration in milliseconds for video recordings. * Once this duration is reached, the recording will automatically stop. @@ -1867,7 +1868,6 @@ public class CameraView extends FrameLayout implements LifecycleObserver { mCameraEngine.setVideoMaxDuration(videoMaxDurationMillis); } - /** * Returns the maximum duration in milliseconds for video recordings, or 0 * if no limit was set. 
@@ -1879,7 +1879,6 @@ public class CameraView extends FrameLayout implements LifecycleObserver { return mCameraEngine.getVideoMaxDuration(); } - /** * Returns true if the camera is currently recording a video * @return boolean indicating if the camera is recording a video @@ -1888,7 +1887,6 @@ public class CameraView extends FrameLayout implements LifecycleObserver { return mCameraEngine.isTakingVideo(); } - /** * Returns true if the camera is currently capturing a picture * @return boolean indicating if the camera is capturing a picture diff --git a/cameraview/src/main/java/com/otaliastudios/cameraview/engine/Camera1Engine.java b/cameraview/src/main/java/com/otaliastudios/cameraview/engine/Camera1Engine.java index 7a181fd2..5d43c005 100644 --- a/cameraview/src/main/java/com/otaliastudios/cameraview/engine/Camera1Engine.java +++ b/cameraview/src/main/java/com/otaliastudios/cameraview/engine/Camera1Engine.java @@ -297,7 +297,7 @@ public class Camera1Engine extends CameraEngine implements @WorkerThread @Override - protected void onTakePicture(@NonNull PictureResult.Stub stub) { + protected void onTakePicture(@NonNull PictureResult.Stub stub, boolean doMetering) { stub.rotation = getAngles().offset(Reference.SENSOR, Reference.OUTPUT, Axis.RELATIVE_TO_SENSOR); stub.size = getPictureSize(Reference.OUTPUT); mPictureRecorder = new Full1PictureRecorder(stub, Camera1Engine.this, mCamera); @@ -306,12 +306,12 @@ public class Camera1Engine extends CameraEngine implements @WorkerThread @Override - protected void onTakePictureSnapshot(@NonNull PictureResult.Stub stub, @NonNull AspectRatio outputRatio) { + protected void onTakePictureSnapshot(@NonNull PictureResult.Stub stub, @NonNull AspectRatio outputRatio, boolean doMetering) { stub.size = getUncroppedSnapshotSize(Reference.OUTPUT); // Not the real size: it will be cropped to match the view ratio stub.rotation = getAngles().offset(Reference.SENSOR, Reference.OUTPUT, Axis.RELATIVE_TO_SENSOR); // Actually it will be rotated and set to 0. 
if (mPreview instanceof GlCameraPreview && Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT) { - mPictureRecorder = new SnapshotGlPictureRecorder(stub, this, (GlCameraPreview) mPreview, outputRatio, getOverlay()); + mPictureRecorder = new SnapshotGlPictureRecorder(stub, this, (GlCameraPreview) mPreview, outputRatio); } else { mPictureRecorder = new Snapshot1PictureRecorder(stub, this, mCamera, outputRatio); } diff --git a/cameraview/src/main/java/com/otaliastudios/cameraview/engine/Camera2Engine.java b/cameraview/src/main/java/com/otaliastudios/cameraview/engine/Camera2Engine.java index 08833e1a..82788f5d 100644 --- a/cameraview/src/main/java/com/otaliastudios/cameraview/engine/Camera2Engine.java +++ b/cameraview/src/main/java/com/otaliastudios/cameraview/engine/Camera2Engine.java @@ -42,7 +42,14 @@ import com.otaliastudios.cameraview.controls.Flash; import com.otaliastudios.cameraview.controls.Hdr; import com.otaliastudios.cameraview.controls.Mode; import com.otaliastudios.cameraview.controls.WhiteBalance; +import com.otaliastudios.cameraview.engine.action.Action; +import com.otaliastudios.cameraview.engine.action.ActionHolder; +import com.otaliastudios.cameraview.engine.action.Actions; +import com.otaliastudios.cameraview.engine.action.BaseAction; +import com.otaliastudios.cameraview.engine.action.CompletionCallback; import com.otaliastudios.cameraview.engine.mappers.Camera2Mapper; +import com.otaliastudios.cameraview.engine.meter.MeterAction; +import com.otaliastudios.cameraview.engine.meter.MeterResetAction; import com.otaliastudios.cameraview.engine.offset.Axis; import com.otaliastudios.cameraview.engine.offset.Reference; import com.otaliastudios.cameraview.frame.Frame; @@ -52,7 +59,7 @@ import com.otaliastudios.cameraview.internal.utils.CropHelper; import com.otaliastudios.cameraview.internal.utils.ImageHelper; import com.otaliastudios.cameraview.internal.utils.WorkerHandler; import com.otaliastudios.cameraview.picture.Full2PictureRecorder; -import com.otaliastudios.cameraview.picture.SnapshotGlPictureRecorder; +import com.otaliastudios.cameraview.picture.Snapshot2PictureRecorder; import com.otaliastudios.cameraview.preview.GlCameraPreview; import com.otaliastudios.cameraview.size.AspectRatio; import com.otaliastudios.cameraview.size.Size; @@ -64,16 +71,17 @@ import java.util.ArrayList; import java.util.List; import java.util.concurrent.Callable; import java.util.concurrent.ExecutionException; -import java.util.concurrent.atomic.AtomicBoolean; @RequiresApi(Build.VERSION_CODES.LOLLIPOP) -public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAvailableListener, Meter.Callback { +public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAvailableListener, + ActionHolder { private static final String TAG = Camera2Engine.class.getSimpleName(); private static final CameraLogger LOG = CameraLogger.create(TAG); private static final int FRAME_PROCESSING_FORMAT = ImageFormat.NV21; private static final int FRAME_PROCESSING_INPUT_FORMAT = ImageFormat.YUV_420_888; + private static final long METER_TIMEOUT = 2500; private final CameraManager mManager; private String mCameraId; @@ -81,8 +89,7 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv private CameraCharacteristics mCameraCharacteristics; private CameraCaptureSession mSession; private CaptureRequest.Builder mRepeatingRequestBuilder; - private CaptureRequest mRepeatingRequest; - private CameraCaptureSession.CaptureCallback mRepeatingRequestCallback; + private 
TotalCaptureResult mLastRepeatingResult; private final Camera2Mapper mMapper = Camera2Mapper.get(); // Frame processing @@ -101,13 +108,15 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv private ImageReader mPictureReader; private final boolean mPictureCaptureStopsPreview = false; // can make configurable at some point - // 3A metering - private Meter mMeter; + // Actions + private final List mActions = new ArrayList<>(); + private MeterAction mMeterAction; public Camera2Engine(Callback callback) { super(callback); mManager = (CameraManager) mCallback.getContext().getSystemService(Context.CAMERA_SERVICE); mFrameConversionHandler = WorkerHandler.get("CameraFrameConversion"); + new LogAction().start(this); } //region Utilities @@ -126,6 +135,7 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv return value == null ? fallback : value; } + @SuppressWarnings("DuplicateBranchesInSwitch") @NonNull private CameraException createCameraException(@NonNull CameraAccessException exception) { int reason; @@ -140,6 +150,7 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv return new CameraException(exception, reason); } + @SuppressWarnings("DuplicateBranchesInSwitch") @NonNull private CameraException createCameraException(int stateCallbackError) { int reason; @@ -163,9 +174,10 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv @SuppressWarnings("UnusedReturnValue") @NonNull private CaptureRequest.Builder createRepeatingRequestBuilder(int template) throws CameraAccessException { + CaptureRequest.Builder oldBuilder = mRepeatingRequestBuilder; mRepeatingRequestBuilder = mCamera.createCaptureRequest(template); mRepeatingRequestBuilder.setTag(template); - applyAllParameters(mRepeatingRequestBuilder); + applyAllParameters(mRepeatingRequestBuilder, oldBuilder); return mRepeatingRequestBuilder; } @@ -197,6 +209,17 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv } } + private void applyRepeatingRequestBuilderAsSingle() { + if (getPreviewState() == STATE_STARTED) { + try { + mSession.capture(mRepeatingRequestBuilder.build(), + mRepeatingRequestCallback, null); + } catch (CameraAccessException e) { + throw createCameraException(e); + } + } + } + /** * Applies the repeating request builder to the preview, assuming we actually have a preview * running. Can be called after changing parameters to the builder. @@ -206,55 +229,52 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv * {@link #createRepeatingRequestBuilder(int)}. 
*/ private void applyRepeatingRequestBuilder() { - applyRepeatingRequestBuilder(true, CameraException.REASON_DISCONNECTED, null); + applyRepeatingRequestBuilder(true, CameraException.REASON_DISCONNECTED); } - private void applyRepeatingRequestBuilder(boolean checkStarted, int errorReason, @Nullable final Runnable onFirstFrame) { - if (!checkStarted || getPreviewState() == STATE_STARTED) { + private void applyRepeatingRequestBuilder(boolean checkStarted, int errorReason) { + if (getPreviewState() == STATE_STARTED || !checkStarted) { try { - mRepeatingRequest = mRepeatingRequestBuilder.build(); - final AtomicBoolean firstFrame = new AtomicBoolean(false); - mRepeatingRequestCallback = new CameraCaptureSession.CaptureCallback() { - @Override - public void onCaptureStarted(@NonNull CameraCaptureSession session, @NonNull CaptureRequest request, long timestamp, long frameNumber) { - super.onCaptureStarted(session, request, timestamp, frameNumber); - if (firstFrame.compareAndSet(false, true) && onFirstFrame != null) { - onFirstFrame.run(); - } - if (mPictureRecorder instanceof Full2PictureRecorder) { - ((Full2PictureRecorder) mPictureRecorder).onCaptureStarted(request); - } - } - - @Override - public void onCaptureProgressed(@NonNull CameraCaptureSession session, @NonNull CaptureRequest request, @NonNull CaptureResult partialResult) { - super.onCaptureProgressed(session, request, partialResult); - if (mPictureRecorder instanceof Full2PictureRecorder) { - ((Full2PictureRecorder) mPictureRecorder).onCaptureProgressed(partialResult); - } - if (mMeter != null && mMeter.isMetering()) { - mMeter.onCapture(partialResult); - } - } - - @Override - public void onCaptureCompleted(@NonNull CameraCaptureSession session, @NonNull CaptureRequest request, @NonNull TotalCaptureResult result) { - super.onCaptureCompleted(session, request, result); - if (mPictureRecorder instanceof Full2PictureRecorder) { - ((Full2PictureRecorder) mPictureRecorder).onCaptureCompleted(result); - } - if (mMeter != null && mMeter.isMetering()) { - mMeter.onCapture(result); - } - } - - }; - mSession.setRepeatingRequest(mRepeatingRequest, mRepeatingRequestCallback, null); + mSession.setRepeatingRequest(mRepeatingRequestBuilder.build(), + mRepeatingRequestCallback, null); } catch (CameraAccessException e) { throw new CameraException(e, errorReason); } } } + + private final CameraCaptureSession.CaptureCallback mRepeatingRequestCallback + = new CameraCaptureSession.CaptureCallback() { + @Override + public void onCaptureStarted(@NonNull CameraCaptureSession session, + @NonNull CaptureRequest request, + long timestamp, + long frameNumber) { + for (Action action : mActions) { + action.onCaptureStarted(Camera2Engine.this, request); + } + } + + @Override + public void onCaptureProgressed(@NonNull CameraCaptureSession session, + @NonNull CaptureRequest request, + @NonNull CaptureResult partialResult) { + for (Action action : mActions) { + action.onCaptureProgressed(Camera2Engine.this, request, partialResult); + } + } + + @Override + public void onCaptureCompleted(@NonNull CameraCaptureSession session, + @NonNull CaptureRequest request, + @NonNull TotalCaptureResult result) { + mLastRepeatingResult = result; + for (Action action : mActions) { + action.onCaptureCompleted(Camera2Engine.this, request, result); + } + } + }; + //endregion //region Protected APIs @@ -265,10 +285,11 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv try { CameraCharacteristics characteristics = 
mManager.getCameraCharacteristics(mCameraId); StreamConfigurationMap streamMap = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP); - if (streamMap == null) + if (streamMap == null) { throw new RuntimeException("StreamConfigurationMap is null. Should not happen."); - // This works because our previews return either a SurfaceTexture or a SurfaceHolder, which are - // accepted class types by the getOutputSizes method. + } + // This works because our previews return either a SurfaceTexture or a SurfaceHolder, + // which are accepted class types by the getOutputSizes method. android.util.Size[] sizes = streamMap.getOutputSizes(mPreview.getOutputClass()); List candidates = new ArrayList<>(sizes.length); for (android.util.Size size : sizes) { @@ -298,13 +319,16 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv // However, let's launch an unrecoverable exception. throw createCameraException(e); } - LOG.i("collectCameraInfo", "Facing:", facing, "Internal:", internalFacing, "Cameras:", cameraIds.length); + LOG.i("collectCameraInfo", "Facing:", facing, + "Internal:", internalFacing, + "Cameras:", cameraIds.length); for (String cameraId : cameraIds) { try { CameraCharacteristics characteristics = mManager.getCameraCharacteristics(cameraId); if (internalFacing == readCharacteristic(characteristics, CameraCharacteristics.LENS_FACING, -99)) { mCameraId = cameraId; - int sensorOffset = readCharacteristic(characteristics, CameraCharacteristics.SENSOR_ORIENTATION, 0); + int sensorOffset = readCharacteristic(characteristics, + CameraCharacteristics.SENSOR_ORIENTATION, 0); getAngles().setSensorOffset(facing, sensorOffset); return true; } @@ -352,8 +376,8 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv // However, using trySetException should address this problem - it will only trigger // if the task has no result. // - // Docs say to release this camera instance, however, since we throw an unrecoverable CameraException, - // this will trigger a stop() through the exception handler. + // Docs say to release this camera instance, however, since we throw an unrecoverable + // CameraException, this will trigger a stop() through the exception handler. task.trySetException(new CameraException(CameraException.REASON_DISCONNECTED)); } @@ -505,7 +529,7 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv LOG.i("onStartPreview", "Starting preview."); addRepeatingRequestBuilderSurfaces(); - applyRepeatingRequestBuilder(false, CameraException.REASON_FAILED_TO_START_PREVIEW, null); + applyRepeatingRequestBuilder(false, CameraException.REASON_FAILED_TO_START_PREVIEW); LOG.i("onStartPreview", "Started preview."); // Start delayed video if needed. 
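The new mRepeatingRequestCallback shown above forwards every Camera2 capture callback to the registered actions, which is what lets metering, locking and resetting be written as small self-contained state machines. As an illustration only (this class is not part of the patch, just a sketch on top of the ActionHolder API visible here), a custom action could lock auto-exposure and complete once the lock shows up in a capture result:

    import android.hardware.camera2.CaptureRequest;
    import android.hardware.camera2.CaptureResult;
    import android.hardware.camera2.TotalCaptureResult;

    import androidx.annotation.NonNull;

    import com.otaliastudios.cameraview.engine.action.ActionHolder;
    import com.otaliastudios.cameraview.engine.action.BaseAction;

    class LockExposureAction extends BaseAction {
        @Override
        protected void onStart(@NonNull ActionHolder holder) {
            super.onStart(holder);
            // Edit the repeating request through the holder, then re-apply it.
            holder.getBuilder(this).set(CaptureRequest.CONTROL_AE_LOCK, true);
            holder.applyBuilder(this);
        }

        @Override
        public void onCaptureCompleted(@NonNull ActionHolder holder,
                                       @NonNull CaptureRequest request,
                                       @NonNull TotalCaptureResult result) {
            super.onCaptureCompleted(holder, request, result);
            // Completed results are dispatched by mRepeatingRequestCallback above.
            Integer aeState = result.get(CaptureResult.CONTROL_AE_STATE);
            if (aeState != null && aeState == CaptureResult.CONTROL_AE_STATE_LOCKED) {
                setState(STATE_COMPLETED);
            }
        }
    }

Starting it follows the same pattern used in CameraIntegration2Test: new LockExposureAction().start(engine), where the engine is the Camera2Engine acting as the ActionHolder.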
@@ -555,8 +579,6 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv throw createCameraException(e); } removeRepeatingRequestBuilderSurfaces(); - mRepeatingRequest = null; - mMeter = null; LOG.i("onStopPreview:", "Returning."); return Tasks.forResult(null); } @@ -611,35 +633,66 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv @WorkerThread @Override - protected void onTakePictureSnapshot(@NonNull PictureResult.Stub stub, @NonNull AspectRatio outputRatio) { - stub.size = getUncroppedSnapshotSize(Reference.OUTPUT); // Not the real size: it will be cropped to match the view ratio - stub.rotation = getAngles().offset(Reference.SENSOR, Reference.OUTPUT, Axis.RELATIVE_TO_SENSOR); // Actually it will be rotated and set to 0. - if (mPreview instanceof GlCameraPreview) { - mPictureRecorder = new SnapshotGlPictureRecorder(stub, this, (GlCameraPreview) mPreview, outputRatio, getOverlay()); + protected void onTakePictureSnapshot(@NonNull final PictureResult.Stub stub, @NonNull final AspectRatio outputRatio, boolean doMetering) { + if (doMetering) { + LOG.i("onTakePictureSnapshot:", "doMetering is true. Delaying."); + Action action = Actions.timeout(METER_TIMEOUT, createMeterAction(null)); + action.addCallback(new CompletionCallback() { + @Override + protected void onActionCompleted(@NonNull Action action) { + onTakePictureSnapshot(stub, outputRatio, false); + } + }); + action.start(this); } else { - throw new RuntimeException("takePictureSnapshot with Camera2 is only supported with Preview.GL_SURFACE"); + LOG.i("onTakePictureSnapshot:", "doMetering is false. Performing."); + if (!(mPreview instanceof GlCameraPreview)) { + throw new RuntimeException("takePictureSnapshot with Camera2 is only " + + "supported with Preview.GL_SURFACE"); + } + // stub.size is not the real size: it will be cropped to the given ratio + // stub.rotation will be set to 0 - we rotate the texture instead. + stub.size = getUncroppedSnapshotSize(Reference.OUTPUT); + stub.rotation = getAngles().offset(Reference.SENSOR, Reference.OUTPUT, Axis.RELATIVE_TO_SENSOR); + mPictureRecorder = new Snapshot2PictureRecorder(stub, this, + (GlCameraPreview) mPreview, outputRatio); + mPictureRecorder.take(); } - mPictureRecorder.take(); } @Override - protected void onTakePicture(@NonNull PictureResult.Stub stub) { - stub.rotation = getAngles().offset(Reference.SENSOR, Reference.OUTPUT, Axis.RELATIVE_TO_SENSOR); - stub.size = getPictureSize(Reference.OUTPUT); - try { - CaptureRequest.Builder builder = mCamera.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE); - applyAllParameters(builder); - mPictureRecorder = new Full2PictureRecorder(stub, this, - mCameraCharacteristics, - mSession, - mRepeatingRequestBuilder, - mRepeatingRequestCallback, - builder, - mPictureReader, - mPictureCaptureStopsPreview); - mPictureRecorder.take(); - } catch (CameraAccessException e) { - throw createCameraException(e); + protected void onTakePicture(@NonNull final PictureResult.Stub stub, boolean doMetering) { + if (doMetering) { + LOG.i("onTakePicture:", "doMetering is true. Delaying."); + Action action = Actions.timeout(METER_TIMEOUT, createMeterAction(null)); + action.addCallback(new CompletionCallback() { + @Override + protected void onActionCompleted(@NonNull Action action) { + onTakePicture(stub, false); + } + }); + action.start(this); + } else { + LOG.i("onTakePicture:", "doMetering is false. 
Performing."); + stub.rotation = getAngles().offset(Reference.SENSOR, Reference.OUTPUT, Axis.RELATIVE_TO_SENSOR); + stub.size = getPictureSize(Reference.OUTPUT); + try { + if (mPictureCaptureStopsPreview) { + // These two are present in official samples and are probably meant to speed things up? + // But from my tests, they actually make everything slower. So this is disabled by default + // with a boolean flag. Maybe in the future we can make this configurable as some + // people might want to stop the preview while picture is being taken even if it + // increases the latency. + mSession.stopRepeating(); + mSession.abortCaptures(); + } + CaptureRequest.Builder builder = mCamera.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE); + applyAllParameters(builder, mRepeatingRequestBuilder); + mPictureRecorder = new Full2PictureRecorder(stub, this, builder, mPictureReader); + mPictureRecorder.take(); + } catch (CameraAccessException e) { + throw createCameraException(e); + } } } @@ -648,9 +701,16 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv boolean fullPicture = mPictureRecorder instanceof Full2PictureRecorder; super.onPictureResult(result, error); if (fullPicture && mPictureCaptureStopsPreview) { - // See comments in Full2PictureRecorder. applyRepeatingRequestBuilder(); } + + // Some picture recorders might lock metering, and we usually run a metering sequence + // before running the recorders. So, run an unlock/reset sequence if needed. + boolean unlock = (fullPicture && getPictureMetering()) + || (!fullPicture && getPictureSnapshotMetering()); + if (unlock) { + unlockAndResetMetering(); + } } //endregion @@ -672,18 +732,15 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv private void doTakeVideo(@NonNull final VideoResult.Stub stub) { if (!(mVideoRecorder instanceof Full2VideoRecorder)) { - throw new IllegalStateException("doTakeVideo called, but video recorder is not a Full2VideoRecorder! " + mVideoRecorder); + throw new IllegalStateException("doTakeVideo called, but video recorder " + + "is not a Full2VideoRecorder! " + mVideoRecorder); } Full2VideoRecorder recorder = (Full2VideoRecorder) mVideoRecorder; try { createRepeatingRequestBuilder(CameraDevice.TEMPLATE_RECORD); addRepeatingRequestBuilderSurfaces(recorder.getInputSurface()); - applyRepeatingRequestBuilder(true, CameraException.REASON_DISCONNECTED, new Runnable() { - @Override - public void run() { - mVideoRecorder.start(stub); - } - }); + applyRepeatingRequestBuilder(true, CameraException.REASON_DISCONNECTED); + mVideoRecorder.start(stub); } catch (CameraAccessException e) { onVideoResult(null, e); throw createCameraException(e); @@ -757,7 +814,7 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv * the {@link #createRepeatingRequestBuilder(int)} method. 
*/ private void maybeRestorePreviewTemplateAfterVideo() { - int template = (int) mRepeatingRequest.getTag(); + int template = (int) mRepeatingRequestBuilder.build().getTag(); if (template != CameraDevice.TEMPLATE_PREVIEW) { try { createRepeatingRequestBuilder(CameraDevice.TEMPLATE_PREVIEW); @@ -773,7 +830,9 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv //region Parameters - private void applyAllParameters(@NonNull CaptureRequest.Builder builder) { + private void applyAllParameters(@NonNull CaptureRequest.Builder builder, + @Nullable CaptureRequest.Builder oldBuilder) { + LOG.i("applyAllParameters:", "called for tag", builder.build().getTag()); builder.set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_AUTO); applyDefaultFocus(builder); applyFlash(builder, Flash.OFF); @@ -782,6 +841,17 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv applyHdr(builder, Hdr.OFF); applyZoom(builder, 0F); applyExposureCorrection(builder, 0F); + + if (oldBuilder != null) { + // We might be in a metering operation, or the old builder might have some special + // metering parameters. Copy these special keys over to the new builder. + // These are the keys changed by metering.Parameters, or by us in applyFocusForMetering. + builder.set(CaptureRequest.CONTROL_AF_REGIONS, oldBuilder.get(CaptureRequest.CONTROL_AF_REGIONS)); + builder.set(CaptureRequest.CONTROL_AE_REGIONS, oldBuilder.get(CaptureRequest.CONTROL_AE_REGIONS)); + builder.set(CaptureRequest.CONTROL_AWB_REGIONS, oldBuilder.get(CaptureRequest.CONTROL_AWB_REGIONS)); + builder.set(CaptureRequest.CONTROL_AF_MODE, oldBuilder.get(CaptureRequest.CONTROL_AF_MODE)); + // Do NOT copy exposure or focus triggers! + } } private void applyDefaultFocus(@NonNull CaptureRequest.Builder builder) { @@ -836,14 +906,31 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv } @Override - public void setFlash(@NonNull Flash flash) { + public void setFlash(@NonNull final Flash flash) { final Flash old = mFlash; mFlash = flash; mHandler.run(new Runnable() { @Override public void run() { if (getEngineState() == STATE_STARTED) { - if (applyFlash(mRepeatingRequestBuilder, old)) { + boolean shouldApply = applyFlash(mRepeatingRequestBuilder, old); + boolean needsWorkaround = getPreviewState() == STATE_STARTED; + if (needsWorkaround) { + // Runtime changes to the flash value are not correctly handled by the driver. + // See https://stackoverflow.com/q/53003383/4288782 for example. + // For this reason, we go back to OFF, capture once, then go to the new one. + mFlash = Flash.OFF; + applyFlash(mRepeatingRequestBuilder, old); + try { + mSession.capture(mRepeatingRequestBuilder.build(), null, null); + } catch (CameraAccessException e) { + throw createCameraException(e); + } + mFlash = flash; + applyFlash(mRepeatingRequestBuilder, old); + applyRepeatingRequestBuilder(); + + } else if (shouldApply) { applyRepeatingRequestBuilder(); } } @@ -884,12 +971,6 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv LOG.i("applyFlash: setting FLASH_MODE to", pair.second); builder.set(CaptureRequest.CONTROL_AE_MODE, pair.first); builder.set(CaptureRequest.FLASH_MODE, pair.second); - - // On some devices, switching from TORCH/OFF to AUTO/ON is not immediately - // reflected (for example, torch stays active) unless we do as follows. - // It's just a way to wake up the AE routine. 
- builder.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER, - CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER_START); return true; } } @@ -1139,95 +1220,136 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv @Override public void startAutoFocus(@Nullable final Gesture gesture, @NonNull final PointF point) { - // TODO Should change this name at some point, and deprecate AF methods - startMetering(gesture, point); - } - - private void startMetering(@Nullable final Gesture gesture, @NonNull final PointF point) { - LOG.i("startMetering", "dispatching. Gesture:", gesture); + LOG.i("startAutoFocus", "dispatching. Gesture:", gesture); mHandler.run(new Runnable() { @Override public void run() { - LOG.i("startMetering", "executing. Preview state:", getPreviewState()); + LOG.i("startAutoFocus", "executing. Preview state:", getPreviewState()); // This will only work when we have a preview, since it launches the preview in the end. // Even without this it would need the bind state at least, since we need the preview size. if (getPreviewState() < STATE_STARTED) return; // The camera options API still has the auto focus API but it really - // refers to 3A metering. + // refers to "3A metering to a specific point". Since we have a point, check. if (!mCameraOptions.isAutoFocusSupported()) return; - // Reset the old meter if present. - if (mMeter != null) { - mMeter.resetMetering(); - } + // Create the meter and start. + mCallback.dispatchOnFocusStart(gesture, point); + final MeterAction action = createMeterAction(point); + Action wrapper = Actions.timeout(METER_TIMEOUT, action); + wrapper.start(Camera2Engine.this); + wrapper.addCallback(new CompletionCallback() { + @Override + protected void onActionCompleted(@NonNull Action a) { + mCallback.dispatchOnFocusEnd(gesture, action.isSuccessful(), point); + mHandler.remove(mUnlockAndResetMeteringRunnable); + if (shouldResetAutoFocus()) { + mHandler.post(getAutoFocusResetDelay(), mUnlockAndResetMeteringRunnable); + } + } + }); + } + }); + } - // The meter will check the current configuration to see if AF/AE/AWB should run. - // - AE should be on CONTROL_AE_MODE_ON* (this depends on setFlash()) - // - AWB should be on CONTROL_AWB_MODE_AUTO (this depends on setWhiteBalance()) - // - AF should be on CONTROL_AF_MODE_AUTO or others - // The last one is under our control because the library has no focus API. - // So let's set a good af mode here. This operation is reverted during onMeteringReset(). - applyFocusForMetering(mRepeatingRequestBuilder); + @NonNull + private MeterAction createMeterAction(@Nullable PointF point) { + // Before creating any new meter action, abort the old one. + if (mMeterAction != null) mMeterAction.abort(this); + // The meter will check the current configuration to see if AF/AE/AWB should run. + // - AE should be on CONTROL_AE_MODE_ON* (this depends on setFlash()) + // - AWB should be on CONTROL_AWB_MODE_AUTO (this depends on setWhiteBalance()) + // - AF should be on CONTROL_AF_MODE_AUTO or others + // The last one is under our control because the library has no focus API. + // So let's set a good af mode here. This operation is reverted during onMeteringReset(). + applyFocusForMetering(mRepeatingRequestBuilder); + mMeterAction = new MeterAction(Camera2Engine.this, point, point == null); + return mMeterAction; + } - // Create the meter and start. 
- mMeter = new Meter(Camera2Engine.this, - mRepeatingRequestBuilder, - mCameraCharacteristics, - Camera2Engine.this); - mMeter.startMetering(point, gesture); + private final Runnable mUnlockAndResetMeteringRunnable = new Runnable() { + @Override + public void run() { + unlockAndResetMetering(); + } + }; + + private void unlockAndResetMetering() { + if (getEngineState() == STATE_STARTED) { + Actions.sequence( + new BaseAction() { + @Override + protected void onStart(@NonNull ActionHolder holder) { + super.onStart(holder); + applyDefaultFocus(holder.getBuilder(this)); + holder.getBuilder(this).set(CaptureRequest.CONTROL_AE_LOCK, false); + holder.getBuilder(this).set(CaptureRequest.CONTROL_AWB_LOCK, false); + holder.applyBuilder(this); + setState(STATE_COMPLETED); + // TODO should wait results? + } + }, + new MeterResetAction() + ).start(Camera2Engine.this); + } + } + + //endregion + + //region Actions + + @Override + public void addAction(final @NonNull Action action) { + // This is likely to be called during a Capture callback while we iterate + // on the actions list, or worse, from other threads. We must use mHandler.post. + mHandler.post(new Runnable() { + @Override + public void run() { + if (!mActions.contains(action)) { + mActions.add(action); + } } }); } - /** - * Called by {@link Meter} when the metering process has started. - * We are currently exposing an auto focus API so that's what we dispatch. - * @param point point - * @param gesture gesture - */ @Override - public void onMeteringStarted(@NonNull PointF point, @Nullable Gesture gesture) { - LOG.w("onMeteringStarted - point:", point, "gesture:", gesture); - mCallback.dispatchOnFocusStart(gesture, point); - applyRepeatingRequestBuilder(); + public void removeAction(final @NonNull Action action) { + // This is likely to be called during a Capture callback while we iterate + // on the actions list, or worse, from other threads. We must use mHandler.post. + mHandler.post(new Runnable() { + @Override + public void run() { + mActions.remove(action); + } + }); } - /** - * Called by {@link Meter} when the metering process has ended. - * We are currently exposing an auto focus API so that's what we dispatch. - * @param point point - * @param gesture gesture - * @param success success - */ + @NonNull @Override - public void onMeteringEnd(@NonNull PointF point, @Nullable Gesture gesture, boolean success) { - LOG.w("onMeteringEnd - point:", point, "gesture:", gesture, "success:", success); - mCallback.dispatchOnFocusEnd(gesture, success, point); + public CameraCharacteristics getCharacteristics(@NonNull Action action) { + return mCameraCharacteristics; } - /** - * When metering is reset, we're not sure that the engine is still alive. - * We should check this here. - * @param point point - * @param gesture gesture - * @return true if metering can be reset - */ + @NonNull @Override - public boolean canResetMetering(@NonNull PointF point, @Nullable Gesture gesture) { - return getEngineState() == STATE_STARTED; + public TotalCaptureResult getLastResult(@NonNull Action action) { + return mLastRepeatingResult; + } + + @NonNull + @Override + public CaptureRequest.Builder getBuilder(@NonNull Action action) { + return mRepeatingRequestBuilder; + } + + @Override + public void applyBuilder(@NonNull Action source) { + applyRepeatingRequestBuilder(); } - /** - * Called by {@link Meter} after resetting the metering parameters. - * We should apply them, and also go back to default focus. 
- * @param point point - * @param gesture gesture - */ @Override - public void onMeteringReset(@NonNull PointF point, @Nullable Gesture gesture) { - applyDefaultFocus(mRepeatingRequestBuilder); - applyRepeatingRequestBuilder(); // only if preview started already + public void applyBuilder(@NonNull Action source, @NonNull CaptureRequest.Builder builder) throws CameraAccessException { + mSession.capture(builder.build(), mRepeatingRequestCallback, null); } //endregion diff --git a/cameraview/src/main/java/com/otaliastudios/cameraview/engine/CameraEngine.java b/cameraview/src/main/java/com/otaliastudios/cameraview/engine/CameraEngine.java index 0cc4fa9a..f27f4731 100644 --- a/cameraview/src/main/java/com/otaliastudios/cameraview/engine/CameraEngine.java +++ b/cameraview/src/main/java/com/otaliastudios/cameraview/engine/CameraEngine.java @@ -167,6 +167,8 @@ public abstract class CameraEngine implements @SuppressWarnings("WeakerAccess") protected float mZoomValue; @SuppressWarnings("WeakerAccess") protected float mExposureCorrectionValue; @SuppressWarnings("WeakerAccess") protected boolean mPlaySounds; + @SuppressWarnings("WeakerAccess") protected boolean mPictureMetering; + @SuppressWarnings("WeakerAccess") protected boolean mPictureSnapshotMetering; // Can be private @VisibleForTesting Handler mCrashHandler; @@ -225,6 +227,11 @@ public abstract class CameraEngine implements mPreview.setSurfaceCallback(this); } + @NonNull + public CameraPreview getPreview() { + return mPreview; + } + //region Error handling /** @@ -1008,6 +1015,22 @@ public abstract class CameraEngine implements return mAutoFocusResetDelayMillis > 0 && mAutoFocusResetDelayMillis != Long.MAX_VALUE; } + public final void setPictureMetering(boolean enable) { + mPictureMetering = enable; + } + + public final boolean getPictureMetering() { + return mPictureMetering; + } + + public final void setPictureSnapshotMetering(boolean enable) { + mPictureSnapshotMetering = enable; + } + + public final boolean getPictureSnapshotMetering() { + return mPictureSnapshotMetering; + } + //endregion //region Abstract setters and APIs @@ -1070,7 +1093,6 @@ public abstract class CameraEngine implements public void run() { LOG.v("takePicture", "performing. BindState:", getBindState(), "isTakingPicture:", isTakingPicture()); if (mMode == Mode.VIDEO) { - // Could redirect to takePictureSnapshot, but it's better if people know what they are doing. throw new IllegalStateException("Can't take hq pictures while in VIDEO mode"); } if (getBindState() < STATE_STARTED) return; @@ -1078,7 +1100,7 @@ public abstract class CameraEngine implements stub.isSnapshot = false; stub.location = mLocation; stub.facing = mFacing; - onTakePicture(stub); + onTakePicture(stub, mPictureMetering); } }); } @@ -1102,7 +1124,7 @@ public abstract class CameraEngine implements // Leave the other parameters to subclasses. 
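A quick usage sketch of the new per-mode metering flags from the public CameraView API; the engine setters above back these calls and are then passed to onTakePicture / onTakePictureSnapshot as the doMetering flag. The layout id and the chosen values below are illustrative assumptions, not defaults mandated by the library:

```java
// Illustrative sketch (not part of this patch): toggling metering per capture mode.
// CameraView forwards these calls to CameraEngine.setPictureMetering /
// setPictureSnapshotMetering.
CameraView cameraView = findViewById(R.id.camera); // "camera" id is an assumption
cameraView.setPictureMetering(true);               // run 3A metering before takePicture()
cameraView.setPictureSnapshotMetering(false);      // skip metering before snapshots
cameraView.takePicture();
cameraView.takePictureSnapshot();
```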
//noinspection ConstantConditions AspectRatio ratio = AspectRatio.of(getPreviewSurfaceSize(Reference.OUTPUT)); - onTakePictureSnapshot(stub, ratio); + onTakePictureSnapshot(stub, ratio, mPictureSnapshotMetering); } }); } @@ -1223,10 +1245,10 @@ public abstract class CameraEngine implements } @WorkerThread - protected abstract void onTakePicture(@NonNull PictureResult.Stub stub); + protected abstract void onTakePicture(@NonNull PictureResult.Stub stub, boolean doMetering); @WorkerThread - protected abstract void onTakePictureSnapshot(@NonNull PictureResult.Stub stub, @NonNull AspectRatio outputRatio); + protected abstract void onTakePictureSnapshot(@NonNull PictureResult.Stub stub, @NonNull AspectRatio outputRatio, boolean doMetering); @WorkerThread protected abstract void onTakeVideoSnapshot(@NonNull VideoResult.Stub stub, @NonNull AspectRatio outputRatio); diff --git a/cameraview/src/main/java/com/otaliastudios/cameraview/engine/LogAction.java b/cameraview/src/main/java/com/otaliastudios/cameraview/engine/LogAction.java new file mode 100644 index 00000000..72de4d6e --- /dev/null +++ b/cameraview/src/main/java/com/otaliastudios/cameraview/engine/LogAction.java @@ -0,0 +1,62 @@ +package com.otaliastudios.cameraview.engine; + +import android.hardware.camera2.CaptureRequest; +import android.hardware.camera2.CaptureResult; +import android.hardware.camera2.TotalCaptureResult; +import android.os.Build; + +import androidx.annotation.NonNull; +import androidx.annotation.RequiresApi; + +import com.otaliastudios.cameraview.CameraLogger; +import com.otaliastudios.cameraview.engine.action.ActionHolder; +import com.otaliastudios.cameraview.engine.action.BaseAction; + +@RequiresApi(Build.VERSION_CODES.LOLLIPOP) +class LogAction extends BaseAction { + + private final static CameraLogger LOG = CameraLogger.create(Camera2Engine.class.getSimpleName()); + + private String lastLog; + + @Override + public void onCaptureCompleted(@NonNull ActionHolder holder, @NonNull CaptureRequest request, + @NonNull TotalCaptureResult result) { + super.onCaptureCompleted(holder, request, result); + Integer aeMode = result.get(CaptureResult.CONTROL_AE_MODE); + Integer aeState = result.get(CaptureResult.CONTROL_AE_STATE); + Integer afState = result.get(CaptureResult.CONTROL_AF_STATE); + Boolean aeLock = result.get(CaptureResult.CONTROL_AE_LOCK); + Integer aeTriggerState = result.get(CaptureResult.CONTROL_AE_PRECAPTURE_TRIGGER); + Integer afTriggerState = result.get(CaptureResult.CONTROL_AF_TRIGGER); + String log = "aeMode: " + aeMode + " aeLock: " + aeLock + + " aeState: " + aeState + " aeTriggerState: " + aeTriggerState + + " afState: " + afState + " afTriggerState: " + afTriggerState; + if (!log.equals(lastLog)) { + lastLog = log; + LOG.w(log); + } + + // START + // aeMode: 3 aeLock: false aeState: 4 aeTriggerState: 0 afState: 2 afTriggerState: 0 + // + // DURING metering (focus skips) + // aeMode: 3 aeLock: false aeState: 4 aeTriggerState: 0 afState: 0 afTriggerState: 0 + // aeMode: 3 aeLock: false aeState: 5 aeTriggerState: 1 afState: 0 afTriggerState: 0 + // + // DURING locking (focus skips) + // aeMode: 3 aeLock: false aeState: 4 aeTriggerState: 1 afState: 0 afTriggerState: 0 + // aeMode: 3 aeLock: true aeState: 5 aeTriggerState: 1 afState: 0 afTriggerState: 0 + // + // AFTER locked + // aeMode: 3 aeLock: true aeState: 3 aeTriggerState: 1 afState: 0 afTriggerState: 0 + // + // AFTER super.take() called + // aeMode: 1 aeLock: true aeState: 5 aeTriggerState: 1 afState: 0 afTriggerState: 0 + // aeMode: 1 aeLock: true 
aeState: 3 aeTriggerState: 1 afState: 0 afTriggerState: 0 + // + // Reverting flash changes + reset lock + reset metering + // aeMode: 3 aeLock: false aeState: 4 aeTriggerState: 2(1 now) afState: 2 afTriggerState: 0 + // aeMode: 3 aeLock: false aeState: 1 aeTriggerState: 2(1 now) afState: 2 afTriggerState: 0 + } +} diff --git a/cameraview/src/main/java/com/otaliastudios/cameraview/engine/Meter.java b/cameraview/src/main/java/com/otaliastudios/cameraview/engine/Meter.java deleted file mode 100644 index 751a3c44..00000000 --- a/cameraview/src/main/java/com/otaliastudios/cameraview/engine/Meter.java +++ /dev/null @@ -1,380 +0,0 @@ -package com.otaliastudios.cameraview.engine; - -import android.graphics.PointF; -import android.graphics.Rect; -import android.hardware.camera2.CameraCharacteristics; -import android.hardware.camera2.CaptureRequest; -import android.hardware.camera2.CaptureResult; -import android.hardware.camera2.TotalCaptureResult; -import android.hardware.camera2.params.MeteringRectangle; -import android.os.Build; - -import androidx.annotation.NonNull; -import androidx.annotation.Nullable; -import androidx.annotation.RequiresApi; - -import com.otaliastudios.cameraview.CameraLogger; -import com.otaliastudios.cameraview.engine.metering.AutoExposure; -import com.otaliastudios.cameraview.engine.metering.AutoFocus; -import com.otaliastudios.cameraview.engine.metering.AutoWhiteBalance; -import com.otaliastudios.cameraview.engine.metering.MeteringParameter; -import com.otaliastudios.cameraview.engine.offset.Axis; -import com.otaliastudios.cameraview.engine.offset.Reference; -import com.otaliastudios.cameraview.gesture.Gesture; -import com.otaliastudios.cameraview.size.AspectRatio; -import com.otaliastudios.cameraview.size.Size; - -import java.util.Arrays; -import java.util.List; - -/** - * Helps Camera2-based engines to perform 3A (auto focus, auto exposure and auto white balance) - * metering. Users are required to: - * - * - Call {@link #startMetering(PointF, Gesture)} to start - * - Call {@link #onCapture(CaptureResult)} when they have partial or total results, as long as the - * meter is still in a metering operation, which can be checked through {@link #isMetering()} - * - Call {@link #resetMetering()} to reset the metering parameters if needed. This is done automatically - * by the meter based on the reset delay configuration in the engine, but can be called explicitly - * for example when we have multiple meter requests and want to cancel the old one. - */ -@RequiresApi(Build.VERSION_CODES.LOLLIPOP) -public class Meter { - - /** - * The meter callback. - */ - public interface Callback { - - /** - * Notifies that metering has started. At this point implementors should apply - * the builder onto the preview. - * @param point point - * @param gesture gesture - */ - void onMeteringStarted(@NonNull PointF point, @Nullable Gesture gesture); - - /** - * Notifies that metering has ended. No action is required for implementors. - * From now on, {@link #isMetering()} will return false so the meter should not - * be passed capture results anymore. - * @param point point - * @param gesture gesture - * @param success success - */ - void onMeteringEnd(@NonNull PointF point, @Nullable Gesture gesture, boolean success); - - /** - * Notifies that metering has been reset. From now on, this meter instance - * is done, although in theory it could be reused by calling - * {@link #startMetering(PointF, Gesture)} again. 
- * @param point point - * @param gesture gesture - */ - void onMeteringReset(@NonNull PointF point, @Nullable Gesture gesture); - - /** - * Whether metering can be reset. Since it happens at a future time, this should - * return true if the engine is still in a legit state for this operation. - * @param point point - * @param gesture gesture - * @return true if can reset - */ - // TODO is this useful? engine could do its checks onMeteringReset() - boolean canResetMetering(@NonNull PointF point, @Nullable Gesture gesture); - } - - private static final String TAG = Meter.class.getSimpleName(); - private static final CameraLogger LOG = CameraLogger.create(TAG); - private static final int FORCED_END_DELAY = 2500; - - private final CameraEngine mEngine; - private final CaptureRequest.Builder mBuilder; - private final CameraCharacteristics mCharacteristics; - private final Callback mCallback; - private PointF mPoint; - private Gesture mGesture; - - private boolean mIsMetering; - private long mMeteringStartTime; - private MeteringParameter mAutoFocus = new AutoFocus(); - private MeteringParameter mAutoWhiteBalance = new AutoWhiteBalance(); - private MeteringParameter mAutoExposure = new AutoExposure(); - - /** - * Creates a new meter. - * @param engine the engine - * @param builder a capture builder - * @param characteristics the camera characteristics - * @param callback the callback - */ - @SuppressWarnings("WeakerAccess") - public Meter(@NonNull CameraEngine engine, - @NonNull CaptureRequest.Builder builder, - @NonNull CameraCharacteristics characteristics, - @NonNull Callback callback) { - mEngine = engine; - mBuilder = builder; - mCharacteristics = characteristics; - mCallback = callback; - } - - /** - * Starts a metering sequence. - * @param point point - * @param gesture gesture - */ - @SuppressWarnings("WeakerAccess") - public void startMetering(@NonNull PointF point, @Nullable Gesture gesture) { - mPoint = point; - mGesture = gesture; - mIsMetering = true; - - // This is a good Q/A. https://stackoverflow.com/a/33181620/4288782 - // At first, the point is relative to the View system and does not account our own cropping. - // Will keep updating these two below. - final PointF referencePoint = new PointF(mPoint.x, mPoint.y); - Size referenceSize = mEngine.mPreview.getSurfaceSize(); - - // 1. Account for cropping. - // This will enlarge the preview size so that aspect ratio matches. - referenceSize = applyPreviewCropping(referenceSize, referencePoint); - - // 2. Scale to the preview stream coordinates. - // This will move to the preview stream coordinates by scaling. - referenceSize = applyPreviewScale(referenceSize, referencePoint); - - // 3. Rotate to the stream coordinate system. - // This leaves us with sensor stream coordinates. - referenceSize = applyPreviewToSensorRotation(referenceSize, referencePoint); - - // 4. Move to the crop region coordinate system. - // The crop region is the union of all currently active streams. - referenceSize = applyCropRegionCoordinates(referenceSize, referencePoint); - - // 5. Move to the active array coordinate system. - referenceSize = applyActiveArrayCoordinates(referenceSize, referencePoint); - - // 6. Now we can compute the metering regions. - // We want to define them as a fraction of the visible size which (apart from cropping) - // can be obtained through the SENSOR rotated preview stream size. 
- Size visibleSize = mEngine.getPreviewStreamSize(Reference.SENSOR); - //noinspection ConstantConditions - MeteringRectangle area1 = createMeteringRectangle(referenceSize, referencePoint, visibleSize, 0.05F, 1000); - MeteringRectangle area2 = createMeteringRectangle(referenceSize, referencePoint, visibleSize, 0.1F, 100); - List areas = Arrays.asList(area1, area2); - - // 7. And finally dispatch everything - mAutoFocus.startMetering(mCharacteristics, mBuilder, areas); - mAutoWhiteBalance.startMetering(mCharacteristics, mBuilder, areas); - mAutoExposure.startMetering(mCharacteristics, mBuilder, areas); - - // Dispatch to callback - mCallback.onMeteringStarted(mPoint, mGesture); - mMeteringStartTime = System.currentTimeMillis(); - } - - @SuppressWarnings("UnnecessaryLocalVariable") - @NonNull - private Size applyPreviewCropping(@NonNull Size referenceSize, @NonNull PointF referencePoint) { - Size previewStreamSize = mEngine.getPreviewStreamSize(Reference.VIEW); - Size previewSurfaceSize = referenceSize; - if (previewStreamSize == null) { - throw new IllegalStateException("getPreviewStreamSize should not be null at this point."); - } - int referenceWidth = previewSurfaceSize.getWidth(); - int referenceHeight = previewSurfaceSize.getHeight(); - AspectRatio previewStreamAspectRatio = AspectRatio.of(previewStreamSize); - AspectRatio previewSurfaceAspectRatio = AspectRatio.of(previewSurfaceSize); - if (mEngine.mPreview.isCropping()) { - if (previewStreamAspectRatio.toFloat() > previewSurfaceAspectRatio.toFloat()) { - // Stream is larger. The x coordinate must be increased: a touch on the left side - // of the surface is not on the left size of stream (it's more to the right). - float scale = previewStreamAspectRatio.toFloat() / previewSurfaceAspectRatio.toFloat(); - referencePoint.x += previewSurfaceSize.getWidth() * (scale - 1F) / 2F; - referenceWidth = Math.round(previewSurfaceSize.getWidth() * scale); - } else { - // Stream is taller. The y coordinate must be increased: a touch on the top side - // of the surface is not on the top size of stream (it's a bit lower). - float scale = previewSurfaceAspectRatio.toFloat() / previewStreamAspectRatio.toFloat(); - referencePoint.y += previewSurfaceSize.getHeight() * (scale - 1F) / 2F; - referenceHeight = Math.round(previewSurfaceSize.getHeight() * scale); - } - } - return new Size(referenceWidth, referenceHeight); - } - - @SuppressWarnings("ConstantConditions") - @NonNull - private Size applyPreviewScale(@NonNull Size referenceSize, @NonNull PointF referencePoint) { - // The referenceSize how has the same aspect ratio of the previewStreamSize, but they - // can still have different size (that is, a scale operation is needed). - Size previewStreamSize = mEngine.getPreviewStreamSize(Reference.VIEW); - referencePoint.x *= (float) previewStreamSize.getWidth() / referenceSize.getWidth(); - referencePoint.y *= (float) previewStreamSize.getHeight() / referenceSize.getHeight(); - return previewStreamSize; - } - - @SuppressWarnings("SuspiciousNameCombination") - @NonNull - private Size applyPreviewToSensorRotation(@NonNull Size referenceSize, @NonNull PointF referencePoint) { - // Not elegant, but the sin/cos way was failing for some reason. 
- int angle = mEngine.getAngles().offset(Reference.SENSOR, Reference.VIEW, Axis.ABSOLUTE); - boolean flip = angle % 180 != 0; - float tempX = referencePoint.x; - float tempY = referencePoint.y; - if (angle == 0) { - referencePoint.x = tempX; - referencePoint.y = tempY; - } else if (angle == 90) { - referencePoint.x = tempY; - referencePoint.y = referenceSize.getWidth() - tempX; - } else if (angle == 180) { - referencePoint.x = referenceSize.getWidth() - tempX; - referencePoint.y = referenceSize.getHeight() - tempY; - } else if (angle == 270) { - referencePoint.x = referenceSize.getHeight() - tempY; - referencePoint.y = tempX; - } else { - throw new IllegalStateException("Unexpected angle " + angle); - } - return flip ? referenceSize.flip() : referenceSize; - } - - @NonNull - private Size applyCropRegionCoordinates(@NonNull Size referenceSize, @NonNull PointF referencePoint) { - // The input point and size refer to the stream rect. - // The stream rect is part of the 'crop region', as described below. - // https://source.android.com/devices/camera/camera3_crop_reprocess.html - Rect cropRect = mBuilder.get(CaptureRequest.SCALER_CROP_REGION); - // For now, we don't care about x and y position. Rect should be non-null, but let's be safe. - int cropRectWidth = cropRect == null ? referenceSize.getWidth() : cropRect.width(); - int cropRectHeight = cropRect == null ? referenceSize.getHeight() : cropRect.height(); - // The stream is always centered inside the crop region, and one of the dimensions - // should always match. We just increase the other one. - referencePoint.x += (cropRectWidth - referenceSize.getWidth()) / 2F; - referencePoint.y += (cropRectHeight - referenceSize.getHeight()) / 2F; - return new Size(cropRectWidth, cropRectHeight); - } - - @NonNull - private Size applyActiveArrayCoordinates(@NonNull Size referenceSize, @NonNull PointF referencePoint) { - // The input point and size refer to the scaler crop region. - // We can query for the crop region position inside the active array, so this is easy. - Rect cropRect = mBuilder.get(CaptureRequest.SCALER_CROP_REGION); - referencePoint.x += cropRect == null ? 0 : cropRect.left; - referencePoint.y += cropRect == null ? 0 : cropRect.top; - // Finally, get the active rect width and height from characteristics. - Rect activeRect = mCharacteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE); - if (activeRect == null) { // Should never happen - activeRect = new Rect(0, 0, referenceSize.getWidth(), referenceSize.getHeight()); - } - return new Size(activeRect.width(), activeRect.height()); - } - - /** - * Creates a metering rectangle around the center point. - * The rectangle will have a size that's a factor of the visible width and height. - * The rectangle will also be constrained to be inside the given boundaries, - * so we don't exceed them in case the center point is exactly on one side for example. 
- * @return a new rectangle - */ - @NonNull - private MeteringRectangle createMeteringRectangle( - @NonNull Size boundaries, - @NonNull PointF center, - @NonNull Size visibleSize, - float factor, - int weight) { - float rectangleWidth = factor * visibleSize.getWidth(); - float rectangleHeight = factor * visibleSize.getHeight(); - float rectangleLeft = center.x - rectangleWidth / 2F; - float rectangleTop = center.y - rectangleHeight / 2F; - // Respect boundaries - if (rectangleLeft < 0) rectangleLeft = 0; - if (rectangleTop < 0) rectangleTop = 0; - if (rectangleLeft + rectangleWidth > boundaries.getWidth()) { - rectangleWidth = boundaries.getWidth() - rectangleLeft; - } - if (rectangleTop + rectangleHeight > boundaries.getHeight()) { - rectangleHeight = boundaries.getHeight() - rectangleTop; - } - return new MeteringRectangle( - (int) rectangleLeft, - (int) rectangleTop, - (int) rectangleWidth, - (int) rectangleHeight, - weight - ); - } - - /** - * True if we're metering. False if we're not, for example if we're waiting for - * a reset call, or if {@link #startMetering(PointF, Gesture)} was never called. - * @return true if metering - */ - @SuppressWarnings("WeakerAccess") - public boolean isMetering() { - return mIsMetering; - } - - /** - * Should be called when we have partial or total CaptureResults, - * but only while {@link #isMetering()} returns true. - * @param result result - */ - @SuppressWarnings("WeakerAccess") - public void onCapture(@NonNull CaptureResult result) { - if (!mIsMetering) return; // We're not interested in results anymore - if (!(result instanceof TotalCaptureResult)) return; // Let's ignore these, contents are missing/wrong - - if (!mAutoFocus.isMetered()) mAutoFocus.onCapture(result); - if (!mAutoExposure.isMetered()) mAutoExposure.onCapture(result); - if (!mAutoWhiteBalance.isMetered()) mAutoWhiteBalance.onCapture(result); - if (mAutoFocus.isMetered() && mAutoExposure.isMetered() && mAutoWhiteBalance.isMetered()) { - LOG.i("onCapture:", "all MeteringParameters have converged. Dispatching onMeteringEnd"); - boolean success = mAutoFocus.isSuccessful() - && mAutoExposure.isSuccessful() - && mAutoWhiteBalance.isSuccessful(); - onMeteringEnd(success); - } else if (System.currentTimeMillis() - mMeteringStartTime >= FORCED_END_DELAY) { - LOG.i("onCapture:", "FORCED_END_DELAY was reached. Some MeteringParameter is stuck. Forcing end."); - onMeteringEnd(false); - } - } - - private void onMeteringEnd(boolean success) { - mCallback.onMeteringEnd(mPoint, mGesture, success); - mIsMetering = false; - mEngine.mHandler.remove(mResetRunnable); - if (mEngine.shouldResetAutoFocus()) { - mEngine.mHandler.post(mEngine.getAutoFocusResetDelay(), mResetRunnable); - } - } - - /** - * Can be called to perform the reset at a time different than the one - * specified by the {@link CameraEngine} reset delay. 
- */ - @SuppressWarnings("WeakerAccess") - public void resetMetering() { - mEngine.mHandler.remove(mResetRunnable); - if (mCallback.canResetMetering(mPoint, mGesture)) { - LOG.i("Resetting the meter parameters."); - Rect whole = mCharacteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE); - if (whole == null) whole = new Rect(); - MeteringRectangle rectangle = new MeteringRectangle(whole, MeteringRectangle.METERING_WEIGHT_DONT_CARE); - mAutoFocus.resetMetering(mCharacteristics, mBuilder, rectangle); - mAutoWhiteBalance.resetMetering(mCharacteristics, mBuilder, rectangle); - mAutoExposure.resetMetering(mCharacteristics, mBuilder, rectangle); - mCallback.onMeteringReset(mPoint, mGesture); - } - } - - private Runnable mResetRunnable = new Runnable() { - @Override - public void run() { - resetMetering(); - } - }; -} diff --git a/cameraview/src/main/java/com/otaliastudios/cameraview/engine/action/Action.java b/cameraview/src/main/java/com/otaliastudios/cameraview/engine/action/Action.java new file mode 100644 index 00000000..ba4a00de --- /dev/null +++ b/cameraview/src/main/java/com/otaliastudios/cameraview/engine/action/Action.java @@ -0,0 +1,85 @@ +package com.otaliastudios.cameraview.engine.action; + +import android.hardware.camera2.CameraCaptureSession; +import android.hardware.camera2.CameraCaptureSession.CaptureCallback; +import android.hardware.camera2.CaptureRequest; +import android.hardware.camera2.CaptureResult; +import android.hardware.camera2.TotalCaptureResult; +import android.os.Build; + +import androidx.annotation.NonNull; +import androidx.annotation.RequiresApi; + +/** + * The Action class encapsulates logic for completing an action in a Camera2 environment. + * In this case, we are often interested in constantly receiving the {@link CaptureResult} + * and {@link CaptureRequest} callbacks, as well as applying changes to a {@link CaptureRequest.Builder} + * and having them applied to the sensor. + * + * The Action class receives the given callbacks and can operate over the engine + * through the {@link ActionHolder} object. + * + * Each Action operates on a given state in a given moment. This base class offers the + * {@link #STATE_COMPLETED} state which is common to all actions. + * + * See {@link BaseAction} for a base implementation. + */ +@RequiresApi(Build.VERSION_CODES.LOLLIPOP) +public interface Action { + + int STATE_COMPLETED = Integer.MAX_VALUE; + + /** + * Returns the current state. + * @return the state + */ + int getState(); + + /** + * Starts this action. + * @param holder the holder + */ + void start(@NonNull ActionHolder holder); + + /** + * Aborts this action. + * @param holder the holder + */ + void abort(@NonNull ActionHolder holder); + + /** + * Adds an {@link ActionCallback} to receive state + * change events. + * @param callback a callback + */ + void addCallback(@NonNull ActionCallback callback); + + /** + * Removes a previously added callback. + * @param callback a callback + */ + void removeCallback(@NonNull ActionCallback callback); + + /** + * Called from {@link CaptureCallback#onCaptureStarted(CameraCaptureSession, CaptureRequest, long, long)}. + * @param holder the holder + * @param request the request + */ + void onCaptureStarted(@NonNull ActionHolder holder, @NonNull CaptureRequest request); + + /** + * Called from {@link CaptureCallback#onCaptureProgressed(CameraCaptureSession, CaptureRequest, CaptureResult)}. 
+ * @param holder the holder + * @param request the request + * @param result the result + */ + void onCaptureProgressed(@NonNull ActionHolder holder, @NonNull CaptureRequest request, @NonNull CaptureResult result); + + /** + * Called from {@link CaptureCallback#onCaptureCompleted(CameraCaptureSession, CaptureRequest, TotalCaptureResult)}. + * @param holder the holder + * @param request the request + * @param result the result + */ + void onCaptureCompleted(@NonNull ActionHolder holder, @NonNull CaptureRequest request, @NonNull TotalCaptureResult result); +} diff --git a/cameraview/src/main/java/com/otaliastudios/cameraview/engine/action/ActionCallback.java b/cameraview/src/main/java/com/otaliastudios/cameraview/engine/action/ActionCallback.java new file mode 100644 index 00000000..6d327cb7 --- /dev/null +++ b/cameraview/src/main/java/com/otaliastudios/cameraview/engine/action/ActionCallback.java @@ -0,0 +1,23 @@ +package com.otaliastudios.cameraview.engine.action; + +import android.os.Build; + +import androidx.annotation.NonNull; +import androidx.annotation.RequiresApi; + +/** + * A callback for {@link Action} state changes. + * See the action class. + * + * See also {@link CompletionCallback}. + */ +@RequiresApi(Build.VERSION_CODES.LOLLIPOP) +public interface ActionCallback { + + /** + * Action state has just changed. + * @param action action + * @param state new state + */ + void onActionStateChanged(@NonNull Action action, int state); +} diff --git a/cameraview/src/main/java/com/otaliastudios/cameraview/engine/action/ActionHolder.java b/cameraview/src/main/java/com/otaliastudios/cameraview/engine/action/ActionHolder.java new file mode 100644 index 00000000..1673d10a --- /dev/null +++ b/cameraview/src/main/java/com/otaliastudios/cameraview/engine/action/ActionHolder.java @@ -0,0 +1,81 @@ +package com.otaliastudios.cameraview.engine.action; + +import android.hardware.camera2.CameraAccessException; +import android.hardware.camera2.CameraCharacteristics; +import android.hardware.camera2.CaptureRequest; +import android.hardware.camera2.CaptureResult; +import android.hardware.camera2.TotalCaptureResult; +import android.os.Build; + +import androidx.annotation.NonNull; +import androidx.annotation.RequiresApi; + +/** + * The holder of {@link Action}. + * + * This class should keep a list or set of currently running actions, and offers + * to them the base Camera2 objects that are needed to apply changes. + * + * This class, or an holder of it, should also forward the capture callbacks + * to all {@link Action}s. See {@link com.otaliastudios.cameraview.engine.Camera2Engine} for + * our implementation. + */ +@RequiresApi(Build.VERSION_CODES.LOLLIPOP) +public interface ActionHolder { + + /** + * Adds a new action + * @param action action + */ + void addAction(@NonNull Action action); + + /** + * Removes a previously added action + * @param action action + */ + void removeAction(@NonNull Action action); + + /** + * Returns the {@link CameraCharacteristics} of the current + * camera device. + * @param action action + * @return characteristics + */ + @NonNull + CameraCharacteristics getCharacteristics(@NonNull Action action); + + /** + * Returns the latest {@link TotalCaptureResult}. Can be used + * by actions to start querying the state before receiving their + * first frame. 
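As a rough sketch of how an ActionHolder implementation is expected to feed these callbacks: assume a hypothetical HolderImpl that implements ActionHolder (the class, field and callback names below are assumptions; the real forwarding for Camera2Engine happens in its repeating-request CaptureCallback, which is not shown in full in this hunk):

```java
// Rough sketch: a hypothetical HolderImpl (implements ActionHolder) forwarding
// every CameraCaptureSession callback to the currently registered actions.
private final List<Action> mActions = new ArrayList<>();
private TotalCaptureResult mLastResult; // returned by getLastResult(Action)

private final CameraCaptureSession.CaptureCallback mCaptureCallback
        = new CameraCaptureSession.CaptureCallback() {
    @Override
    public void onCaptureStarted(@NonNull CameraCaptureSession session,
                                 @NonNull CaptureRequest request,
                                 long timestamp, long frameNumber) {
        // Iterate a copy: completed actions remove themselves from the holder.
        for (Action action : new ArrayList<>(mActions)) {
            action.onCaptureStarted(HolderImpl.this, request);
        }
    }

    @Override
    public void onCaptureProgressed(@NonNull CameraCaptureSession session,
                                    @NonNull CaptureRequest request,
                                    @NonNull CaptureResult partialResult) {
        for (Action action : new ArrayList<>(mActions)) {
            action.onCaptureProgressed(HolderImpl.this, request, partialResult);
        }
    }

    @Override
    public void onCaptureCompleted(@NonNull CameraCaptureSession session,
                                   @NonNull CaptureRequest request,
                                   @NonNull TotalCaptureResult result) {
        mLastResult = result;
        for (Action action : new ArrayList<>(mActions)) {
            action.onCaptureCompleted(HolderImpl.this, request, result);
        }
    }
};
```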
+ * @param action action + * @return last result + */ + @NonNull + TotalCaptureResult getLastResult(@NonNull Action action); + + /** + * Returns the current {@link CaptureRequest.Builder} so that + * actions can apply changes to it and later submit them. + * @param action action + * @return the builder + */ + @NonNull + CaptureRequest.Builder getBuilder(@NonNull Action action); + + /** + * Applies the current builder (as per {@link #getBuilder(Action)}) + * as a repeating request on the preview. + * @param source action + */ + void applyBuilder(@NonNull Action source); + + /** + * Applies the given builder as a single capture request. + * Callers can catch the exception and choose what to do. + * @param source action + * @param builder builder + * @throws CameraAccessException camera exception + */ + void applyBuilder(@NonNull Action source, @NonNull CaptureRequest.Builder builder) throws CameraAccessException; +} diff --git a/cameraview/src/main/java/com/otaliastudios/cameraview/engine/action/ActionWrapper.java b/cameraview/src/main/java/com/otaliastudios/cameraview/engine/action/ActionWrapper.java new file mode 100644 index 00000000..799cf31e --- /dev/null +++ b/cameraview/src/main/java/com/otaliastudios/cameraview/engine/action/ActionWrapper.java @@ -0,0 +1,63 @@ +package com.otaliastudios.cameraview.engine.action; + +import android.hardware.camera2.CaptureRequest; +import android.hardware.camera2.CaptureResult; +import android.hardware.camera2.TotalCaptureResult; +import android.os.Build; + +import androidx.annotation.NonNull; +import androidx.annotation.RequiresApi; + +/** + * A simple wrapper around a {@link BaseAction}. + * This can be used to add functionality around a base action. + */ +@RequiresApi(Build.VERSION_CODES.LOLLIPOP) +public abstract class ActionWrapper extends BaseAction { + + /** + * Should return the wrapped action. 
+ * @return the wrapped action + */ + @NonNull + public abstract BaseAction getAction(); + + @Override + protected void onStart(@NonNull ActionHolder holder) { + super.onStart(holder); + getAction().addCallback(new ActionCallback() { + @Override + public void onActionStateChanged(@NonNull Action action, int state) { + setState(state); + if (state == STATE_COMPLETED) { + action.removeCallback(this); + } + } + }); + getAction().onStart(holder); + } + + @Override + protected void onAbort(@NonNull ActionHolder holder) { + super.onAbort(holder); + getAction().onAbort(holder); + } + + @Override + public void onCaptureStarted(@NonNull ActionHolder holder, @NonNull CaptureRequest request) { + super.onCaptureStarted(holder, request); + getAction().onCaptureStarted(holder, request); + } + + @Override + public void onCaptureProgressed(@NonNull ActionHolder holder, @NonNull CaptureRequest request, @NonNull CaptureResult result) { + super.onCaptureProgressed(holder, request, result); + getAction().onCaptureProgressed(holder, request, result); + } + + @Override + public void onCaptureCompleted(@NonNull ActionHolder holder, @NonNull CaptureRequest request, @NonNull TotalCaptureResult result) { + super.onCaptureCompleted(holder, request, result); + getAction().onCaptureCompleted(holder, request, result); + } +} diff --git a/cameraview/src/main/java/com/otaliastudios/cameraview/engine/action/Actions.java b/cameraview/src/main/java/com/otaliastudios/cameraview/engine/action/Actions.java new file mode 100644 index 00000000..533af79b --- /dev/null +++ b/cameraview/src/main/java/com/otaliastudios/cameraview/engine/action/Actions.java @@ -0,0 +1,56 @@ +package com.otaliastudios.cameraview.engine.action; + +import android.os.Build; + +import androidx.annotation.NonNull; +import androidx.annotation.RequiresApi; + +import java.util.Arrays; + +/** + * Utilities for creating {@link Action} sequences. + */ +@RequiresApi(Build.VERSION_CODES.LOLLIPOP) +public class Actions { + + /** + * Creates a {@link BaseAction} that executes all the child actions + * together, at the same time, and completes once all of them are + * completed. + * + * @param actions input actions + * @return a new action + */ + @NonNull + public static BaseAction together(@NonNull BaseAction... actions) { + return new TogetherAction(Arrays.asList(actions)); + } + + /** + * Creates a {@link BaseAction} that executes all the child actions + * in sequence, waiting for the first to complete, then going on with + * the second and so on, finally completing when all are completed. + * + * @param actions input actions + * @return a new action + */ + @NonNull + public static BaseAction sequence(@NonNull BaseAction... actions) { + return new SequenceAction(Arrays.asList(actions)); + } + + /** + * Creates a {@link BaseAction} that completes as normal, but is also + * forced to complete if the given timeout is reached, by calling + * {@link Action#abort(ActionHolder)}. 
+ * + * @param timeoutMillis timeout in milliseconds + * @param action action + * @return a new action + */ + @NonNull + public static BaseAction timeout(long timeoutMillis, @NonNull BaseAction action) { + return new TimeoutAction(timeoutMillis, action); + } + +} diff --git a/cameraview/src/main/java/com/otaliastudios/cameraview/engine/action/BaseAction.java b/cameraview/src/main/java/com/otaliastudios/cameraview/engine/action/BaseAction.java new file mode 100644 index 00000000..0c3d29e2 --- /dev/null +++ b/cameraview/src/main/java/com/otaliastudios/cameraview/engine/action/BaseAction.java @@ -0,0 +1,160 @@ +package com.otaliastudios.cameraview.engine.action; + +import android.hardware.camera2.CameraCharacteristics; +import android.hardware.camera2.CaptureRequest; +import android.hardware.camera2.CaptureResult; +import android.hardware.camera2.TotalCaptureResult; +import android.os.Build; + +import androidx.annotation.CallSuper; +import androidx.annotation.NonNull; +import androidx.annotation.RequiresApi; + +import java.util.ArrayList; +import java.util.List; + +/** + * The base implementation of {@link Action} that should always be subclassed, + * instead of implementing the root interface itself. + * + * It holds a list of callbacks and dispatches events to them, plus it cares about + * its own lifecycle: + * - when {@link #start(ActionHolder)} is called, we add ourselves to the holder list + * - when {@link #STATE_COMPLETED} is reached, we remove ouverselves from the holder list + * + * This is very important in all cases. + */ +@RequiresApi(Build.VERSION_CODES.LOLLIPOP) +public abstract class BaseAction implements Action { + + private final List callbacks = new ArrayList<>(); + private int state; + private ActionHolder holder; + + @Override + public final int getState() { + return state; + } + + @Override + public final void start(@NonNull ActionHolder holder) { + holder.addAction(this); + onStart(holder); + } + + @Override + public final void abort(@NonNull ActionHolder holder) { + holder.removeAction(this); + if (!isCompleted()) { + onAbort(holder); + setState(STATE_COMPLETED); + } + } + + /** + * Action was started and will soon receive events from the + * holder stream. + * @param holder holder + */ + @CallSuper + protected void onStart(@NonNull ActionHolder holder) { + this.holder = holder; // must be here + // Overrideable + } + + /** + * Action was aborted and will not receive events from the + * holder stream anymore. It will soon be marked as completed. + * @param holder holder + */ + @SuppressWarnings("unused") + protected void onAbort(@NonNull ActionHolder holder) { + // Overrideable + } + + @Override + public void onCaptureStarted(@NonNull ActionHolder holder, @NonNull CaptureRequest request) { + // Overrideable + } + + @Override + public void onCaptureProgressed(@NonNull ActionHolder holder, @NonNull CaptureRequest request, @NonNull CaptureResult result) { + // Overrideable + } + + @Override + public void onCaptureCompleted(@NonNull ActionHolder holder, @NonNull CaptureRequest request, @NonNull TotalCaptureResult result) { + // Overrideable + } + + /** + * Called by subclasses to notify of their state. If state is {@link #STATE_COMPLETED}, + * this removes this action from the holder. 
+ * @param newState new state + */ + protected void setState(int newState) { + if (newState != state) { + state = newState; + for (ActionCallback callback : callbacks) { + callback.onActionStateChanged(this, state); + } + if (state == STATE_COMPLETED) { + holder.removeAction(this); + onCompleted(holder); + } + } + } + + /** + * Whether this action has reached the completed state. + * @return true if completed + */ + public boolean isCompleted() { + return state == STATE_COMPLETED; + } + + /** + * Called when this action has completed (possibly aborted). + * @param holder holder + */ + protected void onCompleted(@NonNull ActionHolder holder) { + // Overrideable + } + + /** + * Returns the holder. + * @return the holder + */ + @NonNull + protected ActionHolder getHolder() { + return holder; + } + + + /** + * Reads a characteristic with a fallback. + * @param key key + * @param fallback fallback + * @param key type + * @return value or fallback + */ + @NonNull + protected T readCharacteristic(@NonNull CameraCharacteristics.Key key, + @NonNull T fallback) { + T value = holder.getCharacteristics(this).get(key); + return value == null ? fallback : value; + } + + @Override + public void addCallback(@NonNull ActionCallback callback) { + if (!callbacks.contains(callback)) { + callbacks.add(callback); + callback.onActionStateChanged(this, getState()); + } + } + + @Override + public void removeCallback(@NonNull ActionCallback callback) { + callbacks.remove(callback); + } +} diff --git a/cameraview/src/main/java/com/otaliastudios/cameraview/engine/action/CompletionCallback.java b/cameraview/src/main/java/com/otaliastudios/cameraview/engine/action/CompletionCallback.java new file mode 100644 index 00000000..5029693b --- /dev/null +++ b/cameraview/src/main/java/com/otaliastudios/cameraview/engine/action/CompletionCallback.java @@ -0,0 +1,27 @@ +package com.otaliastudios.cameraview.engine.action; + +import android.os.Build; + +import androidx.annotation.NonNull; +import androidx.annotation.RequiresApi; + +/** + * A special {@link ActionCallback} that just checks for the + * completed state. Handy as an inner anonymous class. + */ +@RequiresApi(Build.VERSION_CODES.LOLLIPOP) +public abstract class CompletionCallback implements ActionCallback { + + @Override + public final void onActionStateChanged(@NonNull Action action, int state) { + if (state == Action.STATE_COMPLETED) { + onActionCompleted(action); + } + } + + /** + * The given action has just reached the completed state. + * @param action action + */ + protected abstract void onActionCompleted(@NonNull Action action); +} diff --git a/cameraview/src/main/java/com/otaliastudios/cameraview/engine/action/SequenceAction.java b/cameraview/src/main/java/com/otaliastudios/cameraview/engine/action/SequenceAction.java new file mode 100644 index 00000000..4453877a --- /dev/null +++ b/cameraview/src/main/java/com/otaliastudios/cameraview/engine/action/SequenceAction.java @@ -0,0 +1,94 @@ +package com.otaliastudios.cameraview.engine.action; + +import android.hardware.camera2.CaptureRequest; +import android.hardware.camera2.CaptureResult; +import android.hardware.camera2.TotalCaptureResult; +import android.os.Build; + +import androidx.annotation.NonNull; +import androidx.annotation.RequiresApi; + +import java.util.List; + +/** + * Executes a list of actions in sequence, completing once + * the last of them has been completed. 
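To make the lifecycle concrete, a hypothetical minimal subclass (not part of the library) that completes once auto-exposure converges; reaching the completed state removes it from the holder automatically:

```java
// Hypothetical BaseAction: marks itself completed when AE reports CONVERGED.
@RequiresApi(Build.VERSION_CODES.LOLLIPOP)
class AeConvergedAction extends BaseAction {
    @Override
    public void onCaptureCompleted(@NonNull ActionHolder holder,
                                   @NonNull CaptureRequest request,
                                   @NonNull TotalCaptureResult result) {
        super.onCaptureCompleted(holder, request, result);
        Integer aeState = result.get(CaptureResult.CONTROL_AE_STATE);
        if (aeState != null && aeState == CaptureResult.CONTROL_AE_STATE_CONVERGED) {
            setState(STATE_COMPLETED); // notifies callbacks, holder removes the action
        }
    }
}
```

It can be started directly with `new AeConvergedAction().start(holder)`, or wrapped via `Actions.timeout(...)` so it cannot wait forever.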
+ */ +@RequiresApi(Build.VERSION_CODES.LOLLIPOP) +class SequenceAction extends BaseAction { + // Need to be BaseAction so we can call onStart() instead of start() + private final List actions; + private int runningAction = -1; + + SequenceAction(@NonNull List actions) { + this.actions = actions; + increaseRunningAction(); + } + + private void increaseRunningAction() { + boolean first = runningAction == -1; + boolean last = runningAction == actions.size() - 1; + if (last) { + // This was the last action. We're done. + setState(STATE_COMPLETED); + } else { + runningAction++; + actions.get(runningAction).addCallback(new ActionCallback() { + @Override + public void onActionStateChanged(@NonNull Action action, int state) { + if (state == STATE_COMPLETED) { + action.removeCallback(this); + increaseRunningAction(); + } + } + }); + if (!first) { + actions.get(runningAction).onStart(getHolder()); + } + } + } + + @Override + protected void onStart(@NonNull ActionHolder holder) { + super.onStart(holder); + if (runningAction >= 0) { + actions.get(runningAction).onStart(holder); + } + } + + @Override + protected void onAbort(@NonNull ActionHolder holder) { + super.onAbort(holder); + if (runningAction >= 0) { + // Previous actions have been completed already. + // Future actions will never start. So this is OK. + actions.get(runningAction).onAbort(holder); + } + } + + @Override + public void onCaptureStarted(@NonNull ActionHolder holder, @NonNull CaptureRequest request) { + super.onCaptureStarted(holder, request); + if (runningAction >= 0) { + actions.get(runningAction).onCaptureStarted(holder, request); + } + } + + @Override + public void onCaptureProgressed(@NonNull ActionHolder holder, @NonNull CaptureRequest request, + @NonNull CaptureResult result) { + super.onCaptureProgressed(holder, request, result); + if (runningAction >= 0) { + actions.get(runningAction).onCaptureProgressed(holder, request, result); + } + } + + @Override + public void onCaptureCompleted(@NonNull ActionHolder holder, @NonNull CaptureRequest request, + @NonNull TotalCaptureResult result) { + super.onCaptureCompleted(holder, request, result); + if (runningAction >= 0) { + actions.get(runningAction).onCaptureCompleted(holder, request, result); + } + } +} diff --git a/cameraview/src/main/java/com/otaliastudios/cameraview/engine/action/TimeoutAction.java b/cameraview/src/main/java/com/otaliastudios/cameraview/engine/action/TimeoutAction.java new file mode 100644 index 00000000..c1e7ed55 --- /dev/null +++ b/cameraview/src/main/java/com/otaliastudios/cameraview/engine/action/TimeoutAction.java @@ -0,0 +1,50 @@ +package com.otaliastudios.cameraview.engine.action; + +import android.hardware.camera2.CaptureRequest; +import android.hardware.camera2.TotalCaptureResult; +import android.os.Build; + +import androidx.annotation.NonNull; +import androidx.annotation.RequiresApi; + +/** + * An {@link Action} that wraps another, and forces the completion + * after the given timeout in milliseconds is reached. 
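The combinators nest freely. An illustrative composition, reusing the hypothetical AeConvergedAction sketched above together with the MeterResetAction used by the engine (the 2500ms value mirrors the removed Meter's forced-end delay and is otherwise arbitrary):

```java
// Illustrative nesting of the combinators: wait for AE, then reset metering,
// all bounded by a timeout. "holder" is the ActionHolder, e.g. the Camera2Engine.
BaseAction flow = Actions.timeout(2500,
        Actions.sequence(new AeConvergedAction(), new MeterResetAction()));
flow.start(holder);
```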
+ */ +@RequiresApi(Build.VERSION_CODES.LOLLIPOP) +class TimeoutAction extends ActionWrapper { + + private long startMillis; + private long timeoutMillis; + private BaseAction action; + + TimeoutAction(long timeoutMillis, @NonNull BaseAction action) { + this.timeoutMillis = timeoutMillis; + this.action = action; + } + + @NonNull + @Override + public BaseAction getAction() { + return action; + } + + @Override + protected void onStart(@NonNull ActionHolder holder) { + startMillis = System.currentTimeMillis(); + super.onStart(holder); + } + + @Override + public void onCaptureCompleted(@NonNull ActionHolder holder, + @NonNull CaptureRequest request, + @NonNull TotalCaptureResult result) { + super.onCaptureCompleted(holder, request, result); + if (!isCompleted()) { + if (System.currentTimeMillis() > startMillis + timeoutMillis) { + // This will set our state to COMPLETED and stop requests. + getAction().abort(holder); + } + } + } +} diff --git a/cameraview/src/main/java/com/otaliastudios/cameraview/engine/action/TogetherAction.java b/cameraview/src/main/java/com/otaliastudios/cameraview/engine/action/TogetherAction.java new file mode 100644 index 00000000..a21da923 --- /dev/null +++ b/cameraview/src/main/java/com/otaliastudios/cameraview/engine/action/TogetherAction.java @@ -0,0 +1,84 @@ +package com.otaliastudios.cameraview.engine.action; + +import android.hardware.camera2.CaptureRequest; +import android.hardware.camera2.CaptureResult; +import android.hardware.camera2.TotalCaptureResult; +import android.os.Build; + +import androidx.annotation.NonNull; +import androidx.annotation.RequiresApi; + +import java.util.ArrayList; +import java.util.List; + +/** + * Performs a list of actions together, completing + * once all of them have completed. + */ +@RequiresApi(Build.VERSION_CODES.LOLLIPOP) +class TogetherAction extends BaseAction { + // Need to be BaseAction so we can call onStart() instead of start() + private final List actions; + private final List runningActions; + + TogetherAction(@NonNull final List actions) { + this.actions = new ArrayList<>(actions); + this.runningActions = new ArrayList<>(actions); + for (BaseAction action : actions) { + action.addCallback(new ActionCallback() { + @Override + public void onActionStateChanged(@NonNull Action action, int state) { + if (state == STATE_COMPLETED) { + //noinspection SuspiciousMethodCalls + runningActions.remove(action); + } + if (runningActions.isEmpty()) { + setState(STATE_COMPLETED); + } + } + }); + } + } + + @Override + protected void onStart(@NonNull ActionHolder holder) { + super.onStart(holder); + for (BaseAction action : actions) { + if (!action.isCompleted()) action.onStart(holder); + } + } + + @Override + protected void onAbort(@NonNull ActionHolder holder) { + super.onAbort(holder); + for (BaseAction action : actions) { + if (!action.isCompleted()) action.onAbort(holder); + } + } + + @Override + public void onCaptureStarted(@NonNull ActionHolder holder, @NonNull CaptureRequest request) { + super.onCaptureStarted(holder, request); + for (BaseAction action : actions) { + if (!action.isCompleted()) action.onCaptureStarted(holder, request); + } + } + + @Override + public void onCaptureProgressed(@NonNull ActionHolder holder, @NonNull CaptureRequest request, + @NonNull CaptureResult result) { + super.onCaptureProgressed(holder, request, result); + for (BaseAction action : actions) { + if (!action.isCompleted()) action.onCaptureProgressed(holder, request, result); + } + } + + @Override + public void onCaptureCompleted(@NonNull 
ActionHolder holder, @NonNull CaptureRequest request, + @NonNull TotalCaptureResult result) { + super.onCaptureCompleted(holder, request, result); + for (BaseAction action : actions) { + if (!action.isCompleted()) action.onCaptureCompleted(holder, request, result); + } + } +} diff --git a/cameraview/src/main/java/com/otaliastudios/cameraview/engine/lock/BaseLock.java b/cameraview/src/main/java/com/otaliastudios/cameraview/engine/lock/BaseLock.java new file mode 100644 index 00000000..d3806443 --- /dev/null +++ b/cameraview/src/main/java/com/otaliastudios/cameraview/engine/lock/BaseLock.java @@ -0,0 +1,32 @@ +package com.otaliastudios.cameraview.engine.lock; + +import android.hardware.camera2.CameraCharacteristics; +import android.os.Build; + +import androidx.annotation.NonNull; +import androidx.annotation.RequiresApi; + +import com.otaliastudios.cameraview.engine.action.ActionHolder; +import com.otaliastudios.cameraview.engine.action.BaseAction; + +@RequiresApi(Build.VERSION_CODES.LOLLIPOP) +public abstract class BaseLock extends BaseAction { + + @Override + protected final void onStart(@NonNull ActionHolder holder) { + super.onStart(holder); + boolean isSkipped = checkShouldSkip(holder); + boolean isSupported = checkIsSupported(holder); + if (isSupported && !isSkipped) { + onStarted(holder); + } else { + setState(STATE_COMPLETED); + } + } + + protected abstract void onStarted(@NonNull ActionHolder holder); + + protected abstract boolean checkShouldSkip(@NonNull ActionHolder holder); + + protected abstract boolean checkIsSupported(@NonNull ActionHolder holder); +} diff --git a/cameraview/src/main/java/com/otaliastudios/cameraview/engine/lock/ExposureLock.java b/cameraview/src/main/java/com/otaliastudios/cameraview/engine/lock/ExposureLock.java new file mode 100644 index 00000000..4e221280 --- /dev/null +++ b/cameraview/src/main/java/com/otaliastudios/cameraview/engine/lock/ExposureLock.java @@ -0,0 +1,78 @@ +package com.otaliastudios.cameraview.engine.lock; + +import android.hardware.camera2.CameraCharacteristics; +import android.hardware.camera2.CaptureRequest; +import android.hardware.camera2.CaptureResult; +import android.hardware.camera2.TotalCaptureResult; +import android.os.Build; + +import androidx.annotation.NonNull; +import androidx.annotation.RequiresApi; + +import com.otaliastudios.cameraview.CameraLogger; +import com.otaliastudios.cameraview.engine.action.ActionHolder; + +@RequiresApi(Build.VERSION_CODES.LOLLIPOP) +public class ExposureLock extends BaseLock { + + private final static String TAG = ExposureLock.class.getSimpleName(); + private final static CameraLogger LOG = CameraLogger.create(TAG); + + @Override + protected boolean checkIsSupported(@NonNull ActionHolder holder) { + boolean isNotLegacy = readCharacteristic(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL, -1) + != CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY; + // Not sure we should check aeMode as well, probably all aeModes support locking, + // but this should not be a big issue since we're not even using different AE modes. 
+ Integer aeMode = holder.getBuilder(this).get(CaptureRequest.CONTROL_AE_MODE); + boolean isAEOn = aeMode != null && + (aeMode == CameraCharacteristics.CONTROL_AE_MODE_ON + || aeMode == CameraCharacteristics.CONTROL_AE_MODE_ON_ALWAYS_FLASH + || aeMode == CameraCharacteristics.CONTROL_AE_MODE_ON_AUTO_FLASH + || aeMode == CameraCharacteristics.CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE + || aeMode == 5 /* CameraCharacteristics.CONTROL_AE_MODE_ON_EXTERNAL_FLASH, API 28 */); + boolean result = isNotLegacy && isAEOn; + LOG.i("checkIsSupported:", result); + return result; + } + + @Override + protected boolean checkShouldSkip(@NonNull ActionHolder holder) { + Integer aeState = holder.getLastResult(this).get(CaptureResult.CONTROL_AE_STATE); + boolean result = aeState != null && aeState == CaptureResult.CONTROL_AE_STATE_LOCKED; + LOG.i("checkShouldSkip:", result); + return result; + } + + @Override + protected void onStarted(@NonNull ActionHolder holder) { + int cancelTrigger = Build.VERSION.SDK_INT >= 23 + ? CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER_CANCEL + : CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER_IDLE; + holder.getBuilder(this).set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER, cancelTrigger); + holder.getBuilder(this).set(CaptureRequest.CONTROL_AE_LOCK, true); + holder.applyBuilder(this); + } + + @Override + public void onCaptureCompleted(@NonNull ActionHolder holder, @NonNull CaptureRequest request, @NonNull TotalCaptureResult result) { + super.onCaptureCompleted(holder, request, result); + Integer aeState = result.get(CaptureResult.CONTROL_AE_STATE); + LOG.i("processCapture:", "aeState:", aeState); + if (aeState == null) return; + switch (aeState) { + case CaptureRequest.CONTROL_AE_STATE_LOCKED: { + setState(STATE_COMPLETED); + break; + } + case CaptureRequest.CONTROL_AE_STATE_PRECAPTURE: + case CaptureRequest.CONTROL_AE_STATE_CONVERGED: + case CaptureRequest.CONTROL_AE_STATE_INACTIVE: + case CaptureRequest.CONTROL_AE_STATE_SEARCHING: + case CaptureRequest.CONTROL_AE_STATE_FLASH_REQUIRED: { + // Wait... + break; + } + } + } +} diff --git a/cameraview/src/main/java/com/otaliastudios/cameraview/engine/lock/FocusLock.java b/cameraview/src/main/java/com/otaliastudios/cameraview/engine/lock/FocusLock.java new file mode 100644 index 00000000..adfa4d2f --- /dev/null +++ b/cameraview/src/main/java/com/otaliastudios/cameraview/engine/lock/FocusLock.java @@ -0,0 +1,82 @@ +package com.otaliastudios.cameraview.engine.lock; + +import android.hardware.camera2.CameraCharacteristics; +import android.hardware.camera2.CaptureRequest; +import android.hardware.camera2.CaptureResult; +import android.hardware.camera2.TotalCaptureResult; +import android.os.Build; + +import androidx.annotation.NonNull; +import androidx.annotation.RequiresApi; + +import com.otaliastudios.cameraview.CameraLogger; +import com.otaliastudios.cameraview.engine.action.ActionHolder; + +@RequiresApi(Build.VERSION_CODES.LOLLIPOP) +public class FocusLock extends BaseLock { + + private final static String TAG = FocusLock.class.getSimpleName(); + private final static CameraLogger LOG = CameraLogger.create(TAG); + + @Override + protected boolean checkIsSupported(@NonNull ActionHolder holder) { + // We'll lock by changing the AF mode to AUTO. + // In that mode, AF won't change unless someone starts a trigger operation. 
+ int[] modes = readCharacteristic(CameraCharacteristics.CONTROL_AF_AVAILABLE_MODES, new int[]{}); + for (int mode : modes) { + if (mode == CameraCharacteristics.CONTROL_AF_MODE_AUTO) { + return true; + } + } + return false; + } + + @Override + protected boolean checkShouldSkip(@NonNull ActionHolder holder) { + CaptureResult lastResult = holder.getLastResult(this); + Integer afState = lastResult.get(CaptureResult.CONTROL_AF_STATE); + boolean afStateOk = afState != null && + (afState == CaptureResult.CONTROL_AF_STATE_FOCUSED_LOCKED + || afState == CaptureResult.CONTROL_AF_STATE_NOT_FOCUSED_LOCKED + || afState == CaptureResult.CONTROL_AF_STATE_INACTIVE + || afState == CaptureResult.CONTROL_AF_STATE_PASSIVE_FOCUSED + || afState == CaptureResult.CONTROL_AF_STATE_PASSIVE_UNFOCUSED); + Integer afMode = lastResult.get(CaptureResult.CONTROL_AF_MODE); + boolean afModeOk = afMode != null && afMode == CaptureResult.CONTROL_AF_MODE_AUTO; + boolean result = afStateOk && afModeOk; + LOG.i("checkShouldSkip:", result); + return result; + } + + @Override + protected void onStarted(@NonNull ActionHolder holder) { + holder.getBuilder(this).set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_AUTO); + holder.getBuilder(this).set(CaptureRequest.CONTROL_AF_TRIGGER, CaptureRequest.CONTROL_AF_TRIGGER_CANCEL); + holder.applyBuilder(this); + } + + @Override + public void onCaptureCompleted(@NonNull ActionHolder holder, @NonNull CaptureRequest request, @NonNull TotalCaptureResult result) { + super.onCaptureCompleted(holder, request, result); + Integer afState = result.get(CaptureResult.CONTROL_AF_STATE); + Integer afMode = result.get(CaptureResult.CONTROL_AF_MODE); + LOG.i("onCapture:", "afState:", afState, "afMode:", afMode); + if (afState == null || afMode == null) return; + if (afMode != CaptureResult.CONTROL_AF_MODE_AUTO) return; + switch (afState) { + case CaptureRequest.CONTROL_AF_STATE_FOCUSED_LOCKED: + case CaptureRequest.CONTROL_AF_STATE_NOT_FOCUSED_LOCKED: + case CaptureRequest.CONTROL_AF_STATE_INACTIVE: + case CaptureRequest.CONTROL_AF_STATE_PASSIVE_FOCUSED: + case CaptureRequest.CONTROL_AF_STATE_PASSIVE_UNFOCUSED: { + setState(STATE_COMPLETED); + break; + } + case CaptureRequest.CONTROL_AF_STATE_ACTIVE_SCAN: + case CaptureRequest.CONTROL_AF_STATE_PASSIVE_SCAN: { + // Wait... 
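+ // (An active or passive scan is still running; a later capture result will report one of the locked or focused states handled above.)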
+ break; + } + } + } +} diff --git a/cameraview/src/main/java/com/otaliastudios/cameraview/engine/lock/LockAction.java b/cameraview/src/main/java/com/otaliastudios/cameraview/engine/lock/LockAction.java new file mode 100644 index 00000000..10aec763 --- /dev/null +++ b/cameraview/src/main/java/com/otaliastudios/cameraview/engine/lock/LockAction.java @@ -0,0 +1,26 @@ +package com.otaliastudios.cameraview.engine.lock; + +import android.os.Build; + +import androidx.annotation.NonNull; +import androidx.annotation.RequiresApi; + +import com.otaliastudios.cameraview.engine.action.ActionWrapper; +import com.otaliastudios.cameraview.engine.action.Actions; +import com.otaliastudios.cameraview.engine.action.BaseAction; + +@RequiresApi(Build.VERSION_CODES.LOLLIPOP) +public class LockAction extends ActionWrapper { + + private final BaseAction action = Actions.together( + new ExposureLock(), + new FocusLock(), + new WhiteBalanceLock() + ); + + @NonNull + @Override + public BaseAction getAction() { + return action; + } +} diff --git a/cameraview/src/main/java/com/otaliastudios/cameraview/engine/lock/WhiteBalanceLock.java b/cameraview/src/main/java/com/otaliastudios/cameraview/engine/lock/WhiteBalanceLock.java new file mode 100644 index 00000000..b49c2ab1 --- /dev/null +++ b/cameraview/src/main/java/com/otaliastudios/cameraview/engine/lock/WhiteBalanceLock.java @@ -0,0 +1,64 @@ +package com.otaliastudios.cameraview.engine.lock; + +import android.hardware.camera2.CameraCharacteristics; +import android.hardware.camera2.CaptureRequest; +import android.hardware.camera2.CaptureResult; +import android.hardware.camera2.TotalCaptureResult; +import android.os.Build; + +import androidx.annotation.NonNull; +import androidx.annotation.RequiresApi; + +import com.otaliastudios.cameraview.CameraLogger; +import com.otaliastudios.cameraview.engine.action.ActionHolder; + +@RequiresApi(Build.VERSION_CODES.LOLLIPOP) +public class WhiteBalanceLock extends BaseLock { + + private final static String TAG = WhiteBalanceLock.class.getSimpleName(); + private final static CameraLogger LOG = CameraLogger.create(TAG); + + @Override + protected boolean checkIsSupported(@NonNull ActionHolder holder) { + boolean isNotLegacy = readCharacteristic(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL, -1) + != CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY; + Integer awbMode = holder.getBuilder(this).get(CaptureRequest.CONTROL_AWB_MODE); + boolean result = isNotLegacy && awbMode != null && awbMode == CaptureRequest.CONTROL_AWB_MODE_AUTO; + LOG.i("checkIsSupported:", result); + return result; + } + + @Override + protected boolean checkShouldSkip(@NonNull ActionHolder holder) { + Integer awbState = holder.getLastResult(this).get(CaptureResult.CONTROL_AWB_STATE); + boolean result = awbState != null && awbState == CaptureRequest.CONTROL_AWB_STATE_LOCKED; + LOG.i("checkShouldSkip:", result); + return result; + } + + @Override + protected void onStarted(@NonNull ActionHolder holder) { + holder.getBuilder(this).set(CaptureRequest.CONTROL_AWB_LOCK, true); + holder.applyBuilder(this); + } + + @Override + public void onCaptureCompleted(@NonNull ActionHolder holder, @NonNull CaptureRequest request, @NonNull TotalCaptureResult result) { + super.onCaptureCompleted(holder, request, result); + Integer awbState = result.get(CaptureResult.CONTROL_AWB_STATE); + LOG.i("processCapture:", "awbState:", awbState); + if (awbState == null) return; + switch (awbState) { + case CaptureRequest.CONTROL_AWB_STATE_LOCKED: { + setState(STATE_COMPLETED); + break; 
+ } + case CaptureRequest.CONTROL_AWB_STATE_CONVERGED: + case CaptureRequest.CONTROL_AWB_STATE_INACTIVE: + case CaptureRequest.CONTROL_AWB_STATE_SEARCHING: { + // Wait... + break; + } + } + } +} diff --git a/cameraview/src/main/java/com/otaliastudios/cameraview/engine/meter/BaseMeter.java b/cameraview/src/main/java/com/otaliastudios/cameraview/engine/meter/BaseMeter.java new file mode 100644 index 00000000..d0c5154d --- /dev/null +++ b/cameraview/src/main/java/com/otaliastudios/cameraview/engine/meter/BaseMeter.java @@ -0,0 +1,62 @@ +package com.otaliastudios.cameraview.engine.meter; + +import android.hardware.camera2.params.MeteringRectangle; +import android.os.Build; + +import androidx.annotation.NonNull; +import androidx.annotation.RequiresApi; + +import com.otaliastudios.cameraview.CameraLogger; +import com.otaliastudios.cameraview.engine.action.ActionHolder; +import com.otaliastudios.cameraview.engine.action.BaseAction; + +import java.util.List; + +@RequiresApi(Build.VERSION_CODES.LOLLIPOP) +public abstract class BaseMeter extends BaseAction { + + private final static String TAG = BaseMeter.class.getSimpleName(); + private final static CameraLogger LOG = CameraLogger.create(TAG); + + private final List areas; + private boolean isSuccessful; + private boolean skipIfPossible; + + @SuppressWarnings("WeakerAccess") + protected BaseMeter(@NonNull List areas, boolean skipIfPossible) { + this.areas = areas; + this.skipIfPossible = skipIfPossible; + } + + @Override + protected final void onStart(@NonNull ActionHolder holder) { + super.onStart(holder); + boolean isSkipped = skipIfPossible && checkShouldSkip(holder); + boolean isSupported = checkIsSupported(holder); + if (isSupported && !isSkipped) { + LOG.i("onStart:", "supported and not skipped. Dispatching onStarted."); + onStarted(holder, areas); + } else { + LOG.i("onStart:", "not supported or skipped. 
Dispatching COMPLETED state."); + setSuccessful(true); + setState(STATE_COMPLETED); + } + } + + protected abstract void onStarted(@NonNull ActionHolder holder, + @NonNull List areas); + + protected abstract boolean checkShouldSkip(@NonNull ActionHolder holder); + + protected abstract boolean checkIsSupported(@NonNull ActionHolder holder); + + @SuppressWarnings("WeakerAccess") + protected void setSuccessful(boolean successful) { + isSuccessful = successful; + } + + @SuppressWarnings("WeakerAccess") + public boolean isSuccessful() { + return isSuccessful; + } +} diff --git a/cameraview/src/main/java/com/otaliastudios/cameraview/engine/meter/BaseReset.java b/cameraview/src/main/java/com/otaliastudios/cameraview/engine/meter/BaseReset.java new file mode 100644 index 00000000..b4ea622d --- /dev/null +++ b/cameraview/src/main/java/com/otaliastudios/cameraview/engine/meter/BaseReset.java @@ -0,0 +1,40 @@ +package com.otaliastudios.cameraview.engine.meter; + +import android.graphics.Rect; +import android.hardware.camera2.CameraCharacteristics; +import android.hardware.camera2.params.MeteringRectangle; +import android.os.Build; + +import androidx.annotation.NonNull; +import androidx.annotation.Nullable; +import androidx.annotation.RequiresApi; + +import com.otaliastudios.cameraview.engine.action.ActionHolder; +import com.otaliastudios.cameraview.engine.action.BaseAction; + +import java.util.List; + +@RequiresApi(Build.VERSION_CODES.LOLLIPOP) +public abstract class BaseReset extends BaseAction { + + private boolean resetArea; + + @SuppressWarnings("WeakerAccess") + protected BaseReset(boolean resetArea) { + this.resetArea = resetArea; + } + + @Override + protected final void onStart(@NonNull ActionHolder holder) { + super.onStart(holder); + MeteringRectangle area = null; + if (resetArea) { + Rect rect = readCharacteristic(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE, new Rect()); + area = new MeteringRectangle(rect, MeteringRectangle.METERING_WEIGHT_DONT_CARE); + } + onStarted(holder, area); + } + + protected abstract void onStarted(@NonNull ActionHolder holder, + @Nullable MeteringRectangle area); +} diff --git a/cameraview/src/main/java/com/otaliastudios/cameraview/engine/meter/ExposureMeter.java b/cameraview/src/main/java/com/otaliastudios/cameraview/engine/meter/ExposureMeter.java new file mode 100644 index 00000000..cfd0826f --- /dev/null +++ b/cameraview/src/main/java/com/otaliastudios/cameraview/engine/meter/ExposureMeter.java @@ -0,0 +1,150 @@ +package com.otaliastudios.cameraview.engine.meter; + +import android.hardware.camera2.CameraCharacteristics; +import android.hardware.camera2.CaptureRequest; +import android.hardware.camera2.CaptureResult; +import android.hardware.camera2.TotalCaptureResult; +import android.hardware.camera2.params.MeteringRectangle; +import android.os.Build; + +import androidx.annotation.NonNull; +import androidx.annotation.RequiresApi; + +import com.otaliastudios.cameraview.CameraLogger; +import com.otaliastudios.cameraview.engine.action.ActionHolder; + +import java.util.List; + +@RequiresApi(Build.VERSION_CODES.LOLLIPOP) +public class ExposureMeter extends BaseMeter { + + private static final String TAG = ExposureMeter.class.getSimpleName(); + private static final CameraLogger LOG = CameraLogger.create(TAG); + + private static final int STATE_WAITING_PRECAPTURE = 0; + private static final int STATE_WAITING_PRECAPTURE_END = 1; + + @SuppressWarnings("WeakerAccess") + public ExposureMeter(@NonNull List areas, boolean skipIfPossible) { + super(areas, 
skipIfPossible); + } + + @Override + protected boolean checkIsSupported(@NonNull ActionHolder holder) { + // In our case, this means checking if we support the AE precapture trigger. + boolean isNotLegacy = readCharacteristic(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL, -1) + != CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY; + Integer aeMode = holder.getBuilder(this).get(CaptureRequest.CONTROL_AE_MODE); + boolean isAEOn = aeMode != null && + (aeMode == CameraCharacteristics.CONTROL_AE_MODE_ON + || aeMode == CameraCharacteristics.CONTROL_AE_MODE_ON_ALWAYS_FLASH + || aeMode == CameraCharacteristics.CONTROL_AE_MODE_ON_AUTO_FLASH + || aeMode == CameraCharacteristics.CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE + || aeMode == 5 /* CameraCharacteristics.CONTROL_AE_MODE_ON_EXTERNAL_FLASH, API 28 */); + boolean result = isNotLegacy && isAEOn; + LOG.i("checkIsSupported:", result); + return result; + } + + @Override + protected boolean checkShouldSkip(@NonNull ActionHolder holder) { + Integer aeState = holder.getLastResult(this).get(CaptureResult.CONTROL_AE_STATE); + boolean result = aeState != null && aeState == CaptureResult.CONTROL_AE_STATE_CONVERGED; + LOG.i("checkShouldSkip:", result); + return result; + } + + @Override + protected void onStarted(@NonNull ActionHolder holder, @NonNull List areas) { + LOG.i("onStarted:", "with areas:", areas); + + // Launch the precapture trigger. + holder.getBuilder(this).set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER, + CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER_START); + + // Check the regions. + int maxRegions = readCharacteristic(CameraCharacteristics.CONTROL_MAX_REGIONS_AE, 0); + if (!areas.isEmpty() && maxRegions > 0) { + int max = Math.min(maxRegions, areas.size()); + holder.getBuilder(this).set(CaptureRequest.CONTROL_AE_REGIONS, + areas.subList(0, max).toArray(new MeteringRectangle[]{})); + } + + // Apply + holder.applyBuilder(this); + setState(STATE_WAITING_PRECAPTURE); + } + + @Override + protected void onCompleted(@NonNull ActionHolder holder) { + super.onCompleted(holder); + // Remove (but not apply) the risky parameter so it is not included in new requests. + // Documentation about this key says that this should be allowed. + holder.getBuilder(this).set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER, null); + } + + @Override + public void onCaptureCompleted(@NonNull ActionHolder holder, @NonNull CaptureRequest request, + @NonNull TotalCaptureResult result) { + super.onCaptureCompleted(holder, request, result); + Integer aeState = result.get(CaptureResult.CONTROL_AE_STATE); + Integer aeTriggerState = result.get(CaptureResult.CONTROL_AE_PRECAPTURE_TRIGGER); + LOG.i("onCaptureCompleted:", "aeState:", aeState, "aeTriggerState:", aeTriggerState); + if (aeState == null) return; + + if (getState() == STATE_WAITING_PRECAPTURE) { + switch (aeState) { + case CaptureResult.CONTROL_AE_STATE_PRECAPTURE: { + setState(STATE_WAITING_PRECAPTURE_END); + break; + } + case CaptureResult.CONTROL_AE_STATE_CONVERGED: + case CaptureResult.CONTROL_AE_STATE_FLASH_REQUIRED: { + // PRECAPTURE is a transient state. Being here might mean that precapture run + // and was successful, OR that the trigger was not even received yet. To + // distinguish, check the trigger state. + if (aeTriggerState != null + && aeTriggerState == CaptureResult.CONTROL_AE_PRECAPTURE_TRIGGER_START) { + setSuccessful(true); + setState(STATE_COMPLETED); + } + break; + } + case CaptureResult.CONTROL_AE_STATE_LOCKED: { + // There's nothing we can do, AE was locked, triggers are ignored. 
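+ // Mark this meter as failed but complete, so that MeterAction.isSuccessful() reflects the outcome.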
+ setSuccessful(false); + setState(STATE_COMPLETED); + break; + } + case CaptureResult.CONTROL_AE_STATE_INACTIVE: + case CaptureResult.CONTROL_AE_STATE_SEARCHING: { + // Wait... + break; + } + } + } + + if (getState() == STATE_WAITING_PRECAPTURE_END) { + switch (aeState) { + case CaptureResult.CONTROL_AE_STATE_CONVERGED: + case CaptureResult.CONTROL_AE_STATE_FLASH_REQUIRED: { + setSuccessful(true); + setState(STATE_COMPLETED); + break; + } + case CaptureResult.CONTROL_AE_STATE_LOCKED: { + // There's nothing we can do, AE was locked, triggers are ignored. + setSuccessful(false); + setState(STATE_COMPLETED); + break; + } + case CaptureResult.CONTROL_AE_STATE_PRECAPTURE: + case CaptureResult.CONTROL_AE_STATE_INACTIVE: + case CaptureResult.CONTROL_AE_STATE_SEARCHING: { + // Wait... + break; + } + } + } + } +} diff --git a/cameraview/src/main/java/com/otaliastudios/cameraview/engine/meter/ExposureReset.java b/cameraview/src/main/java/com/otaliastudios/cameraview/engine/meter/ExposureReset.java new file mode 100644 index 00000000..04ace0b9 --- /dev/null +++ b/cameraview/src/main/java/com/otaliastudios/cameraview/engine/meter/ExposureReset.java @@ -0,0 +1,72 @@ +package com.otaliastudios.cameraview.engine.meter; + +import android.hardware.camera2.CameraCharacteristics; +import android.hardware.camera2.CaptureRequest; +import android.hardware.camera2.CaptureResult; +import android.hardware.camera2.TotalCaptureResult; +import android.hardware.camera2.params.MeteringRectangle; +import android.os.Build; + +import androidx.annotation.NonNull; +import androidx.annotation.Nullable; +import androidx.annotation.RequiresApi; + +import com.otaliastudios.cameraview.CameraLogger; +import com.otaliastudios.cameraview.engine.action.ActionHolder; + +@RequiresApi(Build.VERSION_CODES.LOLLIPOP) +public class ExposureReset extends BaseReset { + + private static final String TAG = ExposureReset.class.getSimpleName(); + private static final CameraLogger LOG = CameraLogger.create(TAG); + + private static final int STATE_WAITING_LOCK = 0; + + @SuppressWarnings("WeakerAccess") + public ExposureReset() { + super(true); + } + + @Override + protected void onStarted(@NonNull ActionHolder holder, @Nullable MeteringRectangle area) { + int maxRegions = readCharacteristic(CameraCharacteristics.CONTROL_MAX_REGIONS_AE, 0); + if (area != null && maxRegions > 0) { + holder.getBuilder(this).set(CaptureRequest.CONTROL_AE_REGIONS, + new MeteringRectangle[]{area}); + } + + // NOTE: precapture might not be supported, in which case I think it will be ignored. + Integer trigger = holder.getLastResult(this).get(CaptureResult.CONTROL_AE_PRECAPTURE_TRIGGER); + LOG.i("onStarted:", "last precapture trigger is", trigger); + if (trigger != null && trigger == CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER_START) { + LOG.i("onStarted:", "canceling precapture."); + int newTrigger = Build.VERSION.SDK_INT >= 23 + ? CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER_CANCEL + : CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER_IDLE; + holder.getBuilder(this).set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER, newTrigger); + } + + // Documentation about CONTROL_AE_PRECAPTURE_TRIGGER says that, if it was started but not + // followed by a CAPTURE_INTENT_STILL_PICTURE request, the internal AE routine might remain + // locked unless we unlock manually. + // This is often the case for us, since the snapshot picture recorder does not use the intent + // and anyway we use the precapture sequence for touch metering as well. 
+ // To reset, docs suggest the use of CONTROL_AE_PRECAPTURE_TRIGGER_CANCEL, which we do above, + // or the technique used below: locking then unlocking. This proved to be the ONLY method + // to unlock reliably, unlike the cancel trigger (which we'll run anyway). + holder.getBuilder(this).set(CaptureRequest.CONTROL_AE_LOCK, true); + holder.applyBuilder(this); + setState(STATE_WAITING_LOCK); + } + + @Override + public void onCaptureCompleted(@NonNull ActionHolder holder, @NonNull CaptureRequest request, + @NonNull TotalCaptureResult result) { + super.onCaptureCompleted(holder, request, result); + if (getState() == STATE_WAITING_LOCK) { + holder.getBuilder(this).set(CaptureRequest.CONTROL_AE_LOCK, false); + holder.applyBuilder(this); + setState(STATE_COMPLETED); + } + } +} diff --git a/cameraview/src/main/java/com/otaliastudios/cameraview/engine/meter/FocusMeter.java b/cameraview/src/main/java/com/otaliastudios/cameraview/engine/meter/FocusMeter.java new file mode 100644 index 00000000..0356a20b --- /dev/null +++ b/cameraview/src/main/java/com/otaliastudios/cameraview/engine/meter/FocusMeter.java @@ -0,0 +1,96 @@ +package com.otaliastudios.cameraview.engine.meter; + +import android.hardware.camera2.CameraCharacteristics; +import android.hardware.camera2.CaptureRequest; +import android.hardware.camera2.CaptureResult; +import android.hardware.camera2.TotalCaptureResult; +import android.hardware.camera2.params.MeteringRectangle; +import android.os.Build; + +import androidx.annotation.NonNull; +import androidx.annotation.RequiresApi; + +import com.otaliastudios.cameraview.CameraLogger; +import com.otaliastudios.cameraview.engine.action.ActionHolder; + +import java.util.List; + +@RequiresApi(Build.VERSION_CODES.LOLLIPOP) +public class FocusMeter extends BaseMeter { + + private static final String TAG = FocusMeter.class.getSimpleName(); + private static final CameraLogger LOG = CameraLogger.create(TAG); + + public FocusMeter(@NonNull List areas, boolean skipIfPossible) { + super(areas, skipIfPossible); + } + + @Override + protected boolean checkIsSupported(@NonNull ActionHolder holder) { + // Exclude OFF and EDOF as per docs. These do not support the trigger.
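+ // That leaves AUTO, MACRO, CONTINUOUS_PICTURE and CONTINUOUS_VIDEO, which are exactly the modes accepted below.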
+ Integer afMode = holder.getBuilder(this).get(CaptureRequest.CONTROL_AF_MODE); + boolean result = afMode != null && + (afMode == CameraCharacteristics.CONTROL_AF_MODE_AUTO + || afMode == CameraCharacteristics.CONTROL_AF_MODE_CONTINUOUS_PICTURE + || afMode == CameraCharacteristics.CONTROL_AF_MODE_CONTINUOUS_VIDEO + || afMode == CameraCharacteristics.CONTROL_AF_MODE_MACRO); + LOG.i("checkIsSupported:", result); + return result; + } + + @Override + protected boolean checkShouldSkip(@NonNull ActionHolder holder) { + Integer afState = holder.getLastResult(this).get(CaptureResult.CONTROL_AF_STATE); + boolean result = afState != null && + (afState == CaptureResult.CONTROL_AF_STATE_FOCUSED_LOCKED || + afState == CaptureResult.CONTROL_AF_STATE_PASSIVE_FOCUSED); + LOG.i("checkShouldSkip:", result); + return result; + } + + @Override + protected void onStarted(@NonNull ActionHolder holder, @NonNull List areas) { + LOG.i("onStarted:", "with areas:", areas); + holder.getBuilder(this).set(CaptureRequest.CONTROL_AF_TRIGGER, + CaptureRequest.CONTROL_AF_TRIGGER_START); + int maxRegions = readCharacteristic(CameraCharacteristics.CONTROL_MAX_REGIONS_AF, 0); + if (!areas.isEmpty() && maxRegions > 0) { + int max = Math.min(maxRegions, areas.size()); + holder.getBuilder(this).set(CaptureRequest.CONTROL_AF_REGIONS, + areas.subList(0, max).toArray(new MeteringRectangle[]{})); + } + holder.applyBuilder(this); + } + + @Override + protected void onCompleted(@NonNull ActionHolder holder) { + super.onCompleted(holder); + // Remove (but not apply) the risky parameter so it is not included in new requests. + // Documentation about this key says that this should be allowed. + holder.getBuilder(this).set(CaptureRequest.CONTROL_AF_TRIGGER, null); + } + + @Override + public void onCaptureCompleted(@NonNull ActionHolder holder, @NonNull CaptureRequest request, + @NonNull TotalCaptureResult result) { + super.onCaptureCompleted(holder, request, result); + Integer afState = result.get(CaptureResult.CONTROL_AF_STATE); + LOG.i("onCaptureCompleted:", "afState:", afState); + if (afState == null) return; + switch (afState) { + case CaptureRequest.CONTROL_AF_STATE_FOCUSED_LOCKED: { + setSuccessful(true); + setState(STATE_COMPLETED); + break; + } + case CaptureRequest.CONTROL_AF_STATE_NOT_FOCUSED_LOCKED: { + setSuccessful(false); + setState(STATE_COMPLETED); + break; + } + case CaptureRequest.CONTROL_AF_STATE_INACTIVE: break; + case CaptureRequest.CONTROL_AF_STATE_ACTIVE_SCAN: break; + default: break; + } + } +} diff --git a/cameraview/src/main/java/com/otaliastudios/cameraview/engine/meter/FocusReset.java b/cameraview/src/main/java/com/otaliastudios/cameraview/engine/meter/FocusReset.java new file mode 100644 index 00000000..7554a86c --- /dev/null +++ b/cameraview/src/main/java/com/otaliastudios/cameraview/engine/meter/FocusReset.java @@ -0,0 +1,49 @@ +package com.otaliastudios.cameraview.engine.meter; + +import android.hardware.camera2.CameraCharacteristics; +import android.hardware.camera2.CaptureRequest; +import android.hardware.camera2.CaptureResult; +import android.hardware.camera2.params.MeteringRectangle; +import android.os.Build; + +import androidx.annotation.NonNull; +import androidx.annotation.Nullable; +import androidx.annotation.RequiresApi; + +import com.otaliastudios.cameraview.CameraLogger; +import com.otaliastudios.cameraview.engine.action.ActionHolder; + +@RequiresApi(Build.VERSION_CODES.LOLLIPOP) +public class FocusReset extends BaseReset { + + private static final String TAG = FocusReset.class.getSimpleName(); + 
private static final CameraLogger LOG = CameraLogger.create(TAG); + + @SuppressWarnings("WeakerAccess") + public FocusReset() { + super(true); + } + + @Override + protected void onStarted(@NonNull ActionHolder holder, @Nullable MeteringRectangle area) { + boolean changed = false; + int maxRegions = readCharacteristic(CameraCharacteristics.CONTROL_MAX_REGIONS_AF, 0); + if (area != null && maxRegions > 0) { + holder.getBuilder(this).set(CaptureRequest.CONTROL_AF_REGIONS, + new MeteringRectangle[]{area}); + changed = true; + } + + // NOTE: trigger might not be supported, in which case I think it will be ignored. + Integer trigger = holder.getLastResult(this).get(CaptureResult.CONTROL_AF_TRIGGER); + LOG.w("onStarted:", "last focus trigger is", trigger); + if (trigger != null && trigger == CaptureRequest.CONTROL_AF_TRIGGER_START) { + holder.getBuilder(this).set(CaptureRequest.CONTROL_AF_TRIGGER, + CaptureRequest.CONTROL_AF_TRIGGER_CANCEL); + changed = true; + } + + if (changed) holder.applyBuilder(this); + setState(STATE_COMPLETED); + } +} diff --git a/cameraview/src/main/java/com/otaliastudios/cameraview/engine/meter/MeterAction.java b/cameraview/src/main/java/com/otaliastudios/cameraview/engine/meter/MeterAction.java new file mode 100644 index 00000000..cc8f0ff4 --- /dev/null +++ b/cameraview/src/main/java/com/otaliastudios/cameraview/engine/meter/MeterAction.java @@ -0,0 +1,262 @@ +package com.otaliastudios.cameraview.engine.meter; + +import android.graphics.PointF; +import android.graphics.Rect; +import android.hardware.camera2.CameraCharacteristics; +import android.hardware.camera2.CaptureRequest; +import android.hardware.camera2.TotalCaptureResult; +import android.hardware.camera2.params.MeteringRectangle; +import android.os.Build; + +import androidx.annotation.NonNull; +import androidx.annotation.Nullable; +import androidx.annotation.RequiresApi; + +import com.otaliastudios.cameraview.CameraLogger; +import com.otaliastudios.cameraview.engine.CameraEngine; +import com.otaliastudios.cameraview.engine.action.ActionHolder; +import com.otaliastudios.cameraview.engine.action.ActionWrapper; +import com.otaliastudios.cameraview.engine.action.Actions; +import com.otaliastudios.cameraview.engine.action.BaseAction; +import com.otaliastudios.cameraview.engine.offset.Axis; +import com.otaliastudios.cameraview.engine.offset.Reference; +import com.otaliastudios.cameraview.size.AspectRatio; +import com.otaliastudios.cameraview.size.Size; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; + +@RequiresApi(Build.VERSION_CODES.LOLLIPOP) +public class MeterAction extends ActionWrapper { + + private final static String TAG = MeterAction.class.getSimpleName(); + private final static CameraLogger LOG = CameraLogger.create(TAG); + + private List meters; + private BaseAction action; + private ActionHolder holder; + private final PointF point; + private final CameraEngine engine; + private final boolean skipIfPossible; + + public MeterAction(@NonNull CameraEngine engine, @Nullable PointF point, + boolean skipIfPossible) { + this.point = point; + this.engine = engine; + this.skipIfPossible = skipIfPossible; + } + + @NonNull + @Override + public BaseAction getAction() { + return action; + } + + @Nullable + public PointF getPoint() { + return point; + } + + public boolean isSuccessful() { + for (BaseMeter meter : meters) { + if (!meter.isSuccessful()) { + LOG.i("isSuccessful:", "returning false."); + return false; + } + } + LOG.i("isSuccessful:", "returning true."); + return
true; + } + + @Override + protected void onStart(@NonNull ActionHolder holder) { + LOG.w("onStart:", "initializing."); + initialize(holder); + LOG.w("onStart:", "initialized."); + super.onStart(holder); + } + + private void initialize(@NonNull ActionHolder holder) { + this.holder = holder; + List areas = new ArrayList<>(); + if (point != null) { + // This is a good Q/A. https://stackoverflow.com/a/33181620/4288782 + // At first, the point is relative to the View system and does not account for our own cropping. + // Will keep updating these two below. + final PointF referencePoint = new PointF(point.x, point.y); + Size referenceSize = engine.getPreview().getSurfaceSize(); + + // 1. Account for cropping. + // This will enlarge the preview size so that aspect ratio matches. + referenceSize = applyPreviewCropping(referenceSize, referencePoint); + + // 2. Scale to the preview stream coordinates. + // This will move to the preview stream coordinates by scaling. + referenceSize = applyPreviewScale(referenceSize, referencePoint); + + // 3. Rotate to the stream coordinate system. + // This leaves us with sensor stream coordinates. + referenceSize = applyPreviewToSensorRotation(referenceSize, referencePoint); + + // 4. Move to the crop region coordinate system. + // The crop region is the union of all currently active streams. + referenceSize = applyCropRegionCoordinates(referenceSize, referencePoint); + + // 5. Move to the active array coordinate system. + referenceSize = applyActiveArrayCoordinates(referenceSize, referencePoint); + + // 6. Now we can compute the metering regions. + // We want to define them as a fraction of the visible size which (apart from cropping) + // can be obtained through the SENSOR rotated preview stream size. + Size visibleSize = engine.getPreviewStreamSize(Reference.SENSOR); + //noinspection ConstantConditions + MeteringRectangle area1 = createMeteringRectangle(referenceSize, referencePoint, + visibleSize, 0.05F, 1000); + MeteringRectangle area2 = createMeteringRectangle(referenceSize, referencePoint, + visibleSize, 0.1F, 100); + areas.add(area1); + areas.add(area2); + } + + BaseMeter ae = new ExposureMeter(areas, skipIfPossible); + BaseMeter af = new FocusMeter(areas, skipIfPossible); + BaseMeter awb = new WhiteBalanceMeter(areas, skipIfPossible); + meters = Arrays.asList(ae, af, awb); + action = Actions.together(ae, af, awb); + } + + @SuppressWarnings("UnnecessaryLocalVariable") + @NonNull + private Size applyPreviewCropping(@NonNull Size referenceSize, @NonNull PointF referencePoint) { + Size previewStreamSize = engine.getPreviewStreamSize(Reference.VIEW); + Size previewSurfaceSize = referenceSize; + if (previewStreamSize == null) { + throw new IllegalStateException("getPreviewStreamSize should not be null at this point."); + } + int referenceWidth = previewSurfaceSize.getWidth(); + int referenceHeight = previewSurfaceSize.getHeight(); + AspectRatio previewStreamAspectRatio = AspectRatio.of(previewStreamSize); + AspectRatio previewSurfaceAspectRatio = AspectRatio.of(previewSurfaceSize); + if (engine.getPreview().isCropping()) { + if (previewStreamAspectRatio.toFloat() > previewSurfaceAspectRatio.toFloat()) { + // Stream is larger. The x coordinate must be increased: a touch on the left side + // of the surface is not on the left side of the stream (it's more to the right).
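+ // Concretely: the reference width becomes surfaceWidth * (streamAspectRatio / surfaceAspectRatio) and the point is shifted by half of the added width; the else branch does the same along the y axis.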
+ float scale = previewStreamAspectRatio.toFloat() / previewSurfaceAspectRatio.toFloat(); + referencePoint.x += previewSurfaceSize.getWidth() * (scale - 1F) / 2F; + referenceWidth = Math.round(previewSurfaceSize.getWidth() * scale); + } else { + // Stream is taller. The y coordinate must be increased: a touch on the top side + // of the surface is not on the top side of the stream (it's a bit lower). + float scale = previewSurfaceAspectRatio.toFloat() / previewStreamAspectRatio.toFloat(); + referencePoint.y += previewSurfaceSize.getHeight() * (scale - 1F) / 2F; + referenceHeight = Math.round(previewSurfaceSize.getHeight() * scale); + } + } + return new Size(referenceWidth, referenceHeight); + } + + @SuppressWarnings("ConstantConditions") + @NonNull + private Size applyPreviewScale(@NonNull Size referenceSize, @NonNull PointF referencePoint) { + // The referenceSize now has the same aspect ratio as the previewStreamSize, but they + // can still have different sizes (that is, a scale operation is needed). + Size previewStreamSize = engine.getPreviewStreamSize(Reference.VIEW); + referencePoint.x *= (float) previewStreamSize.getWidth() / referenceSize.getWidth(); + referencePoint.y *= (float) previewStreamSize.getHeight() / referenceSize.getHeight(); + return previewStreamSize; + } + + @SuppressWarnings("SuspiciousNameCombination") + @NonNull + private Size applyPreviewToSensorRotation(@NonNull Size referenceSize, @NonNull PointF referencePoint) { + // Not elegant, but the sin/cos way was failing for some reason. + int angle = engine.getAngles().offset(Reference.SENSOR, Reference.VIEW, Axis.ABSOLUTE); + boolean flip = angle % 180 != 0; + float tempX = referencePoint.x; + float tempY = referencePoint.y; + if (angle == 0) { + referencePoint.x = tempX; + referencePoint.y = tempY; + } else if (angle == 90) { + referencePoint.x = tempY; + referencePoint.y = referenceSize.getWidth() - tempX; + } else if (angle == 180) { + referencePoint.x = referenceSize.getWidth() - tempX; + referencePoint.y = referenceSize.getHeight() - tempY; + } else if (angle == 270) { + referencePoint.x = referenceSize.getHeight() - tempY; + referencePoint.y = tempX; + } else { + throw new IllegalStateException("Unexpected angle " + angle); + } + return flip ? referenceSize.flip() : referenceSize; + } + + @NonNull + private Size applyCropRegionCoordinates(@NonNull Size referenceSize, @NonNull PointF referencePoint) { + // The input point and size refer to the stream rect. + // The stream rect is part of the 'crop region', as described below. + // https://source.android.com/devices/camera/camera3_crop_reprocess.html + Rect cropRect = holder.getBuilder(this).get(CaptureRequest.SCALER_CROP_REGION); + // For now, we don't care about x and y position. Rect should be non-null, but let's be safe. + int cropRectWidth = cropRect == null ? referenceSize.getWidth() : cropRect.width(); + int cropRectHeight = cropRect == null ? referenceSize.getHeight() : cropRect.height(); + // The stream is always centered inside the crop region, and one of the dimensions + // should always match. We just increase the other one. + referencePoint.x += (cropRectWidth - referenceSize.getWidth()) / 2F; + referencePoint.y += (cropRectHeight - referenceSize.getHeight()) / 2F; + return new Size(cropRectWidth, cropRectHeight); + } + + @NonNull + private Size applyActiveArrayCoordinates(@NonNull Size referenceSize, @NonNull PointF referencePoint) { + // The input point and size refer to the scaler crop region.
+ // We can query for the crop region position inside the active array, so this is easy. + Rect cropRect = holder.getBuilder(this).get(CaptureRequest.SCALER_CROP_REGION); + referencePoint.x += cropRect == null ? 0 : cropRect.left; + referencePoint.y += cropRect == null ? 0 : cropRect.top; + // Finally, get the active rect width and height from characteristics. + Rect activeRect = holder.getCharacteristics(this).get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE); + if (activeRect == null) { // Should never happen + activeRect = new Rect(0, 0, referenceSize.getWidth(), referenceSize.getHeight()); + } + return new Size(activeRect.width(), activeRect.height()); + } + + /** + * Creates a metering rectangle around the center point. + * The rectangle will have a size that's a factor of the visible width and height. + * The rectangle will also be constrained to be inside the given boundaries, + * so we don't exceed them in case the center point is exactly on one side for example. + * @return a new rectangle + */ + @NonNull + private MeteringRectangle createMeteringRectangle( + @NonNull Size boundaries, + @NonNull PointF center, + @NonNull Size visibleSize, + float factor, + int weight) { + float rectangleWidth = factor * visibleSize.getWidth(); + float rectangleHeight = factor * visibleSize.getHeight(); + float rectangleLeft = center.x - rectangleWidth / 2F; + float rectangleTop = center.y - rectangleHeight / 2F; + // Respect boundaries + if (rectangleLeft < 0) rectangleLeft = 0; + if (rectangleTop < 0) rectangleTop = 0; + if (rectangleLeft + rectangleWidth > boundaries.getWidth()) { + rectangleWidth = boundaries.getWidth() - rectangleLeft; + } + if (rectangleTop + rectangleHeight > boundaries.getHeight()) { + rectangleHeight = boundaries.getHeight() - rectangleTop; + } + return new MeteringRectangle( + (int) rectangleLeft, + (int) rectangleTop, + (int) rectangleWidth, + (int) rectangleHeight, + weight + ); + } +} diff --git a/cameraview/src/main/java/com/otaliastudios/cameraview/engine/meter/MeterResetAction.java b/cameraview/src/main/java/com/otaliastudios/cameraview/engine/meter/MeterResetAction.java new file mode 100644 index 00000000..e2da92cf --- /dev/null +++ b/cameraview/src/main/java/com/otaliastudios/cameraview/engine/meter/MeterResetAction.java @@ -0,0 +1,34 @@ +package com.otaliastudios.cameraview.engine.meter; + +import android.os.Build; + +import androidx.annotation.NonNull; +import androidx.annotation.RequiresApi; + +import com.otaliastudios.cameraview.engine.action.ActionHolder; +import com.otaliastudios.cameraview.engine.action.ActionWrapper; +import com.otaliastudios.cameraview.engine.action.Actions; +import com.otaliastudios.cameraview.engine.action.BaseAction; +import com.otaliastudios.cameraview.engine.lock.ExposureLock; +import com.otaliastudios.cameraview.engine.lock.FocusLock; +import com.otaliastudios.cameraview.engine.lock.WhiteBalanceLock; + +@RequiresApi(Build.VERSION_CODES.LOLLIPOP) +public class MeterResetAction extends ActionWrapper { + + private final BaseAction action; + + public MeterResetAction() { + this.action = Actions.together( + new ExposureReset(), + new FocusReset(), + new WhiteBalanceReset() + ); + } + + @NonNull + @Override + public BaseAction getAction() { + return action; + } +} diff --git a/cameraview/src/main/java/com/otaliastudios/cameraview/engine/meter/WhiteBalanceMeter.java b/cameraview/src/main/java/com/otaliastudios/cameraview/engine/meter/WhiteBalanceMeter.java new file mode 100644 index 00000000..4828838d --- /dev/null +++ 
b/cameraview/src/main/java/com/otaliastudios/cameraview/engine/meter/WhiteBalanceMeter.java @@ -0,0 +1,85 @@ +package com.otaliastudios.cameraview.engine.meter; + +import android.hardware.camera2.CameraCharacteristics; +import android.hardware.camera2.CaptureRequest; +import android.hardware.camera2.CaptureResult; +import android.hardware.camera2.TotalCaptureResult; +import android.hardware.camera2.params.MeteringRectangle; +import android.os.Build; + +import androidx.annotation.NonNull; +import androidx.annotation.RequiresApi; + +import com.otaliastudios.cameraview.CameraLogger; +import com.otaliastudios.cameraview.engine.action.ActionHolder; + +import java.util.List; + +@RequiresApi(Build.VERSION_CODES.LOLLIPOP) +public class WhiteBalanceMeter extends BaseMeter { + + private static final String TAG = WhiteBalanceMeter.class.getSimpleName(); + private static final CameraLogger LOG = CameraLogger.create(TAG); + + public WhiteBalanceMeter(@NonNull List areas, boolean skipIfPossible) { + super(areas, skipIfPossible); + } + + @Override + protected boolean checkIsSupported(@NonNull ActionHolder holder) { + boolean isNotLegacy = readCharacteristic(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL, -1) + != CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY; + Integer awbMode = holder.getBuilder(this).get(CaptureRequest.CONTROL_AWB_MODE); + boolean result = isNotLegacy && awbMode != null && awbMode == CaptureRequest.CONTROL_AWB_MODE_AUTO; + LOG.i("checkIsSupported:", result); + return result; + } + + @Override + protected boolean checkShouldSkip(@NonNull ActionHolder holder) { + Integer awbState = holder.getLastResult(this).get(CaptureResult.CONTROL_AWB_STATE); + boolean result = awbState != null && awbState == CaptureRequest.CONTROL_AWB_STATE_CONVERGED; + LOG.i("checkShouldSkip:", result); + return result; + } + + @Override + protected void onStarted(@NonNull ActionHolder holder, @NonNull List areas) { + LOG.i("onStarted:", "with areas:", areas); + int maxRegions = readCharacteristic(CameraCharacteristics.CONTROL_MAX_REGIONS_AWB, 0); + if (!areas.isEmpty() && maxRegions > 0) { + int max = Math.min(maxRegions, areas.size()); + holder.getBuilder(this).set(CaptureRequest.CONTROL_AWB_REGIONS, + areas.subList(0, max).toArray(new MeteringRectangle[]{})); + holder.applyBuilder(this); + } + } + + @Override + public void onCaptureCompleted(@NonNull ActionHolder holder, @NonNull CaptureRequest request, + @NonNull TotalCaptureResult result) { + super.onCaptureCompleted(holder, request, result); + Integer awbState = result.get(CaptureResult.CONTROL_AWB_STATE); + LOG.i("onCaptureCompleted:", "awbState:", awbState); + if (awbState == null) return; + + switch (awbState) { + case CaptureRequest.CONTROL_AWB_STATE_CONVERGED: { + setSuccessful(true); + setState(STATE_COMPLETED); + break; + } + case CaptureRequest.CONTROL_AWB_STATE_LOCKED: { + // Nothing we can do if AWB was locked. + setSuccessful(false); + setState(STATE_COMPLETED); + break; + } + case CaptureRequest.CONTROL_AWB_STATE_INACTIVE: + case CaptureRequest.CONTROL_AWB_STATE_SEARCHING: { + // Wait... 
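+ // (AWB has not converged yet; keep listening to subsequent capture results.)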
+ break; + } + } + } +} diff --git a/cameraview/src/main/java/com/otaliastudios/cameraview/engine/meter/WhiteBalanceReset.java b/cameraview/src/main/java/com/otaliastudios/cameraview/engine/meter/WhiteBalanceReset.java new file mode 100644 index 00000000..5d24fd55 --- /dev/null +++ b/cameraview/src/main/java/com/otaliastudios/cameraview/engine/meter/WhiteBalanceReset.java @@ -0,0 +1,40 @@ +package com.otaliastudios.cameraview.engine.meter; + +import android.hardware.camera2.CameraCharacteristics; +import android.hardware.camera2.CaptureRequest; +import android.hardware.camera2.CaptureResult; +import android.hardware.camera2.TotalCaptureResult; +import android.hardware.camera2.params.MeteringRectangle; +import android.os.Build; + +import androidx.annotation.NonNull; +import androidx.annotation.Nullable; +import androidx.annotation.RequiresApi; + +import com.otaliastudios.cameraview.CameraLogger; +import com.otaliastudios.cameraview.engine.action.ActionHolder; + +import java.util.List; + +@RequiresApi(Build.VERSION_CODES.LOLLIPOP) +public class WhiteBalanceReset extends BaseReset { + + private static final String TAG = WhiteBalanceReset.class.getSimpleName(); + private static final CameraLogger LOG = CameraLogger.create(TAG); + + @SuppressWarnings("WeakerAccess") + public WhiteBalanceReset() { + super(true); + } + + @Override + protected void onStarted(@NonNull ActionHolder holder, @Nullable MeteringRectangle area) { + LOG.w("onStarted:", "with area:", area); + int maxRegions = readCharacteristic(CameraCharacteristics.CONTROL_MAX_REGIONS_AWB, 0); + if (area != null && maxRegions > 0) { + holder.getBuilder(this).set(CaptureRequest.CONTROL_AWB_REGIONS, new MeteringRectangle[]{area}); + holder.applyBuilder(this); + } + setState(STATE_COMPLETED); + } +} diff --git a/cameraview/src/main/java/com/otaliastudios/cameraview/engine/metering/AutoExposure.java b/cameraview/src/main/java/com/otaliastudios/cameraview/engine/metering/AutoExposure.java deleted file mode 100644 index 3641f450..00000000 --- a/cameraview/src/main/java/com/otaliastudios/cameraview/engine/metering/AutoExposure.java +++ /dev/null @@ -1,101 +0,0 @@ -package com.otaliastudios.cameraview.engine.metering; - -import android.hardware.camera2.CameraCharacteristics; -import android.hardware.camera2.CaptureRequest; -import android.hardware.camera2.CaptureResult; -import android.hardware.camera2.params.MeteringRectangle; -import android.os.Build; - -import androidx.annotation.NonNull; -import androidx.annotation.RequiresApi; - -import com.otaliastudios.cameraview.CameraLogger; - -import java.util.List; - -@RequiresApi(Build.VERSION_CODES.LOLLIPOP) -public class AutoExposure extends MeteringParameter { - - private static final String TAG = AutoExposure.class.getSimpleName(); - private static final CameraLogger LOG = CameraLogger.create(TAG); - - private boolean isStarted; - - @Override - public void startMetering(@NonNull CameraCharacteristics characteristics, - @NonNull CaptureRequest.Builder builder, - @NonNull List areas) { - isSuccessful = false; - isMetered = false; - isStarted = false; - - boolean isNotLegacy = readCharacteristic(characteristics, - CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL, -1) != - CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY; - Integer aeMode = builder.get(CaptureRequest.CONTROL_AE_MODE); - boolean isAEOn = aeMode != null && - (aeMode == CameraCharacteristics.CONTROL_AE_MODE_ON - || aeMode == CameraCharacteristics.CONTROL_AE_MODE_ON_ALWAYS_FLASH - || aeMode == 
CameraCharacteristics.CONTROL_AE_MODE_ON_AUTO_FLASH - || aeMode == CameraCharacteristics.CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE - || aeMode == 5 /* CameraCharacteristics.CONTROL_AE_MODE_ON_EXTERNAL_FLASH, API 28 */); - isSupported = isNotLegacy && isAEOn; - - if (isSupported) { - builder.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER, - CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER_START); - } - - // Even if precapture is not supported, check the regions anyway. - int maxRegions = readCharacteristic(characteristics, - CameraCharacteristics.CONTROL_MAX_REGIONS_AE, 0); - if (maxRegions > 0) { - int max = Math.min(maxRegions, areas.size()); - builder.set(CaptureRequest.CONTROL_AE_REGIONS, - areas.subList(0, max).toArray(new MeteringRectangle[]{})); - } - } - - @Override - public void onCapture(@NonNull CaptureResult result) { - if (isMetered || !isSupported) return; - Integer aeState = result.get(CaptureResult.CONTROL_AE_STATE); - LOG.i("onCapture:", "aeState:", aeState); - if (aeState == null) return; - - if (!isStarted) { - if (aeState == CaptureRequest.CONTROL_AE_STATE_PRECAPTURE) { - isStarted = true; - } else if (aeState == CaptureResult.CONTROL_AE_STATE_CONVERGED - || aeState == CaptureResult.CONTROL_AE_STATE_FLASH_REQUIRED) { - // PRECAPTURE is a transient state, so also check for the final states. - isMetered = true; - isSuccessful = true; - } - } else { - if (aeState == CaptureRequest.CONTROL_AE_STATE_CONVERGED - || aeState == CaptureRequest.CONTROL_AE_STATE_FLASH_REQUIRED) { - isMetered = true; - isSuccessful = true; - } - } - } - - @Override - public void resetMetering(@NonNull CameraCharacteristics characteristics, - @NonNull CaptureRequest.Builder builder, - @NonNull MeteringRectangle area) { - int maxRegions = readCharacteristic(characteristics, - CameraCharacteristics.CONTROL_MAX_REGIONS_AE, 0); - if (maxRegions > 0) { - builder.set(CaptureRequest.CONTROL_AE_REGIONS, new MeteringRectangle[]{area}); - } - if (isSupported) { - // Cleanup any precapture sequence. - if (Build.VERSION.SDK_INT >= 23) { - builder.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER, - CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER_CANCEL); - } - } - } -} diff --git a/cameraview/src/main/java/com/otaliastudios/cameraview/engine/metering/AutoFocus.java b/cameraview/src/main/java/com/otaliastudios/cameraview/engine/metering/AutoFocus.java deleted file mode 100644 index 6c61639c..00000000 --- a/cameraview/src/main/java/com/otaliastudios/cameraview/engine/metering/AutoFocus.java +++ /dev/null @@ -1,87 +0,0 @@ -package com.otaliastudios.cameraview.engine.metering; - -import android.hardware.camera2.CameraCharacteristics; -import android.hardware.camera2.CaptureRequest; -import android.hardware.camera2.CaptureResult; -import android.hardware.camera2.params.MeteringRectangle; -import android.os.Build; - -import androidx.annotation.NonNull; -import androidx.annotation.RequiresApi; - -import com.otaliastudios.cameraview.CameraLogger; - -import java.util.List; - -@RequiresApi(Build.VERSION_CODES.LOLLIPOP) -public class AutoFocus extends MeteringParameter { - - private static final String TAG = AutoFocus.class.getSimpleName(); - private static final CameraLogger LOG = CameraLogger.create(TAG); - - @Override - public void startMetering(@NonNull CameraCharacteristics characteristics, - @NonNull CaptureRequest.Builder builder, - @NonNull List areas) { - isSuccessful = false; - isMetered = false; - - Integer afMode = builder.get(CaptureRequest.CONTROL_AF_MODE); - // Exclude OFF and EDOF as per docs. 
- isSupported = afMode != null && - (afMode == CameraCharacteristics.CONTROL_AF_MODE_AUTO - || afMode == CameraCharacteristics.CONTROL_AF_MODE_CONTINUOUS_PICTURE - || afMode == CameraCharacteristics.CONTROL_AF_MODE_CONTINUOUS_VIDEO - || afMode == CameraCharacteristics.CONTROL_AF_MODE_MACRO); - if (isSupported) { - builder.set(CaptureRequest.CONTROL_AF_TRIGGER, CaptureRequest.CONTROL_AF_TRIGGER_START); - } - - // Even if auto is not supported, change the regions anyway. - int maxRegions = readCharacteristic(characteristics, CameraCharacteristics.CONTROL_MAX_REGIONS_AF, 0); - if (maxRegions > 0) { - int max = Math.min(maxRegions, areas.size()); - builder.set(CaptureRequest.CONTROL_AF_REGIONS, - areas.subList(0, max).toArray(new MeteringRectangle[]{})); - } - - } - - @Override - public void onCapture(@NonNull CaptureResult result) { - if (isMetered || !isSupported) return; - Integer afState = result.get(CaptureResult.CONTROL_AF_STATE); - LOG.i("onCapture:", "afState:", afState); - if (afState == null) return; - switch (afState) { - case CaptureRequest.CONTROL_AF_STATE_FOCUSED_LOCKED: { - isMetered = true; - isSuccessful = true; - break; - } - case CaptureRequest.CONTROL_AF_STATE_NOT_FOCUSED_LOCKED: { - isMetered = true; - isSuccessful = false; - break; - } - case CaptureRequest.CONTROL_AF_STATE_INACTIVE: break; - case CaptureRequest.CONTROL_AF_STATE_ACTIVE_SCAN: break; - default: break; - } - } - - @Override - public void resetMetering(@NonNull CameraCharacteristics characteristics, - @NonNull CaptureRequest.Builder builder, - @NonNull MeteringRectangle area) { - int maxRegions = readCharacteristic(characteristics, - CameraCharacteristics.CONTROL_MAX_REGIONS_AF, 0); - if (maxRegions > 0) { - builder.set(CaptureRequest.CONTROL_AF_REGIONS, new MeteringRectangle[]{area}); - } - - if (isSupported) { // Cleanup any trigger. 
- builder.set(CaptureRequest.CONTROL_AF_TRIGGER, CaptureRequest.CONTROL_AF_TRIGGER_CANCEL); - } - } -} diff --git a/cameraview/src/main/java/com/otaliastudios/cameraview/engine/metering/AutoWhiteBalance.java b/cameraview/src/main/java/com/otaliastudios/cameraview/engine/metering/AutoWhiteBalance.java deleted file mode 100644 index a9eaf03c..00000000 --- a/cameraview/src/main/java/com/otaliastudios/cameraview/engine/metering/AutoWhiteBalance.java +++ /dev/null @@ -1,81 +0,0 @@ -package com.otaliastudios.cameraview.engine.metering; - -import android.hardware.camera2.CameraCharacteristics; -import android.hardware.camera2.CaptureRequest; -import android.hardware.camera2.CaptureResult; -import android.hardware.camera2.params.MeteringRectangle; -import android.os.Build; - -import androidx.annotation.NonNull; -import androidx.annotation.RequiresApi; - -import com.otaliastudios.cameraview.CameraLogger; - -import java.util.List; - -@RequiresApi(Build.VERSION_CODES.LOLLIPOP) -public class AutoWhiteBalance extends MeteringParameter { - - private static final String TAG = AutoWhiteBalance.class.getSimpleName(); - private static final CameraLogger LOG = CameraLogger.create(TAG); - - @Override - public void startMetering(@NonNull CameraCharacteristics characteristics, - @NonNull CaptureRequest.Builder builder, - @NonNull List areas) { - isSuccessful = false; - isMetered = false; - - boolean isNotLegacy = readCharacteristic(characteristics, - CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL, -1) != - CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY; - Integer awbMode = builder.get(CaptureRequest.CONTROL_AWB_MODE); - isSupported = isNotLegacy && awbMode != null && awbMode == CaptureRequest.CONTROL_AWB_MODE_AUTO; - - if (isSupported) { - // Remove any lock. We're not setting any, but just in case. - builder.set(CaptureRequest.CONTROL_AWB_LOCK, false); - } - - // Even if auto is not supported, change the regions anyway. 
- int maxRegions = readCharacteristic(characteristics, - CameraCharacteristics.CONTROL_MAX_REGIONS_AWB, 0); - if (maxRegions > 0) { - int max = Math.min(maxRegions, areas.size()); - builder.set(CaptureRequest.CONTROL_AWB_REGIONS, - areas.subList(0, max).toArray(new MeteringRectangle[]{})); - } - - } - - @Override - public void onCapture(@NonNull CaptureResult result) { - if (isMetered || !isSupported) return; - Integer awbState = result.get(CaptureResult.CONTROL_AWB_STATE); - LOG.i("onCapture:", "awbState:", awbState); - if (awbState == null) return; - - switch (awbState) { - case CaptureRequest.CONTROL_AWB_STATE_CONVERGED: { - isMetered = true; - isSuccessful = true; - break; - } - case CaptureRequest.CONTROL_AWB_STATE_LOCKED: break; - case CaptureRequest.CONTROL_AWB_STATE_INACTIVE: break; - case CaptureRequest.CONTROL_AWB_STATE_SEARCHING: break; - default: break; - } - } - - @Override - public void resetMetering(@NonNull CameraCharacteristics characteristics, - @NonNull CaptureRequest.Builder builder, - @NonNull MeteringRectangle area) { - int maxRegions = readCharacteristic(characteristics, - CameraCharacteristics.CONTROL_MAX_REGIONS_AWB, 0); - if (maxRegions > 0) { - builder.set(CaptureRequest.CONTROL_AWB_REGIONS, new MeteringRectangle[]{area}); - } - } -} diff --git a/cameraview/src/main/java/com/otaliastudios/cameraview/engine/metering/MeteringParameter.java b/cameraview/src/main/java/com/otaliastudios/cameraview/engine/metering/MeteringParameter.java deleted file mode 100644 index 2fb4a5af..00000000 --- a/cameraview/src/main/java/com/otaliastudios/cameraview/engine/metering/MeteringParameter.java +++ /dev/null @@ -1,54 +0,0 @@ -package com.otaliastudios.cameraview.engine.metering; - -import android.hardware.camera2.CameraCharacteristics; -import android.hardware.camera2.CaptureRequest; -import android.hardware.camera2.CaptureResult; -import android.hardware.camera2.params.MeteringRectangle; -import android.os.Build; - -import androidx.annotation.NonNull; -import androidx.annotation.RequiresApi; - -import java.util.List; - -@RequiresApi(Build.VERSION_CODES.LOLLIPOP) -public abstract class MeteringParameter { - - @SuppressWarnings("WeakerAccess") - protected boolean isSupported; - - @SuppressWarnings("WeakerAccess") - protected boolean isSuccessful; - - @SuppressWarnings("WeakerAccess") - protected boolean isMetered; - - @SuppressWarnings("WeakerAccess") - @NonNull - protected T readCharacteristic(@NonNull CameraCharacteristics characteristics, - @NonNull CameraCharacteristics.Key key, - @NonNull T fallback) { - T value = characteristics.get(key); - return value == null ? 
fallback : value; - } - - public final boolean isMetered() { - // A non supported parameter should always appear as metered - return isMetered || !isSupported; - } - - public final boolean isSuccessful() { - // A non supported parameter should always appear as successful - return isSuccessful || !isSupported; - } - - public abstract void startMetering(@NonNull CameraCharacteristics characteristics, - @NonNull CaptureRequest.Builder builder, - @NonNull List areas); - - public abstract void resetMetering(@NonNull CameraCharacteristics characteristics, - @NonNull CaptureRequest.Builder builder, - @NonNull MeteringRectangle area); - - public abstract void onCapture(@NonNull CaptureResult result); -} diff --git a/cameraview/src/main/java/com/otaliastudios/cameraview/gesture/GestureAction.java b/cameraview/src/main/java/com/otaliastudios/cameraview/gesture/GestureAction.java index b4fa3659..e384ad72 100644 --- a/cameraview/src/main/java/com/otaliastudios/cameraview/gesture/GestureAction.java +++ b/cameraview/src/main/java/com/otaliastudios/cameraview/gesture/GestureAction.java @@ -23,7 +23,7 @@ public enum GestureAction { NONE(0, GestureType.ONE_SHOT), /** - * Auto focus control, typically assigned to the tap gesture. + * Touch metering control, typically assigned to the tap gesture. * This action can be mapped to one shot gestures: * * - {@link Gesture#TAP} diff --git a/cameraview/src/main/java/com/otaliastudios/cameraview/picture/Full2PictureRecorder.java b/cameraview/src/main/java/com/otaliastudios/cameraview/picture/Full2PictureRecorder.java index 0ccccecd..6a53e49e 100644 --- a/cameraview/src/main/java/com/otaliastudios/cameraview/picture/Full2PictureRecorder.java +++ b/cameraview/src/main/java/com/otaliastudios/cameraview/picture/Full2PictureRecorder.java @@ -1,18 +1,18 @@ package com.otaliastudios.cameraview.picture; import android.hardware.camera2.CameraAccessException; -import android.hardware.camera2.CameraCaptureSession; -import android.hardware.camera2.CameraCharacteristics; import android.hardware.camera2.CameraDevice; import android.hardware.camera2.CaptureRequest; -import android.hardware.camera2.CaptureResult; -import android.hardware.camera2.TotalCaptureResult; import android.media.Image; import android.media.ImageReader; import android.os.Build; import com.otaliastudios.cameraview.CameraLogger; import com.otaliastudios.cameraview.PictureResult; +import com.otaliastudios.cameraview.engine.Camera2Engine; +import com.otaliastudios.cameraview.engine.action.Action; +import com.otaliastudios.cameraview.engine.action.ActionHolder; +import com.otaliastudios.cameraview.engine.action.BaseAction; import com.otaliastudios.cameraview.internal.utils.ExifHelper; import com.otaliastudios.cameraview.internal.utils.WorkerHandler; @@ -21,7 +21,6 @@ import java.io.IOException; import java.nio.ByteBuffer; import androidx.annotation.NonNull; -import androidx.annotation.Nullable; import androidx.annotation.RequiresApi; import androidx.exifinterface.media.ExifInterface; @@ -34,214 +33,57 @@ public class Full2PictureRecorder extends PictureRecorder implements ImageReader private static final String TAG = Full2PictureRecorder.class.getSimpleName(); private static final CameraLogger LOG = CameraLogger.create(TAG); - private static final int STATE_IDLE = 0; - private static final int STATE_WAITING_FIRST_FRAME = 1; - private static final int STATE_WAITING_AUTOFOCUS = 2; - private static final int STATE_WAITING_PRECAPTURE_START = 3; - private static final int STATE_WAITING_PRECAPTURE_END = 4; - private 
static final int STATE_WAITING_CAPTURE = 5; - private static final int STATE_WAITING_IMAGE = 6; - - private static final int REQUEST_TAG = CameraDevice.TEMPLATE_STILL_CAPTURE; - - private CameraCaptureSession mSession; - private CameraCharacteristics mCharacteristics; - private CaptureRequest.Builder mBuilder; - private CameraCaptureSession.CaptureCallback mCallback; - private ImageReader mPictureReader; - private CaptureRequest.Builder mPictureBuilder; - private boolean mStopPreviewBeforeCapture; - private int mState = STATE_IDLE; + private final ActionHolder mHolder; + private final Action mAction; + private final ImageReader mPictureReader; + private final CaptureRequest.Builder mPictureBuilder; public Full2PictureRecorder(@NonNull PictureResult.Stub stub, - @Nullable PictureResultListener listener, - @NonNull CameraCharacteristics characteristics, - @NonNull CameraCaptureSession session, - @NonNull CaptureRequest.Builder builder, - @NonNull CameraCaptureSession.CaptureCallback callback, + @NonNull Camera2Engine engine, @NonNull CaptureRequest.Builder pictureBuilder, - @NonNull ImageReader pictureReader, - boolean stopPreviewBeforeCapture) { - super(stub, listener); - mCharacteristics = characteristics; - mSession = session; - mBuilder = builder; - mCallback = callback; + @NonNull ImageReader pictureReader) { + super(stub, engine); + mHolder = engine; mPictureBuilder = pictureBuilder; - mStopPreviewBeforeCapture = stopPreviewBeforeCapture; mPictureReader = pictureReader; mPictureReader.setOnImageAvailableListener(this, WorkerHandler.get().getHandler()); - } - - @Override - public void take() { - mState = STATE_WAITING_FIRST_FRAME; - } - - private boolean supportsAutoFocus() { - //noinspection ConstantConditions - int afMode = mBuilder.get(CaptureRequest.CONTROL_AF_MODE); - // Exclude OFF and EDOF as per docs. - return afMode == CameraCharacteristics.CONTROL_AF_MODE_AUTO - || afMode == CameraCharacteristics.CONTROL_AF_MODE_CONTINUOUS_PICTURE - || afMode == CameraCharacteristics.CONTROL_AF_MODE_CONTINUOUS_VIDEO - || afMode == CameraCharacteristics.CONTROL_AF_MODE_MACRO; - } - - private void runAutoFocus(@NonNull CaptureResult lastResult) { - Integer afState = lastResult.get(CaptureResult.CONTROL_AF_STATE); - boolean shouldSkip = afState != null && afState == CaptureResult.CONTROL_AF_STATE_FOCUSED_LOCKED; - boolean supports = supportsAutoFocus(); - LOG.i("runAutoFocus:", "supports:", supports, "shouldSkip:", shouldSkip, "afState:", afState); - if (supports && !shouldSkip) { - try { - mState = STATE_WAITING_AUTOFOCUS; - mBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER, CaptureRequest.CONTROL_AF_TRIGGER_START); - mSession.capture(mBuilder.build(), mCallback, null); - } catch (CameraAccessException e) { - mResult = null; - mError = e; - dispatchResult(); - } - } else { - LOG.w("Device does not support auto focus. Running precapture."); - runPrecapture(lastResult); - } - } - - @SuppressWarnings("ConstantConditions") - private boolean supportsPrecapture() { - // Precapture is not supported on legacy devices. - int level = mCharacteristics.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL); - if (level == CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY) return false; - // We still have to check the current AE mode, see CaptureResult.CONTROL_AE_STATE. 
- int aeMode = mBuilder.get(CaptureRequest.CONTROL_AE_MODE); - return aeMode == CameraCharacteristics.CONTROL_AE_MODE_ON - || aeMode == CameraCharacteristics.CONTROL_AE_MODE_ON_ALWAYS_FLASH - || aeMode == CameraCharacteristics.CONTROL_AE_MODE_ON_AUTO_FLASH - || aeMode == CameraCharacteristics.CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE - || aeMode == 5 /* CameraCharacteristics.CONTROL_AE_MODE_ON_EXTERNAL_FLASH, API 28 */; - } - - private void runPrecapture(@NonNull CaptureResult lastResult) { - Integer aeState = lastResult.get(CaptureResult.CONTROL_AE_STATE); - boolean shouldSkip = aeState != null && aeState == CaptureResult.CONTROL_AE_STATE_CONVERGED; - boolean supports = supportsPrecapture(); - LOG.i("runPrecapture:", "supports:", supports, "shouldSkip:", shouldSkip, "aeState:", aeState); - if (supports && !shouldSkip) { - try { - mState = STATE_WAITING_PRECAPTURE_START; - mBuilder.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER, - CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER_START); - mSession.capture(mBuilder.build(), mCallback, null); - } catch (CameraAccessException e) { - mResult = null; - mError = e; - dispatchResult(); + mAction = new BaseAction() { + + @Override + protected void onStart(@NonNull ActionHolder holder) { + super.onStart(holder); + mPictureBuilder.addTarget(mPictureReader.getSurface()); + mPictureBuilder.set(CaptureRequest.JPEG_ORIENTATION, mResult.rotation); + mPictureBuilder.setTag(CameraDevice.TEMPLATE_STILL_CAPTURE); + try { + holder.applyBuilder(this, mPictureBuilder); + } catch (CameraAccessException e) { + mResult = null; + mError = e; + dispatchResult(); + } } - } else { - LOG.w("Device does not support precapture. Running capture."); - runCapture(); - } - } - private void runCapture() { - try { - mState = STATE_WAITING_CAPTURE; - mPictureBuilder.setTag(REQUEST_TAG); - mPictureBuilder.addTarget(mPictureReader.getSurface()); - mPictureBuilder.set(CaptureRequest.JPEG_ORIENTATION, mResult.rotation); - if (mStopPreviewBeforeCapture) { - // These two are present in official samples and are probably meant to speed things up? - // But from my tests, they actually make everything slower. So this is disabled by default - // with a boolean coming from the engine. Maybe in the future we can make this configurable - // as some people might want to stop the preview while picture is being taken even if it - // increases the latency. - mSession.stopRepeating(); - mSession.abortCaptures(); + @Override + public void onCaptureStarted(@NonNull ActionHolder holder, @NonNull CaptureRequest request) { + super.onCaptureStarted(holder, request); + if (request.getTag() == (Integer) CameraDevice.TEMPLATE_STILL_CAPTURE) { + LOG.i("onCaptureStarted:", "Dispatching picture shutter."); + dispatchOnShutter(false); + setState(STATE_COMPLETED); + } } - mSession.capture(mPictureBuilder.build(), mCallback, null); - } catch (CameraAccessException e) { - mResult = null; - mError = e; - dispatchResult(); - } - } - - public void onCaptureStarted(@NonNull CaptureRequest request) { - if (request.getTag() == (Integer) REQUEST_TAG) { - dispatchOnShutter(false); - } - } - - public void onCaptureProgressed(@NonNull CaptureResult result) { - // Let's ignore these. They often do not have good results. 
- // process(result); + }; } - public void onCaptureCompleted(@NonNull CaptureResult result) { - process(result); - } - - private void process(@NonNull CaptureResult result) { - switch (mState) { - case STATE_IDLE: break; - case STATE_WAITING_FIRST_FRAME: { - runAutoFocus(result); - break; - } - case STATE_WAITING_AUTOFOCUS: { - Integer afState = result.get(CaptureResult.CONTROL_AF_STATE); - if (afState == null - || afState == CaptureResult.CONTROL_AF_STATE_FOCUSED_LOCKED - || afState == CaptureResult.CONTROL_AF_STATE_NOT_FOCUSED_LOCKED) { - runPrecapture(result); - } - break; - } - case STATE_WAITING_PRECAPTURE_START: { - Integer aeState = result.get(CaptureResult.CONTROL_AE_STATE); - if (aeState == null - || aeState == CaptureResult.CONTROL_AE_STATE_PRECAPTURE - // The one above is a transient state, which means it might not be reported - // by the camera. So in addition let's also check for the precature end states. - || aeState == CaptureRequest.CONTROL_AE_STATE_CONVERGED - || aeState == CaptureRequest.CONTROL_AE_STATE_FLASH_REQUIRED) { - mState = STATE_WAITING_PRECAPTURE_END; - } - break; - } - case STATE_WAITING_PRECAPTURE_END: { - Integer aeState = result.get(CaptureResult.CONTROL_AE_STATE); - if (aeState == null - || aeState == CaptureRequest.CONTROL_AE_STATE_CONVERGED - || aeState == CaptureRequest.CONTROL_AE_STATE_FLASH_REQUIRED - // The two above are the correct states. However, just for safety, and - // since we got STATE_WAITING_PRECAPTURE_START already, let's accept anything - // other than the precapturing state. We don't want to get stuck here. - || aeState == CaptureRequest.CONTROL_AE_STATE_SEARCHING // Camera is in normal AE routine. Should never happen. - || aeState == CaptureRequest.CONTROL_AE_STATE_INACTIVE // AE is OFF. Should never happen. - || aeState == CaptureResult.CONTROL_AE_STATE_LOCKED // AE has been locked. Should never happen. - ) { - runCapture(); - } - break; - } - case STATE_WAITING_CAPTURE: { - if (result instanceof TotalCaptureResult - && result.getRequest().getTag() == (Integer) REQUEST_TAG) { - mState = STATE_WAITING_IMAGE; - } - break; - } - } + @Override + public void take() { + mAction.start(mHolder); } @Override public void onImageAvailable(ImageReader reader) { LOG.i("onImageAvailable started."); - mState = STATE_IDLE; - // Read the JPEG. Image image = null; //noinspection TryFinallyCanBeTryWithResources @@ -266,29 +108,13 @@ public class Full2PictureRecorder extends PictureRecorder implements ImageReader mResult.rotation = 0; try { ExifInterface exif = new ExifInterface(new ByteArrayInputStream(mResult.data)); - int exifOrientation = exif.getAttributeInt(ExifInterface.TAG_ORIENTATION, ExifInterface.ORIENTATION_NORMAL); + int exifOrientation = exif.getAttributeInt(ExifInterface.TAG_ORIENTATION, + ExifInterface.ORIENTATION_NORMAL); mResult.rotation = ExifHelper.readExifOrientation(exifOrientation); } catch (IOException ignore) { } - // Before leaving, unlock focus. - if (supportsAutoFocus()) { - try { - mBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER, - CaptureRequest.CONTROL_AF_TRIGGER_CANCEL); - mSession.capture(mBuilder.build(), mCallback, null); - } catch (CameraAccessException ignore) { - } - } - // Leave. 
LOG.i("onImageAvailable ended."); dispatchResult(); } - - - @Override - protected void dispatchResult() { - mState = STATE_IDLE; - super.dispatchResult(); - } } diff --git a/cameraview/src/main/java/com/otaliastudios/cameraview/picture/Snapshot2PictureRecorder.java b/cameraview/src/main/java/com/otaliastudios/cameraview/picture/Snapshot2PictureRecorder.java new file mode 100644 index 00000000..a9653945 --- /dev/null +++ b/cameraview/src/main/java/com/otaliastudios/cameraview/picture/Snapshot2PictureRecorder.java @@ -0,0 +1,139 @@ +package com.otaliastudios.cameraview.picture; + +import android.graphics.SurfaceTexture; +import android.hardware.camera2.CameraAccessException; +import android.hardware.camera2.CaptureRequest; +import android.hardware.camera2.CaptureResult; +import android.hardware.camera2.TotalCaptureResult; +import android.os.Build; + +import androidx.annotation.NonNull; +import androidx.annotation.RequiresApi; + +import com.otaliastudios.cameraview.CameraLogger; +import com.otaliastudios.cameraview.PictureResult; +import com.otaliastudios.cameraview.engine.Camera2Engine; +import com.otaliastudios.cameraview.engine.action.Action; +import com.otaliastudios.cameraview.engine.action.ActionHolder; +import com.otaliastudios.cameraview.engine.action.Actions; +import com.otaliastudios.cameraview.engine.action.BaseAction; +import com.otaliastudios.cameraview.engine.action.CompletionCallback; +import com.otaliastudios.cameraview.engine.lock.LockAction; +import com.otaliastudios.cameraview.preview.GlCameraPreview; +import com.otaliastudios.cameraview.size.AspectRatio; + +/** + * Wraps {@link SnapshotGlPictureRecorder} for Camera2. + * + * Camera2 engine supports metering for snapshots and we expect for them to correctly fire flash as well. + * The first idea, and in theory, the most correct one, was to set {@link CaptureRequest#CONTROL_CAPTURE_INTENT} + * to {@link CaptureRequest#CONTROL_CAPTURE_INTENT_STILL_CAPTURE}. + * + * According to documentation, this will automatically trigger the flash if parameters says so. + * In fact this is what happens, but it is a very fast flash that only lasts for 1 or 2 frames. + * It's not easy to call super.take() at the exact time so that we capture the frame that was lit. + * I have tried by comparing {@link SurfaceTexture#getTimestamp()} and {@link CaptureResult#SENSOR_TIMESTAMP} + * to identify the correct frame. These timestamps match, but the frame is not the correct one. + * + * So what we do here is ignore the {@link CaptureRequest#CONTROL_CAPTURE_INTENT} and instead open the + * torch, if requested to do so. Then wait for exposure to settle again and finally take a snapshot. + * I'd still love to use the capture intent instead of this, but was not able yet. 
+ */ +@RequiresApi(Build.VERSION_CODES.LOLLIPOP) +public class Snapshot2PictureRecorder extends SnapshotGlPictureRecorder { + + private final static String TAG = Snapshot2PictureRecorder.class.getSimpleName(); + private final static CameraLogger LOG = CameraLogger.create(TAG); + private final static long LOCK_TIMEOUT = 2500; + + private static class FlashAction extends BaseAction { + + @Override + protected void onStart(@NonNull ActionHolder holder) { + super.onStart(holder); + LOG.i("FlashAction:", "Parameters locked, opening torch."); + holder.getBuilder(this).set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_TORCH); + holder.getBuilder(this).set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON); + holder.applyBuilder(this); + } + + @Override + public void onCaptureCompleted(@NonNull ActionHolder holder, + @NonNull CaptureRequest request, + @NonNull TotalCaptureResult result) { + super.onCaptureCompleted(holder, request, result); + Integer flashState = result.get(CaptureResult.FLASH_STATE); + if (flashState == null) { + LOG.w("FlashAction:", "Waiting flash, but flashState is null! Taking snapshot."); + setState(STATE_COMPLETED); + } else if (flashState == CaptureResult.FLASH_STATE_FIRED) { + LOG.i("FlashAction:", "Waiting flash and we have FIRED state! Taking snapshot."); + setState(STATE_COMPLETED); + } else { + LOG.i("FlashAction:", "Waiting flash but flashState is", + flashState, ". Waiting..."); + } + } + } + + private final Action mAction; + private final ActionHolder mHolder; + private final boolean mActionNeeded; + private Integer mOriginalAeMode; + private Integer mOriginalFlashMode; + + public Snapshot2PictureRecorder(@NonNull PictureResult.Stub stub, + @NonNull Camera2Engine engine, + @NonNull GlCameraPreview preview, + @NonNull AspectRatio outputRatio) { + super(stub, engine, preview, outputRatio); + mHolder = engine; + + mAction = Actions.sequence( + Actions.timeout(LOCK_TIMEOUT, new LockAction()), + new FlashAction()); + mAction.addCallback(new CompletionCallback() { + @Override + protected void onActionCompleted(@NonNull Action action) { + LOG.i("Taking picture with super.take()."); + Snapshot2PictureRecorder.super.take(); + } + }); + + Integer aeState = mHolder.getLastResult(mAction).get(CaptureResult.CONTROL_AE_STATE); + mActionNeeded = engine.getPictureSnapshotMetering() + && aeState != null + && aeState == CaptureResult.CONTROL_AE_STATE_FLASH_REQUIRED; + mOriginalAeMode = mHolder.getBuilder(mAction).get(CaptureRequest.CONTROL_AE_MODE); + mOriginalFlashMode = mHolder.getBuilder(mAction).get(CaptureRequest.FLASH_MODE); + } + + @Override + public void take() { + if (!mActionNeeded) { + LOG.i("take:", "Engine does no metering or needs no flash, taking fast snapshot."); + super.take(); + } else { + LOG.i("take:", "Engine needs flash. Starting action"); + mAction.start(mHolder); + } + } + + @Override + protected void dispatchResult() { + // Revert our changes. + LOG.i("dispatchResult:", "Reverting the flash changes."); + try { + // See Camera2Engine.setFlash() comments: turning TORCH off has bugs and we must do + // as follows. 
+ CaptureRequest.Builder builder = mHolder.getBuilder(mAction); + builder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON); + builder.set(CaptureRequest.FLASH_MODE, CaptureResult.FLASH_MODE_OFF); + mHolder.applyBuilder(mAction, builder); + builder.set(CaptureRequest.CONTROL_AE_MODE, mOriginalAeMode); + builder.set(CaptureRequest.FLASH_MODE, mOriginalFlashMode); + mHolder.applyBuilder(mAction); + } catch (CameraAccessException ignore) {} + super.dispatchResult(); + } +} diff --git a/cameraview/src/main/java/com/otaliastudios/cameraview/picture/SnapshotGlPictureRecorder.java b/cameraview/src/main/java/com/otaliastudios/cameraview/picture/SnapshotGlPictureRecorder.java index 5e42d13e..b4d019e8 100644 --- a/cameraview/src/main/java/com/otaliastudios/cameraview/picture/SnapshotGlPictureRecorder.java +++ b/cameraview/src/main/java/com/otaliastudios/cameraview/picture/SnapshotGlPictureRecorder.java @@ -31,7 +31,7 @@ import com.otaliastudios.cameraview.size.AspectRatio; import com.otaliastudios.cameraview.size.Size; import androidx.annotation.NonNull; -import androidx.annotation.Nullable; +import androidx.annotation.WorkerThread; import android.view.Surface; @@ -75,14 +75,13 @@ public class SnapshotGlPictureRecorder extends PictureRecorder { @NonNull PictureResult.Stub stub, @NonNull CameraEngine engine, @NonNull GlCameraPreview preview, - @NonNull AspectRatio outputRatio, - @Nullable Overlay overlay) { + @NonNull AspectRatio outputRatio) { super(stub, engine); mEngine = engine; mPreview = preview; mOutputRatio = outputRatio; - mOverlay = overlay; - mHasOverlay = overlay != null && overlay.drawsOn(Overlay.Target.PICTURE_SNAPSHOT); + mOverlay = engine.getOverlay(); + mHasOverlay = mOverlay != null && mOverlay.drawsOn(Overlay.Target.PICTURE_SNAPSHOT); } @TargetApi(Build.VERSION_CODES.KITKAT) @@ -113,7 +112,7 @@ public class SnapshotGlPictureRecorder extends PictureRecorder { @RendererThread @TargetApi(Build.VERSION_CODES.KITKAT) - private void onRendererTextureCreated(int textureId) { + protected void onRendererTextureCreated(int textureId) { mTextureId = textureId; mViewport = new EglViewport(); // Need to crop the size. @@ -129,10 +128,30 @@ public class SnapshotGlPictureRecorder extends PictureRecorder { @RendererThread @TargetApi(Build.VERSION_CODES.KITKAT) - private void onRendererFilterChanged(@NonNull Filter filter) { + protected void onRendererFilterChanged(@NonNull Filter filter) { mViewport.setFilter(filter.copy()); } + @RendererThread + @TargetApi(Build.VERSION_CODES.KITKAT) + protected void onRendererFrame(@SuppressWarnings("unused") @NonNull final SurfaceTexture surfaceTexture, + final float scaleX, + final float scaleY) { + // Get egl context from the RendererThread, which is the one in which we have created + // the textureId and the overlayTextureId, managed by the GlSurfaceView. + // Next operations can then be performed on different threads using this handle. + final EGLContext eglContext = EGL14.eglGetCurrentContext(); + // Calling this invalidates the rotation/scale logic below: + // surfaceTexture.getTransformMatrix(mTransform); // TODO activate and fix the logic. + WorkerHandler.execute(new Runnable() { + @Override + public void run() { + takeFrame(surfaceTexture, scaleX, scaleY, eglContext); + + } + }); + } + /** * The tricky part here is the EGL surface creation. 
* @@ -156,78 +175,67 @@ public class SnapshotGlPictureRecorder extends PictureRecorder { * @param scaleX frame scale x in {@link Reference#VIEW} * @param scaleY frame scale y in {@link Reference#VIEW} */ - @RendererThread + @WorkerThread @TargetApi(Build.VERSION_CODES.KITKAT) - private void onRendererFrame(@SuppressWarnings("unused") @NonNull SurfaceTexture surfaceTexture, - final float scaleX, - final float scaleY) { - // Get egl context from the RendererThread, which is the one in which we have created - // the textureId and the overlayTextureId, managed by the GlSurfaceView. - // Next operations can then be performed on different threads using this handle. - final EGLContext eglContext = EGL14.eglGetCurrentContext(); - // Calling this invalidates the rotation/scale logic below: - // surfaceTexture.getTransformMatrix(mTransform); // TODO activate and fix the logic. - WorkerHandler.execute(new Runnable() { - @Override - public void run() { - // 0. EGL window will need an output. - // We create a fake one as explained in javadocs. - final int fakeOutputTextureId = 9999; - SurfaceTexture fakeOutputSurface = new SurfaceTexture(fakeOutputTextureId); - fakeOutputSurface.setDefaultBufferSize(mResult.size.getWidth(), mResult.size.getHeight()); - - // 1. Create an EGL surface - final EglCore core = new EglCore(eglContext, EglCore.FLAG_RECORDABLE); - final EglBaseSurface eglSurface = new EglWindowSurface(core, fakeOutputSurface); - eglSurface.makeCurrent(); - - // 2. Apply scale and crop - boolean flip = mEngine.getAngles().flip(Reference.VIEW, Reference.SENSOR); - float realScaleX = flip ? scaleY : scaleX; - float realScaleY = flip ? scaleX : scaleY; - float scaleTranslX = (1F - realScaleX) / 2F; - float scaleTranslY = (1F - realScaleY) / 2F; - Matrix.translateM(mTransform, 0, scaleTranslX, scaleTranslY, 0); - Matrix.scaleM(mTransform, 0, realScaleX, realScaleY, 1); - - // 3. Apply rotation and flip - Matrix.translateM(mTransform, 0, 0.5F, 0.5F, 0); // Go back to 0,0 - Matrix.rotateM(mTransform, 0, -mResult.rotation, 0, 0, 1); // Rotate (not sure why we need the minus) - mResult.rotation = 0; - if (mResult.facing == Facing.FRONT) { // 5. Flip horizontally for front camera - Matrix.scaleM(mTransform, 0, -1, 1, 1); - } - Matrix.translateM(mTransform, 0, -0.5F, -0.5F, 0); // Go back to old position - - // 4. Do pretty much the same for overlays - if (mHasOverlay) { - // 1. First we must draw on the texture and get latest image - mOverlayDrawer.draw(Overlay.Target.PICTURE_SNAPSHOT); - - // 2. Then we can apply the transformations - int rotation = mEngine.getAngles().offset(Reference.VIEW, Reference.OUTPUT, Axis.ABSOLUTE); - Matrix.translateM(mOverlayDrawer.getTransform(), 0, 0.5F, 0.5F, 0); - Matrix.rotateM(mOverlayDrawer.getTransform(), 0, rotation, 0, 0, 1); - // No need to flip the x axis for front camera, but need to flip the y axis always. - Matrix.scaleM(mOverlayDrawer.getTransform(), 0, 1, -1, 1); - Matrix.translateM(mOverlayDrawer.getTransform(), 0, -0.5F, -0.5F, 0); - } - - // 5. Draw and save - mViewport.drawFrame(mTextureId, mTransform); - if (mHasOverlay) mOverlayDrawer.render(); - mResult.format = PictureResult.FORMAT_JPEG; - mResult.data = eglSurface.saveFrameTo(Bitmap.CompressFormat.JPEG); - - // 6. 
Cleanup - eglSurface.releaseEglSurface(); - mViewport.release(); - fakeOutputSurface.release(); - if (mHasOverlay) mOverlayDrawer.release(); - core.release(); - dispatchResult(); - } - }); + protected void takeFrame(@NonNull SurfaceTexture surfaceTexture, float scaleX, float scaleY, @NonNull EGLContext eglContext) { + + // 0. EGL window will need an output. + // We create a fake one as explained in javadocs. + final int fakeOutputTextureId = 9999; + SurfaceTexture fakeOutputSurface = new SurfaceTexture(fakeOutputTextureId); + fakeOutputSurface.setDefaultBufferSize(mResult.size.getWidth(), mResult.size.getHeight()); + + // 1. Create an EGL surface + final EglCore core = new EglCore(eglContext, EglCore.FLAG_RECORDABLE); + final EglBaseSurface eglSurface = new EglWindowSurface(core, fakeOutputSurface); + eglSurface.makeCurrent(); + + // 2. Apply scale and crop + boolean flip = mEngine.getAngles().flip(Reference.VIEW, Reference.SENSOR); + float realScaleX = flip ? scaleY : scaleX; + float realScaleY = flip ? scaleX : scaleY; + float scaleTranslX = (1F - realScaleX) / 2F; + float scaleTranslY = (1F - realScaleY) / 2F; + Matrix.translateM(mTransform, 0, scaleTranslX, scaleTranslY, 0); + Matrix.scaleM(mTransform, 0, realScaleX, realScaleY, 1); + + // 3. Apply rotation and flip + Matrix.translateM(mTransform, 0, 0.5F, 0.5F, 0); // Go back to 0,0 + Matrix.rotateM(mTransform, 0, -mResult.rotation, 0, 0, 1); // Rotate (not sure why we need the minus) + mResult.rotation = 0; + if (mResult.facing == Facing.FRONT) { // 5. Flip horizontally for front camera + Matrix.scaleM(mTransform, 0, -1, 1, 1); + } + Matrix.translateM(mTransform, 0, -0.5F, -0.5F, 0); // Go back to old position + + // 4. Do pretty much the same for overlays + if (mHasOverlay) { + // 1. First we must draw on the texture and get latest image + mOverlayDrawer.draw(Overlay.Target.PICTURE_SNAPSHOT); + + // 2. Then we can apply the transformations + int rotation = mEngine.getAngles().offset(Reference.VIEW, Reference.OUTPUT, Axis.ABSOLUTE); + Matrix.translateM(mOverlayDrawer.getTransform(), 0, 0.5F, 0.5F, 0); + Matrix.rotateM(mOverlayDrawer.getTransform(), 0, rotation, 0, 0, 1); + // No need to flip the x axis for front camera, but need to flip the y axis always. + Matrix.scaleM(mOverlayDrawer.getTransform(), 0, 1, -1, 1); + Matrix.translateM(mOverlayDrawer.getTransform(), 0, -0.5F, -0.5F, 0); + } + + // 5. Draw and save + LOG.i("takeFrame:", "timestamp:", surfaceTexture.getTimestamp()); + mViewport.drawFrame(mTextureId, mTransform); + if (mHasOverlay) mOverlayDrawer.render(); + mResult.format = PictureResult.FORMAT_JPEG; + mResult.data = eglSurface.saveFrameTo(Bitmap.CompressFormat.JPEG); + + // 6. 
Cleanup + eglSurface.releaseEglSurface(); + mViewport.release(); + fakeOutputSurface.release(); + if (mHasOverlay) mOverlayDrawer.release(); + core.release(); + dispatchResult(); } @Override diff --git a/cameraview/src/main/java/com/otaliastudios/cameraview/preview/GlCameraPreview.java b/cameraview/src/main/java/com/otaliastudios/cameraview/preview/GlCameraPreview.java index b85785df..9afc1f92 100644 --- a/cameraview/src/main/java/com/otaliastudios/cameraview/preview/GlCameraPreview.java +++ b/cameraview/src/main/java/com/otaliastudios/cameraview/preview/GlCameraPreview.java @@ -2,6 +2,7 @@ package com.otaliastudios.cameraview.preview; import android.content.Context; import android.graphics.SurfaceTexture; +import android.hardware.camera2.CaptureResult; import android.opengl.GLSurfaceView; import android.opengl.Matrix; import androidx.annotation.NonNull; @@ -186,14 +187,17 @@ public class GlCameraPreview extends FilterCameraPreview + + + diff --git a/demo/src/main/java/com/otaliastudios/cameraview/demo/CameraActivity.java b/demo/src/main/java/com/otaliastudios/cameraview/demo/CameraActivity.java index 3d8c8078..50508a71 100644 --- a/demo/src/main/java/com/otaliastudios/cameraview/demo/CameraActivity.java +++ b/demo/src/main/java/com/otaliastudios/cameraview/demo/CameraActivity.java @@ -7,11 +7,14 @@ import android.graphics.Bitmap; import android.graphics.BitmapFactory; import android.graphics.Color; import android.graphics.ImageFormat; +import android.graphics.PointF; import android.graphics.Rect; import android.graphics.YuvImage; import android.os.Bundle; import androidx.annotation.NonNull; import com.google.android.material.bottomsheet.BottomSheetBehavior; + +import androidx.annotation.Nullable; import androidx.appcompat.app.AppCompatActivity; import android.view.View; @@ -111,6 +114,7 @@ public class CameraActivity extends AppCompatActivity implements View.OnClickLis new Option.Mode(), new Option.Engine(), new Option.Preview(), // Some controls new Option.Flash(), new Option.WhiteBalance(), new Option.Hdr(), + new Option.PictureMetering(), new Option.PictureSnapshotMetering(), // Video recording new Option.VideoCodec(), new Option.Audio(), // Gestures @@ -126,7 +130,7 @@ public class CameraActivity extends AppCompatActivity implements View.OnClickLis List dividers = Arrays.asList( false, true, false, false, true, - false, false, true, + false, false, false, false, true, false, true, false, false, false, false, true, false, false, true, @@ -236,6 +240,18 @@ public class CameraActivity extends AppCompatActivity implements View.OnClickLis message("Video taken. 
Processing...", false); LOG.w("onVideoRecordingEnd!"); } + + @Override + public void onExposureCorrectionChanged(float newValue, @NonNull float[] bounds, @Nullable PointF[] fingers) { + super.onExposureCorrectionChanged(newValue, bounds, fingers); + message("Exposure correction:" + newValue, false); + } + + @Override + public void onZoomChanged(float newValue, @NonNull float[] bounds, @Nullable PointF[] fingers) { + super.onZoomChanged(newValue, bounds, fingers); + message("Zoom:" + newValue, false); + } } @Override diff --git a/demo/src/main/java/com/otaliastudios/cameraview/demo/Option.java b/demo/src/main/java/com/otaliastudios/cameraview/demo/Option.java index 89ba17ea..34f66279 100644 --- a/demo/src/main/java/com/otaliastudios/cameraview/demo/Option.java +++ b/demo/src/main/java/com/otaliastudios/cameraview/demo/Option.java @@ -252,6 +252,54 @@ public abstract class Option { } } + public static class PictureMetering extends Option { + + public PictureMetering() { + super("Picture Metering"); + } + + @NonNull + @Override + public Boolean get(@NonNull CameraView view) { + return view.getPictureMetering(); + } + + @NonNull + @Override + public Collection getAll(@NonNull CameraView view, @NonNull CameraOptions options) { + return Arrays.asList(true, false); + } + + @Override + public void set(@NonNull CameraView view, @NonNull Boolean value) { + view.setPictureMetering(value); + } + } + + public static class PictureSnapshotMetering extends Option { + + public PictureSnapshotMetering() { + super("Picture Snapshot Metering"); + } + + @NonNull + @Override + public Boolean get(@NonNull CameraView view) { + return view.getPictureSnapshotMetering(); + } + + @NonNull + @Override + public Collection getAll(@NonNull CameraView view, @NonNull CameraOptions options) { + return Arrays.asList(true, false); + } + + @Override + public void set(@NonNull CameraView view, @NonNull Boolean value) { + view.setPictureSnapshotMetering(value); + } + } + public static class VideoCodec extends ControlOption { public VideoCodec() { super(com.otaliastudios.cameraview.controls.VideoCodec.class, "Video Codec"); diff --git a/docs/_posts/2018-12-20-capture-size.md b/docs/_posts/2018-12-20-capture-size.md index b4a76b34..abf63e14 100644 --- a/docs/_posts/2018-12-20-capture-size.md +++ b/docs/_posts/2018-12-20-capture-size.md @@ -4,7 +4,7 @@ title: "Capture Size" subtitle: "Set size of output media" description: "Set size of output media" category: docs -order: 8 +order: 9 date: 2018-12-20 22:07:22 disqus: 1 --- diff --git a/docs/_posts/2018-12-20-changelog.md b/docs/_posts/2018-12-20-changelog.md index e1dcdfcf..2e8d729c 100644 --- a/docs/_posts/2018-12-20-changelog.md +++ b/docs/_posts/2018-12-20-changelog.md @@ -70,7 +70,7 @@ https://github.com/natario1/CameraView/compare/v2.0.0-beta06...v2.0.0-rc1 If you were using `focus`, just switch to `autoFocus`. -If you were using `focusWithMarker`, you can [add back the old marker](../docs/controls.html#cameraautofocusmarker). +If you were using `focusWithMarker`, you can [add back the old marker](../docs/metering.html#touch-metering-markers). 
https://github.com/natario1/CameraView/compare/v2.0.0-beta05...v2.0.0-beta06 diff --git a/docs/_posts/2018-12-20-controls.md b/docs/_posts/2018-12-20-controls.md index 6c75262a..cf89d708 100644 --- a/docs/_posts/2018-12-20-controls.md +++ b/docs/_posts/2018-12-20-controls.md @@ -143,76 +143,6 @@ cameraView.setVideoBitRate(0); cameraView.setVideoBitRate(4000000); ``` -### Auto Focus - -There are many ways to focus a CameraView engine: - -- Continuous autofocus is activated by default, where present -- User can start focus with a [Gesture](gestures.html) -- The developer can start focus with the `startAutoFocus(float, float)` API. This action needs - the coordinates of a point to focus, with respect to the view width and height. - -The last two actions will trigger the focus callbacks: - -```java -cameraView.addCameraListener(new CameraListener() { - - @Override - public void onAutoFocusStart(@NonNull PointF point) { - // Auto focus was started by a gesture or by startAutoFocus(float, float). - // The camera is currently trying to focus around that area. - // This can be used to draw things on screen. - } - - @Override - public void onAutoFocusEnd(boolean successful, @NonNull PointF point) { - // Auto focus operation just ended. If successful, the camera will have converged - // to a new focus point, and possibly changed exposure and white balance as well. - // The point is the same that was passed to onAutoFocusStart. - } -}); -``` - -Auto focus is not guaranteed to be supported: check the `CameraOptions` to be sure. - -```xml - -``` - -##### cameraAutoFocusMarker - -Lets you set a marker for drawing on screen in response to auto focus events. -In XML, you should pass the qualified class name of your marker. - -```java -cameraView.setAutoFocusMarker(null); -cameraView.setAutoFocusMarker(marker); -``` - -We offer a default marker (similar to the old `focusWithMarker` attribute in v1), -which you can set in XML using the `@string/cameraview_default_autofocus_marker` resource, -or programmatically: - -```java -cameraView.setAutoFocusMarker(new DefaultAutoFocusMarker()); -``` - -##### cameraAutoFocusResetDelay - -Lets you control how an auto-focus operation is reset after completed. -Setting a value <= 0 or == Long.MAX_VALUE will not reset the auto-focus. -This is useful for low end devices that have slow auto-focus capabilities. -Defaults to 3 seconds. - -```java -cameraView.setCameraAutoFocusResetDelay(1000); // 1 second -cameraView.setCameraAutoFocusResetDelay(0); // NO reset -cameraView.setCameraAutoFocusResetDelay(-1); // NO reset -cameraView.setCameraAutoFocusResetDelay(Long.MAX_VALUE); // NO reset -``` - ### Zoom There are two ways to control the zoom value: @@ -235,28 +165,3 @@ cameraView.addCameraListener(new CameraListener() { ``` Zoom is not guaranteed to be supported: check the `CameraOptions` to be sure. - -### Exposure correction - -There are two ways to control the exposure correction value: - -- User can change the exposure correction with a [Gesture](gestures.html) -- The developer can change this value with the `setExposureCorrection(float)` API, passing in the EV - value, in camera stops. This value should be contained in the minimum and maximum supported values, - as returned by `CameraOptions`. 
- -Both actions will trigger the exposure correction callback, which can be used, for example, to draw a seek bar: - -```java -cameraView.addCameraListener(new CameraListener() { - - @UiThread - public void onExposureCorrectionChanged(float newValue, @NonNull float[] bounds, @Nullable PointF[] fingers) { - // newValue: the new correction value - // bounds: min and max bounds for newValue, as returned by {@link CameraOptions} - // fingers: finger positions that caused the event, null if not caused by touch - } -}); -``` - -EV correction is not guaranteed to be supported: check the `CameraOptions` to be sure. \ No newline at end of file diff --git a/docs/_posts/2018-12-20-debugging.md b/docs/_posts/2018-12-20-debugging.md index 17382d00..0fa288bf 100644 --- a/docs/_posts/2018-12-20-debugging.md +++ b/docs/_posts/2018-12-20-debugging.md @@ -2,7 +2,7 @@ layout: page title: "Debugging" category: docs -order: 14 +order: 15 date: 2018-12-20 20:02:38 disqus: 1 --- diff --git a/docs/_posts/2018-12-20-error-handling.md b/docs/_posts/2018-12-20-error-handling.md index 069e039e..b1a3fcc3 100644 --- a/docs/_posts/2018-12-20-error-handling.md +++ b/docs/_posts/2018-12-20-error-handling.md @@ -2,7 +2,7 @@ layout: page title: "Error Handling" category: docs -order: 13 +order: 14 date: 2018-12-20 20:02:31 disqus: 1 --- diff --git a/docs/_posts/2018-12-20-frame-processing.md b/docs/_posts/2018-12-20-frame-processing.md index 1f1e463b..87d5714d 100644 --- a/docs/_posts/2018-12-20-frame-processing.md +++ b/docs/_posts/2018-12-20-frame-processing.md @@ -4,7 +4,7 @@ title: "Frame Processing" subtitle: "Process each frame in real time" description: "Process each frame in real time" category: docs -order: 5 +order: 6 date: 2018-12-20 20:45:42 disqus: 1 --- diff --git a/docs/_posts/2018-12-20-gestures.md b/docs/_posts/2018-12-20-gestures.md index 6a510ba8..43f88054 100644 --- a/docs/_posts/2018-12-20-gestures.md +++ b/docs/_posts/2018-12-20-gestures.md @@ -4,7 +4,7 @@ title: "Gestures" subtitle: "Gestures control" description: "Gestures control" category: docs -order: 4 +order: 5 date: 2018-12-20 20:49:35 disqus: 1 --- @@ -39,10 +39,10 @@ Looking at this from the other side: |Gesture action|Description|Can be mapped to| |--------------|-----------|----------------| |`NONE`|Disables this gesture.|`TAP` `LONG_TAP` `PINCH` `SCROLL_HORIZONTAL` `SCROLL_VERTICAL`| -|`AUTO_FOCUS`|Launches an [auto-focus operation](controls.html#auto-focus) on the finger position.|`TAP` `LONG_TAP`| +|`AUTO_FOCUS`|Launches a [touch metering operation](metering.html#touch-metering) on the finger position.|`TAP` `LONG_TAP`| |`TAKE_PICTURE`|Takes a picture using [takePicture](capturing-media.html).|`TAP` `LONG_TAP`| |`ZOOM`|[Zooms](controls.html#zoom) in or out.|`PINCH` `SCROLL_HORIZONTAL` `SCROLL_VERTICAL`| -|`EXPOSURE_CORRECTION`|Controls the [exposure correction](controls.html#exposure-correction).|`PINCH` `SCROLL_HORIZONTAL` `SCROLL_VERTICAL`| +|`EXPOSURE_CORRECTION`|Controls the [exposure correction](metering.html#exposure-correction).|`PINCH` `SCROLL_HORIZONTAL` `SCROLL_VERTICAL`| |`FILTER_CONTROL_1`|Controls the first parameter (if any) of a [real-time filter](filters.html).|`PINCH` `SCROLL_HORIZONTAL` `SCROLL_VERTICAL`| |`FILTER_CONTROL_2`|Controls the second parameter (if any) of a [real-time filter](filters.html).|`PINCH` `SCROLL_HORIZONTAL` `SCROLL_VERTICAL`| diff --git a/docs/_posts/2018-12-20-more-features.md b/docs/_posts/2018-12-20-more-features.md index 36cb9eed..0afff7aa 100644 --- a/docs/_posts/2018-12-20-more-features.md 
+++ b/docs/_posts/2018-12-20-more-features.md @@ -4,7 +4,7 @@ title: "More features" subtitle: "Undocumented features & more" description: "Undocumented features & more" category: docs -order: 15 +order: 16 date: 2018-12-20 20:41:20 disqus: 1 --- diff --git a/docs/_posts/2018-12-20-preview-size.md b/docs/_posts/2018-12-20-preview-size.md index 5914b5c6..78a1ac4f 100644 --- a/docs/_posts/2018-12-20-preview-size.md +++ b/docs/_posts/2018-12-20-preview-size.md @@ -4,7 +4,7 @@ title: "Preview Size" subtitle: "Measuring behavior" description: "Measuring behavior" category: docs -order: 7 +order: 8 date: 2018-12-20 22:07:17 disqus: 1 --- diff --git a/docs/_posts/2018-12-20-previews.md b/docs/_posts/2018-12-20-previews.md index 32c9a532..a2a53d2a 100644 --- a/docs/_posts/2018-12-20-previews.md +++ b/docs/_posts/2018-12-20-previews.md @@ -4,7 +4,7 @@ title: "Engine and Previews" subtitle: "Camera engine and preview implementations" description: "Camera engine and preview implementations" category: docs -order: 6 +order: 7 date: 2018-12-20 21:58:16 disqus: 1 --- diff --git a/docs/_posts/2018-12-20-runtime-permissions.md b/docs/_posts/2018-12-20-runtime-permissions.md index cd3d7934..1a44eb2e 100644 --- a/docs/_posts/2018-12-20-runtime-permissions.md +++ b/docs/_posts/2018-12-20-runtime-permissions.md @@ -4,7 +4,7 @@ title: "Runtime Permissions" subtitle: "Permissions and Manifest setup" description: "Permissions and Manifest setup" category: docs -order: 12 +order: 13 date: 2018-12-20 20:03:03 disqus: 1 --- diff --git a/docs/_posts/2019-02-24-snapshot-size.md b/docs/_posts/2019-02-24-snapshot-size.md index b6b326fd..c79ff2de 100644 --- a/docs/_posts/2019-02-24-snapshot-size.md +++ b/docs/_posts/2019-02-24-snapshot-size.md @@ -4,7 +4,7 @@ title: "Snapshot Size" subtitle: "Sizing the snapshots output" description: "Sizing the snapshots output" category: docs -order: 9 +order: 10 date: 2019-02-24 17:36:39 disqus: 1 --- diff --git a/docs/_posts/2019-07-14-watermarks-and-overlays.md b/docs/_posts/2019-07-14-watermarks-and-overlays.md index 527c46a2..22024ce6 100644 --- a/docs/_posts/2019-07-14-watermarks-and-overlays.md +++ b/docs/_posts/2019-07-14-watermarks-and-overlays.md @@ -4,7 +4,7 @@ title: "Watermarks and Overlays" subtitle: "Static and animated overlays" description: "Static and animated overlays" category: docs -order: 10 +order: 11 date: 2019-07-14 20:14:31 disqus: 1 --- diff --git a/docs/_posts/2019-08-06-filters.md b/docs/_posts/2019-08-06-filters.md index 56c80a85..46a07274 100644 --- a/docs/_posts/2019-08-06-filters.md +++ b/docs/_posts/2019-08-06-filters.md @@ -4,7 +4,7 @@ title: "Real-time Filters" subtitle: "Apply filters to preview and snapshots" description: "Apply filters to preview and snapshots" category: docs -order: 11 +order: 12 date: 2019-08-06 17:10:17 disqus: 1 --- diff --git a/docs/_posts/2019-09-04-metering.md b/docs/_posts/2019-09-04-metering.md new file mode 100644 index 00000000..123e1c6d --- /dev/null +++ b/docs/_posts/2019-09-04-metering.md @@ -0,0 +1,158 @@ +--- +layout: page +title: "Metering" +subtitle: "Exposure and metering controls" +description: "Exposure and metering controls" +category: docs +order: 4 +date: 2019-09-04 19:39:03 +disqus: 1 +--- + +In CameraView grammar, metering is the act of measuring the scene brightness, colors and focus +distance in order to automatically adapt the camera exposure, focus and white balance (AE, AF and AWB, +often referred as 3A). 
+ +We treat three different types of metering: [continuous metering](#continuous-metering), +[picture metering](#picture-metering) and [touch metering](#touch-metering). + +You can also apply an adjustment to the metered exposure through the [exposure correction](#exposure-correction) control. + +### Continuous Metering + +By default, and if the device supports it, all three routines (AE, AF, AWB) are continuously metered +as the device moves or the scene changes. + +- For AE, this is always enabled if supported +- For AF, this is always enabled if supported +- For AWB, this is enabled if the `WhiteBalance` parameter is set to `AUTO` [[docs]](controls.html#camerawhitebalance) + +### Picture Metering + +*In Camera1, picture metering is always enabled for pictures, and always disabled for picture snapshots. +The following applies to Camera2 only.* + +The camera engine will try to trigger metering when a picture is requested, either with `takePicture()` +or `takePictureSnapshot()`. This has two obvious consequences: + +- it improves the picture quality +- it increases the latency, because metering takes time + +For these reasons, picture metering is **enabled** by default for HQ pictures and **disabled** by +default for picture snapshots. However, the behavior can be changed with two flags and their +respective XML attributes: + +```java +cameraView.setPictureMetering(true); // Meter before takePicture() +cameraView.setPictureMetering(false); // Don't +cameraView.setPictureSnapshotMetering(true); // Meter before takePictureSnapshot() +cameraView.setPictureSnapshotMetering(false); // Don't +``` + +### Touch Metering + +Touch metering is triggered either by a [Gesture](gestures.html) or by the developer, who +can start touch metering on a specific point with the `startAutoFocus(float, float)` API. +This action needs the coordinates of a point computed with respect to the view width and height. + +In both cases, the metering callbacks will be triggered: + +```java +cameraView.addCameraListener(new CameraListener() { + + @Override + public void onAutoFocusStart(@NonNull PointF point) { + // Touch metering was started by a gesture or by startAutoFocus(float, float). + // The camera is currently trying to meter around that area. + // This can be used to draw things on screen. + } + + @Override + public void onAutoFocusEnd(boolean successful, @NonNull PointF point) { + // Touch metering operation just ended. If successful, the camera will have converged + // to a new focus point, and possibly new exposure and white balance as well. + // The point is the same that was passed to onAutoFocusStart. + } +}); +``` + +Touch metering is not guaranteed to be supported: check the `CameraOptions` to be sure. + +##### Touch Metering Markers + +You can set a marker for drawing on screen in response to touch metering events. +In XML, you should pass the qualified class name of your marker. + +```java +cameraView.setAutoFocusMarker(null); +cameraView.setAutoFocusMarker(marker); +``` + +We offer a default marker (similar to the old `focusWithMarker` attribute in v1), +which you can set in XML using the `@string/cameraview_default_autofocus_marker` resource, +or programmatically: + +```java +cameraView.setAutoFocusMarker(new DefaultAutoFocusMarker()); +``` + +##### Touch Metering Reset Delay + +You can control how a touch metering operation is reset after it completes. +Setting a value <= 0 or == Long.MAX_VALUE will not reset the metering values.
+This is useful for low end devices that have slow auto-focus capabilities. +Defaults to 3 seconds. + +```java +cameraView.setCameraAutoFocusResetDelay(1000); // 1 second +cameraView.setCameraAutoFocusResetDelay(0); // NO reset +cameraView.setCameraAutoFocusResetDelay(-1); // NO reset +cameraView.setCameraAutoFocusResetDelay(Long.MAX_VALUE); // NO reset +``` + +### Exposure correction + +There are two ways to control the exposure correction value: + +- The user can change the exposure correction with a [Gesture](gestures.html) +- The developer can change this value with the `setExposureCorrection(float)` API, passing in the EV + value, in camera stops. This value should be contained in the minimum and maximum supported values, + as returned by `CameraOptions`. + +Both actions will trigger the exposure correction callback, which can be used, for example, to draw a seek bar: + +```java +cameraView.addCameraListener(new CameraListener() { + + @UiThread + public void onExposureCorrectionChanged(float newValue, @NonNull float[] bounds, @Nullable PointF[] fingers) { + // newValue: the new correction value + // bounds: min and max bounds for newValue, as returned by CameraOptions + // fingers: finger positions that caused the event, null if not caused by touch + } +}); +``` + +EV correction is not guaranteed to be supported: check the `CameraOptions` to be sure. + +### Related XML Attributes + +```xml +<com.otaliastudios.cameraview.CameraView + app:cameraPictureMetering="true|false" + app:cameraPictureSnapshotMetering="false|true" + app:cameraAutoFocusResetDelay="3000" + app:cameraAutoFocusMarker="@string/cameraview_default_autofocus_marker"/> +``` + +### Related APIs + +|Method|Description| +|------|-----------| +|`setPictureMetering(boolean)`|Whether the engine should trigger 3A metering when a picture is requested. Defaults to true.| +|`setPictureSnapshotMetering(boolean)`|Whether the engine should trigger 3A metering when a picture snapshot is requested. Defaults to false.| +|`startAutoFocus(float, float)`|Starts the 3A touch metering routine at the given coordinates, with respect to the view system.| +|`CameraOptions.isAutoFocusSupported()`|Whether touch metering (metering with respect to a specific region of the screen) is supported.| +|`setExposureCorrection(float)`|Changes the exposure adjustment, in EV stops. A positive value means a brighter picture.| +|`CameraOptions.getExposureCorrectionMinValue()`|The minimum value of negative exposure correction, in EV stops.| +|`CameraOptions.getExposureCorrectionMaxValue()`|The maximum value of positive exposure correction, in EV stops.| \ No newline at end of file
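Usage note (not part of the patch above): the sketch below shows how a client app might combine the metering controls introduced in this changeset. It only uses APIs documented here (`setPictureMetering`, `setPictureSnapshotMetering`, `startAutoFocus`, `setExposureCorrection`, `takePictureSnapshot` and the `CameraListener` focus callbacks); the `R.id.camera` id and the surrounding Activity wiring are illustrative assumptions, not part of the library or the patch.

```java
// Illustrative sketch, assuming a CameraView with id "camera" inside an Activity layout.
final CameraView cameraView = findViewById(R.id.camera);
cameraView.setPictureMetering(true);          // meter (3A) before takePicture() - the default
cameraView.setPictureSnapshotMetering(false); // keep takePictureSnapshot() fast - the default
cameraView.addCameraListener(new CameraListener() {
    @Override
    public void onAutoFocusStart(@NonNull PointF point) {
        // Touch metering started around this point: draw a marker here if desired.
    }

    @Override
    public void onAutoFocusEnd(boolean successful, @NonNull PointF point) {
        // The 3A routines converged (or failed). A capture taken now uses the metered values.
        if (successful) cameraView.takePictureSnapshot();
    }
});
// Start touch metering at the view center, then brighten the output by one stop
// (in real code, keep the value within the CameraOptions exposure correction bounds).
cameraView.startAutoFocus(cameraView.getWidth() / 2f, cameraView.getHeight() / 2f);
cameraView.setExposureCorrection(1f);
```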