Focus improvements (#574)

* New approach for metering

* Include AWB

* Complete AWB integration

* Rearrange code into MeteringParameters objects

* Add FORCED_END_DELAY into Meter

* Small changes

* Improve Camera2 pictures speed and quality

* Extend auto focus functionality to more cameras

* Move Mapper to own package

* Refactor Camera1Mapper

* Refactor Camera2Mapper

* Rename mapper methods

* Add Camera2MapperTests

* Fix success parameter

* Fix focus when zooming
Mattia Iavarone committed 5 years ago (commit a8fddc482f, parent 0731b64c2f)
Files changed (number of changed lines in parentheses):

  1. cameraview/src/androidTest/java/com/otaliastudios/cameraview/CameraOptions1Test.java (7)
  2. cameraview/src/androidTest/java/com/otaliastudios/cameraview/engine/mappers/Camera1MapperTest.java (40)
  3. cameraview/src/androidTest/java/com/otaliastudios/cameraview/engine/mappers/Camera2MapperTest.java (101)
  4. cameraview/src/main/java/com/otaliastudios/cameraview/CameraOptions.java (44)
  5. cameraview/src/main/java/com/otaliastudios/cameraview/CameraUtils.java (5)
  6. cameraview/src/main/java/com/otaliastudios/cameraview/engine/Camera1Engine.java (12)
  7. cameraview/src/main/java/com/otaliastudios/cameraview/engine/Camera2Engine.java (342)
  8. cameraview/src/main/java/com/otaliastudios/cameraview/engine/CameraEngine.java (2)
  9. cameraview/src/main/java/com/otaliastudios/cameraview/engine/Mapper.java (221)
  10. cameraview/src/main/java/com/otaliastudios/cameraview/engine/Meter.java (380)
  11. cameraview/src/main/java/com/otaliastudios/cameraview/engine/mappers/Camera1Mapper.java (112)
  12. cameraview/src/main/java/com/otaliastudios/cameraview/engine/mappers/Camera2Mapper.java (168)
  13. cameraview/src/main/java/com/otaliastudios/cameraview/engine/metering/AutoExposure.java (101)
  14. cameraview/src/main/java/com/otaliastudios/cameraview/engine/metering/AutoFocus.java (87)
  15. cameraview/src/main/java/com/otaliastudios/cameraview/engine/metering/AutoWhiteBalance.java (81)
  16. cameraview/src/main/java/com/otaliastudios/cameraview/engine/metering/MeteringParameter.java (54)
  17. cameraview/src/main/java/com/otaliastudios/cameraview/picture/Full2PictureRecorder.java (54)
  18. demo/src/main/res/layout/activity_camera.xml (2)
  19. docs/_posts/2018-12-20-changelog.md (2)

@@ -4,10 +4,9 @@ package com.otaliastudios.cameraview;
import android.hardware.Camera;
import com.otaliastudios.cameraview.controls.Audio;
import com.otaliastudios.cameraview.controls.Engine;
import com.otaliastudios.cameraview.controls.Facing;
import com.otaliastudios.cameraview.controls.Flash;
import com.otaliastudios.cameraview.engine.Mapper;
import com.otaliastudios.cameraview.engine.mappers.Camera1Mapper;
import com.otaliastudios.cameraview.gesture.GestureAction;
import com.otaliastudios.cameraview.controls.Grid;
import com.otaliastudios.cameraview.controls.Hdr;
@@ -252,11 +251,11 @@ public class CameraOptions1Test extends BaseTest {
}
CameraOptions o = new CameraOptions(mock(Camera.Parameters.class), false);
Mapper m = Mapper.get(Engine.CAMERA1);
Camera1Mapper m = Camera1Mapper.get();
Collection<Facing> s = o.getSupportedControls(Facing.class);
assertEquals(s.size(), supported.size());
for (Facing facing : s) {
assertTrue(supported.contains(m.<Integer>map(facing)));
assertTrue(supported.contains(m.mapFacing(facing)));
assertTrue(o.supports(facing));
}
}

@@ -1,14 +1,14 @@
package com.otaliastudios.cameraview.engine;
package com.otaliastudios.cameraview.engine.mappers;
import android.hardware.Camera;
import com.otaliastudios.cameraview.BaseTest;
import com.otaliastudios.cameraview.controls.Engine;
import com.otaliastudios.cameraview.controls.Facing;
import com.otaliastudios.cameraview.controls.Flash;
import com.otaliastudios.cameraview.controls.Hdr;
import com.otaliastudios.cameraview.controls.WhiteBalance;
import com.otaliastudios.cameraview.engine.mappers.Camera1Mapper;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import androidx.test.filters.SmallTest;
@@ -21,28 +21,28 @@ import static org.junit.Assert.*;
@RunWith(AndroidJUnit4.class)
@SmallTest
public class Mapper1Test extends BaseTest {
public class Camera1MapperTest extends BaseTest {
private Mapper mapper = Mapper.get(Engine.CAMERA1);
private Camera1Mapper mapper = Camera1Mapper.get();
@Test
public void testMap() {
assertEquals(mapper.map(Flash.OFF), Camera.Parameters.FLASH_MODE_OFF);
assertEquals(mapper.map(Flash.ON), Camera.Parameters.FLASH_MODE_ON);
assertEquals(mapper.map(Flash.AUTO), Camera.Parameters.FLASH_MODE_AUTO);
assertEquals(mapper.map(Flash.TORCH), Camera.Parameters.FLASH_MODE_TORCH);
assertEquals(mapper.map(Facing.BACK), Camera.CameraInfo.CAMERA_FACING_BACK);
assertEquals(mapper.map(Facing.FRONT), Camera.CameraInfo.CAMERA_FACING_FRONT);
assertEquals(mapper.map(Hdr.OFF), Camera.Parameters.SCENE_MODE_AUTO);
assertEquals(mapper.map(Hdr.ON), Camera.Parameters.SCENE_MODE_HDR);
assertEquals(mapper.map(WhiteBalance.AUTO), Camera.Parameters.WHITE_BALANCE_AUTO);
assertEquals(mapper.map(WhiteBalance.DAYLIGHT), Camera.Parameters.WHITE_BALANCE_DAYLIGHT);
assertEquals(mapper.map(WhiteBalance.CLOUDY), Camera.Parameters.WHITE_BALANCE_CLOUDY_DAYLIGHT);
assertEquals(mapper.map(WhiteBalance.INCANDESCENT), Camera.Parameters.WHITE_BALANCE_INCANDESCENT);
assertEquals(mapper.map(WhiteBalance.FLUORESCENT), Camera.Parameters.WHITE_BALANCE_FLUORESCENT);
assertEquals(mapper.mapFlash(Flash.OFF), Camera.Parameters.FLASH_MODE_OFF);
assertEquals(mapper.mapFlash(Flash.ON), Camera.Parameters.FLASH_MODE_ON);
assertEquals(mapper.mapFlash(Flash.AUTO), Camera.Parameters.FLASH_MODE_AUTO);
assertEquals(mapper.mapFlash(Flash.TORCH), Camera.Parameters.FLASH_MODE_TORCH);
assertEquals(mapper.mapFacing(Facing.BACK), Camera.CameraInfo.CAMERA_FACING_BACK);
assertEquals(mapper.mapFacing(Facing.FRONT), Camera.CameraInfo.CAMERA_FACING_FRONT);
assertEquals(mapper.mapHdr(Hdr.OFF), Camera.Parameters.SCENE_MODE_AUTO);
assertEquals(mapper.mapHdr(Hdr.ON), Camera.Parameters.SCENE_MODE_HDR);
assertEquals(mapper.mapWhiteBalance(WhiteBalance.AUTO), Camera.Parameters.WHITE_BALANCE_AUTO);
assertEquals(mapper.mapWhiteBalance(WhiteBalance.DAYLIGHT), Camera.Parameters.WHITE_BALANCE_DAYLIGHT);
assertEquals(mapper.mapWhiteBalance(WhiteBalance.CLOUDY), Camera.Parameters.WHITE_BALANCE_CLOUDY_DAYLIGHT);
assertEquals(mapper.mapWhiteBalance(WhiteBalance.INCANDESCENT), Camera.Parameters.WHITE_BALANCE_INCANDESCENT);
assertEquals(mapper.mapWhiteBalance(WhiteBalance.FLUORESCENT), Camera.Parameters.WHITE_BALANCE_FLUORESCENT);
}

@@ -0,0 +1,101 @@
package com.otaliastudios.cameraview.engine.mappers;
import android.hardware.Camera;
import android.util.Pair;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import androidx.test.filters.SmallTest;
import com.otaliastudios.cameraview.BaseTest;
import com.otaliastudios.cameraview.controls.Facing;
import com.otaliastudios.cameraview.controls.Flash;
import com.otaliastudios.cameraview.controls.Hdr;
import com.otaliastudios.cameraview.controls.WhiteBalance;
import org.junit.Test;
import org.junit.runner.RunWith;
import java.util.List;
import java.util.Set;
import static android.hardware.camera2.CameraMetadata.*;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
@RunWith(AndroidJUnit4.class)
@SmallTest
public class Camera2MapperTest extends BaseTest {
private Camera2Mapper mapper = Camera2Mapper.get();
@Test
public void testMap() {
List<Pair<Integer, Integer>> values = mapper.mapFlash(Flash.OFF);
assertEquals(2, values.size());
assertTrue(values.contains(new Pair<>(CONTROL_AE_MODE_ON, FLASH_MODE_OFF)));
assertTrue(values.contains(new Pair<>(CONTROL_AE_MODE_OFF, FLASH_MODE_OFF)));
values = mapper.mapFlash(Flash.TORCH);
assertEquals(2, values.size());
assertTrue(values.contains(new Pair<>(CONTROL_AE_MODE_ON, FLASH_MODE_TORCH)));
assertTrue(values.contains(new Pair<>(CONTROL_AE_MODE_OFF, FLASH_MODE_TORCH)));
values = mapper.mapFlash(Flash.AUTO);
assertEquals(2, values.size());
assertTrue(values.contains(new Pair<>(CONTROL_AE_MODE_ON_AUTO_FLASH, FLASH_MODE_OFF)));
assertTrue(values.contains(new Pair<>(CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, FLASH_MODE_OFF)));
values = mapper.mapFlash(Flash.ON);
assertEquals(1, values.size());
assertTrue(values.contains(new Pair<>(CONTROL_AE_MODE_ON_ALWAYS_FLASH, FLASH_MODE_OFF)));
assertEquals(mapper.mapFacing(Facing.BACK), LENS_FACING_BACK);
assertEquals(mapper.mapFacing(Facing.FRONT), LENS_FACING_FRONT);
assertEquals(mapper.mapHdr(Hdr.OFF), CONTROL_SCENE_MODE_DISABLED);
assertEquals(mapper.mapHdr(Hdr.ON), CONTROL_SCENE_MODE_HDR);
assertEquals(mapper.mapWhiteBalance(WhiteBalance.AUTO), CONTROL_AWB_MODE_AUTO);
assertEquals(mapper.mapWhiteBalance(WhiteBalance.DAYLIGHT), CONTROL_AWB_MODE_DAYLIGHT);
assertEquals(mapper.mapWhiteBalance(WhiteBalance.CLOUDY), CONTROL_AWB_MODE_CLOUDY_DAYLIGHT);
assertEquals(mapper.mapWhiteBalance(WhiteBalance.INCANDESCENT), CONTROL_AWB_MODE_INCANDESCENT);
assertEquals(mapper.mapWhiteBalance(WhiteBalance.FLUORESCENT), CONTROL_AWB_MODE_FLUORESCENT);
}
@Test
public void testUnmap() {
Set<Flash> values;
values = mapper.unmapFlash(CONTROL_AE_MODE_OFF);
assertEquals(values.size(), 2);
assertTrue(values.contains(Flash.OFF));
assertTrue(values.contains(Flash.TORCH));
values = mapper.unmapFlash(CONTROL_AE_MODE_ON);
assertEquals(values.size(), 2);
assertTrue(values.contains(Flash.OFF));
assertTrue(values.contains(Flash.TORCH));
values = mapper.unmapFlash(CONTROL_AE_MODE_ON_ALWAYS_FLASH);
assertEquals(values.size(), 1);
assertTrue(values.contains(Flash.ON));
values = mapper.unmapFlash(CONTROL_AE_MODE_ON_AUTO_FLASH);
assertEquals(values.size(), 1);
assertTrue(values.contains(Flash.AUTO));
values = mapper.unmapFlash(CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE);
assertEquals(values.size(), 1);
assertTrue(values.contains(Flash.AUTO));
values = mapper.unmapFlash(CONTROL_AE_MODE_ON_EXTERNAL_FLASH);
assertEquals(values.size(), 0);
assertEquals(Facing.BACK, mapper.unmapFacing(LENS_FACING_BACK));
assertEquals(Facing.FRONT, mapper.unmapFacing(LENS_FACING_FRONT));
assertEquals(Hdr.OFF, mapper.unmapHdr(CONTROL_SCENE_MODE_DISABLED));
assertEquals(Hdr.ON, mapper.unmapHdr(CONTROL_SCENE_MODE_HDR));
assertEquals(WhiteBalance.AUTO, mapper.unmapWhiteBalance(CONTROL_AWB_MODE_AUTO));
assertEquals(WhiteBalance.DAYLIGHT, mapper.unmapWhiteBalance(CONTROL_AWB_MODE_DAYLIGHT));
assertEquals(WhiteBalance.CLOUDY, mapper.unmapWhiteBalance(CONTROL_AWB_MODE_CLOUDY_DAYLIGHT));
assertEquals(WhiteBalance.INCANDESCENT, mapper.unmapWhiteBalance(CONTROL_AWB_MODE_INCANDESCENT));
assertEquals(WhiteBalance.FLUORESCENT, mapper.unmapWhiteBalance(CONTROL_AWB_MODE_FLUORESCENT));
}
}

@@ -19,7 +19,8 @@ import com.otaliastudios.cameraview.controls.Engine;
import com.otaliastudios.cameraview.controls.Facing;
import com.otaliastudios.cameraview.controls.Flash;
import com.otaliastudios.cameraview.controls.Preview;
import com.otaliastudios.cameraview.engine.Mapper;
import com.otaliastudios.cameraview.engine.mappers.Camera1Mapper;
import com.otaliastudios.cameraview.engine.mappers.Camera2Mapper;
import com.otaliastudios.cameraview.gesture.GestureAction;
import com.otaliastudios.cameraview.controls.Grid;
import com.otaliastudios.cameraview.controls.Hdr;
@@ -63,7 +64,7 @@ public class CameraOptions {
public CameraOptions(@NonNull Camera.Parameters params, boolean flipSizes) {
List<String> strings;
Mapper mapper = Mapper.get(Engine.CAMERA1);
Camera1Mapper mapper = Camera1Mapper.get();
// Facing
Camera.CameraInfo cameraInfo = new Camera.CameraInfo();
@@ -148,7 +149,7 @@ public class CameraOptions {
// Camera2Engine constructor.
@RequiresApi(Build.VERSION_CODES.LOLLIPOP)
public CameraOptions(@NonNull CameraManager manager, @NonNull String cameraId, boolean flipSizes) throws CameraAccessException {
Mapper mapper = Mapper.get(Engine.CAMERA2);
Camera2Mapper mapper = Camera2Mapper.get();
CameraCharacteristics cameraCharacteristics = manager.getCameraCharacteristics(cameraId);
// Facing
@@ -176,14 +177,8 @@ public class CameraOptions {
int[] aeModes = cameraCharacteristics.get(CameraCharacteristics.CONTROL_AE_AVAILABLE_MODES);
//noinspection ConstantConditions
for (int aeMode : aeModes) {
Flash value = mapper.unmapFlash(aeMode);
if (value != null) supportedFlash.add(value);
}
// Check for torch specifically since the Mapper flash support is not so good.
// If OFF works, it means we have AE_MODE_OFF or AE_MODE_ON. This means we can use
// the torch control.
if (supportedFlash.contains(Flash.OFF)) {
supportedFlash.add(Flash.TORCH);
Set<Flash> flashes = mapper.unmapFlash(aeMode);
supportedFlash.addAll(flashes);
}
}
@@ -196,22 +191,24 @@ public class CameraOptions {
if (value != null) supportedHdr.add(value);
}
//zoom
// Zoom
Float maxZoom = cameraCharacteristics.get(CameraCharacteristics.SCALER_AVAILABLE_MAX_DIGITAL_ZOOM);
if(maxZoom != null) {
zoomSupported = maxZoom > 1;
}
// autofocus
int[] afModes = cameraCharacteristics.get(CameraCharacteristics.CONTROL_AF_AVAILABLE_MODES);
autoFocusSupported = false;
//noinspection ConstantConditions
for (int afMode : afModes) {
if (afMode == CameraCharacteristics.CONTROL_AF_MODE_AUTO) {
autoFocusSupported = true;
}
}
// AutoFocus
// This now means 3A metering with respect to a specific region of the screen.
// Some controls (AF, AE) have special triggers that might or might not be supported.
// But they can also be on some continuous search mode so that the trigger is not needed.
// What really matters in my opinion is the availability of regions.
Integer afRegions = cameraCharacteristics.get(CameraCharacteristics.CONTROL_MAX_REGIONS_AF);
Integer aeRegions = cameraCharacteristics.get(CameraCharacteristics.CONTROL_MAX_REGIONS_AE);
Integer awbRegions = cameraCharacteristics.get(CameraCharacteristics.CONTROL_MAX_REGIONS_AWB);
autoFocusSupported = (afRegions != null && afRegions > 0)
|| (aeRegions != null && aeRegions > 0)
|| (awbRegions != null && awbRegions > 0);
// Exposure correction
Range<Integer> exposureRange = cameraCharacteristics.get(CameraCharacteristics.CONTROL_AE_COMPENSATION_RANGE);
@@ -430,8 +427,9 @@ public class CameraOptions {
/**
* Whether auto focus is supported. This means you can map gestures to
* {@link GestureAction#AUTO_FOCUS} and focus will be changed on tap.
* Whether auto focus (metering with respect to a specific region of the screen) is
* supported. If it is, you can map gestures to {@link GestureAction#AUTO_FOCUS}
* and metering will change on tap.
*
* @return whether auto focus is supported.
*/
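
For reference, the new support check reduces to the region counts read above. A minimal standalone sketch of the same logic (the helper class and method name are illustrative, not library API):

```java
import android.hardware.camera2.CameraCharacteristics;

public class MeteringSupport {
    // Tap-to-meter is considered available when at least one of the
    // AF/AE/AWB routines supports one or more metering regions.
    public static boolean supportsTapMetering(CameraCharacteristics characteristics) {
        Integer af = characteristics.get(CameraCharacteristics.CONTROL_MAX_REGIONS_AF);
        Integer ae = characteristics.get(CameraCharacteristics.CONTROL_MAX_REGIONS_AE);
        Integer awb = characteristics.get(CameraCharacteristics.CONTROL_MAX_REGIONS_AWB);
        return (af != null && af > 0) || (ae != null && ae > 0) || (awb != null && awb > 0);
    }
}
```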

@@ -9,9 +9,8 @@ import android.graphics.Matrix;
import android.hardware.Camera;
import android.os.Handler;
import com.otaliastudios.cameraview.controls.Engine;
import com.otaliastudios.cameraview.controls.Facing;
import com.otaliastudios.cameraview.engine.Mapper;
import com.otaliastudios.cameraview.engine.mappers.Camera1Mapper;
import com.otaliastudios.cameraview.internal.utils.ExifHelper;
import com.otaliastudios.cameraview.internal.utils.WorkerHandler;
@@ -61,7 +60,7 @@ public class CameraUtils {
*/
public static boolean hasCameraFacing(@SuppressWarnings("unused") @NonNull Context context,
@NonNull Facing facing) {
int internal = Mapper.get(Engine.CAMERA1).map(facing);
int internal = Camera1Mapper.get().mapFacing(facing);
Camera.CameraInfo cameraInfo = new Camera.CameraInfo();
for (int i = 0, count = Camera.getNumberOfCameras(); i < count; i++) {
Camera.getCameraInfo(i, cameraInfo);

@@ -21,7 +21,7 @@ import com.google.android.gms.tasks.Tasks;
import com.otaliastudios.cameraview.CameraException;
import com.otaliastudios.cameraview.CameraLogger;
import com.otaliastudios.cameraview.CameraOptions;
import com.otaliastudios.cameraview.controls.Engine;
import com.otaliastudios.cameraview.engine.mappers.Camera1Mapper;
import com.otaliastudios.cameraview.engine.offset.Axis;
import com.otaliastudios.cameraview.engine.offset.Reference;
import com.otaliastudios.cameraview.frame.Frame;
@@ -61,13 +61,13 @@ public class Camera1Engine extends CameraEngine implements
private static final int PREVIEW_FORMAT = ImageFormat.NV21;
@VisibleForTesting static final int AUTOFOCUS_END_DELAY_MILLIS = 2500;
private final Camera1Mapper mMapper = Camera1Mapper.get();
private Camera mCamera;
@VisibleForTesting int mCameraId;
private Runnable mFocusEndRunnable;
public Camera1Engine(@NonNull Callback callback) {
super(callback);
mMapper = Mapper.get(Engine.CAMERA1);
}
//region Utilities
@@ -117,7 +117,7 @@ public class Camera1Engine extends CameraEngine implements
@Override
protected boolean collectCameraInfo(@NonNull Facing facing) {
int internalFacing = mMapper.map(facing);
int internalFacing = mMapper.mapFacing(facing);
LOG.i("collectCameraInfo", "Facing:", facing, "Internal:", internalFacing, "Cameras:", Camera.getNumberOfCameras());
Camera.CameraInfo cameraInfo = new Camera.CameraInfo();
for (int i = 0, count = Camera.getNumberOfCameras(); i < count; i++) {
@@ -443,7 +443,7 @@ public class Camera1Engine extends CameraEngine implements
private boolean applyFlash(@NonNull Camera.Parameters params, @NonNull Flash oldFlash) {
if (mCameraOptions.supports(mFlash)) {
params.setFlashMode((String) mMapper.map(mFlash));
params.setFlashMode(mMapper.mapFlash(mFlash));
return true;
}
mFlash = oldFlash;
@@ -496,7 +496,7 @@ public class Camera1Engine extends CameraEngine implements
private boolean applyWhiteBalance(@NonNull Camera.Parameters params, @NonNull WhiteBalance oldWhiteBalance) {
if (mCameraOptions.supports(mWhiteBalance)) {
params.setWhiteBalance((String) mMapper.map(mWhiteBalance));
params.setWhiteBalance(mMapper.mapWhiteBalance(mWhiteBalance));
return true;
}
mWhiteBalance = oldWhiteBalance;
@@ -521,7 +521,7 @@ public class Camera1Engine extends CameraEngine implements
private boolean applyHdr(@NonNull Camera.Parameters params, @NonNull Hdr oldHdr) {
if (mCameraOptions.supports(mHdr)) {
params.setSceneMode((String) mMapper.map(mHdr));
params.setSceneMode(mMapper.mapHdr(mHdr));
return true;
}
mHdr = oldHdr;

@@ -14,12 +14,12 @@ import android.hardware.camera2.CameraManager;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.CaptureResult;
import android.hardware.camera2.TotalCaptureResult;
import android.hardware.camera2.params.MeteringRectangle;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.location.Location;
import android.media.Image;
import android.media.ImageReader;
import android.os.Build;
import android.util.Pair;
import android.util.Rational;
import android.view.Surface;
import android.view.SurfaceHolder;
@@ -37,12 +37,12 @@ import com.otaliastudios.cameraview.CameraLogger;
import com.otaliastudios.cameraview.CameraOptions;
import com.otaliastudios.cameraview.PictureResult;
import com.otaliastudios.cameraview.VideoResult;
import com.otaliastudios.cameraview.controls.Engine;
import com.otaliastudios.cameraview.controls.Facing;
import com.otaliastudios.cameraview.controls.Flash;
import com.otaliastudios.cameraview.controls.Hdr;
import com.otaliastudios.cameraview.controls.Mode;
import com.otaliastudios.cameraview.controls.WhiteBalance;
import com.otaliastudios.cameraview.engine.mappers.Camera2Mapper;
import com.otaliastudios.cameraview.engine.offset.Axis;
import com.otaliastudios.cameraview.engine.offset.Reference;
import com.otaliastudios.cameraview.frame.Frame;
@@ -61,14 +61,13 @@ import com.otaliastudios.cameraview.video.Full2VideoRecorder;
import com.otaliastudios.cameraview.video.SnapshotVideoRecorder;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.atomic.AtomicBoolean;
@RequiresApi(Build.VERSION_CODES.LOLLIPOP)
public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAvailableListener {
public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAvailableListener, Meter.Callback {
private static final String TAG = Camera2Engine.class.getSimpleName();
private static final CameraLogger LOG = CameraLogger.create(TAG);
@@ -84,6 +83,7 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
private CaptureRequest.Builder mRepeatingRequestBuilder;
private CaptureRequest mRepeatingRequest;
private CameraCaptureSession.CaptureCallback mRepeatingRequestCallback;
private final Camera2Mapper mMapper = Camera2Mapper.get();
// Frame processing
private Size mFrameProcessingSize;
@@ -101,13 +101,11 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
private ImageReader mPictureReader;
private final boolean mPictureCaptureStopsPreview = false; // can make configurable at some point
// Autofocus
private PointF mAutoFocusPoint;
private Gesture mAutoFocusGesture;
// 3A metering
private Meter mMeter;
public Camera2Engine(Callback callback) {
super(callback);
mMapper = Mapper.get(Engine.CAMERA2);
mManager = (CameraManager) mCallback.getContext().getSystemService(Context.CAMERA_SERVICE);
mFrameConversionHandler = WorkerHandler.get("CameraFrameConversion");
}
@@ -234,7 +232,9 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
if (mPictureRecorder instanceof Full2PictureRecorder) {
((Full2PictureRecorder) mPictureRecorder).onCaptureProgressed(partialResult);
}
if (isInAutoFocus()) onAutoFocusCapture(partialResult);
if (mMeter != null && mMeter.isMetering()) {
mMeter.onCapture(partialResult);
}
}
@Override
@@ -243,7 +243,9 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
if (mPictureRecorder instanceof Full2PictureRecorder) {
((Full2PictureRecorder) mPictureRecorder).onCaptureCompleted(result);
}
if (isInAutoFocus()) onAutoFocusCapture(result);
if (mMeter != null && mMeter.isMetering()) {
mMeter.onCapture(result);
}
}
};
@@ -287,7 +289,7 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
@Override
protected boolean collectCameraInfo(@NonNull Facing facing) {
int internalFacing = mMapper.map(facing);
int internalFacing = mMapper.mapFacing(facing);
String[] cameraIds = null;
try {
cameraIds = mManager.getCameraIdList();
@@ -554,8 +556,7 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
}
removeRepeatingRequestBuilderSurfaces();
mRepeatingRequest = null;
mAutoFocusPoint = null;
mAutoFocusGesture = null;
mMeter = null;
LOG.i("onStopPreview:", "Returning.");
return Tasks.forResult(null);
}
@@ -811,6 +812,29 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
}
}
private void applyFocusForMetering(@NonNull CaptureRequest.Builder builder) {
// All focus modes support the AF trigger, except OFF and EDOF.
// However, unlike the preview, we'd prefer AUTO to any CONTINUOUS value.
int[] modesArray = readCharacteristic(CameraCharacteristics.CONTROL_AF_AVAILABLE_MODES, new int[]{});
List<Integer> modes = new ArrayList<>();
for (int mode : modesArray) { modes.add(mode); }
if (modes.contains(CaptureRequest.CONTROL_AF_MODE_AUTO)) {
builder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_AUTO);
return;
}
if (getMode() == Mode.VIDEO &&
modes.contains(CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_VIDEO)) {
builder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_VIDEO);
return;
}
if (modes.contains(CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE)) {
builder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
//noinspection UnnecessaryReturnStatement
return;
}
}
@Override
public void setFlash(@NonNull Flash flash) {
final Flash old = mFlash;
@@ -848,22 +872,28 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
private boolean applyFlash(@NonNull CaptureRequest.Builder builder,
@NonNull Flash oldFlash) {
if (mCameraOptions.supports(mFlash)) {
List<Integer> modes = mMapper.map(mFlash);
int[] availableModes = readCharacteristic(CameraCharacteristics.CONTROL_AE_AVAILABLE_MODES, new int[]{});
for (int mode : modes) {
for (int availableMode : availableModes) {
if (mode == availableMode) {
builder.set(CaptureRequest.CONTROL_AE_MODE, mode);
if (mFlash == Flash.TORCH) {
builder.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_TORCH);
} else if (mFlash == Flash.OFF) {
builder.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_OFF);
}
int[] availableAeModesArray = readCharacteristic(
CameraCharacteristics.CONTROL_AE_AVAILABLE_MODES, new int[]{});
List<Integer> availableAeModes = new ArrayList<>();
for (int mode : availableAeModesArray) { availableAeModes.add(mode); }
List<Pair<Integer, Integer>> pairs = mMapper.mapFlash(mFlash);
for (Pair<Integer, Integer> pair : pairs) {
if (availableAeModes.contains(pair.first)) {
LOG.i("applyFlash: setting CONTROL_AE_MODE to", pair.first);
LOG.i("applyFlash: setting FLASH_MODE to", pair.second);
builder.set(CaptureRequest.CONTROL_AE_MODE, pair.first);
builder.set(CaptureRequest.FLASH_MODE, pair.second);
// On some devices, switching from TORCH/OFF to AUTO/ON is not immediately
// reflected (for example, torch stays active) unless we do as follows.
// It's just a way to wake up the AE routine.
builder.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER,
CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER_START);
return true;
}
}
}
}
mFlash = oldFlash;
return false;
}
@@ -913,7 +943,7 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
private boolean applyWhiteBalance(@NonNull CaptureRequest.Builder builder,
@NonNull WhiteBalance oldWhiteBalance) {
if (mCameraOptions.supports(mWhiteBalance)) {
Integer whiteBalance = mMapper.map(mWhiteBalance);
int whiteBalance = mMapper.mapWhiteBalance(mWhiteBalance);
builder.set(CaptureRequest.CONTROL_AWB_MODE, whiteBalance);
return true;
}
@@ -941,7 +971,7 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
private boolean applyHdr(@NonNull CaptureRequest.Builder builder, @NonNull Hdr oldHdr) {
if (mCameraOptions.supports(mHdr)) {
Integer hdr = mMapper.map(mHdr);
int hdr = mMapper.mapHdr(mHdr);
builder.set(CaptureRequest.CONTROL_SCENE_MODE, hdr);
return true;
}
@@ -1105,234 +1135,100 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
//endregion
//region Auto Focus
//region 3A Metering
@Override
public void startAutoFocus(@Nullable final Gesture gesture, @NonNull final PointF point) {
LOG.i("startAutoFocus", "dispatching. Gesture:", gesture);
// TODO Should change this name at some point, and deprecate AF methods
startMetering(gesture, point);
}
private void startMetering(@Nullable final Gesture gesture, @NonNull final PointF point) {
LOG.i("startMetering", "dispatching. Gesture:", gesture);
mHandler.run(new Runnable() {
@Override
public void run() {
LOG.i("startAutoFocus", "executing. Preview state:", getPreviewState());
LOG.i("startMetering", "executing. Preview state:", getPreviewState());
// This will only work when we have a preview, since it launches the preview in the end.
// Even without this it would need the bind state at least, since we need the preview size.
if (!mCameraOptions.isAutoFocusSupported()) return;
if (getPreviewState() < STATE_STARTED) return;
mAutoFocusPoint = point;
mAutoFocusGesture = gesture;
// This is a good Q/A. https://stackoverflow.com/a/33181620/4288782
// At first, the point is relative to the View system and does not account for our own cropping.
// Will keep updating these two below.
PointF referencePoint = new PointF(point.x, point.y);
Size referenceSize /* = previewSurfaceSize */;
// 1. Account for cropping.
Size previewStreamSize = getPreviewStreamSize(Reference.VIEW);
Size previewSurfaceSize = mPreview.getSurfaceSize();
if (previewStreamSize == null) throw new IllegalStateException("getPreviewStreamSize should not be null at this point.");
AspectRatio previewStreamAspectRatio = AspectRatio.of(previewStreamSize);
AspectRatio previewSurfaceAspectRatio = AspectRatio.of(previewSurfaceSize);
if (mPreview.isCropping()) {
if (previewStreamAspectRatio.toFloat() > previewSurfaceAspectRatio.toFloat()) {
// Stream is larger. The x coordinate must be increased: a touch on the left side
// of the surface is not on the left side of the stream (it's more to the right).
float scale = previewStreamAspectRatio.toFloat() / previewSurfaceAspectRatio.toFloat();
referencePoint.x += previewSurfaceSize.getWidth() * (scale - 1F) / 2F;
} else {
// Stream is taller. The y coordinate must be increased: a touch on the top side
// of the surface is not on the top side of the stream (it's a bit lower).
float scale = previewSurfaceAspectRatio.toFloat() / previewStreamAspectRatio.toFloat();
referencePoint.x += previewSurfaceSize.getHeight() * (scale - 1F) / 2F;
}
}
// 2. Scale to the stream coordinates (not the surface).
referencePoint.x *= (float) previewStreamSize.getWidth() / previewSurfaceSize.getWidth();
referencePoint.y *= (float) previewStreamSize.getHeight() / previewSurfaceSize.getHeight();
referenceSize = previewStreamSize;
// 3. Rotate to the stream coordinate system.
// Not elegant, but the sin/cos way was failing.
int angle = getAngles().offset(Reference.SENSOR, Reference.VIEW, Axis.ABSOLUTE);
boolean flip = angle % 180 != 0;
float tempX = referencePoint.x; float tempY = referencePoint.y;
if (angle == 0) {
referencePoint.x = tempX;
referencePoint.y = tempY;
} else if (angle == 90) {
//noinspection SuspiciousNameCombination
referencePoint.x = tempY;
referencePoint.y = referenceSize.getWidth() - tempX;
} else if (angle == 180) {
referencePoint.x = referenceSize.getWidth() - tempX;
referencePoint.y = referenceSize.getHeight() - tempY;
} else if (angle == 270) {
referencePoint.x = referenceSize.getHeight() - tempY;
//noinspection SuspiciousNameCombination
referencePoint.y = tempX;
} else {
throw new IllegalStateException("Unexpected angle " + angle);
}
referenceSize = flip ? referenceSize.flip() : referenceSize;
// These points are now referencing the stream rect on the sensor array.
// But we still have to figure out how the stream rect is laid on the sensor array.
// https://source.android.com/devices/camera/camera3_crop_reprocess.html
// For sanity, let's assume it is centered.
// For sanity, let's also assume that the crop region is equal to the stream region.
// 4. Move to the active sensor array coordinate system.
Rect activeRect = readCharacteristic(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE,
new Rect(0, 0, referenceSize.getWidth(), referenceSize.getHeight()));
referencePoint.x += (activeRect.width() - referenceSize.getWidth()) / 2F;
referencePoint.y += (activeRect.height() - referenceSize.getHeight()) / 2F;
referenceSize = new Size(activeRect.width(), activeRect.height());
// 5. Account for zoom! This only works for mZoomValue = 0.
// We must scale down with respect to the reference size center. If mZoomValue = 1,
// this must leave everything unchanged.
float maxZoom = readCharacteristic(CameraCharacteristics.SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
1F /* no zoom */);
float currZoom = 1 + mZoomValue * (maxZoom - 1); // 1 ... maxZoom
float currReduction = 1 / currZoom;
float referenceCenterX = referenceSize.getWidth() / 2F;
float referenceCenterY = referenceSize.getHeight() / 2F;
referencePoint.x = referenceCenterX + currReduction * (referencePoint.x - referenceCenterX);
referencePoint.y = referenceCenterY + currReduction * (referencePoint.y - referenceCenterY);
// 6. NOW we can compute the metering regions.
float visibleWidth = referenceSize.getWidth() * currReduction;
float visibleHeight = referenceSize.getHeight() * currReduction;
MeteringRectangle area1 = createMeteringRectangle(referencePoint, referenceSize, visibleWidth, visibleHeight, 0.05F, 1000);
MeteringRectangle area2 = createMeteringRectangle(referencePoint, referenceSize, visibleWidth, visibleHeight, 0.1F, 100);
// 7. And finally dispatch them...
List<MeteringRectangle> areas = Arrays.asList(area1, area2);
int maxRegionsAf = readCharacteristic(CameraCharacteristics.CONTROL_MAX_REGIONS_AF, 0);
int maxRegionsAe = readCharacteristic(CameraCharacteristics.CONTROL_MAX_REGIONS_AE, 0);
int maxRegionsAwb = readCharacteristic(CameraCharacteristics.CONTROL_MAX_REGIONS_AWB, 0);
if (maxRegionsAf > 0) {
int max = Math.min(maxRegionsAf, areas.size());
mRepeatingRequestBuilder.set(CaptureRequest.CONTROL_AF_REGIONS,
areas.subList(0, max).toArray(new MeteringRectangle[]{}));
}
if (maxRegionsAe > 0) {
int max = Math.min(maxRegionsAe, areas.size());
mRepeatingRequestBuilder.set(CaptureRequest.CONTROL_AE_REGIONS,
areas.subList(0, max).toArray(new MeteringRectangle[]{}));
}
if (maxRegionsAwb > 0) {
int max = Math.min(maxRegionsAwb, areas.size());
mRepeatingRequestBuilder.set(CaptureRequest.CONTROL_AWB_REGIONS,
areas.subList(0, max).toArray(new MeteringRectangle[]{}));
}
// 8. Set AF mode to AUTO so it doesn't use the CONTINUOUS schedule.
// When this ends, we will reset everything. We know CONTROL_AF_MODE_AUTO is available
// because we have called cameraOptions.isAutoFocusSupported().
mCallback.dispatchOnFocusStart(gesture, point);
mRepeatingRequestBuilder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_AUTO);
mRepeatingRequestBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER, CaptureRequest.CONTROL_AF_TRIGGER_START);
mRepeatingRequestBuilder.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER, CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER_START);
applyRepeatingRequestBuilder();
// The camera options API still has the auto focus API but it really
// refers to 3A metering.
if (!mCameraOptions.isAutoFocusSupported()) return;
// Reset the old meter if present.
if (mMeter != null) {
mMeter.resetMetering();
}
// The meter will check the current configuration to see if AF/AE/AWB should run.
// - AE should be on CONTROL_AE_MODE_ON* (this depends on setFlash())
// - AWB should be on CONTROL_AWB_MODE_AUTO (this depends on setWhiteBalance())
// - AF should be on CONTROL_AF_MODE_AUTO or others
// The last one is under our control because the library has no focus API.
// So let's set a good af mode here. This operation is reverted during onMeteringReset().
applyFocusForMetering(mRepeatingRequestBuilder);
// Create the meter and start.
mMeter = new Meter(Camera2Engine.this,
mRepeatingRequestBuilder,
mCameraCharacteristics,
Camera2Engine.this);
mMeter.startMetering(point, gesture);
}
});
}
/**
* Creates a metering rectangle around the center point.
* The rectangle will have a size that's a factor of the visible width and height.
* The rectangle will also be constrained to be inside the given boundaries,
* so we don't exceed them in case the center point is exactly on one side for example.
* @return a new rectangle
* Called by {@link Meter} when the metering process has started.
* We are currently exposing an auto focus API so that's what we dispatch.
* @param point point
* @param gesture gesture
*/
@NonNull
private MeteringRectangle createMeteringRectangle(
@NonNull PointF center, @NonNull Size boundaries,
float visibleWidth, float visibleHeight,
float factor, int weight) {
float halfWidth = factor * visibleWidth / 2F;
float halfHeight = factor * visibleHeight / 2F;
return new MeteringRectangle(
(int) Math.max(0, center.x - halfWidth),
(int) Math.max(0, center.y - halfHeight),
(int) Math.min(boundaries.getWidth(), halfWidth * 2F),
(int) Math.min(boundaries.getHeight(), halfHeight * 2F),
weight
);
@Override
public void onMeteringStarted(@NonNull PointF point, @Nullable Gesture gesture) {
LOG.w("onMeteringStarted - point:", point, "gesture:", gesture);
mCallback.dispatchOnFocusStart(gesture, point);
applyRepeatingRequestBuilder();
}
/**
* Whether we are in an auto focus operation, which means that
* {@link CaptureResult#CONTROL_AF_MODE} is set to {@link CaptureResult#CONTROL_AF_MODE_AUTO}.
* @return true if we're in auto focus
* Called by {@link Meter} when the metering process has ended.
* We are currently exposing an auto focus API so that's what we dispatch.
* @param point point
* @param gesture gesture
* @param success success
*/
private boolean isInAutoFocus() {
return mAutoFocusPoint != null;
@Override
public void onMeteringEnd(@NonNull PointF point, @Nullable Gesture gesture, boolean success) {
LOG.w("onMeteringEnd - point:", point, "gesture:", gesture, "success:", success);
mCallback.dispatchOnFocusEnd(gesture, success, point);
}
/**
* If this is called, we're in autofocus and {@link CaptureResult#CONTROL_AF_MODE}
* is set to {@link CaptureResult#CONTROL_AF_MODE_AUTO}.
* @param result the result
* When metering is reset, we're not sure that the engine is still alive.
* We should check this here.
* @param point point
* @param gesture gesture
* @return true if metering can be reset
*/
private void onAutoFocusCapture(@NonNull CaptureResult result) {
Integer afState = result.get(CaptureResult.CONTROL_AF_STATE);
if (afState == null) {
LOG.i("onAutoFocusCapture", "afState is null! This can happen for partial results. Waiting.");
return;
}
switch (afState) {
case CaptureRequest.CONTROL_AF_STATE_FOCUSED_LOCKED: {
onAutoFocusEnd(true);
break;
}
case CaptureRequest.CONTROL_AF_STATE_NOT_FOCUSED_LOCKED: {
onAutoFocusEnd(false);
break;
}
case CaptureRequest.CONTROL_AF_STATE_INACTIVE: break;
case CaptureRequest.CONTROL_AF_STATE_ACTIVE_SCAN: break;
default: break;
}
@Override
public boolean canResetMetering(@NonNull PointF point, @Nullable Gesture gesture) {
return getEngineState() == STATE_STARTED;
}
/**
* Called by {@link #onAutoFocusCapture(CaptureResult)} when we detect that the
* auto focus operation has ended.
* @param success true if success
* Called by {@link Meter} after resetting the metering parameters.
* We should apply them, and also go back to default focus.
* @param point point
* @param gesture gesture
*/
private void onAutoFocusEnd(boolean success) {
Gesture gesture = mAutoFocusGesture;
PointF point = mAutoFocusPoint;
mAutoFocusGesture = null;
mAutoFocusPoint = null;
if (point == null) return;
mCallback.dispatchOnFocusEnd(gesture, success, point);
mHandler.remove(mAutoFocusResetRunnable);
if (shouldResetAutoFocus()) {
mHandler.post(getAutoFocusResetDelay(), mAutoFocusResetRunnable);
}
}
private Runnable mAutoFocusResetRunnable = new Runnable() {
@Override
public void run() {
if (getEngineState() < STATE_STARTED) return;
Rect whole = readCharacteristic(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE, new Rect());
MeteringRectangle[] rectangle = new MeteringRectangle[]{new MeteringRectangle(whole, MeteringRectangle.METERING_WEIGHT_DONT_CARE)};
int maxRegionsAf = readCharacteristic(CameraCharacteristics.CONTROL_MAX_REGIONS_AF, 0);
int maxRegionsAe = readCharacteristic(CameraCharacteristics.CONTROL_MAX_REGIONS_AE, 0);
int maxRegionsAwb = readCharacteristic(CameraCharacteristics.CONTROL_MAX_REGIONS_AWB, 0);
if (maxRegionsAf > 0) mRepeatingRequestBuilder.set(CaptureRequest.CONTROL_AF_REGIONS, rectangle);
if (maxRegionsAe > 0) mRepeatingRequestBuilder.set(CaptureRequest.CONTROL_AE_REGIONS, rectangle);
if (maxRegionsAwb > 0) mRepeatingRequestBuilder.set(CaptureRequest.CONTROL_AWB_REGIONS, rectangle);
public void onMeteringReset(@NonNull PointF point, @Nullable Gesture gesture) {
applyDefaultFocus(mRepeatingRequestBuilder);
applyRepeatingRequestBuilder(); // only if preview started already
}
};
//endregion
}
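
Taken together, the Camera2Engine changes delegate tap-to-focus to the new Meter. A condensed sketch of the lifecycle, based on the calls shown in this diff (local names such as `repeatingRequestBuilder` and `callback` are illustrative):

```java
// Inside an engine that owns a repeating capture request:
Meter meter = new Meter(engine, repeatingRequestBuilder, cameraCharacteristics, callback);
meter.startMetering(point, gesture);   // computes regions, applies triggers,
                                       // then fires Callback.onMeteringStarted()

// From the session's CaptureCallback, forward partial and total results:
if (meter.isMetering()) {
    meter.onCapture(result);           // fires Callback.onMeteringEnd() when the 3A
                                       // parameters settle, or after FORCED_END_DELAY
}

// Resetting is scheduled by the meter itself, or done explicitly when a new
// metering request replaces an old one:
meter.resetMetering();                 // fires Callback.onMeteringReset() if
                                       // Callback.canResetMetering() allows it
```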

@@ -18,6 +18,7 @@ import com.otaliastudios.cameraview.CameraException;
import com.otaliastudios.cameraview.CameraLogger;
import com.otaliastudios.cameraview.CameraOptions;
import com.otaliastudios.cameraview.PictureResult;
import com.otaliastudios.cameraview.engine.mappers.Camera1Mapper;
import com.otaliastudios.cameraview.overlay.Overlay;
import com.otaliastudios.cameraview.VideoResult;
import com.otaliastudios.cameraview.engine.offset.Angles;
@@ -154,7 +155,6 @@ public abstract class CameraEngine implements
@SuppressWarnings("WeakerAccess") protected final Callback mCallback;
@SuppressWarnings("WeakerAccess") protected CameraPreview mPreview;
@SuppressWarnings("WeakerAccess") protected CameraOptions mCameraOptions;
@SuppressWarnings("WeakerAccess") protected Mapper mMapper;
@SuppressWarnings("WeakerAccess") protected PictureRecorder mPictureRecorder;
@SuppressWarnings("WeakerAccess") protected VideoRecorder mVideoRecorder;
@SuppressWarnings("WeakerAccess") protected Size mCaptureSize;

@@ -1,221 +0,0 @@
package com.otaliastudios.cameraview.engine;
import android.hardware.Camera;
import android.hardware.camera2.CameraCharacteristics;
import android.os.Build;
import com.otaliastudios.cameraview.controls.Control;
import com.otaliastudios.cameraview.controls.Engine;
import com.otaliastudios.cameraview.controls.Facing;
import com.otaliastudios.cameraview.controls.Flash;
import com.otaliastudios.cameraview.controls.Hdr;
import com.otaliastudios.cameraview.controls.WhiteBalance;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import androidx.annotation.NonNull;
import androidx.annotation.RequiresApi;
/**
* A Mapper maps camera engine constants to CameraView constants.
*/
public abstract class Mapper {
private static Mapper CAMERA1;
private static Mapper CAMERA2;
public static Mapper get(@NonNull Engine engine) {
if (engine == Engine.CAMERA1) {
if (CAMERA1 == null) CAMERA1 = new Camera1Mapper();
return CAMERA1;
} else if (engine == Engine.CAMERA2 && Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
if (CAMERA2 == null) CAMERA2 = new Camera2Mapper();
return CAMERA2;
} else {
throw new IllegalArgumentException("Unknown engine or unsupported API level.");
}
}
private Mapper() {}
public abstract <T> T map(Flash flash);
public abstract <T> T map(Facing facing);
public abstract <T> T map(WhiteBalance whiteBalance);
public abstract <T> T map(Hdr hdr);
public abstract <T> Flash unmapFlash(T cameraConstant);
public abstract <T> Facing unmapFacing(T cameraConstant);
public abstract <T> WhiteBalance unmapWhiteBalance(T cameraConstant);
public abstract <T> Hdr unmapHdr(T cameraConstant);
@SuppressWarnings("WeakerAccess")
protected <C extends Control, T> C reverseLookup(HashMap<C, T> map, T object) {
for (C value : map.keySet()) {
if (object.equals(map.get(value))) {
return value;
}
}
return null;
}
@SuppressWarnings("WeakerAccess")
protected <C extends Control, T> C reverseListLookup(HashMap<C, List<T>> map, T object) {
for (C value : map.keySet()) {
List<T> list = map.get(value);
if (list == null) continue;
for (T candidate : list) {
if (object.equals(candidate)) {
return value;
}
}
}
return null;
}
@SuppressWarnings("unchecked")
private static class Camera1Mapper extends Mapper {
private static final HashMap<Flash, String> FLASH = new HashMap<>();
private static final HashMap<WhiteBalance, String> WB = new HashMap<>();
private static final HashMap<Facing, Integer> FACING = new HashMap<>();
private static final HashMap<Hdr, String> HDR = new HashMap<>();
static {
FLASH.put(Flash.OFF, Camera.Parameters.FLASH_MODE_OFF);
FLASH.put(Flash.ON, Camera.Parameters.FLASH_MODE_ON);
FLASH.put(Flash.AUTO, Camera.Parameters.FLASH_MODE_AUTO);
FLASH.put(Flash.TORCH, Camera.Parameters.FLASH_MODE_TORCH);
FACING.put(Facing.BACK, Camera.CameraInfo.CAMERA_FACING_BACK);
FACING.put(Facing.FRONT, Camera.CameraInfo.CAMERA_FACING_FRONT);
WB.put(WhiteBalance.AUTO, Camera.Parameters.WHITE_BALANCE_AUTO);
WB.put(WhiteBalance.INCANDESCENT, Camera.Parameters.WHITE_BALANCE_INCANDESCENT);
WB.put(WhiteBalance.FLUORESCENT, Camera.Parameters.WHITE_BALANCE_FLUORESCENT);
WB.put(WhiteBalance.DAYLIGHT, Camera.Parameters.WHITE_BALANCE_DAYLIGHT);
WB.put(WhiteBalance.CLOUDY, Camera.Parameters.WHITE_BALANCE_CLOUDY_DAYLIGHT);
HDR.put(Hdr.OFF, Camera.Parameters.SCENE_MODE_AUTO);
if (Build.VERSION.SDK_INT >= 17) {
HDR.put(Hdr.ON, Camera.Parameters.SCENE_MODE_HDR);
} else {
HDR.put(Hdr.ON, "hdr");
}
}
@Override
public <T> T map(Flash flash) {
return (T) FLASH.get(flash);
}
@Override
public <T> T map(Facing facing) {
return (T) FACING.get(facing);
}
@Override
public <T> T map(WhiteBalance whiteBalance) {
return (T) WB.get(whiteBalance);
}
@Override
public <T> T map(Hdr hdr) {
return (T) HDR.get(hdr);
}
@Override
public <T> Flash unmapFlash(T cameraConstant) {
return reverseLookup(FLASH, (String) cameraConstant);
}
@Override
public <T> Facing unmapFacing(T cameraConstant) {
return reverseLookup(FACING, (Integer) cameraConstant);
}
@Override
public <T> WhiteBalance unmapWhiteBalance(T cameraConstant) {
return reverseLookup(WB, (String) cameraConstant);
}
@Override
public <T> Hdr unmapHdr(T cameraConstant) {
return reverseLookup(HDR, (String) cameraConstant);
}
}
@SuppressWarnings("unchecked")
@RequiresApi(Build.VERSION_CODES.LOLLIPOP)
private static class Camera2Mapper extends Mapper {
private static final HashMap<Flash, List<Integer>> FLASH = new HashMap<>();
private static final HashMap<Facing, Integer> FACING = new HashMap<>();
private static final HashMap<WhiteBalance, Integer> WB = new HashMap<>();
private static final HashMap<Hdr, Integer> HDR = new HashMap<>();
static {
// OFF and TORCH also have a second condition - on CameraCharacteristics.CONTROL_FLASH_MODE - but that does not
// fit into the Mapper interface. TODO review this
FLASH.put(Flash.OFF, Arrays.asList(CameraCharacteristics.CONTROL_AE_MODE_ON, CameraCharacteristics.CONTROL_AE_MODE_OFF));
FLASH.put(Flash.TORCH, Arrays.asList(CameraCharacteristics.CONTROL_AE_MODE_ON, CameraCharacteristics.CONTROL_AE_MODE_OFF));
FLASH.put(Flash.AUTO, Arrays.asList(CameraCharacteristics.CONTROL_AE_MODE_ON_AUTO_FLASH, CameraCharacteristics.CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE));
FLASH.put(Flash.ON, Collections.singletonList(CameraCharacteristics.CONTROL_AE_MODE_ON_ALWAYS_FLASH));
FACING.put(Facing.BACK, CameraCharacteristics.LENS_FACING_BACK);
FACING.put(Facing.FRONT, CameraCharacteristics.LENS_FACING_FRONT);
WB.put(WhiteBalance.AUTO, CameraCharacteristics.CONTROL_AWB_MODE_AUTO);
WB.put(WhiteBalance.CLOUDY, CameraCharacteristics.CONTROL_AWB_MODE_CLOUDY_DAYLIGHT);
WB.put(WhiteBalance.DAYLIGHT, CameraCharacteristics.CONTROL_AWB_MODE_DAYLIGHT);
WB.put(WhiteBalance.FLUORESCENT, CameraCharacteristics.CONTROL_AWB_MODE_FLUORESCENT);
WB.put(WhiteBalance.INCANDESCENT, CameraCharacteristics.CONTROL_AWB_MODE_INCANDESCENT);
HDR.put(Hdr.OFF, CameraCharacteristics.CONTROL_SCENE_MODE_DISABLED);
HDR.put(Hdr.ON, 18 /* CameraCharacteristics.CONTROL_SCENE_MODE_HDR */);
}
@SuppressWarnings("ConstantConditions")
@Override
public <T> T map(Flash flash) {
return (T) FLASH.get(flash);
}
@Override
public <T> T map(Facing facing) {
return (T) FACING.get(facing);
}
@Override
public <T> T map(WhiteBalance whiteBalance) {
return (T) WB.get(whiteBalance);
}
@Override
public <T> T map(Hdr hdr) {
return (T) HDR.get(hdr);
}
@Override
public <T> Flash unmapFlash(T cameraConstant) {
return reverseListLookup(FLASH, (Integer) cameraConstant);
}
@Override
public <T> Facing unmapFacing(T cameraConstant) {
return reverseLookup(FACING, (Integer) cameraConstant);
}
@Override
public <T> WhiteBalance unmapWhiteBalance(T cameraConstant) {
return reverseLookup(WB, (Integer) cameraConstant);
}
@Override
public <T> Hdr unmapHdr(T cameraConstant) {
return reverseLookup(HDR, (Integer) cameraConstant);
}
}
}
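
The replacement classes trade these unchecked `<T> T map(...)` methods for typed, per-control methods. A usage sketch based on the tests and call sites in this diff (the values in comments are the ones asserted by the tests above; the wrapper class is illustrative):

```java
import android.hardware.camera2.CameraMetadata;
import android.util.Pair;
import com.otaliastudios.cameraview.controls.Facing;
import com.otaliastudios.cameraview.controls.Flash;
import com.otaliastudios.cameraview.engine.mappers.Camera1Mapper;
import com.otaliastudios.cameraview.engine.mappers.Camera2Mapper;
import java.util.List;
import java.util.Set;

public class MapperExamples {
    void examples() {
        Camera1Mapper camera1 = Camera1Mapper.get();
        String flashMode = camera1.mapFlash(Flash.AUTO); // Camera.Parameters.FLASH_MODE_AUTO
        int facing = camera1.mapFacing(Facing.BACK);     // Camera.CameraInfo.CAMERA_FACING_BACK

        Camera2Mapper camera2 = Camera2Mapper.get();
        // A Flash value maps to (CONTROL_AE_MODE, FLASH_MODE) pairs, tried in order
        // against the device's available AE modes (see applyFlash above).
        List<Pair<Integer, Integer>> pairs = camera2.mapFlash(Flash.ON);
        // Unmapping may return several Flash values, since OFF and TORCH
        // share the same AE modes.
        Set<Flash> flashes = camera2.unmapFlash(CameraMetadata.CONTROL_AE_MODE_ON);
    }
}
```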

@@ -0,0 +1,380 @@
package com.otaliastudios.cameraview.engine;
import android.graphics.PointF;
import android.graphics.Rect;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.CaptureResult;
import android.hardware.camera2.TotalCaptureResult;
import android.hardware.camera2.params.MeteringRectangle;
import android.os.Build;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.annotation.RequiresApi;
import com.otaliastudios.cameraview.CameraLogger;
import com.otaliastudios.cameraview.engine.metering.AutoExposure;
import com.otaliastudios.cameraview.engine.metering.AutoFocus;
import com.otaliastudios.cameraview.engine.metering.AutoWhiteBalance;
import com.otaliastudios.cameraview.engine.metering.MeteringParameter;
import com.otaliastudios.cameraview.engine.offset.Axis;
import com.otaliastudios.cameraview.engine.offset.Reference;
import com.otaliastudios.cameraview.gesture.Gesture;
import com.otaliastudios.cameraview.size.AspectRatio;
import com.otaliastudios.cameraview.size.Size;
import java.util.Arrays;
import java.util.List;
/**
* Helps Camera2-based engines to perform 3A (auto focus, auto exposure and auto white balance)
* metering. Users are required to:
*
* - Call {@link #startMetering(PointF, Gesture)} to start
* - Call {@link #onCapture(CaptureResult)} when they have partial or total results, as long as the
* meter is still in a metering operation, which can be checked through {@link #isMetering()}
* - Call {@link #resetMetering()} to reset the metering parameters if needed. This is done automatically
* by the meter based on the reset delay configuration in the engine, but can be called explicitly
* for example when we have multiple meter requests and want to cancel the old one.
*/
@RequiresApi(Build.VERSION_CODES.LOLLIPOP)
public class Meter {
/**
* The meter callback.
*/
public interface Callback {
/**
* Notifies that metering has started. At this point implementors should apply
* the builder onto the preview.
* @param point point
* @param gesture gesture
*/
void onMeteringStarted(@NonNull PointF point, @Nullable Gesture gesture);
/**
* Notifies that metering has ended. No action is required for implementors.
* From now on, {@link #isMetering()} will return false so the meter should not
* be passed capture results anymore.
* @param point point
* @param gesture gesture
* @param success success
*/
void onMeteringEnd(@NonNull PointF point, @Nullable Gesture gesture, boolean success);
/**
* Notifies that metering has been reset. From now on, this meter instance
* is done, although in theory it could be reused by calling
* {@link #startMetering(PointF, Gesture)} again.
* @param point point
* @param gesture gesture
*/
void onMeteringReset(@NonNull PointF point, @Nullable Gesture gesture);
/**
* Whether metering can be reset. Since it happens at a future time, this should
* return true if the engine is still in a legit state for this operation.
* @param point point
* @param gesture gesture
* @return true if can reset
*/
// TODO is this useful? engine could do its checks onMeteringReset()
boolean canResetMetering(@NonNull PointF point, @Nullable Gesture gesture);
}
private static final String TAG = Meter.class.getSimpleName();
private static final CameraLogger LOG = CameraLogger.create(TAG);
private static final int FORCED_END_DELAY = 2500;
private final CameraEngine mEngine;
private final CaptureRequest.Builder mBuilder;
private final CameraCharacteristics mCharacteristics;
private final Callback mCallback;
private PointF mPoint;
private Gesture mGesture;
private boolean mIsMetering;
private long mMeteringStartTime;
private MeteringParameter mAutoFocus = new AutoFocus();
private MeteringParameter mAutoWhiteBalance = new AutoWhiteBalance();
private MeteringParameter mAutoExposure = new AutoExposure();
/**
* Creates a new meter.
* @param engine the engine
* @param builder a capture builder
* @param characteristics the camera characteristics
* @param callback the callback
*/
@SuppressWarnings("WeakerAccess")
public Meter(@NonNull CameraEngine engine,
@NonNull CaptureRequest.Builder builder,
@NonNull CameraCharacteristics characteristics,
@NonNull Callback callback) {
mEngine = engine;
mBuilder = builder;
mCharacteristics = characteristics;
mCallback = callback;
}
/**
* Starts a metering sequence.
* @param point point
* @param gesture gesture
*/
@SuppressWarnings("WeakerAccess")
public void startMetering(@NonNull PointF point, @Nullable Gesture gesture) {
mPoint = point;
mGesture = gesture;
mIsMetering = true;
// This is a good Q/A. https://stackoverflow.com/a/33181620/4288782
// At first, the point is relative to the View system and does not account for our own cropping.
// Will keep updating these two below.
final PointF referencePoint = new PointF(mPoint.x, mPoint.y);
Size referenceSize = mEngine.mPreview.getSurfaceSize();
// 1. Account for cropping.
// This will enlarge the preview size so that aspect ratio matches.
referenceSize = applyPreviewCropping(referenceSize, referencePoint);
// 2. Scale to the preview stream coordinates.
// This will move to the preview stream coordinates by scaling.
referenceSize = applyPreviewScale(referenceSize, referencePoint);
// 3. Rotate to the stream coordinate system.
// This leaves us with sensor stream coordinates.
referenceSize = applyPreviewToSensorRotation(referenceSize, referencePoint);
// 4. Move to the crop region coordinate system.
// The crop region is the union of all currently active streams.
referenceSize = applyCropRegionCoordinates(referenceSize, referencePoint);
// 5. Move to the active array coordinate system.
referenceSize = applyActiveArrayCoordinates(referenceSize, referencePoint);
// 6. Now we can compute the metering regions.
// We want to define them as a fraction of the visible size which (apart from cropping)
// can be obtained through the SENSOR rotated preview stream size.
Size visibleSize = mEngine.getPreviewStreamSize(Reference.SENSOR);
//noinspection ConstantConditions
MeteringRectangle area1 = createMeteringRectangle(referenceSize, referencePoint, visibleSize, 0.05F, 1000);
MeteringRectangle area2 = createMeteringRectangle(referenceSize, referencePoint, visibleSize, 0.1F, 100);
List<MeteringRectangle> areas = Arrays.asList(area1, area2);
// 7. And finally dispatch everything
mAutoFocus.startMetering(mCharacteristics, mBuilder, areas);
mAutoWhiteBalance.startMetering(mCharacteristics, mBuilder, areas);
mAutoExposure.startMetering(mCharacteristics, mBuilder, areas);
// Dispatch to callback
mCallback.onMeteringStarted(mPoint, mGesture);
mMeteringStartTime = System.currentTimeMillis();
}
@SuppressWarnings("UnnecessaryLocalVariable")
@NonNull
private Size applyPreviewCropping(@NonNull Size referenceSize, @NonNull PointF referencePoint) {
Size previewStreamSize = mEngine.getPreviewStreamSize(Reference.VIEW);
Size previewSurfaceSize = referenceSize;
if (previewStreamSize == null) {
throw new IllegalStateException("getPreviewStreamSize should not be null at this point.");
}
int referenceWidth = previewSurfaceSize.getWidth();
int referenceHeight = previewSurfaceSize.getHeight();
AspectRatio previewStreamAspectRatio = AspectRatio.of(previewStreamSize);
AspectRatio previewSurfaceAspectRatio = AspectRatio.of(previewSurfaceSize);
if (mEngine.mPreview.isCropping()) {
if (previewStreamAspectRatio.toFloat() > previewSurfaceAspectRatio.toFloat()) {
// Stream is larger. The x coordinate must be increased: a touch on the left side
// of the surface is not on the left side of the stream (it's more to the right).
float scale = previewStreamAspectRatio.toFloat() / previewSurfaceAspectRatio.toFloat();
referencePoint.x += previewSurfaceSize.getWidth() * (scale - 1F) / 2F;
referenceWidth = Math.round(previewSurfaceSize.getWidth() * scale);
} else {
// Stream is taller. The y coordinate must be increased: a touch on the top side
// of the surface is not on the top side of the stream (it's a bit lower).
float scale = previewSurfaceAspectRatio.toFloat() / previewStreamAspectRatio.toFloat();
referencePoint.y += previewSurfaceSize.getHeight() * (scale - 1F) / 2F;
referenceHeight = Math.round(previewSurfaceSize.getHeight() * scale);
}
}
return new Size(referenceWidth, referenceHeight);
}
@SuppressWarnings("ConstantConditions")
@NonNull
private Size applyPreviewScale(@NonNull Size referenceSize, @NonNull PointF referencePoint) {
// The referenceSize now has the same aspect ratio as the previewStreamSize, but they
// can still differ in size (that is, a scale operation is needed).
Size previewStreamSize = mEngine.getPreviewStreamSize(Reference.VIEW);
referencePoint.x *= (float) previewStreamSize.getWidth() / referenceSize.getWidth();
referencePoint.y *= (float) previewStreamSize.getHeight() / referenceSize.getHeight();
return previewStreamSize;
}
@SuppressWarnings("SuspiciousNameCombination")
@NonNull
private Size applyPreviewToSensorRotation(@NonNull Size referenceSize, @NonNull PointF referencePoint) {
// Not elegant, but the sin/cos way was failing for some reason.
int angle = mEngine.getAngles().offset(Reference.SENSOR, Reference.VIEW, Axis.ABSOLUTE);
boolean flip = angle % 180 != 0;
float tempX = referencePoint.x;
float tempY = referencePoint.y;
if (angle == 0) {
referencePoint.x = tempX;
referencePoint.y = tempY;
} else if (angle == 90) {
referencePoint.x = tempY;
referencePoint.y = referenceSize.getWidth() - tempX;
} else if (angle == 180) {
referencePoint.x = referenceSize.getWidth() - tempX;
referencePoint.y = referenceSize.getHeight() - tempY;
} else if (angle == 270) {
referencePoint.x = referenceSize.getHeight() - tempY;
referencePoint.y = tempX;
} else {
throw new IllegalStateException("Unexpected angle " + angle);
}
return flip ? referenceSize.flip() : referenceSize;
}
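// Hedged worked example: with a 90 degree offset and a 1080x1920 reference
// size, a touch at (100, 200) maps to (200, 1080 - 100) = (200, 980), and
// the reference size flips to 1920x1080.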
@NonNull
private Size applyCropRegionCoordinates(@NonNull Size referenceSize, @NonNull PointF referencePoint) {
// The input point and size refer to the stream rect.
// The stream rect is part of the 'crop region', as described below.
// https://source.android.com/devices/camera/camera3_crop_reprocess.html
Rect cropRect = mBuilder.get(CaptureRequest.SCALER_CROP_REGION);
// For now, we don't care about x and y position. Rect should be non-null, but let's be safe.
int cropRectWidth = cropRect == null ? referenceSize.getWidth() : cropRect.width();
int cropRectHeight = cropRect == null ? referenceSize.getHeight() : cropRect.height();
// The stream is always centered inside the crop region, and one of the dimensions
// should always match. We just increase the other one.
referencePoint.x += (cropRectWidth - referenceSize.getWidth()) / 2F;
referencePoint.y += (cropRectHeight - referenceSize.getHeight()) / 2F;
return new Size(cropRectWidth, cropRectHeight);
}
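// Hedged worked example: if the stream rect is 4000x2250 and the crop region
// is 4000x3000 (no digital zoom), the widths match and the point only shifts
// vertically by (3000 - 2250) / 2 = 375; the new reference size is 4000x3000.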
@NonNull
private Size applyActiveArrayCoordinates(@NonNull Size referenceSize, @NonNull PointF referencePoint) {
// The input point and size refer to the scaler crop region.
// We can query for the crop region position inside the active array, so this is easy.
Rect cropRect = mBuilder.get(CaptureRequest.SCALER_CROP_REGION);
referencePoint.x += cropRect == null ? 0 : cropRect.left;
referencePoint.y += cropRect == null ? 0 : cropRect.top;
// Finally, get the active rect width and height from characteristics.
Rect activeRect = mCharacteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
if (activeRect == null) { // Should never happen
activeRect = new Rect(0, 0, referenceSize.getWidth(), referenceSize.getHeight());
}
return new Size(activeRect.width(), activeRect.height());
}
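// Hedged worked example: with no zoom the crop region typically sits at (0, 0),
// so the point is unchanged; with ~2x digital zoom on a 4032x3024 active array
// the crop rect could sit at (1008, 756), shifting the point by that offset
// before the 4032x3024 active array size is returned.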
/**
* Creates a metering rectangle around the center point.
* The rectangle will have a size that's a factor of the visible width and height.
 * The rectangle will also be constrained to stay inside the given boundaries,
 * so we don't exceed them when, for example, the center point lies exactly on an edge.
* @return a new rectangle
*/
@NonNull
private MeteringRectangle createMeteringRectangle(
@NonNull Size boundaries,
@NonNull PointF center,
@NonNull Size visibleSize,
float factor,
int weight) {
float rectangleWidth = factor * visibleSize.getWidth();
float rectangleHeight = factor * visibleSize.getHeight();
float rectangleLeft = center.x - rectangleWidth / 2F;
float rectangleTop = center.y - rectangleHeight / 2F;
// Respect boundaries
if (rectangleLeft < 0) rectangleLeft = 0;
if (rectangleTop < 0) rectangleTop = 0;
if (rectangleLeft + rectangleWidth > boundaries.getWidth()) {
rectangleWidth = boundaries.getWidth() - rectangleLeft;
}
if (rectangleTop + rectangleHeight > boundaries.getHeight()) {
rectangleHeight = boundaries.getHeight() - rectangleTop;
}
return new MeteringRectangle(
(int) rectangleLeft,
(int) rectangleTop,
(int) rectangleWidth,
(int) rectangleHeight,
weight
);
}
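// Hedged worked example: with boundaries 4000x3000, center (3950, 1500),
// visibleSize 4000x3000 and factor 0.05, the raw rect is 200x150 with
// left = 3850; since 3850 + 200 exceeds the width, the width is clamped
// to 4000 - 3850 = 150.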
/**
* True if we're metering. False if we're not, for example if we're waiting for
* a reset call, or if {@link #startMetering(PointF, Gesture)} was never called.
* @return true if metering
*/
@SuppressWarnings("WeakerAccess")
public boolean isMetering() {
return mIsMetering;
}
/**
* Should be called when we have partial or total CaptureResults,
* but only while {@link #isMetering()} returns true.
* @param result result
*/
@SuppressWarnings("WeakerAccess")
public void onCapture(@NonNull CaptureResult result) {
if (!mIsMetering) return; // We're not interested in results anymore
if (!(result instanceof TotalCaptureResult)) return; // Ignore partial results: their contents are often missing or wrong
if (!mAutoFocus.isMetered()) mAutoFocus.onCapture(result);
if (!mAutoExposure.isMetered()) mAutoExposure.onCapture(result);
if (!mAutoWhiteBalance.isMetered()) mAutoWhiteBalance.onCapture(result);
if (mAutoFocus.isMetered() && mAutoExposure.isMetered() && mAutoWhiteBalance.isMetered()) {
LOG.i("onCapture:", "all MeteringParameters have converged. Dispatching onMeteringEnd");
boolean success = mAutoFocus.isSuccessful()
&& mAutoExposure.isSuccessful()
&& mAutoWhiteBalance.isSuccessful();
onMeteringEnd(success);
} else if (System.currentTimeMillis() - mMeteringStartTime >= FORCED_END_DELAY) {
LOG.i("onCapture:", "FORCED_END_DELAY was reached. Some MeteringParameter is stuck. Forcing end.");
onMeteringEnd(false);
}
}
private void onMeteringEnd(boolean success) {
mCallback.onMeteringEnd(mPoint, mGesture, success);
mIsMetering = false;
mEngine.mHandler.remove(mResetRunnable);
if (mEngine.shouldResetAutoFocus()) {
mEngine.mHandler.post(mEngine.getAutoFocusResetDelay(), mResetRunnable);
}
}
/**
* Can be called to perform the reset at a time different than the one
* specified by the {@link CameraEngine} reset delay.
*/
@SuppressWarnings("WeakerAccess")
public void resetMetering() {
mEngine.mHandler.remove(mResetRunnable);
if (mCallback.canResetMetering(mPoint, mGesture)) {
LOG.i("Resetting the meter parameters.");
Rect whole = mCharacteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
if (whole == null) whole = new Rect();
MeteringRectangle rectangle = new MeteringRectangle(whole, MeteringRectangle.METERING_WEIGHT_DONT_CARE);
mAutoFocus.resetMetering(mCharacteristics, mBuilder, rectangle);
mAutoWhiteBalance.resetMetering(mCharacteristics, mBuilder, rectangle);
mAutoExposure.resetMetering(mCharacteristics, mBuilder, rectangle);
mCallback.onMeteringReset(mPoint, mGesture);
}
}
private Runnable mResetRunnable = new Runnable() {
@Override
public void run() {
resetMetering();
}
};
}
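A minimal lifecycle sketch for the class above, assuming a Meter constructor that takes the engine, the repeating request builder, the characteristics and the callback (the actual signature lives outside this hunk), with touchPoint and totalResult supplied by the caller:

Meter meter = new Meter(engine, builder, characteristics, callback); // assumed constructor
meter.startMetering(touchPoint, Gesture.TAP); // sets regions, fires AF/AE/AWB triggers
// Later, from the repeating request's capture callback:
if (meter.isMetering()) meter.onCapture(totalResult); // converges or hits FORCED_END_DELAY
// Optionally unlock earlier than the engine's reset delay:
meter.resetMetering();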

@@ -0,0 +1,112 @@
package com.otaliastudios.cameraview.engine.mappers;
import android.hardware.Camera;
import android.os.Build;
import com.otaliastudios.cameraview.controls.Control;
import com.otaliastudios.cameraview.controls.Facing;
import com.otaliastudios.cameraview.controls.Flash;
import com.otaliastudios.cameraview.controls.Hdr;
import com.otaliastudios.cameraview.controls.WhiteBalance;
import java.util.HashMap;
import java.util.Map;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
/**
 * A Mapper maps Camera1 engine constants to CameraView constants, and vice versa.
*/
public class Camera1Mapper {
private static Camera1Mapper sInstance;
@NonNull
public static Camera1Mapper get() {
if (sInstance == null) {
sInstance = new Camera1Mapper();
}
return sInstance;
}
private static final Map<Flash, String> FLASH = new HashMap<>();
private static final Map<WhiteBalance, String> WB = new HashMap<>();
private static final Map<Facing, Integer> FACING = new HashMap<>();
private static final Map<Hdr, String> HDR = new HashMap<>();
static {
FLASH.put(Flash.OFF, Camera.Parameters.FLASH_MODE_OFF);
FLASH.put(Flash.ON, Camera.Parameters.FLASH_MODE_ON);
FLASH.put(Flash.AUTO, Camera.Parameters.FLASH_MODE_AUTO);
FLASH.put(Flash.TORCH, Camera.Parameters.FLASH_MODE_TORCH);
FACING.put(Facing.BACK, Camera.CameraInfo.CAMERA_FACING_BACK);
FACING.put(Facing.FRONT, Camera.CameraInfo.CAMERA_FACING_FRONT);
WB.put(WhiteBalance.AUTO, Camera.Parameters.WHITE_BALANCE_AUTO);
WB.put(WhiteBalance.INCANDESCENT, Camera.Parameters.WHITE_BALANCE_INCANDESCENT);
WB.put(WhiteBalance.FLUORESCENT, Camera.Parameters.WHITE_BALANCE_FLUORESCENT);
WB.put(WhiteBalance.DAYLIGHT, Camera.Parameters.WHITE_BALANCE_DAYLIGHT);
WB.put(WhiteBalance.CLOUDY, Camera.Parameters.WHITE_BALANCE_CLOUDY_DAYLIGHT);
HDR.put(Hdr.OFF, Camera.Parameters.SCENE_MODE_AUTO);
if (Build.VERSION.SDK_INT >= 17) {
HDR.put(Hdr.ON, Camera.Parameters.SCENE_MODE_HDR);
} else {
HDR.put(Hdr.ON, "hdr");
}
}
private Camera1Mapper() {}
@NonNull
public String mapFlash(@NonNull Flash flash) {
//noinspection ConstantConditions
return FLASH.get(flash);
}
public int mapFacing(@NonNull Facing facing) {
//noinspection ConstantConditions
return FACING.get(facing);
}
@NonNull
public String mapWhiteBalance(@NonNull WhiteBalance whiteBalance) {
//noinspection ConstantConditions
return WB.get(whiteBalance);
}
@NonNull
public String mapHdr(@NonNull Hdr hdr) {
//noinspection ConstantConditions
return HDR.get(hdr);
}
@Nullable
public Flash unmapFlash(@NonNull String cameraConstant) {
return reverseLookup(FLASH, cameraConstant);
}
@Nullable
public Facing unmapFacing(int cameraConstant) {
return reverseLookup(FACING, cameraConstant);
}
@Nullable
public WhiteBalance unmapWhiteBalance(@NonNull String cameraConstant) {
return reverseLookup(WB, cameraConstant);
}
@Nullable
public Hdr unmapHdr(@NonNull String cameraConstant) {
return reverseLookup(HDR, cameraConstant);
}
@Nullable
private <C extends Control, T> C reverseLookup(@NonNull Map<C, T> map, @NonNull T object) {
for (C value : map.keySet()) {
if (object.equals(map.get(value))) {
return value;
}
}
return null;
}
}
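A brief usage sketch for the mapper above; the platform strings follow directly from the static tables (camera is a hypothetical open android.hardware.Camera):

Camera1Mapper mapper = Camera1Mapper.get();
Camera.Parameters params = camera.getParameters();
params.setFlashMode(mapper.mapFlash(Flash.TORCH)); // "torch"
params.setWhiteBalance(mapper.mapWhiteBalance(WhiteBalance.CLOUDY)); // "cloudy-daylight"
Flash flash = mapper.unmapFlash(params.getFlashMode()); // null if the platform value is unknown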

@@ -0,0 +1,168 @@
package com.otaliastudios.cameraview.engine.mappers;
import android.hardware.Camera;
import android.hardware.camera2.CameraCharacteristics;
import android.os.Build;
import android.util.Pair;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.annotation.RequiresApi;
import com.otaliastudios.cameraview.controls.Control;
import com.otaliastudios.cameraview.controls.Engine;
import com.otaliastudios.cameraview.controls.Facing;
import com.otaliastudios.cameraview.controls.Flash;
import com.otaliastudios.cameraview.controls.Hdr;
import com.otaliastudios.cameraview.controls.WhiteBalance;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
 * A Mapper maps Camera2 engine constants to CameraView constants, and vice versa.
*/
@RequiresApi(Build.VERSION_CODES.LOLLIPOP)
public class Camera2Mapper {
private static Camera2Mapper sInstance;
public static Camera2Mapper get() {
if (sInstance == null) {
sInstance = new Camera2Mapper();
}
return sInstance;
}
private static final Map<Facing, Integer> FACING = new HashMap<>();
private static final Map<WhiteBalance, Integer> WB = new HashMap<>();
private static final Map<Hdr, Integer> HDR = new HashMap<>();
static {
FACING.put(Facing.BACK, CameraCharacteristics.LENS_FACING_BACK);
FACING.put(Facing.FRONT, CameraCharacteristics.LENS_FACING_FRONT);
WB.put(WhiteBalance.AUTO, CameraCharacteristics.CONTROL_AWB_MODE_AUTO);
WB.put(WhiteBalance.CLOUDY, CameraCharacteristics.CONTROL_AWB_MODE_CLOUDY_DAYLIGHT);
WB.put(WhiteBalance.DAYLIGHT, CameraCharacteristics.CONTROL_AWB_MODE_DAYLIGHT);
WB.put(WhiteBalance.FLUORESCENT, CameraCharacteristics.CONTROL_AWB_MODE_FLUORESCENT);
WB.put(WhiteBalance.INCANDESCENT, CameraCharacteristics.CONTROL_AWB_MODE_INCANDESCENT);
HDR.put(Hdr.OFF, CameraCharacteristics.CONTROL_SCENE_MODE_DISABLED);
HDR.put(Hdr.ON, 18 /* CameraCharacteristics.CONTROL_SCENE_MODE_HDR */);
}
private Camera2Mapper() {}
@NonNull
public List<Pair<Integer, Integer>> mapFlash(@NonNull Flash flash) {
List<Pair<Integer, Integer>> result = new ArrayList<>();
switch (flash) {
case ON: {
result.add(new Pair<>(
CameraCharacteristics.CONTROL_AE_MODE_ON_ALWAYS_FLASH,
CameraCharacteristics.FLASH_MODE_OFF));
break;
}
case AUTO: {
result.add(new Pair<>(
CameraCharacteristics.CONTROL_AE_MODE_ON_AUTO_FLASH,
CameraCharacteristics.FLASH_MODE_OFF));
result.add(new Pair<>(
CameraCharacteristics.CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE,
CameraCharacteristics.FLASH_MODE_OFF));
break;
}
case OFF: {
result.add(new Pair<>(
CameraCharacteristics.CONTROL_AE_MODE_ON,
CameraCharacteristics.FLASH_MODE_OFF));
result.add(new Pair<>(
CameraCharacteristics.CONTROL_AE_MODE_OFF,
CameraCharacteristics.FLASH_MODE_OFF));
break;
}
case TORCH: {
// When AE_MODE is ON or OFF, the low level FLASH_MODE control finally
// applies, so we can either turn the flash off or turn the torch on.
result.add(new Pair<>(
CameraCharacteristics.CONTROL_AE_MODE_ON,
CameraCharacteristics.FLASH_MODE_TORCH));
result.add(new Pair<>(
CameraCharacteristics.CONTROL_AE_MODE_OFF,
CameraCharacteristics.FLASH_MODE_TORCH));
break;
}
}
return result;
}
public int mapFacing(@NonNull Facing facing) {
//noinspection ConstantConditions
return FACING.get(facing);
}
public int mapWhiteBalance(@NonNull WhiteBalance whiteBalance) {
//noinspection ConstantConditions
return WB.get(whiteBalance);
}
public int mapHdr(@NonNull Hdr hdr) {
//noinspection ConstantConditions
return HDR.get(hdr);
}
@NonNull
public Set<Flash> unmapFlash(int cameraConstant) {
Set<Flash> result = new HashSet<>();
switch (cameraConstant) {
case CameraCharacteristics.CONTROL_AE_MODE_OFF:
case CameraCharacteristics.CONTROL_AE_MODE_ON: {
result.add(Flash.OFF);
result.add(Flash.TORCH);
break;
}
case CameraCharacteristics.CONTROL_AE_MODE_ON_ALWAYS_FLASH: {
result.add(Flash.ON);
break;
}
case CameraCharacteristics.CONTROL_AE_MODE_ON_AUTO_FLASH:
case CameraCharacteristics.CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE: {
result.add(Flash.AUTO);
break;
}
case CameraCharacteristics.CONTROL_AE_MODE_ON_EXTERNAL_FLASH:
default: break; // we don't support external flash
}
return result;
}
@Nullable
public Facing unmapFacing(int cameraConstant) {
return reverseLookup(FACING, cameraConstant);
}
@Nullable
public WhiteBalance unmapWhiteBalance(int cameraConstant) {
return reverseLookup(WB, cameraConstant);
}
@Nullable
public Hdr unmapHdr(int cameraConstant) {
return reverseLookup(HDR, cameraConstant);
}
@Nullable
private <C extends Control, T> C reverseLookup(@NonNull Map<C, T> map, @NonNull T object) {
for (C value : map.keySet()) {
if (object.equals(map.get(value))) {
return value;
}
}
return null;
}
}
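A sketch of how an engine might consume mapFlash: it returns (AE mode, flash mode) pairs rather than a single value because device support varies, so the caller can try them in order (isAeModeSupported is a hypothetical helper checking CONTROL_AE_AVAILABLE_MODES):

Camera2Mapper mapper = Camera2Mapper.get();
for (Pair<Integer, Integer> pair : mapper.mapFlash(Flash.AUTO)) {
    if (isAeModeSupported(characteristics, pair.first)) {
        builder.set(CaptureRequest.CONTROL_AE_MODE, pair.first);
        builder.set(CaptureRequest.FLASH_MODE, pair.second);
        break;
    }
}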

@@ -0,0 +1,101 @@
package com.otaliastudios.cameraview.engine.metering;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.CaptureResult;
import android.hardware.camera2.params.MeteringRectangle;
import android.os.Build;
import androidx.annotation.NonNull;
import androidx.annotation.RequiresApi;
import com.otaliastudios.cameraview.CameraLogger;
import java.util.List;
@RequiresApi(Build.VERSION_CODES.LOLLIPOP)
public class AutoExposure extends MeteringParameter {
private static final String TAG = AutoExposure.class.getSimpleName();
private static final CameraLogger LOG = CameraLogger.create(TAG);
private boolean isStarted;
@Override
public void startMetering(@NonNull CameraCharacteristics characteristics,
@NonNull CaptureRequest.Builder builder,
@NonNull List<MeteringRectangle> areas) {
isSuccessful = false;
isMetered = false;
isStarted = false;
boolean isNotLegacy = readCharacteristic(characteristics,
CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL, -1) !=
CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY;
Integer aeMode = builder.get(CaptureRequest.CONTROL_AE_MODE);
boolean isAEOn = aeMode != null &&
(aeMode == CameraCharacteristics.CONTROL_AE_MODE_ON
|| aeMode == CameraCharacteristics.CONTROL_AE_MODE_ON_ALWAYS_FLASH
|| aeMode == CameraCharacteristics.CONTROL_AE_MODE_ON_AUTO_FLASH
|| aeMode == CameraCharacteristics.CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE
|| aeMode == 5 /* CameraCharacteristics.CONTROL_AE_MODE_ON_EXTERNAL_FLASH, API 28 */);
isSupported = isNotLegacy && isAEOn;
if (isSupported) {
builder.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER,
CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER_START);
}
// Even if precapture is not supported, set the regions anyway.
int maxRegions = readCharacteristic(characteristics,
CameraCharacteristics.CONTROL_MAX_REGIONS_AE, 0);
if (maxRegions > 0) {
int max = Math.min(maxRegions, areas.size());
builder.set(CaptureRequest.CONTROL_AE_REGIONS,
areas.subList(0, max).toArray(new MeteringRectangle[]{}));
}
}
@Override
public void onCapture(@NonNull CaptureResult result) {
if (isMetered || !isSupported) return;
Integer aeState = result.get(CaptureResult.CONTROL_AE_STATE);
LOG.i("onCapture:", "aeState:", aeState);
if (aeState == null) return;
if (!isStarted) {
if (aeState == CaptureRequest.CONTROL_AE_STATE_PRECAPTURE) {
isStarted = true;
} else if (aeState == CaptureResult.CONTROL_AE_STATE_CONVERGED
|| aeState == CaptureResult.CONTROL_AE_STATE_FLASH_REQUIRED) {
// PRECAPTURE is a transient state, so also check for the final states.
isMetered = true;
isSuccessful = true;
}
} else {
if (aeState == CaptureRequest.CONTROL_AE_STATE_CONVERGED
|| aeState == CaptureRequest.CONTROL_AE_STATE_FLASH_REQUIRED) {
isMetered = true;
isSuccessful = true;
}
}
}
@Override
public void resetMetering(@NonNull CameraCharacteristics characteristics,
@NonNull CaptureRequest.Builder builder,
@NonNull MeteringRectangle area) {
int maxRegions = readCharacteristic(characteristics,
CameraCharacteristics.CONTROL_MAX_REGIONS_AE, 0);
if (maxRegions > 0) {
builder.set(CaptureRequest.CONTROL_AE_REGIONS, new MeteringRectangle[]{area});
}
if (isSupported) {
// Cleanup any precapture sequence.
if (Build.VERSION.SDK_INT >= 23) {
builder.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER,
CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER_CANCEL);
}
}
}
}
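// Note on the flow above: startMetering fires AE_PRECAPTURE_TRIGGER_START, then
// onCapture first waits for the transient PRECAPTURE state and finally for
// CONVERGED or FLASH_REQUIRED. The second branch covers devices that report a
// final state without ever publishing PRECAPTURE.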

@@ -0,0 +1,87 @@
package com.otaliastudios.cameraview.engine.metering;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.CaptureResult;
import android.hardware.camera2.params.MeteringRectangle;
import android.os.Build;
import androidx.annotation.NonNull;
import androidx.annotation.RequiresApi;
import com.otaliastudios.cameraview.CameraLogger;
import java.util.List;
@RequiresApi(Build.VERSION_CODES.LOLLIPOP)
public class AutoFocus extends MeteringParameter {
private static final String TAG = AutoFocus.class.getSimpleName();
private static final CameraLogger LOG = CameraLogger.create(TAG);
@Override
public void startMetering(@NonNull CameraCharacteristics characteristics,
@NonNull CaptureRequest.Builder builder,
@NonNull List<MeteringRectangle> areas) {
isSuccessful = false;
isMetered = false;
Integer afMode = builder.get(CaptureRequest.CONTROL_AF_MODE);
// Exclude OFF and EDOF as per docs.
isSupported = afMode != null &&
(afMode == CameraCharacteristics.CONTROL_AF_MODE_AUTO
|| afMode == CameraCharacteristics.CONTROL_AF_MODE_CONTINUOUS_PICTURE
|| afMode == CameraCharacteristics.CONTROL_AF_MODE_CONTINUOUS_VIDEO
|| afMode == CameraCharacteristics.CONTROL_AF_MODE_MACRO);
if (isSupported) {
builder.set(CaptureRequest.CONTROL_AF_TRIGGER, CaptureRequest.CONTROL_AF_TRIGGER_START);
}
// Even if auto focus is not supported, set the regions anyway.
int maxRegions = readCharacteristic(characteristics, CameraCharacteristics.CONTROL_MAX_REGIONS_AF, 0);
if (maxRegions > 0) {
int max = Math.min(maxRegions, areas.size());
builder.set(CaptureRequest.CONTROL_AF_REGIONS,
areas.subList(0, max).toArray(new MeteringRectangle[]{}));
}
}
@Override
public void onCapture(@NonNull CaptureResult result) {
if (isMetered || !isSupported) return;
Integer afState = result.get(CaptureResult.CONTROL_AF_STATE);
LOG.i("onCapture:", "afState:", afState);
if (afState == null) return;
switch (afState) {
case CaptureRequest.CONTROL_AF_STATE_FOCUSED_LOCKED: {
isMetered = true;
isSuccessful = true;
break;
}
case CaptureRequest.CONTROL_AF_STATE_NOT_FOCUSED_LOCKED: {
isMetered = true;
isSuccessful = false;
break;
}
case CaptureRequest.CONTROL_AF_STATE_INACTIVE: break;
case CaptureRequest.CONTROL_AF_STATE_ACTIVE_SCAN: break;
default: break;
}
}
@Override
public void resetMetering(@NonNull CameraCharacteristics characteristics,
@NonNull CaptureRequest.Builder builder,
@NonNull MeteringRectangle area) {
int maxRegions = readCharacteristic(characteristics,
CameraCharacteristics.CONTROL_MAX_REGIONS_AF, 0);
if (maxRegions > 0) {
builder.set(CaptureRequest.CONTROL_AF_REGIONS, new MeteringRectangle[]{area});
}
if (isSupported) { // Cleanup any trigger.
builder.set(CaptureRequest.CONTROL_AF_TRIGGER, CaptureRequest.CONTROL_AF_TRIGGER_CANCEL);
}
}
}
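A hedged sketch of plugging this parameter into a Camera2 capture callback, assuming autoFocus is an AutoFocus instance whose startMetering was called with the session's repeating request builder:

CameraCaptureSession.CaptureCallback callback = new CameraCaptureSession.CaptureCallback() {
    @Override
    public void onCaptureCompleted(@NonNull CameraCaptureSession session,
                                   @NonNull CaptureRequest request,
                                   @NonNull TotalCaptureResult result) {
        if (!autoFocus.isMetered()) autoFocus.onCapture(result);
        // When isMetered() flips to true, read isSuccessful() and then resetMetering().
    }
};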

@@ -0,0 +1,81 @@
package com.otaliastudios.cameraview.engine.metering;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.CaptureResult;
import android.hardware.camera2.params.MeteringRectangle;
import android.os.Build;
import androidx.annotation.NonNull;
import androidx.annotation.RequiresApi;
import com.otaliastudios.cameraview.CameraLogger;
import java.util.List;
@RequiresApi(Build.VERSION_CODES.LOLLIPOP)
public class AutoWhiteBalance extends MeteringParameter {
private static final String TAG = AutoWhiteBalance.class.getSimpleName();
private static final CameraLogger LOG = CameraLogger.create(TAG);
@Override
public void startMetering(@NonNull CameraCharacteristics characteristics,
@NonNull CaptureRequest.Builder builder,
@NonNull List<MeteringRectangle> areas) {
isSuccessful = false;
isMetered = false;
boolean isNotLegacy = readCharacteristic(characteristics,
CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL, -1) !=
CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY;
Integer awbMode = builder.get(CaptureRequest.CONTROL_AWB_MODE);
isSupported = isNotLegacy && awbMode != null && awbMode == CaptureRequest.CONTROL_AWB_MODE_AUTO;
if (isSupported) {
// Remove any lock. We're not setting any, but just in case.
builder.set(CaptureRequest.CONTROL_AWB_LOCK, false);
}
// Even if auto white balance is not supported, set the regions anyway.
int maxRegions = readCharacteristic(characteristics,
CameraCharacteristics.CONTROL_MAX_REGIONS_AWB, 0);
if (maxRegions > 0) {
int max = Math.min(maxRegions, areas.size());
builder.set(CaptureRequest.CONTROL_AWB_REGIONS,
areas.subList(0, max).toArray(new MeteringRectangle[]{}));
}
}
@Override
public void onCapture(@NonNull CaptureResult result) {
if (isMetered || !isSupported) return;
Integer awbState = result.get(CaptureResult.CONTROL_AWB_STATE);
LOG.i("onCapture:", "awbState:", awbState);
if (awbState == null) return;
switch (awbState) {
case CaptureRequest.CONTROL_AWB_STATE_CONVERGED: {
isMetered = true;
isSuccessful = true;
break;
}
case CaptureRequest.CONTROL_AWB_STATE_LOCKED: break;
case CaptureRequest.CONTROL_AWB_STATE_INACTIVE: break;
case CaptureRequest.CONTROL_AWB_STATE_SEARCHING: break;
default: break;
}
}
@Override
public void resetMetering(@NonNull CameraCharacteristics characteristics,
@NonNull CaptureRequest.Builder builder,
@NonNull MeteringRectangle area) {
int maxRegions = readCharacteristic(characteristics,
CameraCharacteristics.CONTROL_MAX_REGIONS_AWB, 0);
if (maxRegions > 0) {
builder.set(CaptureRequest.CONTROL_AWB_REGIONS, new MeteringRectangle[]{area});
}
}
}

@@ -0,0 +1,54 @@
package com.otaliastudios.cameraview.engine.metering;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.CaptureResult;
import android.hardware.camera2.params.MeteringRectangle;
import android.os.Build;
import androidx.annotation.NonNull;
import androidx.annotation.RequiresApi;
import java.util.List;
@RequiresApi(Build.VERSION_CODES.LOLLIPOP)
public abstract class MeteringParameter {
@SuppressWarnings("WeakerAccess")
protected boolean isSupported;
@SuppressWarnings("WeakerAccess")
protected boolean isSuccessful;
@SuppressWarnings("WeakerAccess")
protected boolean isMetered;
@SuppressWarnings("WeakerAccess")
@NonNull
protected <T> T readCharacteristic(@NonNull CameraCharacteristics characteristics,
@NonNull CameraCharacteristics.Key<T> key,
@NonNull T fallback) {
T value = characteristics.get(key);
return value == null ? fallback : value;
}
public final boolean isMetered() {
// An unsupported parameter should always appear as metered
return isMetered || !isSupported;
}
public final boolean isSuccessful() {
// An unsupported parameter should always appear as successful
return isSuccessful || !isSupported;
}
public abstract void startMetering(@NonNull CameraCharacteristics characteristics,
@NonNull CaptureRequest.Builder builder,
@NonNull List<MeteringRectangle> areas);
public abstract void resetMetering(@NonNull CameraCharacteristics characteristics,
@NonNull CaptureRequest.Builder builder,
@NonNull MeteringRectangle area);
public abstract void onCapture(@NonNull CaptureResult result);
}
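A minimal subclass sketch against this contract (hypothetical, for illustration): a parameter that opts out by never declaring support, which the Meter then treats as both metered and successful:

@RequiresApi(Build.VERSION_CODES.LOLLIPOP)
class NoOpParameter extends MeteringParameter {
    @Override
    public void startMetering(@NonNull CameraCharacteristics characteristics,
                              @NonNull CaptureRequest.Builder builder,
                              @NonNull List<MeteringRectangle> areas) {
        isSupported = false; // isMetered() and isSuccessful() now both return true
    }
    @Override
    public void resetMetering(@NonNull CameraCharacteristics characteristics,
                              @NonNull CaptureRequest.Builder builder,
                              @NonNull MeteringRectangle area) { }
    @Override
    public void onCapture(@NonNull CaptureResult result) { }
}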

@@ -35,11 +35,12 @@ public class Full2PictureRecorder extends PictureRecorder implements ImageReader
private static final CameraLogger LOG = CameraLogger.create(TAG);
private static final int STATE_IDLE = 0;
private static final int STATE_WAITING_FOCUS_LOCK = 1;
private static final int STATE_WAITING_PRECAPTURE_START = 2;
private static final int STATE_WAITING_PRECAPTURE_END = 3;
private static final int STATE_WAITING_CAPTURE = 4;
private static final int STATE_WAITING_IMAGE = 5;
private static final int STATE_WAITING_FIRST_FRAME = 1;
private static final int STATE_WAITING_AUTOFOCUS = 2;
private static final int STATE_WAITING_PRECAPTURE_START = 3;
private static final int STATE_WAITING_PRECAPTURE_END = 4;
private static final int STATE_WAITING_CAPTURE = 5;
private static final int STATE_WAITING_IMAGE = 6;
private static final int REQUEST_TAG = CameraDevice.TEMPLATE_STILL_CAPTURE;
@@ -74,23 +75,27 @@ public class Full2PictureRecorder extends PictureRecorder implements ImageReader
@Override
public void take() {
runFocusLock();
mState = STATE_WAITING_FIRST_FRAME;
}
private boolean supportsFocusLock() {
private boolean supportsAutoFocus() {
//noinspection ConstantConditions
int afMode = mBuilder.get(CaptureRequest.CONTROL_AF_MODE);
// Exclude OFF and EDOF as per their docs.
// Exclude OFF and EDOF as per docs.
return afMode == CameraCharacteristics.CONTROL_AF_MODE_AUTO
|| afMode == CameraCharacteristics.CONTROL_AF_MODE_CONTINUOUS_PICTURE
|| afMode == CameraCharacteristics.CONTROL_AF_MODE_CONTINUOUS_VIDEO
|| afMode == CameraCharacteristics.CONTROL_AF_MODE_MACRO;
}
private void runFocusLock() {
if (supportsFocusLock()) {
private void runAutoFocus(@NonNull CaptureResult lastResult) {
Integer afState = lastResult.get(CaptureResult.CONTROL_AF_STATE);
boolean shouldSkip = afState != null && afState == CaptureResult.CONTROL_AF_STATE_FOCUSED_LOCKED;
boolean supports = supportsAutoFocus();
LOG.i("runAutoFocus:", "supports:", supports, "shouldSkip:", shouldSkip, "afState:", afState);
if (supports && !shouldSkip) {
try {
mState = STATE_WAITING_FOCUS_LOCK;
mState = STATE_WAITING_AUTOFOCUS;
mBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER, CaptureRequest.CONTROL_AF_TRIGGER_START);
mSession.capture(mBuilder.build(), mCallback, null);
} catch (CameraAccessException e) {
@@ -99,8 +104,8 @@ public class Full2PictureRecorder extends PictureRecorder implements ImageReader
dispatchResult();
}
} else {
LOG.w("Device does not support focus lock. Running precapture.");
runPrecapture(null);
LOG.w("Device does not support auto focus. Running precapture.");
runPrecapture(lastResult);
}
}
@@ -118,12 +123,12 @@ public class Full2PictureRecorder extends PictureRecorder implements ImageReader
|| aeMode == 5 /* CameraCharacteristics.CONTROL_AE_MODE_ON_EXTERNAL_FLASH, API 28 */;
}
private void runPrecapture(@Nullable CaptureResult lastResult) {
//noinspection ConstantConditions
boolean shouldSkipPrecapture = lastResult != null
&& lastResult.get(CaptureResult.CONTROL_AE_STATE) != null
&& lastResult.get(CaptureResult.CONTROL_AE_STATE) == CaptureResult.CONTROL_AE_STATE_CONVERGED;
if (supportsPrecapture() && !shouldSkipPrecapture) {
private void runPrecapture(@NonNull CaptureResult lastResult) {
Integer aeState = lastResult.get(CaptureResult.CONTROL_AE_STATE);
boolean shouldSkip = aeState != null && aeState == CaptureResult.CONTROL_AE_STATE_CONVERGED;
boolean supports = supportsPrecapture();
LOG.i("runPrecapture:", "supports:", supports, "shouldSkip:", shouldSkip, "aeState:", aeState);
if (supports && !shouldSkip) {
try {
mState = STATE_WAITING_PRECAPTURE_START;
mBuilder.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER,
@@ -170,7 +175,8 @@ public class Full2PictureRecorder extends PictureRecorder implements ImageReader
}
public void onCaptureProgressed(@NonNull CaptureResult result) {
process(result);
// Let's ignore these. They often do not have good results.
// process(result);
}
public void onCaptureCompleted(@NonNull CaptureResult result) {
@@ -180,7 +186,11 @@ public class Full2PictureRecorder extends PictureRecorder implements ImageReader
private void process(@NonNull CaptureResult result) {
switch (mState) {
case STATE_IDLE: break;
case STATE_WAITING_FOCUS_LOCK: {
case STATE_WAITING_FIRST_FRAME: {
runAutoFocus(result);
break;
}
case STATE_WAITING_AUTOFOCUS: {
Integer afState = result.get(CaptureResult.CONTROL_AF_STATE);
if (afState == null
|| afState == CaptureResult.CONTROL_AF_STATE_FOCUSED_LOCKED
@@ -261,7 +271,7 @@ public class Full2PictureRecorder extends PictureRecorder implements ImageReader
} catch (IOException ignore) { }
// Before leaving, unlock focus.
if (supportsFocusLock()) {
if (supportsAutoFocus()) {
try {
mBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER,
CaptureRequest.CONTROL_AF_TRIGGER_CANCEL);

@@ -20,7 +20,7 @@
app:cameraPreview="glSurface"
app:cameraPlaySounds="true"
app:cameraGrid="off"
app:cameraFlash="off"
app:cameraFlash="auto"
app:cameraAudio="on"
app:cameraFacing="back"
app:cameraGestureTap="autoFocus"

@@ -20,7 +20,7 @@ New versions are released through GitHub, so the reference page is the [GitHub R
## v2.1.0
This release adds experimental support for [real-time filters](../docs.filters.html) thanks to [@agrawalsuneet][agrawalsuneet].
This release adds experimental support for [real-time filters](../docs/filters.html) thanks to [@agrawalsuneet][agrawalsuneet].
Please read the documentation page for usage instructions.
- New: Real-time filters support ([#527][527])
