Focus improvements (#574)

* New approach for metering
* Include AWB
* Complete AWB integration
* Rearrange code into MeteringParameters objects
* Add FORCED_END_DELAY into Meter
* Small changes
* Improve Camera2 pictures speed and quality
* Extend auto focus functionality to more cameras
* Move Mapper to own package
* Refactor Camera1Mapper
* Refactor Camera2Mapper
* Rename mapper methods
* Add Camera2MapperTests
* Fix success parameter
* Fix focus when zooming

pull/608/head

parent 0731b64c2f
commit a8fddc482f

@@ -0,0 +1,101 @@
package com.otaliastudios.cameraview.engine.mappers;

import android.hardware.Camera;
import android.util.Pair;

import androidx.test.ext.junit.runners.AndroidJUnit4;
import androidx.test.filters.SmallTest;

import com.otaliastudios.cameraview.BaseTest;
import com.otaliastudios.cameraview.controls.Facing;
import com.otaliastudios.cameraview.controls.Flash;
import com.otaliastudios.cameraview.controls.Hdr;
import com.otaliastudios.cameraview.controls.WhiteBalance;

import org.junit.Test;
import org.junit.runner.RunWith;

import java.util.List;
import java.util.Set;

import static android.hardware.camera2.CameraMetadata.*;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;


@RunWith(AndroidJUnit4.class)
@SmallTest
public class Camera2MapperTest extends BaseTest {

    private Camera2Mapper mapper = Camera2Mapper.get();

    @Test
    public void testMap() {
        List<Pair<Integer, Integer>> values = mapper.mapFlash(Flash.OFF);
        assertEquals(2, values.size());
        assertTrue(values.contains(new Pair<>(CONTROL_AE_MODE_ON, FLASH_MODE_OFF)));
        assertTrue(values.contains(new Pair<>(CONTROL_AE_MODE_OFF, FLASH_MODE_OFF)));
        values = mapper.mapFlash(Flash.TORCH);
        assertEquals(2, values.size());
        assertTrue(values.contains(new Pair<>(CONTROL_AE_MODE_ON, FLASH_MODE_TORCH)));
        assertTrue(values.contains(new Pair<>(CONTROL_AE_MODE_OFF, FLASH_MODE_TORCH)));
        values = mapper.mapFlash(Flash.AUTO);
        assertEquals(2, values.size());
        assertTrue(values.contains(new Pair<>(CONTROL_AE_MODE_ON_AUTO_FLASH, FLASH_MODE_OFF)));
        assertTrue(values.contains(new Pair<>(CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, FLASH_MODE_OFF)));
        values = mapper.mapFlash(Flash.ON);
        assertEquals(1, values.size());
        assertTrue(values.contains(new Pair<>(CONTROL_AE_MODE_ON_ALWAYS_FLASH, FLASH_MODE_OFF)));

        assertEquals(mapper.mapFacing(Facing.BACK), LENS_FACING_BACK);
        assertEquals(mapper.mapFacing(Facing.FRONT), LENS_FACING_FRONT);

        assertEquals(mapper.mapHdr(Hdr.OFF), CONTROL_SCENE_MODE_DISABLED);
        assertEquals(mapper.mapHdr(Hdr.ON), CONTROL_SCENE_MODE_HDR);

        assertEquals(mapper.mapWhiteBalance(WhiteBalance.AUTO), CONTROL_AWB_MODE_AUTO);
        assertEquals(mapper.mapWhiteBalance(WhiteBalance.DAYLIGHT), CONTROL_AWB_MODE_DAYLIGHT);
        assertEquals(mapper.mapWhiteBalance(WhiteBalance.CLOUDY), CONTROL_AWB_MODE_CLOUDY_DAYLIGHT);
        assertEquals(mapper.mapWhiteBalance(WhiteBalance.INCANDESCENT), CONTROL_AWB_MODE_INCANDESCENT);
        assertEquals(mapper.mapWhiteBalance(WhiteBalance.FLUORESCENT), CONTROL_AWB_MODE_FLUORESCENT);
    }


    @Test
    public void testUnmap() {
        Set<Flash> values;
        values = mapper.unmapFlash(CONTROL_AE_MODE_OFF);
        assertEquals(values.size(), 2);
        assertTrue(values.contains(Flash.OFF));
        assertTrue(values.contains(Flash.TORCH));
        values = mapper.unmapFlash(CONTROL_AE_MODE_ON);
        assertEquals(values.size(), 2);
        assertTrue(values.contains(Flash.OFF));
        assertTrue(values.contains(Flash.TORCH));
        values = mapper.unmapFlash(CONTROL_AE_MODE_ON_ALWAYS_FLASH);
        assertEquals(values.size(), 1);
        assertTrue(values.contains(Flash.ON));
        values = mapper.unmapFlash(CONTROL_AE_MODE_ON_AUTO_FLASH);
        assertEquals(values.size(), 1);
        assertTrue(values.contains(Flash.AUTO));
        values = mapper.unmapFlash(CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE);
        assertEquals(values.size(), 1);
        assertTrue(values.contains(Flash.AUTO));
        values = mapper.unmapFlash(CONTROL_AE_MODE_ON_EXTERNAL_FLASH);
        assertEquals(values.size(), 0);

        assertEquals(Facing.BACK, mapper.unmapFacing(LENS_FACING_BACK));
        assertEquals(Facing.FRONT, mapper.unmapFacing(LENS_FACING_FRONT));

        assertEquals(Hdr.OFF, mapper.unmapHdr(CONTROL_SCENE_MODE_DISABLED));
        assertEquals(Hdr.ON, mapper.unmapHdr(CONTROL_SCENE_MODE_HDR));

        assertEquals(WhiteBalance.AUTO, mapper.unmapWhiteBalance(CONTROL_AWB_MODE_AUTO));
        assertEquals(WhiteBalance.DAYLIGHT, mapper.unmapWhiteBalance(CONTROL_AWB_MODE_DAYLIGHT));
        assertEquals(WhiteBalance.CLOUDY, mapper.unmapWhiteBalance(CONTROL_AWB_MODE_CLOUDY_DAYLIGHT));
        assertEquals(WhiteBalance.INCANDESCENT, mapper.unmapWhiteBalance(CONTROL_AWB_MODE_INCANDESCENT));
        assertEquals(WhiteBalance.FLUORESCENT, mapper.unmapWhiteBalance(CONTROL_AWB_MODE_FLUORESCENT));
    }
}
@@ -1,221 +0,0 @@
package com.otaliastudios.cameraview.engine;

import android.hardware.Camera;
import android.hardware.camera2.CameraCharacteristics;
import android.os.Build;

import com.otaliastudios.cameraview.controls.Control;
import com.otaliastudios.cameraview.controls.Engine;
import com.otaliastudios.cameraview.controls.Facing;
import com.otaliastudios.cameraview.controls.Flash;
import com.otaliastudios.cameraview.controls.Hdr;
import com.otaliastudios.cameraview.controls.WhiteBalance;

import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;

import androidx.annotation.NonNull;
import androidx.annotation.RequiresApi;

/**
 * A Mapper maps camera engine constants to CameraView constants.
 */
public abstract class Mapper {

    private static Mapper CAMERA1;
    private static Mapper CAMERA2;

    public static Mapper get(@NonNull Engine engine) {
        if (engine == Engine.CAMERA1) {
            if (CAMERA1 == null) CAMERA1 = new Camera1Mapper();
            return CAMERA1;
        } else if (engine == Engine.CAMERA2 && Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
            if (CAMERA2 == null) CAMERA2 = new Camera2Mapper();
            return CAMERA2;
        } else {
            throw new IllegalArgumentException("Unknown engine or unsupported API level.");
        }
    }

    private Mapper() {}

    public abstract <T> T map(Flash flash);

    public abstract <T> T map(Facing facing);

    public abstract <T> T map(WhiteBalance whiteBalance);

    public abstract <T> T map(Hdr hdr);

    public abstract <T> Flash unmapFlash(T cameraConstant);

    public abstract <T> Facing unmapFacing(T cameraConstant);

    public abstract <T> WhiteBalance unmapWhiteBalance(T cameraConstant);

    public abstract <T> Hdr unmapHdr(T cameraConstant);

    @SuppressWarnings("WeakerAccess")
    protected <C extends Control, T> C reverseLookup(HashMap<C, T> map, T object) {
        for (C value : map.keySet()) {
            if (object.equals(map.get(value))) {
                return value;
            }
        }
        return null;
    }

    @SuppressWarnings("WeakerAccess")
    protected <C extends Control, T> C reverseListLookup(HashMap<C, List<T>> map, T object) {
        for (C value : map.keySet()) {
            List<T> list = map.get(value);
            if (list == null) continue;
            for (T candidate : list) {
                if (object.equals(candidate)) {
                    return value;
                }
            }
        }
        return null;
    }

    @SuppressWarnings("unchecked")
    private static class Camera1Mapper extends Mapper {

        private static final HashMap<Flash, String> FLASH = new HashMap<>();
        private static final HashMap<WhiteBalance, String> WB = new HashMap<>();
        private static final HashMap<Facing, Integer> FACING = new HashMap<>();
        private static final HashMap<Hdr, String> HDR = new HashMap<>();

        static {
            FLASH.put(Flash.OFF, Camera.Parameters.FLASH_MODE_OFF);
            FLASH.put(Flash.ON, Camera.Parameters.FLASH_MODE_ON);
            FLASH.put(Flash.AUTO, Camera.Parameters.FLASH_MODE_AUTO);
            FLASH.put(Flash.TORCH, Camera.Parameters.FLASH_MODE_TORCH);
            FACING.put(Facing.BACK, Camera.CameraInfo.CAMERA_FACING_BACK);
            FACING.put(Facing.FRONT, Camera.CameraInfo.CAMERA_FACING_FRONT);
            WB.put(WhiteBalance.AUTO, Camera.Parameters.WHITE_BALANCE_AUTO);
            WB.put(WhiteBalance.INCANDESCENT, Camera.Parameters.WHITE_BALANCE_INCANDESCENT);
            WB.put(WhiteBalance.FLUORESCENT, Camera.Parameters.WHITE_BALANCE_FLUORESCENT);
            WB.put(WhiteBalance.DAYLIGHT, Camera.Parameters.WHITE_BALANCE_DAYLIGHT);
            WB.put(WhiteBalance.CLOUDY, Camera.Parameters.WHITE_BALANCE_CLOUDY_DAYLIGHT);
            HDR.put(Hdr.OFF, Camera.Parameters.SCENE_MODE_AUTO);
            if (Build.VERSION.SDK_INT >= 17) {
                HDR.put(Hdr.ON, Camera.Parameters.SCENE_MODE_HDR);
            } else {
                HDR.put(Hdr.ON, "hdr");
            }
        }

        @Override
        public <T> T map(Flash flash) {
            return (T) FLASH.get(flash);
        }

        @Override
        public <T> T map(Facing facing) {
            return (T) FACING.get(facing);
        }

        @Override
        public <T> T map(WhiteBalance whiteBalance) {
            return (T) WB.get(whiteBalance);
        }

        @Override
        public <T> T map(Hdr hdr) {
            return (T) HDR.get(hdr);
        }

        @Override
        public <T> Flash unmapFlash(T cameraConstant) {
            return reverseLookup(FLASH, (String) cameraConstant);
        }

        @Override
        public <T> Facing unmapFacing(T cameraConstant) {
            return reverseLookup(FACING, (Integer) cameraConstant);
        }

        @Override
        public <T> WhiteBalance unmapWhiteBalance(T cameraConstant) {
            return reverseLookup(WB, (String) cameraConstant);
        }

        @Override
        public <T> Hdr unmapHdr(T cameraConstant) {
            return reverseLookup(HDR, (String) cameraConstant);
        }
    }

    @SuppressWarnings("unchecked")
    @RequiresApi(Build.VERSION_CODES.LOLLIPOP)
    private static class Camera2Mapper extends Mapper {

        private static final HashMap<Flash, List<Integer>> FLASH = new HashMap<>();
        private static final HashMap<Facing, Integer> FACING = new HashMap<>();
        private static final HashMap<WhiteBalance, Integer> WB = new HashMap<>();
        private static final HashMap<Hdr, Integer> HDR = new HashMap<>();

        static {
            // OFF and TORCH have also a second condition - to CameraCharacteristics.CONTROL_FLASH_MODE - but that does not
            // fit into the Mapper interface. TODO review this
            FLASH.put(Flash.OFF, Arrays.asList(CameraCharacteristics.CONTROL_AE_MODE_ON, CameraCharacteristics.CONTROL_AE_MODE_OFF));
            FLASH.put(Flash.TORCH, Arrays.asList(CameraCharacteristics.CONTROL_AE_MODE_ON, CameraCharacteristics.CONTROL_AE_MODE_OFF));
            FLASH.put(Flash.AUTO, Arrays.asList(CameraCharacteristics.CONTROL_AE_MODE_ON_AUTO_FLASH, CameraCharacteristics.CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE));
            FLASH.put(Flash.ON, Collections.singletonList(CameraCharacteristics.CONTROL_AE_MODE_ON_ALWAYS_FLASH));
            FACING.put(Facing.BACK, CameraCharacteristics.LENS_FACING_BACK);
            FACING.put(Facing.FRONT, CameraCharacteristics.LENS_FACING_FRONT);
            WB.put(WhiteBalance.AUTO, CameraCharacteristics.CONTROL_AWB_MODE_AUTO);
            WB.put(WhiteBalance.CLOUDY, CameraCharacteristics.CONTROL_AWB_MODE_CLOUDY_DAYLIGHT);
            WB.put(WhiteBalance.DAYLIGHT, CameraCharacteristics.CONTROL_AWB_MODE_DAYLIGHT);
            WB.put(WhiteBalance.FLUORESCENT, CameraCharacteristics.CONTROL_AWB_MODE_FLUORESCENT);
            WB.put(WhiteBalance.INCANDESCENT, CameraCharacteristics.CONTROL_AWB_MODE_INCANDESCENT);
            HDR.put(Hdr.OFF, CameraCharacteristics.CONTROL_SCENE_MODE_DISABLED);
            HDR.put(Hdr.ON, 18 /* CameraCharacteristics.CONTROL_SCENE_MODE_HDR */);
        }

        @SuppressWarnings("ConstantConditions")
        @Override
        public <T> T map(Flash flash) {
            return (T) FLASH.get(flash);
        }

        @Override
        public <T> T map(Facing facing) {
            return (T) FACING.get(facing);
        }

        @Override
        public <T> T map(WhiteBalance whiteBalance) {
            return (T) WB.get(whiteBalance);
        }

        @Override
        public <T> T map(Hdr hdr) {
            return (T) HDR.get(hdr);
        }

        @Override
        public <T> Flash unmapFlash(T cameraConstant) {
            return reverseListLookup(FLASH, (Integer) cameraConstant);
        }

        @Override
        public <T> Facing unmapFacing(T cameraConstant) {
            return reverseLookup(FACING, (Integer) cameraConstant);
        }

        @Override
        public <T> WhiteBalance unmapWhiteBalance(T cameraConstant) {
            return reverseLookup(WB, (Integer) cameraConstant);
        }

        @Override
        public <T> Hdr unmapHdr(T cameraConstant) {
            return reverseLookup(HDR, (Integer) cameraConstant);
        }
    }
}
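Note how the refactor changes call sites: the removed Mapper relied on unchecked generics and a per-engine lookup, while the new mappers introduced below expose typed, explicitly named methods. An illustrative comparison of hypothetical call sites (the engine code that actually invokes these is not part of this diff):

// Before: one generic entry point, return type inferred through unchecked casts.
String oldFlash = Mapper.get(Engine.CAMERA1).map(Flash.AUTO);

// After: engine-specific singletons with typed methods.
String newFlash = Camera1Mapper.get().mapFlash(Flash.AUTO);
List<Pair<Integer, Integer>> aePairs = Camera2Mapper.get().mapFlash(Flash.AUTO);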
@@ -0,0 +1,380 @@
package com.otaliastudios.cameraview.engine;

import android.graphics.PointF;
import android.graphics.Rect;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.CaptureResult;
import android.hardware.camera2.TotalCaptureResult;
import android.hardware.camera2.params.MeteringRectangle;
import android.os.Build;

import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.annotation.RequiresApi;

import com.otaliastudios.cameraview.CameraLogger;
import com.otaliastudios.cameraview.engine.metering.AutoExposure;
import com.otaliastudios.cameraview.engine.metering.AutoFocus;
import com.otaliastudios.cameraview.engine.metering.AutoWhiteBalance;
import com.otaliastudios.cameraview.engine.metering.MeteringParameter;
import com.otaliastudios.cameraview.engine.offset.Axis;
import com.otaliastudios.cameraview.engine.offset.Reference;
import com.otaliastudios.cameraview.gesture.Gesture;
import com.otaliastudios.cameraview.size.AspectRatio;
import com.otaliastudios.cameraview.size.Size;

import java.util.Arrays;
import java.util.List;

/**
 * Helps Camera2-based engines to perform 3A (auto focus, auto exposure and auto white balance)
 * metering. Users are required to:
 *
 * - Call {@link #startMetering(PointF, Gesture)} to start
 * - Call {@link #onCapture(CaptureResult)} when they have partial or total results, as long as the
 *   meter is still in a metering operation, which can be checked through {@link #isMetering()}
 * - Call {@link #resetMetering()} to reset the metering parameters if needed. This is done
 *   automatically by the meter based on the reset delay configuration in the engine, but can be
 *   called explicitly, for example when we have multiple meter requests and want to cancel the old one.
 */
@RequiresApi(Build.VERSION_CODES.LOLLIPOP)
public class Meter {

    /**
     * The meter callback.
     */
    public interface Callback {

        /**
         * Notifies that metering has started. At this point implementors should apply
         * the builder onto the preview.
         * @param point point
         * @param gesture gesture
         */
        void onMeteringStarted(@NonNull PointF point, @Nullable Gesture gesture);

        /**
         * Notifies that metering has ended. No action is required for implementors.
         * From now on, {@link #isMetering()} will return false so the meter should not
         * be passed capture results anymore.
         * @param point point
         * @param gesture gesture
         * @param success success
         */
        void onMeteringEnd(@NonNull PointF point, @Nullable Gesture gesture, boolean success);

        /**
         * Notifies that metering has been reset. From now on, this meter instance
         * is done, although in theory it could be reused by calling
         * {@link #startMetering(PointF, Gesture)} again.
         * @param point point
         * @param gesture gesture
         */
        void onMeteringReset(@NonNull PointF point, @Nullable Gesture gesture);

        /**
         * Whether metering can be reset. Since it happens at a future time, this should
         * return true if the engine is still in a legit state for this operation.
         * @param point point
         * @param gesture gesture
         * @return true if can reset
         */
        // TODO is this useful? engine could do its checks onMeteringReset()
        boolean canResetMetering(@NonNull PointF point, @Nullable Gesture gesture);
    }

    private static final String TAG = Meter.class.getSimpleName();
    private static final CameraLogger LOG = CameraLogger.create(TAG);
    private static final int FORCED_END_DELAY = 2500;

    private final CameraEngine mEngine;
    private final CaptureRequest.Builder mBuilder;
    private final CameraCharacteristics mCharacteristics;
    private final Callback mCallback;
    private PointF mPoint;
    private Gesture mGesture;

    private boolean mIsMetering;
    private long mMeteringStartTime;
    private MeteringParameter mAutoFocus = new AutoFocus();
    private MeteringParameter mAutoWhiteBalance = new AutoWhiteBalance();
    private MeteringParameter mAutoExposure = new AutoExposure();

    /**
     * Creates a new meter.
     * @param engine the engine
     * @param builder a capture builder
     * @param characteristics the camera characteristics
     * @param callback the callback
     */
    @SuppressWarnings("WeakerAccess")
    public Meter(@NonNull CameraEngine engine,
                 @NonNull CaptureRequest.Builder builder,
                 @NonNull CameraCharacteristics characteristics,
                 @NonNull Callback callback) {
        mEngine = engine;
        mBuilder = builder;
        mCharacteristics = characteristics;
        mCallback = callback;
    }

    /**
     * Starts a metering sequence.
     * @param point point
     * @param gesture gesture
     */
    @SuppressWarnings("WeakerAccess")
    public void startMetering(@NonNull PointF point, @Nullable Gesture gesture) {
        mPoint = point;
        mGesture = gesture;
        mIsMetering = true;

        // This is a good Q/A. https://stackoverflow.com/a/33181620/4288782
        // At first, the point is relative to the View system and does not account for our own cropping.
        // Will keep updating these two below.
        final PointF referencePoint = new PointF(mPoint.x, mPoint.y);
        Size referenceSize = mEngine.mPreview.getSurfaceSize();

        // 1. Account for cropping.
        // This will enlarge the preview size so that aspect ratio matches.
        referenceSize = applyPreviewCropping(referenceSize, referencePoint);

        // 2. Scale to the preview stream coordinates.
        // This will move to the preview stream coordinates by scaling.
        referenceSize = applyPreviewScale(referenceSize, referencePoint);

        // 3. Rotate to the stream coordinate system.
        // This leaves us with sensor stream coordinates.
        referenceSize = applyPreviewToSensorRotation(referenceSize, referencePoint);

        // 4. Move to the crop region coordinate system.
        // The crop region is the union of all currently active streams.
        referenceSize = applyCropRegionCoordinates(referenceSize, referencePoint);

        // 5. Move to the active array coordinate system.
        referenceSize = applyActiveArrayCoordinates(referenceSize, referencePoint);

        // 6. Now we can compute the metering regions.
        // We want to define them as a fraction of the visible size which (apart from cropping)
        // can be obtained through the SENSOR rotated preview stream size.
        Size visibleSize = mEngine.getPreviewStreamSize(Reference.SENSOR);
        //noinspection ConstantConditions
        MeteringRectangle area1 = createMeteringRectangle(referenceSize, referencePoint, visibleSize, 0.05F, 1000);
        MeteringRectangle area2 = createMeteringRectangle(referenceSize, referencePoint, visibleSize, 0.1F, 100);
        List<MeteringRectangle> areas = Arrays.asList(area1, area2);

        // 7. And finally dispatch everything
        mAutoFocus.startMetering(mCharacteristics, mBuilder, areas);
        mAutoWhiteBalance.startMetering(mCharacteristics, mBuilder, areas);
        mAutoExposure.startMetering(mCharacteristics, mBuilder, areas);

        // Dispatch to callback
        mCallback.onMeteringStarted(mPoint, mGesture);
        mMeteringStartTime = System.currentTimeMillis();
    }

    @SuppressWarnings("UnnecessaryLocalVariable")
    @NonNull
    private Size applyPreviewCropping(@NonNull Size referenceSize, @NonNull PointF referencePoint) {
        Size previewStreamSize = mEngine.getPreviewStreamSize(Reference.VIEW);
        Size previewSurfaceSize = referenceSize;
        if (previewStreamSize == null) {
            throw new IllegalStateException("getPreviewStreamSize should not be null at this point.");
        }
        int referenceWidth = previewSurfaceSize.getWidth();
        int referenceHeight = previewSurfaceSize.getHeight();
        AspectRatio previewStreamAspectRatio = AspectRatio.of(previewStreamSize);
        AspectRatio previewSurfaceAspectRatio = AspectRatio.of(previewSurfaceSize);
        if (mEngine.mPreview.isCropping()) {
            if (previewStreamAspectRatio.toFloat() > previewSurfaceAspectRatio.toFloat()) {
                // Stream is larger. The x coordinate must be increased: a touch on the left side
                // of the surface is not on the left side of the stream (it's more to the right).
                float scale = previewStreamAspectRatio.toFloat() / previewSurfaceAspectRatio.toFloat();
                referencePoint.x += previewSurfaceSize.getWidth() * (scale - 1F) / 2F;
                referenceWidth = Math.round(previewSurfaceSize.getWidth() * scale);
            } else {
                // Stream is taller. The y coordinate must be increased: a touch on the top side
                // of the surface is not on the top side of the stream (it's a bit lower).
                float scale = previewSurfaceAspectRatio.toFloat() / previewStreamAspectRatio.toFloat();
                referencePoint.y += previewSurfaceSize.getHeight() * (scale - 1F) / 2F;
                referenceHeight = Math.round(previewSurfaceSize.getHeight() * scale);
            }
        }
        return new Size(referenceWidth, referenceHeight);
    }

    @SuppressWarnings("ConstantConditions")
    @NonNull
    private Size applyPreviewScale(@NonNull Size referenceSize, @NonNull PointF referencePoint) {
        // The referenceSize now has the same aspect ratio as the previewStreamSize, but they
        // can still have different sizes (that is, a scale operation is needed).
        Size previewStreamSize = mEngine.getPreviewStreamSize(Reference.VIEW);
        referencePoint.x *= (float) previewStreamSize.getWidth() / referenceSize.getWidth();
        referencePoint.y *= (float) previewStreamSize.getHeight() / referenceSize.getHeight();
        return previewStreamSize;
    }

    @SuppressWarnings("SuspiciousNameCombination")
    @NonNull
    private Size applyPreviewToSensorRotation(@NonNull Size referenceSize, @NonNull PointF referencePoint) {
        // Not elegant, but the sin/cos way was failing for some reason.
        int angle = mEngine.getAngles().offset(Reference.SENSOR, Reference.VIEW, Axis.ABSOLUTE);
        boolean flip = angle % 180 != 0;
        float tempX = referencePoint.x;
        float tempY = referencePoint.y;
        if (angle == 0) {
            referencePoint.x = tempX;
            referencePoint.y = tempY;
        } else if (angle == 90) {
            referencePoint.x = tempY;
            referencePoint.y = referenceSize.getWidth() - tempX;
        } else if (angle == 180) {
            referencePoint.x = referenceSize.getWidth() - tempX;
            referencePoint.y = referenceSize.getHeight() - tempY;
        } else if (angle == 270) {
            referencePoint.x = referenceSize.getHeight() - tempY;
            referencePoint.y = tempX;
        } else {
            throw new IllegalStateException("Unexpected angle " + angle);
        }
        return flip ? referenceSize.flip() : referenceSize;
    }

    @NonNull
    private Size applyCropRegionCoordinates(@NonNull Size referenceSize, @NonNull PointF referencePoint) {
        // The input point and size refer to the stream rect.
        // The stream rect is part of the 'crop region', as described below.
        // https://source.android.com/devices/camera/camera3_crop_reprocess.html
        Rect cropRect = mBuilder.get(CaptureRequest.SCALER_CROP_REGION);
        // For now, we don't care about x and y position. Rect should be non-null, but let's be safe.
        int cropRectWidth = cropRect == null ? referenceSize.getWidth() : cropRect.width();
        int cropRectHeight = cropRect == null ? referenceSize.getHeight() : cropRect.height();
        // The stream is always centered inside the crop region, and one of the dimensions
        // should always match. We just increase the other one.
        referencePoint.x += (cropRectWidth - referenceSize.getWidth()) / 2F;
        referencePoint.y += (cropRectHeight - referenceSize.getHeight()) / 2F;
        return new Size(cropRectWidth, cropRectHeight);
    }

    @NonNull
    private Size applyActiveArrayCoordinates(@NonNull Size referenceSize, @NonNull PointF referencePoint) {
        // The input point and size refer to the scaler crop region.
        // We can query for the crop region position inside the active array, so this is easy.
        Rect cropRect = mBuilder.get(CaptureRequest.SCALER_CROP_REGION);
        referencePoint.x += cropRect == null ? 0 : cropRect.left;
        referencePoint.y += cropRect == null ? 0 : cropRect.top;
        // Finally, get the active rect width and height from characteristics.
        Rect activeRect = mCharacteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
        if (activeRect == null) { // Should never happen
            activeRect = new Rect(0, 0, referenceSize.getWidth(), referenceSize.getHeight());
        }
        return new Size(activeRect.width(), activeRect.height());
    }

    /**
     * Creates a metering rectangle around the center point.
     * The rectangle will have a size that's a factor of the visible width and height.
     * The rectangle will also be constrained to be inside the given boundaries,
     * so we don't exceed them in case the center point is exactly on one side, for example.
     * @return a new rectangle
     */
    @NonNull
    private MeteringRectangle createMeteringRectangle(
            @NonNull Size boundaries,
            @NonNull PointF center,
            @NonNull Size visibleSize,
            float factor,
            int weight) {
        float rectangleWidth = factor * visibleSize.getWidth();
        float rectangleHeight = factor * visibleSize.getHeight();
        float rectangleLeft = center.x - rectangleWidth / 2F;
        float rectangleTop = center.y - rectangleHeight / 2F;
        // Respect boundaries
        if (rectangleLeft < 0) rectangleLeft = 0;
        if (rectangleTop < 0) rectangleTop = 0;
        if (rectangleLeft + rectangleWidth > boundaries.getWidth()) {
            rectangleWidth = boundaries.getWidth() - rectangleLeft;
        }
        if (rectangleTop + rectangleHeight > boundaries.getHeight()) {
            rectangleHeight = boundaries.getHeight() - rectangleTop;
        }
        return new MeteringRectangle(
                (int) rectangleLeft,
                (int) rectangleTop,
                (int) rectangleWidth,
                (int) rectangleHeight,
                weight
        );
    }

    /**
     * True if we're metering. False if we're not, for example if we're waiting for
     * a reset call, or if {@link #startMetering(PointF, Gesture)} was never called.
     * @return true if metering
     */
    @SuppressWarnings("WeakerAccess")
    public boolean isMetering() {
        return mIsMetering;
    }

    /**
     * Should be called when we have partial or total CaptureResults,
     * but only while {@link #isMetering()} returns true.
     * @param result result
     */
    @SuppressWarnings("WeakerAccess")
    public void onCapture(@NonNull CaptureResult result) {
        if (!mIsMetering) return; // We're not interested in results anymore
        if (!(result instanceof TotalCaptureResult)) return; // Ignore partial results, their contents can be missing/wrong

        if (!mAutoFocus.isMetered()) mAutoFocus.onCapture(result);
        if (!mAutoExposure.isMetered()) mAutoExposure.onCapture(result);
        if (!mAutoWhiteBalance.isMetered()) mAutoWhiteBalance.onCapture(result);
        if (mAutoFocus.isMetered() && mAutoExposure.isMetered() && mAutoWhiteBalance.isMetered()) {
            LOG.i("onCapture:", "all MeteringParameters have converged. Dispatching onMeteringEnd");
            boolean success = mAutoFocus.isSuccessful()
                    && mAutoExposure.isSuccessful()
                    && mAutoWhiteBalance.isSuccessful();
            onMeteringEnd(success);
        } else if (System.currentTimeMillis() - mMeteringStartTime >= FORCED_END_DELAY) {
            LOG.i("onCapture:", "FORCED_END_DELAY was reached. Some MeteringParameter is stuck. Forcing end.");
            onMeteringEnd(false);
        }
    }

    private void onMeteringEnd(boolean success) {
        mCallback.onMeteringEnd(mPoint, mGesture, success);
        mIsMetering = false;
        mEngine.mHandler.remove(mResetRunnable);
        if (mEngine.shouldResetAutoFocus()) {
            mEngine.mHandler.post(mEngine.getAutoFocusResetDelay(), mResetRunnable);
        }
    }

    /**
     * Can be called to perform the reset at a time different than the one
     * specified by the {@link CameraEngine} reset delay.
     */
    @SuppressWarnings("WeakerAccess")
    public void resetMetering() {
        mEngine.mHandler.remove(mResetRunnable);
        if (mCallback.canResetMetering(mPoint, mGesture)) {
            LOG.i("Resetting the meter parameters.");
            Rect whole = mCharacteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
            if (whole == null) whole = new Rect();
            MeteringRectangle rectangle = new MeteringRectangle(whole, MeteringRectangle.METERING_WEIGHT_DONT_CARE);
            mAutoFocus.resetMetering(mCharacteristics, mBuilder, rectangle);
            mAutoWhiteBalance.resetMetering(mCharacteristics, mBuilder, rectangle);
            mAutoExposure.resetMetering(mCharacteristics, mBuilder, rectangle);
            mCallback.onMeteringReset(mPoint, mGesture);
        }
    }

    private Runnable mResetRunnable = new Runnable() {
        @Override
        public void run() {
            resetMetering();
        }
    };
}
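For orientation, this is roughly how an engine is expected to drive the Meter, following the class Javadoc above. Everything below is an illustrative sketch: the field names (mMeter, repeatingBuilder, meterCallback, touchPoint) and the session wiring are assumptions, not code from this commit.

// Sketch only: create and start the meter when a focus gesture arrives.
Meter mMeter = new Meter(engine, repeatingBuilder, cameraCharacteristics, meterCallback);
mMeter.startMetering(touchPoint, Gesture.TAP);
// Callback.onMeteringStarted() should then re-apply the builder onto the repeating request.

// Later, inside the engine's CameraCaptureSession.CaptureCallback:
@Override
public void onCaptureCompleted(@NonNull CameraCaptureSession session,
                               @NonNull CaptureRequest request,
                               @NonNull TotalCaptureResult result) {
    // The meter ends on its own once AF/AE/AWB converge, or after FORCED_END_DELAY.
    if (mMeter.isMetering()) mMeter.onCapture(result);
}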
@@ -0,0 +1,112 @@
package com.otaliastudios.cameraview.engine.mappers;

import android.hardware.Camera;
import android.os.Build;

import com.otaliastudios.cameraview.controls.Control;
import com.otaliastudios.cameraview.controls.Facing;
import com.otaliastudios.cameraview.controls.Flash;
import com.otaliastudios.cameraview.controls.Hdr;
import com.otaliastudios.cameraview.controls.WhiteBalance;

import java.util.HashMap;
import java.util.Map;

import androidx.annotation.NonNull;
import androidx.annotation.Nullable;

/**
 * A Mapper maps camera engine constants to CameraView constants.
 */
public class Camera1Mapper {

    private static Camera1Mapper sInstance;

    @NonNull
    public static Camera1Mapper get() {
        if (sInstance == null) {
            sInstance = new Camera1Mapper();
        }
        return sInstance;
    }

    private static final Map<Flash, String> FLASH = new HashMap<>();
    private static final Map<WhiteBalance, String> WB = new HashMap<>();
    private static final Map<Facing, Integer> FACING = new HashMap<>();
    private static final Map<Hdr, String> HDR = new HashMap<>();

    static {
        FLASH.put(Flash.OFF, Camera.Parameters.FLASH_MODE_OFF);
        FLASH.put(Flash.ON, Camera.Parameters.FLASH_MODE_ON);
        FLASH.put(Flash.AUTO, Camera.Parameters.FLASH_MODE_AUTO);
        FLASH.put(Flash.TORCH, Camera.Parameters.FLASH_MODE_TORCH);
        FACING.put(Facing.BACK, Camera.CameraInfo.CAMERA_FACING_BACK);
        FACING.put(Facing.FRONT, Camera.CameraInfo.CAMERA_FACING_FRONT);
        WB.put(WhiteBalance.AUTO, Camera.Parameters.WHITE_BALANCE_AUTO);
        WB.put(WhiteBalance.INCANDESCENT, Camera.Parameters.WHITE_BALANCE_INCANDESCENT);
        WB.put(WhiteBalance.FLUORESCENT, Camera.Parameters.WHITE_BALANCE_FLUORESCENT);
        WB.put(WhiteBalance.DAYLIGHT, Camera.Parameters.WHITE_BALANCE_DAYLIGHT);
        WB.put(WhiteBalance.CLOUDY, Camera.Parameters.WHITE_BALANCE_CLOUDY_DAYLIGHT);
        HDR.put(Hdr.OFF, Camera.Parameters.SCENE_MODE_AUTO);
        if (Build.VERSION.SDK_INT >= 17) {
            HDR.put(Hdr.ON, Camera.Parameters.SCENE_MODE_HDR);
        } else {
            HDR.put(Hdr.ON, "hdr");
        }
    }

    private Camera1Mapper() {}

    @NonNull
    public String mapFlash(@NonNull Flash flash) {
        //noinspection ConstantConditions
        return FLASH.get(flash);
    }

    public int mapFacing(@NonNull Facing facing) {
        //noinspection ConstantConditions
        return FACING.get(facing);
    }

    @NonNull
    public String mapWhiteBalance(@NonNull WhiteBalance whiteBalance) {
        //noinspection ConstantConditions
        return WB.get(whiteBalance);
    }

    @NonNull
    public String mapHdr(@NonNull Hdr hdr) {
        //noinspection ConstantConditions
        return HDR.get(hdr);
    }

    @Nullable
    public Flash unmapFlash(@NonNull String cameraConstant) {
        return reverseLookup(FLASH, cameraConstant);
    }

    @Nullable
    public Facing unmapFacing(int cameraConstant) {
        return reverseLookup(FACING, cameraConstant);
    }

    @Nullable
    public WhiteBalance unmapWhiteBalance(@NonNull String cameraConstant) {
        return reverseLookup(WB, cameraConstant);
    }

    @Nullable
    public Hdr unmapHdr(@NonNull String cameraConstant) {
        return reverseLookup(HDR, cameraConstant);
    }

    @Nullable
    private <C extends Control, T> C reverseLookup(@NonNull Map<C, T> map, @NonNull T object) {
        for (C value : map.keySet()) {
            if (object.equals(map.get(value))) {
                return value;
            }
        }
        return null;
    }
}
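As a usage sketch (the Camera1Engine call sites are not part of this diff, and 'camera' is assumed to be an open android.hardware.Camera), a Camera1-based engine applies these values straight onto Camera.Parameters:

// Sketch only: map CameraView controls to Camera1 string constants and back.
Camera.Parameters params = camera.getParameters();
Camera1Mapper mapper = Camera1Mapper.get();
params.setFlashMode(mapper.mapFlash(Flash.AUTO));                    // e.g. "auto"
params.setWhiteBalance(mapper.mapWhiteBalance(WhiteBalance.CLOUDY)); // e.g. "cloudy-daylight"
camera.setParameters(params);
// ...and back, e.g. to read the value currently applied by the device:
Flash applied = mapper.unmapFlash(params.getFlashMode());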
@@ -0,0 +1,168 @@
package com.otaliastudios.cameraview.engine.mappers;

import android.hardware.Camera;
import android.hardware.camera2.CameraCharacteristics;
import android.os.Build;
import android.util.Pair;

import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.annotation.RequiresApi;

import com.otaliastudios.cameraview.controls.Control;
import com.otaliastudios.cameraview.controls.Engine;
import com.otaliastudios.cameraview.controls.Facing;
import com.otaliastudios.cameraview.controls.Flash;
import com.otaliastudios.cameraview.controls.Hdr;
import com.otaliastudios.cameraview.controls.WhiteBalance;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

/**
 * A Mapper maps camera engine constants to CameraView constants.
 */
@RequiresApi(Build.VERSION_CODES.LOLLIPOP)
public class Camera2Mapper {

    private static Camera2Mapper sInstance;

    public static Camera2Mapper get() {
        if (sInstance == null) {
            sInstance = new Camera2Mapper();
        }
        return sInstance;
    }

    private static final Map<Facing, Integer> FACING = new HashMap<>();
    private static final Map<WhiteBalance, Integer> WB = new HashMap<>();
    private static final Map<Hdr, Integer> HDR = new HashMap<>();

    static {
        FACING.put(Facing.BACK, CameraCharacteristics.LENS_FACING_BACK);
        FACING.put(Facing.FRONT, CameraCharacteristics.LENS_FACING_FRONT);
        WB.put(WhiteBalance.AUTO, CameraCharacteristics.CONTROL_AWB_MODE_AUTO);
        WB.put(WhiteBalance.CLOUDY, CameraCharacteristics.CONTROL_AWB_MODE_CLOUDY_DAYLIGHT);
        WB.put(WhiteBalance.DAYLIGHT, CameraCharacteristics.CONTROL_AWB_MODE_DAYLIGHT);
        WB.put(WhiteBalance.FLUORESCENT, CameraCharacteristics.CONTROL_AWB_MODE_FLUORESCENT);
        WB.put(WhiteBalance.INCANDESCENT, CameraCharacteristics.CONTROL_AWB_MODE_INCANDESCENT);
        HDR.put(Hdr.OFF, CameraCharacteristics.CONTROL_SCENE_MODE_DISABLED);
        HDR.put(Hdr.ON, 18 /* CameraCharacteristics.CONTROL_SCENE_MODE_HDR */);
    }

    private Camera2Mapper() {}

    @NonNull
    public List<Pair<Integer, Integer>> mapFlash(@NonNull Flash flash) {
        List<Pair<Integer, Integer>> result = new ArrayList<>();
        switch (flash) {
            case ON: {
                result.add(new Pair<>(
                        CameraCharacteristics.CONTROL_AE_MODE_ON_ALWAYS_FLASH,
                        CameraCharacteristics.FLASH_MODE_OFF));
                break;
            }
            case AUTO: {
                result.add(new Pair<>(
                        CameraCharacteristics.CONTROL_AE_MODE_ON_AUTO_FLASH,
                        CameraCharacteristics.FLASH_MODE_OFF));
                result.add(new Pair<>(
                        CameraCharacteristics.CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE,
                        CameraCharacteristics.FLASH_MODE_OFF));
                break;
            }
            case OFF: {
                result.add(new Pair<>(
                        CameraCharacteristics.CONTROL_AE_MODE_ON,
                        CameraCharacteristics.FLASH_MODE_OFF));
                result.add(new Pair<>(
                        CameraCharacteristics.CONTROL_AE_MODE_OFF,
                        CameraCharacteristics.FLASH_MODE_OFF));
                break;
            }
            case TORCH: {
                // When AE_MODE is ON or OFF, we can finally use the flash mode
                // low level control to either turn flash off or open the torch
                result.add(new Pair<>(
                        CameraCharacteristics.CONTROL_AE_MODE_ON,
                        CameraCharacteristics.FLASH_MODE_TORCH));
                result.add(new Pair<>(
                        CameraCharacteristics.CONTROL_AE_MODE_OFF,
                        CameraCharacteristics.FLASH_MODE_TORCH));
                break;
            }
        }
        return result;
    }

    public int mapFacing(@NonNull Facing facing) {
        //noinspection ConstantConditions
        return FACING.get(facing);
    }

    public int mapWhiteBalance(@NonNull WhiteBalance whiteBalance) {
        //noinspection ConstantConditions
        return WB.get(whiteBalance);
    }

    public int mapHdr(@NonNull Hdr hdr) {
        //noinspection ConstantConditions
        return HDR.get(hdr);
    }

    @NonNull
    public Set<Flash> unmapFlash(int cameraConstant) {
        Set<Flash> result = new HashSet<>();
        switch (cameraConstant) {
            case CameraCharacteristics.CONTROL_AE_MODE_OFF:
            case CameraCharacteristics.CONTROL_AE_MODE_ON: {
                result.add(Flash.OFF);
                result.add(Flash.TORCH);
                break;
            }
            case CameraCharacteristics.CONTROL_AE_MODE_ON_ALWAYS_FLASH: {
                result.add(Flash.ON);
                break;
            }
            case CameraCharacteristics.CONTROL_AE_MODE_ON_AUTO_FLASH:
            case CameraCharacteristics.CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE: {
                result.add(Flash.AUTO);
                break;
            }
            case CameraCharacteristics.CONTROL_AE_MODE_ON_EXTERNAL_FLASH:
            default: break; // we don't support external flash
        }
        return result;
    }

    @Nullable
    public Facing unmapFacing(int cameraConstant) {
        return reverseLookup(FACING, cameraConstant);
    }

    @Nullable
    public WhiteBalance unmapWhiteBalance(int cameraConstant) {
        return reverseLookup(WB, cameraConstant);
    }

    @Nullable
    public Hdr unmapHdr(int cameraConstant) {
        return reverseLookup(HDR, cameraConstant);
    }

    @Nullable
    private <C extends Control, T> C reverseLookup(@NonNull Map<C, T> map, @NonNull T object) {
        for (C value : map.keySet()) {
            if (object.equals(map.get(value))) {
                return value;
            }
        }
        return null;
    }
}
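mapFlash returns a list of (AE mode, flash mode) pairs because not every device supports every AE mode; the engine is expected to pick the first supported pair and write it into the capture request. A rough sketch of that selection (the actual Camera2Engine logic is not part of this diff; 'characteristics' and 'builder' are assumed to be the usual CameraCharacteristics and CaptureRequest.Builder):

// Sketch only: pick the first pair whose AE mode the device reports as available.
int[] availableAeModes = characteristics.get(CameraCharacteristics.CONTROL_AE_AVAILABLE_MODES);
outer:
for (Pair<Integer, Integer> pair : Camera2Mapper.get().mapFlash(Flash.AUTO)) {
    for (int mode : availableAeModes != null ? availableAeModes : new int[0]) {
        if (mode == pair.first) {
            builder.set(CaptureRequest.CONTROL_AE_MODE, pair.first);
            builder.set(CaptureRequest.FLASH_MODE, pair.second);
            break outer;
        }
    }
}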
@@ -0,0 +1,101 @@
package com.otaliastudios.cameraview.engine.metering;

import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.CaptureResult;
import android.hardware.camera2.params.MeteringRectangle;
import android.os.Build;

import androidx.annotation.NonNull;
import androidx.annotation.RequiresApi;

import com.otaliastudios.cameraview.CameraLogger;

import java.util.List;

@RequiresApi(Build.VERSION_CODES.LOLLIPOP)
public class AutoExposure extends MeteringParameter {

    private static final String TAG = AutoExposure.class.getSimpleName();
    private static final CameraLogger LOG = CameraLogger.create(TAG);

    private boolean isStarted;

    @Override
    public void startMetering(@NonNull CameraCharacteristics characteristics,
                              @NonNull CaptureRequest.Builder builder,
                              @NonNull List<MeteringRectangle> areas) {
        isSuccessful = false;
        isMetered = false;
        isStarted = false;

        boolean isNotLegacy = readCharacteristic(characteristics,
                CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL, -1) !=
                CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY;
        Integer aeMode = builder.get(CaptureRequest.CONTROL_AE_MODE);
        boolean isAEOn = aeMode != null &&
                (aeMode == CameraCharacteristics.CONTROL_AE_MODE_ON
                        || aeMode == CameraCharacteristics.CONTROL_AE_MODE_ON_ALWAYS_FLASH
                        || aeMode == CameraCharacteristics.CONTROL_AE_MODE_ON_AUTO_FLASH
                        || aeMode == CameraCharacteristics.CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE
                        || aeMode == 5 /* CameraCharacteristics.CONTROL_AE_MODE_ON_EXTERNAL_FLASH, API 28 */);
        isSupported = isNotLegacy && isAEOn;

        if (isSupported) {
            builder.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER,
                    CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER_START);
        }

        // Even if precapture is not supported, check the regions anyway.
        int maxRegions = readCharacteristic(characteristics,
                CameraCharacteristics.CONTROL_MAX_REGIONS_AE, 0);
        if (maxRegions > 0) {
            int max = Math.min(maxRegions, areas.size());
            builder.set(CaptureRequest.CONTROL_AE_REGIONS,
                    areas.subList(0, max).toArray(new MeteringRectangle[]{}));
        }
    }

    @Override
    public void onCapture(@NonNull CaptureResult result) {
        if (isMetered || !isSupported) return;
        Integer aeState = result.get(CaptureResult.CONTROL_AE_STATE);
        LOG.i("onCapture:", "aeState:", aeState);
        if (aeState == null) return;

        if (!isStarted) {
            if (aeState == CaptureRequest.CONTROL_AE_STATE_PRECAPTURE) {
                isStarted = true;
            } else if (aeState == CaptureResult.CONTROL_AE_STATE_CONVERGED
                    || aeState == CaptureResult.CONTROL_AE_STATE_FLASH_REQUIRED) {
                // PRECAPTURE is a transient state, so also check for the final states.
                isMetered = true;
                isSuccessful = true;
            }
        } else {
            if (aeState == CaptureRequest.CONTROL_AE_STATE_CONVERGED
                    || aeState == CaptureRequest.CONTROL_AE_STATE_FLASH_REQUIRED) {
                isMetered = true;
                isSuccessful = true;
            }
        }
    }

    @Override
    public void resetMetering(@NonNull CameraCharacteristics characteristics,
                              @NonNull CaptureRequest.Builder builder,
                              @NonNull MeteringRectangle area) {
        int maxRegions = readCharacteristic(characteristics,
                CameraCharacteristics.CONTROL_MAX_REGIONS_AE, 0);
        if (maxRegions > 0) {
            builder.set(CaptureRequest.CONTROL_AE_REGIONS, new MeteringRectangle[]{area});
        }
        if (isSupported) {
            // Cleanup any precapture sequence.
            if (Build.VERSION.SDK_INT >= 23) {
                builder.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER,
                        CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER_CANCEL);
            }
        }
    }
}
@@ -0,0 +1,87 @@
package com.otaliastudios.cameraview.engine.metering;

import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.CaptureResult;
import android.hardware.camera2.params.MeteringRectangle;
import android.os.Build;

import androidx.annotation.NonNull;
import androidx.annotation.RequiresApi;

import com.otaliastudios.cameraview.CameraLogger;

import java.util.List;

@RequiresApi(Build.VERSION_CODES.LOLLIPOP)
public class AutoFocus extends MeteringParameter {

    private static final String TAG = AutoFocus.class.getSimpleName();
    private static final CameraLogger LOG = CameraLogger.create(TAG);

    @Override
    public void startMetering(@NonNull CameraCharacteristics characteristics,
                              @NonNull CaptureRequest.Builder builder,
                              @NonNull List<MeteringRectangle> areas) {
        isSuccessful = false;
        isMetered = false;

        Integer afMode = builder.get(CaptureRequest.CONTROL_AF_MODE);
        // Exclude OFF and EDOF as per docs.
        isSupported = afMode != null &&
                (afMode == CameraCharacteristics.CONTROL_AF_MODE_AUTO
                        || afMode == CameraCharacteristics.CONTROL_AF_MODE_CONTINUOUS_PICTURE
                        || afMode == CameraCharacteristics.CONTROL_AF_MODE_CONTINUOUS_VIDEO
                        || afMode == CameraCharacteristics.CONTROL_AF_MODE_MACRO);
        if (isSupported) {
            builder.set(CaptureRequest.CONTROL_AF_TRIGGER, CaptureRequest.CONTROL_AF_TRIGGER_START);
        }

        // Even if auto is not supported, change the regions anyway.
        int maxRegions = readCharacteristic(characteristics, CameraCharacteristics.CONTROL_MAX_REGIONS_AF, 0);
        if (maxRegions > 0) {
            int max = Math.min(maxRegions, areas.size());
            builder.set(CaptureRequest.CONTROL_AF_REGIONS,
                    areas.subList(0, max).toArray(new MeteringRectangle[]{}));
        }
    }

    @Override
    public void onCapture(@NonNull CaptureResult result) {
        if (isMetered || !isSupported) return;
        Integer afState = result.get(CaptureResult.CONTROL_AF_STATE);
        LOG.i("onCapture:", "afState:", afState);
        if (afState == null) return;
        switch (afState) {
            case CaptureRequest.CONTROL_AF_STATE_FOCUSED_LOCKED: {
                isMetered = true;
                isSuccessful = true;
                break;
            }
            case CaptureRequest.CONTROL_AF_STATE_NOT_FOCUSED_LOCKED: {
                isMetered = true;
                isSuccessful = false;
                break;
            }
            case CaptureRequest.CONTROL_AF_STATE_INACTIVE: break;
            case CaptureRequest.CONTROL_AF_STATE_ACTIVE_SCAN: break;
            default: break;
        }
    }

    @Override
    public void resetMetering(@NonNull CameraCharacteristics characteristics,
                              @NonNull CaptureRequest.Builder builder,
                              @NonNull MeteringRectangle area) {
        int maxRegions = readCharacteristic(characteristics,
                CameraCharacteristics.CONTROL_MAX_REGIONS_AF, 0);
        if (maxRegions > 0) {
            builder.set(CaptureRequest.CONTROL_AF_REGIONS, new MeteringRectangle[]{area});
        }

        if (isSupported) { // Cleanup any trigger.
            builder.set(CaptureRequest.CONTROL_AF_TRIGGER, CaptureRequest.CONTROL_AF_TRIGGER_CANCEL);
        }
    }
}
@@ -0,0 +1,81 @@
package com.otaliastudios.cameraview.engine.metering;

import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.CaptureResult;
import android.hardware.camera2.params.MeteringRectangle;
import android.os.Build;

import androidx.annotation.NonNull;
import androidx.annotation.RequiresApi;

import com.otaliastudios.cameraview.CameraLogger;

import java.util.List;

@RequiresApi(Build.VERSION_CODES.LOLLIPOP)
public class AutoWhiteBalance extends MeteringParameter {

    private static final String TAG = AutoWhiteBalance.class.getSimpleName();
    private static final CameraLogger LOG = CameraLogger.create(TAG);

    @Override
    public void startMetering(@NonNull CameraCharacteristics characteristics,
                              @NonNull CaptureRequest.Builder builder,
                              @NonNull List<MeteringRectangle> areas) {
        isSuccessful = false;
        isMetered = false;

        boolean isNotLegacy = readCharacteristic(characteristics,
                CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL, -1) !=
                CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY;
        Integer awbMode = builder.get(CaptureRequest.CONTROL_AWB_MODE);
        isSupported = isNotLegacy && awbMode != null && awbMode == CaptureRequest.CONTROL_AWB_MODE_AUTO;

        if (isSupported) {
            // Remove any lock. We're not setting any, but just in case.
            builder.set(CaptureRequest.CONTROL_AWB_LOCK, false);
        }

        // Even if auto is not supported, change the regions anyway.
        int maxRegions = readCharacteristic(characteristics,
                CameraCharacteristics.CONTROL_MAX_REGIONS_AWB, 0);
        if (maxRegions > 0) {
            int max = Math.min(maxRegions, areas.size());
            builder.set(CaptureRequest.CONTROL_AWB_REGIONS,
                    areas.subList(0, max).toArray(new MeteringRectangle[]{}));
        }
    }

    @Override
    public void onCapture(@NonNull CaptureResult result) {
        if (isMetered || !isSupported) return;
        Integer awbState = result.get(CaptureResult.CONTROL_AWB_STATE);
        LOG.i("onCapture:", "awbState:", awbState);
        if (awbState == null) return;

        switch (awbState) {
            case CaptureRequest.CONTROL_AWB_STATE_CONVERGED: {
                isMetered = true;
                isSuccessful = true;
                break;
            }
            case CaptureRequest.CONTROL_AWB_STATE_LOCKED: break;
            case CaptureRequest.CONTROL_AWB_STATE_INACTIVE: break;
            case CaptureRequest.CONTROL_AWB_STATE_SEARCHING: break;
            default: break;
        }
    }

    @Override
    public void resetMetering(@NonNull CameraCharacteristics characteristics,
                              @NonNull CaptureRequest.Builder builder,
                              @NonNull MeteringRectangle area) {
        int maxRegions = readCharacteristic(characteristics,
                CameraCharacteristics.CONTROL_MAX_REGIONS_AWB, 0);
        if (maxRegions > 0) {
            builder.set(CaptureRequest.CONTROL_AWB_REGIONS, new MeteringRectangle[]{area});
        }
    }
}
@@ -0,0 +1,54 @@
package com.otaliastudios.cameraview.engine.metering;

import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.CaptureResult;
import android.hardware.camera2.params.MeteringRectangle;
import android.os.Build;

import androidx.annotation.NonNull;
import androidx.annotation.RequiresApi;

import java.util.List;

@RequiresApi(Build.VERSION_CODES.LOLLIPOP)
public abstract class MeteringParameter {

    @SuppressWarnings("WeakerAccess")
    protected boolean isSupported;

    @SuppressWarnings("WeakerAccess")
    protected boolean isSuccessful;

    @SuppressWarnings("WeakerAccess")
    protected boolean isMetered;

    @SuppressWarnings("WeakerAccess")
    @NonNull
    protected <T> T readCharacteristic(@NonNull CameraCharacteristics characteristics,
                                       @NonNull CameraCharacteristics.Key<T> key,
                                       @NonNull T fallback) {
        T value = characteristics.get(key);
        return value == null ? fallback : value;
    }

    public final boolean isMetered() {
        // A non supported parameter should always appear as metered
        return isMetered || !isSupported;
    }

    public final boolean isSuccessful() {
        // A non supported parameter should always appear as successful
        return isSuccessful || !isSupported;
    }

    public abstract void startMetering(@NonNull CameraCharacteristics characteristics,
                                       @NonNull CaptureRequest.Builder builder,
                                       @NonNull List<MeteringRectangle> areas);

    public abstract void resetMetering(@NonNull CameraCharacteristics characteristics,
                                       @NonNull CaptureRequest.Builder builder,
                                       @NonNull MeteringRectangle area);

    public abstract void onCapture(@NonNull CaptureResult result);
}
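The contract above is intentionally small: startMetering() decides isSupported and writes triggers/regions into the builder, onCapture() flips isMetered/isSuccessful from capture results, and resetMetering() undoes the changes. A hypothetical minimal subclass, only to illustrate that contract (not part of the library):

// Hypothetical example, not library code: a parameter that is considered
// metered and successful as soon as the first capture result arrives.
@RequiresApi(Build.VERSION_CODES.LOLLIPOP)
class ImmediateParameter extends MeteringParameter {

    @Override
    public void startMetering(@NonNull CameraCharacteristics characteristics,
                              @NonNull CaptureRequest.Builder builder,
                              @NonNull List<MeteringRectangle> areas) {
        // Real implementations derive this from characteristics and the builder's current modes.
        isSupported = true;
        isMetered = false;
        isSuccessful = false;
    }

    @Override
    public void onCapture(@NonNull CaptureResult result) {
        // Real implementations inspect CaptureResult state keys before flipping these flags.
        isMetered = true;
        isSuccessful = true;
    }

    @Override
    public void resetMetering(@NonNull CameraCharacteristics characteristics,
                              @NonNull CaptureRequest.Builder builder,
                              @NonNull MeteringRectangle area) {
        // Real implementations restore default regions and cancel any pending triggers here.
    }
}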