Remove old metering package

pull/580/head
Mattia Iavarone 6 years ago
parent 47b7cdd514
commit 6db954420d
  1. cameraview/src/main/java/com/otaliastudios/cameraview/engine/Camera2Engine.java (207 changed lines)
  2. cameraview/src/main/java/com/otaliastudios/cameraview/engine/Meter.java (369 changed lines)
  3. cameraview/src/main/java/com/otaliastudios/cameraview/engine/meter/WhiteBalanceMeter.java (2 changed lines)
  4. cameraview/src/main/java/com/otaliastudios/cameraview/engine/metering/AutoExposure.java (188 changed lines)
  5. cameraview/src/main/java/com/otaliastudios/cameraview/engine/metering/AutoFocus.java (130 changed lines)
  6. cameraview/src/main/java/com/otaliastudios/cameraview/engine/metering/AutoWhiteBalance.java (103 changed lines)
  7. cameraview/src/main/java/com/otaliastudios/cameraview/engine/metering/Parameter.java (108 changed lines)

@@ -44,8 +44,13 @@ import com.otaliastudios.cameraview.controls.Mode;
import com.otaliastudios.cameraview.controls.WhiteBalance;
import com.otaliastudios.cameraview.engine.action.Action;
import com.otaliastudios.cameraview.engine.action.ActionHolder;
import com.otaliastudios.cameraview.engine.action.Actions;
import com.otaliastudios.cameraview.engine.action.CompletionCallback;
import com.otaliastudios.cameraview.engine.action.TimeoutAction;
import com.otaliastudios.cameraview.engine.lock.UnlockAction;
import com.otaliastudios.cameraview.engine.mappers.Camera2Mapper;
import com.otaliastudios.cameraview.engine.meter.MeterAction;
import com.otaliastudios.cameraview.engine.meter.MeterResetAction;
import com.otaliastudios.cameraview.engine.offset.Axis;
import com.otaliastudios.cameraview.engine.offset.Reference;
import com.otaliastudios.cameraview.frame.Frame;
@@ -70,14 +75,14 @@ import java.util.concurrent.ExecutionException;
@RequiresApi(Build.VERSION_CODES.LOLLIPOP)
public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAvailableListener,
ActionHolder,
Meter.Callback {
ActionHolder {
private static final String TAG = Camera2Engine.class.getSimpleName();
private static final CameraLogger LOG = CameraLogger.create(TAG);
private static final int FRAME_PROCESSING_FORMAT = ImageFormat.NV21;
private static final int FRAME_PROCESSING_INPUT_FORMAT = ImageFormat.YUV_420_888;
private static final long METER_TIMEOUT = 2500;
private final CameraManager mManager;
private String mCameraId;
@@ -107,12 +112,6 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAvailableListener,
// Actions
private final List<Action> mActions = new ArrayList<>();
// 3A metering
private Meter mMeter;
private Gesture mMeteringGesture;
private PictureResult.Stub mDelayedPictureStub;
private AspectRatio mDelayedPictureRatio;
public Camera2Engine(Callback callback) {
super(callback);
mManager = (CameraManager) mCallback.getContext().getSystemService(Context.CAMERA_SERVICE);
@@ -260,9 +259,6 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAvailableListener,
public void onCaptureProgressed(@NonNull CameraCaptureSession session,
@NonNull CaptureRequest request,
@NonNull CaptureResult partialResult) {
if (mMeter != null && mMeter.isMetering()) {
mMeter.onCapture(partialResult);
}
for (Action action : mActions) {
action.onCaptureProgressed(Camera2Engine.this, request, partialResult);
}
@@ -273,9 +269,6 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAvailableListener,
@NonNull CaptureRequest request,
@NonNull TotalCaptureResult result) {
mLastRepeatingResult = result;
if (mMeter != null && mMeter.isMetering()) {
mMeter.onCapture(result);
}
for (Action action : mActions) {
action.onCaptureProgressed(Camera2Engine.this, request, result);
}
@@ -572,7 +565,6 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAvailableListener,
mVideoRecorder = null;
}
mPictureRecorder = null;
mMeteringResetRunnable.run();
if (hasFrameProcessors()) {
getFrameManager().release();
}
@@ -641,20 +633,27 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAvailableListener,
@WorkerThread
@Override
protected void onTakePictureSnapshot(@NonNull PictureResult.Stub stub, @NonNull AspectRatio outputRatio, boolean doMetering) {
stub.size = getUncroppedSnapshotSize(Reference.OUTPUT); // Not the real size: it will be cropped to match the view ratio
stub.rotation = getAngles().offset(Reference.SENSOR, Reference.OUTPUT, Axis.RELATIVE_TO_SENSOR); // Actually it will be rotated and set to 0.
protected void onTakePictureSnapshot(@NonNull final PictureResult.Stub stub, @NonNull final AspectRatio outputRatio, boolean doMetering) {
if (doMetering) {
LOG.i("onTakePictureSnapshot:", "doMetering is true. Delaying.");
mDelayedPictureStub = stub;
mDelayedPictureRatio = outputRatio;
startMetering(null, null);
Action action = new TimeoutAction(METER_TIMEOUT, createMeterAction(null));
action.addCallback(new CompletionCallback() {
@Override
protected void onActionCompleted(@NonNull Action action) {
onTakePictureSnapshot(stub, outputRatio, false);
}
});
action.start(this);
} else {
LOG.i("onTakePictureSnapshot:", "doMetering is false. Performing.");
if (!(mPreview instanceof GlCameraPreview)) {
throw new RuntimeException("takePictureSnapshot with Camera2 is only " +
"supported with Preview.GL_SURFACE");
}
// stub.size is not the real size: it will be cropped to the given ratio
// stub.rotation will be set to 0 - we rotate the texture instead.
stub.size = getUncroppedSnapshotSize(Reference.OUTPUT);
stub.rotation = getAngles().offset(Reference.SENSOR, Reference.OUTPUT, Axis.RELATIVE_TO_SENSOR);
mPictureRecorder = new Snapshot2PictureRecorder(stub, this,
(GlCameraPreview) mPreview, outputRatio);
mPictureRecorder.take();
@@ -662,15 +661,21 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAvailableListener,
}
@Override
protected void onTakePicture(@NonNull PictureResult.Stub stub, boolean doMetering) {
stub.rotation = getAngles().offset(Reference.SENSOR, Reference.OUTPUT, Axis.RELATIVE_TO_SENSOR);
stub.size = getPictureSize(Reference.OUTPUT);
protected void onTakePicture(@NonNull final PictureResult.Stub stub, boolean doMetering) {
if (doMetering) {
LOG.i("onTakePicture:", "doMetering is true. Delaying.");
mDelayedPictureStub = stub;
startMetering(null, null);
Action action = new TimeoutAction(METER_TIMEOUT, createMeterAction(null));
action.addCallback(new CompletionCallback() {
@Override
protected void onActionCompleted(@NonNull Action action) {
onTakePicture(stub, false);
}
});
action.start(this);
} else {
LOG.i("onTakePicture:", "doMetering is false. Performing.");
stub.rotation = getAngles().offset(Reference.SENSOR, Reference.OUTPUT, Axis.RELATIVE_TO_SENSOR);
stub.size = getPictureSize(Reference.OUTPUT);
try {
if (mPictureCaptureStopsPreview) {
// These two are present in official samples and are probably meant to speed things up?
@@ -702,7 +707,7 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAvailableListener,
boolean unlock = (fullPicture && getPictureMetering()) ||
(!fullPicture && getPictureSnapshotMetering());
if (unlock) {
mMeteringResetRunnable.run();
unlockAndResetMetering();
}
}
@@ -1212,138 +1217,68 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAvailableListener,
@Override
public void startAutoFocus(@Nullable final Gesture gesture, @NonNull final PointF point) {
startMetering(gesture, point);
}
private void startMetering(@Nullable final Gesture gesture, @Nullable final PointF point) {
LOG.i("startMetering", "dispatching. Gesture:", gesture);
LOG.i("startAutoFocus", "dispatching. Gesture:", gesture);
mHandler.run(new Runnable() {
@Override
public void run() {
LOG.i("startMetering", "executing. Preview state:", getPreviewState());
LOG.i("startAutoFocus", "executing. Preview state:", getPreviewState());
// This will only work when we have a preview, since it launches the preview in the end.
// Even without this it would need the bind state at least, since we need the preview size.
if (getPreviewState() < STATE_STARTED) return;
// The camera options API still has the auto focus API but it really
// refers to "3A metering to a specific point". So if we have one, let's check.
if (point != null && !mCameraOptions.isAutoFocusSupported()) return;
// refers to "3A metering to a specific point". Since we have a point, check.
if (!mCameraOptions.isAutoFocusSupported()) return;
// Reset the old meter and locker if present.
mMeteringResetRunnable.run();
// The meter will check the current configuration to see if AF/AE/AWB should run.
// - AE should be on CONTROL_AE_MODE_ON* (this depends on setFlash())
// - AWB should be on CONTROL_AWB_MODE_AUTO (this depends on setWhiteBalance())
// - AF should be on CONTROL_AF_MODE_AUTO or others
// The last one is under our control because the library has no focus API.
// So let's set a good af mode here. This operation is reverted during onMeteringReset().
// TODO applyFocusForMetering(mRepeatingRequestBuilder);
// TODO applyRepeatingRequestBuilder();
// TODO implement this with abort() API!
// mMeteringResetRunnable.run();
// Create the meter and start.
mMeteringGesture = gesture;
mMeter = new Meter(Camera2Engine.this,
mCameraCharacteristics,
Camera2Engine.this);
mMeter.startMetering(mLastRepeatingResult, point, point == null);
mCallback.dispatchOnFocusStart(gesture, point);
final MeterAction action = createMeterAction(point);
final TimeoutAction wrapper = new TimeoutAction(METER_TIMEOUT, action);
wrapper.start(Camera2Engine.this);
wrapper.addCallback(new CompletionCallback() {
@Override
protected void onActionCompleted(@NonNull Action a) {
mCallback.dispatchOnFocusEnd(gesture, action.isSuccessful(), point);
mHandler.remove(mUnlockAndResetMeteringRunnable);
if (shouldResetAutoFocus()) {
mHandler.post(getAutoFocusResetDelay(), mUnlockAndResetMeteringRunnable);
}
}
});
}
});
}
private final Runnable mMeteringResetRunnable = new Runnable() {
@NonNull
private MeterAction createMeterAction(@Nullable PointF point) {
// The meter will check the current configuration to see if AF/AE/AWB should run.
// - AE should be on CONTROL_AE_MODE_ON* (this depends on setFlash())
// - AWB should be on CONTROL_AWB_MODE_AUTO (this depends on setWhiteBalance())
// - AF should be on CONTROL_AF_MODE_AUTO or others
// The last one is under our control because the library has no focus API.
// So let's set a good af mode here. This operation is reverted during onMeteringReset().
applyFocusForMetering(mRepeatingRequestBuilder);
return new MeterAction(Camera2Engine.this, point, point == null);
}
private final Runnable mUnlockAndResetMeteringRunnable = new Runnable() {
@Override
public void run() {
unlockMetering();
if (mMeter != null) {
mMeter.resetMetering();
mMeter = null;
}
unlockAndResetMetering();
}
};
/**
* Called by {@link Meter} when the metering process has started.
* We are currently exposing an auto focus API so that's what we dispatch.
* @param point point
*/
@Override
public void onMeteringStarted(@Nullable PointF point) {
LOG.w("onMeteringStarted - point:", point, "gesture:", mMeteringGesture);
if (point != null) {
mCallback.dispatchOnFocusStart(mMeteringGesture, point);
}
}
/**
* Called by {@link Meter} when the metering process has ended.
* We are currently exposing an auto focus API so that's what we dispatch.
* @param point point
* @param success success
*/
@Override
public void onMeteringEnd(@Nullable PointF point, boolean success) {
LOG.w("onMeteringEnd - point:", point,
"gesture:", mMeteringGesture,
"success:", success);
if (point != null) {
mCallback.dispatchOnFocusEnd(mMeteringGesture, success, point);
mHandler.remove(mMeteringResetRunnable);
if (shouldResetAutoFocus()) {
mHandler.post(getAutoFocusResetDelay(), mMeteringResetRunnable);
}
} else {
LOG.w("onMeteringEnd - restoring the picture capturing. isSnapshot:", mDelayedPictureStub.isSnapshot);
if (mDelayedPictureStub.isSnapshot) {
onTakePictureSnapshot(mDelayedPictureStub, mDelayedPictureRatio, false);
} else {
onTakePicture(mDelayedPictureStub, false);
}
mDelayedPictureStub = null;
mDelayedPictureRatio = null;
}
}
/**
* Called by {@link Meter} after resetting the metering parameters.
* We should apply them, and also go back to default focus.
* @param point point
*
*/
@Override
public void onMeteringReset(@Nullable PointF point) {
if (getEngineState() == STATE_STARTED) {
// TODO applyDefaultFocus(mRepeatingRequestBuilder);
// TODO applyRepeatingRequestBuilder(); // only if preview started already
}
}
@NonNull
@Override
public CaptureRequest.Builder getMeteringBuilder() {
return mRepeatingRequestBuilder;
}
@Override
public void onMeteringChange(boolean single) {
LOG.i("onMeteringChange:", "applying the builder.");
if (single) {
applyRepeatingRequestBuilderAsSingle();
} else {
applyRepeatingRequestBuilder();
}
}
//endregion
//region 3A Locking
// TODO this might become public API
// TODO add lockMetering
private void unlockMetering() {
private void unlockAndResetMetering() {
if (getEngineState() == STATE_STARTED) {
applyDefaultFocus(mRepeatingRequestBuilder);
new UnlockAction().start(this);
Actions.sequence(
new UnlockAction(),
new MeterResetAction(true)
).start(Camera2Engine.this);
}
}
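
For orientation, the pattern that replaces the old Meter in this file can be sketched using only the calls visible in the hunks above: a MeterAction wrapped in a TimeoutAction, a CompletionCallback that resumes the capture, and an Actions.sequence for the unlock/reset. The helper names below are hypothetical, the comments hedge where the diff does not spell out semantics, and the sketch is not part of the commit itself.

    // Hypothetical helper inside Camera2Engine, mirroring the onTakePicture() branch above.
    private void meterThenCapture(@NonNull final PictureResult.Stub stub) {
        MeterAction meter = createMeterAction(null);            // null point: no specific touch point
        Action timed = new TimeoutAction(METER_TIMEOUT, meter); // stop waiting after METER_TIMEOUT (presumably ms)
        timed.addCallback(new CompletionCallback() {
            @Override
            protected void onActionCompleted(@NonNull Action action) {
                onTakePicture(stub, false);                     // resume the capture without metering again
            }
        });
        timed.start(this);                                      // Camera2Engine acts as the ActionHolder
    }

    // Hypothetical mirror of unlockAndResetMetering() above.
    private void sketchUnlockAndReset() {
        Actions.sequence(
                new UnlockAction(),          // presumably releases the 3A locks
                new MeterResetAction(true)   // presumably restores default metering; boolean as used above
        ).start(this);
    }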

@@ -1,369 +0,0 @@
package com.otaliastudios.cameraview.engine;
import android.graphics.PointF;
import android.graphics.Rect;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.CaptureResult;
import android.hardware.camera2.TotalCaptureResult;
import android.hardware.camera2.params.MeteringRectangle;
import android.os.Build;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.annotation.RequiresApi;
import com.otaliastudios.cameraview.CameraLogger;
import com.otaliastudios.cameraview.engine.metering.AutoExposure;
import com.otaliastudios.cameraview.engine.metering.AutoFocus;
import com.otaliastudios.cameraview.engine.metering.AutoWhiteBalance;
import com.otaliastudios.cameraview.engine.metering.Parameter;
import com.otaliastudios.cameraview.engine.offset.Axis;
import com.otaliastudios.cameraview.engine.offset.Reference;
import com.otaliastudios.cameraview.size.AspectRatio;
import com.otaliastudios.cameraview.size.Size;
import java.util.ArrayList;
import java.util.List;
/**
* Helps Camera2-based engines to perform 3A (auto focus, auto exposure and auto white balance)
* metering. Users are required to:
*
* - Call {@link #startMetering(CaptureResult, PointF, boolean)} to start
* - Call {@link #onCapture(CaptureResult)} when they have partial or total results, as long as the
* meter is still in a metering operation, which can be checked through {@link #isMetering()}
* - Call {@link #resetMetering()} to reset the metering parameters if needed.
*/
@RequiresApi(Build.VERSION_CODES.LOLLIPOP)
public class Meter {
/**
* The meter callback.
*/
public interface Callback extends Parameter.MeteringChangeCallback {
/**
* Notifies that metering has started. At this point implementors should apply
* the builder onto the preview.
* @param point point
*
*/
void onMeteringStarted(@Nullable PointF point);
/**
* Notifies that metering has ended. No action is required for implementors.
* From now on, {@link #isMetering()} will return false so the meter should not
* be passed capture results anymore.
* @param point point
* @param success success
*/
void onMeteringEnd(@Nullable PointF point, boolean success);
/**
* Notifies that metering has been reset. From now on, this meter instance
* is done, although in theory it could be reused by calling
* {@link #startMetering(CaptureResult, PointF, boolean)} again.
* @param point point
*
*/
void onMeteringReset(@Nullable PointF point);
/**
* Returns the currently used builder. This can change while a metering
* operation happens, so the meter will never cache this value.
* It is the engine's responsibility to copy over values to the new builder
* when it changes.
* @return a builder
*/
@NonNull
CaptureRequest.Builder getMeteringBuilder();
}
private static final String TAG = Meter.class.getSimpleName();
private static final CameraLogger LOG = CameraLogger.create(TAG);
private static final int FORCED_END_DELAY = 2500;
private final CameraEngine mEngine;
private final CameraCharacteristics mCharacteristics;
private final Callback mCallback;
private PointF mPoint;
private boolean mIsMetering;
private long mMeteringStartTime;
private Parameter mAutoFocus;
private Parameter mAutoWhiteBalance;
private Parameter mAutoExposure;
/**
* Creates a new meter.
* @param engine the engine
* @param characteristics the camera characteristics
* @param callback the callback
*/
@SuppressWarnings("WeakerAccess")
public Meter(@NonNull CameraEngine engine,
@NonNull CameraCharacteristics characteristics,
@NonNull Callback callback) {
mEngine = engine;
mCharacteristics = characteristics;
mCallback = callback;
mAutoFocus = new AutoFocus(callback);
mAutoExposure = new AutoExposure(callback);
mAutoWhiteBalance = new AutoWhiteBalance(callback);
}
/**
* Starts a metering sequence.
* @param lastResult the last result
* @param point point
* @param skipIfPossible try skip
*/
@SuppressWarnings("WeakerAccess")
public void startMetering(@NonNull CaptureResult lastResult,
@Nullable PointF point,
boolean skipIfPossible) {
mPoint = point;
mIsMetering = true;
List<MeteringRectangle> areas = new ArrayList<>();
if (point != null) {
// This is a good Q/A. https://stackoverflow.com/a/33181620/4288782
// At first, the point is relative to the View system and does not account for our own cropping.
// Will keep updating these two below.
final PointF referencePoint = new PointF(mPoint.x, mPoint.y);
Size referenceSize = mEngine.mPreview.getSurfaceSize();
// 1. Account for cropping.
// This will enlarge the preview size so that aspect ratio matches.
referenceSize = applyPreviewCropping(referenceSize, referencePoint);
// 2. Scale to the preview stream coordinates.
// This will move to the preview stream coordinates by scaling.
referenceSize = applyPreviewScale(referenceSize, referencePoint);
// 3. Rotate to the stream coordinate system.
// This leaves us with sensor stream coordinates.
referenceSize = applyPreviewToSensorRotation(referenceSize, referencePoint);
// 4. Move to the crop region coordinate system.
// The crop region is the union of all currently active streams.
referenceSize = applyCropRegionCoordinates(referenceSize, referencePoint);
// 5. Move to the active array coordinate system.
referenceSize = applyActiveArrayCoordinates(referenceSize, referencePoint);
// 6. Now we can compute the metering regions.
// We want to define them as a fraction of the visible size which (apart from cropping)
// can be obtained through the SENSOR rotated preview stream size.
Size visibleSize = mEngine.getPreviewStreamSize(Reference.SENSOR);
//noinspection ConstantConditions
MeteringRectangle area1 = createMeteringRectangle(referenceSize, referencePoint,
visibleSize, 0.05F, 1000);
MeteringRectangle area2 = createMeteringRectangle(referenceSize, referencePoint,
visibleSize, 0.1F, 100);
areas.add(area1);
areas.add(area2);
}
// 7. And finally dispatch everything
// TODO mAutoFocus.startMetering(mCharacteristics, mCallback.getMeteringBuilder(), areas, lastResult, skipIfPossible);
// TODO mAutoWhiteBalance.startMetering(mCharacteristics, mCallback.getMeteringBuilder(), areas, lastResult, skipIfPossible);
mAutoExposure.startMetering(mCharacteristics, mCallback.getMeteringBuilder(), areas, lastResult, skipIfPossible);
// Dispatch to callback
mCallback.onMeteringStarted(mPoint);
mMeteringStartTime = System.currentTimeMillis();
}
@SuppressWarnings("UnnecessaryLocalVariable")
@NonNull
private Size applyPreviewCropping(@NonNull Size referenceSize, @NonNull PointF referencePoint) {
Size previewStreamSize = mEngine.getPreviewStreamSize(Reference.VIEW);
Size previewSurfaceSize = referenceSize;
if (previewStreamSize == null) {
throw new IllegalStateException("getPreviewStreamSize should not be null at this point.");
}
int referenceWidth = previewSurfaceSize.getWidth();
int referenceHeight = previewSurfaceSize.getHeight();
AspectRatio previewStreamAspectRatio = AspectRatio.of(previewStreamSize);
AspectRatio previewSurfaceAspectRatio = AspectRatio.of(previewSurfaceSize);
if (mEngine.mPreview.isCropping()) {
if (previewStreamAspectRatio.toFloat() > previewSurfaceAspectRatio.toFloat()) {
// Stream is larger. The x coordinate must be increased: a touch on the left side
// of the surface is not on the left side of the stream (it's more to the right).
float scale = previewStreamAspectRatio.toFloat() / previewSurfaceAspectRatio.toFloat();
referencePoint.x += previewSurfaceSize.getWidth() * (scale - 1F) / 2F;
referenceWidth = Math.round(previewSurfaceSize.getWidth() * scale);
} else {
// Stream is taller. The y coordinate must be increased: a touch on the top side
// of the surface is not on the top side of the stream (it's a bit lower).
float scale = previewSurfaceAspectRatio.toFloat() / previewStreamAspectRatio.toFloat();
referencePoint.y += previewSurfaceSize.getHeight() * (scale - 1F) / 2F;
referenceHeight = Math.round(previewSurfaceSize.getHeight() * scale);
}
}
return new Size(referenceWidth, referenceHeight);
}
@SuppressWarnings("ConstantConditions")
@NonNull
private Size applyPreviewScale(@NonNull Size referenceSize, @NonNull PointF referencePoint) {
// The referenceSize now has the same aspect ratio as the previewStreamSize, but the two
// can still differ in size (that is, a scale operation is needed).
Size previewStreamSize = mEngine.getPreviewStreamSize(Reference.VIEW);
referencePoint.x *= (float) previewStreamSize.getWidth() / referenceSize.getWidth();
referencePoint.y *= (float) previewStreamSize.getHeight() / referenceSize.getHeight();
return previewStreamSize;
}
@SuppressWarnings("SuspiciousNameCombination")
@NonNull
private Size applyPreviewToSensorRotation(@NonNull Size referenceSize, @NonNull PointF referencePoint) {
// Not elegant, but the sin/cos way was failing for some reason.
int angle = mEngine.getAngles().offset(Reference.SENSOR, Reference.VIEW, Axis.ABSOLUTE);
boolean flip = angle % 180 != 0;
float tempX = referencePoint.x;
float tempY = referencePoint.y;
if (angle == 0) {
referencePoint.x = tempX;
referencePoint.y = tempY;
} else if (angle == 90) {
referencePoint.x = tempY;
referencePoint.y = referenceSize.getWidth() - tempX;
} else if (angle == 180) {
referencePoint.x = referenceSize.getWidth() - tempX;
referencePoint.y = referenceSize.getHeight() - tempY;
} else if (angle == 270) {
referencePoint.x = referenceSize.getHeight() - tempY;
referencePoint.y = tempX;
} else {
throw new IllegalStateException("Unexpected angle " + angle);
}
return flip ? referenceSize.flip() : referenceSize;
}
@NonNull
private Size applyCropRegionCoordinates(@NonNull Size referenceSize, @NonNull PointF referencePoint) {
// The input point and size refer to the stream rect.
// The stream rect is part of the 'crop region', as described below.
// https://source.android.com/devices/camera/camera3_crop_reprocess.html
Rect cropRect = mCallback.getMeteringBuilder().get(CaptureRequest.SCALER_CROP_REGION);
// For now, we don't care about x and y position. Rect should be non-null, but let's be safe.
int cropRectWidth = cropRect == null ? referenceSize.getWidth() : cropRect.width();
int cropRectHeight = cropRect == null ? referenceSize.getHeight() : cropRect.height();
// The stream is always centered inside the crop region, and one of the dimensions
// should always match. We just increase the other one.
referencePoint.x += (cropRectWidth - referenceSize.getWidth()) / 2F;
referencePoint.y += (cropRectHeight - referenceSize.getHeight()) / 2F;
return new Size(cropRectWidth, cropRectHeight);
}
@NonNull
private Size applyActiveArrayCoordinates(@NonNull Size referenceSize, @NonNull PointF referencePoint) {
// The input point and size refer to the scaler crop region.
// We can query for the crop region position inside the active array, so this is easy.
Rect cropRect = mCallback.getMeteringBuilder().get(CaptureRequest.SCALER_CROP_REGION);
referencePoint.x += cropRect == null ? 0 : cropRect.left;
referencePoint.y += cropRect == null ? 0 : cropRect.top;
// Finally, get the active rect width and height from characteristics.
Rect activeRect = mCharacteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
if (activeRect == null) { // Should never happen
activeRect = new Rect(0, 0, referenceSize.getWidth(), referenceSize.getHeight());
}
return new Size(activeRect.width(), activeRect.height());
}
/**
* Creates a metering rectangle around the center point.
* The rectangle will have a size that's a factor of the visible width and height.
* The rectangle will also be constrained to be inside the given boundaries,
* so we don't exceed them in case the center point is exactly on one side for example.
* @return a new rectangle
*/
@NonNull
private MeteringRectangle createMeteringRectangle(
@NonNull Size boundaries,
@NonNull PointF center,
@NonNull Size visibleSize,
float factor,
int weight) {
float rectangleWidth = factor * visibleSize.getWidth();
float rectangleHeight = factor * visibleSize.getHeight();
float rectangleLeft = center.x - rectangleWidth / 2F;
float rectangleTop = center.y - rectangleHeight / 2F;
// Respect boundaries
if (rectangleLeft < 0) rectangleLeft = 0;
if (rectangleTop < 0) rectangleTop = 0;
if (rectangleLeft + rectangleWidth > boundaries.getWidth()) {
rectangleWidth = boundaries.getWidth() - rectangleLeft;
}
if (rectangleTop + rectangleHeight > boundaries.getHeight()) {
rectangleHeight = boundaries.getHeight() - rectangleTop;
}
return new MeteringRectangle(
(int) rectangleLeft,
(int) rectangleTop,
(int) rectangleWidth,
(int) rectangleHeight,
weight
);
}
/**
* True if we're metering. False if we're not, for example if we're waiting for
* a reset call, or if {@link #startMetering(CaptureResult, PointF, boolean)} was never called.
* @return true if metering
*/
public boolean isMetering() {
return mIsMetering;
}
/**
* Should be called when we have partial or total CaptureResults,
* but only while {@link #isMetering()} returns true.
* @param result result
*/
public void onCapture(@NonNull CaptureResult result) {
if (!mIsMetering) return; // We're not interested in results anymore
if (!(result instanceof TotalCaptureResult)) return; // Let's ignore these, contents are missing/wrong
// TODO if (!mAutoFocus.isMetered()) mAutoFocus.onCapture(mCallback.getMeteringBuilder(), result);
if (!mAutoExposure.isMetered()) mAutoExposure.onCapture(mCallback.getMeteringBuilder(), result);
// TODO if (!mAutoWhiteBalance.isMetered()) mAutoWhiteBalance.onCapture(mCallback.getMeteringBuilder(), result);
if (/* TODO mAutoFocus.isMetered() && */ mAutoExposure.isMetered() /* && mAutoWhiteBalance.isMetered() */) {
LOG.i("onCapture:", "all Parameters have converged. Dispatching onMeteringEnd");
boolean success = /* TODO mAutoFocus.isSuccessful()
&& */ mAutoExposure.isSuccessful()
/* TODO && mAutoWhiteBalance.isSuccessful() */;
onMeteringEnd(success);
} else if (System.currentTimeMillis() - mMeteringStartTime >= FORCED_END_DELAY) {
LOG.e("onCapture:", "FORCED_END_DELAY was reached. Some Parameter is stuck. Forcing end.");
onMeteringEnd(false);
}
}
private void onMeteringEnd(boolean success) {
mCallback.onMeteringEnd(mPoint, success);
mIsMetering = false;
}
/**
* Can be called to perform the reset.
*/
@SuppressWarnings("WeakerAccess")
public void resetMetering() {
LOG.i("Resetting the meter parameters.");
MeteringRectangle whole = null;
if (mPoint != null) {
// If we have a point, we must reset the metering areas.
Rect wholeRect = mCharacteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
if (wholeRect == null) wholeRect = new Rect();
whole = new MeteringRectangle(wholeRect, MeteringRectangle.METERING_WEIGHT_DONT_CARE);
}
// TODO mAutoFocus.resetMetering(mCharacteristics, mCallback.getMeteringBuilder(), whole);
// TODO mAutoWhiteBalance.resetMetering(mCharacteristics, mCallback.getMeteringBuilder(), whole);
mAutoExposure.resetMetering(mCharacteristics, mCallback.getMeteringBuilder(), whole);
mCallback.onMeteringReset(mPoint);
}
}
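
The createMeteringRectangle() logic in the removed Meter above is easy to lift into a standalone form. Below is a sketch under the assumption that plain android.util.Size and android.graphics.PointF replace the library's own types; the helper name is hypothetical.

    import android.graphics.PointF;
    import android.hardware.camera2.params.MeteringRectangle;
    import android.util.Size;

    // Hypothetical standalone version of createMeteringRectangle() above: a rectangle sized as a
    // fraction of the visible area, centered on the touch point, clamped inside the boundaries.
    static MeteringRectangle centeredRectangle(Size boundaries, PointF center, Size visibleSize,
                                               float factor, int weight) {
        float width = factor * visibleSize.getWidth();
        float height = factor * visibleSize.getHeight();
        float left = Math.max(0F, center.x - width / 2F);
        float top = Math.max(0F, center.y - height / 2F);
        // Shrink the rectangle if it spills over the right/bottom boundary.
        if (left + width > boundaries.getWidth()) width = boundaries.getWidth() - left;
        if (top + height > boundaries.getHeight()) height = boundaries.getHeight() - top;
        return new MeteringRectangle((int) left, (int) top, (int) width, (int) height, weight);
    }

As in the removed code, two such rectangles (a small high-weight one and a larger low-weight one) would then be handed to the 3A routines as metering regions.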

@@ -8,12 +8,10 @@ import android.hardware.camera2.params.MeteringRectangle;
import android.os.Build;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.annotation.RequiresApi;
import com.otaliastudios.cameraview.CameraLogger;
import com.otaliastudios.cameraview.engine.action.ActionHolder;
import com.otaliastudios.cameraview.engine.metering.Parameter;
import java.util.List;

@@ -1,188 +0,0 @@
package com.otaliastudios.cameraview.engine.metering;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.CaptureResult;
import android.hardware.camera2.params.MeteringRectangle;
import android.os.Build;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.annotation.RequiresApi;
import com.otaliastudios.cameraview.CameraLogger;
import java.util.List;
@RequiresApi(Build.VERSION_CODES.LOLLIPOP)
public class AutoExposure extends Parameter {
private static final String TAG = AutoExposure.class.getSimpleName();
private static final CameraLogger LOG = CameraLogger.create(TAG + "Metering");
public AutoExposure(@NonNull MeteringChangeCallback callback) {
super(callback);
}
private boolean isStarted;
@Override
protected boolean checkSupportsProcessing(@NonNull CameraCharacteristics characteristics,
@NonNull CaptureRequest.Builder builder) {
// In our case, this means checking if we support the AE precapture trigger.
boolean isNotLegacy = readCharacteristic(characteristics,
CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL, -1) !=
CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY;
Integer aeMode = builder.get(CaptureRequest.CONTROL_AE_MODE);
boolean isAEOn = aeMode != null &&
(aeMode == CameraCharacteristics.CONTROL_AE_MODE_ON
|| aeMode == CameraCharacteristics.CONTROL_AE_MODE_ON_ALWAYS_FLASH
|| aeMode == CameraCharacteristics.CONTROL_AE_MODE_ON_AUTO_FLASH
|| aeMode == CameraCharacteristics.CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE
|| aeMode == 5 /* CameraCharacteristics.CONTROL_AE_MODE_ON_EXTERNAL_FLASH, API 28 */);
boolean result = isNotLegacy && isAEOn;
LOG.i("checkSupportsProcessing:", result);
return result;
}
@Override
protected boolean checkShouldSkip(@NonNull CaptureResult lastResult) {
Integer aeState = lastResult.get(CaptureResult.CONTROL_AE_STATE);
boolean result = aeState != null && aeState == CaptureResult.CONTROL_AE_STATE_CONVERGED;
LOG.i("checkShouldSkip:", result);
return result;
}
@Override
protected void onStartMetering(@NonNull CameraCharacteristics characteristics,
@NonNull CaptureRequest.Builder builder,
@NonNull List<MeteringRectangle> areas,
boolean supportsProcessing) {
isStarted = false;
boolean changed = false;
if (supportsProcessing) {
// Launch the precapture trigger.
builder.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER,
CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER_START);
changed = true;
}
// Even if precapture is not supported, check the regions anyway.
int maxRegions = readCharacteristic(characteristics,
CameraCharacteristics.CONTROL_MAX_REGIONS_AE, 0);
if (!areas.isEmpty() && maxRegions > 0) {
int max = Math.min(maxRegions, areas.size());
builder.set(CaptureRequest.CONTROL_AE_REGIONS,
areas.subList(0, max).toArray(new MeteringRectangle[]{}));
changed = true;
}
if (changed) {
notifyBuilderChanged(false);
}
}
@Override
public void processCapture(@NonNull CaptureResult result) {
Integer aeState = result.get(CaptureResult.CONTROL_AE_STATE);
Integer aeTriggerState = result.get(CaptureResult.CONTROL_AE_PRECAPTURE_TRIGGER);
LOG.i("onCapture:", "aeState:", aeState, "aeTriggerState:", aeTriggerState);
if (aeState == null) return;
if (!isStarted) {
switch (aeState) {
case CaptureResult.CONTROL_AE_STATE_PRECAPTURE: {
isStarted = true;
break;
}
case CaptureResult.CONTROL_AE_STATE_CONVERGED:
case CaptureResult.CONTROL_AE_STATE_FLASH_REQUIRED: {
// PRECAPTURE is a transient state. Being here might mean that precapture ran
// and was successful, OR that the trigger was not even received yet. To
// distinguish, check the trigger state.
if (aeTriggerState != null
&& aeTriggerState == CaptureResult.CONTROL_AE_PRECAPTURE_TRIGGER_START) {
notifyMetered(true);
}
break;
}
case CaptureResult.CONTROL_AE_STATE_LOCKED: {
// There's nothing we can do, AE was locked, triggers are ignored.
notifyMetered(false);
break;
}
case CaptureResult.CONTROL_AE_STATE_INACTIVE:
case CaptureResult.CONTROL_AE_STATE_SEARCHING: {
// Wait...
break;
}
}
} else {
switch (aeState) {
case CaptureResult.CONTROL_AE_STATE_CONVERGED:
case CaptureResult.CONTROL_AE_STATE_FLASH_REQUIRED: {
notifyMetered(true);
break;
}
case CaptureResult.CONTROL_AE_STATE_LOCKED: {
// There's nothing we can do, AE was locked, triggers are ignored.
notifyMetered(false);
break;
}
case CaptureResult.CONTROL_AE_STATE_PRECAPTURE:
case CaptureResult.CONTROL_AE_STATE_INACTIVE:
case CaptureResult.CONTROL_AE_STATE_SEARCHING: {
// Wait...
break;
}
}
}
}
@Override
protected void onMetered(@NonNull CaptureRequest.Builder builder, boolean success) {
// Undo the trigger.
/* TODO thinking about it. int newTrigger = Build.VERSION.SDK_INT >= 23
? CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER_CANCEL
: CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER_IDLE; */
builder.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER, null);
// builder.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER, CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER_IDLE);
// notifyBuilderChanged();
}
@Override
protected void onResetMetering(@NonNull CameraCharacteristics characteristics,
@NonNull CaptureRequest.Builder builder,
@Nullable MeteringRectangle area,
boolean supportsProcessing) {
boolean changed = false;
int maxRegions = readCharacteristic(characteristics,
CameraCharacteristics.CONTROL_MAX_REGIONS_AE, 0);
if (area != null && maxRegions > 0) {
builder.set(CaptureRequest.CONTROL_AE_REGIONS, new MeteringRectangle[]{area});
changed = true;
}
if (supportsProcessing) {
Integer trigger = builder.get(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER);
LOG.w("onResetMetering:", "current precapture trigger is", trigger);
if (trigger == null || trigger == CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER_START) {
// Undo the trigger. This might happen if we can't meter in time / reset called before.
LOG.w("onResetMetering:", "canceling precapture.");
int newTrigger = Build.VERSION.SDK_INT >= 23
? CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER_CANCEL
: CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
builder.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER, newTrigger);
builder.set(CaptureRequest.CONTROL_AE_LOCK, true);
notifyBuilderChanged(true);
builder.set(CaptureRequest.CONTROL_AE_LOCK, false);
changed = true;
}
}
if (changed) {
notifyBuilderChanged(false);
}
}
}
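
The AE handling in the removed class above boils down to watching CONTROL_AE_STATE after firing the precapture trigger. A simplified distillation follows; it uses real CaptureResult constants but collapses the trigger-state double-check into a single precaptureSeen flag, so treat it as a sketch rather than the class's exact behavior.

    import android.hardware.camera2.CaptureResult;

    // Hypothetical distillation of the processCapture() logic above.
    final class AeStateSketch {
        enum Outcome { WAIT, CONVERGED, FAILED }

        static Outcome evaluate(Integer aeState, boolean precaptureSeen) {
            if (aeState == null) return Outcome.WAIT;
            switch (aeState) {
                case CaptureResult.CONTROL_AE_STATE_CONVERGED:
                case CaptureResult.CONTROL_AE_STATE_FLASH_REQUIRED:
                    // Before precapture is observed this can also mean "trigger not consumed yet",
                    // which the removed class disambiguated via CONTROL_AE_PRECAPTURE_TRIGGER.
                    return precaptureSeen ? Outcome.CONVERGED : Outcome.WAIT;
                case CaptureResult.CONTROL_AE_STATE_LOCKED:
                    return Outcome.FAILED; // AE is locked: the precapture trigger is ignored.
                default:
                    return Outcome.WAIT;   // INACTIVE, SEARCHING, PRECAPTURE: keep waiting.
            }
        }
    }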

@@ -1,130 +0,0 @@
package com.otaliastudios.cameraview.engine.metering;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.CaptureResult;
import android.hardware.camera2.params.MeteringRectangle;
import android.os.Build;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.annotation.RequiresApi;
import com.otaliastudios.cameraview.CameraLogger;
import java.util.List;
@RequiresApi(Build.VERSION_CODES.LOLLIPOP)
public class AutoFocus extends Parameter {
private static final String TAG = AutoFocus.class.getSimpleName();
private static final CameraLogger LOG = CameraLogger.create(TAG + "Metering");
public AutoFocus(@NonNull MeteringChangeCallback callback) {
super(callback);
}
@Override
protected boolean checkSupportsProcessing(@NonNull CameraCharacteristics characteristics,
@NonNull CaptureRequest.Builder builder) {
// Exclude OFF and EDOF as per docs.
Integer afMode = builder.get(CaptureRequest.CONTROL_AF_MODE);
boolean result = afMode != null &&
(afMode == CameraCharacteristics.CONTROL_AF_MODE_AUTO
|| afMode == CameraCharacteristics.CONTROL_AF_MODE_CONTINUOUS_PICTURE
|| afMode == CameraCharacteristics.CONTROL_AF_MODE_CONTINUOUS_VIDEO
|| afMode == CameraCharacteristics.CONTROL_AF_MODE_MACRO);
LOG.i("checkSupportsProcessing:", result);
return result;
}
@Override
protected boolean checkShouldSkip(@NonNull CaptureResult lastResult) {
Integer afState = lastResult.get(CaptureResult.CONTROL_AF_STATE);
boolean result = afState != null &&
(afState == CaptureResult.CONTROL_AF_STATE_FOCUSED_LOCKED ||
afState == CaptureResult.CONTROL_AF_STATE_PASSIVE_FOCUSED);
LOG.i("checkShouldSkip:", result);
return result;
}
@Override
protected void onStartMetering(@NonNull CameraCharacteristics characteristics,
@NonNull CaptureRequest.Builder builder,
@NonNull List<MeteringRectangle> areas,
boolean supportsProcessing) {
boolean changed = false;
if (supportsProcessing) {
builder.set(CaptureRequest.CONTROL_AF_TRIGGER, CaptureRequest.CONTROL_AF_TRIGGER_START);
changed = true;
}
// Even if auto is not supported, change the regions anyway.
int maxRegions = readCharacteristic(characteristics,
CameraCharacteristics.CONTROL_MAX_REGIONS_AF, 0);
if (!areas.isEmpty() && maxRegions > 0) {
int max = Math.min(maxRegions, areas.size());
builder.set(CaptureRequest.CONTROL_AF_REGIONS,
areas.subList(0, max).toArray(new MeteringRectangle[]{}));
changed = true;
}
if (changed) {
notifyBuilderChanged(false);
}
}
@Override
public void processCapture(@NonNull CaptureResult result) {
Integer afState = result.get(CaptureResult.CONTROL_AF_STATE);
LOG.i("onCapture:", "afState:", afState);
if (afState == null) return;
switch (afState) {
case CaptureRequest.CONTROL_AF_STATE_FOCUSED_LOCKED: {
notifyMetered(true);
break;
}
case CaptureRequest.CONTROL_AF_STATE_NOT_FOCUSED_LOCKED: {
notifyMetered(false);
break;
}
case CaptureRequest.CONTROL_AF_STATE_INACTIVE: break;
case CaptureRequest.CONTROL_AF_STATE_ACTIVE_SCAN: break;
default: break;
}
}
@Override
protected void onMetered(@NonNull CaptureRequest.Builder builder, boolean success) {
// TODO thinking about this
builder.set(CaptureRequest.CONTROL_AF_TRIGGER, null);
// builder.set(CaptureRequest.CONTROL_AF_TRIGGER, CaptureRequest.CONTROL_AF_TRIGGER_CANCEL);
// notifyBuilderChanged();
}
@Override
protected void onResetMetering(@NonNull CameraCharacteristics characteristics,
@NonNull CaptureRequest.Builder builder,
@Nullable MeteringRectangle area,
boolean supportsProcessing) {
boolean changed = false;
int maxRegions = readCharacteristic(characteristics,
CameraCharacteristics.CONTROL_MAX_REGIONS_AF, 0);
if (area != null && maxRegions > 0) {
builder.set(CaptureRequest.CONTROL_AF_REGIONS, new MeteringRectangle[]{area});
changed = true;
}
if (supportsProcessing) { // Cleanup any trigger.
Integer trigger = builder.get(CaptureRequest.CONTROL_AF_TRIGGER);
LOG.w("onResetMetering:", "current focus trigger is", trigger);
if (trigger == null || trigger == CaptureRequest.CONTROL_AF_TRIGGER_START) {
builder.set(CaptureRequest.CONTROL_AF_TRIGGER, CaptureRequest.CONTROL_AF_TRIGGER_CANCEL);
changed = true;
}
}
if (changed) {
notifyBuilderChanged(false);
}
}
}
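
For the auto-focus counterpart, the essential Camera2 calls are one trigger round on the request builder plus a check of CONTROL_AF_STATE. A sketch with real framework constants; the helper class and the re-submission step are assumptions, not this library's API.

    import android.hardware.camera2.CaptureRequest;
    import android.hardware.camera2.CaptureResult;
    import android.hardware.camera2.params.MeteringRectangle;

    // Hypothetical helpers showing a single touch-to-focus trigger round.
    final class AfSketch {
        // Arm one AF trigger around the given regions; the caller must re-submit the repeating request.
        static void trigger(CaptureRequest.Builder builder, MeteringRectangle[] regions) {
            builder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_AUTO);
            builder.set(CaptureRequest.CONTROL_AF_REGIONS, regions);
            builder.set(CaptureRequest.CONTROL_AF_TRIGGER, CaptureRequest.CONTROL_AF_TRIGGER_START);
        }

        // True once AF has settled, whether or not it actually focused (see processCapture() above).
        static boolean isDone(CaptureResult result) {
            Integer afState = result.get(CaptureResult.CONTROL_AF_STATE);
            return afState != null
                    && (afState == CaptureResult.CONTROL_AF_STATE_FOCUSED_LOCKED
                        || afState == CaptureResult.CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
        }
    }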

@@ -1,103 +0,0 @@
package com.otaliastudios.cameraview.engine.metering;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.CaptureResult;
import android.hardware.camera2.params.MeteringRectangle;
import android.os.Build;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.annotation.RequiresApi;
import com.otaliastudios.cameraview.CameraLogger;
import java.util.List;
@RequiresApi(Build.VERSION_CODES.LOLLIPOP)
public class AutoWhiteBalance extends Parameter {
private static final String TAG = AutoWhiteBalance.class.getSimpleName();
private static final CameraLogger LOG = CameraLogger.create(TAG + "Metering");
public AutoWhiteBalance(@NonNull MeteringChangeCallback callback) {
super(callback);
}
@Override
protected boolean checkSupportsProcessing(@NonNull CameraCharacteristics characteristics,
@NonNull CaptureRequest.Builder builder) {
boolean isNotLegacy = readCharacteristic(characteristics,
CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL, -1) !=
CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY;
Integer awbMode = builder.get(CaptureRequest.CONTROL_AWB_MODE);
boolean result = isNotLegacy && awbMode != null && awbMode == CaptureRequest.CONTROL_AWB_MODE_AUTO;
LOG.i("checkSupportsProcessing:", result);
return result;
}
@Override
protected boolean checkShouldSkip(@NonNull CaptureResult lastResult) {
Integer awbState = lastResult.get(CaptureResult.CONTROL_AWB_STATE);
boolean result = awbState != null && awbState == CaptureRequest.CONTROL_AWB_STATE_CONVERGED;
LOG.i("checkShouldSkip:", result);
return result;
}
@Override
protected void onStartMetering(@NonNull CameraCharacteristics characteristics,
@NonNull CaptureRequest.Builder builder,
@NonNull List<MeteringRectangle> areas,
boolean supportsProcessing) {
int maxRegions = readCharacteristic(characteristics,
CameraCharacteristics.CONTROL_MAX_REGIONS_AWB, 0);
if (!areas.isEmpty() && maxRegions > 0) {
int max = Math.min(maxRegions, areas.size());
builder.set(CaptureRequest.CONTROL_AWB_REGIONS,
areas.subList(0, max).toArray(new MeteringRectangle[]{}));
notifyBuilderChanged(false);
}
}
@Override
public void processCapture(@NonNull CaptureResult result) {
Integer awbState = result.get(CaptureResult.CONTROL_AWB_STATE);
LOG.i("onCapture:", "awbState:", awbState);
if (awbState == null) return;
switch (awbState) {
case CaptureRequest.CONTROL_AWB_STATE_CONVERGED: {
notifyMetered(true);
break;
}
case CaptureRequest.CONTROL_AWB_STATE_LOCKED: {
// Nothing we can do if AWB was locked.
notifyMetered(false);
break;
}
case CaptureRequest.CONTROL_AWB_STATE_INACTIVE:
case CaptureRequest.CONTROL_AWB_STATE_SEARCHING: {
// Wait...
break;
}
}
}
@Override
protected void onMetered(@NonNull CaptureRequest.Builder builder, boolean success) {
// Do nothing
}
@Override
protected void onResetMetering(@NonNull CameraCharacteristics characteristics,
@NonNull CaptureRequest.Builder builder,
@Nullable MeteringRectangle area,
boolean supportsProcessing) {
int maxRegions = readCharacteristic(characteristics,
CameraCharacteristics.CONTROL_MAX_REGIONS_AWB, 0);
if (area != null && maxRegions > 0) {
builder.set(CaptureRequest.CONTROL_AWB_REGIONS, new MeteringRectangle[]{area});
notifyBuilderChanged(false);
}
}
}

@@ -1,108 +0,0 @@
package com.otaliastudios.cameraview.engine.metering;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.CaptureResult;
import android.hardware.camera2.params.MeteringRectangle;
import android.os.Build;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.annotation.RequiresApi;
import java.util.List;
@RequiresApi(Build.VERSION_CODES.LOLLIPOP)
public abstract class Parameter {
public interface MeteringChangeCallback {
void onMeteringChange(boolean single);
}
private boolean isSuccessful;
private boolean isMetered;
private MeteringChangeCallback callback;
private boolean shouldSkip;
private boolean supportsProcessing;
protected Parameter(@NonNull MeteringChangeCallback callback) {
this.callback = callback;
}
@SuppressWarnings("WeakerAccess")
@NonNull
protected <T> T readCharacteristic(@NonNull CameraCharacteristics characteristics,
@NonNull CameraCharacteristics.Key<T> key,
@NonNull T fallback) {
T value = characteristics.get(key);
return value == null ? fallback : value;
}
@SuppressWarnings("WeakerAccess")
protected void notifyBuilderChanged(boolean single) {
callback.onMeteringChange(single);
}
@SuppressWarnings("WeakerAccess")
protected void notifyMetered(boolean success) {
isMetered = true;
isSuccessful = success;
}
public final boolean isMetered() {
// A non supported parameter should always appear as metered
return isMetered || !supportsProcessing || shouldSkip;
}
public final boolean isSuccessful() {
// A non supported parameter should always appear as successful
return isSuccessful || !supportsProcessing || shouldSkip;
}
public final void startMetering(@NonNull CameraCharacteristics characteristics,
@NonNull CaptureRequest.Builder builder,
@NonNull List<MeteringRectangle> areas,
@NonNull CaptureResult lastResult,
boolean skipIfPossible) {
isSuccessful = false;
isMetered = false;
shouldSkip = skipIfPossible && checkShouldSkip(lastResult);
supportsProcessing = checkSupportsProcessing(characteristics, builder);
if (!shouldSkip) {
onStartMetering(characteristics, builder, areas, supportsProcessing);
}
}
public final void onCapture(@NonNull CaptureRequest.Builder builder,
@NonNull CaptureResult result) {
if (!isMetered()) {
processCapture(result);
if (isMetered()) onMetered(builder, isSuccessful);
}
}
public final void resetMetering(@NonNull CameraCharacteristics characteristics,
@NonNull CaptureRequest.Builder builder,
@Nullable MeteringRectangle area) {
onResetMetering(characteristics, builder, area, supportsProcessing);
}
protected abstract boolean checkSupportsProcessing(@NonNull CameraCharacteristics characteristics,
@NonNull CaptureRequest.Builder builder);
protected abstract boolean checkShouldSkip(@NonNull CaptureResult lastResult);
protected abstract void onStartMetering(@NonNull CameraCharacteristics characteristics,
@NonNull CaptureRequest.Builder builder,
@NonNull List<MeteringRectangle> areas,
boolean supportsProcessing);
protected abstract void processCapture(@NonNull CaptureResult result);
protected abstract void onMetered(@NonNull CaptureRequest.Builder builder, boolean success);
protected abstract void onResetMetering(@NonNull CameraCharacteristics characteristics,
@NonNull CaptureRequest.Builder builder,
@Nullable MeteringRectangle area,
boolean supportsProcessing);
}
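
Finally, the removed Parameter base class defines a small lifecycle (startMetering, onCapture, resetMetering). Below is a sketch of how a caller would drive one subclass end to end; all names come from the classes deleted in this commit, while the single-result driver shape is an assumption made for brevity.

    import android.hardware.camera2.CameraCharacteristics;
    import android.hardware.camera2.CaptureRequest;
    import android.hardware.camera2.CaptureResult;
    import android.hardware.camera2.params.MeteringRectangle;
    import com.otaliastudios.cameraview.engine.metering.AutoExposure;
    import com.otaliastudios.cameraview.engine.metering.Parameter;
    import java.util.List;

    // Hypothetical driver for one Parameter (here AutoExposure), condensed to a single result.
    final class ParameterSketch {
        static boolean meterOnce(CameraCharacteristics characteristics,
                                 CaptureRequest.Builder builder,
                                 List<MeteringRectangle> areas,
                                 CaptureResult lastResult,
                                 CaptureResult newResult,
                                 Parameter.MeteringChangeCallback callback) {
            Parameter ae = new AutoExposure(callback);
            ae.startMetering(characteristics, builder, areas, lastResult, true /* skipIfPossible */);
            ae.onCapture(builder, newResult);    // in practice, called for every partial/total result
            boolean converged = ae.isMetered() && ae.isSuccessful();
            ae.resetMetering(characteristics, builder, null /* null area: leave the regions untouched */);
            return converged;
        }
    }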