Add startAutoFocus(RectF) (#724)

* Create MeteringRegion(s), MeteringTransform, add startAutoFocus(RectF) API

* Improve docs

* Changelog

* Tests
Mattia Iavarone committed 5 years ago (via GitHub)
parent 9b916f12fd
commit df139994d7
  1. 25   cameraview/src/androidTest/java/com/otaliastudios/cameraview/CameraViewTest.java
  2. 4    cameraview/src/androidTest/java/com/otaliastudios/cameraview/engine/MockCameraEngine.java
  3. 137  cameraview/src/androidTest/java/com/otaliastudios/cameraview/metering/MeteringRegionsTest.java
  4. 28   cameraview/src/main/java/com/otaliastudios/cameraview/CameraView.java
  5. 87   cameraview/src/main/java/com/otaliastudios/cameraview/engine/Camera1Engine.java
  6. 17   cameraview/src/main/java/com/otaliastudios/cameraview/engine/Camera2Engine.java
  7. 3    cameraview/src/main/java/com/otaliastudios/cameraview/engine/CameraBaseEngine.java
  8. 6    cameraview/src/main/java/com/otaliastudios/cameraview/engine/CameraEngine.java
  9. 214  cameraview/src/main/java/com/otaliastudios/cameraview/engine/meter/MeterAction.java
  10. 55  cameraview/src/main/java/com/otaliastudios/cameraview/engine/metering/Camera1MeteringTransform.java
  11. 195 cameraview/src/main/java/com/otaliastudios/cameraview/engine/metering/Camera2MeteringTransform.java
  12. 76  cameraview/src/main/java/com/otaliastudios/cameraview/metering/MeteringRegion.java
  13. 112 cameraview/src/main/java/com/otaliastudios/cameraview/metering/MeteringRegions.java
  14. 14  cameraview/src/main/java/com/otaliastudios/cameraview/metering/MeteringTransform.java
  15. 11  docs/_posts/2018-12-20-changelog.md
  16. 13  docs/_posts/2019-09-04-metering.md

@@ -5,6 +5,7 @@ import android.content.Context;
import android.content.res.TypedArray;
import android.graphics.ImageFormat;
import android.graphics.PointF;
+import android.graphics.RectF;
import android.location.Location;
import androidx.annotation.NonNull;
import androidx.test.annotation.UiThreadTest;
@@ -632,6 +633,20 @@ public class CameraViewTest extends BaseTest {
cameraView.startAutoFocus(200, 200);
}
+@Test(expected = IllegalArgumentException.class)
+public void testStartAutoFocus_illegal3() {
+cameraView.startAutoFocus(new RectF(-1, -1, 1, 1));
+}
+@Test(expected = IllegalArgumentException.class)
+public void testStartAutoFocus_illegal4() {
+cameraView.setLeft(0);
+cameraView.setRight(100);
+cameraView.setTop(0);
+cameraView.setBottom(100);
+cameraView.startAutoFocus(new RectF(-100, -100, 200, 200));
+}
@Test
public void testStartAutoFocus() {
cameraView.setLeft(0);
@@ -642,6 +657,16 @@ public class CameraViewTest extends BaseTest {
assertTrue(mockController.mFocusStarted);
}
+@Test
+public void testStartAutoFocusRect() {
+cameraView.setLeft(0);
+cameraView.setRight(100);
+cameraView.setTop(0);
+cameraView.setBottom(100);
+cameraView.startAutoFocus(new RectF(25, 25, 75, 75));
+assertTrue(mockController.mFocusStarted);
+}
//endregion
//region test setParameters

@@ -2,6 +2,7 @@ package com.otaliastudios.cameraview.engine;
import android.graphics.PointF;
+import android.graphics.RectF;
import android.location.Location;
import com.google.android.gms.tasks.Task;
@@ -18,6 +19,7 @@ import com.otaliastudios.cameraview.frame.FrameManager;
import com.otaliastudios.cameraview.gesture.Gesture;
import com.otaliastudios.cameraview.controls.Hdr;
import com.otaliastudios.cameraview.controls.WhiteBalance;
+import com.otaliastudios.cameraview.metering.MeteringRegions;
import com.otaliastudios.cameraview.size.AspectRatio;
import com.otaliastudios.cameraview.size.Size;
@@ -189,7 +191,7 @@ public class MockCameraEngine extends CameraBaseEngine {
}
@Override
-public void startAutoFocus(@Nullable Gesture gesture, @NonNull PointF point) {
+public void startAutoFocus(@Nullable Gesture gesture, @NonNull MeteringRegions regions, @NonNull PointF legacyPoint) {
mFocusStarted = true;
}

@ -0,0 +1,137 @@
package com.otaliastudios.cameraview.metering;
import android.content.Context;
import android.content.res.TypedArray;
import android.graphics.PointF;
import android.graphics.Rect;
import android.graphics.RectF;
import android.view.View;
import android.view.ViewGroup;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import androidx.test.filters.SmallTest;
import com.otaliastudios.cameraview.BaseTest;
import com.otaliastudios.cameraview.R;
import com.otaliastudios.cameraview.markers.AutoFocusMarker;
import com.otaliastudios.cameraview.markers.AutoFocusTrigger;
import com.otaliastudios.cameraview.markers.MarkerParser;
import com.otaliastudios.cameraview.size.Size;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import java.util.List;
import static junit.framework.TestCase.assertNotNull;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.atLeastOnce;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
@RunWith(AndroidJUnit4.class)
@SmallTest
public class MeteringRegionsTest extends BaseTest {
private final Size bounds = new Size(1000, 1000);
private void checkRegion(@NonNull MeteringRegion region, @NonNull PointF center, int weight) {
assertEquals(center.x, region.mRegion.centerX(), 0.01F);
assertEquals(center.y, region.mRegion.centerY(), 0.01F);
assertEquals(weight, region.mWeight);
}
@Test
public void testFromPoint() {
PointF center = new PointF(500, 500);
MeteringRegions regions = MeteringRegions.fromPoint(bounds, center);
assertEquals(2, regions.mRegions.size());
MeteringRegion first = regions.mRegions.get(0);
MeteringRegion second = regions.mRegions.get(1);
checkRegion(first, center, MeteringRegion.MAX_WEIGHT);
checkRegion(second, center,
Math.round(MeteringRegions.BLUR_FACTOR_WEIGHT * MeteringRegion.MAX_WEIGHT));
}
@Test
public void testFromArea() {
RectF area = new RectF(400, 400, 600, 600);
MeteringRegions regions = MeteringRegions.fromArea(bounds, area);
assertEquals(1, regions.mRegions.size());
MeteringRegion region = regions.mRegions.get(0);
checkRegion(region, new PointF(area.centerX(), area.centerY()), MeteringRegion.MAX_WEIGHT);
}
@Test
public void testFromArea_withBlur() {
RectF area = new RectF(400, 400, 600, 600);
MeteringRegions regions = MeteringRegions.fromArea(bounds, area,
MeteringRegion.MAX_WEIGHT, true);
assertEquals(2, regions.mRegions.size());
MeteringRegion first = regions.mRegions.get(0);
MeteringRegion second = regions.mRegions.get(1);
PointF center = new PointF(area.centerX(), area.centerY());
checkRegion(first, center, MeteringRegion.MAX_WEIGHT);
checkRegion(second, center,
Math.round(MeteringRegions.BLUR_FACTOR_WEIGHT * MeteringRegion.MAX_WEIGHT));
}
@Test
public void testTransform() {
MeteringTransform transform = mock(MeteringTransform.class);
when(transform.transformMeteringPoint(any(PointF.class))).then(new Answer<PointF>() {
@Override
public PointF answer(InvocationOnMock invocation) {
PointF in = invocation.getArgument(0);
// This will swap x and y coordinates
//noinspection SuspiciousNameCombination
return new PointF(in.y, in.x);
}
});
RectF area = new RectF(0, 0, 100, 500); // tall area
RectF expected = new RectF(0, 0, 500, 100); // wide area
MeteringRegions regions = MeteringRegions.fromArea(bounds, area);
MeteringRegions transformed = regions.transform(transform);
verify(transform, times(4)).transformMeteringPoint(any(PointF.class));
assertEquals(1, transformed.mRegions.size());
assertEquals(expected, transformed.mRegions.get(0).mRegion);
}
@Test
public void testGet() {
MeteringTransform<Integer> transform = new MeteringTransform<Integer>() {
@NonNull
@Override
public PointF transformMeteringPoint(@NonNull PointF point) {
return point;
}
@NonNull
@Override
public Integer transformMeteringRegion(@NonNull RectF region, int weight) {
return weight;
}
};
MeteringRegions regions = MeteringRegions.fromArea(bounds,
new RectF(400, 400, 600, 600),
900,
true);
assertEquals(2, regions.mRegions.size());
List<Integer> result = regions.get(1, transform);
assertEquals(1, result.size());
assertEquals(900, (int) result.get(0));
}
}

@@ -11,6 +11,7 @@ import android.content.pm.PackageManager;
import android.content.res.TypedArray;
import android.graphics.PointF;
import android.graphics.Rect;
+import android.graphics.RectF;
import android.location.Location;
import android.media.MediaActionSound;
import android.os.Build;
@@ -71,6 +72,7 @@ import com.otaliastudios.cameraview.markers.AutoFocusMarker;
import com.otaliastudios.cameraview.markers.AutoFocusTrigger;
import com.otaliastudios.cameraview.markers.MarkerLayout;
import com.otaliastudios.cameraview.markers.MarkerParser;
+import com.otaliastudios.cameraview.metering.MeteringRegions;
import com.otaliastudios.cameraview.overlay.OverlayLayout;
import com.otaliastudios.cameraview.preview.CameraPreview;
import com.otaliastudios.cameraview.preview.FilterCameraPreview;
@@ -669,7 +671,9 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
break;
case AUTO_FOCUS:
-mCameraEngine.startAutoFocus(gesture, points[0]);
+Size size = new Size(getWidth(), getHeight());
+MeteringRegions regions = MeteringRegions.fromPoint(size, points[0]);
+mCameraEngine.startAutoFocus(gesture, regions, points[0]);
break;
case ZOOM:
@@ -1351,7 +1355,27 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
if (y < 0 || y > getHeight()) {
throw new IllegalArgumentException("y should be >= 0 and <= getHeight()");
}
-mCameraEngine.startAutoFocus(null, new PointF(x, y));
+Size size = new Size(getWidth(), getHeight());
+PointF point = new PointF(x, y);
+MeteringRegions regions = MeteringRegions.fromPoint(size, point);
+mCameraEngine.startAutoFocus(null, regions, point);
}
+/**
+* Starts a 3A touch metering process at the given coordinates, with respect
+* to the view width and height.
+*
+* @param region should be between 0 and getWidth() / getHeight()
+*/
+public void startAutoFocus(@NonNull RectF region) {
+RectF full = new RectF(0, 0, getWidth(), getHeight());
+if (!full.contains(region)) {
+throw new IllegalArgumentException("Region is out of view bounds! " + region);
+}
+Size size = new Size(getWidth(), getHeight());
+MeteringRegions regions = MeteringRegions.fromArea(size, region);
+mCameraEngine.startAutoFocus(null, regions,
+new PointF(region.centerX(), region.centerY()));
+}
/**
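For reference, a minimal usage sketch of the new overload from client code (illustrative, not part of the diff): `faceBounds` is a hypothetical rect, e.g. a face bounding box in view coordinates. Since the method throws `IllegalArgumentException` when the rect is not fully contained in the view, the sketch clamps it first:

```java
// cameraView is a com.otaliastudios.cameraview.CameraView instance,
// faceBounds is a hypothetical RectF expressed in view coordinates.
RectF clamped = new RectF(
        Math.max(0F, faceBounds.left),
        Math.max(0F, faceBounds.top),
        Math.min(cameraView.getWidth(), faceBounds.right),
        Math.min(cameraView.getHeight(), faceBounds.bottom));
cameraView.startAutoFocus(clamped);
```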

@@ -21,6 +21,7 @@ import com.otaliastudios.cameraview.CameraException;
import com.otaliastudios.cameraview.CameraOptions;
import com.otaliastudios.cameraview.controls.PictureFormat;
import com.otaliastudios.cameraview.engine.mappers.Camera1Mapper;
+import com.otaliastudios.cameraview.engine.metering.Camera1MeteringTransform;
import com.otaliastudios.cameraview.engine.offset.Axis;
import com.otaliastudios.cameraview.engine.offset.Reference;
import com.otaliastudios.cameraview.engine.options.Camera1Options;
@@ -37,6 +38,8 @@ import com.otaliastudios.cameraview.controls.Hdr;
import com.otaliastudios.cameraview.controls.Mode;
import com.otaliastudios.cameraview.controls.WhiteBalance;
import com.otaliastudios.cameraview.internal.utils.CropHelper;
+import com.otaliastudios.cameraview.metering.MeteringRegions;
+import com.otaliastudios.cameraview.metering.MeteringTransform;
import com.otaliastudios.cameraview.picture.Full1PictureRecorder;
import com.otaliastudios.cameraview.picture.Snapshot1PictureRecorder;
import com.otaliastudios.cameraview.picture.SnapshotGlPictureRecorder;
@@ -805,36 +808,26 @@ public class Camera1Engine extends CameraBaseEngine implements
//region Auto Focus
@Override
-public void startAutoFocus(@Nullable final Gesture gesture, @NonNull final PointF point) {
-// Must get width and height from the UI thread.
-// TODO could take mPreview.surfaceSize like Camera2 does?
-int viewWidth = 0, viewHeight = 0;
-if (mPreview != null && mPreview.hasSurface()) {
-viewWidth = mPreview.getView().getWidth();
-viewHeight = mPreview.getView().getHeight();
-}
-final int viewWidthF = viewWidth;
-final int viewHeightF = viewHeight;
-getOrchestrator().scheduleStateful("auto focus", CameraState.ENGINE, new Runnable() {
+public void startAutoFocus(@Nullable final Gesture gesture,
+@NonNull final MeteringRegions regions,
+@NonNull final PointF legacyPoint) {
+getOrchestrator().scheduleStateful("auto focus", CameraState.BIND, new Runnable() {
@Override
public void run() {
if (!mCameraOptions.isAutoFocusSupported()) return;
-final PointF p = new PointF(point.x, point.y); // copy.
-int offset = getAngles().offset(Reference.SENSOR, Reference.VIEW, Axis.ABSOLUTE);
-List<Camera.Area> meteringAreas2 = computeMeteringAreas(p.x, p.y,
-viewWidthF, viewHeightF, offset);
-List<Camera.Area> meteringAreas1 = meteringAreas2.subList(0, 1);
-// At this point we are sure that camera supports auto focus... right?
-// Look at CameraView.onTouchEvent().
+MeteringTransform<Camera.Area> transform = new Camera1MeteringTransform(
+getAngles(),
+getPreview().getSurfaceSize());
+MeteringRegions transformed = regions.transform(transform);
Camera.Parameters params = mCamera.getParameters();
int maxAF = params.getMaxNumFocusAreas();
int maxAE = params.getMaxNumMeteringAreas();
-if (maxAF > 0) params.setFocusAreas(maxAF > 1 ? meteringAreas2 : meteringAreas1);
-if (maxAE > 0) params.setMeteringAreas(maxAE > 1 ? meteringAreas2 : meteringAreas1);
+if (maxAF > 0) params.setFocusAreas(transformed.get(maxAF, transform));
+if (maxAE > 0) params.setMeteringAreas(transformed.get(maxAE, transform));
params.setFocusMode(Camera.Parameters.FOCUS_MODE_AUTO);
mCamera.setParameters(params);
-getCallback().dispatchOnFocusStart(gesture, p);
+getCallback().dispatchOnFocusStart(gesture, legacyPoint);
// The auto focus callback is not guaranteed to be called, but we really want it
// to be. So we remove the old runnable if still present and post a new one.
@@ -843,7 +836,7 @@ public class Camera1Engine extends CameraBaseEngine implements
new Runnable() {
@Override
public void run() {
-getCallback().dispatchOnFocusEnd(gesture, false, p);
+getCallback().dispatchOnFocusEnd(gesture, false, legacyPoint);
}
});
@@ -855,7 +848,7 @@ public class Camera1Engine extends CameraBaseEngine implements
public void onAutoFocus(boolean success, Camera camera) {
getOrchestrator().remove(JOB_FOCUS_END);
getOrchestrator().remove(JOB_FOCUS_RESET);
-getCallback().dispatchOnFocusEnd(gesture, success, p);
+getCallback().dispatchOnFocusEnd(gesture, success, legacyPoint);
if (shouldResetAutoFocus()) {
getOrchestrator().scheduleStatefulDelayed(
JOB_FOCUS_RESET,
@@ -886,52 +879,6 @@ public class Camera1Engine extends CameraBaseEngine implements
});
}
@NonNull
@EngineThread
private static List<Camera.Area> computeMeteringAreas(double viewClickX, double viewClickY,
int viewWidth, int viewHeight,
int sensorToDisplay) {
// Event came in view coordinates. We must rotate to sensor coordinates.
// First, rescale to the -1000 ... 1000 range.
int displayToSensor = -sensorToDisplay;
viewClickX = -1000d + (viewClickX / (double) viewWidth) * 2000d;
viewClickY = -1000d + (viewClickY / (double) viewHeight) * 2000d;
// Apply rotation to this point.
// https://academo.org/demos/rotation-about-point/
double theta = ((double) displayToSensor) * Math.PI / 180;
double sensorClickX = viewClickX * Math.cos(theta) - viewClickY * Math.sin(theta);
double sensorClickY = viewClickX * Math.sin(theta) + viewClickY * Math.cos(theta);
LOG.i("focus:", "viewClickX:", viewClickX, "viewClickY:", viewClickY);
LOG.i("focus:", "sensorClickX:", sensorClickX, "sensorClickY:", sensorClickY);
// Compute the rect bounds.
Rect rect1 = computeMeteringArea(sensorClickX, sensorClickY, 150d);
int weight1 = 1000; // 150 * 150 * 1000 = more than 10.000.000
Rect rect2 = computeMeteringArea(sensorClickX, sensorClickY, 300d);
int weight2 = 100; // 300 * 300 * 100 = 9.000.000
List<Camera.Area> list = new ArrayList<>(2);
list.add(new Camera.Area(rect1, weight1));
list.add(new Camera.Area(rect2, weight2));
return list;
}
@NonNull
private static Rect computeMeteringArea(double centerX, double centerY, double size) {
double delta = size / 2d;
int top = (int) Math.max(centerY - delta, -1000);
int bottom = (int) Math.min(centerY + delta, 1000);
int left = (int) Math.max(centerX - delta, -1000);
int right = (int) Math.min(centerX + delta, 1000);
LOG.i("focus:", "computeMeteringArea:",
"top:", top,
"left:", left,
"bottom:", bottom,
"right:", right);
return new Rect(left, top, right, bottom);
}
//endregion
}
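The `transformed.get(maxAF, transform)` and `get(maxAE, transform)` calls above clamp the metering areas to whatever the device reports. A minimal sketch of that clamping, using an identity `MeteringTransform<Camera.Area>` as a simplified stand-in for `Camera1MeteringTransform` (a real transform must also rotate and rescale into the legacy -1000..1000 space; the sizes and point below are hypothetical):

```java
MeteringTransform<Camera.Area> identity = new MeteringTransform<Camera.Area>() {
    @NonNull
    @Override
    public PointF transformMeteringPoint(@NonNull PointF point) {
        return point; // no rotation, no rescaling: illustration only
    }

    @NonNull
    @Override
    public Camera.Area transformMeteringRegion(@NonNull RectF region, int weight) {
        Rect rect = new Rect();
        region.round(rect);
        return new Camera.Area(rect, weight);
    }
};
Size viewSize = new Size(1080, 1920); // hypothetical view size
MeteringRegions regions = MeteringRegions.fromPoint(viewSize, new PointF(540, 960));
// fromPoint() produces two weighted regions (1000 for the inner spot, 100 for the
// larger blurred one); get(1, ...) keeps only the strongest, so a device whose
// getMaxNumFocusAreas() is 1 still receives a single, highest-weight area.
List<Camera.Area> singleArea = regions.transform(identity).get(1, identity);
```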

@@ -61,6 +61,7 @@ import com.otaliastudios.cameraview.frame.FrameManager;
import com.otaliastudios.cameraview.frame.ImageFrameManager;
import com.otaliastudios.cameraview.gesture.Gesture;
import com.otaliastudios.cameraview.internal.utils.CropHelper;
+import com.otaliastudios.cameraview.metering.MeteringRegions;
import com.otaliastudios.cameraview.picture.Full2PictureRecorder;
import com.otaliastudios.cameraview.picture.Snapshot2PictureRecorder;
import com.otaliastudios.cameraview.preview.GlCameraPreview;
@@ -1508,7 +1509,9 @@ public class Camera2Engine extends CameraBaseEngine implements
//region 3A Metering
@Override
-public void startAutoFocus(@Nullable final Gesture gesture, @NonNull final PointF point) {
+public void startAutoFocus(@Nullable final Gesture gesture,
+@NonNull final MeteringRegions regions,
+@NonNull final PointF legacyPoint) {
// This will only work when we have a preview, since it launches the preview
// in the end. Even without this it would need the bind state at least,
// since we need the preview size.
@@ -1522,14 +1525,15 @@ public class Camera2Engine extends CameraBaseEngine implements
if (!mCameraOptions.isAutoFocusSupported()) return;
// Create the meter and start.
-getCallback().dispatchOnFocusStart(gesture, point);
-final MeterAction action = createMeterAction(point);
+getCallback().dispatchOnFocusStart(gesture, legacyPoint);
+final MeterAction action = createMeterAction(regions);
Action wrapper = Actions.timeout(METER_TIMEOUT, action);
wrapper.start(Camera2Engine.this);
wrapper.addCallback(new CompletionCallback() {
@Override
protected void onActionCompleted(@NonNull Action a) {
-getCallback().dispatchOnFocusEnd(gesture, action.isSuccessful(), point);
+getCallback().dispatchOnFocusEnd(gesture,
+action.isSuccessful(), legacyPoint);
getOrchestrator().remove("reset metering");
if (shouldResetAutoFocus()) {
getOrchestrator().scheduleStatefulDelayed("reset metering",
@@ -1549,7 +1553,7 @@ public class Camera2Engine extends CameraBaseEngine implements
}
@NonNull
-private MeterAction createMeterAction(@Nullable PointF point) {
+private MeterAction createMeterAction(@Nullable MeteringRegions regions) {
// Before creating any new meter action, abort the old one.
if (mMeterAction != null) mMeterAction.abort(this);
// The meter will check the current configuration to see if AF/AE/AWB should run.
@@ -1559,8 +1563,7 @@ public class Camera2Engine extends CameraBaseEngine implements
// The last one is under our control because the library has no focus API.
// So let's set a good af mode here. This operation is reverted during onMeteringReset().
applyFocusForMetering(mRepeatingRequestBuilder);
-mMeterAction = new MeterAction(Camera2Engine.this, point,
-point == null);
+mMeterAction = new MeterAction(Camera2Engine.this, regions, regions == null);
return mMeterAction;
}

@@ -1,5 +1,7 @@
package com.otaliastudios.cameraview.engine;
+import android.graphics.PointF;
+import android.graphics.RectF;
import android.location.Location;
import androidx.annotation.CallSuper;
@@ -25,6 +27,7 @@ import com.otaliastudios.cameraview.engine.offset.Angles;
import com.otaliastudios.cameraview.engine.offset.Reference;
import com.otaliastudios.cameraview.engine.orchestrator.CameraState;
import com.otaliastudios.cameraview.frame.FrameManager;
+import com.otaliastudios.cameraview.gesture.Gesture;
import com.otaliastudios.cameraview.overlay.Overlay;
import com.otaliastudios.cameraview.picture.PictureRecorder;
import com.otaliastudios.cameraview.preview.CameraPreview;

@@ -2,6 +2,7 @@ package com.otaliastudios.cameraview.engine;
import android.content.Context;
import android.graphics.PointF;
+import android.graphics.RectF;
import android.location.Location;
@@ -21,6 +22,7 @@ import com.otaliastudios.cameraview.controls.PictureFormat;
import com.otaliastudios.cameraview.engine.orchestrator.CameraOrchestrator;
import com.otaliastudios.cameraview.engine.orchestrator.CameraState;
import com.otaliastudios.cameraview.engine.orchestrator.CameraStateOrchestrator;
+import com.otaliastudios.cameraview.metering.MeteringRegions;
import com.otaliastudios.cameraview.overlay.Overlay;
import com.otaliastudios.cameraview.VideoResult;
import com.otaliastudios.cameraview.engine.offset.Angles;
@@ -699,7 +701,9 @@ public abstract class CameraEngine implements
public abstract void setPictureSnapshotMetering(boolean enable);
public abstract boolean getPictureSnapshotMetering();
-public abstract void startAutoFocus(@Nullable Gesture gesture, @NonNull PointF point);
+public abstract void startAutoFocus(@Nullable Gesture gesture,
+@NonNull MeteringRegions regions,
+@NonNull PointF legacyPoint);
public abstract void setPlaySounds(boolean playSounds);

@@ -1,10 +1,5 @@
package com.otaliastudios.cameraview.engine.meter;
-import android.graphics.PointF;
-import android.graphics.Rect;
-import android.hardware.camera2.CameraCharacteristics;
-import android.hardware.camera2.CaptureRequest;
-import android.hardware.camera2.TotalCaptureResult;
import android.hardware.camera2.params.MeteringRectangle;
import android.os.Build;
@@ -18,10 +13,10 @@ import com.otaliastudios.cameraview.engine.action.ActionHolder;
import com.otaliastudios.cameraview.engine.action.ActionWrapper;
import com.otaliastudios.cameraview.engine.action.Actions;
import com.otaliastudios.cameraview.engine.action.BaseAction;
-import com.otaliastudios.cameraview.engine.offset.Axis;
+import com.otaliastudios.cameraview.engine.metering.Camera2MeteringTransform;
import com.otaliastudios.cameraview.engine.offset.Reference;
-import com.otaliastudios.cameraview.size.AspectRatio;
-import com.otaliastudios.cameraview.size.Size;
+import com.otaliastudios.cameraview.metering.MeteringRegions;
+import com.otaliastudios.cameraview.metering.MeteringTransform;
import java.util.ArrayList;
import java.util.Arrays;
@@ -35,14 +30,14 @@ public class MeterAction extends ActionWrapper {
private List<BaseMeter> meters;
private BaseAction action;
-private ActionHolder holder;
-private final PointF point;
+private final MeteringRegions regions;
private final CameraEngine engine;
private final boolean skipIfPossible;
-public MeterAction(@NonNull CameraEngine engine, @Nullable PointF point,
+public MeterAction(@NonNull CameraEngine engine,
+@Nullable MeteringRegions regions,
boolean skipIfPossible) {
-this.point = point;
+this.regions = regions;
this.engine = engine;
this.skipIfPossible = skipIfPossible;
}
@@ -53,11 +48,6 @@
return action;
}
-@Nullable
-public PointF getPoint() {
-return point;
-}
public boolean isSuccessful() {
for (BaseMeter meter : meters) {
if (!meter.isSuccessful()) {
@@ -78,45 +68,18 @@
}
private void initialize(@NonNull ActionHolder holder) {
-this.holder = holder;
List<MeteringRectangle> areas = new ArrayList<>();
-if (point != null) {
-// This is a good Q/A. https://stackoverflow.com/a/33181620/4288782
-// At first, the point is relative to the View system and does not account
-// our own cropping. Will keep updating these two below.
-final PointF referencePoint = new PointF(point.x, point.y);
-Size referenceSize = engine.getPreview().getSurfaceSize();
-// 1. Account for cropping.
-// This will enlarge the preview size so that aspect ratio matches.
-referenceSize = applyPreviewCropping(referenceSize, referencePoint);
-// 2. Scale to the preview stream coordinates.
-// This will move to the preview stream coordinates by scaling.
-referenceSize = applyPreviewScale(referenceSize, referencePoint);
-// 3. Rotate to the stream coordinate system.
-// This leaves us with sensor stream coordinates.
-referenceSize = applyPreviewToSensorRotation(referenceSize, referencePoint);
-// 4. Move to the crop region coordinate system.
-// The crop region is the union of all currently active streams.
-referenceSize = applyCropRegionCoordinates(referenceSize, referencePoint);
-// 5. Move to the active array coordinate system.
-referenceSize = applyActiveArrayCoordinates(referenceSize, referencePoint);
-// 6. Now we can compute the metering regions.
-// We want to define them as a fraction of the visible size which (apart from cropping)
-// can be obtained through the SENSOR rotated preview stream size.
-Size visibleSize = engine.getPreviewStreamSize(Reference.SENSOR);
-//noinspection ConstantConditions
-MeteringRectangle area1 = createMeteringRectangle(referenceSize, referencePoint,
-visibleSize, 0.05F, 1000);
-MeteringRectangle area2 = createMeteringRectangle(referenceSize, referencePoint,
-visibleSize, 0.1F, 100);
-areas.add(area1);
-areas.add(area2);
+if (regions != null) {
+MeteringTransform<MeteringRectangle> transform = new Camera2MeteringTransform(
+engine.getAngles(),
+engine.getPreview().getSurfaceSize(),
+engine.getPreviewStreamSize(Reference.VIEW),
+engine.getPreview().isCropping(),
+holder.getCharacteristics(this),
+holder.getBuilder(this)
+);
+MeteringRegions transformed = regions.transform(transform);
+areas = transformed.get(Integer.MAX_VALUE, transform);
}
BaseMeter ae = new ExposureMeter(areas, skipIfPossible);
@@ -125,145 +88,4 @@
meters = Arrays.asList(ae, af, awb);
action = Actions.together(ae, af, awb);
}
@SuppressWarnings("UnnecessaryLocalVariable")
@NonNull
private Size applyPreviewCropping(@NonNull Size referenceSize, @NonNull PointF referencePoint) {
Size previewStreamSize = engine.getPreviewStreamSize(Reference.VIEW);
Size previewSurfaceSize = referenceSize;
if (previewStreamSize == null) {
throw new IllegalStateException("getPreviewStreamSize should not be null here.");
}
int referenceWidth = previewSurfaceSize.getWidth();
int referenceHeight = previewSurfaceSize.getHeight();
AspectRatio previewStreamAspectRatio = AspectRatio.of(previewStreamSize);
AspectRatio previewSurfaceAspectRatio = AspectRatio.of(previewSurfaceSize);
if (engine.getPreview().isCropping()) {
if (previewStreamAspectRatio.toFloat() > previewSurfaceAspectRatio.toFloat()) {
// Stream is larger. The x coordinate must be increased: a touch on the left side
// of the surface is not on the left size of stream (it's more to the right).
float scale = previewStreamAspectRatio.toFloat()
/ previewSurfaceAspectRatio.toFloat();
referencePoint.x += previewSurfaceSize.getWidth() * (scale - 1F) / 2F;
referenceWidth = Math.round(previewSurfaceSize.getWidth() * scale);
} else {
// Stream is taller. The y coordinate must be increased: a touch on the top side
// of the surface is not on the top size of stream (it's a bit lower).
float scale = previewSurfaceAspectRatio.toFloat()
/ previewStreamAspectRatio.toFloat();
referencePoint.y += previewSurfaceSize.getHeight() * (scale - 1F) / 2F;
referenceHeight = Math.round(previewSurfaceSize.getHeight() * scale);
}
}
return new Size(referenceWidth, referenceHeight);
}
@SuppressWarnings("ConstantConditions")
@NonNull
private Size applyPreviewScale(@NonNull Size referenceSize, @NonNull PointF referencePoint) {
// The referenceSize how has the same aspect ratio of the previewStreamSize, but they
// can still have different size (that is, a scale operation is needed).
Size previewStreamSize = engine.getPreviewStreamSize(Reference.VIEW);
referencePoint.x *= (float) previewStreamSize.getWidth() / referenceSize.getWidth();
referencePoint.y *= (float) previewStreamSize.getHeight() / referenceSize.getHeight();
return previewStreamSize;
}
@SuppressWarnings("SuspiciousNameCombination")
@NonNull
private Size applyPreviewToSensorRotation(@NonNull Size referenceSize,
@NonNull PointF referencePoint) {
// Not elegant, but the sin/cos way was failing for some reason.
int angle = engine.getAngles().offset(Reference.SENSOR, Reference.VIEW, Axis.ABSOLUTE);
boolean flip = angle % 180 != 0;
float tempX = referencePoint.x;
float tempY = referencePoint.y;
if (angle == 0) {
referencePoint.x = tempX;
referencePoint.y = tempY;
} else if (angle == 90) {
referencePoint.x = tempY;
referencePoint.y = referenceSize.getWidth() - tempX;
} else if (angle == 180) {
referencePoint.x = referenceSize.getWidth() - tempX;
referencePoint.y = referenceSize.getHeight() - tempY;
} else if (angle == 270) {
referencePoint.x = referenceSize.getHeight() - tempY;
referencePoint.y = tempX;
} else {
throw new IllegalStateException("Unexpected angle " + angle);
}
return flip ? referenceSize.flip() : referenceSize;
}
@NonNull
private Size applyCropRegionCoordinates(@NonNull Size referenceSize,
@NonNull PointF referencePoint) {
// The input point and size refer to the stream rect.
// The stream rect is part of the 'crop region', as described below.
// https://source.android.com/devices/camera/camera3_crop_reprocess.html
Rect cropRect = holder.getBuilder(this).get(CaptureRequest.SCALER_CROP_REGION);
// For now we don't care about x and y position. Rect should not be null, but let's be safe.
int cropRectWidth = cropRect == null ? referenceSize.getWidth() : cropRect.width();
int cropRectHeight = cropRect == null ? referenceSize.getHeight() : cropRect.height();
// The stream is always centered inside the crop region, and one of the dimensions
// should always match. We just increase the other one.
referencePoint.x += (cropRectWidth - referenceSize.getWidth()) / 2F;
referencePoint.y += (cropRectHeight - referenceSize.getHeight()) / 2F;
return new Size(cropRectWidth, cropRectHeight);
}
@NonNull
private Size applyActiveArrayCoordinates(@NonNull Size referenceSize,
@NonNull PointF referencePoint) {
// The input point and size refer to the scaler crop region.
// We can query for the crop region position inside the active array, so this is easy.
Rect cropRect = holder.getBuilder(this).get(CaptureRequest.SCALER_CROP_REGION);
referencePoint.x += cropRect == null ? 0 : cropRect.left;
referencePoint.y += cropRect == null ? 0 : cropRect.top;
// Finally, get the active rect width and height from characteristics.
Rect activeRect = holder.getCharacteristics(this)
.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
if (activeRect == null) { // Should never happen
activeRect = new Rect(0, 0, referenceSize.getWidth(),
referenceSize.getHeight());
}
return new Size(activeRect.width(), activeRect.height());
}
/**
* Creates a metering rectangle around the center point.
* The rectangle will have a size that's a factor of the visible width and height.
* The rectangle will also be constrained to be inside the given boundaries,
* so we don't exceed them in case the center point is exactly on one side for example.
* @return a new rectangle
*/
@NonNull
private MeteringRectangle createMeteringRectangle(
@NonNull Size boundaries,
@NonNull PointF center,
@NonNull Size visibleSize,
float factor,
int weight) {
float rectangleWidth = factor * visibleSize.getWidth();
float rectangleHeight = factor * visibleSize.getHeight();
float rectangleLeft = center.x - rectangleWidth / 2F;
float rectangleTop = center.y - rectangleHeight / 2F;
// Respect boundaries
if (rectangleLeft < 0) rectangleLeft = 0;
if (rectangleTop < 0) rectangleTop = 0;
if (rectangleLeft + rectangleWidth > boundaries.getWidth()) {
rectangleWidth = boundaries.getWidth() - rectangleLeft;
}
if (rectangleTop + rectangleHeight > boundaries.getHeight()) {
rectangleHeight = boundaries.getHeight() - rectangleTop;
}
return new MeteringRectangle(
(int) rectangleLeft,
(int) rectangleTop,
(int) rectangleWidth,
(int) rectangleHeight,
weight
);
}
}

@ -0,0 +1,55 @@
package com.otaliastudios.cameraview.engine.metering;
import android.graphics.PointF;
import android.graphics.Rect;
import android.graphics.RectF;
import android.hardware.Camera;
import androidx.annotation.NonNull;
import com.otaliastudios.cameraview.CameraLogger;
import com.otaliastudios.cameraview.engine.offset.Angles;
import com.otaliastudios.cameraview.engine.offset.Axis;
import com.otaliastudios.cameraview.engine.offset.Reference;
import com.otaliastudios.cameraview.metering.MeteringTransform;
import com.otaliastudios.cameraview.size.Size;
public class Camera1MeteringTransform implements MeteringTransform<Camera.Area> {
protected static final String TAG = Camera1MeteringTransform.class.getSimpleName();
protected static final CameraLogger LOG = CameraLogger.create(TAG);
private final int displayToSensor;
private final Size previewSize;
public Camera1MeteringTransform(@NonNull Angles angles, @NonNull Size previewSize) {
this.displayToSensor = -angles.offset(Reference.SENSOR, Reference.VIEW, Axis.ABSOLUTE);
this.previewSize = previewSize;
}
@NonNull
@Override
public PointF transformMeteringPoint(@NonNull PointF point) {
// First, rescale to the -1000 ... 1000 range.
PointF scaled = new PointF();
scaled.x = -1000F + (point.x / previewSize.getWidth()) * 2000F;
scaled.y = -1000F + (point.y / previewSize.getHeight()) * 2000F;
// Apply rotation to this point.
// https://academo.org/demos/rotation-about-point/
PointF rotated = new PointF();
double theta = ((double) displayToSensor) * Math.PI / 180;
rotated.x = (float) (scaled.x * Math.cos(theta) - scaled.y * Math.sin(theta));
rotated.y = (float) (scaled.x * Math.sin(theta) + scaled.y * Math.cos(theta));
LOG.i("scaled:", scaled, "rotated:", rotated);
return rotated;
}
@NonNull
@Override
public Camera.Area transformMeteringRegion(@NonNull RectF region, int weight) {
Rect rect = new Rect();
region.round(rect);
return new Camera.Area(rect, weight);
}
}
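`Camera.Area` always uses the legacy driver coordinate space spanning (-1000, -1000) to (1000, 1000) regardless of the preview size, which is what the rescaling above produces. A standalone sketch of that mapping, assuming a 0° sensor-to-view offset so the rotation step can be skipped (the helper name is illustrative):

```java
// Maps a point from view coordinates into the -1000..1000 legacy space,
// mirroring the rescale performed by transformMeteringPoint() above.
static PointF toLegacySpace(PointF viewPoint, int viewWidth, int viewHeight) {
    PointF scaled = new PointF();
    scaled.x = -1000F + (viewPoint.x / viewWidth) * 2000F;
    scaled.y = -1000F + (viewPoint.y / viewHeight) * 2000F;
    return scaled;
}
// e.g. the center of a 1080x1920 view maps to (0, 0),
// and the top-left corner maps to (-1000, -1000).
```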

@ -0,0 +1,195 @@
package com.otaliastudios.cameraview.engine.metering;
import android.graphics.PointF;
import android.graphics.Rect;
import android.graphics.RectF;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.params.MeteringRectangle;
import android.os.Build;
import androidx.annotation.NonNull;
import androidx.annotation.RequiresApi;
import com.otaliastudios.cameraview.CameraLogger;
import com.otaliastudios.cameraview.engine.offset.Angles;
import com.otaliastudios.cameraview.engine.offset.Axis;
import com.otaliastudios.cameraview.engine.offset.Reference;
import com.otaliastudios.cameraview.metering.MeteringTransform;
import com.otaliastudios.cameraview.size.AspectRatio;
import com.otaliastudios.cameraview.size.Size;
@RequiresApi(Build.VERSION_CODES.LOLLIPOP)
public class Camera2MeteringTransform implements MeteringTransform<MeteringRectangle> {
protected static final String TAG = Camera2MeteringTransform.class.getSimpleName();
protected static final CameraLogger LOG = CameraLogger.create(TAG);
private final Angles angles;
private final Size previewSize;
private final Size previewStreamSize;
private final boolean previewIsCropping;
private final CameraCharacteristics characteristics;
private final CaptureRequest.Builder builder;
public Camera2MeteringTransform(@NonNull Angles angles,
@NonNull Size previewSize,
@NonNull Size previewStreamSize,
boolean previewIsCropping,
@NonNull CameraCharacteristics characteristics,
@NonNull CaptureRequest.Builder builder) {
this.angles = angles;
this.previewSize = previewSize;
this.previewStreamSize = previewStreamSize;
this.previewIsCropping = previewIsCropping;
this.characteristics = characteristics;
this.builder = builder;
}
@NonNull
@Override
public MeteringRectangle transformMeteringRegion(@NonNull RectF region, int weight) {
Rect round = new Rect();
region.round(round);
return new MeteringRectangle(round, weight);
}
@NonNull
@Override
public PointF transformMeteringPoint(@NonNull PointF point) {
// This is a good Q/A. https://stackoverflow.com/a/33181620/4288782
// At first, the point is relative to the View system and does not account
// for our own cropping. Will keep updating these two below.
final PointF referencePoint = new PointF(point.x, point.y);
Size referenceSize = previewSize;
// 1. Account for cropping.
// This will enlarge the preview size so that aspect ratio matches.
referenceSize = applyPreviewCropping(referenceSize, referencePoint);
// 2. Scale to the preview stream coordinates.
// This will move to the preview stream coordinates by scaling.
referenceSize = applyPreviewScale(referenceSize, referencePoint);
// 3. Rotate to the stream coordinate system.
// This leaves us with sensor stream coordinates.
referenceSize = applyPreviewToSensorRotation(referenceSize, referencePoint);
// 4. Move to the crop region coordinate system.
// The crop region is the union of all currently active streams.
referenceSize = applyCropRegionCoordinates(referenceSize, referencePoint);
// 5. Move to the active array coordinate system.
referenceSize = applyActiveArrayCoordinates(referenceSize, referencePoint);
LOG.i("input:", point, "output (before clipping):", referencePoint);
// 6. Probably not needed, but make sure we clip.
if (referencePoint.x < 0) referencePoint.x = 0;
if (referencePoint.y < 0) referencePoint.y = 0;
if (referencePoint.x > referenceSize.getWidth()) referencePoint.x = referenceSize.getWidth();
if (referencePoint.y > referenceSize.getHeight()) referencePoint.y = referenceSize.getHeight();
LOG.i("input:", point, "output (after clipping):", referencePoint);
return referencePoint;
}
@SuppressWarnings("UnnecessaryLocalVariable")
@NonNull
private Size applyPreviewCropping(@NonNull Size referenceSize, @NonNull PointF referencePoint) {
Size previewStreamSize = this.previewStreamSize;
Size previewSurfaceSize = referenceSize;
int referenceWidth = previewSurfaceSize.getWidth();
int referenceHeight = previewSurfaceSize.getHeight();
AspectRatio previewStreamAspectRatio = AspectRatio.of(previewStreamSize);
AspectRatio previewSurfaceAspectRatio = AspectRatio.of(previewSurfaceSize);
if (previewIsCropping) {
if (previewStreamAspectRatio.toFloat() > previewSurfaceAspectRatio.toFloat()) {
// Stream is larger. The x coordinate must be increased: a touch on the left side
// of the surface is not on the left side of the stream (it's more to the right).
float scale = previewStreamAspectRatio.toFloat()
/ previewSurfaceAspectRatio.toFloat();
referencePoint.x += previewSurfaceSize.getWidth() * (scale - 1F) / 2F;
referenceWidth = Math.round(previewSurfaceSize.getWidth() * scale);
} else {
// Stream is taller. The y coordinate must be increased: a touch on the top side
// of the surface is not on the top side of the stream (it's a bit lower).
float scale = previewSurfaceAspectRatio.toFloat()
/ previewStreamAspectRatio.toFloat();
referencePoint.y += previewSurfaceSize.getHeight() * (scale - 1F) / 2F;
referenceHeight = Math.round(previewSurfaceSize.getHeight() * scale);
}
}
return new Size(referenceWidth, referenceHeight);
}
@NonNull
private Size applyPreviewScale(@NonNull Size referenceSize, @NonNull PointF referencePoint) {
// The referenceSize now has the same aspect ratio as the previewStreamSize, but they
// can still have different size (that is, a scale operation is needed).
Size previewStreamSize = this.previewStreamSize;
referencePoint.x *= (float) previewStreamSize.getWidth() / referenceSize.getWidth();
referencePoint.y *= (float) previewStreamSize.getHeight() / referenceSize.getHeight();
return previewStreamSize;
}
@SuppressWarnings("SuspiciousNameCombination")
@NonNull
private Size applyPreviewToSensorRotation(@NonNull Size referenceSize,
@NonNull PointF referencePoint) {
// Not elegant, but the sin/cos way was failing for some reason.
int angle = angles.offset(Reference.SENSOR, Reference.VIEW, Axis.ABSOLUTE);
boolean flip = angle % 180 != 0;
float tempX = referencePoint.x;
float tempY = referencePoint.y;
if (angle == 0) {
referencePoint.x = tempX;
referencePoint.y = tempY;
} else if (angle == 90) {
referencePoint.x = tempY;
referencePoint.y = referenceSize.getWidth() - tempX;
} else if (angle == 180) {
referencePoint.x = referenceSize.getWidth() - tempX;
referencePoint.y = referenceSize.getHeight() - tempY;
} else if (angle == 270) {
referencePoint.x = referenceSize.getHeight() - tempY;
referencePoint.y = tempX;
} else {
throw new IllegalStateException("Unexpected angle " + angle);
}
return flip ? referenceSize.flip() : referenceSize;
}
@NonNull
private Size applyCropRegionCoordinates(@NonNull Size referenceSize,
@NonNull PointF referencePoint) {
// The input point and size refer to the stream rect.
// The stream rect is part of the 'crop region', as described below.
// https://source.android.com/devices/camera/camera3_crop_reprocess.html
Rect cropRect = builder.get(CaptureRequest.SCALER_CROP_REGION);
// For now we don't care about x and y position. Rect should not be null, but let's be safe.
int cropRectWidth = cropRect == null ? referenceSize.getWidth() : cropRect.width();
int cropRectHeight = cropRect == null ? referenceSize.getHeight() : cropRect.height();
// The stream is always centered inside the crop region, and one of the dimensions
// should always match. We just increase the other one.
referencePoint.x += (cropRectWidth - referenceSize.getWidth()) / 2F;
referencePoint.y += (cropRectHeight - referenceSize.getHeight()) / 2F;
return new Size(cropRectWidth, cropRectHeight);
}
@NonNull
private Size applyActiveArrayCoordinates(@NonNull Size referenceSize,
@NonNull PointF referencePoint) {
// The input point and size refer to the scaler crop region.
// We can query for the crop region position inside the active array, so this is easy.
Rect cropRect = builder.get(CaptureRequest.SCALER_CROP_REGION);
referencePoint.x += cropRect == null ? 0 : cropRect.left;
referencePoint.y += cropRect == null ? 0 : cropRect.top;
// Finally, get the active rect width and height from characteristics.
Rect activeRect = characteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
if (activeRect == null) { // Should never happen
activeRect = new Rect(0, 0, referenceSize.getWidth(),
referenceSize.getHeight());
}
return new Size(activeRect.width(), activeRect.height());
}
}
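As a worked illustration of step 3 above, the `angle == 90` branch maps a point (x, y) in a WxH reference frame to (y, W - x) and flips the frame to HxW. A sketch, with an illustrative helper name and hypothetical numbers:

```java
// Mirrors the angle == 90 branch of applyPreviewToSensorRotation().
static PointF rotate90(PointF p, int referenceWidth) {
    return new PointF(p.y, referenceWidth - p.x);
}
// With a 400x300 reference frame, the point (100, 50) maps to (50, 300)
// inside the flipped 300x400 frame.
```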

@ -0,0 +1,76 @@
package com.otaliastudios.cameraview.metering;
import android.graphics.PointF;
import android.graphics.RectF;
import androidx.annotation.NonNull;
import com.otaliastudios.cameraview.size.Size;
class MeteringRegion implements Comparable<MeteringRegion> {
final static int MAX_WEIGHT = 1000;
final RectF mRegion;
final int mWeight;
MeteringRegion(@NonNull RectF region, int weight) {
mRegion = region;
mWeight = weight;
}
@NonNull
MeteringRegion transform(@NonNull MeteringTransform transform) {
RectF result = new RectF(Float.MAX_VALUE, Float.MAX_VALUE,
-Float.MAX_VALUE, -Float.MAX_VALUE);
PointF point = new PointF();
// top-left
point.set(mRegion.left, mRegion.top);
point = transform.transformMeteringPoint(point);
updateRect(result, point);
// top-right
point.set(mRegion.right, mRegion.top);
point = transform.transformMeteringPoint(point);
updateRect(result, point);
// bottom-right
point.set(mRegion.right, mRegion.bottom);
point = transform.transformMeteringPoint(point);
updateRect(result, point);
// bottom-left
point.set(mRegion.left, mRegion.bottom);
point = transform.transformMeteringPoint(point);
updateRect(result, point);
return new MeteringRegion(result, mWeight);
}
private void updateRect(@NonNull RectF rect, @NonNull PointF point) {
rect.left = Math.min(rect.left, point.x);
rect.top = Math.min(rect.top, point.y);
rect.right = Math.max(rect.right, point.x);
rect.bottom = Math.max(rect.bottom, point.y);
}
@NonNull
MeteringRegion clip(@NonNull Size bounds) {
return clip(new RectF(0, 0, bounds.getWidth(), bounds.getHeight()));
}
@SuppressWarnings("WeakerAccess")
@NonNull
MeteringRegion clip(@NonNull RectF bounds) {
RectF region = new RectF();
region.set(
Math.max(bounds.left, mRegion.left),
Math.max(bounds.top, mRegion.top),
Math.min(bounds.right, mRegion.right),
Math.min(bounds.bottom, mRegion.bottom)
);
return new MeteringRegion(region, mWeight);
}
@Override
public int compareTo(@NonNull MeteringRegion o) {
//noinspection UseCompareMethod
return -Integer.valueOf(mWeight).compareTo(o.mWeight);
}
}

@ -0,0 +1,112 @@
package com.otaliastudios.cameraview.metering;
import android.graphics.PointF;
import android.graphics.RectF;
import androidx.annotation.NonNull;
import androidx.annotation.VisibleForTesting;
import com.otaliastudios.cameraview.size.Size;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
public class MeteringRegions {
private final static float POINT_AREA = 0.05F;
@VisibleForTesting
final static float BLUR_FACTOR_WEIGHT = 0.1F;
private final static float BLUR_FACTOR_SIZE = 1.5F;
@VisibleForTesting
final List<MeteringRegion> mRegions;
private MeteringRegions(@NonNull List<MeteringRegion> regions) {
mRegions = regions;
}
@NonNull
public MeteringRegions transform(@NonNull MeteringTransform transform) {
List<MeteringRegion> regions = new ArrayList<>();
for (MeteringRegion region : mRegions) {
regions.add(region.transform(transform));
}
return new MeteringRegions(regions);
}
@NonNull
public <T> List<T> get(int atMost, @NonNull MeteringTransform<T> transform) {
List<T> result = new ArrayList<>();
Collections.sort(mRegions);
for (MeteringRegion region : mRegions) {
result.add(transform.transformMeteringRegion(region.mRegion, region.mWeight));
}
atMost = Math.min(atMost, result.size());
return result.subList(0, atMost);
}
@NonNull
public static MeteringRegions fromPoint(@NonNull Size bounds,
@NonNull PointF point) {
return fromPoint(bounds, point, MeteringRegion.MAX_WEIGHT);
}
@NonNull
public static MeteringRegions fromPoint(@NonNull Size bounds,
@NonNull PointF point,
int weight) {
float width = POINT_AREA * bounds.getWidth();
float height = POINT_AREA * bounds.getHeight();
RectF rectF = expand(point, width, height);
return fromArea(bounds, rectF, weight, true);
}
@NonNull
public static MeteringRegions fromArea(@NonNull Size bounds,
@NonNull RectF area) {
return fromArea(bounds, area, MeteringRegion.MAX_WEIGHT);
}
@NonNull
public static MeteringRegions fromArea(@NonNull Size bounds,
@NonNull RectF area,
int weight) {
return fromArea(bounds, area, weight, false);
}
@NonNull
public static MeteringRegions fromArea(@NonNull Size bounds,
@NonNull RectF area,
int weight,
boolean blur) {
List<MeteringRegion> regions = new ArrayList<>();
final PointF center = new PointF(area.centerX(), area.centerY());
final float width = area.width();
final float height = area.height();
regions.add(new MeteringRegion(area, weight));
if (blur) {
RectF background = expand(center,
BLUR_FACTOR_SIZE * width,
BLUR_FACTOR_SIZE * height);
regions.add(new MeteringRegion(background,
Math.round(BLUR_FACTOR_WEIGHT * weight)));
}
List<MeteringRegion> clipped = new ArrayList<>();
for (MeteringRegion region : regions) {
clipped.add(region.clip(bounds));
}
return new MeteringRegions(clipped);
}
@NonNull
private static RectF expand(@NonNull PointF center, float width, float height) {
return new RectF(
center.x - width / 2F,
center.y - height / 2F,
center.x + width / 2F,
center.y + height / 2F
);
}
}
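Since `MeteringRegion.transform()` maps each corner and then re-wraps the result in its axis-aligned bounding box, a rotation-like transform turns a tall area into a wide one (the same behavior `MeteringRegionsTest.testTransform` checks). A minimal sketch with a corner-swapping transform (the transform and values are illustrative):

```java
MeteringTransform<RectF> swapXY = new MeteringTransform<RectF>() {
    @NonNull
    @Override
    public PointF transformMeteringPoint(@NonNull PointF point) {
        //noinspection SuspiciousNameCombination
        return new PointF(point.y, point.x);
    }

    @NonNull
    @Override
    public RectF transformMeteringRegion(@NonNull RectF region, int weight) {
        return new RectF(region);
    }
};
Size bounds = new Size(1000, 1000);
MeteringRegions regions = MeteringRegions.fromArea(bounds, new RectF(0, 0, 100, 500));
List<RectF> out = regions.transform(swapXY).get(1, swapXY);
// out.get(0) is the bounding box of the swapped corners: RectF(0, 0, 500, 100).
```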

@ -0,0 +1,14 @@
package com.otaliastudios.cameraview.metering;
import android.graphics.PointF;
import android.graphics.RectF;
import androidx.annotation.NonNull;
public interface MeteringTransform<T> {
@NonNull
PointF transformMeteringPoint(@NonNull PointF point);
@NonNull
T transformMeteringRegion(@NonNull RectF region, int weight);
}

@@ -11,6 +11,14 @@ New versions are released through GitHub, so the reference page is the [GitHub R
Starting from 2.4.0, you can now [support development](https://github.com/sponsors/natario1) through the GitHub Sponsors program.
Companies can share a tiny part of their revenue and get private support hours in return. Thanks!
+## v2.6.0
+- <small>[Metering]</small> New: `startAutoFocus(RectF)` will start 3A metering on a given rect instead of a spot ([#724][724])
+- <small>[Permissions]</small> New: `app:cameraRequestPermissions` flag to disable the automatic activity permission request ([#718][718])
+- <small>[Frame processing]</small> New: `setFrameProcessingPoolSize()` to set the number of Frame instances that can exist at any given time. Useful in conjunction with `setFrameProcessingExecutors()`. Please read docs ([#716][716])
+- <small>[Frame processing]</small> New: `setFrameProcessingExecutors()` to set the number of threads involved in frame processing. Useful in conjunction with `setFrameProcessingPoolSize()`. Please read docs ([#716][716])
+- <small>[Frame processing, Camera2]</small> Improvement: ensure that slow processing does not slow down the preview ([#716][716])
## v2.5.0
- <small>[Camera2]</small> New: support for RAW pictures with new APIs `setPictureFormat()` and `CameraOptions.getSupportedPictureFormats()`. Contains a **breaking change**: `PictureResult.getFormat()` is not an integer anymore but rather a `PictureFormat`. This API had no real purpose so this might not affect you ([#691][691])
@@ -380,3 +388,6 @@ This is the last release before v2.
[696]: https://github.com/natario1/CameraView/pull/696
[697]: https://github.com/natario1/CameraView/pull/697
[704]: https://github.com/natario1/CameraView/pull/704
+[716]: https://github.com/natario1/CameraView/pull/716
+[718]: https://github.com/natario1/CameraView/pull/718
+[724]: https://github.com/natario1/CameraView/pull/724

@@ -51,8 +51,16 @@ cameraView.setPictureSnapshotMetering(false); // Don't
### Touch Metering
Touch metering is triggered by either a [Gesture](gestures) or by the developer itself, which
-can start touch metering on a specific point with the `startAutoFocus(float, float)` API.
-This action needs the coordinates of a point computed with respect to the view width and height.
+can start touch metering on a specific point with the `startAutoFocus()` API.
+This action needs the coordinates of a point or region computed with respect to the view width and height.
+```java
+// Start touch metering at the center:
+cameraView.startAutoFocus(cameraView.getWidth() / 2F, cameraView.getHeight() / 2F);
+// Start touch metering within a given area,
+// like the bounding box of a face.
+cameraView.startAutoFocus(rect);
+```
In both cases, the metering callbacks will be triggered:
@@ -151,6 +159,7 @@ EV correction is not guaranteed to be supported: check the `CameraOptions` to be
|`setPictureMetering(boolean)`|Whether the engine should trigger 3A metering when a picture is requested. Defaults to true.|
|`setPictureSnapshotMetering(boolean)`|Whether the engine should trigger 3A metering when a picture snapshot is requested. Defaults to false.|
|`startAutoFocus(float, float)`|Starts the 3A touch metering routine at the given coordinates, with respect to the view system.|
+|`startAutoFocus(RectF)`|Starts the 3A touch metering routine for the given area, defined with respect to the view system.|
|`CameraOptions.isAutoFocusSupported()`|Whether touch metering (metering with respect to a specific region of the screen) is supported.|
|`setExposureCorrection(float)`|Changes the exposure adjustment, in EV stops. A positive value means a brighter picture.|
|`CameraOptions.getExposureCorrectionMinValue()`|The minimum value of negative exposure correction, in EV stops.|
