Camera2 working with all previews

pull/493/head
Mattia Iavarone 5 years ago
parent 4166031ce2
commit e2d5aa1941
  1. cameraview/build.gradle (13)
  2. cameraview/src/androidTest/java/com/otaliastudios/cameraview/BaseTest.java (11)
  3. cameraview/src/androidTest/java/com/otaliastudios/cameraview/CameraLoggerTest.java (18)
  4. cameraview/src/androidTest/java/com/otaliastudios/cameraview/CameraUtilsTest.java (24)
  5. cameraview/src/androidTest/java/com/otaliastudios/cameraview/CameraViewCallbacksTest.java (39)
  6. cameraview/src/androidTest/java/com/otaliastudios/cameraview/CameraViewTest.java (16)
  7. cameraview/src/androidTest/java/com/otaliastudios/cameraview/engine/Camera1IntegrationTest.java (28)
  8. cameraview/src/androidTest/java/com/otaliastudios/cameraview/engine/Camera2IntegrationTest.java (28)
  9. cameraview/src/androidTest/java/com/otaliastudios/cameraview/engine/CameraIntegrationTest.java (119)
  10. cameraview/src/androidTest/java/com/otaliastudios/cameraview/engine/MockCameraEngine.java (56)
  11. cameraview/src/androidTest/java/com/otaliastudios/cameraview/gesture/GestureLayoutTest.java (8)
  12. cameraview/src/androidTest/java/com/otaliastudios/cameraview/internal/GridLinesLayoutTest.java (9)
  13. cameraview/src/androidTest/java/com/otaliastudios/cameraview/internal/utils/WorkerHandlerTest.java (8)
  14. cameraview/src/androidTest/java/com/otaliastudios/cameraview/markers/MarkerLayoutTest.java (13)
  15. cameraview/src/androidTest/java/com/otaliastudios/cameraview/preview/CameraPreviewTest.java (19)
  16. cameraview/src/androidTest/java/com/otaliastudios/cameraview/preview/GlCameraPreviewTest.java (8)
  17. cameraview/src/main/java/com/otaliastudios/cameraview/CameraException.java (2)
  18. cameraview/src/main/java/com/otaliastudios/cameraview/CameraView.java (4)
  19. cameraview/src/main/java/com/otaliastudios/cameraview/engine/Camera1Engine.java (312)
  20. cameraview/src/main/java/com/otaliastudios/cameraview/engine/Camera2Engine.java (399)
  21. cameraview/src/main/java/com/otaliastudios/cameraview/engine/CameraEngine.java (688)
  22. cameraview/src/main/java/com/otaliastudios/cameraview/engine/CameraEngineStep.java (170)
  23. cameraview/src/main/java/com/otaliastudios/cameraview/frame/FrameManager.java (8)
  24. cameraview/src/main/java/com/otaliastudios/cameraview/internal/GridLinesLayout.java (9)
  25. cameraview/src/main/java/com/otaliastudios/cameraview/internal/utils/Op.java (12)
  26. cameraview/src/main/java/com/otaliastudios/cameraview/internal/utils/WorkerHandler.java (4)
  27. cameraview/src/main/java/com/otaliastudios/cameraview/picture/SnapshotPictureRecorder.java (2)
  28. cameraview/src/main/java/com/otaliastudios/cameraview/preview/CameraPreview.java (40)
  29. cameraview/src/main/java/com/otaliastudios/cameraview/preview/GlCameraPreview.java (35)
  30. cameraview/src/main/java/com/otaliastudios/cameraview/preview/TextureCameraPreview.java (53)
  31. cameraview/src/test/java/com/otaliastudios/cameraview/frame/FrameManagerTest.java (16)
  32. demo/src/main/AndroidManifest.xml (3)
  33. demo/src/main/res/layout/activity_camera.xml (1)

@@ -34,18 +34,19 @@ android {
dependencies {
testImplementation 'junit:junit:4.12'
- testImplementation 'org.mockito:mockito-core:2.23.0'
+ testImplementation 'org.mockito:mockito-core:2.28.2'
- androidTestImplementation 'androidx.test:runner:1.1.1'
+ androidTestImplementation 'androidx.test:runner:1.2.0'
- androidTestImplementation 'androidx.test:rules:1.1.1'
+ androidTestImplementation 'androidx.test:rules:1.2.0'
- androidTestImplementation 'androidx.test.ext:junit:1.1.0'
+ androidTestImplementation 'androidx.test.ext:junit:1.1.1'
androidTestImplementation 'com.google.dexmaker:dexmaker:1.2'
androidTestImplementation 'com.google.dexmaker:dexmaker-mockito:1.2'
- androidTestImplementation 'androidx.test.espresso:espresso-core:3.1.1'
+ androidTestImplementation 'androidx.test.espresso:espresso-core:3.2.0'
api 'androidx.exifinterface:exifinterface:1.0.0'
api 'androidx.lifecycle:lifecycle-common:2.1.0-alpha01'
- implementation 'androidx.annotation:annotation:1.0.1'
+ api 'com.google.android.gms:play-services-tasks:17.0.0'
+ implementation 'androidx.annotation:annotation:1.1.0'
}
//endregion

@@ -10,9 +10,10 @@ import android.os.PowerManager;
import androidx.test.platform.app.InstrumentationRegistry;
+ import android.util.Log;
import android.view.View;
- import com.otaliastudios.cameraview.internal.utils.Task;
+ import com.otaliastudios.cameraview.internal.utils.Op;
import org.junit.After;
import org.junit.AfterClass;
@@ -131,23 +132,23 @@ public class BaseTest {
});
}
- public static <T> Stubber doEndTask(final Task<T> task, final T response) {
+ public static <T> Stubber doEndTask(final Op<T> op, final T response) {
return doAnswer(new Answer<Object>() {
@Override
public Object answer(InvocationOnMock invocation) throws Throwable {
- task.end(response);
+ op.end(response);
return null;
}
});
}
- public static Stubber doEndTask(final Task task, final int withReturnArgument) {
+ public static Stubber doEndTask(final Op op, final int withReturnArgument) {
return doAnswer(new Answer<Object>() {
@Override
public Object answer(InvocationOnMock invocation) throws Throwable {
Object o = invocation.getArguments()[withReturnArgument];
//noinspection unchecked
- task.end(o);
+ op.end(o);
return null;
}
});

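The Task-to-Op change above (and in the hunks that follow) is a pure rename of the test synchronization helper; its listen()/end()/await() contract is unchanged. As a reference, here is a minimal sketch of that contract, assuming a CountDownLatch-based implementation since Op.java itself is not shown in this excerpt:

import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;

// Assumed sketch of the Op<T> helper used throughout these tests:
// listen() arms it, end(value) delivers a result from a worker/callback thread,
// await(millis) blocks the test thread and returns that result, or null on timeout.
public class Op<T> {
    private CountDownLatch latch;
    private T result;

    public Op() { }

    public Op(boolean startListening) {
        if (startListening) listen();
    }

    public void listen() {
        latch = new CountDownLatch(1);
        result = null;
    }

    public void end(T value) {
        result = value;
        if (latch != null) latch.countDown();
    }

    public T await(long millis) {
        try {
            if (latch != null) latch.await(millis, TimeUnit.MILLISECONDS);
        } catch (InterruptedException ignored) { }
        return result;
    }
}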
@@ -1,7 +1,7 @@
package com.otaliastudios.cameraview;
- import com.otaliastudios.cameraview.internal.utils.Task;
+ import com.otaliastudios.cameraview.internal.utils.Op;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import androidx.test.filters.SmallTest;
@@ -109,27 +109,27 @@ public class CameraLoggerTest extends BaseTest {
CameraLogger.Logger mock = mock(CameraLogger.Logger.class);
CameraLogger.registerLogger(mock);
- final Task<Throwable> task = new Task<>();
+ final Op<Throwable> op = new Op<>();
doAnswer(new Answer() {
@Override
public Object answer(InvocationOnMock invocation) throws Throwable {
Object[] args = invocation.getArguments();
Throwable throwable = (Throwable) args[3];
- task.end(throwable);
+ op.end(throwable);
return null;
}
}).when(mock).log(anyInt(), anyString(), anyString(), any(Throwable.class));
- task.listen();
+ op.listen();
logger.e("Got no error.");
- assertNull(task.await(100));
+ assertNull(op.await(100));
- task.listen();
+ op.listen();
logger.e("Got error:", new RuntimeException(""));
- assertNotNull(task.await(100));
+ assertNotNull(op.await(100));
- task.listen();
+ op.listen();
logger.e("Got", new RuntimeException(""), "while starting");
- assertNotNull(task.await(100));
+ assertNotNull(op.await(100));
}
}

@@ -6,7 +6,7 @@ import android.content.pm.PackageManager;
import android.graphics.Bitmap;
import android.graphics.Color;
- import com.otaliastudios.cameraview.internal.utils.Task;
+ import com.otaliastudios.cameraview.internal.utils.Op;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import androidx.test.filters.SmallTest;
@@ -36,18 +36,18 @@ public class CameraUtilsTest extends BaseTest {
}
// Encodes bitmap and decodes again using our utility.
- private Task<Bitmap> encodeDecodeTask(Bitmap source) {
+ private Op<Bitmap> encodeDecodeTask(Bitmap source) {
return encodeDecodeTask(source, 0, 0);
}
// Encodes bitmap and decodes again using our utility.
- private Task<Bitmap> encodeDecodeTask(Bitmap source, final int maxWidth, final int maxHeight) {
+ private Op<Bitmap> encodeDecodeTask(Bitmap source, final int maxWidth, final int maxHeight) {
final ByteArrayOutputStream os = new ByteArrayOutputStream();
// Using lossy JPG we can't have strict comparison of values after compression.
source.compress(Bitmap.CompressFormat.PNG, 100, os);
final byte[] data = os.toByteArray();
- final Task<Bitmap> decode = new Task<>(true);
+ final Op<Bitmap> decode = new Op<>(true);
final BitmapCallback callback = new BitmapCallback() {
@Override
public void onBitmapReady(Bitmap bitmap) {
@@ -75,7 +75,7 @@ public class CameraUtilsTest extends BaseTest {
Bitmap source = Bitmap.createBitmap(w, h, Bitmap.Config.ARGB_8888);
source.setPixel(0, 0, color);
- Task<Bitmap> decode = encodeDecodeTask(source);
+ Op<Bitmap> decode = encodeDecodeTask(source);
Bitmap other = decode.await(800);
assertNotNull(other);
assertEquals(100, w);
@@ -93,23 +93,23 @@ public class CameraUtilsTest extends BaseTest {
public void testDecodeDownscaledBitmap() {
int width = 1000, height = 2000;
Bitmap source = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
- Task<Bitmap> task;
+ Op<Bitmap> op;
Bitmap other;
- task = encodeDecodeTask(source, 100, 100);
+ op = encodeDecodeTask(source, 100, 100);
- other = task.await(800);
+ other = op.await(800);
assertNotNull(other);
assertTrue(other.getWidth() <= 100);
assertTrue(other.getHeight() <= 100);
- task = encodeDecodeTask(source, Integer.MAX_VALUE, Integer.MAX_VALUE);
+ op = encodeDecodeTask(source, Integer.MAX_VALUE, Integer.MAX_VALUE);
- other = task.await(800);
+ other = op.await(800);
assertNotNull(other);
assertEquals(other.getWidth(), width);
assertEquals(other.getHeight(), height);
- task = encodeDecodeTask(source, 6000, 6000);
+ op = encodeDecodeTask(source, 6000, 6000);
- other = task.await(800);
+ other = op.await(800);
assertNotNull(other);
assertEquals(other.getWidth(), width);
assertEquals(other.getHeight(), height);

@@ -17,7 +17,7 @@ import com.otaliastudios.cameraview.frame.Frame;
import com.otaliastudios.cameraview.frame.FrameProcessor;
import com.otaliastudios.cameraview.gesture.Gesture;
import com.otaliastudios.cameraview.gesture.GestureAction;
- import com.otaliastudios.cameraview.internal.utils.Task;
+ import com.otaliastudios.cameraview.internal.utils.Op;
import com.otaliastudios.cameraview.engine.MockCameraEngine;
import com.otaliastudios.cameraview.markers.AutoFocusMarker;
import com.otaliastudios.cameraview.markers.AutoFocusTrigger;
@@ -39,6 +39,7 @@ import static org.junit.Assert.assertNull;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyFloat;
import static org.mockito.Matchers.anyInt;
+ import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
@@ -57,7 +58,7 @@ public class CameraViewCallbacksTest extends BaseTest {
private FrameProcessor processor;
private MockCameraEngine mockController;
private MockCameraPreview mockPreview;
- private Task<Boolean> task;
+ private Op<Boolean> op;
@Before
public void setUp() {
@@ -91,7 +92,7 @@ public class CameraViewCallbacksTest extends BaseTest {
camera.doInstantiatePreview();
camera.addCameraListener(listener);
camera.addFrameProcessor(processor);
- task = new Task<>(true);
+ op = new Op<>(true);
}
});
}
@@ -104,12 +105,12 @@ public class CameraViewCallbacksTest extends BaseTest {
listener = null;
}
- // Completes our task.
+ // Completes our op.
private Stubber completeTask() {
return doAnswer(new Answer() {
@Override
public Object answer(InvocationOnMock invocation) throws Throwable {
- task.end(true);
+ op.end(true);
return null;
}
});
@@ -121,7 +122,7 @@ public class CameraViewCallbacksTest extends BaseTest {
completeTask().when(listener).onCameraOpened(null);
camera.mCameraCallbacks.dispatchOnCameraOpened(null);
- assertNull(task.await(200));
+ assertNull(op.await(200));
verify(listener, never()).onCameraOpened(null);
}
@@ -131,7 +132,7 @@ public class CameraViewCallbacksTest extends BaseTest {
completeTask().when(listener).onCameraOpened(null);
camera.mCameraCallbacks.dispatchOnCameraOpened(null);
- assertNull(task.await(200));
+ assertNull(op.await(200));
verify(listener, never()).onCameraOpened(null);
}
@@ -140,7 +141,7 @@ public class CameraViewCallbacksTest extends BaseTest {
completeTask().when(listener).onCameraOpened(null);
camera.mCameraCallbacks.dispatchOnCameraOpened(null);
- assertNotNull(task.await(200));
+ assertNotNull(op.await(200));
verify(listener, times(1)).onCameraOpened(null);
}
@@ -149,7 +150,7 @@ public class CameraViewCallbacksTest extends BaseTest {
completeTask().when(listener).onCameraClosed();
camera.mCameraCallbacks.dispatchOnCameraClosed();
- assertNotNull(task.await(200));
+ assertNotNull(op.await(200));
verify(listener, times(1)).onCameraClosed();
}
@@ -159,7 +160,7 @@ public class CameraViewCallbacksTest extends BaseTest {
completeTask().when(listener).onVideoTaken(any(VideoResult.class));
camera.mCameraCallbacks.dispatchOnVideoTaken(stub);
- assertNotNull(task.await(200));
+ assertNotNull(op.await(200));
verify(listener, times(1)).onVideoTaken(any(VideoResult.class));
}
@@ -168,7 +169,7 @@ public class CameraViewCallbacksTest extends BaseTest {
PictureResult.Stub stub = new PictureResult.Stub();
completeTask().when(listener).onPictureTaken(any(PictureResult.class));
camera.mCameraCallbacks.dispatchOnPictureTaken(stub);
- assertNotNull(task.await(200));
+ assertNotNull(op.await(200));
verify(listener, times(1)).onPictureTaken(any(PictureResult.class));
}
@@ -177,7 +178,7 @@ public class CameraViewCallbacksTest extends BaseTest {
completeTask().when(listener).onZoomChanged(anyFloat(), any(float[].class), any(PointF[].class));
camera.mCameraCallbacks.dispatchOnZoomChanged(0f, null);
- assertNotNull(task.await(200));
+ assertNotNull(op.await(200));
verify(listener, times(1)).onZoomChanged(anyFloat(), any(float[].class), any(PointF[].class));
}
@@ -186,7 +187,7 @@ public class CameraViewCallbacksTest extends BaseTest {
completeTask().when(listener).onExposureCorrectionChanged(0f, null, null);
camera.mCameraCallbacks.dispatchOnExposureCorrectionChanged(0f, null, null);
- assertNotNull(task.await(200));
+ assertNotNull(op.await(200));
verify(listener, times(1)).onExposureCorrectionChanged(0f, null, null);
}
@@ -204,10 +205,10 @@ public class CameraViewCallbacksTest extends BaseTest {
completeTask().when(listener).onAutoFocusStart(point);
camera.mCameraCallbacks.dispatchOnFocusStart(Gesture.TAP, point);
- assertNotNull(task.await(200));
+ assertNotNull(op.await(200));
verify(listener, times(1)).onAutoFocusStart(point);
verify(marker, times(1)).onAutoFocusStart(AutoFocusTrigger.GESTURE, point);
- verify(markerLayout, times(1)).onEvent(MarkerLayout.TYPE_AUTOFOCUS, any(PointF[].class));
+ verify(markerLayout, times(1)).onEvent(eq(MarkerLayout.TYPE_AUTOFOCUS), any(PointF[].class));
}
@Test
@@ -223,7 +224,7 @@ public class CameraViewCallbacksTest extends BaseTest {
completeTask().when(listener).onAutoFocusEnd(success, point);
camera.mCameraCallbacks.dispatchOnFocusEnd(Gesture.TAP, success, point);
- assertNotNull(task.await(200));
+ assertNotNull(op.await(200));
verify(listener, times(1)).onAutoFocusEnd(success, point);
verify(marker, times(1)).onAutoFocusEnd(AutoFocusTrigger.GESTURE, success, point);
@@ -234,7 +235,7 @@ public class CameraViewCallbacksTest extends BaseTest {
public void testOrientationCallbacks() {
completeTask().when(listener).onOrientationChanged(anyInt());
camera.mCameraCallbacks.onDeviceOrientationChanged(90);
- assertNotNull(task.await(200));
+ assertNotNull(op.await(200));
verify(listener, times(1)).onOrientationChanged(anyInt());
}
@@ -246,7 +247,7 @@ public class CameraViewCallbacksTest extends BaseTest {
completeTask().when(listener).onCameraError(error);
camera.mCameraCallbacks.dispatchError(error);
- assertNotNull(task.await(200));
+ assertNotNull(op.await(200));
verify(listener, times(1)).onCameraError(error);
}
@@ -256,7 +257,7 @@ public class CameraViewCallbacksTest extends BaseTest {
completeTask().when(processor).process(mock);
camera.mCameraCallbacks.dispatchFrame(mock);
- assertNotNull(task.await(200));
+ assertNotNull(op.await(200));
verify(processor, times(1)).process(mock);
}
}

@@ -32,7 +32,7 @@ import com.otaliastudios.cameraview.gesture.PinchGestureLayout;
import com.otaliastudios.cameraview.gesture.ScrollGestureLayout;
import com.otaliastudios.cameraview.gesture.TapGestureLayout;
import com.otaliastudios.cameraview.engine.MockCameraEngine;
- import com.otaliastudios.cameraview.internal.utils.Task;
+ import com.otaliastudios.cameraview.internal.utils.Op;
import com.otaliastudios.cameraview.markers.AutoFocusMarker;
import com.otaliastudios.cameraview.markers.DefaultAutoFocusMarker;
import com.otaliastudios.cameraview.markers.MarkerLayout;
@@ -226,7 +226,7 @@ public class CameraViewTest extends BaseTest {
public void testGestureAction_capture() {
CameraOptions o = mock(CameraOptions.class);
mockController.setMockCameraOptions(o);
- mockController.mockStarted(true);
+ mockController.setMockEngineState(true);
MotionEvent event = MotionEvent.obtain(0L, 0L, 0, 0f, 0f, 0);
ui(new Runnable() {
@Override
@@ -248,7 +248,7 @@ public class CameraViewTest extends BaseTest {
public void testGestureAction_focus() {
CameraOptions o = mock(CameraOptions.class);
mockController.setMockCameraOptions(o);
- mockController.mockStarted(true);
+ mockController.setMockEngineState(true);
MotionEvent event = MotionEvent.obtain(0L, 0L, 0, 0f, 0f, 0);
ui(new Runnable() {
@Override
@@ -273,7 +273,7 @@ public class CameraViewTest extends BaseTest {
public void testGestureAction_zoom() {
CameraOptions o = mock(CameraOptions.class);
mockController.setMockCameraOptions(o);
- mockController.mockStarted(true);
+ mockController.setMockEngineState(true);
mockController.mZoomChanged = false;
MotionEvent event = MotionEvent.obtain(0L, 0L, 0, 0f, 0f, 0);
final FactorHolder factor = new FactorHolder();
@@ -314,7 +314,7 @@ public class CameraViewTest extends BaseTest {
when(o.getExposureCorrectionMinValue()).thenReturn(-10f);
when(o.getExposureCorrectionMaxValue()).thenReturn(10f);
mockController.setMockCameraOptions(o);
- mockController.mockStarted(true);
+ mockController.setMockEngineState(true);
mockController.mExposureCorrectionChanged = false;
MotionEvent event = MotionEvent.obtain(0L, 0L, 0, 0f, 0f, 0);
final FactorHolder factor = new FactorHolder();
@@ -754,16 +754,16 @@ public class CameraViewTest extends BaseTest {
cameraView.mMarkerLayout = markerLayout;
final PointF point = new PointF(0, 0);
final PointF[] points = new PointF[]{ point };
- final Task<Boolean> task = new Task<>(true);
+ final Op<Boolean> op = new Op<>(true);
doAnswer(new Answer() {
@Override
public Object answer(InvocationOnMock invocation) throws Throwable {
- task.end(true);
+ op.end(true);
return null;
}
}).when(markerLayout).onEvent(MarkerLayout.TYPE_AUTOFOCUS, points);
cameraView.mCameraCallbacks.dispatchOnFocusStart(Gesture.TAP, point);
- assertNotNull(task.await(100));
+ assertNotNull(op.await(100));
}
//endregion

@@ -0,0 +1,28 @@
+ package com.otaliastudios.cameraview.engine;
+ import com.otaliastudios.cameraview.controls.Engine;
+ import org.junit.Ignore;
+ import org.junit.runner.RunWith;
+ import androidx.annotation.NonNull;
+ import androidx.test.ext.junit.runners.AndroidJUnit4;
+ import androidx.test.filters.LargeTest;
+ /**
+  * These tests work great on real devices, and are the only way to test the actual CameraEngine
+  * implementation - we really need to open the camera device.
+  * Unfortunately they fail unreliably on emulated devices, due to some bug with the
+  * emulated camera controller. Waiting for it to be fixed.
+  */
+ @RunWith(AndroidJUnit4.class)
+ @LargeTest
+ @Ignore
+ public class Camera1IntegrationTest extends CameraIntegrationTest {
+ @NonNull
+ @Override
+ protected Engine getEngine() {
+ return Engine.CAMERA1;
+ }
+ }

@@ -0,0 +1,28 @@
+ package com.otaliastudios.cameraview.engine;
+ import com.otaliastudios.cameraview.controls.Engine;
+ import org.junit.Ignore;
+ import org.junit.runner.RunWith;
+ import androidx.annotation.NonNull;
+ import androidx.test.ext.junit.runners.AndroidJUnit4;
+ import androidx.test.filters.LargeTest;
+ /**
+  * These tests work great on real devices, and are the only way to test the actual CameraEngine
+  * implementation - we really need to open the camera device.
+  * Unfortunately they fail unreliably on emulated devices, due to some bug with the
+  * emulated camera controller. Waiting for it to be fixed.
+  */
+ @RunWith(AndroidJUnit4.class)
+ @LargeTest
+ @Ignore
+ public class Camera2IntegrationTest extends CameraIntegrationTest {
+ @NonNull
+ @Override
+ protected Engine getEngine() {
+ return Engine.CAMERA2;
+ }
+ }

@@ -5,6 +5,7 @@ import android.graphics.Bitmap;
import android.graphics.PointF;
import android.hardware.Camera;
import android.os.Build;
+ import android.util.Log;
import com.otaliastudios.cameraview.BaseTest;
import com.otaliastudios.cameraview.CameraListener;
@@ -22,13 +23,15 @@ import com.otaliastudios.cameraview.controls.Mode;
import com.otaliastudios.cameraview.controls.WhiteBalance;
import com.otaliastudios.cameraview.frame.Frame;
import com.otaliastudios.cameraview.frame.FrameProcessor;
- import com.otaliastudios.cameraview.internal.utils.Task;
+ import com.otaliastudios.cameraview.internal.utils.Op;
import com.otaliastudios.cameraview.internal.utils.WorkerHandler;
import com.otaliastudios.cameraview.size.Size;
import androidx.annotation.NonNull;
import androidx.test.ext.junit.runners.AndroidJUnit4;
+ import androidx.test.filters.LargeTest;
import androidx.test.filters.MediumTest;
+ import androidx.test.filters.SmallTest;
import androidx.test.rule.ActivityTestRule;
import org.junit.After;
@@ -48,35 +51,30 @@ import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
+ import static org.mockito.Matchers.anyBoolean;
import static org.mockito.Mockito.any;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.spy;
- /**
-  * These tests work great on real devices, and are the only way to test actual CameraEngine
-  * implementation - we really need to open the camera device.
-  * Unfortunately they fail unreliably on emulated devices, due to some bug with the
-  * emulated camera controller. Waiting for it to be fixed.
-  */
- @RunWith(AndroidJUnit4.class)
- @MediumTest
- @Ignore
- public class IntegrationTest extends BaseTest {
+ public abstract class CameraIntegrationTest extends BaseTest {
@Rule
public ActivityTestRule<TestActivity> rule = new ActivityTestRule<>(TestActivity.class);
private CameraView camera;
- private Camera1Engine controller;
+ private CameraEngine controller;
private CameraListener listener;
- private Task<Throwable> uiExceptionTask;
+ private Op<Throwable> uiExceptionOp;
@BeforeClass
public static void grant() {
grantPermissions();
}
+ @NonNull
+ protected abstract Engine getEngine();
@Before
public void setUp() {
WorkerHandler.destroy();
@@ -89,28 +87,27 @@ public class IntegrationTest extends BaseTest {
@NonNull
@Override
protected CameraEngine instantiateCameraEngine(@NonNull Engine engine, @NonNull CameraEngine.Callback callback) {
- controller = new Camera1Engine(callback);
+ controller = super.instantiateCameraEngine(getEngine(), callback);
return controller;
}
};
listener = mock(CameraListener.class);
camera.addCameraListener(listener);
rule.getActivity().inflate(camera);
- }
- });
// Ensure that controller exceptions are thrown on this thread (not on the UI thread).
- uiExceptionTask = new Task<>(true);
+ uiExceptionOp = new Op<>(true);
WorkerHandler crashThread = WorkerHandler.get("CrashThread");
crashThread.getThread().setUncaughtExceptionHandler(new Thread.UncaughtExceptionHandler() {
@Override
public void uncaughtException(Thread t, Throwable e) {
- uiExceptionTask.end(e);
+ uiExceptionOp.end(e);
}
});
controller.mCrashHandler = crashThread.getHandler();
}
+ });
+ }
@After
public void tearDown() {
@@ -120,13 +117,13 @@ public class IntegrationTest extends BaseTest {
}
private void waitForUiException() throws Throwable {
- Throwable throwable = uiExceptionTask.await(2500);
+ Throwable throwable = uiExceptionOp.await(2500);
if (throwable != null) throw throwable;
}
private CameraOptions waitForOpen(boolean expectSuccess) {
camera.open();
- final Task<CameraOptions> open = new Task<>(true);
+ final Op<CameraOptions> open = new Op<>(true);
doEndTask(open, 0).when(listener).onCameraOpened(any(CameraOptions.class));
CameraOptions result = open.await(4000);
if (expectSuccess) {
@@ -139,7 +136,7 @@ public class IntegrationTest extends BaseTest {
private void waitForClose(boolean expectSuccess) {
camera.close();
- final Task<Boolean> close = new Task<>(true);
+ final Op<Boolean> close = new Op<>(true);
doEndTask(close, true).when(listener).onCameraClosed();
Boolean result = close.await(4000);
if (expectSuccess) {
@@ -150,7 +147,7 @@ public class IntegrationTest extends BaseTest {
}
private void waitForVideoEnd(boolean expectSuccess) {
- final Task<Boolean> video = new Task<>(true);
+ final Op<Boolean> video = new Op<>(true);
doEndTask(video, true).when(listener).onVideoTaken(any(VideoResult.class));
Boolean result = video.await(8000);
if (expectSuccess) {
@@ -161,7 +158,7 @@ public class IntegrationTest extends BaseTest {
}
private PictureResult waitForPicture(boolean expectSuccess) {
- final Task<PictureResult> pic = new Task<>(true);
+ final Op<PictureResult> pic = new Op<>(true);
doEndTask(pic, 0).when(listener).onPictureTaken(any(PictureResult.class));
PictureResult result = pic.await(5000);
if (expectSuccess) {
@@ -173,24 +170,24 @@ public class IntegrationTest extends BaseTest {
}
private void waitForVideoStart() {
- controller.mStartVideoTask.listen();
+ controller.mStartVideoOp.listen();
File file = new File(context().getFilesDir(), "video.mp4");
camera.takeVideo(file);
- controller.mStartVideoTask.await(400);
+ controller.mStartVideoOp.await(400);
}
//region test open/close
@Test
- public void testOpenClose() throws Exception {
+ public void testOpenClose() {
// Starting and stopping are hard to get since they happen on another thread.
- assertEquals(controller.getState(), CameraEngine.STATE_STOPPED);
+ assertEquals(controller.getEngineState(), CameraEngine.STATE_STOPPED);
waitForOpen(true);
- assertEquals(controller.getState(), CameraEngine.STATE_STARTED);
+ assertEquals(controller.getEngineState(), CameraEngine.STATE_STARTED);
waitForClose(true);
- assertEquals(controller.getState(), CameraEngine.STATE_STOPPED);
+ assertEquals(controller.getEngineState(), CameraEngine.STATE_STOPPED);
}
@Test
@@ -277,11 +274,11 @@ public class IntegrationTest extends BaseTest {
public void testSetZoom() {
CameraOptions options = waitForOpen(true);
- controller.mZoomTask.listen();
+ controller.mZoomOp.listen();
float oldValue = camera.getZoom();
float newValue = 0.65f;
camera.setZoom(newValue);
- controller.mZoomTask.await(500);
+ controller.mZoomOp.await(500);
if (options.isZoomSupported()) {
assertEquals(newValue, camera.getZoom(), 0f);
@@ -294,11 +291,11 @@ public class IntegrationTest extends BaseTest {
public void testSetExposureCorrection() {
CameraOptions options = waitForOpen(true);
- controller.mExposureCorrectionTask.listen();
+ controller.mExposureCorrectionOp.listen();
float oldValue = camera.getExposureCorrection();
float newValue = options.getExposureCorrectionMaxValue();
camera.setExposureCorrection(newValue);
- controller.mExposureCorrectionTask.await(300);
+ controller.mExposureCorrectionOp.await(300);
if (options.isExposureCorrectionSupported()) {
assertEquals(newValue, camera.getExposureCorrection(), 0f);
@@ -313,9 +310,9 @@ public class IntegrationTest extends BaseTest {
Flash[] values = Flash.values();
Flash oldValue = camera.getFlash();
for (Flash value : values) {
- controller.mFlashTask.listen();
+ controller.mFlashOp.listen();
camera.setFlash(value);
- controller.mFlashTask.await(300);
+ controller.mFlashOp.await(300);
if (options.supports(value)) {
assertEquals(camera.getFlash(), value);
oldValue = value;
@@ -331,9 +328,9 @@ public class IntegrationTest extends BaseTest {
WhiteBalance[] values = WhiteBalance.values();
WhiteBalance oldValue = camera.getWhiteBalance();
for (WhiteBalance value : values) {
- controller.mWhiteBalanceTask.listen();
+ controller.mWhiteBalanceOp.listen();
camera.setWhiteBalance(value);
- controller.mWhiteBalanceTask.await(300);
+ controller.mWhiteBalanceOp.await(300);
if (options.supports(value)) {
assertEquals(camera.getWhiteBalance(), value);
oldValue = value;
@@ -349,9 +346,9 @@ public class IntegrationTest extends BaseTest {
Hdr[] values = Hdr.values();
Hdr oldValue = camera.getHdr();
for (Hdr value : values) {
- controller.mHdrTask.listen();
+ controller.mHdrOp.listen();
camera.setHdr(value);
- controller.mHdrTask.await(300);
+ controller.mHdrOp.await(300);
if (options.supports(value)) {
assertEquals(camera.getHdr(), value);
oldValue = value;
@@ -375,9 +372,9 @@ public class IntegrationTest extends BaseTest {
@Test
public void testSetLocation() {
waitForOpen(true);
- controller.mLocationTask.listen();
+ controller.mLocationOp.listen();
camera.setLocation(10d, 2d);
- controller.mLocationTask.await(300);
+ controller.mLocationOp.await(300);
assertNotNull(camera.getLocation());
assertEquals(camera.getLocation().getLatitude(), 10d, 0d);
assertEquals(camera.getLocation().getLongitude(), 2d, 0d);
@@ -386,22 +383,26 @@ public class IntegrationTest extends BaseTest {
@Test
public void testSetPlaySounds() {
- controller.mPlaySoundsTask.listen();
+ controller.mPlaySoundsOp.listen();
boolean oldValue = camera.getPlaySounds();
boolean newValue = !oldValue;
camera.setPlaySounds(newValue);
- controller.mPlaySoundsTask.await(300);
+ controller.mPlaySoundsOp.await(300);
+ if (controller instanceof Camera1Engine) {
+ Camera1Engine camera1Engine = (Camera1Engine) controller;
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR1) {
Camera.CameraInfo info = new Camera.CameraInfo();
- Camera.getCameraInfo(controller.mCameraId, info);
+ Camera.getCameraInfo(camera1Engine.mCameraId, info);
if (info.canDisableShutterSound) {
assertEquals(newValue, camera.getPlaySounds());
}
} else {
assertEquals(oldValue, camera.getPlaySounds());
}
+ } else {
+ // TODO do when Camera2 is completed
+ }
}
//endregion
@@ -459,13 +460,12 @@ public class IntegrationTest extends BaseTest {
//endregion
//region startAutoFocus
- // TODO: won't test onStopAutoFocus because that is not guaranteed to be called
@Test
public void testStartAutoFocus() {
CameraOptions o = waitForOpen(true);
- final Task<PointF> focus = new Task<>(true);
+ final Op<PointF> focus = new Op<>(true);
doEndTask(focus, 0).when(listener).onAutoFocusStart(any(PointF.class));
camera.startAutoFocus(1, 1);
@@ -478,6 +478,24 @@ public class IntegrationTest extends BaseTest {
}
}
+ @Test
+ public void testStopAutoFocus() {
+ CameraOptions o = waitForOpen(true);
+ final Op<PointF> focus = new Op<>(true);
+ doEndTask(focus, 1).when(listener).onAutoFocusEnd(anyBoolean(), any(PointF.class));
+ camera.startAutoFocus(1, 1);
+ // Stop is not guaranteed to be called, we use a delay. So wait at least the delay time.
+ PointF point = focus.await(1000 + Camera1Engine.AUTOFOCUS_END_DELAY_MILLIS);
+ if (o.isAutoFocusSupported()) {
+ assertNotNull(point);
+ assertEquals(point, new PointF(1, 1));
+ } else {
+ assertNull(point);
+ }
+ }
//endregion
//region capture
@@ -506,6 +524,8 @@ public class IntegrationTest extends BaseTest {
@Test
public void testCapturePicture_size() throws Exception {
waitForOpen(true);
+ // PictureSize can still be null after opened.
+ while (camera.getPictureSize() == null) {}
Size size = camera.getPictureSize();
camera.takePicture();
PictureResult result = waitForPicture(true);
@@ -550,6 +570,8 @@ public class IntegrationTest extends BaseTest {
@Test
public void testCaptureSnapshot_size() throws Exception {
waitForOpen(true);
+ // SnapshotSize can still be null after opened.
+ while (camera.getSnapshotSize() == null) {}
Size size = camera.getSnapshotSize();
camera.takePictureSnapshot();
@@ -622,7 +644,6 @@ public class IntegrationTest extends BaseTest {
assert30Frames(processor);
}
@Test
public void testFrameProcessing_freezeRelease() throws Exception {
// Ensure that freeze/release cycles do not cause OOMs.

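The waitFor* helpers above all follow the same pattern: arm an Op, stub the mocked CameraListener with BaseTest.doEndTask so the callback argument at the given index is forwarded into the Op, then block on await(). A condensed usage sketch of waitForOpen under those assumptions:

// Condensed sketch of the waitForOpen(true) pattern used by these tests.
// doEndTask(op, 0) forwards argument 0 of the mocked callback (the CameraOptions)
// into the Op, so await() returns it, or null if the camera never opened in time.
final Op<CameraOptions> open = new Op<>(true);
doEndTask(open, 0).when(listener).onCameraOpened(any(CameraOptions.class));
camera.open();
CameraOptions options = open.await(4000);
assertNotNull(options); // expectSuccess == true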
@@ -4,13 +4,14 @@ package com.otaliastudios.cameraview.engine;
import android.graphics.PointF;
import android.location.Location;
+ import com.google.android.gms.tasks.Task;
+ import com.google.android.gms.tasks.Tasks;
import com.otaliastudios.cameraview.CameraOptions;
import com.otaliastudios.cameraview.PictureResult;
import com.otaliastudios.cameraview.VideoResult;
import com.otaliastudios.cameraview.controls.Audio;
import com.otaliastudios.cameraview.controls.Facing;
import com.otaliastudios.cameraview.controls.Flash;
- import com.otaliastudios.cameraview.engine.CameraEngine;
import com.otaliastudios.cameraview.gesture.Gesture;
import com.otaliastudios.cameraview.controls.Hdr;
import com.otaliastudios.cameraview.controls.Mode;
@@ -21,9 +22,10 @@ import com.otaliastudios.cameraview.size.SizeSelector;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
- import androidx.annotation.VisibleForTesting;
import java.io.File;
+ import java.util.ArrayList;
+ import java.util.List;
public class MockCameraEngine extends CameraEngine {
@@ -36,12 +38,40 @@ public class MockCameraEngine extends CameraEngine {
super(callback);
}
+ @NonNull
@Override
- protected void onStart() {
+ protected Task<Void> onStartEngine() {
+ return Tasks.forResult(null);
}
+ @NonNull
@Override
- protected void onStop() {
+ protected Task<Void> onStopEngine() {
+ return Tasks.forResult(null);
+ }
+ @NonNull
+ @Override
+ protected Task<Void> onStartBind() {
+ return Tasks.forResult(null);
+ }
+ @NonNull
+ @Override
+ protected Task<Void> onStopBind() {
+ return Tasks.forResult(null);
+ }
+ @NonNull
+ @Override
+ protected Task<Void> onStartPreview() {
+ return Tasks.forResult(null);
+ }
+ @NonNull
+ @Override
+ protected Task<Void> onStopPreview() {
+ return Tasks.forResult(null);
}
public void setMockCameraOptions(CameraOptions options) {
@@ -52,8 +82,8 @@ public class MockCameraEngine extends CameraEngine {
mPreviewStreamSize = size;
}
- public void mockStarted(boolean started) {
+ public void setMockEngineState(boolean started) {
- mState = started ? STATE_STARTED : STATE_STOPPED;
+ mEngineStep.setState(started ? STATE_STARTED : STATE_STOPPED);
}
public int getSnapshotMaxWidth() {
@@ -147,21 +177,19 @@ public class MockCameraEngine extends CameraEngine {
}
@Override
- public void startAutoFocus(@Nullable Gesture gesture, @NonNull PointF point) {
- mFocusStarted = true;
- }
- @Override
- public void onSurfaceChanged() {
+ protected void onPreviewStreamSizeChanged() {
}
+ @NonNull
@Override
- public void onSurfaceAvailable() {
+ protected List<Size> getPreviewStreamAvailableSizes() {
+ return new ArrayList<>();
}
@Override
- public void onSurfaceDestroyed() {
+ public void startAutoFocus(@Nullable Gesture gesture, @NonNull PointF point) {
+ mFocusStarted = true;
}
@Override

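MockCameraEngine now implements the new step-based lifecycle (engine, bind, preview), where each step returns a Play Services Task<Void>; the mock completes every step synchronously with Tasks.forResult(null). The point of returning a Task is that a real engine can chain the steps and only advance when the previous one finishes. A rough sketch of such chaining (assumed here, the actual CameraEngine wiring is not part of this excerpt):

import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import com.google.android.gms.tasks.SuccessContinuation;
import com.google.android.gms.tasks.Task;

// Assumed sketch: run the three start steps in order, each one only after the
// previous Task succeeds. With Tasks.forResult(null), as in the mock above,
// the whole chain completes immediately.
Task<Void> start = onStartEngine()
        .onSuccessTask(new SuccessContinuation<Void, Void>() {
            @NonNull
            @Override
            public Task<Void> then(@Nullable Void ignored) {
                return onStartBind();
            }
        })
        .onSuccessTask(new SuccessContinuation<Void, Void>() {
            @NonNull
            @Override
            public Task<Void> then(@Nullable Void ignored) {
                return onStartPreview();
            }
        });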
@@ -12,9 +12,7 @@ import android.view.View;
import com.otaliastudios.cameraview.BaseTest;
import com.otaliastudios.cameraview.TestActivity;
- import com.otaliastudios.cameraview.gesture.Gesture;
- import com.otaliastudios.cameraview.gesture.GestureLayout;
- import com.otaliastudios.cameraview.internal.utils.Task;
+ import com.otaliastudios.cameraview.internal.utils.Op;
import org.hamcrest.Matchers;
import org.junit.Before;
@@ -33,7 +31,7 @@ public abstract class GestureLayoutTest<T extends GestureLayout> extends BaseTes
@SuppressWarnings("WeakerAccess")
protected T layout;
@SuppressWarnings("WeakerAccess")
- protected Task<Gesture> touch;
+ protected Op<Gesture> touch;
@Before
public void setUp() {
@@ -45,7 +43,7 @@ public abstract class GestureLayoutTest<T extends GestureLayout> extends BaseTes
layout.setActive(true);
a.inflate(layout);
- touch = new Task<>();
+ touch = new Op<>();
layout.setOnTouchListener(new View.OnTouchListener() {
@Override
public boolean onTouch(View view, MotionEvent motionEvent) {

@@ -4,7 +4,6 @@ package com.otaliastudios.cameraview.internal;
import com.otaliastudios.cameraview.BaseTest;
import com.otaliastudios.cameraview.TestActivity;
import com.otaliastudios.cameraview.controls.Grid;
- import com.otaliastudios.cameraview.internal.GridLinesLayout;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import androidx.test.filters.MediumTest;
@@ -34,18 +33,18 @@ public class GridLinesLayoutTest extends BaseTest {
TestActivity a = rule.getActivity();
layout = new GridLinesLayout(a);
layout.setGridMode(Grid.OFF);
- layout.drawTask.listen();
+ layout.drawOp.listen();
a.getContentView().addView(layout);
}
});
// Wait for first draw.
- layout.drawTask.await(1000);
+ layout.drawOp.await(1000);
}
private int setGridAndWait(Grid value) {
- layout.drawTask.listen();
+ layout.drawOp.listen();
layout.setGridMode(value);
- Integer result = layout.drawTask.await(1000);
+ Integer result = layout.drawOp.await(1000);
assertNotNull(result);
return result;
}

@@ -2,8 +2,6 @@ package com.otaliastudios.cameraview.internal.utils;
import com.otaliastudios.cameraview.BaseTest;
- import com.otaliastudios.cameraview.internal.utils.Task;
- import com.otaliastudios.cameraview.internal.utils.WorkerHandler;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import androidx.test.filters.SmallTest;
@@ -28,15 +26,15 @@ public class WorkerHandlerTest extends BaseTest {
@Test
public void testStaticRun() {
- final Task<Boolean> task = new Task<>(true);
+ final Op<Boolean> op = new Op<>(true);
Runnable action = new Runnable() {
@Override
public void run() {
- task.end(true);
+ op.end(true);
}
};
WorkerHandler.run(action);
- Boolean result = task.await(500);
+ Boolean result = op.await(500);
assertNotNull(result);
assertTrue(result);
}

@@ -3,29 +3,19 @@ package com.otaliastudios.cameraview.markers;
import android.annotation.TargetApi;
import android.content.Context;
- import android.view.MotionEvent;
- import android.view.View;
import com.otaliastudios.cameraview.BaseTest;
import com.otaliastudios.cameraview.TestActivity;
- import com.otaliastudios.cameraview.gesture.Gesture;
- import com.otaliastudios.cameraview.gesture.GestureLayout;
- import com.otaliastudios.cameraview.internal.utils.Task;
- import org.hamcrest.Matchers;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
- import org.mockito.Mock;
import org.mockito.Mockito;
- import androidx.test.espresso.ViewInteraction;
+ import androidx.test.annotation.UiThreadTest;
- import androidx.test.espresso.matcher.RootMatchers;
import androidx.test.rule.ActivityTestRule;
- import static androidx.test.espresso.Espresso.onView;
@TargetApi(17)
public class MarkerLayoutTest extends BaseTest {
@@ -65,6 +55,7 @@ public class MarkerLayoutTest extends BaseTest {
}
@Test
+ @UiThreadTest
public void testOnMarker_removesView() {
markerLayout.onMarker(MarkerLayout.TYPE_AUTOFOCUS, autoFocusMarker);
Assert.assertEquals(markerLayout.getChildCount(), 1);

@@ -7,8 +7,7 @@ import android.view.ViewGroup;
import com.otaliastudios.cameraview.BaseTest;
import com.otaliastudios.cameraview.TestActivity;
- import com.otaliastudios.cameraview.internal.utils.Task;
+ import com.otaliastudios.cameraview.internal.utils.Op;
- import com.otaliastudios.cameraview.preview.CameraPreview;
import com.otaliastudios.cameraview.size.AspectRatio;
import com.otaliastudios.cameraview.size.Size;
@@ -37,13 +36,13 @@ public abstract class CameraPreviewTest extends BaseTest {
protected Size surfaceSize;
private CameraPreview.SurfaceCallback callback;
- private Task<Boolean> available;
+ private Op<Boolean> available;
- private Task<Boolean> destroyed;
+ private Op<Boolean> destroyed;
@Before
public void setUp() {
- available = new Task<>(true);
+ available = new Op<>(true);
- destroyed = new Task<>(true);
+ destroyed = new Op<>(true);
ui(new Runnable() {
@Override
@@ -152,17 +151,17 @@ public abstract class CameraPreviewTest extends BaseTest {
// Since desired is 'desired', let's fake a new view size that is consistent with it.
// Ensure crop is not happening anymore.
- preview.mCropTask.listen();
+ preview.mCropOp.listen();
preview.dispatchOnSurfaceSizeChanged((int) (50f * desired), 50); // Wait...
- preview.mCropTask.await();
+ preview.mCropOp.await();
assertEquals(desired, getViewAspectRatioWithScale(), 0.01f);
assertFalse(preview.isCropping());
}
private void setDesiredAspectRatio(float desiredAspectRatio) {
- preview.mCropTask.listen();
+ preview.mCropOp.listen();
preview.setStreamSize((int) (10f * desiredAspectRatio), 10); // Wait...
- preview.mCropTask.await();
+ preview.mCropOp.await();
assertEquals(desiredAspectRatio, getViewAspectRatioWithScale(), 0.01f);
}

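For reference, the listen()/await() calls above are how the test thread blocks until the asynchronous crop callbacks fire. A minimal sketch of that pattern, assuming Op simply wraps a CountDownLatch (a hypothetical stand-in; the real Op class in internal/utils may differ):

import java.util.concurrent.CountDownLatch;

// Hypothetical stand-in for the Op<T> used above: listen() arms it, end(value) completes it,
// await() blocks the calling thread until a value arrives.
public class SimpleOp<T> {

    private CountDownLatch latch;
    private T result;

    public void listen() {
        // Arm before triggering the asynchronous operation.
        latch = new CountDownLatch(1);
    }

    public void end(T value) {
        result = value;
        if (latch != null) latch.countDown(); // Wake up whoever is blocked in await().
    }

    public T await() throws InterruptedException {
        latch.await();
        return result;
    }
}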
@ -7,10 +7,6 @@ import androidx.test.ext.junit.runners.AndroidJUnit4;
import androidx.test.filters.SmallTest; import androidx.test.filters.SmallTest;
import android.view.ViewGroup; import android.view.ViewGroup;
import com.otaliastudios.cameraview.preview.CameraPreview;
import com.otaliastudios.cameraview.preview.CameraPreviewTest;
import com.otaliastudios.cameraview.preview.GlCameraPreview;
import org.junit.runner.RunWith; import org.junit.runner.RunWith;
@RunWith(AndroidJUnit4.class) @RunWith(AndroidJUnit4.class)
@ -24,11 +20,11 @@ public class GlCameraPreviewTest extends CameraPreviewTest {
@Override @Override
protected float getCropScaleY() { protected float getCropScaleY() {
return 1F / ((GlCameraPreview) preview).mScaleY; return 1F / ((GlCameraPreview) preview).mCropScaleY;
} }
@Override @Override
protected float getCropScaleX() { protected float getCropScaleX() {
return 1F / ((GlCameraPreview) preview).mScaleX; return 1F / ((GlCameraPreview) preview).mCropScaleX;
} }
} }

@ -74,7 +74,7 @@ public class CameraException extends RuntimeException {
/** /**
* Whether this error is unrecoverable. If this function returns true, * Whether this error is unrecoverable. If this function returns true,
* the Camera has been closed and it is likely showing a black preview. * the Camera has been closed (or will be soon) and it is likely showing a black preview.
* This is the right moment to show an error dialog to the user. * This is the right moment to show an error dialog to the user.
* *
* @return true if this error is unrecoverable * @return true if this error is unrecoverable

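To illustrate the "show an error dialog" guidance above, a hedged sketch of a client reacting to the flag. The listener callback matches the library's public CameraListener API; the dialog helper and log tag are hypothetical:

// Sketch: deciding how to react to a CameraException delivered to a listener.
private void attachErrorListener(final CameraView cameraView) {
    cameraView.addCameraListener(new CameraListener() {
        @Override
        public void onCameraError(@NonNull CameraException exception) {
            if (exception.isUnrecoverable()) {
                // Camera is closed (or about to be) and the preview is likely black:
                // tell the user instead of silently showing nothing.
                showCameraErrorDialog(exception); // hypothetical UI helper
            } else {
                // Recoverable: the engine usually restarts itself, so just log it.
                Log.w("CameraDemo", "Recoverable camera error", exception);
            }
        }
    });
}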
@ -597,11 +597,11 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
* @return whether the camera has started * @return whether the camera has started
*/ */
public boolean isOpened() { public boolean isOpened() {
return mCameraEngine.getState() >= CameraEngine.STATE_STARTED; return mCameraEngine.getEngineState() >= CameraEngine.STATE_STARTED;
} }
private boolean isClosed() { private boolean isClosed() {
return mCameraEngine.getState() == CameraEngine.STATE_STOPPED; return mCameraEngine.getEngineState() == CameraEngine.STATE_STOPPED;
} }
/** /**

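A short usage sketch of the renamed engine-state check, assuming the usual CameraView API surface (takePicture() and onCameraOpened() are part of the library; the wrapper method is illustrative):

// Sketch: only act on the camera once the engine step has reached STATE_STARTED.
private void captureIfReady(CameraView cameraView) {
    if (cameraView.isOpened()) {
        cameraView.takePicture(); // Safe: engine started, camera options are available.
    } else {
        // Still starting (or stopped): wait for CameraListener.onCameraOpened() and retry.
    }
}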
@ -15,6 +15,8 @@ import androidx.annotation.VisibleForTesting;
import androidx.annotation.WorkerThread; import androidx.annotation.WorkerThread;
import android.view.SurfaceHolder; import android.view.SurfaceHolder;
import com.google.android.gms.tasks.Task;
import com.google.android.gms.tasks.Tasks;
import com.otaliastudios.cameraview.CameraException; import com.otaliastudios.cameraview.CameraException;
import com.otaliastudios.cameraview.CameraLogger; import com.otaliastudios.cameraview.CameraLogger;
import com.otaliastudios.cameraview.CameraOptions; import com.otaliastudios.cameraview.CameraOptions;
@ -30,7 +32,7 @@ import com.otaliastudios.cameraview.controls.Hdr;
import com.otaliastudios.cameraview.controls.Mode; import com.otaliastudios.cameraview.controls.Mode;
import com.otaliastudios.cameraview.controls.WhiteBalance; import com.otaliastudios.cameraview.controls.WhiteBalance;
import com.otaliastudios.cameraview.internal.utils.CropHelper; import com.otaliastudios.cameraview.internal.utils.CropHelper;
import com.otaliastudios.cameraview.internal.utils.Task; import com.otaliastudios.cameraview.internal.utils.Op;
import com.otaliastudios.cameraview.picture.FullPictureRecorder; import com.otaliastudios.cameraview.picture.FullPictureRecorder;
import com.otaliastudios.cameraview.picture.PictureRecorder; import com.otaliastudios.cameraview.picture.PictureRecorder;
import com.otaliastudios.cameraview.picture.SnapshotPictureRecorder; import com.otaliastudios.cameraview.picture.SnapshotPictureRecorder;
@ -54,11 +56,10 @@ public class Camera1Engine extends CameraEngine implements Camera.PreviewCallbac
private static final String TAG = Camera1Engine.class.getSimpleName(); private static final String TAG = Camera1Engine.class.getSimpleName();
private static final CameraLogger LOG = CameraLogger.create(TAG); private static final CameraLogger LOG = CameraLogger.create(TAG);
private static final int AUTOFOCUS_END_DELAY_MILLIS = 2500; @VisibleForTesting static final int AUTOFOCUS_END_DELAY_MILLIS = 2500;
private Camera mCamera; private Camera mCamera;
@VisibleForTesting int mCameraId; @VisibleForTesting int mCameraId;
private boolean mIsBound = false;
private Runnable mFocusEndRunnable; private Runnable mFocusEndRunnable;
private final Runnable mFocusResetRunnable = new Runnable() { private final Runnable mFocusResetRunnable = new Runnable() {
@ -81,87 +82,62 @@ public class Camera1Engine extends CameraEngine implements Camera.PreviewCallbac
mMapper = Mapper.get(Engine.CAMERA1); mMapper = Mapper.get(Engine.CAMERA1);
} }
private void schedule(@Nullable final Task<Void> task, final boolean ensureAvailable, final Runnable action) { private boolean isCameraAvailable() {
return getEngineState() == STATE_STARTED;
}
private void schedule(@Nullable final Op<Void> op, final boolean ensureAvailable, final Runnable action) {
mHandler.post(new Runnable() { mHandler.post(new Runnable() {
@Override @Override
public void run() { public void run() {
if (ensureAvailable && !isCameraAvailable()) { if (ensureAvailable && !isCameraAvailable()) {
if (task != null) task.end(null); if (op != null) op.end(null);
} else { } else {
action.run(); action.run();
if (task != null) task.end(null); if (op != null) op.end(null);
} }
} }
}); });
} }
/** @NonNull
* Preview surface is now available. If camera is open, set up. @WorkerThread
* At this point we are sure that mPreview is not null.
*/
@Override
public void onSurfaceAvailable() {
LOG.i("onSurfaceAvailable:", "Size is", getPreviewSurfaceSize(REF_VIEW));
schedule(null, false, new Runnable() {
@Override @Override
public void run() { protected Task<Void> onStartEngine() {
LOG.i("onSurfaceAvailable:", "Inside handler. About to bind."); if (collectCameraId()) {
if (shouldBindToSurface()) bindToSurface(); try {
if (shouldStartPreview()) startPreview("onSurfaceAvailable"); mCamera = Camera.open(mCameraId);
} } catch (Exception e) {
}); LOG.e("onStartEngine:", "Failed to connect. Maybe in use by another app?");
throw new CameraException(e, CameraException.REASON_FAILED_TO_CONNECT);
} }
mCamera.setErrorCallback(this);
/** // Set parameters that might have been set before the camera was opened.
* Preview surface did change its size. Compute a new preview size. LOG.i("onStartEngine:", "Applying default parameters.");
* This requires stopping and restarting the preview. Camera.Parameters params = mCamera.getParameters();
* At this point we are sure that mPreview is not null. mCameraOptions = new CameraOptions(params, flip(REF_SENSOR, REF_VIEW));
*/ applyDefaultFocus(params);
@Override applyFlash(params, Flash.OFF);
public void onSurfaceChanged() { applyLocation(params, null);
LOG.i("onSurfaceChanged, size is", getPreviewSurfaceSize(REF_VIEW)); applyWhiteBalance(params, WhiteBalance.AUTO);
schedule(null, true, new Runnable() { applyHdr(params, Hdr.OFF);
@Override applyPlaySounds(mPlaySounds);
public void run() { params.setRecordingHint(mMode == Mode.VIDEO);
if (!mIsBound) return; mCamera.setParameters(params);
mCamera.setDisplayOrientation(offset(REF_SENSOR, REF_VIEW)); // <- not allowed during preview
// Compute a new camera preview size. LOG.i("onStartEngine:", "Ended");
Size newSize = computePreviewStreamSize(sizesFromList(mCamera.getParameters().getSupportedPreviewSizes())); return Tasks.forResult(null);
if (newSize.equals(mPreviewStreamSize)) return; } else {
LOG.e("onStartEngine:", "No camera available for facing", mFacing);
// Apply. throw new CameraException(CameraException.REASON_NO_CAMERA);
LOG.i("onSurfaceChanged:", "Computed a new preview size. Going on.");
mPreviewStreamSize = newSize;
stopPreview();
startPreview("onSurfaceChanged:");
} }
});
} }
@NonNull
@Override @Override
public void onSurfaceDestroyed() { protected Task<Void> onStartBind() {
LOG.i("onSurfaceDestroyed"); LOG.i("onStartBind:", "Started");
schedule(null, true, new Runnable() {
@Override
public void run() {
stopPreview();
if (mIsBound) unbindFromSurface();
}
});
}
private boolean shouldBindToSurface() {
return isCameraAvailable() && mPreview != null && mPreview.hasSurface() && !mIsBound;
}
/**
* The act of binding an "open" camera to a "ready" preview.
* These can happen at different times but we want to end up here.
* At this point we are sure that mPreview is not null.
*/
@WorkerThread
private void bindToSurface() {
LOG.i("bindToSurface:", "Started");
Object output = mPreview.getOutput(); Object output = mPreview.getOutput();
try { try {
if (output instanceof SurfaceHolder) { if (output instanceof SurfaceHolder) {
@ -172,40 +148,19 @@ public class Camera1Engine extends CameraEngine implements Camera.PreviewCallbac
throw new RuntimeException("Unknown CameraPreview output class."); throw new RuntimeException("Unknown CameraPreview output class.");
} }
} catch (IOException e) { } catch (IOException e) {
LOG.e("bindToSurface:", "Failed to bind.", e); LOG.e("onStartBind:", "Failed to bind.", e);
throw new CameraException(e, CameraException.REASON_FAILED_TO_START_PREVIEW); throw new CameraException(e, CameraException.REASON_FAILED_TO_START_PREVIEW);
} }
mCaptureSize = computeCaptureSize(); mCaptureSize = computeCaptureSize();
mPreviewStreamSize = computePreviewStreamSize(sizesFromList(mCamera.getParameters().getSupportedPreviewSizes())); mPreviewStreamSize = computePreviewStreamSize();
mIsBound = true; return Tasks.forResult(null);
}
@WorkerThread
private void unbindFromSurface() {
mIsBound = false;
mPreviewStreamSize = null;
mCaptureSize = null;
try {
if (mPreview.getOutputClass() == SurfaceHolder.class) {
mCamera.setPreviewDisplay(null);
} else if (mPreview.getOutputClass() == SurfaceTexture.class) {
mCamera.setPreviewTexture(null);
} else {
throw new RuntimeException("Unknown CameraPreview output class.");
}
} catch (IOException e) {
LOG.e("unbindFromSurface", "Could not release surface", e);
}
}
private boolean shouldStartPreview() {
return isCameraAvailable() && mIsBound;
} }
// To be called when the preview size is setup or changed. @NonNull
private void startPreview(String log) { @Override
LOG.i(log, "Dispatching onCameraPreviewStreamSizeChanged."); protected Task<Void> onStartPreview() {
LOG.i("onStartPreview", "Dispatching onCameraPreviewStreamSizeChanged.");
mCallback.onCameraPreviewStreamSizeChanged(); mCallback.onCameraPreviewStreamSizeChanged();
Size previewSize = getPreviewStreamSize(REF_VIEW); Size previewSize = getPreviewStreamSize(REF_VIEW);
@ -231,106 +186,87 @@ public class Camera1Engine extends CameraEngine implements Camera.PreviewCallbac
mCamera.setPreviewCallbackWithBuffer(null); // Release anything left mCamera.setPreviewCallbackWithBuffer(null); // Release anything left
mCamera.setPreviewCallbackWithBuffer(this); // Add ourselves mCamera.setPreviewCallbackWithBuffer(this); // Add ourselves
mFrameManager.allocateBuffers(ImageFormat.getBitsPerPixel(mPreviewStreamFormat), mPreviewStreamSize); getFrameManager().setUp(ImageFormat.getBitsPerPixel(mPreviewStreamFormat), mPreviewStreamSize);
LOG.i(log, "Starting preview with startPreview()."); LOG.i("onStartPreview", "Starting preview with startPreview().");
try { try {
mCamera.startPreview(); mCamera.startPreview();
} catch (Exception e) { } catch (Exception e) {
LOG.e(log, "Failed to start preview.", e); LOG.e("onStartPreview", "Failed to start preview.", e);
throw new CameraException(e, CameraException.REASON_FAILED_TO_START_PREVIEW); throw new CameraException(e, CameraException.REASON_FAILED_TO_START_PREVIEW);
} }
LOG.i(log, "Started preview."); LOG.i("onStartPreview", "Started preview.");
return Tasks.forResult(null);
} }
private void stopPreview() { @NonNull
@Override
protected Task<Void> onStopPreview() {
if (mVideoRecorder != null) {
mVideoRecorder.stop();
mVideoRecorder = null;
}
mPreviewStreamFormat = 0; mPreviewStreamFormat = 0;
mFrameManager.release(); getFrameManager().release();
mCamera.setPreviewCallbackWithBuffer(null); // Release anything left mCamera.setPreviewCallbackWithBuffer(null); // Release anything left
try { try {
mCamera.stopPreview(); mCamera.stopPreview();
} catch (Exception e) { } catch (Exception e) {
LOG.e("stopPreview", "Could not stop preview", e); LOG.e("stopPreview", "Could not stop preview", e);
} }
return Tasks.forResult(null);
} }
private void createCamera() {
try {
mCamera = Camera.open(mCameraId);
} catch (Exception e) {
LOG.e("createCamera:", "Failed to connect. Maybe in use by another app?");
throw new CameraException(e, CameraException.REASON_FAILED_TO_CONNECT);
}
mCamera.setErrorCallback(this);
// Set parameters that might have been set before the camera was opened.
LOG.i("createCamera:", "Applying default parameters.");
Camera.Parameters params = mCamera.getParameters();
mCameraOptions = new CameraOptions(params, flip(REF_SENSOR, REF_VIEW));
applyDefaultFocus(params);
applyFlash(params, Flash.OFF);
applyLocation(params, null);
applyWhiteBalance(params, WhiteBalance.AUTO);
applyHdr(params, Hdr.OFF);
applyPlaySounds(mPlaySounds);
params.setRecordingHint(mMode == Mode.VIDEO);
mCamera.setParameters(params);
mCamera.setDisplayOrientation(offset(REF_SENSOR, REF_VIEW)); // <- not allowed during preview
}
private void destroyCamera() {
try {
LOG.i("destroyCamera:", "Clean up.", "Releasing camera.");
mCamera.release();
LOG.i("destroyCamera:", "Clean up.", "Released camera.");
} catch (Exception e) {
LOG.w("destroyCamera:", "Clean up.", "Exception while releasing camera.", e);
}
mCamera = null;
mCameraOptions = null;
}
@WorkerThread @NonNull
@Override @Override
protected void onStart() { protected Task<Void> onStopBind() {
if (isCameraAvailable()) { mPreviewStreamSize = null;
LOG.w("onStart:", "Camera not available. Should not happen."); mCaptureSize = null;
onStop(); // Should not happen. try {
} if (mPreview.getOutputClass() == SurfaceHolder.class) {
if (collectCameraId()) { mCamera.setPreviewDisplay(null);
createCamera(); } else if (mPreview.getOutputClass() == SurfaceTexture.class) {
if (shouldBindToSurface()) bindToSurface(); mCamera.setPreviewTexture(null);
if (shouldStartPreview()) startPreview("onStart");
LOG.i("onStart:", "Ended");
} else { } else {
LOG.e("onStart:", "No camera available for facing", mFacing); throw new RuntimeException("Unknown CameraPreview output class.");
throw new CameraException(CameraException.REASON_NO_CAMERA);
} }
} catch (IOException e) {
LOG.e("unbindFromSurface", "Could not release surface", e);
}
return Tasks.forResult(null);
} }
@NonNull
@WorkerThread @WorkerThread
@Override @Override
protected void onStop() { protected Task<Void> onStopEngine() {
LOG.i("onStop:", "About to clean up."); LOG.i("onStopEngine:", "About to clean up.");
mHandler.remove(mFocusResetRunnable); mHandler.remove(mFocusResetRunnable);
if (mFocusEndRunnable != null) { if (mFocusEndRunnable != null) {
mHandler.remove(mFocusEndRunnable); mHandler.remove(mFocusEndRunnable);
} }
if (mVideoRecorder != null) {
mVideoRecorder.stop();
mVideoRecorder = null;
}
if (mCamera != null) { if (mCamera != null) {
stopPreview(); try {
if (mIsBound) unbindFromSurface(); LOG.i("onStopEngine:", "Clean up.", "Releasing camera.");
destroyCamera(); mCamera.release();
LOG.i("onStopEngine:", "Clean up.", "Released camera.");
} catch (Exception e) {
LOG.w("onStopEngine:", "Clean up.", "Exception while releasing camera.", e);
}
mCamera = null;
mCameraOptions = null;
} }
mCameraOptions = null; mCameraOptions = null;
mCamera = null; mCamera = null;
mPreviewStreamSize = null; LOG.w("onStopEngine:", "Clean up.", "Returning.");
mCaptureSize = null; return Tasks.forResult(null);
mIsBound = false; }
LOG.w("onStop:", "Clean up.", "Returning.");
@WorkerThread
@Override
protected void onPreviewStreamSizeChanged() {
restartPreview();
} }
private boolean collectCameraId() { private boolean collectCameraId() {
@ -361,8 +297,7 @@ public class Camera1Engine extends CameraEngine implements Camera.PreviewCallbac
if (error == Camera.CAMERA_ERROR_SERVER_DIED) { if (error == Camera.CAMERA_ERROR_SERVER_DIED) {
// Looks like this is recoverable. // Looks like this is recoverable.
LOG.w("Recoverable error inside the onError callback.", "CAMERA_ERROR_SERVER_DIED"); LOG.w("Recoverable error inside the onError callback.", "CAMERA_ERROR_SERVER_DIED");
stopNow(); restart();
start();
return; return;
} }
@ -394,7 +329,7 @@ public class Camera1Engine extends CameraEngine implements Camera.PreviewCallbac
public void setLocation(@Nullable Location location) { public void setLocation(@Nullable Location location) {
final Location oldLocation = mLocation; final Location oldLocation = mLocation;
mLocation = location; mLocation = location;
schedule(mLocationTask, true, new Runnable() { schedule(mLocationOp, true, new Runnable() {
@Override @Override
public void run() { public void run() {
Camera.Parameters params = mCamera.getParameters(); Camera.Parameters params = mCamera.getParameters();
@ -437,7 +372,7 @@ public class Camera1Engine extends CameraEngine implements Camera.PreviewCallbac
public void setWhiteBalance(@NonNull WhiteBalance whiteBalance) { public void setWhiteBalance(@NonNull WhiteBalance whiteBalance) {
final WhiteBalance old = mWhiteBalance; final WhiteBalance old = mWhiteBalance;
mWhiteBalance = whiteBalance; mWhiteBalance = whiteBalance;
schedule(mWhiteBalanceTask, true, new Runnable() { schedule(mWhiteBalanceOp, true, new Runnable() {
@Override @Override
public void run() { public void run() {
Camera.Parameters params = mCamera.getParameters(); Camera.Parameters params = mCamera.getParameters();
@ -459,7 +394,7 @@ public class Camera1Engine extends CameraEngine implements Camera.PreviewCallbac
public void setHdr(@NonNull Hdr hdr) { public void setHdr(@NonNull Hdr hdr) {
final Hdr old = mHdr; final Hdr old = mHdr;
mHdr = hdr; mHdr = hdr;
schedule(mHdrTask, true, new Runnable() { schedule(mHdrOp, true, new Runnable() {
@Override @Override
public void run() { public void run() {
Camera.Parameters params = mCamera.getParameters(); Camera.Parameters params = mCamera.getParameters();
@ -515,7 +450,7 @@ public class Camera1Engine extends CameraEngine implements Camera.PreviewCallbac
public void setFlash(@NonNull Flash flash) { public void setFlash(@NonNull Flash flash) {
final Flash old = mFlash; final Flash old = mFlash;
mFlash = flash; mFlash = flash;
schedule(mFlashTask, true, new Runnable() { schedule(mFlashOp, true, new Runnable() {
@Override @Override
public void run() { public void run() {
Camera.Parameters params = mCamera.getParameters(); Camera.Parameters params = mCamera.getParameters();
@ -640,7 +575,7 @@ public class Camera1Engine extends CameraEngine implements Camera.PreviewCallbac
@Override @Override
public void onPreviewFrame(@NonNull byte[] data, Camera camera) { public void onPreviewFrame(@NonNull byte[] data, Camera camera) {
Frame frame = mFrameManager.getFrame(data, Frame frame = getFrameManager().getFrame(data,
System.currentTimeMillis(), System.currentTimeMillis(),
offset(REF_SENSOR, REF_OUTPUT), offset(REF_SENSOR, REF_OUTPUT),
mPreviewStreamSize, mPreviewStreamSize,
@ -648,26 +583,6 @@ public class Camera1Engine extends CameraEngine implements Camera.PreviewCallbac
mCallback.dispatchFrame(frame); mCallback.dispatchFrame(frame);
} }
private boolean isCameraAvailable() {
switch (getState()) {
// If we are stopped, don't.
case STATE_STOPPED:
return false;
// If we are going to be closed, don't act on camera.
// Even if mCamera != null, it might have been released.
case STATE_STOPPING:
return false;
// If we are started, mCamera should never be null.
case STATE_STARTED:
return true;
// If we are starting, theoretically we could act.
// Just check that camera is available.
case STATE_STARTING:
return mCamera != null;
}
return false;
}
// ----------------- // -----------------
// Video recording stuff. // Video recording stuff.
@ -685,7 +600,7 @@ public class Camera1Engine extends CameraEngine implements Camera.PreviewCallbac
@Override @Override
public void takeVideo(final @NonNull VideoResult.Stub stub, @NonNull final File videoFile) { public void takeVideo(final @NonNull VideoResult.Stub stub, @NonNull final File videoFile) {
schedule(mStartVideoTask, true, new Runnable() { schedule(mStartVideoOp, true, new Runnable() {
@Override @Override
public void run() { public void run() {
if (mMode == Mode.PICTURE) { if (mMode == Mode.PICTURE) {
@ -737,7 +652,7 @@ public class Camera1Engine extends CameraEngine implements Camera.PreviewCallbac
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.JELLY_BEAN_MR2) { if (Build.VERSION.SDK_INT < Build.VERSION_CODES.JELLY_BEAN_MR2) {
throw new IllegalStateException("Video snapshots are only supported starting from API 18."); throw new IllegalStateException("Video snapshots are only supported starting from API 18.");
} }
schedule(mStartVideoTask, true, new Runnable() { schedule(mStartVideoOp, true, new Runnable() {
@Override @Override
public void run() { public void run() {
if (isTakingVideo()) return; if (isTakingVideo()) return;
@ -798,11 +713,6 @@ public class Camera1Engine extends CameraEngine implements Camera.PreviewCallbac
outputSize = new Size(outputCrop.width(), outputCrop.height()); outputSize = new Size(outputCrop.width(), outputCrop.height());
stub.size = outputSize; stub.size = outputSize;
stub.rotation = offset(REF_VIEW, REF_OUTPUT); stub.rotation = offset(REF_VIEW, REF_OUTPUT);
// LOG.e("ROTBUG_video", "aspectRatio (REF_VIEW):", viewAspectRatio);
// LOG.e("ROTBUG_video", "aspectRatio (REF_OUTPUT):", outputRatio);
// LOG.e("ROTBUG_video", "sizeUncropped (REF_OUTPUT):", outputSize);
// LOG.e("ROTBUG_video", "sizeCropped (REF_OUTPUT):", videoResult.size);
// LOG.e("ROTBUG_video", "rotation:", videoResult.rotation);
// Reset facing and start. // Reset facing and start.
mFacing = realFacing; mFacing = realFacing;
@ -834,7 +744,7 @@ public class Camera1Engine extends CameraEngine implements Camera.PreviewCallbac
@Override @Override
public void setZoom(final float zoom, @Nullable final PointF[] points, final boolean notify) { public void setZoom(final float zoom, @Nullable final PointF[] points, final boolean notify) {
schedule(mZoomTask, true, new Runnable() { schedule(mZoomOp, true, new Runnable() {
@Override @Override
public void run() { public void run() {
if (!mCameraOptions.isZoomSupported()) return; if (!mCameraOptions.isZoomSupported()) return;
@ -855,7 +765,7 @@ public class Camera1Engine extends CameraEngine implements Camera.PreviewCallbac
@Override @Override
public void setExposureCorrection(final float EVvalue, @NonNull final float[] bounds, public void setExposureCorrection(final float EVvalue, @NonNull final float[] bounds,
@Nullable final PointF[] points, final boolean notify) { @Nullable final PointF[] points, final boolean notify) {
schedule(mExposureCorrectionTask, true, new Runnable() { schedule(mExposureCorrectionOp, true, new Runnable() {
@Override @Override
public void run() { public void run() {
if (!mCameraOptions.isExposureCorrectionSupported()) return; if (!mCameraOptions.isExposureCorrectionSupported()) return;
@ -998,13 +908,15 @@ public class Camera1Engine extends CameraEngine implements Camera.PreviewCallbac
@NonNull @NonNull
private List<Size> sizesFromList(@NonNull List<Camera.Size> sizes) { @Override
protected List<Size> getPreviewStreamAvailableSizes() {
List<Camera.Size> sizes = mCamera.getParameters().getSupportedPreviewSizes();
List<Size> result = new ArrayList<>(sizes.size()); List<Size> result = new ArrayList<>(sizes.size());
for (Camera.Size size : sizes) { for (Camera.Size size : sizes) {
Size add = new Size(size.width, size.height); Size add = new Size(size.width, size.height);
if (!result.contains(add)) result.add(add); if (!result.contains(add)) result.add(add);
} }
LOG.i("size:", "sizesFromList:", result); LOG.i("getPreviewStreamAvailableSizes:", result);
return result; return result;
} }
@ -1012,7 +924,7 @@ public class Camera1Engine extends CameraEngine implements Camera.PreviewCallbac
public void setPlaySounds(boolean playSounds) { public void setPlaySounds(boolean playSounds) {
final boolean old = mPlaySounds; final boolean old = mPlaySounds;
mPlaySounds = playSounds; mPlaySounds = playSounds;
schedule(mPlaySoundsTask, true, new Runnable() { schedule(mPlaySoundsOp, true, new Runnable() {
@Override @Override
public void run() { public void run() {
applyPlaySounds(old); applyPlaySounds(old);

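The Camera1 changes above replace the monolithic onStart()/onStop() with per-step methods that each return a Play Services Task. A condensed sketch of the two shapes such a step can take (class and method names here are illustrative, not part of the patch):

import com.google.android.gms.tasks.Task;
import com.google.android.gms.tasks.TaskCompletionSource;
import com.google.android.gms.tasks.Tasks;

// Illustrative only: the two ways a step method can complete in the new design.
class StepSketch {

    // A synchronous step: do the work inline and return an already-completed Task.
    Task<Void> synchronousStep() {
        // ... open the camera, apply parameters, bind surfaces, etc. ...
        return Tasks.forResult(null);
    }

    // An asynchronous step: hand back a Task that some callback completes later.
    Task<Void> asynchronousStep() {
        final TaskCompletionSource<Void> source = new TaskCompletionSource<>();
        // ... start the async work; from its callback, call:
        //     source.trySetResult(null);   on success
        //     source.trySetException(e);   on failure
        return source.getTask();
    }
}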
@ -4,7 +4,6 @@ import android.annotation.SuppressLint;
import android.content.Context; import android.content.Context;
import android.graphics.PointF; import android.graphics.PointF;
import android.graphics.SurfaceTexture; import android.graphics.SurfaceTexture;
import android.hardware.Camera;
import android.hardware.camera2.CameraAccessException; import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCaptureSession; import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraCharacteristics; import android.hardware.camera2.CameraCharacteristics;
@ -17,6 +16,9 @@ import android.os.Build;
import android.view.Surface; import android.view.Surface;
import android.view.SurfaceHolder; import android.view.SurfaceHolder;
import com.google.android.gms.tasks.Task;
import com.google.android.gms.tasks.TaskCompletionSource;
import com.google.android.gms.tasks.Tasks;
import com.otaliastudios.cameraview.CameraException; import com.otaliastudios.cameraview.CameraException;
import com.otaliastudios.cameraview.CameraLogger; import com.otaliastudios.cameraview.CameraLogger;
import com.otaliastudios.cameraview.CameraOptions; import com.otaliastudios.cameraview.CameraOptions;
@ -30,15 +32,16 @@ import com.otaliastudios.cameraview.controls.Hdr;
import com.otaliastudios.cameraview.controls.Mode; import com.otaliastudios.cameraview.controls.Mode;
import com.otaliastudios.cameraview.controls.WhiteBalance; import com.otaliastudios.cameraview.controls.WhiteBalance;
import com.otaliastudios.cameraview.gesture.Gesture; import com.otaliastudios.cameraview.gesture.Gesture;
import com.otaliastudios.cameraview.internal.utils.Task; import com.otaliastudios.cameraview.internal.utils.Op;
import com.otaliastudios.cameraview.size.AspectRatio; import com.otaliastudios.cameraview.size.AspectRatio;
import com.otaliastudios.cameraview.size.Size; import com.otaliastudios.cameraview.size.Size;
import java.io.File; import java.io.File;
import java.io.IOException;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Arrays; import java.util.Arrays;
import java.util.List; import java.util.List;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import androidx.annotation.NonNull; import androidx.annotation.NonNull;
import androidx.annotation.Nullable; import androidx.annotation.Nullable;
@ -55,10 +58,9 @@ public class Camera2Engine extends CameraEngine {
private final CameraManager mManager; private final CameraManager mManager;
private String mCameraId; private String mCameraId;
private CameraDevice mCamera; private CameraDevice mCamera;
private CameraCaptureSession mSession; // TODO must be released and nulled private CameraCaptureSession mSession;
private CaptureRequest.Builder mPreviewStreamRequestBuilder; // TODO must be nulled private CaptureRequest.Builder mPreviewStreamRequestBuilder;
private CaptureRequest mPreviewStreamRequest; // TODO must be nulled private CaptureRequest mPreviewStreamRequest;
private boolean mIsBound = false;
public Camera2Engine(Callback callback) { public Camera2Engine(Callback callback) {
super(callback); super(callback);
@ -66,15 +68,19 @@ public class Camera2Engine extends CameraEngine {
mManager = (CameraManager) mCallback.getContext().getSystemService(Context.CAMERA_SERVICE); mManager = (CameraManager) mCallback.getContext().getSystemService(Context.CAMERA_SERVICE);
} }
private void schedule(@Nullable final Task<Void> task, final boolean ensureAvailable, final Runnable action) { private boolean isCameraAvailable() {
return getEngineState() == STATE_STARTED;
}
private void schedule(@Nullable final Op<Void> op, final boolean ensureAvailable, final Runnable action) {
mHandler.post(new Runnable() { mHandler.post(new Runnable() {
@Override @Override
public void run() { public void run() {
if (ensureAvailable && !isCameraAvailable()) { if (ensureAvailable && !isCameraAvailable()) {
if (task != null) task.end(null); if (op != null) op.end(null);
} else { } else {
action.run(); action.run();
if (task != null) task.end(null); if (op != null) op.end(null);
} }
} }
}); });
@ -89,10 +95,41 @@ public class Camera2Engine extends CameraEngine {
} }
@NonNull @NonNull
private Size computePreviewStreamSize() throws CameraAccessException { private CameraException createCameraException(@NonNull CameraAccessException exception) {
int reason;
switch (exception.getReason()) {
case CameraAccessException.CAMERA_DISABLED: reason = CameraException.REASON_FAILED_TO_CONNECT; break;
case CameraAccessException.CAMERA_ERROR: reason = CameraException.REASON_DISCONNECTED; break;
case CameraAccessException.CAMERA_DISCONNECTED: reason = CameraException.REASON_DISCONNECTED; break;
case CameraAccessException.CAMERA_IN_USE: reason = CameraException.REASON_FAILED_TO_CONNECT; break;
case CameraAccessException.MAX_CAMERAS_IN_USE: reason = CameraException.REASON_FAILED_TO_CONNECT; break;
default: reason = CameraException.REASON_UNKNOWN; break;
}
return new CameraException(exception, reason);
}
@NonNull
private CameraException createCameraException(int stateCallbackError) {
int reason;
switch (stateCallbackError) {
case CameraDevice.StateCallback.ERROR_CAMERA_DISABLED: reason = CameraException.REASON_FAILED_TO_CONNECT; break; // Device policy
case CameraDevice.StateCallback.ERROR_CAMERA_DEVICE: reason = CameraException.REASON_FAILED_TO_CONNECT; break; // Fatal error
case CameraDevice.StateCallback.ERROR_CAMERA_SERVICE: reason = CameraException.REASON_FAILED_TO_CONNECT; break; // Fatal error, device might have to be restarted
case CameraDevice.StateCallback.ERROR_CAMERA_IN_USE: reason = CameraException.REASON_FAILED_TO_CONNECT; break;
case CameraDevice.StateCallback.ERROR_MAX_CAMERAS_IN_USE: reason = CameraException.REASON_FAILED_TO_CONNECT; break;
default: reason = CameraException.REASON_UNKNOWN; break;
}
return new CameraException(reason);
}
@NonNull
@Override
protected List<Size> getPreviewStreamAvailableSizes() {
try {
CameraCharacteristics characteristics = mManager.getCameraCharacteristics(mCameraId); CameraCharacteristics characteristics = mManager.getCameraCharacteristics(mCameraId);
StreamConfigurationMap streamMap = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP); StreamConfigurationMap streamMap = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
if (streamMap == null) throw new RuntimeException("StreamConfigurationMap is null. Should not happen."); if (streamMap == null)
throw new RuntimeException("StreamConfigurationMap is null. Should not happen.");
// This works because our previews return either a SurfaceTexture or a SurfaceHolder, which are // This works because our previews return either a SurfaceTexture or a SurfaceHolder, which are
// accepted class types by the getOutputSizes method. // accepted class types by the getOutputSizes method.
android.util.Size[] sizes = streamMap.getOutputSizes(mPreview.getOutputClass()); android.util.Size[] sizes = streamMap.getOutputSizes(mPreview.getOutputClass());
@ -101,76 +138,64 @@ public class Camera2Engine extends CameraEngine {
Size add = new Size(size.getWidth(), size.getHeight()); Size add = new Size(size.getWidth(), size.getHeight());
if (!candidates.contains(add)) candidates.add(add); if (!candidates.contains(add)) candidates.add(add);
} }
return computePreviewStreamSize(candidates); return candidates;
} catch (CameraAccessException e) {
throw createCameraException(e);
}
} }
private boolean collectCameraId() throws CameraAccessException { private boolean collectCameraId() {
int internalFacing = mMapper.map(mFacing); int internalFacing = mMapper.map(mFacing);
int cameras = mManager.getCameraIdList().length; String[] cameraIds = null;
LOG.i("collectCameraId", "Facing:", mFacing, "Internal:", internalFacing, "Cameras:", cameras); try {
for (String cameraId : mManager.getCameraIdList()) { cameraIds = mManager.getCameraIdList();
} catch (CameraAccessException e) {
// This should never happen; I don't see how it could crash here.
// However, let's throw an unrecoverable exception just in case.
throw createCameraException(e);
}
LOG.i("collectCameraId", "Facing:", mFacing, "Internal:", internalFacing, "Cameras:", cameraIds.length);
for (String cameraId : cameraIds) {
try {
CameraCharacteristics characteristics = mManager.getCameraCharacteristics(cameraId); CameraCharacteristics characteristics = mManager.getCameraCharacteristics(cameraId);
if (internalFacing == readCharacteristic(characteristics, CameraCharacteristics.LENS_FACING, -99)) { if (internalFacing == readCharacteristic(characteristics, CameraCharacteristics.LENS_FACING, -99)) {
mCameraId = cameraId; mCameraId = cameraId;
mSensorOffset = readCharacteristic(characteristics, CameraCharacteristics.SENSOR_ORIENTATION, 0); mSensorOffset = readCharacteristic(characteristics, CameraCharacteristics.SENSOR_ORIENTATION, 0);
return true; return true;
} }
} catch (CameraAccessException ignore) {
// This specific camera has been disconnected.
// Keep searching through the other camera ids.
} }
return false;
}
private boolean isCameraAvailable() {
switch (getState()) {
// If we are stopped, don't.
case STATE_STOPPED:
return false;
// If we are going to be closed, don't act on camera.
// Even if mCamera != null, it might have been released.
case STATE_STOPPING:
return false;
// If we are started, mCamera should never be null.
case STATE_STARTED:
return true;
// If we are starting, theoretically we could act.
// Just check that camera is available.
case STATE_STARTING:
return mCamera != null;
} }
return false; return false;
} }
@SuppressLint("MissingPermission")
@NonNull
@Override @Override
protected void onStart() { protected Task<Void> onStartEngine() {
if (isCameraAvailable()) { final TaskCompletionSource<Void> task = new TaskCompletionSource<>();
LOG.w("onStart:", "Camera not available. Should not happen.");
onStop(); // Should not happen.
}
try { try {
if (collectCameraId()) { boolean hasCamera = collectCameraId();
createCamera(); if (!hasCamera) {
LOG.i("onStart:", "Ended"); LOG.e("onStartEngine:", "No camera available for facing", mFacing);
} else {
LOG.e("onStart:", "No camera available for facing", mFacing);
throw new CameraException(CameraException.REASON_NO_CAMERA); throw new CameraException(CameraException.REASON_NO_CAMERA);
} }
} catch (CameraAccessException e) {
// TODO
}
}
@SuppressLint("MissingPermission") // We have a valid camera for this Facing. Go on.
private void createCamera() throws CameraAccessException {
mManager.openCamera(mCameraId, new CameraDevice.StateCallback() { mManager.openCamera(mCameraId, new CameraDevice.StateCallback() {
@Override @Override
public void onOpened(@NonNull CameraDevice camera) { public void onOpened(@NonNull CameraDevice camera) {
mCamera = camera; mCamera = camera;
// TODO Set parameters that might have been set before the camera was opened. // Set parameters that might have been set before the camera was opened.
try { try {
LOG.i("createCamera:", "Applying default parameters."); LOG.i("createCamera:", "Applying default parameters.");
CameraCharacteristics characteristics = mManager.getCameraCharacteristics(mCameraId); CameraCharacteristics characteristics = mManager.getCameraCharacteristics(mCameraId);
mCameraOptions = new CameraOptions(mManager, characteristics, flip(REF_SENSOR, REF_VIEW)); mCameraOptions = new CameraOptions(mManager, characteristics, flip(REF_SENSOR, REF_VIEW));
// applyDefaultFocus(params); // applyDefaultFocus(params); TODO
// applyFlash(params, Flash.OFF); // applyFlash(params, Flash.OFF);
// applyLocation(params, null); // applyLocation(params, null);
// applyWhiteBalance(params, WhiteBalance.AUTO); // applyWhiteBalance(params, WhiteBalance.AUTO);
@ -179,59 +204,84 @@ public class Camera2Engine extends CameraEngine {
// params.setRecordingHint(mMode == Mode.VIDEO); // params.setRecordingHint(mMode == Mode.VIDEO);
// mCamera.setParameters(params); // mCamera.setParameters(params);
} catch (CameraAccessException e) { } catch (CameraAccessException e) {
// TODO task.trySetException(createCameraException(e));
throw new RuntimeException(e); return;
}
// Set display orientation, not allowed during preview
// TODO not needed anymore? mCamera.setDisplayOrientation(offset(REF_SENSOR, REF_VIEW));
try {
if (shouldBindToSurface()) bindToSurface("onStart");
} catch (CameraAccessException e) {
// TODO
throw new RuntimeException(e);
} }
task.trySetResult(null);
} }
@Override @Override
public void onDisconnected(@NonNull CameraDevice camera) { public void onDisconnected(@NonNull CameraDevice camera) {
// TODO not sure what to do here. maybe stop(). Read docs. // Not sure if this is called INSTEAD of onOpened() or can be called after as well.
// However, using trySetException should address this problem - it will only trigger
// if the task has no result.
//
// Docs say to release this camera instance, however, since we throw an unrecoverable CameraException,
// this will trigger a stop() through the exception handler.
task.trySetException(new CameraException(CameraException.REASON_DISCONNECTED));
} }
@Override @Override
public void onError(@NonNull CameraDevice camera, int error) { public void onError(@NonNull CameraDevice camera, int error) {
// TODO task.trySetException(createCameraException(error));
} }
}, null); }, null);
} catch (CameraAccessException e) {
throw createCameraException(e);
} }
return task.getTask();
private boolean shouldBindToSurface() {
return isCameraAvailable() && mPreview != null && mPreview.hasSurface() && !mIsBound;
} }
/** @NonNull
* The act of binding an "open" camera to a "ready" preview. @Override
* These can happen at different times but we want to end up here. protected Task<Void> onStartBind() {
* At this point we are sure that mPreview is not null. LOG.i("onStartBind:", "Started");
*/ final TaskCompletionSource<Void> task = new TaskCompletionSource<>();
@SuppressLint("Recycle")
@WorkerThread // Compute sizes.
private void bindToSurface(final @NonNull String trigger) throws CameraAccessException { mCaptureSize = computeCaptureSize();
LOG.i("bindToSurface:", "Started"); mPreviewStreamSize = computePreviewStreamSize();
Object output = mPreview.getOutput();
// Create a preview surface with the correct size. In Camera2, instead of applying it to
// the camera params object, we must resize our own surfaces.
final Object output = mPreview.getOutput();
Surface previewSurface; Surface previewSurface;
if (output instanceof SurfaceHolder) { if (output instanceof SurfaceHolder) {
try {
// This must be called from the UI thread...
Tasks.await(Tasks.call(new Callable<Void>() {
@Override
public Void call() {
((SurfaceHolder) output).setFixedSize(mPreviewStreamSize.getWidth(), mPreviewStreamSize.getHeight());
return null;
}
}));
} catch (ExecutionException | InterruptedException e) {
throw new CameraException(e, CameraException.REASON_FAILED_TO_CONNECT);
}
previewSurface = ((SurfaceHolder) output).getSurface(); previewSurface = ((SurfaceHolder) output).getSurface();
} else if (output instanceof SurfaceTexture) { } else if (output instanceof SurfaceTexture) {
((SurfaceTexture) output).setDefaultBufferSize(mPreviewStreamSize.getWidth(), mPreviewStreamSize.getHeight());
previewSurface = new Surface((SurfaceTexture) output); previewSurface = new Surface((SurfaceTexture) output);
} else { } else {
throw new RuntimeException("Unknown CameraPreview output class."); throw new RuntimeException("Unknown CameraPreview output class.");
} }
// TODO: captureSize
/* if (mMode == Mode.PICTURE) {
params.setPictureSize(mCaptureSize.getWidth(), mCaptureSize.getHeight()); // <- allowed
} else {
// mCaptureSize in this case is a video size. The available video sizes are not necessarily
// a subset of the picture sizes, so we can't use the mCaptureSize value: it might crash.
// However, the setPictureSize() passed here is useless : we don't allow HQ pictures in video mode.
// While this might be lifted in the future, for now, just use a picture capture size.
Size pictureSize = computeCaptureSize(Mode.PICTURE);
params.setPictureSize(pictureSize.getWidth(), pictureSize.getHeight());
} */
//noinspection ArraysAsListWithZeroOrOneArgument //noinspection ArraysAsListWithZeroOrOneArgument
List<Surface> outputSurfaces = Arrays.asList(previewSurface); List<Surface> outputSurfaces = Arrays.asList(previewSurface);
try {
mPreviewStreamRequestBuilder = mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); mPreviewStreamRequestBuilder = mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
mPreviewStreamRequestBuilder.addTarget(previewSurface); mPreviewStreamRequestBuilder.addTarget(previewSurface);
@ -239,189 +289,115 @@ public class Camera2Engine extends CameraEngine {
mCamera.createCaptureSession(outputSurfaces, new CameraCaptureSession.StateCallback() { mCamera.createCaptureSession(outputSurfaces, new CameraCaptureSession.StateCallback() {
@Override @Override
public void onConfigured(@NonNull CameraCaptureSession session) { public void onConfigured(@NonNull CameraCaptureSession session) {
try {
mCaptureSize = computeCaptureSize();
mPreviewStreamSize = computePreviewStreamSize();
mSession = session; mSession = session;
mIsBound = true; task.trySetResult(null);
if (shouldStartPreview()) startPreview(trigger);
} catch (CameraAccessException e) {
// TODO
throw new RuntimeException(e);
}
} }
@Override @Override
public void onConfigureFailed(@NonNull CameraCaptureSession session) { public void onConfigureFailed(@NonNull CameraCaptureSession session) {
// TODO // I would say this is a library error, and as such we throw a RuntimeException.
String message = LOG.e("onConfigureFailed! Session", session);
throw new RuntimeException(message);
} }
}, null); }, null);
} catch (CameraAccessException e) {
throw createCameraException(e);
} }
return task.getTask();
private boolean shouldStartPreview() {
return isCameraAvailable() && mIsBound;
} }
/** @NonNull
* To be called when the preview size is setup or changed. @Override
* @param trigger a log helper protected Task<Void> onStartPreview() {
*/ LOG.i("onStartPreview", "Dispatching onCameraPreviewStreamSizeChanged.");
private void startPreview(@NonNull String trigger) {
LOG.i(trigger, "Dispatching onCameraPreviewStreamSizeChanged.");
mCallback.onCameraPreviewStreamSizeChanged(); mCallback.onCameraPreviewStreamSizeChanged();
Size previewSize = getPreviewStreamSize(REF_VIEW); Size previewSizeForView = getPreviewStreamSize(REF_VIEW);
if (previewSize == null) { if (previewSizeForView == null) {
throw new IllegalStateException("previewStreamSize should not be null at this point."); throw new IllegalStateException("previewStreamSize should not be null at this point.");
} }
mPreview.setStreamSize(previewSize.getWidth(), previewSize.getHeight()); mPreview.setStreamSize(previewSizeForView.getWidth(), previewSizeForView.getHeight());
// TODO mPreviewStreamFormat = params.getPreviewFormat(); // Set the preview rotation.
mPreview.setDrawRotation(mDisplayOffset);
// TODO: previewSize and captureSize
/* params.setPreviewSize(mPreviewStreamSize.getWidth(), mPreviewStreamSize.getHeight()); // <- not allowed during preview
if (mMode == Mode.PICTURE) {
params.setPictureSize(mCaptureSize.getWidth(), mCaptureSize.getHeight()); // <- allowed
} else {
// mCaptureSize in this case is a video size. The available video sizes are not necessarily
// a subset of the picture sizes, so we can't use the mCaptureSize value: it might crash.
// However, the setPictureSize() passed here is useless : we don't allow HQ pictures in video mode.
// While this might be lifted in the future, for now, just use a picture capture size.
Size pictureSize = computeCaptureSize(Mode.PICTURE);
params.setPictureSize(pictureSize.getWidth(), pictureSize.getHeight());
} */
// TODO mPreviewStreamFormat = params.getPreviewFormat();
// TODO mCamera.setPreviewCallbackWithBuffer(null); // Release anything left // TODO mCamera.setPreviewCallbackWithBuffer(null); // Release anything left
// TODO mCamera.setPreviewCallbackWithBuffer(this); // Add ourselves // TODO mCamera.setPreviewCallbackWithBuffer(this); // Add ourselves
// TODO mFrameManager.allocateBuffers(ImageFormat.getBitsPerPixel(mPreviewStreamFormat), mPreviewStreamSize); // TODO mFrameManager.setUp(ImageFormat.getBitsPerPixel(mPreviewStreamFormat), mPreviewStreamSize);
LOG.i(trigger, "Starting preview with startPreview()."); LOG.i("onStartPreview", "Starting preview with startPreview().");
try { try {
mPreviewStreamRequest = mPreviewStreamRequestBuilder.build(); mPreviewStreamRequest = mPreviewStreamRequestBuilder.build();
mSession.setRepeatingRequest(mPreviewStreamRequest, null, null); mSession.setRepeatingRequest(mPreviewStreamRequest, null, null);
} catch (Exception e) { } catch (Exception e) {
LOG.e(trigger, "Failed to start preview.", e); // This is an unrecoverable exception that will stop everything.
LOG.e("onStartPreview", "Failed to start preview.", e);
throw new CameraException(e, CameraException.REASON_FAILED_TO_START_PREVIEW); throw new CameraException(e, CameraException.REASON_FAILED_TO_START_PREVIEW);
} }
LOG.i(trigger, "Started preview."); LOG.i("onStartPreview", "Started preview.");
return Tasks.forResult(null);
} }
@NonNull
@Override @Override
protected void onStop() { protected Task<Void> onStopPreview() {
LOG.i("onStop:", "About to clean up.");
if (mVideoRecorder != null) { if (mVideoRecorder != null) {
mVideoRecorder.stop(); mVideoRecorder.stop();
mVideoRecorder = null; mVideoRecorder = null;
} }
if (mCamera != null) {
stopPreview();
if (mIsBound) unbindFromSurface();
destroyCamera();
}
mCameraOptions = null;
mCamera = null;
mPreviewStreamSize = null;
mCaptureSize = null;
mIsBound = false;
LOG.w("onStop:", "Clean up.", "Returning.");
}
private void stopPreview() {
mPreviewStreamFormat = 0; mPreviewStreamFormat = 0;
mFrameManager.release(); getFrameManager().release();
// TODO mCamera.setPreviewCallbackWithBuffer(null); // Release anything left // TODO mCamera.setPreviewCallbackWithBuffer(null); // Release anything left
try { try {
// NOTE: should we wait for onReady() like docs say?
// Leaving this synchronous for now.
mSession.stopRepeating(); mSession.stopRepeating();
// TODO should wait for onReady?
} catch (CameraAccessException e) { } catch (CameraAccessException e) {
// This tells us that we should stop everything. It's better to throw an unrecoverable
// exception rather than just swallow this, so everything gets stopped.
LOG.w("stopRepeating failed!", e); LOG.w("stopRepeating failed!", e);
throw createCameraException(e);
} }
mPreviewStreamRequest = null; mPreviewStreamRequest = null;
return Tasks.forResult(null);
} }
private void unbindFromSurface() {
mIsBound = false; @NonNull
@Override
protected Task<Void> onStopBind() {
mPreviewStreamRequestBuilder = null; mPreviewStreamRequestBuilder = null;
mPreviewStreamSize = null; mPreviewStreamSize = null;
mCaptureSize = null; mCaptureSize = null;
mSession.close(); mSession.close();
mSession = null; mSession = null;
return Tasks.forResult(null);
} }
private void destroyCamera() {
@NonNull
@Override
protected Task<Void> onStopEngine() {
LOG.i("onStopEngine:", "About to clean up.");
try { try {
LOG.i("destroyCamera:", "Clean up.", "Releasing camera."); LOG.i("onStopEngine:", "Clean up.", "Releasing camera.");
mCamera.close(); mCamera.close();
LOG.i("destroyCamera:", "Clean up.", "Released camera."); LOG.i("onStopEngine:", "Clean up.", "Released camera.");
} catch (Exception e) { } catch (Exception e) {
LOG.w("destroyCamera:", "Clean up.", "Exception while releasing camera.", e); LOG.w("onStopEngine:", "Clean up.", "Exception while releasing camera.", e);
} }
mCamera = null; mCamera = null;
mCameraOptions = null; mCameraOptions = null;
LOG.w("onStopEngine:", "Returning.");
return Tasks.forResult(null);
} }
@WorkerThread
/**
* Preview surface is now available. If camera is open, set up.
* At this point we are sure that mPreview is not null.
*/
@Override
public void onSurfaceAvailable() {
LOG.i("onSurfaceAvailable:", "Size is", getPreviewSurfaceSize(REF_VIEW));
schedule(null, false, new Runnable() {
@Override
public void run() {
LOG.i("onSurfaceAvailable:", "Inside handler. About to bind.");
try {
if (shouldBindToSurface()) bindToSurface("onSurfaceAvailable");
} catch (CameraAccessException e) {
// TODO
throw new RuntimeException(e);
}
}
});
}
/**
* Preview surface did change its size. Compute a new preview size.
* This requires stopping and restarting the preview.
* At this point we are sure that mPreview is not null.
*/
@Override
public void onSurfaceChanged() {
LOG.i("onSurfaceChanged, size is", getPreviewSurfaceSize(REF_VIEW));
schedule(null, true, new Runnable() {
@Override
public void run() {
if (!mIsBound) return;
// Compute a new camera preview size and apply.
try {
Size newSize = computePreviewStreamSize();
if (newSize.equals(mPreviewStreamSize)) return;
LOG.i("onSurfaceChanged:", "Computed a new preview size. Going on.");
mPreviewStreamSize = newSize;
} catch (CameraAccessException e) {
// TODO
throw new RuntimeException(e);
}
stopPreview();
startPreview("onSurfaceChanged:");
}
});
}
@Override
public void onSurfaceDestroyed() {
LOG.i("onSurfaceDestroyed");
schedule(null, true, new Runnable() {
@Override @Override
public void run() { protected void onPreviewStreamSizeChanged() {
stopPreview(); restartBind();
if (mIsBound) unbindFromSurface();
}
});
} }
@ -436,7 +412,6 @@ public class Camera2Engine extends CameraEngine {
@Override @Override
public void onBufferAvailable(@NonNull byte[] buffer) { public void onBufferAvailable(@NonNull byte[] buffer) {
@ -455,13 +430,7 @@ public class Camera2Engine extends CameraEngine {
schedule(null, true, new Runnable() { schedule(null, true, new Runnable() {
@Override @Override
public void run() { public void run() {
boolean success; if (collectCameraId()) {
try {
success = collectCameraId();
} catch (CameraAccessException e) {
success = false;
}
if (success) {
restart(); restart();
} else { } else {
mFacing = old; mFacing = old;

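The Camera2 onStartEngine() above bridges CameraDevice.StateCallback into a Task via TaskCompletionSource. A simplified, self-contained sketch of that bridge, with the error mapping reduced to RuntimeException for brevity (not the library's actual helper):

import android.annotation.SuppressLint;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CameraManager;
import androidx.annotation.NonNull;
import com.google.android.gms.tasks.Task;
import com.google.android.gms.tasks.TaskCompletionSource;

// Illustrative only: turning CameraDevice.StateCallback into a Task<CameraDevice>.
class OpenCameraSketch {

    @SuppressLint("MissingPermission") // Assume the CAMERA permission was checked by the caller.
    static Task<CameraDevice> open(@NonNull CameraManager manager, @NonNull String cameraId) {
        final TaskCompletionSource<CameraDevice> source = new TaskCompletionSource<>();
        try {
            manager.openCamera(cameraId, new CameraDevice.StateCallback() {
                @Override public void onOpened(@NonNull CameraDevice camera) {
                    source.trySetResult(camera); // Engine start succeeded.
                }
                @Override public void onDisconnected(@NonNull CameraDevice camera) {
                    source.trySetException(new RuntimeException("Camera disconnected"));
                }
                @Override public void onError(@NonNull CameraDevice camera, int error) {
                    source.trySetException(new RuntimeException("Camera error: " + error));
                }
            }, null); // null handler: callbacks arrive on the calling thread's looper.
        } catch (CameraAccessException e) {
            source.trySetException(e);
        }
        return source.getTask();
    }
}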
@ -8,6 +8,13 @@ import android.location.Location;
import android.os.Handler; import android.os.Handler;
import android.os.Looper; import android.os.Looper;
import com.google.android.gms.tasks.Continuation;
import com.google.android.gms.tasks.OnCompleteListener;
import com.google.android.gms.tasks.OnFailureListener;
import com.google.android.gms.tasks.SuccessContinuation;
import com.google.android.gms.tasks.TaskCompletionSource;
import com.google.android.gms.tasks.Task;
import com.google.android.gms.tasks.Tasks;
import com.otaliastudios.cameraview.CameraException; import com.otaliastudios.cameraview.CameraException;
import com.otaliastudios.cameraview.CameraLogger; import com.otaliastudios.cameraview.CameraLogger;
import com.otaliastudios.cameraview.CameraOptions; import com.otaliastudios.cameraview.CameraOptions;
@ -15,7 +22,7 @@ import com.otaliastudios.cameraview.PictureResult;
import com.otaliastudios.cameraview.VideoResult; import com.otaliastudios.cameraview.VideoResult;
import com.otaliastudios.cameraview.frame.Frame; import com.otaliastudios.cameraview.frame.Frame;
import com.otaliastudios.cameraview.frame.FrameManager; import com.otaliastudios.cameraview.frame.FrameManager;
import com.otaliastudios.cameraview.internal.utils.Task; import com.otaliastudios.cameraview.internal.utils.Op;
import com.otaliastudios.cameraview.internal.utils.WorkerHandler; import com.otaliastudios.cameraview.internal.utils.WorkerHandler;
import com.otaliastudios.cameraview.picture.PictureRecorder; import com.otaliastudios.cameraview.picture.PictureRecorder;
import com.otaliastudios.cameraview.preview.CameraPreview; import com.otaliastudios.cameraview.preview.CameraPreview;
@ -42,30 +49,43 @@ import java.io.File;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Collection; import java.util.Collection;
import java.util.List; import java.util.List;
import java.util.concurrent.Callable;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Executor;
/** /**
* PROCESS * PROCESS
* Setting up the Camera is usually a 4 steps process: * Setting up the Camera is usually a 4 steps process:
* 1. Setting up the Surface (done by {@link CameraPreview} * 1. Setting up the Surface. Done by {@link CameraPreview}.
* 2. Opening the camera (done by us) * 2. Starting the camera. Done by us. See {@link #startEngine()}, {@link #onStartEngine()}.
* 3. Binding the camera to the surface (done by us) * 3. Binding the camera to the surface. Done by us. See {@link #startBind()}, {@link #onStartBind()}.
* 4. Starting the camera preview (done by us) * 4. Streaming the camera preview. Done by us. See {@link #startPreview()}, {@link #onStartPreview()}.
* *
* The first two steps can actually happen at the same time, anyway * The first two steps can actually happen at the same time, anyway
* the order is not guaranteed. * the order is not guaranteed; we just get a callback from the Preview when step 1 happens.
* So at the end of both step 1 and 2, the engine should check if both have * So at the end of both step 1 and 2, the engine should check if both have
* been performed and trigger the steps 3 and 4. * been performed and trigger the steps 3 and 4.
* *
* We use an abstraction for each step, {@link CameraEngineStep}, which manages that step's state
* and ensures that a step's start and stop operations are never called before the previous
* operation has ended.
*
* *
* STATE * STATE
* In the {@link CameraEngine} notation, * We only expose generic {@link #start()} and {@link #stop()} calls to the outside.
* - START [Async] means doing step 2 (which will eventually trigger 3 and 4) * The external users of this class are most likely interested in whether we have completed step 2
* - STOP [Async] means undoing 4 (if needed), undoing 3 (if needed), then undoing 2. * or not, since that tells us whether we can act on the camera, rather than knowing about steps 3 and 4.
* - RESTART [Async] means completing a STOP then a START *
* - DESTROY [Sync] means performing a silent and synchronous STOP, ignoring all exceptions. This can make the engine unusable. * So in the {@link CameraEngine} notation,
* - {@link #start()}: ASYNC - starts the engine (S2). When possible, at a later time, S3 and S4 are also performed.
* - {@link #stop()}: ASYNC - stops everything: undoes S4, then S3, then S2.
* - {@link #restart()}: ASYNC - completes a stop then a start.
* - {@link #destroy()}: ASYNC - performs a {@link #stop()} that will go on regardless of any exceptions, without throwing.
* Makes the engine unusable and clears resources.
* *
* So the engine state will be: * For example, we expose the engine (S2) state through {@link #getEngineState()}. It will be:
* - {@link #STATE_STARTING} if we're into step 2 * - {@link #STATE_STARTING} if we're into step 2
* - {@link #STATE_STARTED} if we've completed step 2. No clue about 3 or 4. * - {@link #STATE_STARTED} if we've completed step 2. No clue about 3 or 4.
* - {@link #STATE_STOPPING} if we're undoing steps 4, 3 and 2. * - {@link #STATE_STOPPING} if we're undoing steps 4, 3 and 2.
@ -74,9 +94,24 @@ import java.util.List;
* *
* THREADING * THREADING
* Subclasses should always execute code on the thread given by {@link #mHandler}. * Subclasses should always execute code on the thread given by {@link #mHandler}.
* This thread has a special {@link Thread.UncaughtExceptionHandler} that handles exceptions * For convenience, all the setup and tear down methods are called on this engine thread:
* and dispatches error to the callback (instead of crashing the app). * {@link #onStartEngine()}, {@link #onStartBind()}, {@link #onStartPreview()} to setup and
* This lets subclasses run code safely and directly throw {@link CameraException}s when needed. * {@link #onStopEngine()}, {@link #onStopBind()}, {@link #onStopPreview()} to tear down.
* However, these methods are not forced to be synchronous and can instead simply return a Google
* {@link Task}.
*
* Other setters are executed on the caller's thread, so subclasses should make sure they post
* to the engine handler before acting on their own state.
*
*
* ERROR HANDLING
* The {@link #mHandler} thread has a special {@link Thread.UncaughtExceptionHandler} that handles exceptions
* and dispatches errors to the callback (instead of crashing the app). This lets subclasses run code
* safely and directly throw {@link CameraException}s when needed.
*
* For convenience, the two main methods {@link #onStartEngine()} and {@link #onStopEngine()} are already
* called on the engine thread, but they can still be asynchronous by returning a Google's
* {@link com.google.android.gms.tasks.Task}.
*/ */
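As a rough illustration of the contract above, here is a minimal caller-side sketch (not part of this commit) driving the engine through the generic start()/stop() calls. The engine instance is assumed to be an already-constructed CameraEngine subclass, such as the one a CameraView host would hold.

import com.google.android.gms.tasks.OnSuccessListener;
import com.google.android.gms.tasks.Task;

class EngineCallerSketch {
    void open(final CameraEngine engine) {
        // S2 starts now; S3 and S4 will follow later, once the preview surface is ready.
        Task<Void> startTask = engine.start();
        startTask.addOnSuccessListener(new OnSuccessListener<Void>() {
            @Override
            public void onSuccess(Void aVoid) {
                if (engine.getEngineState() == CameraEngine.STATE_STARTED) {
                    // The engine step has completed: safe to act on the camera
                    // (set zoom, flash, and so on).
                }
            }
        });
    }

    void close(CameraEngine engine) {
        engine.stop(); // undoes S4, then S3, then S2
    }
}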
public abstract class CameraEngine implements public abstract class CameraEngine implements
CameraPreview.SurfaceCallback, CameraPreview.SurfaceCallback,
@ -101,17 +136,19 @@ public abstract class CameraEngine implements
private static final String TAG = CameraEngine.class.getSimpleName(); private static final String TAG = CameraEngine.class.getSimpleName();
private static final CameraLogger LOG = CameraLogger.create(TAG); private static final CameraLogger LOG = CameraLogger.create(TAG);
static final int STATE_STOPPING = -1; // Camera is about to be stopped. @SuppressWarnings("WeakerAccess")
public static final int STATE_STOPPED = 0; // Camera is stopped. public static final int STATE_STOPPING = CameraEngineStep.STATE_STOPPING;
static final int STATE_STARTING = 1; // Camera is about to start. public static final int STATE_STOPPED = CameraEngineStep.STATE_STOPPED;
public static final int STATE_STARTED = 2; // Camera is available and we can set parameters. @SuppressWarnings("WeakerAccess")
public static final int STATE_STARTING = CameraEngineStep.STATE_STARTING;
public static final int STATE_STARTED = CameraEngineStep.STATE_STARTED;
public static final int REF_SENSOR = 0; public static final int REF_SENSOR = 0;
public static final int REF_VIEW = 1; public static final int REF_VIEW = 1;
public static final int REF_OUTPUT = 2; public static final int REF_OUTPUT = 2;
protected final Callback mCallback; protected final Callback mCallback;
protected final FrameManager mFrameManager; private final FrameManager mFrameManager;
protected CameraPreview mPreview; protected CameraPreview mPreview;
protected WorkerHandler mHandler; protected WorkerHandler mHandler;
@VisibleForTesting Handler mCrashHandler; @VisibleForTesting Handler mCrashHandler;
@ -149,21 +186,37 @@ public abstract class CameraEngine implements
protected long mAutoFocusResetDelayMillis; protected long mAutoFocusResetDelayMillis;
protected int mSensorOffset; protected int mSensorOffset;
private int mDisplayOffset; protected int mDisplayOffset;
private int mDeviceOrientation; protected int mDeviceOrientation;
// Subclasses should not change this. Use getState() instead. private final CameraEngineStep.Callback mStepCallback = new CameraEngineStep.Callback() {
@VisibleForTesting int mState = STATE_STOPPED; @Override @NonNull public Executor getExecutor() { return mHandler.getExecutor(); }
@Override public void handleException(@NonNull Exception exception) {
CameraEngine.this.handleException(Thread.currentThread(), exception, false);
}
};
@VisibleForTesting CameraEngineStep mEngineStep = new CameraEngineStep("engine", mStepCallback);
private CameraEngineStep mBindStep = new CameraEngineStep("bind", mStepCallback);
private CameraEngineStep mPreviewStep = new CameraEngineStep("preview", mStepCallback);
private CameraEngineStep mAllStep = new CameraEngineStep("all", mStepCallback);
// Used for testing. // Used for testing.
@VisibleForTesting(otherwise = VisibleForTesting.PROTECTED) Task<Void> mZoomTask = new Task<>(); @VisibleForTesting(otherwise = VisibleForTesting.PROTECTED)
@VisibleForTesting(otherwise = VisibleForTesting.PROTECTED) Task<Void> mExposureCorrectionTask = new Task<>(); Op<Void> mZoomOp = new Op<>();
@VisibleForTesting(otherwise = VisibleForTesting.PROTECTED) Task<Void> mFlashTask = new Task<>(); @VisibleForTesting(otherwise = VisibleForTesting.PROTECTED)
@VisibleForTesting(otherwise = VisibleForTesting.PROTECTED) Task<Void> mWhiteBalanceTask = new Task<>(); Op<Void> mExposureCorrectionOp = new Op<>();
@VisibleForTesting(otherwise = VisibleForTesting.PROTECTED) Task<Void> mHdrTask = new Task<>(); @VisibleForTesting(otherwise = VisibleForTesting.PROTECTED)
@VisibleForTesting(otherwise = VisibleForTesting.PROTECTED) Task<Void> mLocationTask = new Task<>(); Op<Void> mFlashOp = new Op<>();
@VisibleForTesting(otherwise = VisibleForTesting.PROTECTED) Task<Void> mStartVideoTask = new Task<>(); @VisibleForTesting(otherwise = VisibleForTesting.PROTECTED)
@VisibleForTesting(otherwise = VisibleForTesting.PROTECTED) Task<Void> mPlaySoundsTask = new Task<>(); Op<Void> mWhiteBalanceOp = new Op<>();
@VisibleForTesting(otherwise = VisibleForTesting.PROTECTED)
Op<Void> mHdrOp = new Op<>();
@VisibleForTesting(otherwise = VisibleForTesting.PROTECTED)
Op<Void> mLocationOp = new Op<>();
@VisibleForTesting(otherwise = VisibleForTesting.PROTECTED)
Op<Void> mStartVideoOp = new Op<>();
@VisibleForTesting(otherwise = VisibleForTesting.PROTECTED)
Op<Void> mPlaySoundsOp = new Op<>();
protected CameraEngine(Callback callback) { protected CameraEngine(Callback callback) {
mCallback = callback; mCallback = callback;
@ -188,176 +241,526 @@ public abstract class CameraEngine implements
private class CrashExceptionHandler implements Thread.UncaughtExceptionHandler { private class CrashExceptionHandler implements Thread.UncaughtExceptionHandler {
@Override @Override
public void uncaughtException(final Thread thread, final Throwable throwable) { public void uncaughtException(final Thread thread, final Throwable throwable) {
// Something went wrong. Thread is terminated (about to?). handleException(thread, throwable, true);
// Move to other thread and release resources. }
}
/**
* A static exception handler used during destruction to avoid leaks,
* since the default handler is not static and the thread might survive the engine.
*/
private static class NoOpExceptionHandler implements Thread.UncaughtExceptionHandler {
@Override
public void uncaughtException(Thread t, Throwable e) {
// No-op.
}
}
/**
* Handles exceptions coming either from uncaught runtime errors in {@link #mHandler} code
* (through the {@link CrashExceptionHandler}), as might happen during standard mHandler.post()
* operations that subclasses do, or from errors caught by tasks and continuations that
* we launch here.
*
* In the first case, the thread is about to be terminated. In the second case,
* we can actually keep using it.
*
* @param thread the thread
* @param throwable the throwable
* @param fromExceptionHandler true if coming from exception handler
*/
private void handleException(@NonNull Thread thread, final @NonNull Throwable throwable, final boolean fromExceptionHandler) {
if (!(throwable instanceof CameraException)) { if (!(throwable instanceof CameraException)) {
// This is unexpected, either a bug or something the developer should know. // This is unexpected, either a bug or something the developer should know.
// Release and crash the UI thread so we get bug reports. // Release and crash the UI thread so we get bug reports.
LOG.e("uncaughtException:", "Unexpected exception:", throwable); LOG.e("uncaughtException:", "Unexpected exception:", throwable);
destroy();
mCrashHandler.post(new Runnable() { mCrashHandler.post(new Runnable() {
@Override @Override
public void run() { public void run() {
RuntimeException exception; destroy();
// Throws an unchecked exception without unnecessary wrapping.
if (throwable instanceof RuntimeException) { if (throwable instanceof RuntimeException) {
exception = (RuntimeException) throwable; throw (RuntimeException) throwable;
} else { } else {
exception = new RuntimeException(throwable); throw new RuntimeException(throwable);
} }
throw exception;
} }
}); });
} else { return;
final CameraException error = (CameraException) throwable; }
LOG.e("uncaughtException:", "Interrupting thread with state:", ss(), "due to CameraException:", error);
final boolean unrecoverable = error.isUnrecoverable(); final CameraException cameraException = (CameraException) throwable;
LOG.e("uncaughtException:", "Got CameraException:", cameraException, "on engine state:", getEngineStateName());
if (fromExceptionHandler) {
// Got to restart the handler.
thread.interrupt(); thread.interrupt();
// Restart handler.
mHandler = WorkerHandler.get("CameraViewEngine"); mHandler = WorkerHandler.get("CameraViewEngine");
mHandler.getThread().setUncaughtExceptionHandler(new CrashExceptionHandler()); mHandler.getThread().setUncaughtExceptionHandler(new CrashExceptionHandler());
mHandler.post(new Runnable() { }
mCallback.dispatchError(cameraException);
if (cameraException.isUnrecoverable()) {
// Stop everything (if needed) without notifying teardown errors.
stop(true);
}
}
//endregion
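For example, a subclass might lean on this machinery roughly as follows; this is only a sketch, and openCameraOrThrow() is a hypothetical helper that is not part of this diff.

// Inside a CameraEngine subclass: work posted to mHandler can simply throw.
// Uncaught throwables (including CameraException) reach handleException()
// through the thread's CrashExceptionHandler and are dispatched to the
// callback instead of crashing the app.
mHandler.post(new Runnable() {
    @Override
    public void run() {
        openCameraOrThrow(); // hypothetical helper, may throw CameraException
    }
});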
//region states and steps
public final int getEngineState() {
return mEngineStep.getState();
}
@SuppressWarnings("WeakerAccess")
public final int getBindState() {
return mBindStep.getState();
}
@SuppressWarnings("unused")
public final int getPreviewState() {
return mPreviewStep.getState();
}
@NonNull
private String getEngineStateName() {
return mEngineStep.getStateName();
}
private boolean canStartEngine() {
return mEngineStep.isStoppingOrStopped();
}
private boolean needsStopEngine() {
return mEngineStep.isStartedOrStarting();
}
private boolean canStartBind() {
return mEngineStep.isStarted()
&& mPreview != null
&& mPreview.hasSurface()
&& mBindStep.isStoppingOrStopped();
}
private boolean needsStopBind() {
return mBindStep.isStartedOrStarting();
}
private boolean canStartPreview() {
return mEngineStep.isStarted()
&& mBindStep.isStarted()
&& mPreviewStep.isStoppingOrStopped();
}
private boolean needsStopPreview() {
return mPreviewStep.isStartedOrStarting();
}
//endregion
//region Start & Stop the engine
@NonNull
@WorkerThread
private Task<Void> startEngine() {
if (canStartEngine()) {
mEngineStep.doStart(false, new Callable<Task<Void>>() {
@Override
public Task<Void> call() {
return onStartEngine();
}
}, new Runnable() {
@Override @Override
public void run() { public void run() {
if (unrecoverable) stopNow(); mCallback.dispatchOnCameraOpened(mCameraOptions);
mCallback.dispatchError(error);
} }
}); });
} }
return mEngineStep.getTask();
} }
@NonNull
@WorkerThread
private Task<Void> stopEngine(boolean swallowExceptions) {
if (needsStopEngine()) {
mEngineStep.doStop(swallowExceptions, new Callable<Task<Void>>() {
@Override
public Task<Void> call() {
return onStopEngine();
}
}, new Runnable() {
@Override
public void run() {
mCallback.dispatchOnCameraClosed();
}
});
}
return mEngineStep.getTask();
} }
/** /**
* A static exception handler used during destruction to avoid leaks, * Starts the engine.
* since the default handler is not static and the thread might survive the engine. * @return a task
*/ */
private static class NoOpExceptionHandler implements Thread.UncaughtExceptionHandler { @NonNull
@WorkerThread
protected abstract Task<Void> onStartEngine();
/**
* Stops the engine.
* Stop events should generally not throw exceptions. We
* want to release resources either way.
* @return a task
*/
@NonNull
@WorkerThread
protected abstract Task<Void> onStopEngine();
//endregion
//region Start & Stop binding
@NonNull
@WorkerThread
private Task<Void> startBind() {
if (canStartBind()) {
mBindStep.doStart(false, new Callable<Task<Void>>() {
@Override @Override
public void uncaughtException(Thread t, Throwable e) { public Task<Void> call() {
// No-op. return onStartBind();
}
});
} }
return mBindStep.getTask();
}
@NonNull
@WorkerThread
private Task<Void> stopBind(boolean swallowExceptions) {
if (needsStopBind()) {
mBindStep.doStop(swallowExceptions, new Callable<Task<Void>>() {
@Override
public Task<Void> call() {
return onStopBind();
}
});
}
return mBindStep.getTask();
} }
/** /**
* Not final due to mockito requirements, but this is basically * Starts the binding process.
* it, nothing more to do. * @return a task
*/ */
public void destroy() { @NonNull
LOG.i("destroy:", "state:", ss()); @WorkerThread
// Prevent CameraEngine leaks. Don't set to null, or exceptions protected abstract Task<Void> onStartBind();
// inside the standard stop() method might crash the main thread.
mHandler.getThread().setUncaughtExceptionHandler(new NoOpExceptionHandler()); /**
// Stop if needed. * Stops the binding process.
stopNow(); * Stop events should generally not throw exceptions. We
* want to release resources either way.
* @return a task
*/
@NonNull
@WorkerThread
protected abstract Task<Void> onStopBind();
@SuppressWarnings("WeakerAccess")
protected void restartBind() {
LOG.i("restartPreviewAndBind", "posting.");
mHandler.post(new Runnable() {
@Override
public void run() {
LOG.i("restartPreviewAndBind", "executing.");
stopPreview(false).continueWithTask(mHandler.getExecutor(), new Continuation<Void, Task<Void>>() {
@Override
public Task<Void> then(@NonNull Task<Void> task) {
return stopBind(false);
}
}).onSuccessTask(mHandler.getExecutor(), new SuccessContinuation<Void, Void>() {
@NonNull
@Override
public Task<Void> then(@Nullable Void aVoid) {
return startBind();
}
}).onSuccessTask(mHandler.getExecutor(), new SuccessContinuation<Void, Void>() {
@NonNull
@Override
public Task<Void> then(@Nullable Void aVoid) {
return startPreview();
}
});
}
});
} }
//endregion //endregion
//region Start&Stop //region Start & Stop preview
@NonNull @NonNull
private String ss() { @WorkerThread
switch (mState) { private Task<Void> startPreview() {
case STATE_STOPPING: return "STATE_STOPPING"; LOG.i("startPreview", "canStartPreview:", canStartPreview());
case STATE_STOPPED: return "STATE_STOPPED"; if (canStartPreview()) {
case STATE_STARTING: return "STATE_STARTING"; mPreviewStep.doStart(false, new Callable<Task<Void>>() {
case STATE_STARTED: return "STATE_STARTED"; @Override
public Task<Void> call() {
return onStartPreview();
} }
return "null"; });
}
return mPreviewStep.getTask();
} }
// Starts the preview asynchronously. @NonNull
public final void start() { @WorkerThread
LOG.i("Start:", "posting runnable. State:", ss()); private Task<Void> stopPreview(boolean swallowExceptions) {
LOG.i("stopPreview", "needsStopPreview:", needsStopPreview(), "swallowExceptions:", swallowExceptions);
if (needsStopPreview()) {
mPreviewStep.doStop(swallowExceptions, new Callable<Task<Void>>() {
@Override
public Task<Void> call() {
return onStopPreview();
}
});
}
return mPreviewStep.getTask();
}
@SuppressWarnings("WeakerAccess")
protected void restartPreview() {
LOG.i("restartPreview", "posting.");
mHandler.post(new Runnable() { mHandler.post(new Runnable() {
@Override @Override
public void run() { public void run() {
LOG.i("Start:", "executing. State:", ss()); LOG.i("restartPreview", "executing.");
if (mState >= STATE_STARTING) return; stopPreview(false);
mState = STATE_STARTING; startPreview();
LOG.i("Start:", "about to call onStart()", ss());
onStart();
LOG.i("Start:", "returned from onStart().", "Dispatching.", ss());
mState = STATE_STARTED;
mCallback.dispatchOnCameraOpened(mCameraOptions);
} }
}); });
} }
// Stops the preview asynchronously. /**
// Public & not final so we can verify with mockito in CameraViewTest * Starts the preview streaming.
public void stop() { * @return a task
LOG.i("Stop:", "posting runnable. State:", ss()); */
@NonNull
@WorkerThread
protected abstract Task<Void> onStartPreview();
/**
* Stops the preview streaming.
* Stop events should generally not throw exceptions. We
* want to release resources either way.
* @return a task
*/
@NonNull
@WorkerThread
protected abstract Task<Void> onStopPreview();
//endregion
//region Surface callbacks
/**
* The surface is now available, which means that step 1 has completed.
* If we have also completed step 2, go on with binding and streaming.
*/
@Override
public final void onSurfaceAvailable() {
LOG.i("onSurfaceAvailable:", "Size is", getPreviewSurfaceSize(REF_VIEW));
mHandler.post(new Runnable() { mHandler.post(new Runnable() {
@Override @Override
public void run() { public void run() {
LOG.i("Stop:", "executing. State:", ss()); startBind().onSuccessTask(mHandler.getExecutor(), new SuccessContinuation<Void, Void>() {
if (mState <= STATE_STOPPED) return; @NonNull
mState = STATE_STOPPING; @Override
LOG.i("Stop:", "about to call onStop()"); public Task<Void> then(@Nullable Void aVoid) {
onStop(); return startPreview();
LOG.i("Stop:", "returned from onStop().", "Dispatching."); }
mState = STATE_STOPPED; });
mCallback.dispatchOnCameraClosed();
} }
}); });
} }
// Stops the preview synchronously, ensuring no exceptions are thrown. @Override
final void stopNow() { public final void onSurfaceChanged() {
try { LOG.i("onSurfaceChanged:", "Size is", getPreviewSurfaceSize(REF_VIEW), "Posting.");
// Don't check, try stop again. mHandler.post(new Runnable() {
LOG.i("stopNow:", "State was:", ss()); @Override
if (mState == STATE_STOPPED) return; public void run() {
mState = STATE_STOPPING; LOG.i("onSurfaceChanged:",
onStop(); "Engine started?", mEngineStep.isStarted(),
mState = STATE_STOPPED; "Bind started?", mBindStep.isStarted());
LOG.i("stopNow:", "Stopped. State is:", ss()); if (!mEngineStep.isStarted()) return; // Too early
} catch (Exception e) { if (!mBindStep.isStarted()) return; // Too early
// Do nothing.
LOG.i("stopNow:", "Swallowing exception while stopping.", e); // Compute a new camera preview size and apply.
mState = STATE_STOPPED; Size newSize = computePreviewStreamSize();
if (newSize.equals(mPreviewStreamSize)) return;
LOG.i("onSurfaceChanged:", "Computed a new preview size. Going on.");
mPreviewStreamSize = newSize;
onPreviewStreamSizeChanged();
} }
});
} }
// Forces a restart. /**
@SuppressWarnings("WeakerAccess") * The preview stream size has changed. At this point, some engine might want to
protected final void restart() { * simply call {@link #restartPreview()}, others to {@link #restartBind()}.
LOG.i("Restart:", "posting runnable"); *
* It basically depends on the step at which the preview stream size is actually used.
*/
@WorkerThread
protected abstract void onPreviewStreamSizeChanged();
@Override
public final void onSurfaceDestroyed() {
LOG.i("onSurfaceDestroyed");
mHandler.post(new Runnable() { mHandler.post(new Runnable() {
@Override @Override
public void run() { public void run() {
LOG.i("Restart:", "executing. Needs stopping:", mState > STATE_STOPPED, ss()); stopPreview(false).onSuccessTask(mHandler.getExecutor(), new SuccessContinuation<Void, Void>() {
// Don't stop if stopped. @NonNull
if (mState > STATE_STOPPED) { @Override
mState = STATE_STOPPING; public Task<Void> then(@Nullable Void aVoid) {
onStop(); return stopBind(false);
mState = STATE_STOPPED; }
LOG.i("Restart:", "stopped. Dispatching.", ss()); });
mCallback.dispatchOnCameraClosed(); }
});
} }
LOG.i("Restart: about to start. State:", ss()); //endregion
mState = STATE_STARTING;
onStart(); //region Start & Stop all
mState = STATE_STARTED;
LOG.i("Restart: returned from start. Dispatching. State:", ss()); /**
mCallback.dispatchOnCameraOpened(mCameraOptions); * Not final due to mockito requirements, but this is basically
* it, nothing more to do.
*
* NOTE: Should not be called on the {@link #mHandler} thread! I think
* that would cause a deadlock, since we wait for {@link #stop()} to return.
*/
public void destroy() {
LOG.i("destroy:", "state:", getEngineStateName());
// Prevent CameraEngine leaks. Don't set to null, or exceptions
// inside the standard stop() method might crash the main thread.
mHandler.getThread().setUncaughtExceptionHandler(new NoOpExceptionHandler());
// Stop if needed, synchronously and silently.
// Cannot use Tasks.await() because we might be on the UI thread.
final CountDownLatch latch = new CountDownLatch(1);
stop(true).addOnCompleteListener(mHandler.getExecutor(), new OnCompleteListener<Void>() {
@Override
public void onComplete(@NonNull Task<Void> task) {
latch.countDown();
} }
}); });
try {
latch.await();
} catch (InterruptedException ignore) {}
} }
// Starts the preview. @SuppressWarnings("WeakerAccess")
// At the end of this method camera must be available, e.g. for setting parameters. protected final void restart() {
@WorkerThread LOG.i("Restart:", "calling stop and start");
protected abstract void onStart(); stop();
start();
}
// Stops the preview. @NonNull
@WorkerThread public Task<Void> start() {
protected abstract void onStop(); LOG.i("Start:", "posting runnable. State:", getEngineStateName());
final TaskCompletionSource<Void> outTask = new TaskCompletionSource<>();
mHandler.post(new Runnable() {
@Override
public void run() {
LOG.i("Start:", "executing runnable. State:", getEngineStateName());
if (mAllStep.isStoppingOrStopped()) {
mAllStep.doStart(false, new Callable<Task<Void>>() {
@Override
public Task<Void> call() {
return startEngine().addOnFailureListener(mHandler.getExecutor(), new OnFailureListener() {
@Override
public void onFailure(@NonNull Exception e) {
outTask.trySetException(e);
}
}).onSuccessTask(mHandler.getExecutor(), new SuccessContinuation<Void, Void>() {
@NonNull
@Override
public Task<Void> then(@Nullable Void aVoid) {
outTask.trySetResult(null);
return startBind();
}
}).onSuccessTask(mHandler.getExecutor(), new SuccessContinuation<Void, Void>() {
@NonNull
@Override
public Task<Void> then(@Nullable Void aVoid) {
return startPreview();
}
});
}
});
} else {
// NOTE: this returns early if we were STARTING.
outTask.trySetResult(null);
}
}
});
return outTask.getTask();
}
// Returns current state. @NonNull
public final int getState() { public Task<Void> stop() {
return mState; return stop(false);
}
@NonNull
private Task<Void> stop(final boolean swallowExceptions) {
LOG.i("Stop:", "posting runnable. State:", getEngineStateName());
final TaskCompletionSource<Void> outTask = new TaskCompletionSource<>();
mHandler.post(new Runnable() {
@Override
public void run() {
LOG.i("Stop:", "executing runnable. State:", getEngineStateName());
if (mAllStep.isStartedOrStarting()) {
mAllStep.doStop(swallowExceptions, new Callable<Task<Void>>() {
@Override
public Task<Void> call() {
return stopPreview(swallowExceptions).continueWithTask(mHandler.getExecutor(), new Continuation<Void, Task<Void>>() {
@Override
public Task<Void> then(@NonNull Task<Void> task) {
return stopBind(swallowExceptions);
}
}).continueWithTask(mHandler.getExecutor(), new Continuation<Void, Task<Void>>() {
@Override
public Task<Void> then(@NonNull Task<Void> task) {
return stopEngine(swallowExceptions);
}
}).continueWithTask(mHandler.getExecutor(), new Continuation<Void, Task<Void>>() {
@Override
public Task<Void> then(@NonNull Task<Void> task) {
if (task.isSuccessful()) {
outTask.trySetResult(null);
} else {
//noinspection ConstantConditions
outTask.trySetException(task.getException());
}
return task;
}
});
}
});
} else {
// NOTE: this returns early if we were STOPPING.
outTask.trySetResult(null);
}
}
});
return outTask.getTask();
} }
//endregion //endregion
@ -595,6 +998,9 @@ public abstract class CameraEngine implements
} }
} }
// o(S, V) - o(S, O)
// displayOffset - deviceOrientation
// Returns the offset between two reference systems. // Returns the offset between two reference systems.
final int offset(int fromReference, int toReference) { final int offset(int fromReference, int toReference) {
if (fromReference == toReference) return 0; if (fromReference == toReference) return 0;
@ -637,7 +1043,7 @@ public abstract class CameraEngine implements
@SuppressWarnings("SameParameterValue") @SuppressWarnings("SameParameterValue")
@Nullable @Nullable
final Size getPreviewSurfaceSize(int reference) { private Size getPreviewSurfaceSize(int reference) {
if (mPreview == null) return null; if (mPreview == null) return null;
return flip(REF_VIEW, reference) ? mPreview.getSurfaceSize().flip() : mPreview.getSurfaceSize(); return flip(REF_VIEW, reference) ? mPreview.getSurfaceSize().flip() : mPreview.getSurfaceSize();
} }
@ -727,9 +1133,19 @@ public abstract class CameraEngine implements
return result; return result;
} }
/**
* This is called anytime {@link #computePreviewStreamSize()} is called.
* This means that it should be called during the binding process, when
* we can be sure that the camera is available (engineState == STARTED).
* @return a list of available sizes for preview
*/
@NonNull
protected abstract List<Size> getPreviewStreamAvailableSizes();
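As a rough sketch of how an engine might satisfy this hook, a Camera1-style implementation could look like the following. Illustrative only: mCamera is an assumed field holding the open android.hardware.Camera, and the actual Camera1Engine code in this commit may differ.

@NonNull
@Override
protected List<Size> getPreviewStreamAvailableSizes() {
    // Sizes are returned in REF_SENSOR, as expected by computePreviewStreamSize().
    List<Camera.Size> supported = mCamera.getParameters().getSupportedPreviewSizes();
    List<Size> result = new ArrayList<>(supported.size());
    for (Camera.Size size : supported) {
        result.add(new Size(size.width, size.height));
    }
    return result;
}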
@NonNull @NonNull
@SuppressWarnings("WeakerAccess") @SuppressWarnings("WeakerAccess")
protected final Size computePreviewStreamSize(@NonNull List<Size> previewSizes) { protected final Size computePreviewStreamSize() {
@NonNull List<Size> previewSizes = getPreviewStreamAvailableSizes();
// These sizes come in REF_SENSOR. Since there is an external selector involved, // These sizes come in REF_SENSOR. Since there is an external selector involved,
// we must convert all of them to REF_VIEW, then flip back when returning. // we must convert all of them to REF_VIEW, then flip back when returning.
boolean flip = flip(REF_SENSOR, REF_VIEW); boolean flip = flip(REF_SENSOR, REF_VIEW);

@ -0,0 +1,170 @@
package com.otaliastudios.cameraview.engine;
import com.google.android.gms.tasks.Continuation;
import com.google.android.gms.tasks.OnFailureListener;
import com.google.android.gms.tasks.SuccessContinuation;
import com.google.android.gms.tasks.Task;
import com.google.android.gms.tasks.Tasks;
import com.otaliastudios.cameraview.CameraLogger;
import java.util.concurrent.Callable;
import java.util.concurrent.Executor;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.annotation.VisibleForTesting;
/**
* Represents one of the steps in the {@link CameraEngine} setup: for example, the engine step,
* the bind-to-surface step, and the preview step.
*
* A step is something that can be set up (started) or torn down (stopped), and
* steps can of course depend on each other.
*
* The purpose of this class is to manage the step state (stopping, stopped, starting or started)
* and, more importantly, to perform START and STOP operations in such a way that they do not
* overlap. For example, if we're stopping, we wait for the stop to finish before starting again.
*
* This is an important condition for simplifying the engine code.
* With Camera1, the only requirement was basically to use a single thread.
* With Camera2, which has an asynchronous API, further care must be taken.
*
* For this reason, we use Google's {@link Task} abstraction and only start new operations
* once the previous one has ended.
*
* <strong>This class is NOT thread safe!</strong>
*/
class CameraEngineStep {
private static final String TAG = CameraEngineStep.class.getSimpleName();
private static final CameraLogger LOG = CameraLogger.create(TAG);
interface Callback {
@NonNull
Executor getExecutor();
void handleException(@NonNull Exception exception);
}
static final int STATE_STOPPING = -1;
static final int STATE_STOPPED = 0;
static final int STATE_STARTING = 1;
static final int STATE_STARTED = 2;
private int state = STATE_STOPPED;
// To avoid dirty scenarios (e.g. calling stopXXX while XXX is starting),
// and since every operation can be asynchronous, we use some tasks for each step.
private Task<Void> task = Tasks.forResult(null);
private final String name;
private final Callback callback;
CameraEngineStep(@NonNull String name, @NonNull Callback callback) {
this.name = name.toUpperCase();
this.callback = callback;
}
int getState() {
return state;
}
@VisibleForTesting void setState(int newState) {
state = newState;
}
@NonNull
String getStateName() {
switch (state) {
case STATE_STOPPING: return name + "_STATE_STOPPING";
case STATE_STOPPED: return name + "_STATE_STOPPED";
case STATE_STARTING: return name + "_STATE_STARTING";
case STATE_STARTED: return name + "_STATE_STARTED";
}
return "null";
}
boolean isStoppingOrStopped() {
return state == STATE_STOPPING || state == STATE_STOPPED;
}
boolean isStartedOrStarting() {
return state == STATE_STARTING || state == STATE_STARTED;
}
boolean isStarted() {
return state == STATE_STARTED;
}
@NonNull
Task<Void> getTask() {
return task;
}
@SuppressWarnings({"SameParameterValue", "UnusedReturnValue"})
Task<Void> doStart(final boolean swallowExceptions, final @NonNull Callable<Task<Void>> op) {
return doStart(swallowExceptions, op, null);
}
Task<Void> doStart(final boolean swallowExceptions, final @NonNull Callable<Task<Void>> op, final @Nullable Runnable onStarted) {
LOG.i(name, "doStart", "Called. Enqueuing.");
task = task.continueWithTask(callback.getExecutor(), new Continuation<Void, Task<Void>>() {
@Override
public Task<Void> then(@NonNull Task<Void> task) throws Exception {
LOG.i(name, "doStart", "About to start. Setting state to STARTING");
setState(STATE_STARTING);
return op.call().addOnFailureListener(callback.getExecutor(), new OnFailureListener() {
@Override
public void onFailure(@NonNull Exception e) {
LOG.w(name, "doStart", "Failed with error", e, "Setting state to STOPPED");
setState(STATE_STOPPED);
if (!swallowExceptions) callback.handleException(e);
}
});
}
}).onSuccessTask(callback.getExecutor(), new SuccessContinuation<Void, Void>() {
@NonNull
@Override
public Task<Void> then(@Nullable Void aVoid) {
LOG.i(name, "doStart", "Succeeded! Setting state to STARTED");
setState(STATE_STARTED);
if (onStarted != null) onStarted.run();
return Tasks.forResult(null);
}
});
return task;
}
@SuppressWarnings("UnusedReturnValue")
Task<Void> doStop(final boolean swallowExceptions, final @NonNull Callable<Task<Void>> op) {
return doStop(swallowExceptions, op, null);
}
Task<Void> doStop(final boolean swallowExceptions, final @NonNull Callable<Task<Void>> op, final @Nullable Runnable onStopped) {
LOG.i(name, "doStop", "Called. Enqueuing.");
task = task.continueWithTask(callback.getExecutor(), new Continuation<Void, Task<Void>>() {
@Override
public Task<Void> then(@NonNull Task<Void> task) throws Exception {
LOG.i(name, "doStop", "About to stop. Setting state to STOPPING");
state = STATE_STOPPING;
return op.call().addOnFailureListener(callback.getExecutor(), new OnFailureListener() {
@Override
public void onFailure(@NonNull Exception e) {
LOG.w(name, "doStop", "Failed with error", e, "Setting state to STOPPED");
state = STATE_STOPPED;
if (!swallowExceptions) callback.handleException(e);
}
});
}
}).onSuccessTask(callback.getExecutor(), new SuccessContinuation<Void, Void>() {
@NonNull
@Override
public Task<Void> then(@Nullable Void aVoid) {
LOG.i(name, "doStop", "Succeeded! Setting state to STOPPED");
state = STATE_STOPPED;
if (onStopped != null) onStopped.run();
return Tasks.forResult(null);
}
});
return task;
}
}
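To make the doStart()/doStop() contract concrete, here is a minimal usage sketch living in the same package (illustrative only, not part of this commit); a plain single-thread executor stands in for the engine's WorkerHandler executor.

import com.google.android.gms.tasks.Task;
import com.google.android.gms.tasks.Tasks;

import java.util.concurrent.Callable;
import java.util.concurrent.Executor;
import java.util.concurrent.Executors;

import androidx.annotation.NonNull;

class CameraEngineStepSketch {
    private final Executor executor = Executors.newSingleThreadExecutor();

    private final CameraEngineStep.Callback callback = new CameraEngineStep.Callback() {
        @NonNull @Override public Executor getExecutor() { return executor; }
        @Override public void handleException(@NonNull Exception exception) { /* dispatch somewhere */ }
    };

    private final CameraEngineStep engineStep = new CameraEngineStep("engine", callback);

    Task<Void> start() {
        if (engineStep.isStoppingOrStopped()) {
            engineStep.doStart(false, new Callable<Task<Void>>() {
                @Override
                public Task<Void> call() {
                    // A real engine would open the camera here and return a Task
                    // that completes when the device is ready.
                    return Tasks.forResult(null);
                }
            });
        }
        // Always return the step task: it also covers a STOP that is still in flight.
        return engineStep.getTask();
    }
}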

@ -40,7 +40,7 @@ public class FrameManager {
/** /**
* Construct a new frame manager. * Construct a new frame manager.
* The construction must be followed by an {@link #allocateBuffers(int, Size)} call * The construction must be followed by an {@link #setUp(int, Size)} call
* as soon as the parameters are known. * as soon as the parameters are known.
* *
* @param poolSize the size of the backing pool. * @param poolSize the size of the backing pool.
@ -63,7 +63,7 @@ public class FrameManager {
* @param previewSize the preview size * @param previewSize the preview size
* @return the buffer size * @return the buffer size
*/ */
public int allocateBuffers(int bitsPerPixel, @NonNull Size previewSize) { public int setUp(int bitsPerPixel, @NonNull Size previewSize) {
// TODO throw if called twice without release? // TODO throw if called twice without release?
mBufferSize = getBufferSize(bitsPerPixel, previewSize); mBufferSize = getBufferSize(bitsPerPixel, previewSize);
for (int i = 0; i < mPoolSize; i++) { for (int i = 0; i < mPoolSize; i++) {
@ -104,8 +104,8 @@ public class FrameManager {
/** /**
* Returns a new Frame for the given data. This must be called * Returns a new Frame for the given data. This must be called
* - after {@link #allocateBuffers(int, Size)}, which sets the buffer size * - after {@link #setUp(int, Size)}, which sets the buffer size
* - after the byte buffer given by allocateBuffers() has been filled. * - after the byte buffer given by setUp() has been filled.
* If this is called X times in a row without releasing frames, it will allocate * If this is called X times in a row without releasing frames, it will allocate
* X frames and that's bad. Callers must wait for the preview buffer to be available. * X frames and that's bad. Callers must wait for the preview buffer to be available.
* *
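A minimal sketch of the renamed setUp()/getFrame() lifecycle, mirroring the FrameManagerTest changes further down. Illustrative only: the callback is the buffer callback passed to the constructor, and in practice the filled buffer comes from that callback rather than being allocated here.

// Set up the pool once the preview format and size are known.
FrameManager manager = new FrameManager(2, callback);
int bufferSize = manager.setUp(ImageFormat.getBitsPerPixel(ImageFormat.NV21), new Size(640, 480));

// Later, when the camera has filled one of the dispatched buffers:
byte[] buffer = new byte[bufferSize];
Frame frame = manager.getFrame(buffer, 0, 0, null, 0);
// ... process the frame, then release it so its buffer goes back to the pool.
frame.release();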

@ -15,7 +15,7 @@ import android.util.TypedValue;
import android.view.View; import android.view.View;
import com.otaliastudios.cameraview.controls.Grid; import com.otaliastudios.cameraview.controls.Grid;
import com.otaliastudios.cameraview.internal.utils.Task; import com.otaliastudios.cameraview.internal.utils.Op;
/** /**
* A layout overlay that draws grid lines based on the {@link Grid} parameter. * A layout overlay that draws grid lines based on the {@link Grid} parameter.
@ -32,7 +32,8 @@ public class GridLinesLayout extends View {
private ColorDrawable vert; private ColorDrawable vert;
private final float width; private final float width;
@VisibleForTesting Task<Integer> drawTask = new Task<>(); @VisibleForTesting
Op<Integer> drawOp = new Op<>();
public GridLinesLayout(@NonNull Context context) { public GridLinesLayout(@NonNull Context context) {
this(context, null); this(context, null);
@ -115,7 +116,7 @@ public class GridLinesLayout extends View {
@Override @Override
protected void onDraw(@NonNull Canvas canvas) { protected void onDraw(@NonNull Canvas canvas) {
super.onDraw(canvas); super.onDraw(canvas);
drawTask.start(); drawOp.start();
int count = getLineCount(); int count = getLineCount();
for (int n = 0; n < count; n++) { for (int n = 0; n < count; n++) {
float pos = getLinePosition(n); float pos = getLinePosition(n);
@ -130,6 +131,6 @@ public class GridLinesLayout extends View {
vert.draw(canvas); vert.draw(canvas);
canvas.translate(- pos * getWidth(), 0); canvas.translate(- pos * getWidth(), 0);
} }
drawTask.end(count); drawOp.end(count);
} }
} }

@ -9,7 +9,7 @@ import java.util.concurrent.TimeUnit;
* A naive implementation of {@link java.util.concurrent.CountDownLatch} * A naive implementation of {@link java.util.concurrent.CountDownLatch}
* to help in testing. * to help in testing.
*/ */
public class Task<T> { public class Op<T> {
private CountDownLatch mLatch; private CountDownLatch mLatch;
private T mResult; private T mResult;
@ -22,17 +22,17 @@ public class Task<T> {
* - call {@link #listen()} to notify they are interested in the next action * - call {@link #listen()} to notify they are interested in the next action
* - call {@link #await()} to know when the action is performed. * - call {@link #await()} to know when the action is performed.
* *
* Task owners should: * Op owners should:
* - call {@link #start()} when task started * - call {@link #start()} when task started
* - call {@link #end(Object)} when task ends * - call {@link #end(Object)} when task ends
*/ */
public Task() { } public Op() { }
/** /**
* Creates an empty task and starts listening. * Creates an empty task and starts listening.
* @param startListening whether to call listen * @param startListening whether to call listen
*/ */
public Task(boolean startListening) { public Op(boolean startListening) {
if (startListening) listen(); if (startListening) listen();
} }
@ -41,14 +41,14 @@ public class Task<T> {
} }
/** /**
* Task owner method: notifies the action started. * Op owner method: notifies the action started.
*/ */
public void start() { public void start() {
if (!isListening()) mCount++; if (!isListening()) mCount++;
} }
/** /**
* Task owner method: notifies the action ended. * Op owner method: notifies the action ended.
* @param result the action result * @param result the action result
*/ */
public void end(T result) { public void end(T result) {
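A small sketch of the renamed Op protocol from a test's point of view, mirroring how the GridLinesLayout drawOp above is exercised. Illustrative only: await() is assumed to return the value passed to end(), and invalidate() must run on the UI thread.

// Test side: listen, trigger the action, then await the result.
Op<Integer> drawOp = gridLinesLayout.drawOp;
drawOp.listen();                     // interested in the next onDraw pass
gridLinesLayout.invalidate();        // triggers onDraw -> drawOp.start() ... drawOp.end(count)
Integer lineCount = drawOp.await();  // blocks until end() is called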

@ -81,8 +81,12 @@ public class WorkerHandler {
* @param runnable the action * @param runnable the action
*/ */
public void post(@NonNull Runnable runnable) { public void post(@NonNull Runnable runnable) {
if (Thread.currentThread() == getThread()) {
runnable.run();
} else {
mHandler.post(runnable); mHandler.post(runnable);
} }
}
/** /**
* Post an action on this handler. * Post an action on this handler.
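With this change, posting from the worker thread itself executes the runnable inline instead of re-enqueuing it. A tiny sketch of the new behavior (illustrative only):

final WorkerHandler worker = WorkerHandler.get("CameraViewEngine");
worker.post(new Runnable() {
    @Override
    public void run() {
        // We are already on the worker thread here, so this nested post()
        // runs synchronously, in-line, instead of being enqueued.
        worker.post(new Runnable() {
            @Override
            public void run() {
                // Executed before the outer run() returns.
            }
        });
    }
});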

@ -219,7 +219,7 @@ public class SnapshotPictureRecorder extends PictureRecorder {
// It seems that the buffers are already cleared here, so we need to allocate again. // It seems that the buffers are already cleared here, so we need to allocate again.
camera.setPreviewCallbackWithBuffer(null); // Release anything left camera.setPreviewCallbackWithBuffer(null); // Release anything left
camera.setPreviewCallbackWithBuffer(mEngine1); // Add ourselves camera.setPreviewCallbackWithBuffer(mEngine1); // Add ourselves
mEngine1.getFrameManager().allocateBuffers(ImageFormat.getBitsPerPixel(mFormat), previewStreamSize); mEngine1.getFrameManager().setUp(ImageFormat.getBitsPerPixel(mFormat), previewStreamSize);
} }
}); });
} }

@ -10,7 +10,7 @@ import android.view.ViewGroup;
import com.otaliastudios.cameraview.CameraLogger; import com.otaliastudios.cameraview.CameraLogger;
import com.otaliastudios.cameraview.engine.CameraEngine; import com.otaliastudios.cameraview.engine.CameraEngine;
import com.otaliastudios.cameraview.internal.utils.Task; import com.otaliastudios.cameraview.internal.utils.Op;
import com.otaliastudios.cameraview.size.Size; import com.otaliastudios.cameraview.size.Size;
/** /**
@ -46,7 +46,8 @@ public abstract class CameraPreview<T extends View, Output> {
void onSurfaceDestroyed(); void onSurfaceDestroyed();
} }
@VisibleForTesting Task<Void> mCropTask = new Task<>(); @VisibleForTesting
Op<Void> mCropOp = new Op<>();
private SurfaceCallback mSurfaceCallback; private SurfaceCallback mSurfaceCallback;
private T mView; private T mView;
boolean mCropping; boolean mCropping;
@ -59,6 +60,9 @@ public abstract class CameraPreview<T extends View, Output> {
int mInputStreamWidth; int mInputStreamWidth;
int mInputStreamHeight; int mInputStreamHeight;
// The rotation, if any, to be applied when drawing.
int mDrawRotation;
/** /**
* Creates a new preview. * Creates a new preview.
* @param context a context * @param context a context
@ -130,7 +134,6 @@ public abstract class CameraPreview<T extends View, Output> {
/** /**
* Called to notify the preview of the input stream size. The width and height must be * Called to notify the preview of the input stream size. The width and height must be
* rotated before calling this, if needed, to be consistent with the VIEW reference. * rotated before calling this, if needed, to be consistent with the VIEW reference.
*
* @param width width of the preview stream, in view coordinates * @param width width of the preview stream, in view coordinates
* @param height height of the preview stream, in view coordinates * @param height height of the preview stream, in view coordinates
*/ */
@ -139,7 +142,7 @@ public abstract class CameraPreview<T extends View, Output> {
mInputStreamWidth = width; mInputStreamWidth = width;
mInputStreamHeight = height; mInputStreamHeight = height;
if (mInputStreamWidth > 0 && mInputStreamHeight > 0) { if (mInputStreamWidth > 0 && mInputStreamHeight > 0) {
crop(mCropTask); crop(mCropOp);
} }
} }
@ -181,7 +184,7 @@ public abstract class CameraPreview<T extends View, Output> {
mOutputSurfaceWidth = width; mOutputSurfaceWidth = width;
mOutputSurfaceHeight = height; mOutputSurfaceHeight = height;
if (mOutputSurfaceWidth > 0 && mOutputSurfaceHeight > 0) { if (mOutputSurfaceWidth > 0 && mOutputSurfaceHeight > 0) {
crop(mCropTask); crop(mCropOp);
} }
mSurfaceCallback.onSurfaceAvailable(); mSurfaceCallback.onSurfaceAvailable();
} }
@ -198,7 +201,7 @@ public abstract class CameraPreview<T extends View, Output> {
mOutputSurfaceWidth = width; mOutputSurfaceWidth = width;
mOutputSurfaceHeight = height; mOutputSurfaceHeight = height;
if (width > 0 && height > 0) { if (width > 0 && height > 0) {
crop(mCropTask); crop(mCropOp);
} }
mSurfaceCallback.onSurfaceChanged(); mSurfaceCallback.onSurfaceChanged();
} }
@ -240,10 +243,10 @@ public abstract class CameraPreview<T extends View, Output> {
* There might still be some absolute difference (e.g. same ratio but bigger / smaller). * There might still be some absolute difference (e.g. same ratio but bigger / smaller).
* However, that should already be managed by the framework. * However, that should already be managed by the framework.
*/ */
protected void crop(@NonNull Task<Void> task) { protected void crop(@NonNull Op<Void> op) {
// The base implementation does not support cropping. // The base implementation does not support cropping.
task.start(); op.start();
task.end(null); op.end(null);
} }
/** /**
@ -263,4 +266,23 @@ public abstract class CameraPreview<T extends View, Output> {
public boolean isCropping() { public boolean isCropping() {
return mCropping; return mCropping;
} }
/**
* Should be called after {@link #setStreamSize(int, int)}!
*
* Sets the rotation, if any, to be applied when drawing.
* Sometimes we don't need this:
* - In Camera1, the buffer producer sets our Surface size and rotates it based on the value
* that we pass to {@link android.hardware.Camera.Parameters#setDisplayOrientation(int)},
* so the stream that comes in is already rotated.
* - In Camera2, for {@link android.view.SurfaceView} based previews, apparently it just works
* out of the box. The producer might be doing something similar.
*
* But in all the other Camera2 cases, we need to apply this rotation when drawing the surface.
* @param drawRotation the rotation in degrees
*/
public void setDrawRotation(int drawRotation) {
mDrawRotation = drawRotation;
}
} }

@ -15,7 +15,7 @@ import android.view.ViewGroup;
import com.otaliastudios.cameraview.R; import com.otaliastudios.cameraview.R;
import com.otaliastudios.cameraview.internal.egl.EglViewport; import com.otaliastudios.cameraview.internal.egl.EglViewport;
import com.otaliastudios.cameraview.internal.utils.Task; import com.otaliastudios.cameraview.internal.utils.Op;
import com.otaliastudios.cameraview.size.AspectRatio; import com.otaliastudios.cameraview.size.AspectRatio;
import java.util.Collections; import java.util.Collections;
@ -65,8 +65,8 @@ public class GlCameraPreview extends CameraPreview<GLSurfaceView, SurfaceTexture
private SurfaceTexture mInputSurfaceTexture; private SurfaceTexture mInputSurfaceTexture;
private EglViewport mOutputViewport; private EglViewport mOutputViewport;
private Set<RendererFrameCallback> mRendererFrameCallbacks = Collections.synchronizedSet(new HashSet<RendererFrameCallback>()); private Set<RendererFrameCallback> mRendererFrameCallbacks = Collections.synchronizedSet(new HashSet<RendererFrameCallback>());
@VisibleForTesting float mScaleX = 1F; @VisibleForTesting float mCropScaleX = 1F;
@VisibleForTesting float mScaleY = 1F; @VisibleForTesting float mCropScaleY = 1F;
private View mRootView; private View mRootView;
/** /**
@ -199,6 +199,7 @@ public class GlCameraPreview extends CameraPreview<GLSurfaceView, SurfaceTexture
@RendererThread @RendererThread
@Override @Override
public void onDrawFrame(GL10 gl) { public void onDrawFrame(GL10 gl) {
if (mInputSurfaceTexture == null) return;
// Latch the latest frame. If there isn't anything new, // Latch the latest frame. If there isn't anything new,
// we'll just re-use whatever was there before. // we'll just re-use whatever was there before.
mInputSurfaceTexture.updateTexImage(); mInputSurfaceTexture.updateTexImage();
@ -206,23 +207,31 @@ public class GlCameraPreview extends CameraPreview<GLSurfaceView, SurfaceTexture
// Skip drawing. Camera was not opened. // Skip drawing. Camera was not opened.
return; return;
} }
mInputSurfaceTexture.getTransformMatrix(mTransformMatrix); mInputSurfaceTexture.getTransformMatrix(mTransformMatrix);
// For Camera2, apply the draw rotation.
// See TextureCameraPreview.setDrawRotation() for info.
if (mDrawRotation != 0) {
Matrix.translateM(mTransformMatrix, 0, 0.5F, 0.5F, 0);
Matrix.rotateM(mTransformMatrix, 0, -mDrawRotation, 0, 0, 1);
Matrix.translateM(mTransformMatrix, 0, -0.5F, -0.5F, 0);
}
if (isCropping()) { if (isCropping()) {
// Scaling is easy. However: // Scaling is easy. However:
// If the view is 10x1000 (very tall), it will show only the left strip of the preview (not the center one). // If the view is 10x1000 (very tall), it will show only the left strip of the preview (not the center one).
// If the view is 1000x10 (very large), it will show only the bottom strip of the preview (not the center one). // If the view is 1000x10 (very large), it will show only the bottom strip of the preview (not the center one).
// So we must use Matrix.translateM, and it must happen before the crop. // So we must use Matrix.translateM, and it must happen before the crop.
float translX = (1F - mScaleX) / 2F; float translX = (1F - mCropScaleX) / 2F;
float translY = (1F - mScaleY) / 2F; float translY = (1F - mCropScaleY) / 2F;
Matrix.translateM(mTransformMatrix, 0, translX, translY, 0); Matrix.translateM(mTransformMatrix, 0, translX, translY, 0);
Matrix.scaleM(mTransformMatrix, 0, mScaleX, mScaleY, 1); Matrix.scaleM(mTransformMatrix, 0, mCropScaleX, mCropScaleY, 1);
} }
// Future note: passing scale to the viewport? // Future note: passing scale to the viewport?
// They are scaleX and scaleY, but flipped based on mInputFlipped. // They are scaleX and scaleY, but flipped based on mInputFlipped.
mOutputViewport.drawFrame(mOutputTextureId, mTransformMatrix); mOutputViewport.drawFrame(mOutputTextureId, mTransformMatrix);
for (RendererFrameCallback callback : mRendererFrameCallbacks) { for (RendererFrameCallback callback : mRendererFrameCallbacks) {
callback.onRendererFrame(mInputSurfaceTexture, mScaleX, mScaleY); callback.onRendererFrame(mInputSurfaceTexture, mCropScaleX, mCropScaleY);
} }
} }
@ -256,8 +265,8 @@ public class GlCameraPreview extends CameraPreview<GLSurfaceView, SurfaceTexture
* then drawing it with a scaled transformation matrix. See {@link #onDrawFrame(GL10)}. * then drawing it with a scaled transformation matrix. See {@link #onDrawFrame(GL10)}.
*/ */
@Override @Override
protected void crop(@NonNull Task<Void> task) { protected void crop(@NonNull Op<Void> op) {
task.start(); op.start();
if (mInputStreamWidth > 0 && mInputStreamHeight > 0 && mOutputSurfaceWidth > 0 && mOutputSurfaceHeight > 0) { if (mInputStreamWidth > 0 && mInputStreamHeight > 0 && mOutputSurfaceWidth > 0 && mOutputSurfaceHeight > 0) {
float scaleX = 1f, scaleY = 1f; float scaleX = 1f, scaleY = 1f;
AspectRatio current = AspectRatio.of(mOutputSurfaceWidth, mOutputSurfaceHeight); AspectRatio current = AspectRatio.of(mOutputSurfaceWidth, mOutputSurfaceHeight);
@ -270,10 +279,10 @@ public class GlCameraPreview extends CameraPreview<GLSurfaceView, SurfaceTexture
scaleX = target.toFloat() / current.toFloat(); scaleX = target.toFloat() / current.toFloat();
} }
mCropping = scaleX > 1.02f || scaleY > 1.02f; mCropping = scaleX > 1.02f || scaleY > 1.02f;
mScaleX = 1F / scaleX; mCropScaleX = 1F / scaleX;
mScaleY = 1F / scaleY; mCropScaleY = 1F / scaleY;
getView().requestRender(); getView().requestRender();
} }
task.end(null); op.end(null);
} }
} }
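The center-crop math used by both crop() implementations can be summarized in a standalone helper. This is an illustrative sketch only, using the AspectRatio helpers shown above; the GL preview then samples with 1/scaleX and 1/scaleY (mCropScaleX/Y).

// Returns {scaleX, scaleY} >= 1: how much the drawn stream must exceed the
// view on each axis so that the view is fully covered (center-crop).
static float[] computeCropScale(int viewWidth, int viewHeight, int streamWidth, int streamHeight) {
    float scaleX = 1f, scaleY = 1f;
    AspectRatio view = AspectRatio.of(viewWidth, viewHeight);
    AspectRatio stream = AspectRatio.of(streamWidth, streamHeight);
    if (view.toFloat() >= stream.toFloat()) {
        // The view is relatively wider: match widths and crop the stream vertically.
        scaleY = view.toFloat() / stream.toFloat();
    } else {
        // The view is relatively taller: match heights and crop horizontally.
        scaleX = stream.toFloat() / view.toFloat();
    }
    return new float[] { scaleX, scaleY };
}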

@ -2,6 +2,10 @@ package com.otaliastudios.cameraview.preview;
import android.annotation.TargetApi; import android.annotation.TargetApi;
import android.content.Context; import android.content.Context;
import android.graphics.Color;
import android.graphics.Matrix;
import android.graphics.Rect;
import android.graphics.RectF;
import android.graphics.SurfaceTexture; import android.graphics.SurfaceTexture;
import androidx.annotation.NonNull; import androidx.annotation.NonNull;
import androidx.annotation.Nullable; import androidx.annotation.Nullable;
@ -10,10 +14,14 @@ import android.view.TextureView;
import android.view.View; import android.view.View;
import android.view.ViewGroup; import android.view.ViewGroup;
import com.google.android.gms.tasks.TaskCompletionSource;
import com.google.android.gms.tasks.Tasks;
import com.otaliastudios.cameraview.R; import com.otaliastudios.cameraview.R;
import com.otaliastudios.cameraview.internal.utils.Task; import com.otaliastudios.cameraview.internal.utils.Op;
import com.otaliastudios.cameraview.size.AspectRatio; import com.otaliastudios.cameraview.size.AspectRatio;
import java.util.concurrent.ExecutionException;
/** /**
* A preview implementation based on {@link TextureView}. * A preview implementation based on {@link TextureView}.
* Better than {@link SurfaceCameraPreview} but much less powerful than {@link GlCameraPreview}. * Better than {@link SurfaceCameraPreview} but much less powerful than {@link GlCameraPreview}.
@ -76,29 +84,20 @@ public class TextureCameraPreview extends CameraPreview<TextureView, SurfaceText
return getView().getSurfaceTexture(); return getView().getSurfaceTexture();
} }
@TargetApi(15)
@Override
public void setStreamSize(int width, int height) {
super.setStreamSize(width, height);
if (getView().getSurfaceTexture() != null) {
getView().getSurfaceTexture().setDefaultBufferSize(width, height);
}
}
@Override @Override
public boolean supportsCropping() { public boolean supportsCropping() {
return true; return true;
} }
@Override @Override
protected void crop(final @NonNull Task<Void> task) { protected void crop(final @NonNull Op<Void> op) {
task.start(); op.start();
getView().post(new Runnable() { getView().post(new Runnable() {
@Override @Override
public void run() { public void run() {
if (mInputStreamHeight == 0 || mInputStreamWidth == 0 || if (mInputStreamHeight == 0 || mInputStreamWidth == 0 ||
mOutputSurfaceHeight == 0 || mOutputSurfaceWidth == 0) { mOutputSurfaceHeight == 0 || mOutputSurfaceWidth == 0) {
task.end(null); op.end(null);
return; return;
} }
float scaleX = 1f, scaleY = 1f; float scaleX = 1f, scaleY = 1f;
@ -118,8 +117,34 @@ public class TextureCameraPreview extends CameraPreview<TextureView, SurfaceText
mCropping = scaleX > 1.02f || scaleY > 1.02f; mCropping = scaleX > 1.02f || scaleY > 1.02f;
LOG.i("crop:", "applied scaleX=", scaleX); LOG.i("crop:", "applied scaleX=", scaleX);
LOG.i("crop:", "applied scaleY=", scaleY); LOG.i("crop:", "applied scaleY=", scaleY);
task.end(null); op.end(null);
}
});
}
@Override
public void setDrawRotation(final int drawRotation) {
super.setDrawRotation(drawRotation);
final TaskCompletionSource<Void> task = new TaskCompletionSource<>();
getView().post(new Runnable() {
@Override
public void run() {
Matrix matrix = new Matrix();
// Output surface coordinates
float outputCenterX = mOutputSurfaceWidth / 2F;
float outputCenterY = mOutputSurfaceHeight / 2F;
boolean flip = drawRotation % 180 != 0;
// If dimensions are swapped, we must also do extra work to flip
// the two dimensions, using the view width and height (to support cropping).
if (flip) {
float scaleX = (float) mOutputSurfaceHeight / mOutputSurfaceWidth;
matrix.postScale(scaleX, 1F / scaleX, outputCenterX, outputCenterY);
}
matrix.postRotate((float) -drawRotation, outputCenterX, outputCenterY);
getView().setTransform(matrix);
task.setResult(null);
} }
}); });
try { Tasks.await(task.getTask()); } catch (InterruptedException | ExecutionException ignore) { }
} }
} }

@ -33,12 +33,12 @@ public class FrameManagerTest {
@Test @Test
public void testAllocate() { public void testAllocate() {
FrameManager manager = new FrameManager(1, callback); FrameManager manager = new FrameManager(1, callback);
manager.allocateBuffers(4, new Size(50, 50)); manager.setUp(4, new Size(50, 50));
verify(callback, times(1)).onBufferAvailable(any(byte[].class)); verify(callback, times(1)).onBufferAvailable(any(byte[].class));
reset(callback); reset(callback);
manager = new FrameManager(5, callback); manager = new FrameManager(5, callback);
manager.allocateBuffers(4, new Size(50, 50)); manager.setUp(4, new Size(50, 50));
verify(callback, times(5)).onBufferAvailable(any(byte[].class)); verify(callback, times(5)).onBufferAvailable(any(byte[].class));
} }
@ -46,7 +46,7 @@ public class FrameManagerTest {
public void testFrameRecycling() { public void testFrameRecycling() {
// A 1-pool manager will always recycle the same frame. // A 1-pool manager will always recycle the same frame.
FrameManager manager = new FrameManager(1, callback); FrameManager manager = new FrameManager(1, callback);
manager.allocateBuffers(4, new Size(50, 50)); manager.setUp(4, new Size(50, 50));
Frame first = manager.getFrame(null, 0, 0, null, 0); Frame first = manager.getFrame(null, 0, 0, null, 0);
first.release(); first.release();
@ -60,7 +60,7 @@ public class FrameManagerTest {
@Test @Test
public void testOnFrameReleased_alreadyFull() { public void testOnFrameReleased_alreadyFull() {
FrameManager manager = new FrameManager(1, callback); FrameManager manager = new FrameManager(1, callback);
int length = manager.allocateBuffers(4, new Size(50, 50)); int length = manager.setUp(4, new Size(50, 50));
Frame frame1 = manager.getFrame(new byte[length], 0, 0, null, 0); Frame frame1 = manager.getFrame(new byte[length], 0, 0, null, 0);
// Since frame1 is already taken and poolSize = 1, a new Frame is created. // Since frame1 is already taken and poolSize = 1, a new Frame is created.
@ -77,7 +77,7 @@ public class FrameManagerTest {
@Test @Test
public void testOnFrameReleased_sameLength() { public void testOnFrameReleased_sameLength() {
FrameManager manager = new FrameManager(1, callback); FrameManager manager = new FrameManager(1, callback);
int length = manager.allocateBuffers(4, new Size(50, 50)); int length = manager.setUp(4, new Size(50, 50));
// A camera preview frame comes. Request a frame. // A camera preview frame comes. Request a frame.
byte[] picture = new byte[length]; byte[] picture = new byte[length];
@ -92,14 +92,14 @@ public class FrameManagerTest {
@Test @Test
public void testOnFrameReleased_differentLength() { public void testOnFrameReleased_differentLength() {
FrameManager manager = new FrameManager(1, callback); FrameManager manager = new FrameManager(1, callback);
int length = manager.allocateBuffers(4, new Size(50, 50)); int length = manager.setUp(4, new Size(50, 50));
// A camera preview frame comes. Request a frame. // A camera preview frame comes. Request a frame.
byte[] picture = new byte[length]; byte[] picture = new byte[length];
Frame frame = manager.getFrame(picture, 0, 0, null, 0); Frame frame = manager.getFrame(picture, 0, 0, null, 0);
// Don't release the frame. Change the allocation size. // Don't release the frame. Change the allocation size.
manager.allocateBuffers(2, new Size(15, 15)); manager.setUp(2, new Size(15, 15));
// Now release the old frame and ensure that onBufferAvailable is NOT called, // Now release the old frame and ensure that onBufferAvailable is NOT called,
// because the released data has wrong length. // because the released data has wrong length.
@ -111,7 +111,7 @@ public class FrameManagerTest {
@Test @Test
public void testRelease() { public void testRelease() {
FrameManager manager = new FrameManager(1, callback); FrameManager manager = new FrameManager(1, callback);
int length = manager.allocateBuffers(4, new Size(50, 50)); int length = manager.setUp(4, new Size(50, 50));
Frame first = manager.getFrame(new byte[length], 0, 0, null, 0); Frame first = manager.getFrame(new byte[length], 0, 0, null, 0);
first.release(); // Store this frame in the queue. first.release(); // Store this frame in the queue.

@ -15,8 +15,7 @@
<activity <activity
android:name=".CameraActivity" android:name=".CameraActivity"
android:theme="@style/Theme.MainActivity" android:theme="@style/Theme.MainActivity"
android:configChanges="orientation|screenLayout|keyboardHidden" android:configChanges="screenLayout|keyboardHidden"
android:screenOrientation="portrait"
android:hardwareAccelerated="true"> android:hardwareAccelerated="true">
<intent-filter> <intent-filter>
<action android:name="android.intent.action.MAIN" /> <action android:name="android.intent.action.MAIN" />

@ -18,6 +18,7 @@
android:keepScreenOn="true" android:keepScreenOn="true"
app:cameraExperimental="true" app:cameraExperimental="true"
app:cameraEngine="camera2" app:cameraEngine="camera2"
app:cameraPreview="glSurface"
app:cameraPlaySounds="true" app:cameraPlaySounds="true"
app:cameraGrid="off" app:cameraGrid="off"
app:cameraFlash="off" app:cameraFlash="off"
