Make internals more open (#494)

* Simplify GestureLayouts, now simple classes

* Reorder CameraEngine members

* Refactor CameraPreviews

* Reorder Camera1Engine

* Fix tests

* New angle classes, fix snapshot rotation

* Small changes

* Add AnglesTest
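
The first bullet refers to the old GestureLayout views becoming plain GestureFinder classes that talk to their host through a small GestureFinder.Controller interface (visible in the diff below). The snippet here is a minimal sketch of how a host view might wire one up: the MyHostView class is hypothetical and the wiring is illustrative only, while the finder and controller signatures are the ones appearing in this commit.

import android.content.Context;
import android.graphics.PointF;
import android.view.MotionEvent;
import android.widget.FrameLayout;
import androidx.annotation.NonNull;
import com.otaliastudios.cameraview.gesture.Gesture;
import com.otaliastudios.cameraview.gesture.GestureFinder;
import com.otaliastudios.cameraview.gesture.TapGestureFinder;

// Hypothetical host view, for illustration only.
class MyHostView extends FrameLayout {

    private final TapGestureFinder mTapFinder;

    MyHostView(@NonNull Context context) {
        super(context);
        // Finders are no longer Views: they only need a Controller describing their host.
        mTapFinder = new TapGestureFinder(new GestureFinder.Controller() {
            @NonNull @Override public Context getContext() { return MyHostView.this.getContext(); }
            @Override public int getWidth() { return MyHostView.this.getWidth(); }
            @Override public int getHeight() { return MyHostView.this.getHeight(); }
        });
        mTapFinder.setActive(true);
    }

    @Override
    public boolean onTouchEvent(MotionEvent event) {
        // Ask the finder whether this event completed a gesture it recognizes.
        if (mTapFinder.onTouchEvent(event)) {
            Gesture gesture = mTapFinder.getGesture(); // e.g. Gesture.TAP or Gesture.LONG_TAP
            PointF[] points = mTapFinder.getPoints();  // where the gesture happened
            // ...map gesture + points to an action here...
            return true;
        }
        return super.onTouchEvent(event);
    }
}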
Author: Mattia Iavarone, 5 years ago (committed by GitHub)
Parent: 64714017b8
Commit: 0f9c7f0e17
32 changed files:

  1. cameraview/src/androidTest/java/com/otaliastudios/cameraview/CameraViewCallbacksTest.java (6 lines changed)
  2. cameraview/src/androidTest/java/com/otaliastudios/cameraview/CameraViewTest.java (32 lines changed)
  3. cameraview/src/androidTest/java/com/otaliastudios/cameraview/engine/MockCameraEngine.java (45 lines changed)
  4. cameraview/src/androidTest/java/com/otaliastudios/cameraview/gesture/GestureFinderTest.java (43 lines changed)
  5. cameraview/src/androidTest/java/com/otaliastudios/cameraview/gesture/PinchGestureFinderTest.java (31 lines changed)
  6. cameraview/src/androidTest/java/com/otaliastudios/cameraview/gesture/ScrollGestureFinderTest.java (39 lines changed)
  7. cameraview/src/androidTest/java/com/otaliastudios/cameraview/gesture/TapGestureFinderTest.java (47 lines changed)
  8. cameraview/src/androidTest/java/com/otaliastudios/cameraview/preview/CameraPreviewTest.java (5 lines changed)
  9. cameraview/src/androidTest/java/com/otaliastudios/cameraview/preview/GlCameraPreviewTest.java (4 lines changed)
  10. cameraview/src/androidTest/java/com/otaliastudios/cameraview/preview/MockCameraPreview.java (2 lines changed)
  11. cameraview/src/androidTest/java/com/otaliastudios/cameraview/preview/SurfaceCameraPreviewTest.java (4 lines changed)
  12. cameraview/src/androidTest/java/com/otaliastudios/cameraview/preview/TextureCameraPreviewTest.java (4 lines changed)
  13. cameraview/src/main/java/com/otaliastudios/cameraview/CameraView.java (111 lines changed)
  14. cameraview/src/main/java/com/otaliastudios/cameraview/engine/Camera1Engine.java (663 lines changed)
  15. cameraview/src/main/java/com/otaliastudios/cameraview/engine/Camera2Engine.java (69 lines changed)
  16. cameraview/src/main/java/com/otaliastudios/cameraview/engine/CameraEngine.java (465 lines changed)
  17. cameraview/src/main/java/com/otaliastudios/cameraview/engine/offset/Angles.java (121 lines changed)
  18. cameraview/src/main/java/com/otaliastudios/cameraview/engine/offset/Axis.java (28 lines changed)
  19. cameraview/src/main/java/com/otaliastudios/cameraview/engine/offset/Reference.java (28 lines changed)
  20. cameraview/src/main/java/com/otaliastudios/cameraview/gesture/GestureFinder.java (29 lines changed)
  21. cameraview/src/main/java/com/otaliastudios/cameraview/gesture/PinchGestureFinder.java (13 lines changed)
  22. cameraview/src/main/java/com/otaliastudios/cameraview/gesture/ScrollGestureFinder.java (15 lines changed)
  23. cameraview/src/main/java/com/otaliastudios/cameraview/gesture/TapGestureFinder.java (22 lines changed)
  24. cameraview/src/main/java/com/otaliastudios/cameraview/internal/egl/EglViewport.java (4 lines changed)
  25. cameraview/src/main/java/com/otaliastudios/cameraview/picture/Snapshot1PictureRecorder.java (24 lines changed)
  26. cameraview/src/main/java/com/otaliastudios/cameraview/picture/SnapshotGlPictureRecorder.java (6 lines changed)
  27. cameraview/src/main/java/com/otaliastudios/cameraview/preview/CameraPreview.java (12 lines changed)
  28. cameraview/src/main/java/com/otaliastudios/cameraview/preview/GlCameraPreview.java (91 lines changed)
  29. cameraview/src/main/java/com/otaliastudios/cameraview/preview/SurfaceCameraPreview.java (5 lines changed)
  30. cameraview/src/main/java/com/otaliastudios/cameraview/preview/TextureCameraPreview.java (6 lines changed)
  31. cameraview/src/main/java/com/otaliastudios/cameraview/video/SnapshotVideoRecorder.java (7 lines changed)
  32. cameraview/src/test/java/com/otaliastudios/cameraview/engine/offset/AnglesTest.java (80 lines changed)
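
The offset package entries above (Angles, Axis, Reference) replace the old REF_VIEW/REF_SENSOR/REF_OUTPUT integer constants on CameraEngine. Below is a minimal sketch of how the new API is consumed, based only on the calls visible in the hunks that follow; the describeOffsets helper is hypothetical.

import com.otaliastudios.cameraview.engine.offset.Angles;
import com.otaliastudios.cameraview.engine.offset.Axis;
import com.otaliastudios.cameraview.engine.offset.Reference;

class OffsetsExample {

    // Hypothetical helper: shows the offsets an engine would use, given its Angles
    // instance (obtained through CameraEngine.getAngles() in the diff below).
    static void describeOffsets(Angles angles) {
        // Display orientation: sensor-to-view, measured on the absolute axis.
        // Camera1Engine passes this to Camera.setDisplayOrientation().
        int display = angles.offset(Reference.SENSOR, Reference.VIEW, Axis.ABSOLUTE);

        // Output rotation: sensor-to-output, relative to the sensor axis.
        // Camera1Engine uses this for PictureResult.Stub.rotation.
        int output = angles.offset(Reference.SENSOR, Reference.OUTPUT, Axis.RELATIVE_TO_SENSOR);

        // Whether width and height must be swapped between two references,
        // as CameraView does when cropping snapshot sizes.
        boolean flip = angles.flip(Reference.VIEW, Reference.OUTPUT);

        System.out.println("display=" + display + ", output=" + output + ", flip=" + flip);
    }
}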

@ -195,7 +195,7 @@ public class CameraViewCallbacksTest extends BaseTest {
@Test @Test
public void testDispatchOnFocusStart() { public void testDispatchOnFocusStart() {
// Enable tap gesture. // Enable tap gesture.
// Can't mock package protected. camera.mTapGestureLayout = mock(TapGestureLayout.class); // Can't mock package protected. camera.mTapGestureFinder = mock(TapGestureLayout.class);
camera.mapGesture(Gesture.TAP, GestureAction.AUTO_FOCUS); camera.mapGesture(Gesture.TAP, GestureAction.AUTO_FOCUS);
AutoFocusMarker marker = mock(AutoFocusMarker.class); AutoFocusMarker marker = mock(AutoFocusMarker.class);
MarkerLayout markerLayout = mock(MarkerLayout.class); MarkerLayout markerLayout = mock(MarkerLayout.class);
@ -215,7 +215,7 @@ public class CameraViewCallbacksTest extends BaseTest {
@Test @Test
public void testDispatchOnFocusEnd() { public void testDispatchOnFocusEnd() {
// Enable tap gesture. // Enable tap gesture.
// Can't mock package protected. camera.mTapGestureLayout = mock(TapGestureLayout.class); // Can't mock package protected. camera.mTapGestureFinder = mock(TapGestureLayout.class);
camera.mapGesture(Gesture.TAP, GestureAction.AUTO_FOCUS); camera.mapGesture(Gesture.TAP, GestureAction.AUTO_FOCUS);
AutoFocusMarker marker = mock(AutoFocusMarker.class); AutoFocusMarker marker = mock(AutoFocusMarker.class);
camera.setAutoFocusMarker(marker); camera.setAutoFocusMarker(marker);
@ -229,7 +229,7 @@ public class CameraViewCallbacksTest extends BaseTest {
verify(listener, times(1)).onAutoFocusEnd(success, point); verify(listener, times(1)).onAutoFocusEnd(success, point);
verify(marker, times(1)).onAutoFocusEnd(AutoFocusTrigger.GESTURE, success, point); verify(marker, times(1)).onAutoFocusEnd(AutoFocusTrigger.GESTURE, success, point);
// Can't mock package protected. verify(camera.mTapGestureLayout, times(1)).onAutoFocusEnd(success); // Can't mock package protected. verify(camera.mTapGestureFinder, times(1)).onAutoFocusEnd(success);
} }
@Test @Test

@ -28,9 +28,9 @@ import com.otaliastudios.cameraview.controls.Mode;
import com.otaliastudios.cameraview.controls.VideoCodec; import com.otaliastudios.cameraview.controls.VideoCodec;
import com.otaliastudios.cameraview.controls.WhiteBalance; import com.otaliastudios.cameraview.controls.WhiteBalance;
import com.otaliastudios.cameraview.gesture.GestureParser; import com.otaliastudios.cameraview.gesture.GestureParser;
import com.otaliastudios.cameraview.gesture.PinchGestureLayout; import com.otaliastudios.cameraview.gesture.PinchGestureFinder;
import com.otaliastudios.cameraview.gesture.ScrollGestureLayout; import com.otaliastudios.cameraview.gesture.ScrollGestureFinder;
import com.otaliastudios.cameraview.gesture.TapGestureLayout; import com.otaliastudios.cameraview.gesture.TapGestureFinder;
import com.otaliastudios.cameraview.engine.MockCameraEngine; import com.otaliastudios.cameraview.engine.MockCameraEngine;
import com.otaliastudios.cameraview.internal.utils.Op; import com.otaliastudios.cameraview.internal.utils.Op;
import com.otaliastudios.cameraview.markers.AutoFocusMarker; import com.otaliastudios.cameraview.markers.AutoFocusMarker;
@ -201,21 +201,21 @@ public class CameraViewTest extends BaseTest {
// PinchGestureLayout // PinchGestureLayout
cameraView.mapGesture(Gesture.PINCH, GestureAction.ZOOM); cameraView.mapGesture(Gesture.PINCH, GestureAction.ZOOM);
assertTrue(cameraView.mPinchGestureLayout.isActive()); assertTrue(cameraView.mPinchGestureFinder.isActive());
cameraView.clearGesture(Gesture.PINCH); cameraView.clearGesture(Gesture.PINCH);
assertFalse(cameraView.mPinchGestureLayout.isActive()); assertFalse(cameraView.mPinchGestureFinder.isActive());
// TapGestureLayout // TapGestureLayout
cameraView.mapGesture(Gesture.TAP, GestureAction.TAKE_PICTURE); cameraView.mapGesture(Gesture.TAP, GestureAction.TAKE_PICTURE);
assertTrue(cameraView.mTapGestureLayout.isActive()); assertTrue(cameraView.mTapGestureFinder.isActive());
cameraView.clearGesture(Gesture.TAP); cameraView.clearGesture(Gesture.TAP);
assertFalse(cameraView.mPinchGestureLayout.isActive()); assertFalse(cameraView.mPinchGestureFinder.isActive());
// ScrollGestureLayout // ScrollGestureLayout
cameraView.mapGesture(Gesture.SCROLL_HORIZONTAL, GestureAction.ZOOM); cameraView.mapGesture(Gesture.SCROLL_HORIZONTAL, GestureAction.ZOOM);
assertTrue(cameraView.mScrollGestureLayout.isActive()); assertTrue(cameraView.mScrollGestureFinder.isActive());
cameraView.clearGesture(Gesture.SCROLL_HORIZONTAL); cameraView.clearGesture(Gesture.SCROLL_HORIZONTAL);
assertFalse(cameraView.mScrollGestureLayout.isActive()); assertFalse(cameraView.mScrollGestureFinder.isActive());
} }
//endregion //endregion
@ -231,7 +231,7 @@ public class CameraViewTest extends BaseTest {
ui(new Runnable() { ui(new Runnable() {
@Override @Override
public void run() { public void run() {
cameraView.mTapGestureLayout = new TapGestureLayout(cameraView.getContext()) { cameraView.mTapGestureFinder = new TapGestureFinder(cameraView.mCameraCallbacks) {
protected boolean handleTouchEvent(@NonNull MotionEvent event) { protected boolean handleTouchEvent(@NonNull MotionEvent event) {
setGesture(Gesture.TAP); setGesture(Gesture.TAP);
return true; return true;
@ -253,7 +253,7 @@ public class CameraViewTest extends BaseTest {
ui(new Runnable() { ui(new Runnable() {
@Override @Override
public void run() { public void run() {
cameraView.mTapGestureLayout = new TapGestureLayout(cameraView.getContext()) { cameraView.mTapGestureFinder = new TapGestureFinder(cameraView.mCameraCallbacks) {
protected boolean handleTouchEvent(@NonNull MotionEvent event) { protected boolean handleTouchEvent(@NonNull MotionEvent event) {
setGesture(Gesture.TAP); setGesture(Gesture.TAP);
return true; return true;
@ -280,7 +280,7 @@ public class CameraViewTest extends BaseTest {
ui(new Runnable() { ui(new Runnable() {
@Override @Override
public void run() { public void run() {
cameraView.mPinchGestureLayout = new PinchGestureLayout(cameraView.getContext()) { cameraView.mPinchGestureFinder = new PinchGestureFinder(cameraView.mCameraCallbacks) {
@Override @Override
protected boolean handleTouchEvent(@NonNull MotionEvent event) { protected boolean handleTouchEvent(@NonNull MotionEvent event) {
setGesture(Gesture.PINCH); setGesture(Gesture.PINCH);
@ -321,7 +321,7 @@ public class CameraViewTest extends BaseTest {
ui(new Runnable() { ui(new Runnable() {
@Override @Override
public void run() { public void run() {
cameraView.mScrollGestureLayout = new ScrollGestureLayout(cameraView.getContext()) { cameraView.mScrollGestureFinder = new ScrollGestureFinder(cameraView.mCameraCallbacks) {
@Override @Override
protected boolean handleTouchEvent(@NonNull MotionEvent event) { protected boolean handleTouchEvent(@NonNull MotionEvent event) {
setGesture(Gesture.SCROLL_HORIZONTAL); setGesture(Gesture.SCROLL_HORIZONTAL);
@ -626,7 +626,7 @@ public class CameraViewTest extends BaseTest {
public void testPreviewStreamSizeSelector() { public void testPreviewStreamSizeSelector() {
SizeSelector source = SizeSelectors.minHeight(50); SizeSelector source = SizeSelectors.minHeight(50);
cameraView.setPreviewStreamSize(source); cameraView.setPreviewStreamSize(source);
SizeSelector result = mockController.getInternalPreviewStreamSizeSelector(); SizeSelector result = mockController.getPreviewStreamSizeSelector();
assertNotNull(result); assertNotNull(result);
assertEquals(result, source); assertEquals(result, source);
} }
@ -635,7 +635,7 @@ public class CameraViewTest extends BaseTest {
public void testPictureSizeSelector() { public void testPictureSizeSelector() {
SizeSelector source = SizeSelectors.minHeight(50); SizeSelector source = SizeSelectors.minHeight(50);
cameraView.setPictureSize(source); cameraView.setPictureSize(source);
SizeSelector result = mockController.getInternalPictureSizeSelector(); SizeSelector result = mockController.getPictureSizeSelector();
assertNotNull(result); assertNotNull(result);
assertEquals(result, source); assertEquals(result, source);
} }
@ -644,7 +644,7 @@ public class CameraViewTest extends BaseTest {
public void testVideoSizeSelector() { public void testVideoSizeSelector() {
SizeSelector source = SizeSelectors.minHeight(50); SizeSelector source = SizeSelectors.minHeight(50);
cameraView.setVideoSize(source); cameraView.setVideoSize(source);
SizeSelector result = mockController.getInternalVideoSizeSelector(); SizeSelector result = mockController.getVideoSizeSelector();
assertNotNull(result); assertNotNull(result);
assertEquals(result, source); assertEquals(result, source);
} }

@ -87,27 +87,6 @@ public class MockCameraEngine extends CameraEngine {
mEngineStep.setState(started ? STATE_STARTED : STATE_STOPPED); mEngineStep.setState(started ? STATE_STARTED : STATE_STOPPED);
} }
public int getSnapshotMaxWidth() {
return mSnapshotMaxWidth;
}
public int getSnapshotMaxHeight() {
return mSnapshotMaxHeight;
}
public SizeSelector getInternalPreviewStreamSizeSelector() {
return super.getPreviewStreamSizeSelector();
}
public SizeSelector getInternalPictureSizeSelector() {
return super.getPictureSizeSelector();
}
public SizeSelector getInternalVideoSizeSelector() {
return super.getVideoSizeSelector();
}
@Override @Override
public void setZoom(float zoom, @Nullable PointF[] points, boolean notify) { public void setZoom(float zoom, @Nullable PointF[] points, boolean notify) {
mZoomValue = zoom; mZoomValue = zoom;
@ -194,32 +173,8 @@ public class MockCameraEngine extends CameraEngine {
} }
@Override
public void setHasFrameProcessors(boolean hasFrameProcessors) {
}
@Override @Override
protected boolean collectCameraInfo(@NonNull Facing facing) { protected boolean collectCameraInfo(@NonNull Facing facing) {
return true; return true;
} }
/*
@Override
public void setFacing(@NonNull Facing facing) {
mFacing = facing;
}
@Override
public void setMode(@NonNull Mode mode) {
mMode = mode;
}
@Override
public void setAudio(@NonNull Audio audio) {
mAudio = audio;
}
*/
} }

@ -4,11 +4,14 @@ package com.otaliastudios.cameraview.gesture;
import android.annotation.TargetApi; import android.annotation.TargetApi;
import android.content.Context; import android.content.Context;
import androidx.annotation.NonNull;
import androidx.test.espresso.ViewInteraction; import androidx.test.espresso.ViewInteraction;
import androidx.test.espresso.matcher.RootMatchers; import androidx.test.espresso.matcher.RootMatchers;
import androidx.test.rule.ActivityTestRule; import androidx.test.rule.ActivityTestRule;
import android.view.MotionEvent; import android.view.MotionEvent;
import android.view.View; import android.view.View;
import android.view.ViewGroup;
import android.widget.FrameLayout;
import com.otaliastudios.cameraview.BaseTest; import com.otaliastudios.cameraview.BaseTest;
import com.otaliastudios.cameraview.TestActivity; import com.otaliastudios.cameraview.TestActivity;
@ -21,17 +24,19 @@ import org.junit.Rule;
import static androidx.test.espresso.Espresso.onView; import static androidx.test.espresso.Espresso.onView;
@TargetApi(17) @TargetApi(17)
public abstract class GestureLayoutTest<T extends GestureLayout> extends BaseTest { public abstract class GestureFinderTest<T extends GestureFinder> extends BaseTest {
protected abstract T create(Context context); protected abstract T createFinder(@NonNull GestureFinder.Controller controller);
@Rule @Rule
public ActivityTestRule<TestActivity> rule = new ActivityTestRule<>(TestActivity.class); public ActivityTestRule<TestActivity> rule = new ActivityTestRule<>(TestActivity.class);
@SuppressWarnings("WeakerAccess") @SuppressWarnings("WeakerAccess")
protected T layout; protected T finder;
@SuppressWarnings("WeakerAccess") @SuppressWarnings("WeakerAccess")
protected Op<Gesture> touch; protected Op<Gesture> touchOp;
@SuppressWarnings("WeakerAccess")
protected ViewGroup layout;
@Before @Before
public void setUp() { public void setUp() {
@ -39,16 +44,17 @@ public abstract class GestureLayoutTest<T extends GestureLayout> extends BaseTes
@Override @Override
public void run() { public void run() {
TestActivity a = rule.getActivity(); TestActivity a = rule.getActivity();
layout = create(a); layout = new FrameLayout(a);
layout.setActive(true); finder = createFinder(new Controller());
finder.setActive(true);
a.inflate(layout); a.inflate(layout);
touch = new Op<>(); touchOp = new Op<>();
layout.setOnTouchListener(new View.OnTouchListener() { layout.setOnTouchListener(new View.OnTouchListener() {
@Override @Override
public boolean onTouch(View view, MotionEvent motionEvent) { public boolean onTouch(View view, MotionEvent motionEvent) {
boolean found = layout.onTouchEvent(motionEvent); boolean found = finder.onTouchEvent(motionEvent);
if (found) touch.end(layout.getGesture()); if (found) touchOp.end(finder.getGesture());
return true; return true;
} }
}); });
@ -62,4 +68,23 @@ public abstract class GestureLayoutTest<T extends GestureLayout> extends BaseTes
.inRoot(RootMatchers.withDecorView( .inRoot(RootMatchers.withDecorView(
Matchers.is(rule.getActivity().getWindow().getDecorView()))); Matchers.is(rule.getActivity().getWindow().getDecorView())));
} }
private class Controller implements GestureFinder.Controller {
@NonNull
@Override
public Context getContext() {
return layout.getContext();
}
@Override
public int getWidth() {
return layout.getWidth();
}
@Override
public int getHeight() {
return layout.getHeight();
}
}
} }

@ -3,10 +3,7 @@ package com.otaliastudios.cameraview.gesture;
import android.content.Context; import android.content.Context;
import com.otaliastudios.cameraview.gesture.Gesture; import androidx.annotation.NonNull;
import com.otaliastudios.cameraview.gesture.GestureLayoutTest;
import com.otaliastudios.cameraview.gesture.PinchGestureLayout;
import androidx.test.espresso.ViewAction; import androidx.test.espresso.ViewAction;
import androidx.test.ext.junit.runners.AndroidJUnit4; import androidx.test.ext.junit.runners.AndroidJUnit4;
import androidx.test.filters.SmallTest; import androidx.test.filters.SmallTest;
@ -22,21 +19,21 @@ import static org.junit.Assert.assertTrue;
@RunWith(AndroidJUnit4.class) @RunWith(AndroidJUnit4.class)
@SmallTest @SmallTest
public class PinchGestureLayoutTest extends GestureLayoutTest<PinchGestureLayout> { public class PinchGestureFinderTest extends GestureFinderTest<PinchGestureFinder> {
@Override @Override
protected PinchGestureLayout create(Context context) { protected PinchGestureFinder createFinder(@NonNull GestureFinder.Controller controller) {
return new PinchGestureLayout(context); return new PinchGestureFinder(controller);
} }
@Test @Test
public void testDefaults() { public void testDefaults() {
assertEquals(layout.getGesture(), Gesture.PINCH); assertEquals(finder.getGesture(), Gesture.PINCH);
assertEquals(layout.getPoints().length, 2); assertEquals(finder.getPoints().length, 2);
assertEquals(layout.getPoints()[0].x, 0, 0); assertEquals(finder.getPoints()[0].x, 0, 0);
assertEquals(layout.getPoints()[0].y, 0, 0); assertEquals(finder.getPoints()[0].y, 0, 0);
assertEquals(layout.getPoints()[1].x, 0, 0); assertEquals(finder.getPoints()[1].x, 0, 0);
assertEquals(layout.getPoints()[1].y, 0, 0); assertEquals(finder.getPoints()[1].y, 0, 0);
} }
// TODO: test pinch open // TODO: test pinch open
@ -51,15 +48,15 @@ public class PinchGestureLayoutTest extends GestureLayoutTest<PinchGestureLayout
} }
private void testPinch(ViewAction action, boolean increasing) { private void testPinch(ViewAction action, boolean increasing) {
touch.listen(); touchOp.listen();
touch.start(); touchOp.start();
onLayout().perform(action); onLayout().perform(action);
Gesture found = touch.await(10000); Gesture found = touchOp.await(10000);
assertNotNull(found); assertNotNull(found);
// How will this move our parameter? // How will this move our parameter?
float curr = 0.5f, min = 0f, max = 1f; float curr = 0.5f, min = 0f, max = 1f;
float newValue = layout.computeValue(curr, min, max); float newValue = finder.computeValue(curr, min, max);
if (increasing) { if (increasing) {
assertTrue(newValue > curr); assertTrue(newValue > curr);
assertTrue(newValue <= max); assertTrue(newValue <= max);

@ -3,10 +3,7 @@ package com.otaliastudios.cameraview.gesture;
import android.content.Context; import android.content.Context;
import com.otaliastudios.cameraview.gesture.Gesture; import androidx.annotation.NonNull;
import com.otaliastudios.cameraview.gesture.GestureLayoutTest;
import com.otaliastudios.cameraview.gesture.ScrollGestureLayout;
import androidx.test.espresso.ViewAction; import androidx.test.espresso.ViewAction;
import androidx.test.ext.junit.runners.AndroidJUnit4; import androidx.test.ext.junit.runners.AndroidJUnit4;
import androidx.test.filters.SmallTest; import androidx.test.filters.SmallTest;
@ -27,43 +24,43 @@ import static org.junit.Assert.assertTrue;
@RunWith(AndroidJUnit4.class) @RunWith(AndroidJUnit4.class)
@SmallTest @SmallTest
public class ScrollGestureLayoutTest extends GestureLayoutTest<ScrollGestureLayout> { public class ScrollGestureFinderTest extends GestureFinderTest<ScrollGestureFinder> {
@Override @Override
protected ScrollGestureLayout create(Context context) { protected ScrollGestureFinder createFinder(@NonNull GestureFinder.Controller controller) {
return new ScrollGestureLayout(context); return new ScrollGestureFinder(controller);
} }
@Test @Test
public void testDefaults() { public void testDefaults() {
assertNull(layout.getGesture()); assertNull(finder.getGesture());
assertEquals(layout.getPoints().length, 2); assertEquals(finder.getPoints().length, 2);
assertEquals(layout.getPoints()[0].x, 0, 0); assertEquals(finder.getPoints()[0].x, 0, 0);
assertEquals(layout.getPoints()[0].y, 0, 0); assertEquals(finder.getPoints()[0].y, 0, 0);
assertEquals(layout.getPoints()[1].x, 0, 0); assertEquals(finder.getPoints()[1].x, 0, 0);
assertEquals(layout.getPoints()[1].y, 0, 0); assertEquals(finder.getPoints()[1].y, 0, 0);
} }
@Test @Test
public void testScrollDisabled() { public void testScrollDisabled() {
layout.setActive(false); finder.setActive(false);
touch.listen(); touchOp.listen();
touch.start(); touchOp.start();
onLayout().perform(swipeUp()); onLayout().perform(swipeUp());
Gesture found = touch.await(500); Gesture found = touchOp.await(500);
assertNull(found); assertNull(found);
} }
private void testScroll(ViewAction scroll, Gesture expected, boolean increasing) { private void testScroll(ViewAction scroll, Gesture expected, boolean increasing) {
touch.listen(); touchOp.listen();
touch.start(); touchOp.start();
onLayout().perform(scroll); onLayout().perform(scroll);
Gesture found = touch.await(500); Gesture found = touchOp.await(500);
assertEquals(found, expected); assertEquals(found, expected);
// How will this move our parameter? // How will this move our parameter?
float curr = 0.5f, min = 0f, max = 1f; float curr = 0.5f, min = 0f, max = 1f;
float newValue = layout.computeValue(curr, min, max); float newValue = finder.computeValue(curr, min, max);
if (increasing) { if (increasing) {
assertTrue(newValue >= curr); assertTrue(newValue >= curr);
assertTrue(newValue <= max); assertTrue(newValue <= max);

@ -2,6 +2,8 @@ package com.otaliastudios.cameraview.gesture;
import android.content.Context; import android.content.Context;
import androidx.annotation.NonNull;
import androidx.test.espresso.action.GeneralClickAction; import androidx.test.espresso.action.GeneralClickAction;
import androidx.test.espresso.action.GeneralLocation; import androidx.test.espresso.action.GeneralLocation;
import androidx.test.espresso.action.Press; import androidx.test.espresso.action.Press;
@ -11,9 +13,6 @@ import androidx.test.filters.SmallTest;
import android.view.InputDevice; import android.view.InputDevice;
import android.view.MotionEvent; import android.view.MotionEvent;
import com.otaliastudios.cameraview.gesture.Gesture;
import com.otaliastudios.cameraview.gesture.GestureLayoutTest;
import com.otaliastudios.cameraview.gesture.TapGestureLayout;
import com.otaliastudios.cameraview.size.Size; import com.otaliastudios.cameraview.size.Size;
import org.junit.Test; import org.junit.Test;
@ -24,59 +23,59 @@ import static org.junit.Assert.*;
@RunWith(AndroidJUnit4.class) @RunWith(AndroidJUnit4.class)
@SmallTest @SmallTest
public class TapGestureLayoutTest extends GestureLayoutTest<TapGestureLayout> { public class TapGestureFinderTest extends GestureFinderTest<TapGestureFinder> {
@Override @Override
protected TapGestureLayout create(Context context) { protected TapGestureFinder createFinder(@NonNull GestureFinder.Controller controller) {
return new TapGestureLayout(context); return new TapGestureFinder(controller);
} }
@Test @Test
public void testDefaults() { public void testDefaults() {
assertNull(layout.getGesture()); assertNull(finder.getGesture());
assertEquals(layout.getPoints().length, 1); assertEquals(finder.getPoints().length, 1);
assertEquals(layout.getPoints()[0].x, 0, 0); assertEquals(finder.getPoints()[0].x, 0, 0);
assertEquals(layout.getPoints()[0].y, 0, 0); assertEquals(finder.getPoints()[0].y, 0, 0);
} }
@Test @Test
public void testTap() { public void testTap() {
touch.listen(); touchOp.listen();
touch.start(); touchOp.start();
GeneralClickAction a = new GeneralClickAction( GeneralClickAction a = new GeneralClickAction(
Tap.SINGLE, GeneralLocation.CENTER, Press.FINGER, Tap.SINGLE, GeneralLocation.CENTER, Press.FINGER,
InputDevice.SOURCE_UNKNOWN, MotionEvent.BUTTON_PRIMARY); InputDevice.SOURCE_UNKNOWN, MotionEvent.BUTTON_PRIMARY);
onLayout().perform(a); onLayout().perform(a);
Gesture found = touch.await(500); Gesture found = touchOp.await(500);
assertEquals(found, Gesture.TAP); assertEquals(found, Gesture.TAP);
Size size = rule.getActivity().getContentSize(); Size size = rule.getActivity().getContentSize();
assertEquals(layout.getPoints()[0].x, (size.getWidth() / 2f), 1f); assertEquals(finder.getPoints()[0].x, (size.getWidth() / 2f), 1f);
assertEquals(layout.getPoints()[0].y, (size.getHeight() / 2f), 1f); assertEquals(finder.getPoints()[0].y, (size.getHeight() / 2f), 1f);
} }
@Test @Test
public void testTapWhileDisabled() { public void testTapWhileDisabled() {
layout.setActive(false); finder.setActive(false);
touch.listen(); touchOp.listen();
touch.start(); touchOp.start();
onLayout().perform(click()); onLayout().perform(click());
Gesture found = touch.await(500); Gesture found = touchOp.await(500);
assertNull(found); assertNull(found);
} }
@Test @Test
public void testLongTap() { public void testLongTap() {
touch.listen(); touchOp.listen();
touch.start(); touchOp.start();
GeneralClickAction a = new GeneralClickAction( GeneralClickAction a = new GeneralClickAction(
Tap.LONG, GeneralLocation.CENTER, Press.FINGER, Tap.LONG, GeneralLocation.CENTER, Press.FINGER,
InputDevice.SOURCE_UNKNOWN, MotionEvent.BUTTON_PRIMARY); InputDevice.SOURCE_UNKNOWN, MotionEvent.BUTTON_PRIMARY);
onLayout().perform(a); onLayout().perform(a);
Gesture found = touch.await(500); Gesture found = touchOp.await(500);
assertEquals(found, Gesture.LONG_TAP); assertEquals(found, Gesture.LONG_TAP);
Size size = rule.getActivity().getContentSize(); Size size = rule.getActivity().getContentSize();
assertEquals(layout.getPoints()[0].x, (size.getWidth() / 2f), 1f); assertEquals(finder.getPoints()[0].x, (size.getWidth() / 2f), 1f);
assertEquals(layout.getPoints()[0].y, (size.getHeight() / 2f), 1f); assertEquals(finder.getPoints()[0].y, (size.getHeight() / 2f), 1f);
} }
} }

@ -27,7 +27,7 @@ public abstract class CameraPreviewTest extends BaseTest {
private final static long DELAY = 4000; private final static long DELAY = 4000;
protected abstract CameraPreview createPreview(Context context, ViewGroup parent, CameraPreview.SurfaceCallback callback); protected abstract CameraPreview createPreview(Context context, ViewGroup parent);
@Rule @Rule
public ActivityTestRule<TestActivity> rule = new ActivityTestRule<>(TestActivity.class); public ActivityTestRule<TestActivity> rule = new ActivityTestRule<>(TestActivity.class);
@ -69,7 +69,8 @@ public abstract class CameraPreviewTest extends BaseTest {
} }
}).when(callback).onSurfaceDestroyed(); }).when(callback).onSurfaceDestroyed();
preview = createPreview(a, a.getContentView(), callback); preview = createPreview(a, a.getContentView());
preview.setSurfaceCallback(callback);
} }
}); });
} }

@ -14,8 +14,8 @@ import org.junit.runner.RunWith;
public class GlCameraPreviewTest extends CameraPreviewTest { public class GlCameraPreviewTest extends CameraPreviewTest {
@Override @Override
protected CameraPreview createPreview(Context context, ViewGroup parent, CameraPreview.SurfaceCallback callback) { protected CameraPreview createPreview(Context context, ViewGroup parent) {
return new GlCameraPreview(context, parent, callback); return new GlCameraPreview(context, parent);
} }
@Override @Override

@ -12,7 +12,7 @@ import com.otaliastudios.cameraview.preview.CameraPreview;
public class MockCameraPreview extends CameraPreview<View, Void> { public class MockCameraPreview extends CameraPreview<View, Void> {
public MockCameraPreview(Context context, ViewGroup parent) { public MockCameraPreview(Context context, ViewGroup parent) {
super(context, parent, null); super(context, parent);
} }
@Override @Override

@ -18,8 +18,8 @@ import org.junit.runner.RunWith;
public class SurfaceCameraPreviewTest extends CameraPreviewTest { public class SurfaceCameraPreviewTest extends CameraPreviewTest {
@Override @Override
protected CameraPreview createPreview(Context context, ViewGroup parent, CameraPreview.SurfaceCallback callback) { protected CameraPreview createPreview(Context context, ViewGroup parent) {
return new SurfaceCameraPreview(context, parent, callback); return new SurfaceCameraPreview(context, parent);
} }
@Override @Override

@ -18,8 +18,8 @@ import org.junit.runner.RunWith;
public class TextureCameraPreviewTest extends CameraPreviewTest { public class TextureCameraPreviewTest extends CameraPreviewTest {
@Override @Override
protected CameraPreview createPreview(Context context, ViewGroup parent, CameraPreview.SurfaceCallback callback) { protected CameraPreview createPreview(Context context, ViewGroup parent) {
return new TextureCameraPreview(context, parent, callback); return new TextureCameraPreview(context, parent);
} }
@Override @Override

@ -37,6 +37,7 @@ import com.otaliastudios.cameraview.controls.Engine;
import com.otaliastudios.cameraview.controls.Facing; import com.otaliastudios.cameraview.controls.Facing;
import com.otaliastudios.cameraview.controls.Flash; import com.otaliastudios.cameraview.controls.Flash;
import com.otaliastudios.cameraview.engine.Camera2Engine; import com.otaliastudios.cameraview.engine.Camera2Engine;
import com.otaliastudios.cameraview.engine.offset.Reference;
import com.otaliastudios.cameraview.markers.MarkerLayout; import com.otaliastudios.cameraview.markers.MarkerLayout;
import com.otaliastudios.cameraview.engine.Camera1Engine; import com.otaliastudios.cameraview.engine.Camera1Engine;
import com.otaliastudios.cameraview.engine.CameraEngine; import com.otaliastudios.cameraview.engine.CameraEngine;
@ -50,11 +51,11 @@ import com.otaliastudios.cameraview.controls.Mode;
import com.otaliastudios.cameraview.controls.Preview; import com.otaliastudios.cameraview.controls.Preview;
import com.otaliastudios.cameraview.controls.VideoCodec; import com.otaliastudios.cameraview.controls.VideoCodec;
import com.otaliastudios.cameraview.controls.WhiteBalance; import com.otaliastudios.cameraview.controls.WhiteBalance;
import com.otaliastudios.cameraview.gesture.GestureLayout; import com.otaliastudios.cameraview.gesture.GestureFinder;
import com.otaliastudios.cameraview.gesture.GestureParser; import com.otaliastudios.cameraview.gesture.GestureParser;
import com.otaliastudios.cameraview.gesture.PinchGestureLayout; import com.otaliastudios.cameraview.gesture.PinchGestureFinder;
import com.otaliastudios.cameraview.gesture.ScrollGestureLayout; import com.otaliastudios.cameraview.gesture.ScrollGestureFinder;
import com.otaliastudios.cameraview.gesture.TapGestureLayout; import com.otaliastudios.cameraview.gesture.TapGestureFinder;
import com.otaliastudios.cameraview.internal.GridLinesLayout; import com.otaliastudios.cameraview.internal.GridLinesLayout;
import com.otaliastudios.cameraview.internal.utils.CropHelper; import com.otaliastudios.cameraview.internal.utils.CropHelper;
import com.otaliastudios.cameraview.internal.utils.OrientationHelper; import com.otaliastudios.cameraview.internal.utils.OrientationHelper;
@ -115,11 +116,13 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
@VisibleForTesting List<FrameProcessor> mFrameProcessors = new CopyOnWriteArrayList<>(); @VisibleForTesting List<FrameProcessor> mFrameProcessors = new CopyOnWriteArrayList<>();
private Lifecycle mLifecycle; private Lifecycle mLifecycle;
// Gestures
@VisibleForTesting PinchGestureFinder mPinchGestureFinder;
@VisibleForTesting TapGestureFinder mTapGestureFinder;
@VisibleForTesting ScrollGestureFinder mScrollGestureFinder;
// Views // Views
GridLinesLayout mGridLinesLayout; GridLinesLayout mGridLinesLayout;
PinchGestureLayout mPinchGestureLayout;
TapGestureLayout mTapGestureLayout;
ScrollGestureLayout mScrollGestureLayout;
MarkerLayout mMarkerLayout; MarkerLayout mMarkerLayout;
private boolean mKeepScreenOn; private boolean mKeepScreenOn;
@SuppressWarnings({"FieldCanBeLocal", "unused"}) @SuppressWarnings({"FieldCanBeLocal", "unused"})
@ -173,16 +176,15 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
mUiHandler = new Handler(Looper.getMainLooper()); mUiHandler = new Handler(Looper.getMainLooper());
mFrameProcessorsHandler = WorkerHandler.get("FrameProcessorsWorker"); mFrameProcessorsHandler = WorkerHandler.get("FrameProcessorsWorker");
// Gestures
mPinchGestureFinder = new PinchGestureFinder(mCameraCallbacks);
mTapGestureFinder = new TapGestureFinder(mCameraCallbacks);
mScrollGestureFinder = new ScrollGestureFinder(mCameraCallbacks);
// Views // Views
mGridLinesLayout = new GridLinesLayout(context); mGridLinesLayout = new GridLinesLayout(context);
mPinchGestureLayout = new PinchGestureLayout(context);
mTapGestureLayout = new TapGestureLayout(context);
mScrollGestureLayout = new ScrollGestureLayout(context);
mMarkerLayout = new MarkerLayout(context); mMarkerLayout = new MarkerLayout(context);
addView(mGridLinesLayout); addView(mGridLinesLayout);
addView(mPinchGestureLayout);
addView(mTapGestureLayout);
addView(mScrollGestureLayout);
addView(mMarkerLayout); addView(mMarkerLayout);
// Create the engine // Create the engine
@ -225,6 +227,26 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
} }
} }
/**
* Engine is instantiated on creation and anytime
* {@link #setEngine(Engine)} is called.
*/
private void doInstantiateEngine() {
mCameraEngine = instantiateCameraEngine(mEngine, mCameraCallbacks);
}
/**
* Preview is instantiated {@link #onAttachedToWindow()}, because
* we want to know if we're hardware accelerated or not.
* However, in tests, we might want to create the preview right after constructor.
*/
@VisibleForTesting
void doInstantiatePreview() {
mCameraPreview = instantiatePreview(mPreview, getContext(), this);
mCameraEngine.setPreview(mCameraPreview);
}
/** /**
* Instantiates the camera engine. * Instantiates the camera engine.
* *
@ -255,30 +277,20 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
LOG.w("preview:", "isHardwareAccelerated:", isHardwareAccelerated()); LOG.w("preview:", "isHardwareAccelerated:", isHardwareAccelerated());
switch (preview) { switch (preview) {
case SURFACE: case SURFACE:
return new SurfaceCameraPreview(context, container, null); return new SurfaceCameraPreview(context, container);
case TEXTURE: { case TEXTURE: {
if (isHardwareAccelerated()) { if (isHardwareAccelerated()) {
// TextureView is not supported without hardware acceleration. // TextureView is not supported without hardware acceleration.
return new TextureCameraPreview(context, container, null); return new TextureCameraPreview(context, container);
} }
} }
case GL_SURFACE: default: { case GL_SURFACE: default: {
mPreview = Preview.GL_SURFACE; mPreview = Preview.GL_SURFACE;
return new GlCameraPreview(context, container, null); return new GlCameraPreview(context, container);
} }
} }
} }
@VisibleForTesting
void doInstantiatePreview() {
mCameraPreview = instantiatePreview(mPreview, getContext(), this);
mCameraEngine.setPreview(mCameraPreview);
}
private void doInstantiateEngine() {
mCameraEngine = instantiateCameraEngine(mEngine, mCameraCallbacks);
}
@Override @Override
protected void onAttachedToWindow() { protected void onAttachedToWindow() {
super.onAttachedToWindow(); super.onAttachedToWindow();
@ -331,7 +343,7 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
*/ */
@Override @Override
protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
Size previewSize = mCameraEngine.getPreviewStreamSize(CameraEngine.REF_VIEW); Size previewSize = mCameraEngine.getPreviewStreamSize(Reference.VIEW);
if (previewSize == null) { if (previewSize == null) {
LOG.w("onMeasure:", "surface is not ready. Calling default behavior."); LOG.w("onMeasure:", "surface is not ready. Calling default behavior.");
super.onMeasure(widthMeasureSpec, heightMeasureSpec); super.onMeasure(widthMeasureSpec, heightMeasureSpec);
@ -474,19 +486,19 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
mGestureMap.put(gesture, action); mGestureMap.put(gesture, action);
switch (gesture) { switch (gesture) {
case PINCH: case PINCH:
mPinchGestureLayout.setActive(mGestureMap.get(Gesture.PINCH) != none); mPinchGestureFinder.setActive(mGestureMap.get(Gesture.PINCH) != none);
break; break;
case TAP: case TAP:
// case DOUBLE_TAP: // case DOUBLE_TAP:
case LONG_TAP: case LONG_TAP:
mTapGestureLayout.setActive( mTapGestureFinder.setActive(
mGestureMap.get(Gesture.TAP) != none || mGestureMap.get(Gesture.TAP) != none ||
// mGestureMap.get(Gesture.DOUBLE_TAP) != none || // mGestureMap.get(Gesture.DOUBLE_TAP) != none ||
mGestureMap.get(Gesture.LONG_TAP) != none); mGestureMap.get(Gesture.LONG_TAP) != none);
break; break;
case SCROLL_HORIZONTAL: case SCROLL_HORIZONTAL:
case SCROLL_VERTICAL: case SCROLL_VERTICAL:
mScrollGestureLayout.setActive( mScrollGestureFinder.setActive(
mGestureMap.get(Gesture.SCROLL_HORIZONTAL) != none || mGestureMap.get(Gesture.SCROLL_HORIZONTAL) != none ||
mGestureMap.get(Gesture.SCROLL_VERTICAL) != none); mGestureMap.get(Gesture.SCROLL_VERTICAL) != none);
break; break;
@ -534,15 +546,15 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
// Pass to our own GestureLayouts // Pass to our own GestureLayouts
CameraOptions options = mCameraEngine.getCameraOptions(); // Non null CameraOptions options = mCameraEngine.getCameraOptions(); // Non null
if (options == null) throw new IllegalStateException("Options should not be null here."); if (options == null) throw new IllegalStateException("Options should not be null here.");
if (mPinchGestureLayout.onTouchEvent(event)) { if (mPinchGestureFinder.onTouchEvent(event)) {
LOG.i("onTouchEvent", "pinch!"); LOG.i("onTouchEvent", "pinch!");
onGesture(mPinchGestureLayout, options); onGesture(mPinchGestureFinder, options);
} else if (mScrollGestureLayout.onTouchEvent(event)) { } else if (mScrollGestureFinder.onTouchEvent(event)) {
LOG.i("onTouchEvent", "scroll!"); LOG.i("onTouchEvent", "scroll!");
onGesture(mScrollGestureLayout, options); onGesture(mScrollGestureFinder, options);
} else if (mTapGestureLayout.onTouchEvent(event)) { } else if (mTapGestureFinder.onTouchEvent(event)) {
LOG.i("onTouchEvent", "tap!"); LOG.i("onTouchEvent", "tap!");
onGesture(mTapGestureLayout, options); onGesture(mTapGestureFinder, options);
} }
return true; return true;
} }
@ -551,7 +563,7 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
// Some gesture layout detected a gesture. It's not known at this moment: // Some gesture layout detected a gesture. It's not known at this moment:
// (1) if it was mapped to some action (we check here) // (1) if it was mapped to some action (we check here)
// (2) if it's supported by the camera (CameraEngine checks) // (2) if it's supported by the camera (CameraEngine checks)
private void onGesture(GestureLayout source, @NonNull CameraOptions options) { private void onGesture(GestureFinder source, @NonNull CameraOptions options) {
Gesture gesture = source.getGesture(); Gesture gesture = source.getGesture();
GestureAction action = mGestureMap.get(gesture); GestureAction action = mGestureMap.get(gesture);
PointF[] points = source.getPoints(); PointF[] points = source.getPoints();
@ -1538,12 +1550,12 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
// Get the preview size and crop according to the current view size. // Get the preview size and crop according to the current view size.
// It's better to do calculations in the REF_VIEW reference, and then flip if needed. // It's better to do calculations in the REF_VIEW reference, and then flip if needed.
Size preview = mCameraEngine.getUncroppedSnapshotSize(CameraEngine.REF_VIEW); Size preview = mCameraEngine.getUncroppedSnapshotSize(Reference.VIEW);
if (preview == null) return null; // Should never happen. if (preview == null) return null; // Should never happen.
AspectRatio viewRatio = AspectRatio.of(getWidth(), getHeight()); AspectRatio viewRatio = AspectRatio.of(getWidth(), getHeight());
Rect crop = CropHelper.computeCrop(preview, viewRatio); Rect crop = CropHelper.computeCrop(preview, viewRatio);
Size cropSize = new Size(crop.width(), crop.height()); Size cropSize = new Size(crop.width(), crop.height());
if (mCameraEngine.flip(CameraEngine.REF_VIEW, CameraEngine.REF_OUTPUT)) { if (mCameraEngine.getAngles().flip(Reference.VIEW, Reference.OUTPUT)) {
return cropSize.flip(); return cropSize.flip();
} else { } else {
return cropSize; return cropSize;
@ -1562,7 +1574,7 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
*/ */
@Nullable @Nullable
public Size getPictureSize() { public Size getPictureSize() {
return mCameraEngine.getPictureSize(CameraEngine.REF_OUTPUT); return mCameraEngine.getPictureSize(Reference.OUTPUT);
} }
@ -1577,7 +1589,7 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
*/ */
@Nullable @Nullable
public Size getVideoSize() { public Size getVideoSize() {
return mCameraEngine.getVideoSize(CameraEngine.REF_OUTPUT); return mCameraEngine.getVideoSize(Reference.OUTPUT);
} }
@ -1732,7 +1744,10 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
//region Callbacks and dispatching //region Callbacks and dispatching
@VisibleForTesting @VisibleForTesting
class CameraCallbacks implements CameraEngine.Callback, OrientationHelper.Callback { class CameraCallbacks implements
CameraEngine.Callback,
OrientationHelper.Callback,
GestureFinder.Controller {
private CameraLogger mLogger = CameraLogger.create(CameraCallbacks.class.getSimpleName()); private CameraLogger mLogger = CameraLogger.create(CameraCallbacks.class.getSimpleName());
@ -1742,6 +1757,16 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
return CameraView.this.getContext(); return CameraView.this.getContext();
} }
@Override
public int getWidth() {
return CameraView.this.getWidth();
}
@Override
public int getHeight() {
return CameraView.this.getHeight();
}
@Override @Override
public void dispatchOnCameraOpened(final CameraOptions options) { public void dispatchOnCameraOpened(final CameraOptions options) {
mLogger.i("dispatchOnCameraOpened", options); mLogger.i("dispatchOnCameraOpened", options);
@ -1786,7 +1811,6 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
@Override @Override
public void onShutter(boolean shouldPlaySound) { public void onShutter(boolean shouldPlaySound) {
if (shouldPlaySound && mPlaySounds) { if (shouldPlaySound && mPlaySounds) {
//noinspection all
playSound(MediaActionSound.SHUTTER_CLICK); playSound(MediaActionSound.SHUTTER_CLICK);
} }
} }
@ -1847,7 +1871,6 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
@Override @Override
public void run() { public void run() {
if (success && mPlaySounds) { if (success && mPlaySounds) {
//noinspection all
playSound(MediaActionSound.FOCUS_COMPLETE); playSound(MediaActionSound.FOCUS_COMPLETE);
} }

@ -14,7 +14,6 @@ import androidx.annotation.Nullable;
import androidx.annotation.VisibleForTesting; import androidx.annotation.VisibleForTesting;
import androidx.annotation.WorkerThread; import androidx.annotation.WorkerThread;
import android.util.Log;
import android.view.SurfaceHolder; import android.view.SurfaceHolder;
import com.google.android.gms.tasks.Task; import com.google.android.gms.tasks.Task;
@ -23,6 +22,8 @@ import com.otaliastudios.cameraview.CameraException;
import com.otaliastudios.cameraview.CameraLogger; import com.otaliastudios.cameraview.CameraLogger;
import com.otaliastudios.cameraview.CameraOptions; import com.otaliastudios.cameraview.CameraOptions;
import com.otaliastudios.cameraview.controls.Engine; import com.otaliastudios.cameraview.controls.Engine;
import com.otaliastudios.cameraview.engine.offset.Axis;
import com.otaliastudios.cameraview.engine.offset.Reference;
import com.otaliastudios.cameraview.frame.Frame; import com.otaliastudios.cameraview.frame.Frame;
import com.otaliastudios.cameraview.PictureResult; import com.otaliastudios.cameraview.PictureResult;
import com.otaliastudios.cameraview.VideoResult; import com.otaliastudios.cameraview.VideoResult;
@ -34,7 +35,6 @@ import com.otaliastudios.cameraview.controls.Hdr;
import com.otaliastudios.cameraview.controls.Mode; import com.otaliastudios.cameraview.controls.Mode;
import com.otaliastudios.cameraview.controls.WhiteBalance; import com.otaliastudios.cameraview.controls.WhiteBalance;
import com.otaliastudios.cameraview.internal.utils.CropHelper; import com.otaliastudios.cameraview.internal.utils.CropHelper;
import com.otaliastudios.cameraview.internal.utils.Op;
import com.otaliastudios.cameraview.picture.Full1PictureRecorder; import com.otaliastudios.cameraview.picture.Full1PictureRecorder;
import com.otaliastudios.cameraview.picture.Snapshot1PictureRecorder; import com.otaliastudios.cameraview.picture.Snapshot1PictureRecorder;
import com.otaliastudios.cameraview.picture.SnapshotGlPictureRecorder; import com.otaliastudios.cameraview.picture.SnapshotGlPictureRecorder;
@ -57,52 +57,84 @@ public class Camera1Engine extends CameraEngine implements
private static final String TAG = Camera1Engine.class.getSimpleName(); private static final String TAG = Camera1Engine.class.getSimpleName();
private static final CameraLogger LOG = CameraLogger.create(TAG); private static final CameraLogger LOG = CameraLogger.create(TAG);
private static final int PREVIEW_FORMAT = ImageFormat.NV21;
@VisibleForTesting static final int AUTOFOCUS_END_DELAY_MILLIS = 2500; @VisibleForTesting static final int AUTOFOCUS_END_DELAY_MILLIS = 2500;
private Camera mCamera; private Camera mCamera;
@VisibleForTesting int mCameraId; @VisibleForTesting int mCameraId;
private int mPreviewStreamFormat;
private Runnable mFocusEndRunnable; private Runnable mFocusEndRunnable;
private final Runnable mFocusResetRunnable = new Runnable() {
@Override
public void run() {
if (!isCameraAvailable()) return;
mCamera.cancelAutoFocus();
Camera.Parameters params = mCamera.getParameters();
int maxAF = params.getMaxNumFocusAreas();
int maxAE = params.getMaxNumMeteringAreas();
if (maxAF > 0) params.setFocusAreas(null);
if (maxAE > 0) params.setMeteringAreas(null);
applyDefaultFocus(params); // Revert to internal focus.
mCamera.setParameters(params);
}
};
public Camera1Engine(@NonNull Callback callback) { public Camera1Engine(@NonNull Callback callback) {
super(callback); super(callback);
mMapper = Mapper.get(Engine.CAMERA1); mMapper = Mapper.get(Engine.CAMERA1);
} }
private boolean isCameraAvailable() { //region Utilities
return getEngineState() == STATE_STARTED;
@Override
public void onError(int error, Camera camera) {
if (error == Camera.CAMERA_ERROR_SERVER_DIED) {
// Looks like this is recoverable.
LOG.w("Recoverable error inside the onError callback.", "CAMERA_ERROR_SERVER_DIED");
restart();
return;
} }
private void schedule(@Nullable final Op<Void> op, final boolean ensureAvailable, final Runnable action) { String message = LOG.e("Internal Camera1 error.", error);
mHandler.run(new Runnable() { Exception runtime = new RuntimeException(message);
int reason;
switch (error) {
case Camera.CAMERA_ERROR_EVICTED: reason = CameraException.REASON_DISCONNECTED; break;
case Camera.CAMERA_ERROR_UNKNOWN: reason = CameraException.REASON_UNKNOWN; break;
default: reason = CameraException.REASON_UNKNOWN;
}
throw new CameraException(runtime, reason);
}
//endregion
//region Protected APIs
@NonNull
@Override @Override
public void run() { protected List<Size> getPreviewStreamAvailableSizes() {
if (ensureAvailable && !isCameraAvailable()) { List<Camera.Size> sizes = mCamera.getParameters().getSupportedPreviewSizes();
if (op != null) op.end(null); List<Size> result = new ArrayList<>(sizes.size());
} else { for (Camera.Size size : sizes) {
action.run(); Size add = new Size(size.width, size.height);
if (op != null) op.end(null); if (!result.contains(add)) result.add(add);
} }
LOG.i("getPreviewStreamAvailableSizes:", result);
return result;
} }
});
@WorkerThread
@Override
protected void onPreviewStreamSizeChanged() {
restartPreview();
}
@Override
protected boolean collectCameraInfo(@NonNull Facing facing) {
int internalFacing = mMapper.map(facing);
LOG.i("collectCameraInfo", "Facing:", facing, "Internal:", internalFacing, "Cameras:", Camera.getNumberOfCameras());
Camera.CameraInfo cameraInfo = new Camera.CameraInfo();
for (int i = 0, count = Camera.getNumberOfCameras(); i < count; i++) {
Camera.getCameraInfo(i, cameraInfo);
if (cameraInfo.facing == internalFacing) {
setSensorOffset(facing, cameraInfo.orientation);
mCameraId = i;
return true;
}
}
return false;
} }
//endregion
//region Start
@NonNull @NonNull
@WorkerThread @WorkerThread
@Override @Override
@ -118,16 +150,10 @@ public class Camera1Engine extends CameraEngine implements
// Set parameters that might have been set before the camera was opened. // Set parameters that might have been set before the camera was opened.
LOG.i("onStartEngine:", "Applying default parameters."); LOG.i("onStartEngine:", "Applying default parameters.");
Camera.Parameters params = mCamera.getParameters(); Camera.Parameters params = mCamera.getParameters();
mCameraOptions = new CameraOptions(params, flip(REF_SENSOR, REF_VIEW)); mCameraOptions = new CameraOptions(params, getAngles().flip(Reference.SENSOR, Reference.VIEW));
applyDefaultFocus(params); applyAllParameters(params);
applyFlash(params, Flash.OFF);
applyLocation(params, null);
applyWhiteBalance(params, WhiteBalance.AUTO);
applyHdr(params, Hdr.OFF);
applyPlaySounds(mPlaySounds);
params.setRecordingHint(mMode == Mode.VIDEO);
mCamera.setParameters(params); mCamera.setParameters(params);
mCamera.setDisplayOrientation(offset(REF_SENSOR, REF_VIEW)); // <- not allowed during preview mCamera.setDisplayOrientation(getAngles().offset(Reference.SENSOR, Reference.VIEW, Axis.ABSOLUTE)); // <- not allowed during preview
LOG.i("onStartEngine:", "Ended"); LOG.i("onStartEngine:", "Ended");
return Tasks.forResult(null); return Tasks.forResult(null);
} }
@ -161,17 +187,17 @@ public class Camera1Engine extends CameraEngine implements
LOG.i("onStartPreview", "Dispatching onCameraPreviewStreamSizeChanged."); LOG.i("onStartPreview", "Dispatching onCameraPreviewStreamSizeChanged.");
mCallback.onCameraPreviewStreamSizeChanged(); mCallback.onCameraPreviewStreamSizeChanged();
Size previewSize = getPreviewStreamSize(REF_VIEW); Size previewSize = getPreviewStreamSize(Reference.VIEW);
if (previewSize == null) { if (previewSize == null) {
throw new IllegalStateException("previewStreamSize should not be null at this point."); throw new IllegalStateException("previewStreamSize should not be null at this point.");
} }
mPreview.setStreamSize(previewSize.getWidth(), previewSize.getHeight()); mPreview.setStreamSize(previewSize.getWidth(), previewSize.getHeight());
Camera.Parameters params = mCamera.getParameters(); Camera.Parameters params = mCamera.getParameters();
mPreviewStreamFormat = params.getPreviewFormat(); params.setPreviewFormat(ImageFormat.NV21); // should be the default, but let's make sure, since YuvImage will only support this & a few others
params.setPreviewSize(mPreviewStreamSize.getWidth(), mPreviewStreamSize.getHeight()); // <- not allowed during preview params.setPreviewSize(mPreviewStreamSize.getWidth(), mPreviewStreamSize.getHeight()); // not allowed during preview
if (mMode == Mode.PICTURE) { if (getMode() == Mode.PICTURE) {
params.setPictureSize(mCaptureSize.getWidth(), mCaptureSize.getHeight()); // <- allowed params.setPictureSize(mCaptureSize.getWidth(), mCaptureSize.getHeight()); // allowed during preview
} else { } else {
// mCaptureSize in this case is a video size. The available video sizes are not necessarily // mCaptureSize in this case is a video size. The available video sizes are not necessarily
// a subset of the picture sizes, so we can't use the mCaptureSize value: it might crash. // a subset of the picture sizes, so we can't use the mCaptureSize value: it might crash.
@ -184,7 +210,7 @@ public class Camera1Engine extends CameraEngine implements
mCamera.setPreviewCallbackWithBuffer(null); // Release anything left mCamera.setPreviewCallbackWithBuffer(null); // Release anything left
mCamera.setPreviewCallbackWithBuffer(this); // Add ourselves mCamera.setPreviewCallbackWithBuffer(this); // Add ourselves
getFrameManager().setUp(ImageFormat.getBitsPerPixel(mPreviewStreamFormat), mPreviewStreamSize); getFrameManager().setUp(ImageFormat.getBitsPerPixel(PREVIEW_FORMAT), mPreviewStreamSize);
LOG.i("onStartPreview", "Starting preview with startPreview()."); LOG.i("onStartPreview", "Starting preview with startPreview().");
try { try {
@ -197,6 +223,10 @@ public class Camera1Engine extends CameraEngine implements
return Tasks.forResult(null); return Tasks.forResult(null);
} }
//endregion
//region Stop
@NonNull @NonNull
@Override @Override
protected Task<Void> onStopPreview() { protected Task<Void> onStopPreview() {
@ -205,7 +235,6 @@ public class Camera1Engine extends CameraEngine implements
mVideoRecorder = null; mVideoRecorder = null;
} }
mPictureRecorder = null; mPictureRecorder = null;
mPreviewStreamFormat = 0;
getFrameManager().release(); getFrameManager().release();
mCamera.setPreviewCallbackWithBuffer(null); // Release anything left mCamera.setPreviewCallbackWithBuffer(null); // Release anything left
try { try {
@ -216,7 +245,6 @@ public class Camera1Engine extends CameraEngine implements
return Tasks.forResult(null); return Tasks.forResult(null);
} }
@NonNull @NonNull
@Override @Override
protected Task<Void> onStopBind() { protected Task<Void> onStopBind() {
@ -263,77 +291,172 @@ public class Camera1Engine extends CameraEngine implements
return Tasks.forResult(null); return Tasks.forResult(null);
} }
//endregion

//region Pictures

@WorkerThread
@Override
protected void onTakePicture(@NonNull PictureResult.Stub stub) {
stub.rotation = getAngles().offset(Reference.SENSOR, Reference.OUTPUT, Axis.RELATIVE_TO_SENSOR);
stub.size = getPictureSize(Reference.OUTPUT);
mPictureRecorder = new Full1PictureRecorder(stub, Camera1Engine.this, mCamera);
mPictureRecorder.take();
}

@WorkerThread
@Override
protected void onTakePictureSnapshot(@NonNull PictureResult.Stub stub, @NonNull AspectRatio viewAspectRatio) {
stub.size = getUncroppedSnapshotSize(Reference.OUTPUT); // Not the real size: it will be cropped to match the view ratio
stub.rotation = getAngles().offset(Reference.SENSOR, Reference.OUTPUT, Axis.RELATIVE_TO_SENSOR); // Actually it will be rotated and set to 0.
AspectRatio outputRatio = getAngles().flip(Reference.OUTPUT, Reference.VIEW) ? viewAspectRatio.flip() : viewAspectRatio;
if (mPreview instanceof GlCameraPreview && Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT) {
mPictureRecorder = new SnapshotGlPictureRecorder(stub, this, (GlCameraPreview) mPreview, outputRatio);
} else {
mPictureRecorder = new Snapshot1PictureRecorder(stub, this, mCamera, outputRatio);
}
mPictureRecorder.take();
}

//endregion

//region Videos

@Override
protected void onTakeVideo(@NonNull VideoResult.Stub stub) {
stub.rotation = getAngles().offset(Reference.SENSOR, Reference.OUTPUT, Axis.RELATIVE_TO_SENSOR);
stub.size = getAngles().flip(Reference.SENSOR, Reference.OUTPUT) ? mCaptureSize.flip() : mCaptureSize;
// Unlock the camera and start recording.
try {
mCamera.unlock();
} catch (Exception e) {
// If this failed, we are unlikely able to record the video.
// Dispatch an error.
onVideoResult(null, e);
return;
}
mVideoRecorder = new Full1VideoRecorder(Camera1Engine.this, mCamera, mCameraId);
mVideoRecorder.start(stub);
}

@SuppressLint("NewApi")
@WorkerThread
@Override
protected void onTakeVideoSnapshot(@NonNull VideoResult.Stub stub, @NonNull AspectRatio viewAspectRatio) {
if (!(mPreview instanceof GlCameraPreview)) {
throw new IllegalStateException("Video snapshots are only supported with GlCameraPreview.");
}
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.JELLY_BEAN_MR2) {
throw new IllegalStateException("Video snapshots are only supported starting from API 18.");
}
GlCameraPreview glPreview = (GlCameraPreview) mPreview;
// Output size is easy:
Size outputSize = getUncroppedSnapshotSize(Reference.OUTPUT);
if (outputSize == null) {
throw new IllegalStateException("outputSize should not be null.");
}
AspectRatio outputRatio = getAngles().flip(Reference.OUTPUT, Reference.VIEW) ? viewAspectRatio.flip() : viewAspectRatio;
Rect outputCrop = CropHelper.computeCrop(outputSize, outputRatio);
outputSize = new Size(outputCrop.width(), outputCrop.height());
stub.size = outputSize;
stub.rotation = getAngles().offset(Reference.VIEW, Reference.OUTPUT, Axis.ABSOLUTE);
// Start.
mVideoRecorder = new SnapshotVideoRecorder(Camera1Engine.this, glPreview);
mVideoRecorder.start(stub);
}

@Override
public void onVideoResult(@Nullable VideoResult.Stub result, @Nullable Exception exception) {
super.onVideoResult(result, exception);
if (result == null) {
// Something went wrong, lock the camera again.
mCamera.lock();
}
}

//endregion

//region Parameters

private void applyAllParameters(@NonNull Camera.Parameters params) {
params.setRecordingHint(getMode() == Mode.VIDEO);
applyDefaultFocus(params);
applyFlash(params, Flash.OFF);
applyLocation(params, null);
applyWhiteBalance(params, WhiteBalance.AUTO);
applyHdr(params, Hdr.OFF);
applyZoom(params, 0F);
applyExposureCorrection(params, 0F);
applyPlaySounds(mPlaySounds);
}

private void applyDefaultFocus(@NonNull Camera.Parameters params) {
List<String> modes = params.getSupportedFocusModes();
if (getMode() == Mode.VIDEO &&
modes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO)) {
params.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
return;
}
if (modes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE)) {
params.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE);
return;
}
if (modes.contains(Camera.Parameters.FOCUS_MODE_INFINITY)) {
params.setFocusMode(Camera.Parameters.FOCUS_MODE_INFINITY);
return;
}
if (modes.contains(Camera.Parameters.FOCUS_MODE_FIXED)) {
params.setFocusMode(Camera.Parameters.FOCUS_MODE_FIXED);
//noinspection UnnecessaryReturnStatement
return;
}
}

@Override
public void setFlash(@NonNull Flash flash) {
final Flash old = mFlash;
mFlash = flash;
mHandler.run(new Runnable() {
@Override
public void run() {
if (getEngineState() == STATE_STARTED) {
Camera.Parameters params = mCamera.getParameters();
if (applyFlash(params, old)) mCamera.setParameters(params);
}
mFlashOp.end(null);
}
});
}

private boolean applyFlash(@NonNull Camera.Parameters params, @NonNull Flash oldFlash) {
if (mCameraOptions.supports(mFlash)) {
params.setFlashMode((String) mMapper.map(mFlash));
return true;
}
mFlash = oldFlash;
return false;
}

@Override
public void setLocation(@Nullable Location location) {
final Location oldLocation = mLocation;
mLocation = location;
mHandler.run(new Runnable() {
@Override
public void run() {
if (getEngineState() == STATE_STARTED) {
Camera.Parameters params = mCamera.getParameters();
if (applyLocation(params, oldLocation)) mCamera.setParameters(params);
}
mLocationOp.end(null);
}
});
}
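// Illustrative sketch, not the CropHelper implementation (which is not shown in this diff):
// the snapshot methods above take the uncropped stream size and crop it, centered, to the
// view/surface aspect ratio. Names and rounding below are assumptions for clarity only.
private static Rect exampleComputeCrop(Size source, AspectRatio target) {
    int width = source.getWidth();
    int height = source.getHeight();
    if (AspectRatio.of(width, height).toFloat() > target.toFloat()) {
        // Source is wider than the target ratio: crop the width, keep the height.
        int newWidth = Math.round(height * target.toFloat());
        int left = (width - newWidth) / 2;
        return new Rect(left, 0, left + newWidth, height);
    } else {
        // Source is taller than the target ratio: crop the height, keep the width.
        int newHeight = Math.round(width / target.toFloat());
        int top = (height - newHeight) / 2;
        return new Rect(0, top, width, top + newHeight);
    }
}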
@@ -353,12 +476,15 @@ public class Camera1Engine extends CameraEngine implements
public void setWhiteBalance(@NonNull WhiteBalance whiteBalance) {
final WhiteBalance old = mWhiteBalance;
mWhiteBalance = whiteBalance;
mHandler.run(new Runnable() {
@Override
public void run() {
if (getEngineState() == STATE_STARTED) {
Camera.Parameters params = mCamera.getParameters();
if (applyWhiteBalance(params, old)) mCamera.setParameters(params);
}
mWhiteBalanceOp.end(null);
}
});
}
@@ -375,12 +501,15 @@ public class Camera1Engine extends CameraEngine implements
public void setHdr(@NonNull Hdr hdr) {
final Hdr old = mHdr;
mHdr = hdr;
mHandler.run(new Runnable() {
@Override
public void run() {
if (getEngineState() == STATE_STARTED) {
Camera.Parameters params = mCamera.getParameters();
if (applyHdr(params, old)) mCamera.setParameters(params);
}
mHdrOp.end(null);
}
});
}
@@ -393,268 +522,149 @@ public class Camera1Engine extends CameraEngine implements
return false;
}

@Override
public void setZoom(final float zoom, @Nullable final PointF[] points, final boolean notify) {
final float old = mZoomValue;
mZoomValue = zoom;
mHandler.run(new Runnable() {
@Override
public void run() {
if (getEngineState() == STATE_STARTED) {
Camera.Parameters params = mCamera.getParameters();
if (applyZoom(params, old)) {
mCamera.setParameters(params);
if (notify) {
mCallback.dispatchOnZoomChanged(mZoomValue, points);
}
}
}
mZoomOp.end(null);
}
});
}

private boolean applyZoom(@NonNull Camera.Parameters params, float oldZoom) {
if (mCameraOptions.isZoomSupported()) {
float max = params.getMaxZoom();
params.setZoom((int) (mZoomValue * max));
mCamera.setParameters(params);
return true;
}
mZoomValue = oldZoom;
return false;
}

@Override
public void setExposureCorrection(final float EVvalue, @NonNull final float[] bounds,
@Nullable final PointF[] points, final boolean notify) {
final float old = mExposureCorrectionValue;
mExposureCorrectionValue = EVvalue;
mHandler.run(new Runnable() {
@Override
public void run() {
if (getEngineState() == STATE_STARTED) {
Camera.Parameters params = mCamera.getParameters();
if (applyExposureCorrection(params, old)) {
mCamera.setParameters(params);
if (notify) {
mCallback.dispatchOnExposureCorrectionChanged(mExposureCorrectionValue, bounds, points);
}
}
}
mExposureCorrectionOp.end(null);
}
});
}

private boolean applyExposureCorrection(@NonNull Camera.Parameters params, float oldExposureCorrection) {
if (mCameraOptions.isExposureCorrectionSupported()) {
// Just make sure we're inside boundaries.
float max = mCameraOptions.getExposureCorrectionMaxValue();
float min = mCameraOptions.getExposureCorrectionMinValue();
float val = mExposureCorrectionValue;
val = val < min ? min : val > max ? max : val; // cap
mExposureCorrectionValue = val;
// Apply.
int indexValue = (int) (mExposureCorrectionValue / params.getExposureCompensationStep());
params.setExposureCompensation(indexValue);
return true;
}
mExposureCorrectionValue = oldExposureCorrection;
return false;
}

@Override
public void setPlaySounds(boolean playSounds) {
final boolean old = mPlaySounds;
mPlaySounds = playSounds;
mHandler.run(new Runnable() {
@Override
public void run() {
if (getEngineState() == STATE_STARTED) {
applyPlaySounds(old);
}
mPlaySoundsOp.end(null);
}
});
}

@SuppressWarnings("UnusedReturnValue")
@TargetApi(17)
private boolean applyPlaySounds(boolean oldPlaySound) {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR1) {
Camera.CameraInfo info = new Camera.CameraInfo();
Camera.getCameraInfo(mCameraId, info);
if (info.canDisableShutterSound) {
try {
// this method is documented to throw on some occasions. #377
return mCamera.enableShutterSound(mPlaySounds);
} catch (RuntimeException exception) {
return false;
}
}
}
if (mPlaySounds) {
return true;
}
mPlaySounds = oldPlaySound;
return false;
}

//endregion

//region Frame Processing

@NonNull
@Override
protected FrameManager instantiateFrameManager() {
return new FrameManager(2, this);
}

@Override
public void onBufferAvailable(@NonNull byte[] buffer) {
if (getEngineState() == STATE_STARTED) {
mCamera.addCallbackBuffer(buffer);
}
}

@Override
public void onPreviewFrame(@NonNull byte[] data, Camera camera) {
Frame frame = getFrameManager().getFrame(data,
System.currentTimeMillis(),
getAngles().offset(Reference.SENSOR, Reference.OUTPUT, Axis.RELATIVE_TO_SENSOR),
mPreviewStreamSize,
PREVIEW_FORMAT);
mCallback.dispatchFrame(frame);
}

//endregion
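// Illustrative sketch only: the Angles class introduced by this commit is not shown in this
// section, so the composition below is an assumption. For Camera1, the output rotation is
// classically built from the sensor orientation and the device orientation (see the
// Camera.Parameters#setRotation documentation); front cameras are mirrored, so the device
// orientation is subtracted instead of added. Something along these lines:
private static int exampleOutputRotation(int sensorOrientation, int deviceOrientation, boolean front) {
    return front
            ? (sensorOrientation - deviceOrientation + 360) % 360
            : (sensorOrientation + deviceOrientation) % 360;
}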
//region Auto Focus

@Override
public void startAutoFocus(@Nullable final Gesture gesture, @NonNull final PointF point) {
// Must get width and height from the UI thread.
// TODO could take mPreview.surfaceSize like Camera2 does?
int viewWidth = 0, viewHeight = 0;
if (mPreview != null && mPreview.hasSurface()) {
viewWidth = mPreview.getView().getWidth();
@@ -662,14 +672,15 @@ public class Camera1Engine extends CameraEngine implements
}
final int viewWidthF = viewWidth;
final int viewHeightF = viewHeight;
mHandler.run(new Runnable() {
@Override
public void run() {
if (getEngineState() < STATE_STARTED) return;
if (!mCameraOptions.isAutoFocusSupported()) return;
final PointF p = new PointF(point.x, point.y); // copy.
int offset = getAngles().offset(Reference.SENSOR, Reference.VIEW, Axis.ABSOLUTE);
List<Camera.Area> meteringAreas2 = computeMeteringAreas(p.x, p.y,
viewWidthF, viewHeightF, offset);
List<Camera.Area> meteringAreas1 = meteringAreas2.subList(0, 1);
// At this point we are sure that camera supports auto focus... right? Look at CameraView.onTouchEvent().
@@ -688,10 +699,8 @@ public class Camera1Engine extends CameraEngine implements
mFocusEndRunnable = new Runnable() {
@Override
public void run() {
mCallback.dispatchOnFocusEnd(gesture, false, p);
}
};
mHandler.post(AUTOFOCUS_END_DELAY_MILLIS, mFocusEndRunnable);
@@ -717,6 +726,7 @@ public class Camera1Engine extends CameraEngine implements
// Let the mFocusEndRunnable do its job. (could remove it and quickly dispatch
// onFocusEnd here, but let's make it simpler).
}
}
});
}
@@ -763,38 +773,21 @@ public class Camera1Engine extends CameraEngine implements
return new Rect(left, top, right, bottom);
}
private final Runnable mFocusResetRunnable = new Runnable() {
@Override
public void run() {
if (getEngineState() < STATE_STARTED) return;
mCamera.cancelAutoFocus();
Camera.Parameters params = mCamera.getParameters();
int maxAF = params.getMaxNumFocusAreas();
int maxAE = params.getMaxNumMeteringAreas();
if (maxAF > 0) params.setFocusAreas(null);
if (maxAE > 0) params.setMeteringAreas(null);
applyDefaultFocus(params); // Revert to internal focus.
mCamera.setParameters(params);
}
};

//endregion
}

@@ -44,6 +44,8 @@ import com.otaliastudios.cameraview.controls.Flash;
import com.otaliastudios.cameraview.controls.Hdr;
import com.otaliastudios.cameraview.controls.Mode;
import com.otaliastudios.cameraview.controls.WhiteBalance;
import com.otaliastudios.cameraview.engine.offset.Axis;
import com.otaliastudios.cameraview.engine.offset.Reference;
import com.otaliastudios.cameraview.frame.Frame;
import com.otaliastudios.cameraview.frame.FrameManager;
import com.otaliastudios.cameraview.gesture.Gesture;
@@ -299,7 +301,8 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
CameraCharacteristics characteristics = mManager.getCameraCharacteristics(cameraId);
if (internalFacing == readCharacteristic(characteristics, CameraCharacteristics.LENS_FACING, -99)) {
mCameraId = cameraId;
int sensorOffset = readCharacteristic(characteristics, CameraCharacteristics.SENSOR_ORIENTATION, 0);
setSensorOffset(facing, sensorOffset);
return true;
}
} catch (CameraAccessException ignore) {
@@ -330,7 +333,8 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
try {
LOG.i("createCamera:", "Applying default parameters.");
mCameraCharacteristics = mManager.getCameraCharacteristics(mCameraId);
boolean flip = getAngles().flip(Reference.SENSOR, Reference.VIEW);
mCameraOptions = new CameraOptions(mManager, mCameraId, flip);
createRepeatingRequestBuilder(CameraDevice.TEMPLATE_PREVIEW);
} catch (CameraAccessException e) {
task.trySetException(createCameraException(e));
@@ -406,7 +410,7 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
outputSurfaces.add(mPreviewStreamSurface);
// 2. VIDEO RECORDING
if (getMode() == Mode.VIDEO) {
if (Full2VideoRecorder.SUPPORTS_PERSISTENT_SURFACE) {
mFullVideoPersistentSurface = MediaCodec.createPersistentInputSurface();
outputSurfaces.add(mFullVideoPersistentSurface);
@@ -422,7 +426,7 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
}
// 3. PICTURE RECORDING
if (getMode() == Mode.PICTURE) {
mPictureReader = ImageReader.newInstance(
mCaptureSize.getWidth(),
mCaptureSize.getHeight(),
@@ -433,7 +437,7 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
}
// 4. FRAME PROCESSING
if (hasFrameProcessors()) {
// Choose the size.
StreamConfigurationMap streamMap = mCameraCharacteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
if (streamMap == null) throw new RuntimeException("StreamConfigurationMap is null. Should not happen.");
@@ -489,13 +493,13 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
LOG.i("onStartPreview", "Dispatching onCameraPreviewStreamSizeChanged.");
mCallback.onCameraPreviewStreamSizeChanged();
Size previewSizeForView = getPreviewStreamSize(Reference.VIEW);
if (previewSizeForView == null) {
throw new IllegalStateException("previewStreamSize should not be null at this point.");
}
mPreview.setStreamSize(previewSizeForView.getWidth(), previewSizeForView.getHeight());
mPreview.setDrawRotation(getAngles().offset(Reference.BASE, Reference.VIEW, Axis.ABSOLUTE));
if (hasFrameProcessors()) {
getFrameManager().setUp(ImageFormat.getBitsPerPixel(FRAME_PROCESSING_FORMAT), mFrameProcessingSize);
}
@@ -537,7 +541,7 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
mVideoRecorder = null;
}
mPictureRecorder = null;
if (hasFrameProcessors()) {
getFrameManager().release();
}
try {
@@ -613,9 +617,9 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
@WorkerThread
@Override
protected void onTakePictureSnapshot(@NonNull PictureResult.Stub stub, @NonNull AspectRatio viewAspectRatio) {
stub.size = getUncroppedSnapshotSize(Reference.OUTPUT); // Not the real size: it will be cropped to match the view ratio
stub.rotation = getAngles().offset(Reference.SENSOR, Reference.OUTPUT, Axis.RELATIVE_TO_SENSOR); // Actually it will be rotated and set to 0.
AspectRatio outputRatio = getAngles().flip(Reference.OUTPUT, Reference.VIEW) ? viewAspectRatio.flip() : viewAspectRatio;
if (mPreview instanceof GlCameraPreview) {
mPictureRecorder = new SnapshotGlPictureRecorder(stub, this, (GlCameraPreview) mPreview, outputRatio);
} else {
@@ -626,8 +630,8 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
@Override
protected void onTakePicture(@NonNull PictureResult.Stub stub) {
stub.rotation = getAngles().offset(Reference.SENSOR, Reference.OUTPUT, Axis.RELATIVE_TO_SENSOR);
stub.size = getPictureSize(Reference.OUTPUT);
try {
CaptureRequest.Builder builder = mCamera.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
applyAllParameters(builder);
@@ -647,7 +651,6 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
public void onPictureResult(@Nullable PictureResult.Stub result, @Nullable Exception error) {
boolean fullPicture = mPictureRecorder instanceof Full2PictureRecorder;
super.onPictureResult(result, error);
if (fullPicture && mPictureCaptureStopsPreview) {
// See comments in Full2PictureRecorder.
applyRepeatingRequestBuilder();
@@ -662,8 +665,8 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
@Override
protected void onTakeVideo(@NonNull VideoResult.Stub stub) {
LOG.i("onTakeVideo", "called.");
stub.rotation = getAngles().offset(Reference.SENSOR, Reference.OUTPUT, Axis.RELATIVE_TO_SENSOR);
stub.size = getAngles().flip(Reference.SENSOR, Reference.OUTPUT) ? mCaptureSize.flip() : mCaptureSize;
if (!Full2VideoRecorder.SUPPORTS_PERSISTENT_SURFACE) {
// On API 21 and 22, we must restart the session at each time.
// Save the pending data and restart the session.
@@ -676,9 +679,7 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
}
}

private void doTakeVideo(@NonNull final VideoResult.Stub stub) {
mVideoRecorder = new Full2VideoRecorder(this, mCameraId, mFullVideoPersistentSurface);
Full2VideoRecorder recorder = (Full2VideoRecorder) mVideoRecorder;
try {
createRepeatingRequestBuilder(CameraDevice.TEMPLATE_RECORD);
@@ -698,10 +699,6 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
}
}
/**
* See {@link CameraEngine#onTakeVideoSnapshot(VideoResult.Stub, AspectRatio)}
* to read about the size and rotation computation.
*/
@WorkerThread
@Override
protected void onTakeVideoSnapshot(@NonNull VideoResult.Stub stub, @NonNull AspectRatio viewAspectRatio) {
@@ -709,23 +706,20 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
throw new IllegalStateException("Video snapshots are only supported with GlCameraPreview.");
}
GlCameraPreview glPreview = (GlCameraPreview) mPreview;
// Output size is easy:
Size outputSize = getUncroppedSnapshotSize(Reference.OUTPUT);
if (outputSize == null) {
throw new IllegalStateException("outputSize should not be null.");
}
AspectRatio outputRatio = getAngles().flip(Reference.OUTPUT, Reference.VIEW) ? viewAspectRatio.flip() : viewAspectRatio;
Rect outputCrop = CropHelper.computeCrop(outputSize, outputRatio);
outputSize = new Size(outputCrop.width(), outputCrop.height());
stub.size = outputSize;
stub.rotation = getAngles().offset(Reference.VIEW, Reference.OUTPUT, Axis.ABSOLUTE);
// Start.
mVideoRecorder = new SnapshotVideoRecorder(this, glPreview);
mVideoRecorder.start(stub);
}
@@ -764,7 +758,7 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
int[] modesArray = readCharacteristic(CameraCharacteristics.CONTROL_AF_AVAILABLE_MODES, new int[]{});
List<Integer> modes = new ArrayList<>();
for (int mode : modesArray) { modes.add(mode); }
if (getMode() == Mode.VIDEO &&
modes.contains(CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_VIDEO)) {
builder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_VIDEO);
return;
@@ -1053,7 +1047,7 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
if (getEngineState() == STATE_STARTED) {
Frame frame = getFrameManager().getFrame(data,
System.currentTimeMillis(),
getAngles().offset(Reference.SENSOR, Reference.OUTPUT, Axis.RELATIVE_TO_SENSOR),
mFrameProcessingSize,
FRAME_PROCESSING_FORMAT);
mCallback.dispatchFrame(frame);
@@ -1064,8 +1058,8 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
@Override
public void setHasFrameProcessors(final boolean hasFrameProcessors) {
super.setHasFrameProcessors(hasFrameProcessors);
LOG.i("setHasFrameProcessors", "changed to", hasFrameProcessors, "posting.");
mHandler.run(new Runnable() {
@Override
public void run() {
@@ -1102,12 +1096,11 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
// This is a good Q/A. https://stackoverflow.com/a/33181620/4288782
// At first, the point is relative to the View system and does not account our own cropping.
// Will keep updating these two below.
PointF referencePoint = new PointF(point.x, point.y);
Size referenceSize /* = previewSurfaceSize */;
// 1. Account for cropping.
Size previewStreamSize = getPreviewStreamSize(Reference.VIEW);
Size previewSurfaceSize = mPreview.getSurfaceSize();
if (previewStreamSize == null) throw new IllegalStateException("getPreviewStreamSize should not be null at this point.");
AspectRatio previewStreamAspectRatio = AspectRatio.of(previewStreamSize);
@@ -1134,7 +1127,7 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
// 3. Rotate to the stream coordinate system.
// Not elegant, but the sin/cos way was failing.
int angle = getAngles().offset(Reference.SENSOR, Reference.VIEW, Axis.ABSOLUTE);
boolean flip = angle % 180 != 0;
float tempX = referencePoint.x; float tempY = referencePoint.y;
if (angle == 0) {
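// Illustrative sketch of the "rotate to the stream coordinate system" step above: a generic
// clockwise rotation of a point inside a width x height surface by a multiple of 90 degrees.
// This is plain geometry, not the exact mapping used by Camera2Engine.
private static PointF exampleRotate(PointF p, int width, int height, int angle) {
    switch (angle) {
        case 0:   return new PointF(p.x, p.y);
        case 90:  return new PointF(height - p.y, p.x);          // result lives in a height x width surface
        case 180: return new PointF(width - p.x, height - p.y);
        case 270: return new PointF(p.y, width - p.x);
        default:  throw new IllegalArgumentException("angle must be a multiple of 90");
    }
}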

@@ -1,9 +1,7 @@
package com.otaliastudios.cameraview.engine;

import android.content.Context;
import android.graphics.Camera;
import android.graphics.PointF;
import android.hardware.camera2.CameraCharacteristics;
import android.location.Location;
@@ -21,6 +19,8 @@ import com.otaliastudios.cameraview.CameraLogger;
import com.otaliastudios.cameraview.CameraOptions;
import com.otaliastudios.cameraview.PictureResult;
import com.otaliastudios.cameraview.VideoResult;
import com.otaliastudios.cameraview.engine.offset.Angles;
import com.otaliastudios.cameraview.engine.offset.Reference;
import com.otaliastudios.cameraview.frame.Frame;
import com.otaliastudios.cameraview.frame.FrameManager;
import com.otaliastudios.cameraview.internal.utils.Op;
@@ -84,7 +84,7 @@ import java.util.concurrent.TimeUnit;
* - {@link #start()}: ASYNC - starts the engine (S2). When possible, at a later time, S3 and S4 are also performed.
* - {@link #stop()}: ASYNC - stops everything: undoes S4, then S3, then S2.
* - {@link #restart()}: ASYNC - completes a stop then a start.
* - {@link #destroy()}: SYNC - performs a {@link #stop()} that will go on no matter the exceptions, without throwing.
* Makes the engine unusable and clears resources.
*
* For example, we expose the engine (S2) state through {@link #getEngineState()}. It will be:
@@ -139,59 +139,52 @@ public abstract class CameraEngine implements
private static final String TAG = CameraEngine.class.getSimpleName();
private static final CameraLogger LOG = CameraLogger.create(TAG);

@SuppressWarnings({"WeakerAccess", "unused"})
public static final int STATE_STOPPING = CameraEngineStep.STATE_STOPPING;
public static final int STATE_STOPPED = CameraEngineStep.STATE_STOPPED;
@SuppressWarnings({"WeakerAccess", "unused"})
public static final int STATE_STARTING = CameraEngineStep.STATE_STARTING;
public static final int STATE_STARTED = CameraEngineStep.STATE_STARTED;

// Need to be protected
@SuppressWarnings("WeakerAccess") protected WorkerHandler mHandler;
@SuppressWarnings("WeakerAccess") protected final Callback mCallback;
@SuppressWarnings("WeakerAccess") protected CameraPreview mPreview;
@SuppressWarnings("WeakerAccess") protected CameraOptions mCameraOptions;
@SuppressWarnings("WeakerAccess") protected Mapper mMapper;
@SuppressWarnings("WeakerAccess") protected PictureRecorder mPictureRecorder;
@SuppressWarnings("WeakerAccess") protected VideoRecorder mVideoRecorder;
@SuppressWarnings("WeakerAccess") protected Size mCaptureSize;
@SuppressWarnings("WeakerAccess") protected Size mPreviewStreamSize;
@SuppressWarnings("WeakerAccess") protected Flash mFlash;
@SuppressWarnings("WeakerAccess") protected WhiteBalance mWhiteBalance;
@SuppressWarnings("WeakerAccess") protected VideoCodec mVideoCodec;
@SuppressWarnings("WeakerAccess") protected Hdr mHdr;
@SuppressWarnings("WeakerAccess") protected Location mLocation;
@SuppressWarnings("WeakerAccess") protected float mZoomValue;
@SuppressWarnings("WeakerAccess") protected float mExposureCorrectionValue;
@SuppressWarnings("WeakerAccess") protected boolean mPlaySounds;

// Can be private
@VisibleForTesting Handler mCrashHandler;
private final FrameManager mFrameManager;
private final Angles mAngles;
@Nullable private SizeSelector mPreviewStreamSizeSelector;
private SizeSelector mPictureSizeSelector;
private SizeSelector mVideoSizeSelector;
private Facing mFacing;
private Mode mMode;
private Audio mAudio;
private long mVideoMaxSize;
private int mVideoMaxDuration;
private int mVideoBitRate;
private int mAudioBitRate;
private boolean mHasFrameProcessors;
private long mAutoFocusResetDelayMillis;
private int mSnapshotMaxWidth = Integer.MAX_VALUE; // in REF_VIEW for consistency with SizeSelectors
private int mSnapshotMaxHeight = Integer.MAX_VALUE; // in REF_VIEW for consistency with SizeSelectors

// Steps
private final CameraEngineStep.Callback mStepCallback = new CameraEngineStep.Callback() {
@Override @NonNull public Executor getExecutor() { return mHandler.getExecutor(); }
@Override public void handleException(@NonNull Exception exception) {
@@ -203,29 +196,23 @@ public abstract class CameraEngine implements
private CameraEngineStep mPreviewStep = new CameraEngineStep("preview", mStepCallback);
private CameraEngineStep mAllStep = new CameraEngineStep("all", mStepCallback);

// Ops used for testing.
@VisibleForTesting Op<Void> mStartVideoOp = new Op<>();
@VisibleForTesting(otherwise = VisibleForTesting.PROTECTED) Op<Void> mZoomOp = new Op<>();
@VisibleForTesting(otherwise = VisibleForTesting.PROTECTED) Op<Void> mExposureCorrectionOp = new Op<>();
@VisibleForTesting(otherwise = VisibleForTesting.PROTECTED) Op<Void> mFlashOp = new Op<>();
@VisibleForTesting(otherwise = VisibleForTesting.PROTECTED) Op<Void> mWhiteBalanceOp = new Op<>();
@VisibleForTesting(otherwise = VisibleForTesting.PROTECTED) Op<Void> mHdrOp = new Op<>();
@VisibleForTesting(otherwise = VisibleForTesting.PROTECTED) Op<Void> mLocationOp = new Op<>();
@VisibleForTesting(otherwise = VisibleForTesting.PROTECTED) Op<Void> mPlaySoundsOp = new Op<>();

protected CameraEngine(@NonNull Callback callback) {
mCallback = callback;
mCrashHandler = new Handler(Looper.getMainLooper());
mHandler = WorkerHandler.get("CameraViewEngine");
mHandler.getThread().setUncaughtExceptionHandler(new CrashExceptionHandler());
mFrameManager = instantiateFrameManager();
mAngles = new Angles();
}

public void setPreview(@NonNull CameraPreview cameraPreview) {
@@ -320,7 +307,7 @@ public abstract class CameraEngine implements
return mBindStep.getState();
}

@SuppressWarnings({"unused", "WeakerAccess"})
public final int getPreviewState() {
return mPreviewStep.getState();
}
@@ -580,7 +567,7 @@ public abstract class CameraEngine implements
*/
@Override
public final void onSurfaceAvailable() {
LOG.i("onSurfaceAvailable:", "Size is", getPreviewSurfaceSize(Reference.VIEW));
mHandler.run(new Runnable() {
@Override
public void run() {
@@ -597,7 +584,7 @@ public abstract class CameraEngine implements
@Override
public final void onSurfaceChanged() {
LOG.i("onSurfaceChanged:", "Size is", getPreviewSurfaceSize(Reference.VIEW), "Posting.");
mHandler.run(new Runnable() {
@Override
public void run() {
@@ -658,7 +645,7 @@ public abstract class CameraEngine implements
* that would cause deadlocks due to us awaiting for {@link #stop()} to return.
*/
public void destroy() {
LOG.i("destroy:", "state:", getEngineStateName(), "thread:", Thread.currentThread());
// Prevent CameraEngine leaks. Don't set to null, or exceptions
// inside the standard stop() method might crash the main thread.
mHandler.getThread().setUncaughtExceptionHandler(new NoOpExceptionHandler());
@@ -674,6 +661,7 @@ public abstract class CameraEngine implements
try {
boolean success = latch.await(3, TimeUnit.SECONDS);
if (!success) {
// TODO seems like this is always the case?
LOG.e("Probably some deadlock in destroy.",
"Current thread:", Thread.currentThread(),
"Handler thread: ", mHandler.getThread());
@ -790,60 +778,115 @@ public abstract class CameraEngine implements
//endregion //endregion
//region final setters //region Final setters and getters
@SuppressWarnings("WeakerAccess")
public final Angles getAngles() {
return mAngles;
}
@SuppressWarnings("WeakerAccess")
protected final void setSensorOffset(@NonNull Facing facing, int sensorOffset) {
mAngles.setSensorOffset(facing, sensorOffset);
}
// This is called before start() and never again. // This is called before start() and never again.
public final void setDisplayOffset(int displayOffset) { public final void setDisplayOffset(int displayOffset) {
mDisplayOffset = displayOffset; mAngles.setDisplayOffset(displayOffset);
} }
// This can be called multiple times. // This can be called multiple times.
public final void setDeviceOrientation(int deviceOrientation) { public final void setDeviceOrientation(int deviceOrientation) {
mDeviceOrientation = deviceOrientation; mAngles.setDeviceOrientation(deviceOrientation);
} }
public final void setPreviewStreamSizeSelector(@Nullable SizeSelector selector) { public final void setPreviewStreamSizeSelector(@Nullable SizeSelector selector) {
mPreviewStreamSizeSelector = selector; mPreviewStreamSizeSelector = selector;
} }
@Nullable
public final SizeSelector getPreviewStreamSizeSelector() {
return mPreviewStreamSizeSelector;
}
public final void setPictureSizeSelector(@NonNull SizeSelector selector) { public final void setPictureSizeSelector(@NonNull SizeSelector selector) {
mPictureSizeSelector = selector; mPictureSizeSelector = selector;
} }
@NonNull
public final SizeSelector getPictureSizeSelector() {
return mPictureSizeSelector;
}
public final void setVideoSizeSelector(@NonNull SizeSelector selector) { public final void setVideoSizeSelector(@NonNull SizeSelector selector) {
mVideoSizeSelector = selector; mVideoSizeSelector = selector;
} }
@NonNull
public final SizeSelector getVideoSizeSelector() {
return mVideoSizeSelector;
}
public final void setVideoMaxSize(long videoMaxSizeBytes) { public final void setVideoMaxSize(long videoMaxSizeBytes) {
mVideoMaxSize = videoMaxSizeBytes; mVideoMaxSize = videoMaxSizeBytes;
} }
public final long getVideoMaxSize() {
return mVideoMaxSize;
}
public final void setVideoMaxDuration(int videoMaxDurationMillis) { public final void setVideoMaxDuration(int videoMaxDurationMillis) {
mVideoMaxDuration = videoMaxDurationMillis; mVideoMaxDuration = videoMaxDurationMillis;
} }
public final int getVideoMaxDuration() {
return mVideoMaxDuration;
}
public final void setVideoCodec(@NonNull VideoCodec codec) { public final void setVideoCodec(@NonNull VideoCodec codec) {
mVideoCodec = codec; mVideoCodec = codec;
} }
public final VideoCodec getVideoCodec() {
return mVideoCodec;
}
public final void setVideoBitRate(int videoBitRate) { public final void setVideoBitRate(int videoBitRate) {
mVideoBitRate = videoBitRate; mVideoBitRate = videoBitRate;
} }
public final int getVideoBitRate() {
return mVideoBitRate;
}
public final void setAudioBitRate(int audioBitRate) { public final void setAudioBitRate(int audioBitRate) {
mAudioBitRate = audioBitRate; mAudioBitRate = audioBitRate;
} }
public final int getAudioBitRate() {
return mAudioBitRate;
}
public final void setSnapshotMaxWidth(int maxWidth) { public final void setSnapshotMaxWidth(int maxWidth) {
mSnapshotMaxWidth = maxWidth; mSnapshotMaxWidth = maxWidth;
} }
public int getSnapshotMaxWidth() {
return mSnapshotMaxWidth;
}
public final void setSnapshotMaxHeight(int maxHeight) { public final void setSnapshotMaxHeight(int maxHeight) {
mSnapshotMaxHeight = maxHeight; mSnapshotMaxHeight = maxHeight;
} }
public int getSnapshotMaxHeight() {
return mSnapshotMaxHeight;
}
public final void setAutoFocusResetDelay(long delayMillis) { mAutoFocusResetDelayMillis = delayMillis; } public final void setAutoFocusResetDelay(long delayMillis) { mAutoFocusResetDelayMillis = delayMillis; }
public final long getAutoFocusResetDelay() { return mAutoFocusResetDelayMillis; }
/**
 * Sets a new facing value. This will restart the session (if there's any)
 * so that we can open the new facing camera.
@@ -867,6 +910,11 @@ public abstract class CameraEngine implements
}
}
@NonNull
public final Facing getFacing() {
return mFacing;
}
/**
 * Sets a new audio value that will be used for video recordings.
 * @param audio desired audio
@@ -881,6 +929,11 @@ public abstract class CameraEngine implements
}
}
@NonNull
public final Audio getAudio() {
return mAudio;
}
/**
 * Sets the desired mode (either picture or video).
 * @param mode desired mode.
@@ -899,6 +952,64 @@ public abstract class CameraEngine implements
}
}
@NonNull
public final Mode getMode() {
return mMode;
}
@NonNull
public final FrameManager getFrameManager() {
return mFrameManager;
}
@Nullable
public final CameraOptions getCameraOptions() {
return mCameraOptions;
}
@NonNull
public final Flash getFlash() {
return mFlash;
}
@NonNull
public final WhiteBalance getWhiteBalance() {
return mWhiteBalance;
}
@NonNull
public final Hdr getHdr() {
return mHdr;
}
@Nullable
public final Location getLocation() {
return mLocation;
}
public final float getZoomValue() {
return mZoomValue;
}
public final float getExposureCorrectionValue() {
return mExposureCorrectionValue;
}
@CallSuper
public void setHasFrameProcessors(boolean hasFrameProcessors) {
mHasFrameProcessors = hasFrameProcessors;
}
@SuppressWarnings("WeakerAccess")
public final boolean hasFrameProcessors() {
return mHasFrameProcessors;
}
@SuppressWarnings("WeakerAccess")
protected final boolean shouldResetAutoFocus() {
return mAutoFocusResetDelayMillis > 0 && mAutoFocusResetDelayMillis != Long.MAX_VALUE;
}
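// Illustrative note (not in the original source): with the fields above,
// setAutoFocusResetDelay(3000) re-arms a 3-second focus reset, while passing
// 0 or Long.MAX_VALUE makes shouldResetAutoFocus() return false and disables it.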
//endregion

//region Abstract setters and APIs

@@ -907,8 +1018,8 @@ public abstract class CameraEngine implements
 * Camera is about to be opened. Implementors should look into available cameras
 * and see if anyone matches the given {@link Facing} value.
 *
 * If so, implementors should call {@link #setSensorOffset(Facing, int)} and set any other information
 * (like camera ID) needed to start the engine.
 *
 * @param facing the facing value
 * @return true if we have one
@@ -945,12 +1056,14 @@ public abstract class CameraEngine implements
public abstract void setPlaySounds(boolean playSounds);
public abstract void setHasFrameProcessors(boolean hasFrameProcessors);
//endregion

//region picture and video control

public final boolean isTakingPicture() {
return mPictureRecorder != null;
}

/* not final for tests */
public void takePicture(final @NonNull PictureResult.Stub stub) {
LOG.v("takePicture", "scheduling");
@@ -970,10 +1083,10 @@ public abstract class CameraEngine implements
onTakePicture(stub);
}
});
}
/**
 * The snapshot size is the {@link #getPreviewStreamSize(Reference)}, but cropped based on the
 * view/surface aspect ratio.
 * @param stub a picture stub
 * @param viewAspectRatio the view aspect ratio
@@ -990,9 +1103,6 @@ public abstract class CameraEngine implements
stub.isSnapshot = true;
stub.facing = mFacing;
// Leave the other parameters to subclasses.
LOG.v("takePictureSnapshot", "Rotations", "SV", offset(REF_SENSOR, REF_VIEW), "VS", offset(REF_VIEW, REF_SENSOR));
LOG.v("takePictureSnapshot", "Rotations", "SO", offset(REF_SENSOR, REF_OUTPUT), "OS", offset(REF_OUTPUT, REF_SENSOR));
LOG.v("takePictureSnapshot", "Rotations", "VO", offset(REF_VIEW, REF_OUTPUT), "OV", offset(REF_OUTPUT, REF_VIEW));
onTakePictureSnapshot(stub, viewAspectRatio);
}
});
@@ -1014,6 +1124,10 @@ public abstract class CameraEngine implements
}
}

public final boolean isTakingVideo() {
return mVideoRecorder != null && mVideoRecorder.isRecording();
}

public final void takeVideo(final @NonNull VideoResult.Stub stub, final @NonNull File file) {
LOG.v("takeVideo", "scheduling");
mHandler.run(new Runnable() {
@@ -1110,187 +1224,31 @@ public abstract class CameraEngine implements
//endregion
//region final getters //region Size utilities
@NonNull
public final FrameManager getFrameManager() {
return mFrameManager;
}
@Nullable
public final CameraOptions getCameraOptions() {
return mCameraOptions;
}
@NonNull
public final Facing getFacing() {
return mFacing;
}
@NonNull
public final Flash getFlash() {
return mFlash;
}
@NonNull
public final WhiteBalance getWhiteBalance() {
return mWhiteBalance;
}
public final VideoCodec getVideoCodec() {
return mVideoCodec;
}
public final int getVideoBitRate() {
return mVideoBitRate;
}
public final long getVideoMaxSize() {
return mVideoMaxSize;
}
public final int getVideoMaxDuration() {
return mVideoMaxDuration;
}
@NonNull
public final Mode getMode() {
return mMode;
}
@NonNull
public final Hdr getHdr() {
return mHdr;
}
@Nullable
public final Location getLocation() {
return mLocation;
}
@NonNull
public final Audio getAudio() {
return mAudio;
}
public final int getAudioBitRate() {
return mAudioBitRate;
}
@SuppressWarnings("unused")
@Nullable
@VisibleForTesting
final SizeSelector getPreviewStreamSizeSelector() {
return mPreviewStreamSizeSelector;
}
@SuppressWarnings("unused")
@NonNull
public final SizeSelector getPictureSizeSelector() {
return mPictureSizeSelector;
}
@SuppressWarnings("unused")
@NonNull
public final SizeSelector getVideoSizeSelector() {
return mVideoSizeSelector;
}
public final float getZoomValue() {
return mZoomValue;
}
public final float getExposureCorrectionValue() {
return mExposureCorrectionValue;
}
public final boolean isTakingVideo() {
return mVideoRecorder != null && mVideoRecorder.isRecording();
}
public final boolean isTakingPicture() {
return mPictureRecorder != null;
}
public final long getAutoFocusResetDelay() { return mAutoFocusResetDelayMillis; }
public boolean getHasFrameProcessors() {
return mHasFrameProcessors;
}
//endregion
final boolean shouldResetAutoFocus() {
return mAutoFocusResetDelayMillis > 0 && mAutoFocusResetDelayMillis != Long.MAX_VALUE;
}
//region Orientation utils
private int computeSensorToViewOffset() {
if (mFacing == Facing.FRONT) {
return (360 - ((mSensorOffset + mDisplayOffset) % 360)) % 360;
} else {
return (mSensorOffset - mDisplayOffset + 360) % 360;
}
}
private int computeSensorToOutputOffset() {
if (mFacing == Facing.FRONT) {
return (mSensorOffset - mDeviceOrientation + 360) % 360;
} else {
return (mSensorOffset + mDeviceOrientation) % 360;
}
}
// o(S, V) - o(S, O)
// displayOffset - deviceOrientation
// Returns the offset between two reference systems.
@SuppressWarnings("WeakerAccess")
public final int offset(int fromReference, int toReference) {
if (fromReference == toReference) return 0;
// We only know how to compute offsets with respect to REF_SENSOR.
// That's why we separate the two cases.
if (fromReference == REF_SENSOR) {
return toReference == REF_VIEW ?
computeSensorToViewOffset() :
computeSensorToOutputOffset();
}
// Maybe the sensor is the other.
if (toReference == REF_SENSOR) {
return (-offset(toReference, fromReference) + 360) % 360;
}
// None of them is the sensor. Use a difference.
return (offset(REF_SENSOR, toReference) - offset(REF_SENSOR, fromReference) + 360) % 360;
}
public final boolean flip(int reference1, int reference2) {
return offset(reference1, reference2) % 180 != 0;
}
@Nullable
public final Size getPictureSize(@SuppressWarnings("SameParameterValue") @NonNull Reference reference) {
if (mCaptureSize == null || mMode == Mode.VIDEO) return null;
return getAngles().flip(Reference.SENSOR, reference) ? mCaptureSize.flip() : mCaptureSize;
}

@Nullable
public final Size getVideoSize(@SuppressWarnings("SameParameterValue") @NonNull Reference reference) {
if (mCaptureSize == null || mMode == Mode.PICTURE) return null;
return getAngles().flip(Reference.SENSOR, reference) ? mCaptureSize.flip() : mCaptureSize;
}

@Nullable
public final Size getPreviewStreamSize(@NonNull Reference reference) {
if (mPreviewStreamSize == null) return null;
return getAngles().flip(Reference.SENSOR, reference) ? mPreviewStreamSize.flip() : mPreviewStreamSize;
}

@SuppressWarnings("SameParameterValue")
@Nullable
private Size getPreviewSurfaceSize(@NonNull Reference reference) {
if (mPreview == null) return null;
return getAngles().flip(Reference.VIEW, reference) ? mPreview.getSurfaceSize().flip() : mPreview.getSurfaceSize();
}
/**
@@ -1314,10 +1272,10 @@ public abstract class CameraEngine implements
 * apply, despite the capturing mechanism being different.
 */
@Nullable
public final Size getUncroppedSnapshotSize(@NonNull Reference reference) {
Size baseSize = getPreviewStreamSize(reference);
if (baseSize == null) return null;
boolean flip = getAngles().flip(reference, Reference.VIEW);
int maxWidth = flip ? mSnapshotMaxHeight : mSnapshotMaxWidth;
int maxHeight = flip ? mSnapshotMaxWidth : mSnapshotMaxHeight;
float baseRatio = AspectRatio.of(baseSize).toFloat();
@@ -1335,11 +1293,6 @@ public abstract class CameraEngine implements
}
}
//endregion
//region Size utils
/**
 * This is called either on cameraView.start(), or when the underlying surface changes.
 * It is possible that in the first call the preview surface has not already computed its
@@ -1354,10 +1307,10 @@ public abstract class CameraEngine implements
}

@SuppressWarnings("WeakerAccess")
protected final Size computeCaptureSize(@NonNull Mode mode) {
// We want to pass stuff into the REF_VIEW reference, not the sensor one.
// This is already managed by CameraOptions, so we just flip again at the end.
boolean flip = getAngles().flip(Reference.SENSOR, Reference.VIEW);
SizeSelector selector;
Collection<Size> sizes;
if (mode == Mode.PICTURE) {
@@ -1393,7 +1346,7 @@ public abstract class CameraEngine implements
@NonNull List<Size> previewSizes = getPreviewStreamAvailableSizes();
// These sizes come in REF_SENSOR. Since there is an external selector involved,
// we must convert all of them to REF_VIEW, then flip back when returning.
boolean flip = getAngles().flip(Reference.SENSOR, Reference.VIEW);
List<Size> sizes = new ArrayList<>(previewSizes.size());
for (Size size : previewSizes) {
sizes.add(flip ? size.flip() : size);
@@ -1401,7 +1354,7 @@ public abstract class CameraEngine implements
// Create our own default selector, which will be used if the external mPreviewStreamSizeSelector
// is null, or if it fails in finding a size.
Size targetMinSize = getPreviewSurfaceSize(Reference.VIEW);
if (targetMinSize == null) throw new IllegalStateException("targetMinSize should not be null here.");
AspectRatio targetRatio = AspectRatio.of(mCaptureSize.getWidth(), mCaptureSize.getHeight());
if (flip) targetRatio = targetRatio.flip();

@@ -0,0 +1,121 @@
package com.otaliastudios.cameraview.engine.offset;
import androidx.annotation.NonNull;
import androidx.annotation.VisibleForTesting;
import com.otaliastudios.cameraview.controls.Facing;
/**
* Manages offsets between different {@link Reference} systems.
*
* These offsets are computed based on the {@link #setSensorOffset(Facing, int)},
* {@link #setDisplayOffset(int)} and {@link #setDeviceOrientation(int)} values that are coming
* from outside.
*
* When communicating with the sensor, {@link Axis#RELATIVE_TO_SENSOR} should probably be used.
* This means inverting the offset when using the front camera.
* This is often the case when calling offset(SENSOR, OUTPUT), for example when passing a JPEG
* rotation to the sensor. That is meant to be consumed as relative to the sensor plane.
*
* For all other usages, {@link Axis#ABSOLUTE} is probably a better choice.
*/
public class Angles {
private Facing mSensorFacing;
@VisibleForTesting int mSensorOffset = 0;
@VisibleForTesting int mDisplayOffset = 0;
@VisibleForTesting int mDeviceOrientation = 0;
/**
* We want to keep everything in the {@link Axis#ABSOLUTE} reference,
* so a front facing sensor offset must be inverted.
*
* @param sensorFacing sensor facing value
* @param sensorOffset sensor offset
*/
public void setSensorOffset(@NonNull Facing sensorFacing, int sensorOffset) {
sanitizeInput(sensorOffset);
mSensorFacing = sensorFacing;
mSensorOffset = sensorOffset;
if (mSensorFacing == Facing.FRONT) {
mSensorOffset = sanitizeOutput(360 - mSensorOffset);
}
}
/**
* Sets the display offset.
* @param displayOffset the display offset
*/
public void setDisplayOffset(int displayOffset) {
sanitizeInput(displayOffset);
mDisplayOffset = displayOffset;
}
/**
* Sets the device orientation.
* @param deviceOrientation the device orientation
*/
public void setDeviceOrientation(int deviceOrientation) {
sanitizeInput(deviceOrientation);
mDeviceOrientation = deviceOrientation;
}
/**
* Returns the offset between two reference systems, computed along the given axis.
* @param from the source reference system
* @param to the destination reference system
* @param axis the axis
* @return the offset
*/
public int offset(@NonNull Reference from, @NonNull Reference to, @NonNull Axis axis) {
int offset = absoluteOffset(from, to);
if (axis == Axis.RELATIVE_TO_SENSOR) {
if (mSensorFacing == Facing.FRONT) {
offset = sanitizeOutput(360 - offset);
}
}
return offset;
}
private int absoluteOffset(@NonNull Reference from, @NonNull Reference to) {
if (from == to) {
return 0;
} else if (to == Reference.BASE) {
return sanitizeOutput(360 - absoluteOffset(to, from));
} else if (from == Reference.BASE) {
switch (to) {
case VIEW: return sanitizeOutput(360 - mDisplayOffset);
case OUTPUT: return sanitizeOutput(mDeviceOrientation);
case SENSOR: return sanitizeOutput(360 - mSensorOffset);
default: throw new RuntimeException("Unknown reference: " + to);
}
} else {
return sanitizeOutput(
absoluteOffset(Reference.BASE, to)
- absoluteOffset(Reference.BASE, from));
}
}
/**
* Whether the two references systems are flipped.
* @param from source
* @param to destination
* @return true if flipped
*/
public boolean flip(@NonNull Reference from, @NonNull Reference to) {
return offset(from, to, Axis.ABSOLUTE) % 180 != 0;
}
private void sanitizeInput(int value) {
if (value != 0
&& value != 90
&& value != 180
&& value != 270) {
throw new IllegalStateException("This value is not sanitized: " + value);
}
}
private int sanitizeOutput(int value) {
return (value + 360) % 360;
}
}
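For reference, a minimal usage sketch (not part of this commit) showing how the pieces above compose into a typical JPEG rotation for a back camera; the intermediate numbers simply follow absoluteOffset() and sanitizeOutput() as defined above.

Angles angles = new Angles();
angles.setSensorOffset(Facing.BACK, 90);   // back sensor mounted at 90 degrees
angles.setDisplayOffset(0);                // natural display orientation
angles.setDeviceOrientation(90);           // device held in landscape
// BASE->SENSOR = 360 - 90 = 270, BASE->OUTPUT = 90, so SENSOR->OUTPUT = (90 - 270 + 360) % 360 = 180.
// BACK facing means no inversion along RELATIVE_TO_SENSOR, so the JPEG rotation is 180.
int jpegRotation = angles.offset(Reference.SENSOR, Reference.OUTPUT, Axis.RELATIVE_TO_SENSOR);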

@@ -0,0 +1,28 @@
package com.otaliastudios.cameraview.engine.offset;
/**
* The axis around which offsets are computed. We have two possibilities:
* - an axis going out of the device screen towards the user's face
* - an axis going out of the device screen towards the back
*
* We are mostly interested in the first one, but some APIs will require
* angles in the sensor reference, in which case, for front cameras, we are
* required to use {@link #RELATIVE_TO_SENSOR}.
*/
public enum Axis {
/**
* This rotation axis is the one going out of the device screen
* towards the user's face.
*/
ABSOLUTE,
/**
* This rotation axis takes into account the current
* {@link com.otaliastudios.cameraview.controls.Facing} value.
*
* - for {@link com.otaliastudios.cameraview.controls.Facing#BACK}, this equals {@link #ABSOLUTE}
* - for {@link com.otaliastudios.cameraview.controls.Facing#FRONT}, this is inverted
*/
RELATIVE_TO_SENSOR
}
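A small sketch (illustrative only, not from the source) of how the two axes differ for a front camera, using the Angles class above with default display offset and device orientation.

Angles angles = new Angles();
angles.setSensorOffset(Facing.FRONT, 270); // stored inverted as 90, see Angles.setSensorOffset()
angles.setDisplayOffset(0);
angles.setDeviceOrientation(0);
int absolute = angles.offset(Reference.SENSOR, Reference.OUTPUT, Axis.ABSOLUTE);            // 90
int relative = angles.offset(Reference.SENSOR, Reference.OUTPUT, Axis.RELATIVE_TO_SENSOR);  // 270, inverted for FRONT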

@@ -0,0 +1,28 @@
package com.otaliastudios.cameraview.engine.offset;
public enum Reference {
/**
* The base reference system has its 'north' aligned with the device natural
* orientation.
*/
BASE,
/**
* This reference system has its 'north' aligned with the camera sensor.
*/
SENSOR,
/**
* This reference system has its 'north' aligned with the View hierarchy.
* This can be different than {@link #BASE} if the activity is allowed to rotate
* (or forced into a non natural position).
*/
VIEW,
/**
* This reference system has its 'north' aligned with the output picture/video.
* This means that it takes into account the device orientation.
*/
OUTPUT
}

@@ -4,23 +4,30 @@ import android.content.Context;
import android.graphics.PointF;
import androidx.annotation.NonNull;
import android.view.MotionEvent;

/**
 * Base class for gesture finders.
 * Gesture finders are passed down touch events to detect gestures.
 */
public abstract class GestureFinder {

    public interface Controller {
        @NonNull Context getContext();
        int getWidth();
        int getHeight();
    }

    // The number of possible values between minValue and maxValue, for the getValue method.
    // We could make this non-static (e.g. larger granularity for exposure correction).
    private final static int GRANULARITY = 50;

    private boolean mActive;
    private Gesture mType;
    private PointF[] mPoints;
    private Controller mController;

    GestureFinder(@NonNull Controller controller, int points) {
        mController = controller;
        mPoints = new PointF[points];
        for (int i = 0; i < points; i++) {
            mPoints[i] = new PointF(0, 0);
@@ -108,6 +115,7 @@ public abstract class GestureLayout extends FrameLayout {
     * @param which the array position
     * @return the point
     */
    @SuppressWarnings("WeakerAccess")
    @NonNull
    protected final PointF getPoint(int which) {
        return mPoints[which];
@@ -156,4 +164,13 @@ public abstract class GestureLayout extends FrameLayout {
        }
        return newValue;
    }

    /**
     * Returns the controller for this finder.
     * @return the controller
     */
    @NonNull
    protected Controller getController() {
        return mController;
    }
}
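As an illustration of the new contract (this snippet is not part of the commit), a Controller can simply delegate to whatever view hosts the finders; hostView here is a hypothetical View.

GestureFinder.Controller controller = new GestureFinder.Controller() {
    @NonNull
    @Override
    public Context getContext() { return hostView.getContext(); }

    @Override
    public int getWidth() { return hostView.getWidth(); }

    @Override
    public int getHeight() { return hostView.getHeight(); }
};
PinchGestureFinder pinch = new PinchGestureFinder(controller);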

@@ -1,26 +1,25 @@
package com.otaliastudios.cameraview.gesture;

import android.os.Build;
import androidx.annotation.NonNull;
import android.view.MotionEvent;
import android.view.ScaleGestureDetector;

/**
 * A {@link GestureFinder} that detects {@link Gesture#PINCH} gestures.
 */
public class PinchGestureFinder extends GestureFinder {

    private final static float ADD_SENSITIVITY = 2f;

    private ScaleGestureDetector mDetector;
    private boolean mNotify;
    private float mFactor = 0;

    public PinchGestureFinder(@NonNull Controller controller) {
        super(controller, 2);
        setGesture(Gesture.PINCH);
        mDetector = new ScaleGestureDetector(controller.getContext(), new ScaleGestureDetector.SimpleOnScaleGestureListener() {
            @Override
            public boolean onScale(ScaleGestureDetector detector) {
                mNotify = true;

@@ -1,6 +1,5 @@
package com.otaliastudios.cameraview.gesture;

import androidx.annotation.NonNull;
import android.view.GestureDetector;
import android.view.MotionEvent;
@@ -8,21 +7,21 @@ import android.view.MotionEvent;
import com.otaliastudios.cameraview.CameraLogger;

/**
 * A {@link GestureFinder} that detects {@link Gesture#SCROLL_HORIZONTAL}
 * and {@link Gesture#SCROLL_VERTICAL} gestures.
 */
public class ScrollGestureFinder extends GestureFinder {

    private static final String TAG = ScrollGestureFinder.class.getSimpleName();
    private static final CameraLogger LOG = CameraLogger.create(TAG);

    private GestureDetector mDetector;
    private boolean mNotify;
    private float mFactor;

    public ScrollGestureFinder(final @NonNull Controller controller) {
        super(controller, 2);
        mDetector = new GestureDetector(controller.getContext(), new GestureDetector.SimpleOnGestureListener() {
            @Override
            public boolean onScroll(MotionEvent e1, MotionEvent e2, float distanceX, float distanceY) {
@@ -40,7 +39,7 @@ public class ScrollGestureLayout extends GestureLayout {
                horizontal = getGesture() == Gesture.SCROLL_HORIZONTAL;
            }
            getPoint(1).set(e2.getX(), e2.getY());
            mFactor = horizontal ? (distanceX / controller.getWidth()) : (distanceY / controller.getHeight());
            mFactor = horizontal ? -mFactor : mFactor; // When vertical, up = positive
            mNotify = true;
            return true;

@@ -1,32 +1,22 @@
package com.otaliastudios.cameraview.gesture;

import androidx.annotation.NonNull;
import android.view.GestureDetector;
import android.view.MotionEvent;

/**
 * A {@link GestureFinder} that detects {@link Gesture#TAP}
 * and {@link Gesture#LONG_TAP} gestures.
 */
public class TapGestureFinder extends GestureFinder {

    private GestureDetector mDetector;
    private boolean mNotify;

    public TapGestureFinder(@NonNull Controller controller) {
        super(controller, 1);
        mDetector = new GestureDetector(controller.getContext(), new GestureDetector.SimpleOnGestureListener() {
            @Override
            public boolean onSingleTapUp(MotionEvent e) {

@@ -40,7 +40,7 @@ public class EglViewport extends EglElement {
    // Stuff from Drawable2d.FULL_RECTANGLE
    // A full square, extending from -1 to +1 in both dimensions.
    // When the model/view/projection matrix is identity, this will exactly cover the viewport.
    private static final float[] FULL_RECTANGLE_COORDS = {
            -1.0f, -1.0f,   // 0 bottom left
             1.0f, -1.0f,   // 1 bottom right
            -1.0f,  1.0f,   // 2 top left
@@ -49,7 +49,7 @@ public class EglViewport extends EglElement {
    // Stuff from Drawable2d.FULL_RECTANGLE
    // A full square, extending from -1 to +1 in both dimensions.
    private static final float[] FULL_RECTANGLE_TEX_COORDS = {
            0.0f, 0.0f,     // 0 bottom left
            1.0f, 0.0f,     // 1 bottom right
            0.0f, 1.0f,     // 2 top left

@ -1,32 +1,18 @@
package com.otaliastudios.cameraview.picture; package com.otaliastudios.cameraview.picture;
import android.annotation.TargetApi;
import android.graphics.Bitmap;
import android.graphics.ImageFormat; import android.graphics.ImageFormat;
import android.graphics.Rect; import android.graphics.Rect;
import android.graphics.SurfaceTexture;
import android.graphics.YuvImage; import android.graphics.YuvImage;
import android.hardware.Camera; import android.hardware.Camera;
import android.opengl.EGL14;
import android.opengl.EGLContext;
import android.opengl.Matrix;
import android.os.Build;
import com.otaliastudios.cameraview.CameraLogger; import com.otaliastudios.cameraview.CameraLogger;
import com.otaliastudios.cameraview.PictureResult; import com.otaliastudios.cameraview.PictureResult;
import com.otaliastudios.cameraview.controls.Facing;
import com.otaliastudios.cameraview.engine.Camera1Engine; import com.otaliastudios.cameraview.engine.Camera1Engine;
import com.otaliastudios.cameraview.engine.CameraEngine; import com.otaliastudios.cameraview.engine.CameraEngine;
import com.otaliastudios.cameraview.internal.egl.EglCore; import com.otaliastudios.cameraview.engine.offset.Reference;
import com.otaliastudios.cameraview.internal.egl.EglViewport;
import com.otaliastudios.cameraview.internal.egl.EglWindowSurface;
import com.otaliastudios.cameraview.internal.utils.CropHelper; import com.otaliastudios.cameraview.internal.utils.CropHelper;
import com.otaliastudios.cameraview.internal.utils.RotationHelper; import com.otaliastudios.cameraview.internal.utils.RotationHelper;
import com.otaliastudios.cameraview.internal.utils.WorkerHandler; import com.otaliastudios.cameraview.internal.utils.WorkerHandler;
import com.otaliastudios.cameraview.preview.CameraPreview;
import com.otaliastudios.cameraview.preview.GlCameraPreview;
import com.otaliastudios.cameraview.preview.RendererFrameCallback;
import com.otaliastudios.cameraview.preview.RendererThread;
import com.otaliastudios.cameraview.size.AspectRatio; import com.otaliastudios.cameraview.size.AspectRatio;
import com.otaliastudios.cameraview.size.Size; import com.otaliastudios.cameraview.size.Size;
@ -40,6 +26,7 @@ import java.io.ByteArrayOutputStream;
public class Snapshot1PictureRecorder extends PictureRecorder { public class Snapshot1PictureRecorder extends PictureRecorder {
private static final String TAG = Snapshot1PictureRecorder.class.getSimpleName(); private static final String TAG = Snapshot1PictureRecorder.class.getSimpleName();
@SuppressWarnings("unused")
private static final CameraLogger LOG = CameraLogger.create(TAG); private static final CameraLogger LOG = CameraLogger.create(TAG);
private Camera1Engine mEngine1; private Camera1Engine mEngine1;
@ -47,9 +34,6 @@ public class Snapshot1PictureRecorder extends PictureRecorder {
private AspectRatio mOutputRatio; private AspectRatio mOutputRatio;
private int mFormat; private int mFormat;
/**
* Camera1 constructor.
*/
public Snapshot1PictureRecorder( public Snapshot1PictureRecorder(
@NonNull PictureResult.Stub stub, @NonNull PictureResult.Stub stub,
@NonNull Camera1Engine engine, @NonNull Camera1Engine engine,
@@ -59,7 +43,7 @@ public class Snapshot1PictureRecorder extends PictureRecorder {
mEngine1 = engine;
mCamera = camera;
mOutputRatio = outputRatio;
mFormat = camera.getParameters().getPreviewFormat();
}

@Override
@@ -74,7 +58,7 @@ public class Snapshot1PictureRecorder extends PictureRecorder {
// Adding EXIF to a byte array, unfortunately, is hard.
final int sensorToOutput = mResult.rotation;
final Size outputSize = mResult.size;
final Size previewStreamSize = mEngine1.getPreviewStreamSize(Reference.SENSOR);
if (previewStreamSize == null) {
throw new IllegalStateException("Preview stream size should never be null here.");
}

@ -4,7 +4,6 @@ import android.annotation.TargetApi;
import android.graphics.Bitmap; import android.graphics.Bitmap;
import android.graphics.Rect; import android.graphics.Rect;
import android.graphics.SurfaceTexture; import android.graphics.SurfaceTexture;
import android.hardware.Camera;
import android.opengl.EGL14; import android.opengl.EGL14;
import android.opengl.EGLContext; import android.opengl.EGLContext;
import android.opengl.Matrix; import android.opengl.Matrix;
@ -13,14 +12,13 @@ import android.os.Build;
import com.otaliastudios.cameraview.CameraLogger; import com.otaliastudios.cameraview.CameraLogger;
import com.otaliastudios.cameraview.PictureResult; import com.otaliastudios.cameraview.PictureResult;
import com.otaliastudios.cameraview.controls.Facing; import com.otaliastudios.cameraview.controls.Facing;
import com.otaliastudios.cameraview.engine.Camera1Engine;
import com.otaliastudios.cameraview.engine.CameraEngine; import com.otaliastudios.cameraview.engine.CameraEngine;
import com.otaliastudios.cameraview.engine.offset.Reference;
import com.otaliastudios.cameraview.internal.egl.EglCore; import com.otaliastudios.cameraview.internal.egl.EglCore;
import com.otaliastudios.cameraview.internal.egl.EglViewport; import com.otaliastudios.cameraview.internal.egl.EglViewport;
import com.otaliastudios.cameraview.internal.egl.EglWindowSurface; import com.otaliastudios.cameraview.internal.egl.EglWindowSurface;
import com.otaliastudios.cameraview.internal.utils.CropHelper; import com.otaliastudios.cameraview.internal.utils.CropHelper;
import com.otaliastudios.cameraview.internal.utils.WorkerHandler; import com.otaliastudios.cameraview.internal.utils.WorkerHandler;
import com.otaliastudios.cameraview.preview.CameraPreview;
import com.otaliastudios.cameraview.preview.GlCameraPreview; import com.otaliastudios.cameraview.preview.GlCameraPreview;
import com.otaliastudios.cameraview.preview.RendererFrameCallback; import com.otaliastudios.cameraview.preview.RendererFrameCallback;
import com.otaliastudios.cameraview.preview.RendererThread; import com.otaliastudios.cameraview.preview.RendererThread;
@@ -107,7 +105,7 @@ public class SnapshotGlPictureRecorder extends PictureRecorder {
// Apply scale and crop:
// NOTE: scaleX and scaleY are in REF_VIEW, while our input appears to be in REF_SENSOR.
boolean flip = mEngine.getAngles().flip(Reference.VIEW, Reference.SENSOR);
float realScaleX = flip ? scaleY : scaleX;
float realScaleY = flip ? scaleX : scaleY;
float scaleTranslX = (1F - realScaleX) / 2F;

@@ -67,11 +67,9 @@ public abstract class CameraPreview<T extends View, Output> {
 * Creates a new preview.
 * @param context a context
 * @param parent where to inflate our view
 */
public CameraPreview(@NonNull Context context, @NonNull ViewGroup parent) {
mView = onCreateView(context, parent);
}

/**
@@ -79,10 +77,12 @@ public abstract class CameraPreview<T extends View, Output> {
 * @param callback a callback
 */
public final void setSurfaceCallback(@Nullable SurfaceCallback callback) {
if (hasSurface() && mSurfaceCallback != null) {
mSurfaceCallback.onSurfaceDestroyed();
}
mSurfaceCallback = callback;
if (hasSurface() && mSurfaceCallback != null) {
mSurfaceCallback.onSurfaceAvailable();
}
}
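The dispatch order above matters when the surface is already available, for example when swapping callbacks at runtime. A hedged sketch of the expected sequence, assuming the callbacks are camera engines implementing CameraPreview.SurfaceCallback:

// Illustrative sequence, not from the source:
preview.setSurfaceCallback(firstEngine);   // once the surface is created -> firstEngine.onSurfaceAvailable()
preview.setSurfaceCallback(secondEngine);  // firstEngine.onSurfaceDestroyed(), then secondEngine.onSurfaceAvailable()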

@ -57,7 +57,7 @@ import javax.microedition.khronos.opengles.GL10;
* Callbacks are guaranteed to be called on the renderer thread, which means that we can fetch * Callbacks are guaranteed to be called on the renderer thread, which means that we can fetch
* the GL context that was created and is managed by the {@link GLSurfaceView}. * the GL context that was created and is managed by the {@link GLSurfaceView}.
*/ */
public class GlCameraPreview extends CameraPreview<GLSurfaceView, SurfaceTexture> implements GLSurfaceView.Renderer { public class GlCameraPreview extends CameraPreview<GLSurfaceView, SurfaceTexture> {
private boolean mDispatched; private boolean mDispatched;
private final float[] mTransformMatrix = new float[16]; private final float[] mTransformMatrix = new float[16];
@@ -69,32 +69,8 @@ public class GlCameraPreview extends CameraPreview<GLSurfaceView, SurfaceTexture
@VisibleForTesting float mCropScaleY = 1F;
private View mRootView;

public GlCameraPreview(@NonNull Context context, @NonNull ViewGroup parent) {
super(context, parent);
}
@NonNull @NonNull
@ -103,11 +79,8 @@ public class GlCameraPreview extends CameraPreview<GLSurfaceView, SurfaceTexture
ViewGroup root = (ViewGroup) LayoutInflater.from(context).inflate(R.layout.cameraview_gl_view, parent, false); ViewGroup root = (ViewGroup) LayoutInflater.from(context).inflate(R.layout.cameraview_gl_view, parent, false);
GLSurfaceView glView = root.findViewById(R.id.gl_surface_view); GLSurfaceView glView = root.findViewById(R.id.gl_surface_view);
glView.setEGLContextClientVersion(2); glView.setEGLContextClientVersion(2);
glView.setRenderer(this); glView.setRenderer(instantiateRenderer());
glView.setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY); glView.setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);
// Tried these 2 to remove the black background, does not work.
// glView.getHolder().setFormat(PixelFormat.TRANSLUCENT);
// glView.setZOrderMediaOverlay(true);
glView.getHolder().addCallback(new SurfaceHolder.Callback() { glView.getHolder().addCallback(new SurfaceHolder.Callback() {
public void surfaceCreated(SurfaceHolder holder) {} public void surfaceCreated(SurfaceHolder holder) {}
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) { } public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) { }
@ -158,6 +131,11 @@ public class GlCameraPreview extends CameraPreview<GLSurfaceView, SurfaceTexture
} }
} }
/**
* The core renderer that performs the actual drawing operations.
*/
public class Renderer implements GLSurfaceView.Renderer {
@RendererThread @RendererThread
@Override @Override
public void onSurfaceCreated(GL10 gl, EGLConfig config) { public void onSurfaceCreated(GL10 gl, EGLConfig config) {
@ -184,7 +162,6 @@ public class GlCameraPreview extends CameraPreview<GLSurfaceView, SurfaceTexture
} }
@RendererThread @RendererThread
@SuppressWarnings("StatementWithEmptyBody")
@Override @Override
public void onSurfaceChanged(GL10 gl, final int width, final int height) { public void onSurfaceChanged(GL10 gl, final int width, final int height) {
gl.glViewport(0, 0, width, height); gl.glViewport(0, 0, width, height);
@@ -213,15 +190,14 @@ public class GlCameraPreview extends CameraPreview<GLSurfaceView, SurfaceTexture
// See TextureCameraPreview.setDrawRotation() for info.
if (mDrawRotation != 0) {
Matrix.translateM(mTransformMatrix, 0, 0.5F, 0.5F, 0);
Matrix.rotateM(mTransformMatrix, 0, mDrawRotation, 0, 0, 1);
Matrix.translateM(mTransformMatrix, 0, -0.5F, -0.5F, 0);
}
if (isCropping()) {
// Scaling is easy, but we must also translate before:
// If the view is 10x1000 (very tall), it will show only the left strip of the preview (not the center one).
// If the view is 1000x10 (very large), it will show only the bottom strip of the preview (not the center one).
float translX = (1F - mCropScaleX) / 2F;
float translY = (1F - mCropScaleY) / 2F;
Matrix.translateM(mTransformMatrix, 0, translX, translY, 0);
@ -234,6 +210,7 @@ public class GlCameraPreview extends CameraPreview<GLSurfaceView, SurfaceTexture
callback.onRendererFrame(mInputSurfaceTexture, mCropScaleX, mCropScaleY); callback.onRendererFrame(mInputSurfaceTexture, mCropScaleX, mCropScaleY);
} }
} }
}
@NonNull @NonNull
@Override @Override
@ -262,7 +239,7 @@ public class GlCameraPreview extends CameraPreview<GLSurfaceView, SurfaceTexture
* platform when its window is positioned asynchronously. * platform when its window is positioned asynchronously.
* *
* But to support older platforms, this seem to work - computing scale values and requesting a new frame, * But to support older platforms, this seem to work - computing scale values and requesting a new frame,
* then drawing it with a scaled transformation matrix. See {@link #onDrawFrame(GL10)}. * then drawing it with a scaled transformation matrix. See {@link Renderer#onDrawFrame(GL10)}.
*/ */
@Override @Override
protected void crop(@NonNull Op<Void> op) { protected void crop(@NonNull Op<Void> op) {
@ -285,4 +262,46 @@ public class GlCameraPreview extends CameraPreview<GLSurfaceView, SurfaceTexture
} }
op.end(null); op.end(null);
} }
/**
* Method specific to the GL preview. Adds a {@link RendererFrameCallback}
* to receive renderer frame events.
* @param callback a callback
*/
public void addRendererFrameCallback(@NonNull final RendererFrameCallback callback) {
getView().queueEvent(new Runnable() {
@Override
public void run() {
mRendererFrameCallbacks.add(callback);
if (mOutputTextureId != 0) callback.onRendererTextureCreated(mOutputTextureId);
}
});
}
/**
* Method specific to the GL preview. Removes a {@link RendererFrameCallback}
* that was previously added to receive renderer frame events.
* @param callback a callback
*/
public void removeRendererFrameCallback(@NonNull final RendererFrameCallback callback) {
mRendererFrameCallbacks.remove(callback);
}
/**
* Returns the output GL texture id.
* @return the output GL texture id
*/
@SuppressWarnings("unused")
protected int getTextureId() {
return mOutputTextureId;
}
/**
* Creates the renderer for this GL surface.
* @return the renderer for this GL surface
*/
@NonNull
protected Renderer instantiateRenderer() {
return new Renderer();
}
} }
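To show how the new Renderer extension point and instantiateRenderer() are meant to be used, here is a hypothetical subclass (not part of this commit); class name and frame counting are purely illustrative.

import android.content.Context;
import android.view.ViewGroup;
import androidx.annotation.NonNull;
import javax.microedition.khronos.opengles.GL10;

// Hypothetical subclass, only meant to demonstrate the extension point:
public class CountingGlCameraPreview extends GlCameraPreview {
    private int mFrames = 0;

    public CountingGlCameraPreview(@NonNull Context context, @NonNull ViewGroup parent) {
        super(context, parent);
    }

    @NonNull
    @Override
    protected Renderer instantiateRenderer() {
        // Renderer is an inner class, so this anonymous subclass is bound to this preview instance.
        return new Renderer() {
            @Override
            public void onDrawFrame(GL10 gl) {
                super.onDrawFrame(gl);
                mFrames++; // count rendered frames
            }
        };
    }
}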

@@ -26,8 +26,8 @@ public class SurfaceCameraPreview extends CameraPreview<SurfaceView, SurfaceHold
private boolean mDispatched;
private View mRootView;

public SurfaceCameraPreview(@NonNull Context context, @NonNull ViewGroup parent) {
super(context, parent);
}

@NonNull
@@ -37,7 +37,6 @@ public class SurfaceCameraPreview extends CameraPreview<SurfaceView, SurfaceHold
parent.addView(root, 0);
SurfaceView surfaceView = root.findViewById(R.id.surface_view);
final SurfaceHolder holder = surfaceView.getHolder();
holder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
holder.addCallback(new SurfaceHolder.Callback() {

@@ -30,8 +30,8 @@ public class TextureCameraPreview extends CameraPreview<TextureView, SurfaceText
private View mRootView;

public TextureCameraPreview(@NonNull Context context, @NonNull ViewGroup parent) {
super(context, parent);
}

@NonNull
@@ -140,7 +140,7 @@ public class TextureCameraPreview extends CameraPreview<TextureView, SurfaceText
float scaleX = (float) mOutputSurfaceHeight / mOutputSurfaceWidth;
matrix.postScale(scaleX, 1F / scaleX, outputCenterX, outputCenterY);
}
matrix.postRotate((float) drawRotation, outputCenterX, outputCenterY);
getView().setTransform(matrix);
task.setResult(null);
}

@ -4,11 +4,11 @@ import android.graphics.SurfaceTexture;
import android.opengl.EGL14; import android.opengl.EGL14;
import android.os.Build; import android.os.Build;
import com.otaliastudios.cameraview.CameraException;
import com.otaliastudios.cameraview.CameraLogger; import com.otaliastudios.cameraview.CameraLogger;
import com.otaliastudios.cameraview.VideoResult; import com.otaliastudios.cameraview.VideoResult;
import com.otaliastudios.cameraview.controls.Audio; import com.otaliastudios.cameraview.controls.Audio;
import com.otaliastudios.cameraview.engine.CameraEngine; import com.otaliastudios.cameraview.engine.CameraEngine;
import com.otaliastudios.cameraview.engine.offset.Reference;
import com.otaliastudios.cameraview.preview.GlCameraPreview; import com.otaliastudios.cameraview.preview.GlCameraPreview;
import com.otaliastudios.cameraview.preview.RendererFrameCallback; import com.otaliastudios.cameraview.preview.RendererFrameCallback;
import com.otaliastudios.cameraview.preview.RendererThread; import com.otaliastudios.cameraview.preview.RendererThread;
@@ -58,7 +58,7 @@ public class SnapshotVideoRecorder extends VideoRecorder implements RendererFram
@Override
protected void onStart() {
mPreview.addRendererFrameCallback(this);
mFlipped = mEngine.getAngles().flip(Reference.SENSOR, Reference.VIEW);
mDesiredState = STATE_RECORDING;
}
@@ -128,8 +128,11 @@ public class SnapshotVideoRecorder extends VideoRecorder implements RendererFram
TextureMediaEncoder.TextureFrame textureFrame = textureEncoder.acquireFrame();
textureFrame.timestamp = surfaceTexture.getTimestamp();
surfaceTexture.getTransformMatrix(textureFrame.transform);
if (mEncoderEngine != null) {
// mEncoderEngine can be null here during teardown.
mEncoderEngine.notify(TextureMediaEncoder.FRAME_EVENT, textureFrame);
}
}
if (mCurrentState == STATE_RECORDING && mDesiredState == STATE_NOT_RECORDING) {
LOG.i("Stopping the encoder engine.");
@@ -0,0 +1,80 @@
package com.otaliastudios.cameraview.engine.offset;
import com.otaliastudios.cameraview.controls.Facing;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
public class AnglesTest {
private Angles angles;
@Before
public void setUp() {
angles = new Angles();
}
@After
public void tearDown() {
angles = null;
}
@Test
public void testSetSensorOffset() {
angles.setSensorOffset(Facing.BACK, 90);
assertEquals(90, angles.mSensorOffset);
angles.setSensorOffset(Facing.FRONT, 90);
assertEquals(270, angles.mSensorOffset);
}
@Test
public void testSetDisplayOffset() {
angles.setDisplayOffset(90);
assertEquals(90, angles.mDisplayOffset);
}
@Test
public void testSetDeviceOrientation() {
angles.setDeviceOrientation(90);
assertEquals(90, angles.mDeviceOrientation);
}
@Test(expected = IllegalStateException.class)
public void testSetSensorOffset_throws() {
angles.setSensorOffset(Facing.BACK, 135);
}
@Test(expected = IllegalStateException.class)
public void testSetDisplayOffset_throws() {
angles.setDisplayOffset(135);
}
@Test(expected = IllegalStateException.class)
public void testSetDeviceOrientation_throws() {
angles.setDeviceOrientation(135);
}
@Test
public void testOffset_BaseToSensor() {
angles.setSensorOffset(Facing.BACK, 90);
assertEquals(270, angles.offset(Reference.BASE, Reference.SENSOR, Axis.RELATIVE_TO_SENSOR));
angles.setSensorOffset(Facing.FRONT, 270); // This is like setting 90
assertEquals(90, angles.offset(Reference.BASE, Reference.SENSOR, Axis.RELATIVE_TO_SENSOR));
}
@Test
public void testOffset_BaseToView() {
angles.setDisplayOffset(90);
assertEquals(270, angles.offset(Reference.BASE, Reference.VIEW, Axis.ABSOLUTE));
}
@Test
public void testOffset_BaseToOutput() {
angles.setDeviceOrientation(90);
assertEquals(90, angles.offset(Reference.BASE, Reference.OUTPUT, Axis.ABSOLUTE));
}
}