Repackage / Expose (#482)

* Refactor code into subpackages
* Rename CameraController to CameraEngine
* Move Engine and Previews
* Repackage everything else
* Refactor and document some packages
* Refactor cameraview package
* Refactor Size package
* Refactor unit tests
* Refactor preview package
* Refactor picture package
* Refactor video package
* Refactor encoding package
* Refactor androidTests
* Fix tests
* Fix GestureLayout tests
* Document changes
Branch: pull/484/head
Author: Mattia Iavarone (committed by GitHub, 6 years ago)
Parent: 0c7726d5c5
Commit: cd5f0a12bf
Changed files (number of changed lines in parentheses):
  1. cameraview/build.gradle (7)
  2. cameraview/src/androidTest/java/com/otaliastudios/cameraview/BaseTest.java (14)
  3. cameraview/src/androidTest/java/com/otaliastudios/cameraview/CameraLoggerTest.java (2)
  4. cameraview/src/androidTest/java/com/otaliastudios/cameraview/CameraOptions1Test.java (15)
  5. cameraview/src/androidTest/java/com/otaliastudios/cameraview/CameraUtilsTest.java (2)
  6. cameraview/src/androidTest/java/com/otaliastudios/cameraview/CameraViewCallbacksTest.java (39)
  7. cameraview/src/androidTest/java/com/otaliastudios/cameraview/CameraViewTest.java (157)
  8. cameraview/src/androidTest/java/com/otaliastudios/cameraview/MockCameraController.java (137)
  9. cameraview/src/androidTest/java/com/otaliastudios/cameraview/PictureResultTest.java (20)
  10. cameraview/src/androidTest/java/com/otaliastudios/cameraview/TestActivity.java (2)
  11. cameraview/src/androidTest/java/com/otaliastudios/cameraview/VideoResultTest.java (36)
  12. cameraview/src/androidTest/java/com/otaliastudios/cameraview/engine/IntegrationTest.java (39)
  13. cameraview/src/androidTest/java/com/otaliastudios/cameraview/engine/Mapper1Test.java (10)
  14. cameraview/src/androidTest/java/com/otaliastudios/cameraview/engine/MockCameraEngine.java (175)
  15. cameraview/src/androidTest/java/com/otaliastudios/cameraview/gesture/GestureLayoutTest.java (18)
  16. cameraview/src/androidTest/java/com/otaliastudios/cameraview/gesture/PinchGestureLayoutTest.java (11)
  17. cameraview/src/androidTest/java/com/otaliastudios/cameraview/gesture/ScrollGestureLayoutTest.java (13)
  18. cameraview/src/androidTest/java/com/otaliastudios/cameraview/gesture/TapGestureLayoutTest.java (11)
  19. cameraview/src/androidTest/java/com/otaliastudios/cameraview/internal/GridLinesLayoutTest.java (7)
  20. cameraview/src/androidTest/java/com/otaliastudios/cameraview/internal/utils/CropHelperTest.java (7)
  21. cameraview/src/androidTest/java/com/otaliastudios/cameraview/internal/utils/OrientationHelperTest.java (5)
  22. cameraview/src/androidTest/java/com/otaliastudios/cameraview/internal/utils/WorkerHandlerTest.java (6)
  23. cameraview/src/androidTest/java/com/otaliastudios/cameraview/picture/PictureRecorderTest.java (20)
  24. cameraview/src/androidTest/java/com/otaliastudios/cameraview/preview/CameraPreviewTest.java (13)
  25. cameraview/src/androidTest/java/com/otaliastudios/cameraview/preview/GlCameraPreviewTest.java (6)
  26. cameraview/src/androidTest/java/com/otaliastudios/cameraview/preview/MockCameraPreview.java (15)
  27. cameraview/src/androidTest/java/com/otaliastudios/cameraview/preview/SurfaceCameraPreviewTest.java (7)
  28. cameraview/src/androidTest/java/com/otaliastudios/cameraview/preview/TextureCameraPreviewTest.java (6)
  29. cameraview/src/androidTest/java/com/otaliastudios/cameraview/video/VideoRecorderTest.java (23)
  30. cameraview/src/main/gles/com/otaliastudios/cameraview/EncoderThread.java (3)
  31. cameraview/src/main/gles/com/otaliastudios/cameraview/OutputBuffer.java (11)
  32. cameraview/src/main/gles/com/otaliastudios/cameraview/Pool.java (89)
  33. cameraview/src/main/java/com/otaliastudios/cameraview/BitmapCallback.java (0)
  34. cameraview/src/main/java/com/otaliastudios/cameraview/CameraException.java (10)
  35. cameraview/src/main/java/com/otaliastudios/cameraview/CameraListener.java (46)
  36. cameraview/src/main/java/com/otaliastudios/cameraview/CameraLogger.java (62)
  37. cameraview/src/main/java/com/otaliastudios/cameraview/CameraOptions.java (29)
  38. cameraview/src/main/java/com/otaliastudios/cameraview/CameraUtils.java (55)
  39. cameraview/src/main/java/com/otaliastudios/cameraview/CameraView.java (397)
  40. cameraview/src/main/java/com/otaliastudios/cameraview/FileCallback.java (2)
  41. cameraview/src/main/java/com/otaliastudios/cameraview/Mapper.java (19)
  42. cameraview/src/main/java/com/otaliastudios/cameraview/Mapper1.java (85)
  43. cameraview/src/main/java/com/otaliastudios/cameraview/PictureRecorder.java (40)
  44. cameraview/src/main/java/com/otaliastudios/cameraview/PictureResult.java (46)
  45. cameraview/src/main/java/com/otaliastudios/cameraview/VideoRecorder.java (40)
  46. cameraview/src/main/java/com/otaliastudios/cameraview/VideoResult.java (79)
  47. cameraview/src/main/java/com/otaliastudios/cameraview/controls/Audio.java (4)
  48. cameraview/src/main/java/com/otaliastudios/cameraview/controls/Control.java (2)
  49. cameraview/src/main/java/com/otaliastudios/cameraview/controls/ControlParser.java (72)
  50. cameraview/src/main/java/com/otaliastudios/cameraview/controls/Facing.java (6)
  51. cameraview/src/main/java/com/otaliastudios/cameraview/controls/Flash.java (5)
  52. cameraview/src/main/java/com/otaliastudios/cameraview/controls/Grid.java (4)
  53. cameraview/src/main/java/com/otaliastudios/cameraview/controls/Hdr.java (4)
  54. cameraview/src/main/java/com/otaliastudios/cameraview/controls/Mode.java (4)
  55. cameraview/src/main/java/com/otaliastudios/cameraview/controls/Preview.java (4)
  56. cameraview/src/main/java/com/otaliastudios/cameraview/controls/VideoCodec.java (5)
  57. cameraview/src/main/java/com/otaliastudios/cameraview/controls/WhiteBalance.java (5)
  58. cameraview/src/main/java/com/otaliastudios/cameraview/engine/Camera1Engine.java (222)
  59. cameraview/src/main/java/com/otaliastudios/cameraview/engine/CameraEngine.java (249)
  60. cameraview/src/main/java/com/otaliastudios/cameraview/engine/Mapper.java (123)
  61. cameraview/src/main/java/com/otaliastudios/cameraview/frame/Frame.java (11)
  62. cameraview/src/main/java/com/otaliastudios/cameraview/frame/FrameManager.java (61)
  63. cameraview/src/main/java/com/otaliastudios/cameraview/frame/FrameProcessor.java (4)
  64. cameraview/src/main/java/com/otaliastudios/cameraview/gesture/Gesture.java (43)
  65. cameraview/src/main/java/com/otaliastudios/cameraview/gesture/GestureAction.java (36)
  66. cameraview/src/main/java/com/otaliastudios/cameraview/gesture/GestureLayout.java (159)
  67. cameraview/src/main/java/com/otaliastudios/cameraview/gesture/GestureParser.java (52)
  68. cameraview/src/main/java/com/otaliastudios/cameraview/gesture/GestureType.java (23)
  69. cameraview/src/main/java/com/otaliastudios/cameraview/gesture/PinchGestureLayout.java (48)
  70. cameraview/src/main/java/com/otaliastudios/cameraview/gesture/ScrollGestureLayout.java (48)
  71. cameraview/src/main/java/com/otaliastudios/cameraview/gesture/TapGestureLayout.java (34)
  72. cameraview/src/main/java/com/otaliastudios/cameraview/internal/GridLinesLayout.java (38)
  73. cameraview/src/main/java/com/otaliastudios/cameraview/internal/egl/EglBaseSurface.java (6)
  74. cameraview/src/main/java/com/otaliastudios/cameraview/internal/egl/EglCore.java (4)
  75. cameraview/src/main/java/com/otaliastudios/cameraview/internal/egl/EglElement.java (12)
  76. cameraview/src/main/java/com/otaliastudios/cameraview/internal/egl/EglViewport.java (17)
  77. cameraview/src/main/java/com/otaliastudios/cameraview/internal/egl/EglWindowSurface.java (4)
  78. cameraview/src/main/java/com/otaliastudios/cameraview/internal/utils/CamcorderProfiles.java (8)
  79. cameraview/src/main/java/com/otaliastudios/cameraview/internal/utils/CropHelper.java (13)
  80. cameraview/src/main/java/com/otaliastudios/cameraview/internal/utils/ExifHelper.java (38)
  81. cameraview/src/main/java/com/otaliastudios/cameraview/internal/utils/OrientationHelper.java (52)
  82. cameraview/src/main/java/com/otaliastudios/cameraview/internal/utils/Pool.java (145)
  83. cameraview/src/main/java/com/otaliastudios/cameraview/internal/utils/RotationHelper.java (17)
  84. cameraview/src/main/java/com/otaliastudios/cameraview/internal/utils/Task.java (111)
  85. cameraview/src/main/java/com/otaliastudios/cameraview/internal/utils/WorkerHandler.java (53)
  86. cameraview/src/main/java/com/otaliastudios/cameraview/picture/FullPictureRecorder.java (18)
  87. cameraview/src/main/java/com/otaliastudios/cameraview/picture/PictureRecorder.java (75)
  88. cameraview/src/main/java/com/otaliastudios/cameraview/picture/SnapshotPictureRecorder.java (68)
  89. cameraview/src/main/java/com/otaliastudios/cameraview/preview/CameraPreview.java (266)
  90. cameraview/src/main/java/com/otaliastudios/cameraview/preview/GlCameraPreview.java (95)
  91. cameraview/src/main/java/com/otaliastudios/cameraview/preview/RendererFrameCallback.java (32)
  92. cameraview/src/main/java/com/otaliastudios/cameraview/preview/RendererThread.java (6)
  93. cameraview/src/main/java/com/otaliastudios/cameraview/preview/SurfaceCameraPreview.java (25)
  94. cameraview/src/main/java/com/otaliastudios/cameraview/preview/TextureCameraPreview.java (32)
  95. cameraview/src/main/java/com/otaliastudios/cameraview/size/AspectRatio.java (12)
  96. cameraview/src/main/java/com/otaliastudios/cameraview/size/Size.java (13)
  97. cameraview/src/main/java/com/otaliastudios/cameraview/size/SizeSelector.java (2)
  98. cameraview/src/main/java/com/otaliastudios/cameraview/size/SizeSelectorParser.java (92)
  99. cameraview/src/main/java/com/otaliastudios/cameraview/size/SizeSelectors.java (2)
  100. cameraview/src/main/java/com/otaliastudios/cameraview/video/FullVideoRecorder.java (46)
Some files were not shown because too many files have changed in this diff.
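
The list above reflects the new subpackage layout: controls, engine, frame, gesture, internal, picture, preview, size and video now live under com.otaliastudios.cameraview. As a minimal sketch of what this means for client code, assuming the public CameraView setters keep their current names (mapGesture and setPictureSize appear verbatim in the test diffs below; setFacing and setFlash are assumed from the corresponding getters, and CameraSetup is just a hypothetical container class):

    import com.otaliastudios.cameraview.CameraView;
    import com.otaliastudios.cameraview.controls.Facing;
    import com.otaliastudios.cameraview.controls.Flash;
    import com.otaliastudios.cameraview.gesture.Gesture;
    import com.otaliastudios.cameraview.gesture.GestureAction;
    import com.otaliastudios.cameraview.size.SizeSelectors;

    public class CameraSetup {
        // Types that used to sit in the root package are now imported from subpackages;
        // the CameraView calls themselves are unchanged.
        static void configure(CameraView camera) {
            camera.setFacing(Facing.FRONT);                        // controls.*
            camera.setFlash(Flash.AUTO);                           // controls.*
            camera.mapGesture(Gesture.PINCH, GestureAction.ZOOM);  // gesture.*
            camera.setPictureSize(SizeSelectors.minHeight(50));    // size.*
        }
    }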

@ -30,13 +30,6 @@ android {
proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro'
}
}
sourceSets {
main.java.srcDirs += 'src/main/options'
main.java.srcDirs += 'src/main/views'
main.java.srcDirs += 'src/main/utils'
main.java.srcDirs += 'src/main/gles'
}
}
dependencies {

@ -3,19 +3,17 @@ package com.otaliastudios.cameraview;
import android.app.KeyguardManager;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Rect;
import android.graphics.YuvImage;
import android.os.Build;
import android.os.Handler;
import android.os.Looper;
import android.os.PowerManager;
import androidx.test.annotation.UiThreadTest;
import androidx.test.espresso.core.internal.deps.guava.collect.ObjectArrays;
import androidx.test.platform.app.InstrumentationRegistry;
import androidx.test.rule.ActivityTestRule;
import android.view.View;
import com.otaliastudios.cameraview.internal.utils.Task;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.BeforeClass;
@ -24,17 +22,13 @@ import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import org.mockito.stubbing.Stubber;
import java.io.ByteArrayOutputStream;
import java.io.OutputStream;
import java.util.concurrent.CountDownLatch;
import static android.content.Context.KEYGUARD_SERVICE;
import static android.content.Context.POWER_SERVICE;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyInt;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
public class BaseTest {

@ -1,6 +1,8 @@
package com.otaliastudios.cameraview;
import com.otaliastudios.cameraview.internal.utils.Task;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import androidx.test.filters.SmallTest;

@ -3,6 +3,19 @@ package com.otaliastudios.cameraview;
import android.hardware.Camera;
import com.otaliastudios.cameraview.controls.Audio;
import com.otaliastudios.cameraview.controls.Facing;
import com.otaliastudios.cameraview.controls.Flash;
import com.otaliastudios.cameraview.engine.Mapper;
import com.otaliastudios.cameraview.gesture.GestureAction;
import com.otaliastudios.cameraview.controls.Grid;
import com.otaliastudios.cameraview.controls.Hdr;
import com.otaliastudios.cameraview.controls.Mode;
import com.otaliastudios.cameraview.controls.VideoCodec;
import com.otaliastudios.cameraview.controls.WhiteBalance;
import com.otaliastudios.cameraview.size.AspectRatio;
import com.otaliastudios.cameraview.size.Size;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import androidx.test.filters.SmallTest;
@ -237,7 +250,7 @@ public class CameraOptions1Test extends BaseTest {
}
CameraOptions o = new CameraOptions(mock(Camera.Parameters.class), false);
Mapper m = new Mapper1();
Mapper m = Mapper.get();
Collection<Facing> s = o.getSupportedControls(Facing.class);
assertEquals(s.size(), supported.size());
for (Facing facing : s) {

@ -6,6 +6,8 @@ import android.content.pm.PackageManager;
import android.graphics.Bitmap;
import android.graphics.Color;
import com.otaliastudios.cameraview.internal.utils.Task;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import androidx.test.filters.SmallTest;

@ -9,6 +9,17 @@ import androidx.test.ext.junit.runners.AndroidJUnit4;
import androidx.test.filters.MediumTest;
import android.view.ViewGroup;
import com.otaliastudios.cameraview.controls.Audio;
import com.otaliastudios.cameraview.engine.CameraEngine;
import com.otaliastudios.cameraview.frame.Frame;
import com.otaliastudios.cameraview.frame.FrameProcessor;
import com.otaliastudios.cameraview.gesture.Gesture;
import com.otaliastudios.cameraview.gesture.GestureAction;
import com.otaliastudios.cameraview.internal.utils.Task;
import com.otaliastudios.cameraview.engine.MockCameraEngine;
import com.otaliastudios.cameraview.preview.MockCameraPreview;
import com.otaliastudios.cameraview.preview.CameraPreview;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
@ -29,6 +40,9 @@ import static org.mockito.Mockito.never;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
/**
* Tests {@link CameraView#mCameraCallbacks} dispatch functions.
*/
@RunWith(AndroidJUnit4.class)
@MediumTest
public class CameraViewCallbacksTest extends BaseTest {
@ -36,7 +50,7 @@ public class CameraViewCallbacksTest extends BaseTest {
private CameraView camera;
private CameraListener listener;
private FrameProcessor processor;
private MockCameraController mockController;
private MockCameraEngine mockController;
private MockCameraPreview mockPreview;
private Task<Boolean> task;
@ -49,14 +63,17 @@ public class CameraViewCallbacksTest extends BaseTest {
listener = mock(CameraListener.class);
processor = mock(FrameProcessor.class);
camera = new CameraView(context) {
@NonNull
@Override
protected CameraController instantiateCameraController(CameraCallbacks callbacks) {
mockController = new MockCameraController(callbacks);
protected CameraEngine instantiateCameraController(@NonNull CameraEngine.Callback callback) {
mockController = new MockCameraEngine(callback);
return mockController;
}
@NonNull
@Override
protected CameraPreview instantiatePreview(Context context, ViewGroup container) {
protected CameraPreview instantiatePreview(@NonNull Context context, @NonNull ViewGroup container) {
mockPreview = new MockCameraPreview(context, container);
return mockPreview;
}
@ -133,19 +150,21 @@ public class CameraViewCallbacksTest extends BaseTest {
@Test
public void testDispatchOnVideoTaken() {
completeTask().when(listener).onVideoTaken(null);
camera.mCameraCallbacks.dispatchOnVideoTaken(null);
VideoResult.Stub stub = new VideoResult.Stub();
completeTask().when(listener).onVideoTaken(any(VideoResult.class));
camera.mCameraCallbacks.dispatchOnVideoTaken(stub);
assertNotNull(task.await(200));
verify(listener, times(1)).onVideoTaken(null);
verify(listener, times(1)).onVideoTaken(any(VideoResult.class));
}
@Test
public void testDispatchOnPictureTaken() {
completeTask().when(listener).onPictureTaken(null);
camera.mCameraCallbacks.dispatchOnPictureTaken(null);
PictureResult.Stub stub = new PictureResult.Stub();
completeTask().when(listener).onPictureTaken(any(PictureResult.class));
camera.mCameraCallbacks.dispatchOnPictureTaken(stub);
assertNotNull(task.await(200));
verify(listener, times(1)).onPictureTaken(null);
verify(listener, times(1)).onPictureTaken(any(PictureResult.class));
}
@Test

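The two dispatch tests above now pass PictureResult.Stub / VideoResult.Stub instances instead of null, so listeners receive fully built result objects. A hedged sketch of the consumer side, assuming the listener signatures mocked here, onPictureTaken(PictureResult) and onVideoTaken(VideoResult), plus the accessors asserted in the result tests further down (getData() is an assumption derived from the stub's data field):

    camera.addCameraListener(new CameraListener() {
        @Override
        public void onPictureTaken(@NonNull PictureResult result) {
            // The engine builds the result from a PictureResult.Stub.
            byte[] jpeg = result.getData(); // assumed accessor for the stub's "data" field
        }

        @Override
        public void onVideoTaken(@NonNull VideoResult result) {
            File video = result.getFile(); // getFile() is asserted in VideoResultTest below
        }
    });
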
@ -2,6 +2,7 @@ package com.otaliastudios.cameraview;
import android.content.Context;
import android.content.res.TypedArray;
import android.location.Location;
import androidx.annotation.NonNull;
import androidx.test.ext.junit.runners.AndroidJUnit4;
@ -9,6 +10,31 @@ import androidx.test.filters.MediumTest;
import android.view.MotionEvent;
import android.view.ViewGroup;
import com.otaliastudios.cameraview.controls.Audio;
import com.otaliastudios.cameraview.controls.ControlParser;
import com.otaliastudios.cameraview.controls.Facing;
import com.otaliastudios.cameraview.controls.Flash;
import com.otaliastudios.cameraview.engine.CameraEngine;
import com.otaliastudios.cameraview.frame.Frame;
import com.otaliastudios.cameraview.frame.FrameProcessor;
import com.otaliastudios.cameraview.gesture.Gesture;
import com.otaliastudios.cameraview.gesture.GestureAction;
import com.otaliastudios.cameraview.controls.Grid;
import com.otaliastudios.cameraview.controls.Hdr;
import com.otaliastudios.cameraview.controls.Mode;
import com.otaliastudios.cameraview.controls.VideoCodec;
import com.otaliastudios.cameraview.controls.WhiteBalance;
import com.otaliastudios.cameraview.gesture.GestureParser;
import com.otaliastudios.cameraview.gesture.PinchGestureLayout;
import com.otaliastudios.cameraview.gesture.ScrollGestureLayout;
import com.otaliastudios.cameraview.gesture.TapGestureLayout;
import com.otaliastudios.cameraview.engine.MockCameraEngine;
import com.otaliastudios.cameraview.preview.MockCameraPreview;
import com.otaliastudios.cameraview.preview.CameraPreview;
import com.otaliastudios.cameraview.size.Size;
import com.otaliastudios.cameraview.size.SizeSelector;
import com.otaliastudios.cameraview.size.SizeSelectors;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
@ -26,7 +52,7 @@ import static android.view.ViewGroup.LayoutParams.*;
public class CameraViewTest extends BaseTest {
private CameraView cameraView;
private MockCameraController mockController;
private MockCameraEngine mockController;
private CameraPreview mockPreview;
private boolean hasPermissions;
@ -37,14 +63,17 @@ public class CameraViewTest extends BaseTest {
public void run() {
Context context = context();
cameraView = new CameraView(context) {
@NonNull
@Override
protected CameraController instantiateCameraController(CameraCallbacks callbacks) {
mockController = spy(new MockCameraController(callbacks));
protected CameraEngine instantiateCameraController(@NonNull CameraEngine.Callback callback) {
mockController = spy(new MockCameraEngine(callback));
return mockController;
}
@NonNull
@Override
protected CameraPreview instantiatePreview(Context context, ViewGroup container) {
protected CameraPreview instantiatePreview(@NonNull Context context, @NonNull ViewGroup container) {
mockPreview = spy(new MockCameraPreview(context, container));
return mockPreview;
}
@ -105,15 +134,18 @@ public class CameraViewTest extends BaseTest {
@Test
public void testDefaults() {
// CameraController
assertEquals(cameraView.getFlash(), Flash.DEFAULT);
assertEquals(cameraView.getFacing(), Facing.DEFAULT(context()));
assertEquals(cameraView.getGrid(), Grid.DEFAULT);
assertEquals(cameraView.getWhiteBalance(), WhiteBalance.DEFAULT);
assertEquals(cameraView.getMode(), Mode.DEFAULT);
assertEquals(cameraView.getHdr(), Hdr.DEFAULT);
assertEquals(cameraView.getAudio(), Audio.DEFAULT);
assertEquals(cameraView.getVideoCodec(), VideoCodec.DEFAULT);
// CameraEngine
TypedArray empty = context().obtainStyledAttributes(new int[]{});
ControlParser controls = new ControlParser(context(), empty);
assertEquals(cameraView.getFlash(), controls.getFlash());
assertEquals(cameraView.getFacing(), controls.getFacing());
assertEquals(cameraView.getGrid(), controls.getGrid());
assertEquals(cameraView.getWhiteBalance(), controls.getWhiteBalance());
assertEquals(cameraView.getMode(), controls.getMode());
assertEquals(cameraView.getHdr(), controls.getHdr());
assertEquals(cameraView.getAudio(), controls.getAudio());
assertEquals(cameraView.getVideoCodec(), controls.getVideoCodec());
//noinspection SimplifiableJUnitAssertion
assertEquals(cameraView.getLocation(), null);
assertEquals(cameraView.getExposureCorrection(), 0f, 0f);
assertEquals(cameraView.getZoom(), 0f, 0f);
@ -121,12 +153,13 @@ public class CameraViewTest extends BaseTest {
assertEquals(cameraView.getVideoMaxSize(), 0, 0);
// Self managed
GestureParser gestures = new GestureParser(empty);
assertEquals(cameraView.getPlaySounds(), CameraView.DEFAULT_PLAY_SOUNDS);
assertEquals(cameraView.getGestureAction(Gesture.TAP), GestureAction.DEFAULT_TAP);
assertEquals(cameraView.getGestureAction(Gesture.LONG_TAP), GestureAction.DEFAULT_LONG_TAP);
assertEquals(cameraView.getGestureAction(Gesture.PINCH), GestureAction.DEFAULT_PINCH);
assertEquals(cameraView.getGestureAction(Gesture.SCROLL_HORIZONTAL), GestureAction.DEFAULT_SCROLL_HORIZONTAL);
assertEquals(cameraView.getGestureAction(Gesture.SCROLL_VERTICAL), GestureAction.DEFAULT_SCROLL_VERTICAL);
assertEquals(cameraView.getGestureAction(Gesture.TAP), gestures.getTapAction());
assertEquals(cameraView.getGestureAction(Gesture.LONG_TAP), gestures.getLongTapAction());
assertEquals(cameraView.getGestureAction(Gesture.PINCH), gestures.getPinchAction());
assertEquals(cameraView.getGestureAction(Gesture.SCROLL_HORIZONTAL), gestures.getHorizontalScrollAction());
assertEquals(cameraView.getGestureAction(Gesture.SCROLL_VERTICAL), gestures.getVerticalScrollAction());
}
//endregion
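
testDefaults no longer compares against hard-coded DEFAULT constants: the expected values come from a ControlParser and a GestureParser built over an empty TypedArray. The same pattern in isolation, a sketch under the assumptions of the test above (a Context named context is in scope; recycling the TypedArray afterwards is an assumption about ownership):

    TypedArray attrs = context.obtainStyledAttributes(new int[]{});
    ControlParser controls = new ControlParser(context, attrs);
    Flash defaultFlash = controls.getFlash();            // library default, no XML attribute set
    Facing defaultFacing = controls.getFacing();

    GestureParser gestures = new GestureParser(attrs);
    GestureAction defaultTap = gestures.getTapAction();
    attrs.recycle();                                     // assumption: the caller owns the array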
@ -159,21 +192,21 @@ public class CameraViewTest extends BaseTest {
// PinchGestureLayout
cameraView.mapGesture(Gesture.PINCH, GestureAction.ZOOM);
assertTrue(cameraView.mPinchGestureLayout.enabled());
assertTrue(cameraView.mPinchGestureLayout.isActive());
cameraView.clearGesture(Gesture.PINCH);
assertFalse(cameraView.mPinchGestureLayout.enabled());
assertFalse(cameraView.mPinchGestureLayout.isActive());
// TapGestureLayout
cameraView.mapGesture(Gesture.TAP, GestureAction.CAPTURE);
assertTrue(cameraView.mTapGestureLayout.enabled());
assertTrue(cameraView.mTapGestureLayout.isActive());
cameraView.clearGesture(Gesture.TAP);
assertFalse(cameraView.mPinchGestureLayout.enabled());
assertFalse(cameraView.mPinchGestureLayout.isActive());
// ScrollGestureLayout
cameraView.mapGesture(Gesture.SCROLL_HORIZONTAL, GestureAction.ZOOM);
assertTrue(cameraView.mScrollGestureLayout.enabled());
assertTrue(cameraView.mScrollGestureLayout.isActive());
cameraView.clearGesture(Gesture.SCROLL_HORIZONTAL);
assertFalse(cameraView.mScrollGestureLayout.enabled());
assertFalse(cameraView.mScrollGestureLayout.isActive());
}
//endregion
@ -182,16 +215,19 @@ public class CameraViewTest extends BaseTest {
@Test
public void testGestureAction_capture() {
CameraOptions o = mock(CameraOptions.class);
mockController.setMockCameraOptions(o);
mockController.mockStarted(true);
MotionEvent event = MotionEvent.obtain(0L, 0L, 0, 0f, 0f, 0);
ui(new Runnable() {
@Override
public void run() {
cameraView.mTapGestureLayout = new TapGestureLayout(cameraView.getContext()) {
public boolean onTouchEvent(MotionEvent event) { return true; }
protected boolean handleTouchEvent(@NonNull MotionEvent event) {
setGesture(Gesture.TAP);
return true;
}
};
cameraView.mTapGestureLayout.setGestureType(Gesture.TAP);
}
});
cameraView.mapGesture(Gesture.TAP, GestureAction.CAPTURE);
@ -201,15 +237,19 @@ public class CameraViewTest extends BaseTest {
@Test
public void testGestureAction_focus() {
CameraOptions o = mock(CameraOptions.class);
mockController.setMockCameraOptions(o);
mockController.mockStarted(true);
MotionEvent event = MotionEvent.obtain(0L, 0L, 0, 0f, 0f, 0);
ui(new Runnable() {
@Override
public void run() {
cameraView.mTapGestureLayout = new TapGestureLayout(cameraView.getContext()) {
public boolean onTouchEvent(MotionEvent event) { return true; }
protected boolean handleTouchEvent(@NonNull MotionEvent event) {
setGesture(Gesture.TAP);
return true;
}
};
cameraView.mTapGestureLayout.setGestureType(Gesture.TAP);
}
});
mockController.mFocusStarted = false;
@ -224,37 +264,49 @@ public class CameraViewTest extends BaseTest {
assertTrue(mockController.mFocusStarted);
}
private class FactorHolder { float value; }
@Test
public void testGestureAction_zoom() {
CameraOptions o = mock(CameraOptions.class);
mockController.setMockCameraOptions(o);
mockController.mockStarted(true);
mockController.mZoomChanged = false;
MotionEvent event = MotionEvent.obtain(0L, 0L, 0, 0f, 0f, 0);
final FactorHolder factor = new FactorHolder();
ui(new Runnable() {
@Override
public void run() {
cameraView.mPinchGestureLayout = new PinchGestureLayout(cameraView.getContext()) {
public boolean onTouchEvent(MotionEvent event) { return true; }
@Override
protected boolean handleTouchEvent(@NonNull MotionEvent event) {
setGesture(Gesture.PINCH);
return true;
}
@Override
protected float getFactor() {
return factor.value;
}
};
cameraView.mPinchGestureLayout.setGestureType(Gesture.PINCH);
cameraView.mapGesture(Gesture.PINCH, GestureAction.ZOOM);
}
});
// If factor is 0, we return the same value. The controller should not be notified.
cameraView.mPinchGestureLayout.mFactor = 0f;
factor.value = 0f;
cameraView.dispatchTouchEvent(event);
assertFalse(mockController.mZoomChanged);
// For larger factors, the value is scaled. The controller should be notified.
cameraView.mPinchGestureLayout.mFactor = 1f;
factor.value = 1f;
cameraView.dispatchTouchEvent(event);
assertTrue(mockController.mZoomChanged);
}
@Test
public void testGestureAction_exposureCorrection() {
// This needs a valid CameraOptions value.
CameraOptions o = mock(CameraOptions.class);
when(o.getExposureCorrectionMinValue()).thenReturn(-10f);
when(o.getExposureCorrectionMaxValue()).thenReturn(10f);
@ -262,24 +314,33 @@ public class CameraViewTest extends BaseTest {
mockController.mockStarted(true);
mockController.mExposureCorrectionChanged = false;
MotionEvent event = MotionEvent.obtain(0L, 0L, 0, 0f, 0f, 0);
final FactorHolder factor = new FactorHolder();
ui(new Runnable() {
@Override
public void run() {
cameraView.mScrollGestureLayout = new ScrollGestureLayout(cameraView.getContext()) {
public boolean onTouchEvent(MotionEvent event) { return true; }
@Override
protected boolean handleTouchEvent(@NonNull MotionEvent event) {
setGesture(Gesture.SCROLL_HORIZONTAL);
return true;
}
@Override
protected float getFactor() {
return factor.value;
}
};
cameraView.mScrollGestureLayout.setGestureType(Gesture.SCROLL_HORIZONTAL);
cameraView.mapGesture(Gesture.SCROLL_HORIZONTAL, GestureAction.EXPOSURE_CORRECTION);
}
});
// If factor is 0, we return the same value. The controller should not be notified.
cameraView.mScrollGestureLayout.mFactor = 0f;
factor.value = 0f;
cameraView.dispatchTouchEvent(event);
assertFalse(mockController.mExposureCorrectionChanged);
// For larger factors, the value is scaled. The controller should be notified.
cameraView.mScrollGestureLayout.mFactor = 1f;
factor.value = 1f;
cameraView.dispatchTouchEvent(event);
assertTrue(mockController.mExposureCorrectionChanged);
}
@ -425,11 +486,11 @@ public class CameraViewTest extends BaseTest {
@Test
public void testSetLocation() {
cameraView.setLocation(50d, -50d);
assertEquals(50d, mockController.mLocation.getLatitude(), 0);
assertEquals(-50d, mockController.mLocation.getLongitude(), 0);
assertEquals(0, mockController.mLocation.getAltitude(), 0);
assertEquals("Unknown", mockController.mLocation.getProvider());
assertEquals(System.currentTimeMillis(), mockController.mLocation.getTime(), 1000f);
assertEquals(50d, mockController.getLocation().getLatitude(), 0);
assertEquals(-50d, mockController.getLocation().getLongitude(), 0);
assertEquals(0, mockController.getLocation().getAltitude(), 0);
assertEquals("Unknown", mockController.getLocation().getProvider());
assertEquals(System.currentTimeMillis(), mockController.getLocation().getTime(), 1000f);
Location source = new Location("Provider");
source.setTime(5000);
@ -562,7 +623,7 @@ public class CameraViewTest extends BaseTest {
public void testPreviewStreamSizeSelector() {
SizeSelector source = SizeSelectors.minHeight(50);
cameraView.setPreviewStreamSize(source);
SizeSelector result = mockController.getPreviewStreamSizeSelector();
SizeSelector result = mockController.getInternalPreviewStreamSizeSelector();
assertNotNull(result);
assertEquals(result, source);
}
@ -571,7 +632,7 @@ public class CameraViewTest extends BaseTest {
public void testPictureSizeSelector() {
SizeSelector source = SizeSelectors.minHeight(50);
cameraView.setPictureSize(source);
SizeSelector result = mockController.getPictureSizeSelector();
SizeSelector result = mockController.getInternalPictureSizeSelector();
assertNotNull(result);
assertEquals(result, source);
}
@ -580,7 +641,7 @@ public class CameraViewTest extends BaseTest {
public void testVideoSizeSelector() {
SizeSelector source = SizeSelectors.minHeight(50);
cameraView.setVideoSize(source);
SizeSelector result = mockController.getVideoSizeSelector();
SizeSelector result = mockController.getInternalVideoSizeSelector();
assertNotNull(result);
assertEquals(result, source);
}
@ -667,8 +728,8 @@ public class CameraViewTest extends BaseTest {
public void testSetSnapshotMaxSize() {
cameraView.setSnapshotMaxWidth(500);
cameraView.setSnapshotMaxHeight(1000);
assertEquals(mockController.mSnapshotMaxWidth, 500);
assertEquals(mockController.mSnapshotMaxHeight, 1000);
assertEquals(mockController.getSnapshotMaxWidth(), 500);
assertEquals(mockController.getSnapshotMaxHeight(), 1000);
}
//endregion

@ -1,137 +0,0 @@
package com.otaliastudios.cameraview;
import android.graphics.PointF;
import android.location.Location;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import java.io.File;
public class MockCameraController extends CameraController {
boolean mPictureCaptured;
boolean mFocusStarted;
boolean mZoomChanged;
boolean mExposureCorrectionChanged;
MockCameraController(CameraView.CameraCallbacks callback) {
super(callback);
}
void setMockCameraOptions(CameraOptions options) {
mCameraOptions = options;
}
void setMockPreviewStreamSize(Size size) {
mPreviewStreamSize = size;
}
void mockStarted(boolean started) {
mState = started ? STATE_STARTED : STATE_STOPPED;
}
@Override
void onStart() {
}
@Override
void onStop() {
}
@Override
void setZoom(float zoom, @Nullable PointF[] points, boolean notify) {
mZoomValue = zoom;
mZoomChanged = true;
}
@Override
void setExposureCorrection(float EVvalue, @NonNull float[] bounds, @Nullable PointF[] points, boolean notify) {
mExposureCorrectionValue = EVvalue;
mExposureCorrectionChanged = true;
}
@Override
void setFacing(@NonNull Facing facing) {
mFacing = facing;
}
@Override
void setFlash(@NonNull Flash flash) {
mFlash = flash;
}
@Override
void setWhiteBalance(@NonNull WhiteBalance whiteBalance) {
mWhiteBalance = whiteBalance;
}
@Override
void setMode(@NonNull Mode mode) {
mMode = mode;
}
@Override
void setHdr(@NonNull Hdr hdr) {
mHdr = hdr;
}
@Override
void setAudio(@NonNull Audio audio) {
mAudio = audio;
}
@Override
void setLocation(@Nullable Location location) {
mLocation = location;
}
@Override
void takePicture() {
mPictureCaptured = true;
}
@Override
void takePictureSnapshot(@NonNull AspectRatio viewAspectRatio) {
}
@Override
void takeVideo(@NonNull File file) {
}
@Override
void takeVideoSnapshot(@NonNull File file, @NonNull AspectRatio viewAspectRatio) {
}
@Override
void stopVideo() {
}
@Override
void startAutoFocus(@Nullable Gesture gesture, @NonNull PointF point) {
mFocusStarted = true;
}
@Override
public void onSurfaceChanged() {
}
@Override
public void onSurfaceAvailable() {
}
@Override
public void onSurfaceDestroyed() {
}
@Override
public void onBufferAvailable(@NonNull byte[] buffer) {
}
@Override
void setPlaySounds(boolean playSounds) {
}
}

@ -3,6 +3,9 @@ package com.otaliastudios.cameraview;
import android.location.Location;
import com.otaliastudios.cameraview.controls.Facing;
import com.otaliastudios.cameraview.size.Size;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import androidx.test.filters.SmallTest;
@ -17,7 +20,7 @@ import static org.junit.Assert.assertEquals;
@SmallTest
public class PictureResultTest extends BaseTest {
private PictureResult result = new PictureResult();
private PictureResult.Stub stub = new PictureResult.Stub();
@Test
public void testResult() {
@ -29,15 +32,16 @@ public class PictureResultTest extends BaseTest {
boolean isSnapshot = true;
Facing facing = Facing.FRONT;
result.format = format;
result.rotation = rotation;
result.size = size;
result.data = jpeg;
result.location = location;
result.facing = facing;
stub.format = format;
stub.rotation = rotation;
stub.size = size;
stub.data = jpeg;
stub.location = location;
stub.facing = facing;
//noinspection ConstantConditions
result.isSnapshot = isSnapshot;
stub.isSnapshot = isSnapshot;
PictureResult result = new PictureResult(stub);
assertEquals(result.getFormat(), format);
assertEquals(result.getRotation(), rotation);
assertEquals(result.getSize(), size);

@ -12,6 +12,8 @@ import android.view.ViewGroup;
import android.view.WindowManager;
import android.widget.FrameLayout;
import com.otaliastudios.cameraview.size.Size;
import static android.view.ViewGroup.LayoutParams.*;
public class TestActivity extends Activity {

@ -3,6 +3,11 @@ package com.otaliastudios.cameraview;
import android.location.Location;
import com.otaliastudios.cameraview.controls.Audio;
import com.otaliastudios.cameraview.controls.Facing;
import com.otaliastudios.cameraview.controls.VideoCodec;
import com.otaliastudios.cameraview.size.Size;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import androidx.test.filters.SmallTest;
@ -19,7 +24,7 @@ import static org.junit.Assert.assertEquals;
@SmallTest
public class VideoResultTest extends BaseTest {
private VideoResult result = new VideoResult();
private VideoResult.Stub stub = new VideoResult.Stub();
@Test
public void testResult() {
@ -38,21 +43,22 @@ public class VideoResultTest extends BaseTest {
Audio audio = Audio.ON;
Facing facing = Facing.FRONT;
result.file = file;
result.rotation = rotation;
result.size = size;
result.codec = codec;
result.location = location;
result.isSnapshot = isSnapshot;
result.maxDuration = maxDuration;
result.maxSize = maxFileSize;
result.endReason = reason;
result.videoFrameRate = videoFrameRate;
result.videoBitRate = videoBitRate;
result.audioBitRate = audioBitRate;
result.audio = audio;
result.facing = facing;
stub.file = file;
stub.rotation = rotation;
stub.size = size;
stub.videoCodec = codec;
stub.location = location;
stub.isSnapshot = isSnapshot;
stub.maxDuration = maxDuration;
stub.maxSize = maxFileSize;
stub.endReason = reason;
stub.videoFrameRate = videoFrameRate;
stub.videoBitRate = videoBitRate;
stub.audioBitRate = audioBitRate;
stub.audio = audio;
stub.facing = facing;
VideoResult result = new VideoResult(stub);
assertEquals(result.getFile(), file);
assertEquals(result.getRotation(), rotation);
assertEquals(result.getSize(), size);

@ -1,4 +1,4 @@
package com.otaliastudios.cameraview;
package com.otaliastudios.cameraview.engine;
import android.graphics.Bitmap;
@ -6,6 +6,27 @@ import android.graphics.PointF;
import android.hardware.Camera;
import android.os.Build;
import com.otaliastudios.cameraview.BaseTest;
import com.otaliastudios.cameraview.CameraListener;
import com.otaliastudios.cameraview.CameraOptions;
import com.otaliastudios.cameraview.CameraUtils;
import com.otaliastudios.cameraview.CameraView;
import com.otaliastudios.cameraview.PictureResult;
import com.otaliastudios.cameraview.TestActivity;
import com.otaliastudios.cameraview.VideoResult;
import com.otaliastudios.cameraview.controls.Audio;
import com.otaliastudios.cameraview.controls.Flash;
import com.otaliastudios.cameraview.controls.Hdr;
import com.otaliastudios.cameraview.controls.Mode;
import com.otaliastudios.cameraview.controls.WhiteBalance;
import com.otaliastudios.cameraview.engine.Camera1Engine;
import com.otaliastudios.cameraview.engine.CameraEngine;
import com.otaliastudios.cameraview.frame.Frame;
import com.otaliastudios.cameraview.frame.FrameProcessor;
import com.otaliastudios.cameraview.internal.utils.Task;
import com.otaliastudios.cameraview.internal.utils.WorkerHandler;
import com.otaliastudios.cameraview.size.Size;
import androidx.annotation.NonNull;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import androidx.test.filters.MediumTest;
@ -34,7 +55,7 @@ import static org.mockito.Mockito.spy;
/**
* These tests work great on real devices, and are the only way to test actual CameraController
* These tests work great on real devices, and are the only way to test actual CameraEngine
* implementation - we really need to open the camera device.
* Unfortunately they fail unreliably on emulated devices, due to some bug with the
* emulated camera controller. Waiting for it to be fixed.
@ -48,7 +69,7 @@ public class IntegrationTest extends BaseTest {
public ActivityTestRule<TestActivity> rule = new ActivityTestRule<>(TestActivity.class);
private CameraView camera;
private Camera1 controller;
private Camera1Engine controller;
private CameraListener listener;
private Task<Throwable> uiExceptionTask;
@ -65,9 +86,11 @@ public class IntegrationTest extends BaseTest {
@Override
public void run() {
camera = new CameraView(rule.getActivity()) {
@NonNull
@Override
protected CameraController instantiateCameraController(CameraCallbacks callbacks) {
controller = new Camera1(callbacks);
protected CameraEngine instantiateCameraController(@NonNull CameraEngine.Callback callback) {
controller = new Camera1Engine(callback);
return controller;
}
};
@ -162,13 +185,13 @@ public class IntegrationTest extends BaseTest {
@Test
public void testOpenClose() throws Exception {
// Starting and stopping are hard to get since they happen on another thread.
assertEquals(controller.getState(), CameraController.STATE_STOPPED);
assertEquals(controller.getState(), CameraEngine.STATE_STOPPED);
waitForOpen(true);
assertEquals(controller.getState(), CameraController.STATE_STARTED);
assertEquals(controller.getState(), CameraEngine.STATE_STARTED);
waitForClose(true);
assertEquals(controller.getState(), CameraController.STATE_STOPPED);
assertEquals(controller.getState(), CameraEngine.STATE_STOPPED);
}
@Test

@ -1,8 +1,14 @@
package com.otaliastudios.cameraview;
package com.otaliastudios.cameraview.engine;
import android.hardware.Camera;
import com.otaliastudios.cameraview.BaseTest;
import com.otaliastudios.cameraview.controls.Facing;
import com.otaliastudios.cameraview.controls.Flash;
import com.otaliastudios.cameraview.controls.Hdr;
import com.otaliastudios.cameraview.controls.WhiteBalance;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import androidx.test.filters.SmallTest;
@ -16,7 +22,7 @@ import static org.junit.Assert.*;
@SmallTest
public class Mapper1Test extends BaseTest {
private Mapper mapper = new Mapper1();
private Mapper mapper = Mapper.get();
@Test
public void testMap() {

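Both this test and CameraOptions1Test above now obtain the mapper through a static factory instead of constructing Mapper1 directly; a one-line sketch of the new, engine-internal access pattern:

    Mapper mapper = Mapper.get();   // previously: Mapper mapper = new Mapper1();
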
@ -0,0 +1,175 @@
package com.otaliastudios.cameraview.engine;
import android.graphics.PointF;
import android.location.Location;
import com.otaliastudios.cameraview.CameraOptions;
import com.otaliastudios.cameraview.PictureResult;
import com.otaliastudios.cameraview.VideoResult;
import com.otaliastudios.cameraview.controls.Audio;
import com.otaliastudios.cameraview.controls.Facing;
import com.otaliastudios.cameraview.controls.Flash;
import com.otaliastudios.cameraview.engine.CameraEngine;
import com.otaliastudios.cameraview.gesture.Gesture;
import com.otaliastudios.cameraview.controls.Hdr;
import com.otaliastudios.cameraview.controls.Mode;
import com.otaliastudios.cameraview.controls.WhiteBalance;
import com.otaliastudios.cameraview.size.AspectRatio;
import com.otaliastudios.cameraview.size.Size;
import com.otaliastudios.cameraview.size.SizeSelector;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.annotation.VisibleForTesting;
import java.io.File;
public class MockCameraEngine extends CameraEngine {
public boolean mPictureCaptured;
public boolean mFocusStarted;
public boolean mZoomChanged;
public boolean mExposureCorrectionChanged;
public MockCameraEngine(CameraEngine.Callback callback) {
super(callback);
}
@Override
protected void onStart() {
}
@Override
protected void onStop() {
}
public void setMockCameraOptions(CameraOptions options) {
mCameraOptions = options;
}
public void setMockPreviewStreamSize(Size size) {
mPreviewStreamSize = size;
}
public void mockStarted(boolean started) {
mState = started ? STATE_STARTED : STATE_STOPPED;
}
public int getSnapshotMaxWidth() {
return mSnapshotMaxWidth;
}
public int getSnapshotMaxHeight() {
return mSnapshotMaxHeight;
}
public SizeSelector getInternalPreviewStreamSizeSelector() {
return super.getPreviewStreamSizeSelector();
}
public SizeSelector getInternalPictureSizeSelector() {
return super.getPictureSizeSelector();
}
public SizeSelector getInternalVideoSizeSelector() {
return super.getVideoSizeSelector();
}
@Override
public void setZoom(float zoom, @Nullable PointF[] points, boolean notify) {
mZoomValue = zoom;
mZoomChanged = true;
}
@Override
public void setExposureCorrection(float EVvalue, @NonNull float[] bounds, @Nullable PointF[] points, boolean notify) {
mExposureCorrectionValue = EVvalue;
mExposureCorrectionChanged = true;
}
@Override
public void setFacing(@NonNull Facing facing) {
mFacing = facing;
}
@Override
public void setFlash(@NonNull Flash flash) {
mFlash = flash;
}
@Override
public void setWhiteBalance(@NonNull WhiteBalance whiteBalance) {
mWhiteBalance = whiteBalance;
}
@Override
public void setMode(@NonNull Mode mode) {
mMode = mode;
}
@Override
public void setHdr(@NonNull Hdr hdr) {
mHdr = hdr;
}
@Override
public void setAudio(@NonNull Audio audio) {
mAudio = audio;
}
@Override
public void setLocation(@Nullable Location location) {
mLocation = location;
}
@Override
public void takePicture(@NonNull PictureResult.Stub stub) {
mPictureCaptured = true;
}
@Override
public void takePictureSnapshot(@NonNull PictureResult.Stub stub, @NonNull AspectRatio viewAspectRatio) {
}
@Override
public void takeVideo(@NonNull VideoResult.Stub stub, @NonNull File file) {
}
@Override
public void takeVideoSnapshot(@NonNull VideoResult.Stub stub, @NonNull File file, @NonNull AspectRatio viewAspectRatio) {
}
@Override
public void stopVideo() {
}
@Override
public void startAutoFocus(@Nullable Gesture gesture, @NonNull PointF point) {
mFocusStarted = true;
}
@Override
public void onSurfaceChanged() {
}
@Override
public void onSurfaceAvailable() {
}
@Override
public void onSurfaceDestroyed() {
}
@Override
public void onBufferAvailable(@NonNull byte[] buffer) {
}
@Override
public void setPlaySounds(boolean playSounds) {
}
}

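MockCameraEngine replaces the deleted MockCameraController and exposes its state through public fields and getInternal* getters. A hedged sketch of how a test wires it into CameraView, mirroring the CameraViewTest setup shown earlier (EngineInjectionExample is a hypothetical holder; imports are omitted):

    public class EngineInjectionExample {
        MockCameraEngine engine;

        CameraView createTestCameraView(Context context) {
            return new CameraView(context) {
                @NonNull
                @Override
                protected CameraEngine instantiateCameraController(@NonNull CameraEngine.Callback callback) {
                    engine = new MockCameraEngine(callback);
                    return engine;
                }
            };
        }
    }

Once constructed, tests drive the mock with mockStarted(true), setMockCameraOptions(...) and the other helpers listed above.
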
@ -1,19 +1,21 @@
package com.otaliastudios.cameraview;
package com.otaliastudios.cameraview.gesture;
import android.annotation.TargetApi;
import android.content.Context;
import androidx.test.espresso.Espresso;
import androidx.test.espresso.Root;
import androidx.test.espresso.ViewAssertion;
import androidx.test.espresso.ViewInteraction;
import androidx.test.espresso.assertion.ViewAssertions;
import androidx.test.espresso.matcher.RootMatchers;
import androidx.test.espresso.matcher.ViewMatchers;
import androidx.test.rule.ActivityTestRule;
import android.view.MotionEvent;
import android.view.View;
import com.otaliastudios.cameraview.BaseTest;
import com.otaliastudios.cameraview.TestActivity;
import com.otaliastudios.cameraview.gesture.Gesture;
import com.otaliastudios.cameraview.gesture.GestureLayout;
import com.otaliastudios.cameraview.internal.utils.Task;
import org.hamcrest.Matchers;
import org.junit.Before;
import org.junit.Rule;
@ -40,7 +42,7 @@ public abstract class GestureLayoutTest<T extends GestureLayout> extends BaseTes
public void run() {
TestActivity a = rule.getActivity();
layout = create(a);
layout.enable(true);
layout.setActive(true);
a.inflate(layout);
touch = new Task<>();
@ -48,7 +50,7 @@ public abstract class GestureLayoutTest<T extends GestureLayout> extends BaseTes
@Override
public boolean onTouch(View view, MotionEvent motionEvent) {
boolean found = layout.onTouchEvent(motionEvent);
if (found) touch.end(layout.getGestureType());
if (found) touch.end(layout.getGesture());
return true;
}
});

@ -1,7 +1,12 @@
package com.otaliastudios.cameraview;
package com.otaliastudios.cameraview.gesture;
import android.content.Context;
import com.otaliastudios.cameraview.gesture.Gesture;
import com.otaliastudios.cameraview.gesture.GestureLayoutTest;
import com.otaliastudios.cameraview.gesture.PinchGestureLayout;
import androidx.test.espresso.ViewAction;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import androidx.test.filters.SmallTest;
@ -26,7 +31,7 @@ public class PinchGestureLayoutTest extends GestureLayoutTest<PinchGestureLayout
@Test
public void testDefaults() {
assertEquals(layout.getGestureType(), Gesture.PINCH);
assertEquals(layout.getGesture(), Gesture.PINCH);
assertEquals(layout.getPoints().length, 2);
assertEquals(layout.getPoints()[0].x, 0, 0);
assertEquals(layout.getPoints()[0].y, 0, 0);
@ -54,7 +59,7 @@ public class PinchGestureLayoutTest extends GestureLayoutTest<PinchGestureLayout
// How will this move our parameter?
float curr = 0.5f, min = 0f, max = 1f;
float newValue = layout.scaleValue(curr, min, max);
float newValue = layout.computeValue(curr, min, max);
if (increasing) {
assertTrue(newValue > curr);
assertTrue(newValue <= max);

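The gesture tests in this area track a set of renames on GestureLayout: enable()/enabled() become setActive()/isActive(), getGestureType() becomes getGesture(), and scaleValue() becomes computeValue(). A compact sketch of the renamed surface, using only calls that appear in these tests (their visibility may still be package-private or protected):

    layout.setActive(true);                           // was: layout.enable(true)
    boolean active = layout.isActive();               // was: layout.enabled()
    Gesture gesture = layout.getGesture();            // was: layout.getGestureType()
    float next = layout.computeValue(0.5f, 0f, 1f);   // was: layout.scaleValue(curr, min, max)
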
@ -1,7 +1,12 @@
package com.otaliastudios.cameraview;
package com.otaliastudios.cameraview.gesture;
import android.content.Context;
import com.otaliastudios.cameraview.gesture.Gesture;
import com.otaliastudios.cameraview.gesture.GestureLayoutTest;
import com.otaliastudios.cameraview.gesture.ScrollGestureLayout;
import androidx.test.espresso.ViewAction;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import androidx.test.filters.SmallTest;
@ -31,7 +36,7 @@ public class ScrollGestureLayoutTest extends GestureLayoutTest<ScrollGestureLayo
@Test
public void testDefaults() {
assertNull(layout.getGestureType());
assertNull(layout.getGesture());
assertEquals(layout.getPoints().length, 2);
assertEquals(layout.getPoints()[0].x, 0, 0);
assertEquals(layout.getPoints()[0].y, 0, 0);
@ -41,7 +46,7 @@ public class ScrollGestureLayoutTest extends GestureLayoutTest<ScrollGestureLayo
@Test
public void testScrollDisabled() {
layout.enable(false);
layout.setActive(false);
touch.listen();
touch.start();
onLayout().perform(swipeUp());
@ -58,7 +63,7 @@ public class ScrollGestureLayoutTest extends GestureLayoutTest<ScrollGestureLayo
// How will this move our parameter?
float curr = 0.5f, min = 0f, max = 1f;
float newValue = layout.scaleValue(curr, min, max);
float newValue = layout.computeValue(curr, min, max);
if (increasing) {
assertTrue(newValue > curr);
assertTrue(newValue <= max);

@ -1,4 +1,4 @@
package com.otaliastudios.cameraview;
package com.otaliastudios.cameraview.gesture;
import android.content.Context;
@ -11,6 +11,11 @@ import androidx.test.filters.SmallTest;
import android.view.InputDevice;
import android.view.MotionEvent;
import com.otaliastudios.cameraview.gesture.Gesture;
import com.otaliastudios.cameraview.gesture.GestureLayoutTest;
import com.otaliastudios.cameraview.gesture.TapGestureLayout;
import com.otaliastudios.cameraview.size.Size;
import org.junit.Test;
import org.junit.runner.RunWith;
@ -28,7 +33,7 @@ public class TapGestureLayoutTest extends GestureLayoutTest<TapGestureLayout> {
@Test
public void testDefaults() {
assertNull(layout.getGestureType());
assertNull(layout.getGesture());
assertEquals(layout.getPoints().length, 1);
assertEquals(layout.getPoints()[0].x, 0, 0);
assertEquals(layout.getPoints()[0].y, 0, 0);
@ -52,7 +57,7 @@ public class TapGestureLayoutTest extends GestureLayoutTest<TapGestureLayout> {
@Test
public void testTapWhileDisabled() {
layout.enable(false);
layout.setActive(false);
touch.listen();
touch.start();
onLayout().perform(click());

@ -1,6 +1,11 @@
package com.otaliastudios.cameraview;
package com.otaliastudios.cameraview.internal;
import com.otaliastudios.cameraview.BaseTest;
import com.otaliastudios.cameraview.TestActivity;
import com.otaliastudios.cameraview.controls.Grid;
import com.otaliastudios.cameraview.internal.GridLinesLayout;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import androidx.test.filters.MediumTest;
import androidx.test.rule.ActivityTestRule;

@ -1,8 +1,13 @@
package com.otaliastudios.cameraview;
package com.otaliastudios.cameraview.internal.utils;
import android.graphics.Rect;
import com.otaliastudios.cameraview.BaseTest;
import com.otaliastudios.cameraview.internal.utils.CropHelper;
import com.otaliastudios.cameraview.size.AspectRatio;
import com.otaliastudios.cameraview.size.Size;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import androidx.test.filters.SmallTest;

@ -1,10 +1,13 @@
package com.otaliastudios.cameraview;
package com.otaliastudios.cameraview.internal.utils;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import androidx.test.filters.SmallTest;
import android.view.OrientationEventListener;
import com.otaliastudios.cameraview.BaseTest;
import com.otaliastudios.cameraview.internal.utils.OrientationHelper;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;

@ -1,6 +1,10 @@
package com.otaliastudios.cameraview;
package com.otaliastudios.cameraview.internal.utils;
import com.otaliastudios.cameraview.BaseTest;
import com.otaliastudios.cameraview.internal.utils.Task;
import com.otaliastudios.cameraview.internal.utils.WorkerHandler;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import androidx.test.filters.SmallTest;

@ -1,6 +1,10 @@
package com.otaliastudios.cameraview;
package com.otaliastudios.cameraview.picture;
import com.otaliastudios.cameraview.BaseTest;
import com.otaliastudios.cameraview.PictureResult;
import com.otaliastudios.cameraview.picture.PictureRecorder;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import androidx.test.filters.SmallTest;
@ -8,6 +12,8 @@ import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mockito;
import java.lang.reflect.Constructor;
import static org.junit.Assert.assertNull;
@ -16,11 +22,11 @@ import static org.junit.Assert.assertNull;
public class PictureRecorderTest extends BaseTest {
@Test
public void testRecorder() {
PictureResult result = new PictureResult();
public void testRecorder() throws Exception {
PictureResult.Stub result = createStub();
PictureRecorder.PictureResultListener listener = Mockito.mock(PictureRecorder.PictureResultListener.class);
PictureRecorder recorder = new PictureRecorder(result, listener) {
void take() {
public void take() {
dispatchResult();
}
};
@ -29,4 +35,10 @@ public class PictureRecorderTest extends BaseTest {
assertNull(recorder.mListener);
assertNull(recorder.mResult);
}
private PictureResult.Stub createStub() throws Exception {
Constructor<PictureResult.Stub> constructor = PictureResult.Stub.class.getDeclaredConstructor();
constructor.setAccessible(true);
return constructor.newInstance();
}
}

@ -1,10 +1,17 @@
package com.otaliastudios.cameraview;
package com.otaliastudios.cameraview.preview;
import android.content.Context;
import androidx.test.rule.ActivityTestRule;
import android.view.ViewGroup;
import com.otaliastudios.cameraview.BaseTest;
import com.otaliastudios.cameraview.TestActivity;
import com.otaliastudios.cameraview.internal.utils.Task;
import com.otaliastudios.cameraview.preview.CameraPreview;
import com.otaliastudios.cameraview.size.AspectRatio;
import com.otaliastudios.cameraview.size.Size;
import org.junit.After;
import org.junit.Before;
import org.junit.Rule;
@ -102,7 +109,7 @@ public abstract class CameraPreviewTest extends BaseTest {
@Test
public void testDesiredSize() {
preview.setStreamSize(160, 90, false);
preview.setStreamSize(160, 90);
assertEquals(160, preview.getStreamSize().getWidth());
assertEquals(90, preview.getStreamSize().getHeight());
}
@ -154,7 +161,7 @@ public abstract class CameraPreviewTest extends BaseTest {
private void setDesiredAspectRatio(float desiredAspectRatio) {
preview.mCropTask.listen();
preview.setStreamSize((int) (10f * desiredAspectRatio), 10, false); // Wait...
preview.setStreamSize((int) (10f * desiredAspectRatio), 10); // Wait...
preview.mCropTask.await();
assertEquals(desiredAspectRatio, getViewAspectRatioWithScale(), 0.01f);

@ -1,4 +1,4 @@
package com.otaliastudios.cameraview;
package com.otaliastudios.cameraview.preview;
import android.content.Context;
@ -7,6 +7,10 @@ import androidx.test.ext.junit.runners.AndroidJUnit4;
import androidx.test.filters.SmallTest;
import android.view.ViewGroup;
import com.otaliastudios.cameraview.preview.CameraPreview;
import com.otaliastudios.cameraview.preview.CameraPreviewTest;
import com.otaliastudios.cameraview.preview.GlCameraPreview;
import org.junit.runner.RunWith;
@RunWith(AndroidJUnit4.class)

@ -1,4 +1,4 @@
package com.otaliastudios.cameraview;
package com.otaliastudios.cameraview.preview;
import android.content.Context;
@ -7,14 +7,16 @@ import androidx.annotation.NonNull;
import android.view.View;
import android.view.ViewGroup;
import com.otaliastudios.cameraview.preview.CameraPreview;
public class MockCameraPreview extends CameraPreview<View, Void> {
MockCameraPreview(Context context, ViewGroup parent) {
public MockCameraPreview(Context context, ViewGroup parent) {
super(context, parent, null);
}
@Override
boolean supportsCropping() {
public boolean supportsCropping() {
return true;
}
@ -26,19 +28,20 @@ public class MockCameraPreview extends CameraPreview<View, Void> {
@NonNull
@Override
Class<Void> getOutputClass() {
public Class<Void> getOutputClass() {
return null;
}
@NonNull
@Override
Void getOutput() {
public Void getOutput() {
return null;
}
@NonNull
@Override
View getRootView() {
public View getRootView() {
return null;
}
}

@ -1,4 +1,4 @@
package com.otaliastudios.cameraview;
package com.otaliastudios.cameraview.preview;
import android.content.Context;
@ -7,7 +7,10 @@ import androidx.test.ext.junit.runners.AndroidJUnit4;
import androidx.test.filters.SmallTest;
import android.view.ViewGroup;
import org.junit.Test;
import com.otaliastudios.cameraview.preview.CameraPreview;
import com.otaliastudios.cameraview.preview.CameraPreviewTest;
import com.otaliastudios.cameraview.preview.SurfaceCameraPreview;
import org.junit.runner.RunWith;
@RunWith(AndroidJUnit4.class)

@ -1,4 +1,4 @@
package com.otaliastudios.cameraview;
package com.otaliastudios.cameraview.preview;
import android.content.Context;
@ -7,6 +7,10 @@ import androidx.test.ext.junit.runners.AndroidJUnit4;
import androidx.test.filters.SmallTest;
import android.view.ViewGroup;
import com.otaliastudios.cameraview.preview.CameraPreview;
import com.otaliastudios.cameraview.preview.CameraPreviewTest;
import com.otaliastudios.cameraview.preview.TextureCameraPreview;
import org.junit.runner.RunWith;
@RunWith(AndroidJUnit4.class)

@ -1,6 +1,11 @@
package com.otaliastudios.cameraview;
package com.otaliastudios.cameraview.video;
import com.otaliastudios.cameraview.BaseTest;
import com.otaliastudios.cameraview.PictureResult;
import com.otaliastudios.cameraview.VideoResult;
import com.otaliastudios.cameraview.video.VideoRecorder;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import androidx.test.filters.SmallTest;
@ -8,6 +13,8 @@ import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mockito;
import java.lang.reflect.Constructor;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
@ -17,12 +24,12 @@ import static org.junit.Assert.assertNull;
public class VideoRecorderTest extends BaseTest {
@Test
public void testRecorder() {
VideoResult result = new VideoResult();
public void testRecorder() throws Exception {
VideoResult.Stub result = createStub();
VideoRecorder.VideoResultListener listener = Mockito.mock(VideoRecorder.VideoResultListener.class);
VideoRecorder recorder = new VideoRecorder(result, listener) {
void start() {}
void stop() {
public void start() {}
public void stop() {
dispatchResult();
}
};
@ -32,4 +39,10 @@ public class VideoRecorderTest extends BaseTest {
assertNull(recorder.mListener);
assertNull(recorder.mResult);
}
private VideoResult.Stub createStub() throws Exception {
Constructor<VideoResult.Stub> constructor = VideoResult.Stub.class.getDeclaredConstructor();
constructor.setAccessible(true);
return constructor.newInstance();
}
}

@ -1,3 +0,0 @@
package com.otaliastudios.cameraview;
@interface EncoderThread {}

@ -1,11 +0,0 @@
package com.otaliastudios.cameraview;
import android.media.MediaCodec;
import java.nio.ByteBuffer;
class OutputBuffer {
MediaCodec.BufferInfo info;
int trackIndex;
ByteBuffer data;
}

@ -1,89 +0,0 @@
package com.otaliastudios.cameraview;
import java.util.concurrent.LinkedBlockingQueue;
import androidx.annotation.CallSuper;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
class Pool<T> {
private static final String TAG = Pool.class.getSimpleName();
private static final CameraLogger LOG = CameraLogger.create(TAG);
private int maxPoolSize;
private int activeCount;
private LinkedBlockingQueue<T> mQueue;
private Factory<T> factory;
interface Factory<T> {
T create();
}
Pool(int maxPoolSize, Factory<T> factory) {
this.maxPoolSize = maxPoolSize;
this.mQueue = new LinkedBlockingQueue<>(maxPoolSize);
this.factory = factory;
}
boolean canGet() {
return count() < maxPoolSize;
}
@Nullable
T get() {
T buffer = mQueue.poll();
if (buffer != null) {
activeCount++; // the item came out of the cached queue, so count it as active again
LOG.v("GET: Reusing recycled item.", this);
return buffer;
}
if (!canGet()) {
LOG.v("GET: Returning null. Too much items requested.", this);
return null;
}
activeCount++;
LOG.v("GET: Creating a new item.", this);
return factory.create();
}
void recycle(@NonNull T item) {
LOG.v("RECYCLE: Recycling item.", this);
if (--activeCount < 0) {
throw new IllegalStateException("Trying to recycle an item which makes activeCount < 0." +
"This means that this or some previous items being recycled were not coming from " +
"this pool, or some item was recycled more than once. " + this);
}
if (!mQueue.offer(item)) {
throw new IllegalStateException("Trying to recycle an item while the queue is full. " +
"This means that this or some previous items being recycled were not coming from " +
"this pool, or some item was recycled more than once. " + this);
}
}
@NonNull
@Override
public String toString() {
return getClass().getSimpleName() + " -- count:" + count() + ", active:" + activeCount() + ", cached:" + cachedCount();
}
final int count() {
return activeCount() + cachedCount();
}
final int activeCount() {
return activeCount;
}
final int cachedCount() {
return mQueue.size();
}
@CallSuper
void clear() {
mQueue.clear();
}
}
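For context, a rough usage sketch of the Pool contract above. The class is package-private, so this is illustrative only, and the byte[] buffer factory is an invented example:

    // Hypothetical pool of five 1 MB buffers built on the Pool<T> API shown above.
    Pool<byte[]> bufferPool = new Pool<>(5, new Pool.Factory<byte[]>() {
        @Override
        public byte[] create() {
            return new byte[1024 * 1024];
        }
    });

    byte[] buffer = bufferPool.get();    // returns null once 5 items are already active
    if (buffer != null) {
        // ... fill and consume the buffer ...
        bufferPool.recycle(buffer);      // must come from this pool, recycled exactly once
    }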

@ -1,6 +1,8 @@
package com.otaliastudios.cameraview;
import com.otaliastudios.cameraview.controls.Facing;
/**
* Holds an error with the camera configuration.
*/
@ -51,16 +53,17 @@ public class CameraException extends RuntimeException {
private int reason = REASON_UNKNOWN;
CameraException(Throwable cause) {
@SuppressWarnings("WeakerAccess")
public CameraException(Throwable cause) {
super(cause);
}
CameraException(Throwable cause, int reason) {
public CameraException(Throwable cause, int reason) {
super(cause);
this.reason = reason;
}
CameraException(int reason) {
public CameraException(int reason) {
super();
this.reason = reason;
}
@ -76,6 +79,7 @@ public class CameraException extends RuntimeException {
*
* @return true if this error is unrecoverable
*/
@SuppressWarnings("unused")
public boolean isUnrecoverable() {
switch (getReason()) {
case REASON_FAILED_TO_CONNECT: return true;
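For reference, a hedged sketch of how the now-public reason API above might be consumed; the error reaches the app through CameraListener#onCameraError, and cameraView is assumed to be in scope:

    cameraView.addCameraListener(new CameraListener() {
        @Override
        public void onCameraError(@NonNull CameraException exception) {
            if (exception.isUnrecoverable()) {
                // e.g. REASON_FAILED_TO_CONNECT: the session is gone, show an error UI.
            } else {
                // Recoverable: the preview keeps running, logging may be enough.
            }
        }
    });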

@ -5,6 +5,12 @@ import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.annotation.UiThread;
/**
* The base class for receiving updates from a {@link CameraView} instance.
* You can add and remove listeners using {@link CameraView#addCameraListener(CameraListener)}
* and {@link CameraView#removeCameraListener(CameraListener)}.
*/
@SuppressWarnings({"WeakerAccess", "unused"})
public abstract class CameraListener {
@ -15,18 +21,14 @@ public abstract class CameraListener {
* @param options camera supported options
*/
@UiThread
public void onCameraOpened(@NonNull CameraOptions options) {
}
public void onCameraOpened(@NonNull CameraOptions options) { }
/**
* Notifies that the camera session was closed.
*/
@UiThread
public void onCameraClosed() {
}
public void onCameraClosed() { }
/**
@ -39,9 +41,7 @@ public abstract class CameraListener {
* @param exception the error
*/
@UiThread
public void onCameraError(@NonNull CameraException exception) {
}
public void onCameraError(@NonNull CameraException exception) { }
/**
@ -54,9 +54,7 @@ public abstract class CameraListener {
* @param result captured picture
*/
@UiThread
public void onPictureTaken(@NonNull PictureResult result) {
}
public void onPictureTaken(@NonNull PictureResult result) { }
/**
@ -65,9 +63,7 @@ public abstract class CameraListener {
* @param result the video result
*/
@UiThread
public void onVideoTaken(@NonNull VideoResult result) {
}
public void onVideoTaken(@NonNull VideoResult result) { }
/**
@ -81,9 +77,7 @@ public abstract class CameraListener {
* @param orientation either 0, 90, 180 or 270
*/
@UiThread
public void onOrientationChanged(int orientation) {
}
public void onOrientationChanged(int orientation) { }
/**
@ -94,9 +88,7 @@ public abstract class CameraListener {
* @param point coordinates with respect to CameraView.getWidth() and CameraView.getHeight()
*/
@UiThread
public void onFocusStart(@NonNull PointF point) {
}
public void onFocusStart(@NonNull PointF point) { }
/**
@ -109,9 +101,7 @@ public abstract class CameraListener {
* @param point coordinates with respect to CameraView.getWidth() and CameraView.getHeight()
*/
@UiThread
public void onFocusEnd(boolean successful, @NonNull PointF point) {
}
public void onFocusEnd(boolean successful, @NonNull PointF point) { }
/**
@ -123,9 +113,7 @@ public abstract class CameraListener {
* @param fingers finger positions that caused the event, null if not caused by touch
*/
@UiThread
public void onZoomChanged(float newValue, @NonNull float[] bounds, @Nullable PointF[] fingers) {
}
public void onZoomChanged(float newValue, @NonNull float[] bounds, @Nullable PointF[] fingers) { }
/**
@ -137,8 +125,6 @@ public abstract class CameraListener {
* @param fingers finger positions that caused the event, null if not caused by touch
*/
@UiThread
public void onExposureCorrectionChanged(float newValue, @NonNull float[] bounds, @Nullable PointF[] fingers) {
}
public void onExposureCorrectionChanged(float newValue, @NonNull float[] bounds, @Nullable PointF[] fingers) { }
}
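A minimal listener sketch covering a few of the callbacks above; cameraView is a CameraView instance obtained elsewhere (for example from the layout), and imports are omitted:

    cameraView.addCameraListener(new CameraListener() {
        @Override
        public void onCameraOpened(@NonNull CameraOptions options) {
            // Called on the UI thread once the engine is up; options lists what the sensor supports.
        }

        @Override
        public void onPictureTaken(@NonNull PictureResult result) {
            // Delivered after takePicture()/takePictureSnapshot(); decode or save off the UI thread.
        }

        @Override
        public void onZoomChanged(float newValue, @NonNull float[] bounds, @Nullable PointF[] fingers) {
            // fingers is null when the change did not come from a pinch gesture.
        }
    });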

@ -3,6 +3,8 @@ package com.otaliastudios.cameraview;
import androidx.annotation.IntDef;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.annotation.VisibleForTesting;
import android.util.Log;
import java.lang.annotation.Retention;
@ -13,6 +15,7 @@ import java.util.List;
/**
* Utility class that can log traces and info.
*/
@SuppressWarnings({"WeakerAccess", "UnusedReturnValue"})
public final class CameraLogger {
public final static int LEVEL_VERBOSE = 0;
@ -49,8 +52,8 @@ public final class CameraLogger {
void log(@LogLevel int level, @NonNull String tag, @NonNull String message, @Nullable Throwable throwable);
}
static String lastMessage;
static String lastTag;
@VisibleForTesting static String lastMessage;
@VisibleForTesting static String lastTag;
private static int sLevel;
private static List<Logger> sLoggers;
@ -71,7 +74,14 @@ public final class CameraLogger {
});
}
static CameraLogger create(@NonNull String tag) {
/**
* Creates a CameraLogger that will collect logs under the given tag
* and dispatch them to the registered {@link Logger}s.
*
* @param tag the logger tag
* @return a new CameraLogger
*/
public static CameraLogger create(@NonNull String tag) {
return new CameraLogger(tag);
}
@ -121,24 +131,49 @@ public final class CameraLogger {
return sLevel <= messageLevel && sLoggers.size() > 0;
}
void v(@NonNull Object... data) {
log(LEVEL_VERBOSE, data);
/**
* Log to the verbose channel.
* @param data log contents
* @return the log message, if logged
*/
@Nullable
public String v(@NonNull Object... data) {
return log(LEVEL_VERBOSE, data);
}
void i(@NonNull Object... data) {
log(LEVEL_INFO, data);
/**
* Log to the info channel.
* @param data log contents
* @return the log message, if logged
*/
@Nullable
public String i(@NonNull Object... data) {
return log(LEVEL_INFO, data);
}
void w(@NonNull Object... data) {
log(LEVEL_WARNING, data);
/**
* Log to the warning channel.
* @param data log contents
* @return the log message, if logged
*/
@Nullable
public String w(@NonNull Object... data) {
return log(LEVEL_WARNING, data);
}
void e(@NonNull Object... data) {
log(LEVEL_ERROR, data);
/**
* Log to the error channel.
* @param data log contents
* @return the log message, if logged
*/
@Nullable
public String e(@NonNull Object... data) {
return log(LEVEL_ERROR, data);
}
private void log(@LogLevel int level, @NonNull Object... data) {
if (!should(level)) return;
@Nullable
private String log(@LogLevel int level, @NonNull Object... data) {
if (!should(level)) return null;
StringBuilder message = new StringBuilder();
Throwable throwable = null;
@ -155,6 +190,7 @@ public final class CameraLogger {
}
lastMessage = string;
lastTag = mTag;
return string;
}
}
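A short sketch of the reworked contract: create() is now public and each log call returns the formatted message, or null when the level filter (or the absence of registered Loggers) suppressed it. The tag and contents here are examples:

    CameraLogger log = CameraLogger.create("MyComponent");
    String message = log.w("Preview surface not ready, size:", 0, "x", 0);
    if (message != null) {
        // The call passed the level filter and was dispatched to the Logger list.
    }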

@ -1,9 +1,22 @@
package com.otaliastudios.cameraview;
import android.annotation.TargetApi;
import android.hardware.Camera;
import android.hardware.camera2.CameraCharacteristics;
import com.otaliastudios.cameraview.controls.Audio;
import com.otaliastudios.cameraview.controls.Control;
import com.otaliastudios.cameraview.controls.Facing;
import com.otaliastudios.cameraview.controls.Flash;
import com.otaliastudios.cameraview.engine.Mapper;
import com.otaliastudios.cameraview.gesture.GestureAction;
import com.otaliastudios.cameraview.controls.Grid;
import com.otaliastudios.cameraview.controls.Hdr;
import com.otaliastudios.cameraview.controls.Mode;
import com.otaliastudios.cameraview.controls.VideoCodec;
import com.otaliastudios.cameraview.controls.WhiteBalance;
import com.otaliastudios.cameraview.size.AspectRatio;
import com.otaliastudios.cameraview.size.Size;
import androidx.annotation.NonNull;
import java.util.Arrays;
@ -34,11 +47,11 @@ public class CameraOptions {
private boolean autoFocusSupported;
// Camera1 constructor.
// Camera1Engine constructor.
@SuppressWarnings("deprecation")
CameraOptions(@NonNull Camera.Parameters params, boolean flipSizes) {
public CameraOptions(@NonNull Camera.Parameters params, boolean flipSizes) {
List<String> strings;
Mapper mapper = new Mapper1();
Mapper mapper = Mapper.get();
// Facing
Camera.CameraInfo cameraInfo = new Camera.CameraInfo();
@ -117,12 +130,6 @@ public class CameraOptions {
}
}
// Camera2 constructor.
@TargetApi(21)
CameraOptions(@NonNull CameraCharacteristics params) {}
/**
* Shorthand for getSupported*().contains(value).
*

@ -1,5 +1,6 @@
package com.otaliastudios.cameraview;
import android.annotation.SuppressLint;
import android.content.Context;
import android.content.pm.PackageManager;
import android.graphics.Bitmap;
@ -7,6 +8,12 @@ import android.graphics.BitmapFactory;
import android.graphics.Matrix;
import android.hardware.Camera;
import android.os.Handler;
import com.otaliastudios.cameraview.controls.Facing;
import com.otaliastudios.cameraview.engine.Mapper;
import com.otaliastudios.cameraview.internal.utils.ExifHelper;
import com.otaliastudios.cameraview.internal.utils.WorkerHandler;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.annotation.WorkerThread;
@ -15,7 +22,6 @@ import androidx.exifinterface.media.ExifInterface;
import java.io.BufferedOutputStream;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
@ -24,6 +30,7 @@ import java.io.OutputStream;
/**
* Static utilities for dealing with camera I/O, orientations, etc.
*/
@SuppressWarnings("unused")
public class CameraUtils {
@ -51,8 +58,9 @@ public class CameraUtils {
* @param facing either {@link Facing#BACK} or {@link Facing#FRONT}
* @return true if such sensor exists
*/
public static boolean hasCameraFacing(@NonNull Context context, @NonNull Facing facing) {
int internal = new Mapper1().map(facing);
public static boolean hasCameraFacing(@SuppressWarnings("unused") @NonNull Context context,
@NonNull Facing facing) {
int internal = Mapper.get().map(facing);
Camera.CameraInfo cameraInfo = new Camera.CameraInfo();
for (int i = 0, count = Camera.getNumberOfCameras(); i < count; i++) {
Camera.getCameraInfo(i, cameraInfo);
@ -76,6 +84,7 @@ public class CameraUtils {
@SuppressWarnings("WeakerAccess")
@Nullable
@WorkerThread
@SuppressLint("NewApi")
public static File writeToFile(@NonNull final byte[] data, @NonNull File file) {
if (file.exists() && !file.delete()) return null;
try (OutputStream stream = new BufferedOutputStream(new FileOutputStream(file))) {
@ -208,8 +217,12 @@ public class CameraUtils {
* @param source a JPEG byte array
* @param maxWidth the max allowed width
* @param maxHeight the max allowed height
* @return the decoded bitmap, or null if an error is encountered
*/
static Bitmap decodeBitmap(@NonNull byte[] source, int maxWidth, int maxHeight) {
@SuppressWarnings("SameParameterValue")
@Nullable
@WorkerThread
public static Bitmap decodeBitmap(@NonNull byte[] source, int maxWidth, int maxHeight) {
return decodeBitmap(source, maxWidth, maxHeight, new BitmapFactory.Options());
}
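A hedged example of the now-public helpers; per the @WorkerThread annotations this is assumed to run off the UI thread, with context and jpeg provided by the caller:

    if (CameraUtils.hasCameraFacing(context, Facing.FRONT)) {
        // Downscale while decoding to at most 1000x1000; null means decoding failed (e.g. OOM).
        Bitmap bitmap = CameraUtils.decodeBitmap(jpeg, 1000, 1000);
        if (bitmap != null) {
            // ... use the bitmap ...
        }
    }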
@ -233,10 +246,11 @@ public class CameraUtils {
return decodeBitmap(source, maxWidth, maxHeight, options, -1);
}
// Null: got OOM
// TODO ignores flipping. but it should be super rare.
// Null means we got OOM
// Ignores flipping, but it should be super rare.
@SuppressWarnings("TryFinallyCanBeTryWithResources")
@Nullable
static Bitmap decodeBitmap(@NonNull byte[] source, int maxWidth, int maxHeight, @NonNull BitmapFactory.Options options, int rotation) {
private static Bitmap decodeBitmap(@NonNull byte[] source, int maxWidth, int maxHeight, @NonNull BitmapFactory.Options options, int rotation) {
if (maxWidth <= 0) maxWidth = Integer.MAX_VALUE;
if (maxHeight <= 0) maxHeight = Integer.MAX_VALUE;
int orientation;
@ -248,7 +262,7 @@ public class CameraUtils {
stream = new ByteArrayInputStream(source);
ExifInterface exif = new ExifInterface(stream);
int exifOrientation = exif.getAttributeInt(ExifInterface.TAG_ORIENTATION, ExifInterface.ORIENTATION_NORMAL);
orientation = readExifOrientation(exifOrientation);
orientation = ExifHelper.readExifOrientation(exifOrientation);
flip = exifOrientation == ExifInterface.ORIENTATION_FLIP_HORIZONTAL ||
exifOrientation == ExifInterface.ORIENTATION_FLIP_VERTICAL ||
exifOrientation == ExifInterface.ORIENTATION_TRANSPOSE ||
@ -305,31 +319,6 @@ public class CameraUtils {
return bitmap;
}
static int readExifOrientation(int exifOrientation) {
int orientation;
switch (exifOrientation) {
case ExifInterface.ORIENTATION_NORMAL:
case ExifInterface.ORIENTATION_FLIP_HORIZONTAL:
orientation = 0; break;
case ExifInterface.ORIENTATION_ROTATE_180:
case ExifInterface.ORIENTATION_FLIP_VERTICAL:
orientation = 180; break;
case ExifInterface.ORIENTATION_ROTATE_90:
case ExifInterface.ORIENTATION_TRANSPOSE:
orientation = 90; break;
case ExifInterface.ORIENTATION_ROTATE_270:
case ExifInterface.ORIENTATION_TRANSVERSE:
orientation = 270; break;
default: orientation = 0;
}
return orientation;
}
private static int computeSampleSize(int width, int height, int maxWidth, int maxHeight) {
// https://developer.android.com/topic/performance/graphics/load-bitmap.html
int inSampleSize = 1;

@ -4,6 +4,8 @@ import android.Manifest;
import android.annotation.SuppressLint;
import android.annotation.TargetApi;
import android.app.Activity;
import androidx.annotation.VisibleForTesting;
import androidx.lifecycle.Lifecycle;
import androidx.lifecycle.LifecycleObserver;
import androidx.lifecycle.LifecycleOwner;
@ -28,6 +30,42 @@ import android.view.MotionEvent;
import android.view.ViewGroup;
import android.widget.FrameLayout;
import com.otaliastudios.cameraview.controls.Audio;
import com.otaliastudios.cameraview.controls.Control;
import com.otaliastudios.cameraview.controls.ControlParser;
import com.otaliastudios.cameraview.controls.Facing;
import com.otaliastudios.cameraview.controls.Flash;
import com.otaliastudios.cameraview.engine.Camera1Engine;
import com.otaliastudios.cameraview.engine.CameraEngine;
import com.otaliastudios.cameraview.frame.Frame;
import com.otaliastudios.cameraview.frame.FrameProcessor;
import com.otaliastudios.cameraview.gesture.Gesture;
import com.otaliastudios.cameraview.gesture.GestureAction;
import com.otaliastudios.cameraview.controls.Grid;
import com.otaliastudios.cameraview.controls.Hdr;
import com.otaliastudios.cameraview.controls.Mode;
import com.otaliastudios.cameraview.controls.Preview;
import com.otaliastudios.cameraview.controls.VideoCodec;
import com.otaliastudios.cameraview.controls.WhiteBalance;
import com.otaliastudios.cameraview.gesture.GestureLayout;
import com.otaliastudios.cameraview.gesture.GestureParser;
import com.otaliastudios.cameraview.gesture.PinchGestureLayout;
import com.otaliastudios.cameraview.gesture.ScrollGestureLayout;
import com.otaliastudios.cameraview.gesture.TapGestureLayout;
import com.otaliastudios.cameraview.internal.GridLinesLayout;
import com.otaliastudios.cameraview.internal.utils.CropHelper;
import com.otaliastudios.cameraview.internal.utils.OrientationHelper;
import com.otaliastudios.cameraview.internal.utils.WorkerHandler;
import com.otaliastudios.cameraview.preview.CameraPreview;
import com.otaliastudios.cameraview.preview.GlCameraPreview;
import com.otaliastudios.cameraview.preview.SurfaceCameraPreview;
import com.otaliastudios.cameraview.preview.TextureCameraPreview;
import com.otaliastudios.cameraview.size.AspectRatio;
import com.otaliastudios.cameraview.size.Size;
import com.otaliastudios.cameraview.size.SizeSelector;
import com.otaliastudios.cameraview.size.SizeSelectorParser;
import com.otaliastudios.cameraview.size.SizeSelectors;
import java.io.File;
import java.util.ArrayList;
import java.util.HashMap;
@ -40,7 +78,10 @@ import static android.view.View.MeasureSpec.UNSPECIFIED;
import static android.view.ViewGroup.LayoutParams.MATCH_PARENT;
/**
* Entry point for the whole library.
* Please read the documentation for usage and the full set of features.
*/
public class CameraView extends FrameLayout implements LifecycleObserver {
private final static String TAG = CameraView.class.getSimpleName();
@ -57,13 +98,13 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
private Preview mPreview;
// Components
/* for tests */ CameraCallbacks mCameraCallbacks;
@VisibleForTesting CameraCallbacks mCameraCallbacks;
private CameraPreview mCameraPreview;
private OrientationHelper mOrientationHelper;
private CameraController mCameraController;
private CameraEngine mCameraEngine;
private MediaActionSound mSound;
/* for tests */ List<CameraListener> mListeners = new CopyOnWriteArrayList<>();
/* for tests */ List<FrameProcessor> mFrameProcessors = new CopyOnWriteArrayList<>();
@VisibleForTesting List<CameraListener> mListeners = new CopyOnWriteArrayList<>();
@VisibleForTesting List<FrameProcessor> mFrameProcessors = new CopyOnWriteArrayList<>();
private Lifecycle mLifecycle;
// Views
@ -72,6 +113,7 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
TapGestureLayout mTapGestureLayout;
ScrollGestureLayout mScrollGestureLayout;
private boolean mKeepScreenOn;
@SuppressWarnings({"FieldCanBeLocal", "unused"})
private boolean mExperimental;
// Threading
@ -94,101 +136,30 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
private void init(@NonNull Context context, @Nullable AttributeSet attrs) {
setWillNotDraw(false);
TypedArray a = context.getTheme().obtainStyledAttributes(attrs, R.styleable.CameraView, 0, 0);
ControlParser controls = new ControlParser(context, a);
// Self managed
boolean playSounds = a.getBoolean(R.styleable.CameraView_cameraPlaySounds, DEFAULT_PLAY_SOUNDS);
mExperimental = a.getBoolean(R.styleable.CameraView_cameraExperimental, false);
mPreview = Preview.fromValue(a.getInteger(R.styleable.CameraView_cameraPreview, Preview.DEFAULT.value()));
mPreview = controls.getPreview();
// Camera controller params
Facing facing = Facing.fromValue(a.getInteger(R.styleable.CameraView_cameraFacing, Facing.DEFAULT(context).value()));
Flash flash = Flash.fromValue(a.getInteger(R.styleable.CameraView_cameraFlash, Flash.DEFAULT.value()));
Grid grid = Grid.fromValue(a.getInteger(R.styleable.CameraView_cameraGrid, Grid.DEFAULT.value()));
int gridColor = a.getColor(R.styleable.CameraView_cameraGridColor, GridLinesLayout.DEFAULT_COLOR);
WhiteBalance whiteBalance = WhiteBalance.fromValue(a.getInteger(R.styleable.CameraView_cameraWhiteBalance, WhiteBalance.DEFAULT.value()));
Mode mode = Mode.fromValue(a.getInteger(R.styleable.CameraView_cameraMode, Mode.DEFAULT.value()));
Hdr hdr = Hdr.fromValue(a.getInteger(R.styleable.CameraView_cameraHdr, Hdr.DEFAULT.value()));
Audio audio = Audio.fromValue(a.getInteger(R.styleable.CameraView_cameraAudio, Audio.DEFAULT.value()));
VideoCodec codec = VideoCodec.fromValue(a.getInteger(R.styleable.CameraView_cameraVideoCodec, VideoCodec.DEFAULT.value()));
long videoMaxSize = (long) a.getFloat(R.styleable.CameraView_cameraVideoMaxSize, 0);
int videoMaxDuration = a.getInteger(R.styleable.CameraView_cameraVideoMaxDuration, 0);
int videoBitRate = a.getInteger(R.styleable.CameraView_cameraVideoBitRate, 0);
int audioBitRate = a.getInteger(R.styleable.CameraView_cameraAudioBitRate, 0);
long autoFocusResetDelay = (long) a.getInteger(R.styleable.CameraView_cameraAutoFocusResetDelay, (int) DEFAULT_AUTOFOCUS_RESET_DELAY_MILLIS);
// Picture size selector
List<SizeSelector> pictureConstraints = new ArrayList<>(3);
if (a.hasValue(R.styleable.CameraView_cameraPictureSizeMinWidth)) {
pictureConstraints.add(SizeSelectors.minWidth(a.getInteger(R.styleable.CameraView_cameraPictureSizeMinWidth, 0)));
}
if (a.hasValue(R.styleable.CameraView_cameraPictureSizeMaxWidth)) {
pictureConstraints.add(SizeSelectors.maxWidth(a.getInteger(R.styleable.CameraView_cameraPictureSizeMaxWidth, 0)));
}
if (a.hasValue(R.styleable.CameraView_cameraPictureSizeMinHeight)) {
pictureConstraints.add(SizeSelectors.minHeight(a.getInteger(R.styleable.CameraView_cameraPictureSizeMinHeight, 0)));
}
if (a.hasValue(R.styleable.CameraView_cameraPictureSizeMaxHeight)) {
pictureConstraints.add(SizeSelectors.maxHeight(a.getInteger(R.styleable.CameraView_cameraPictureSizeMaxHeight, 0)));
}
if (a.hasValue(R.styleable.CameraView_cameraPictureSizeMinArea)) {
pictureConstraints.add(SizeSelectors.minArea(a.getInteger(R.styleable.CameraView_cameraPictureSizeMinArea, 0)));
}
if (a.hasValue(R.styleable.CameraView_cameraPictureSizeMaxArea)) {
pictureConstraints.add(SizeSelectors.maxArea(a.getInteger(R.styleable.CameraView_cameraPictureSizeMaxArea, 0)));
}
if (a.hasValue(R.styleable.CameraView_cameraPictureSizeAspectRatio)) {
//noinspection ConstantConditions
pictureConstraints.add(SizeSelectors.aspectRatio(AspectRatio.parse(a.getString(R.styleable.CameraView_cameraPictureSizeAspectRatio)), 0));
}
if (a.getBoolean(R.styleable.CameraView_cameraPictureSizeSmallest, false)) pictureConstraints.add(SizeSelectors.smallest());
if (a.getBoolean(R.styleable.CameraView_cameraPictureSizeBiggest, false)) pictureConstraints.add(SizeSelectors.biggest());
SizeSelector pictureSelector = !pictureConstraints.isEmpty() ?
SizeSelectors.and(pictureConstraints.toArray(new SizeSelector[0])) :
SizeSelectors.biggest();
// Video size selector
List<SizeSelector> videoConstraints = new ArrayList<>(3);
if (a.hasValue(R.styleable.CameraView_cameraVideoSizeMinWidth)) {
videoConstraints.add(SizeSelectors.minWidth(a.getInteger(R.styleable.CameraView_cameraVideoSizeMinWidth, 0)));
}
if (a.hasValue(R.styleable.CameraView_cameraVideoSizeMaxWidth)) {
videoConstraints.add(SizeSelectors.maxWidth(a.getInteger(R.styleable.CameraView_cameraVideoSizeMaxWidth, 0)));
}
if (a.hasValue(R.styleable.CameraView_cameraVideoSizeMinHeight)) {
videoConstraints.add(SizeSelectors.minHeight(a.getInteger(R.styleable.CameraView_cameraVideoSizeMinHeight, 0)));
}
if (a.hasValue(R.styleable.CameraView_cameraVideoSizeMaxHeight)) {
videoConstraints.add(SizeSelectors.maxHeight(a.getInteger(R.styleable.CameraView_cameraVideoSizeMaxHeight, 0)));
}
if (a.hasValue(R.styleable.CameraView_cameraVideoSizeMinArea)) {
videoConstraints.add(SizeSelectors.minArea(a.getInteger(R.styleable.CameraView_cameraVideoSizeMinArea, 0)));
}
if (a.hasValue(R.styleable.CameraView_cameraVideoSizeMaxArea)) {
videoConstraints.add(SizeSelectors.maxArea(a.getInteger(R.styleable.CameraView_cameraVideoSizeMaxArea, 0)));
}
if (a.hasValue(R.styleable.CameraView_cameraVideoSizeAspectRatio)) {
//noinspection ConstantConditions
videoConstraints.add(SizeSelectors.aspectRatio(AspectRatio.parse(a.getString(R.styleable.CameraView_cameraVideoSizeAspectRatio)), 0));
}
if (a.getBoolean(R.styleable.CameraView_cameraVideoSizeSmallest, false)) videoConstraints.add(SizeSelectors.smallest());
if (a.getBoolean(R.styleable.CameraView_cameraVideoSizeBiggest, false)) videoConstraints.add(SizeSelectors.biggest());
SizeSelector videoSelector = !videoConstraints.isEmpty() ?
SizeSelectors.and(videoConstraints.toArray(new SizeSelector[0])) :
SizeSelectors.biggest();
// Gestures
GestureAction tapGesture = GestureAction.fromValue(a.getInteger(R.styleable.CameraView_cameraGestureTap, GestureAction.DEFAULT_TAP.value()));
GestureAction longTapGesture = GestureAction.fromValue(a.getInteger(R.styleable.CameraView_cameraGestureLongTap, GestureAction.DEFAULT_LONG_TAP.value()));
GestureAction pinchGesture = GestureAction.fromValue(a.getInteger(R.styleable.CameraView_cameraGesturePinch, GestureAction.DEFAULT_PINCH.value()));
GestureAction scrollHorizontalGesture = GestureAction.fromValue(a.getInteger(R.styleable.CameraView_cameraGestureScrollHorizontal, GestureAction.DEFAULT_SCROLL_HORIZONTAL.value()));
GestureAction scrollVerticalGesture = GestureAction.fromValue(a.getInteger(R.styleable.CameraView_cameraGestureScrollVertical, GestureAction.DEFAULT_SCROLL_VERTICAL.value()));
// Size selectors and gestures
SizeSelectorParser sizeSelectors = new SizeSelectorParser(a);
GestureParser gestures = new GestureParser(a);
a.recycle();
// Components
mCameraCallbacks = new Callbacks();
mCameraController = instantiateCameraController(mCameraCallbacks);
mCameraCallbacks = new CameraCallbacks();
mCameraEngine = instantiateCameraController(mCameraCallbacks);
mUiHandler = new Handler(Looper.getMainLooper());
mFrameProcessorsHandler = WorkerHandler.get("FrameProcessorsWorker");
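The attribute parsing removed above now lives in ControlParser, SizeSelectorParser and GestureParser; the same constraints can still be composed in code through SizeSelectors. A sketch with illustrative values, cameraView assumed in scope:

    SizeSelector pictureSelector = SizeSelectors.and(
            SizeSelectors.minWidth(1000),
            SizeSelectors.aspectRatio(AspectRatio.of(4, 3), 0),
            SizeSelectors.biggest());
    cameraView.setPictureSize(pictureSelector);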
@ -206,40 +177,53 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
setPlaySounds(playSounds);
// Apply camera controller params
setFacing(facing);
setFlash(flash);
setMode(mode);
setWhiteBalance(whiteBalance);
setGrid(grid);
setFacing(controls.getFacing());
setFlash(controls.getFlash());
setMode(controls.getMode());
setWhiteBalance(controls.getWhiteBalance());
setGrid(controls.getGrid());
setGridColor(gridColor);
setHdr(hdr);
setAudio(audio);
setHdr(controls.getHdr());
setAudio(controls.getAudio());
setAudioBitRate(audioBitRate);
setPictureSize(pictureSelector);
setVideoSize(videoSelector);
setVideoCodec(codec);
setPictureSize(sizeSelectors.getPictureSizeSelector());
setVideoSize(sizeSelectors.getVideoSizeSelector());
setVideoCodec(controls.getVideoCodec());
setVideoMaxSize(videoMaxSize);
setVideoMaxDuration(videoMaxDuration);
setVideoBitRate(videoBitRate);
setAutoFocusResetDelay(autoFocusResetDelay);
// Apply gestures
mapGesture(Gesture.TAP, tapGesture);
mapGesture(Gesture.LONG_TAP, longTapGesture);
mapGesture(Gesture.PINCH, pinchGesture);
mapGesture(Gesture.SCROLL_HORIZONTAL, scrollHorizontalGesture);
mapGesture(Gesture.SCROLL_VERTICAL, scrollVerticalGesture);
mapGesture(Gesture.TAP, gestures.getTapAction());
mapGesture(Gesture.LONG_TAP, gestures.getLongTapAction());
mapGesture(Gesture.PINCH, gestures.getPinchAction());
mapGesture(Gesture.SCROLL_HORIZONTAL, gestures.getHorizontalScrollAction());
mapGesture(Gesture.SCROLL_VERTICAL, gestures.getVerticalScrollAction());
if (!isInEditMode()) {
mOrientationHelper = new OrientationHelper(context, mCameraCallbacks);
}
}
protected CameraController instantiateCameraController(CameraCallbacks callbacks) {
return new Camera1(callbacks);
/**
* Instantiates the camera engine.
* @param callback the engine callback
* @return the engine
*/
@NonNull
protected CameraEngine instantiateCameraController(@NonNull CameraEngine.Callback callback) {
return new Camera1Engine(callback);
}
protected CameraPreview instantiatePreview(Context context, ViewGroup container) {
/**
* Instantiates the camera preview.
* @param context a context
* @param container the container
* @return the preview
*/
@NonNull
protected CameraPreview instantiatePreview(@NonNull Context context, @NonNull ViewGroup container) {
LOG.w("preview:", "isHardwareAccelerated:", isHardwareAccelerated());
switch (mPreview) {
case SURFACE:
@ -257,9 +241,10 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
}
}
/* for tests */ void instantiatePreview() {
@VisibleForTesting
void instantiatePreview() {
mCameraPreview = instantiatePreview(getContext(), this);
mCameraController.setPreview(mCameraPreview);
mCameraEngine.setPreview(mCameraPreview);
}
@Override
@ -314,7 +299,7 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
*/
@Override
protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
Size previewSize = mCameraController.getPreviewStreamSize(CameraController.REF_VIEW);
Size previewSize = mCameraEngine.getPreviewStreamSize(CameraEngine.REF_VIEW);
if (previewSize == null) {
LOG.w("onMeasure:", "surface is not ready. Calling default behavior.");
super.onMeasure(widthMeasureSpec, heightMeasureSpec);
@ -457,19 +442,19 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
mGestureMap.put(gesture, action);
switch (gesture) {
case PINCH:
mPinchGestureLayout.enable(mGestureMap.get(Gesture.PINCH) != none);
mPinchGestureLayout.setActive(mGestureMap.get(Gesture.PINCH) != none);
break;
case TAP:
// case DOUBLE_TAP:
case LONG_TAP:
mTapGestureLayout.enable(
mTapGestureLayout.setActive(
mGestureMap.get(Gesture.TAP) != none ||
// mGestureMap.get(Gesture.DOUBLE_TAP) != none ||
mGestureMap.get(Gesture.LONG_TAP) != none);
break;
case SCROLL_HORIZONTAL:
case SCROLL_VERTICAL:
mScrollGestureLayout.enable(
mScrollGestureLayout.setActive(
mGestureMap.get(Gesture.SCROLL_HORIZONTAL) != none ||
mGestureMap.get(Gesture.SCROLL_VERTICAL) != none);
break;
@ -498,6 +483,7 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
*/
@NonNull
public GestureAction getGestureAction(@NonNull Gesture gesture) {
//noinspection ConstantConditions
return mGestureMap.get(gesture);
}
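Gesture-to-action wiring from code goes through the same setActive() plumbing shown above. A sketch with example pairings; any Gesture/GestureAction combination from the enums can be used:

    cameraView.mapGesture(Gesture.PINCH, GestureAction.ZOOM);               // pinch to zoom
    cameraView.mapGesture(Gesture.TAP, GestureAction.FOCUS_WITH_MARKER);    // tap to focus
    cameraView.mapGesture(Gesture.SCROLL_HORIZONTAL, GestureAction.EXPOSURE_CORRECTION);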
@ -508,12 +494,14 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
}
@SuppressLint("ClickableViewAccessibility")
@Override
public boolean onTouchEvent(MotionEvent event) {
if (!isOpened()) return true;
// Pass to our own GestureLayouts
CameraOptions options = mCameraController.getCameraOptions(); // Non null
CameraOptions options = mCameraEngine.getCameraOptions(); // Non null
if (options == null) throw new IllegalStateException("Options should not be null here.");
if (mPinchGestureLayout.onTouchEvent(event)) {
LOG.i("onTouchEvent", "pinch!");
onGesture(mPinchGestureLayout, options);
@ -530,39 +518,40 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
// Some gesture layout detected a gesture. It's not known at this moment:
// (1) if it was mapped to some action (we check here)
// (2) if it's supported by the camera (CameraController checks)
// (2) if it's supported by the camera (CameraEngine checks)
private void onGesture(GestureLayout source, @NonNull CameraOptions options) {
Gesture gesture = source.getGestureType();
Gesture gesture = source.getGesture();
GestureAction action = mGestureMap.get(gesture);
PointF[] points = source.getPoints();
float oldValue, newValue;
//noinspection ConstantConditions
switch (action) {
case CAPTURE:
mCameraController.takePicture();
takePicture();
break;
case FOCUS:
case FOCUS_WITH_MARKER:
mCameraController.startAutoFocus(gesture, points[0]);
mCameraEngine.startAutoFocus(gesture, points[0]);
break;
case ZOOM:
oldValue = mCameraController.getZoomValue();
newValue = source.scaleValue(oldValue, 0, 1);
oldValue = mCameraEngine.getZoomValue();
newValue = source.computeValue(oldValue, 0, 1);
if (newValue != oldValue) {
mCameraController.setZoom(newValue, points, true);
mCameraEngine.setZoom(newValue, points, true);
}
break;
case EXPOSURE_CORRECTION:
oldValue = mCameraController.getExposureCorrectionValue();
oldValue = mCameraEngine.getExposureCorrectionValue();
float minValue = options.getExposureCorrectionMinValue();
float maxValue = options.getExposureCorrectionMaxValue();
newValue = source.scaleValue(oldValue, minValue, maxValue);
newValue = source.computeValue(oldValue, minValue, maxValue);
if (newValue != oldValue) {
float[] bounds = new float[]{minValue, maxValue};
mCameraController.setExposureCorrection(newValue, bounds, points, true);
mCameraEngine.setExposureCorrection(newValue, bounds, points, true);
}
break;
}
@ -577,11 +566,11 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
* @return whether the camera has started
*/
public boolean isOpened() {
return mCameraController.getState() >= CameraController.STATE_STARTED;
return mCameraEngine.getState() >= CameraEngine.STATE_STARTED;
}
private boolean isClosed() {
return mCameraController.getState() == CameraController.STATE_STOPPED;
return mCameraEngine.getState() == CameraEngine.STATE_STOPPED;
}
/**
@ -606,10 +595,10 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
if (!isEnabled()) return;
if (mCameraPreview != null) mCameraPreview.onResume();
if (checkPermissions(getAudio())) {
// Update display orientation for current CameraController
// Update display orientation for current CameraEngine
mOrientationHelper.enable(getContext());
mCameraController.setDisplayOffset(mOrientationHelper.getDisplayOffset());
mCameraController.start();
mCameraEngine.setDisplayOffset(mOrientationHelper.getDisplayOffset());
mCameraEngine.start();
}
}
@ -656,9 +645,9 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
return;
}
}
LOG.e("Permission error:", "When audio is enabled (Audio.ON),",
String message = LOG.e("Permission error:", "When audio is enabled (Audio.ON),",
"the RECORD_AUDIO permission should be added to the app manifest file.");
throw new IllegalStateException(CameraLogger.lastMessage);
throw new IllegalStateException(message);
} catch (PackageManager.NameNotFoundException e) {
// Not possible.
}
@ -672,7 +661,7 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
*/
@OnLifecycleEvent(Lifecycle.Event.ON_PAUSE)
public void close() {
mCameraController.stop();
mCameraEngine.stop();
if (mCameraPreview != null) mCameraPreview.onPause();
}
@ -685,7 +674,7 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
public void destroy() {
clearCameraListeners();
clearFrameProcessors();
mCameraController.destroy();
mCameraEngine.destroy();
if (mCameraPreview != null) mCameraPreview.onDestroy();
}
@ -732,7 +721,7 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
*/
@Nullable
public CameraOptions getCameraOptions() {
return mCameraController.getCameraOptions();
return mCameraEngine.getCameraOptions();
}
@ -757,7 +746,7 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
if (EVvalue < min) EVvalue = min;
if (EVvalue > max) EVvalue = max;
float[] bounds = new float[]{min, max};
mCameraController.setExposureCorrection(EVvalue, bounds, null, false);
mCameraEngine.setExposureCorrection(EVvalue, bounds, null, false);
}
}
@ -768,7 +757,7 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
* @return the current exposure correction value
*/
public float getExposureCorrection() {
return mCameraController.getExposureCorrectionValue();
return mCameraEngine.getExposureCorrectionValue();
}
@ -785,7 +774,7 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
public void setZoom(float zoom) {
if (zoom < 0) zoom = 0;
if (zoom > 1) zoom = 1;
mCameraController.setZoom(zoom, null, false);
mCameraEngine.setZoom(zoom, null, false);
}
@ -794,7 +783,7 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
* @return the current zoom value
*/
public float getZoom() {
return mCameraController.getZoomValue();
return mCameraEngine.getZoomValue();
}
@ -850,7 +839,7 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
* @param hdr desired hdr value
*/
public void setHdr(@NonNull Hdr hdr) {
mCameraController.setHdr(hdr);
mCameraEngine.setHdr(hdr);
}
@ -877,7 +866,7 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
*/
@NonNull
public Hdr getHdr() {
return mCameraController.getHdr();
return mCameraEngine.getHdr();
}
@ -893,7 +882,7 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
location.setAltitude(0);
location.setLatitude(latitude);
location.setLongitude(longitude);
mCameraController.setLocation(location);
mCameraEngine.setLocation(location);
}
@ -903,7 +892,7 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
* @param location current location
*/
public void setLocation(@Nullable Location location) {
mCameraController.setLocation(location);
mCameraEngine.setLocation(location);
}
@ -914,7 +903,7 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
*/
@Nullable
public Location getLocation() {
return mCameraController.getLocation();
return mCameraEngine.getLocation();
}
@ -930,7 +919,7 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
* @param whiteBalance desired white balance behavior.
*/
public void setWhiteBalance(@NonNull WhiteBalance whiteBalance) {
mCameraController.setWhiteBalance(whiteBalance);
mCameraEngine.setWhiteBalance(whiteBalance);
}
@ -940,7 +929,7 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
*/
@NonNull
public WhiteBalance getWhiteBalance() {
return mCameraController.getWhiteBalance();
return mCameraEngine.getWhiteBalance();
}
@ -953,7 +942,7 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
* @param facing a facing value.
*/
public void setFacing(@NonNull Facing facing) {
mCameraController.setFacing(facing);
mCameraEngine.setFacing(facing);
}
@ -963,7 +952,7 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
*/
@NonNull
public Facing getFacing() {
return mCameraController.getFacing();
return mCameraEngine.getFacing();
}
@ -974,7 +963,7 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
* @return the new facing value
*/
public Facing toggleFacing() {
Facing facing = mCameraController.getFacing();
Facing facing = mCameraEngine.getFacing();
switch (facing) {
case BACK:
setFacing(Facing.FRONT);
@ -985,7 +974,7 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
break;
}
return mCameraController.getFacing();
return mCameraEngine.getFacing();
}
@ -1000,7 +989,7 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
* @param flash desired flash mode.
*/
public void setFlash(@NonNull Flash flash) {
mCameraController.setFlash(flash);
mCameraEngine.setFlash(flash);
}
@ -1010,7 +999,7 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
*/
@NonNull
public Flash getFlash() {
return mCameraController.getFlash();
return mCameraEngine.getFlash();
}
@ -1026,11 +1015,11 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
if (audio == getAudio() || isClosed()) {
// The check already took place, or will happen on start().
mCameraController.setAudio(audio);
mCameraEngine.setAudio(audio);
} else if (checkPermissions(audio)) {
// Camera is running. Pass.
mCameraController.setAudio(audio);
mCameraEngine.setAudio(audio);
} else {
// This means that the audio permission is being asked.
@ -1048,7 +1037,7 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
*/
@NonNull
public Audio getAudio() {
return mCameraController.getAudio();
return mCameraEngine.getAudio();
}
@ -1060,14 +1049,15 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
* the autofocus will not be reset.
*/
public void setAutoFocusResetDelay(long delayMillis) {
mCameraController.setAutoFocusResetDelay(delayMillis);
mCameraEngine.setAutoFocusResetDelay(delayMillis);
}
/**
* Returns the current delay in milliseconds to reset the focus after an autofocus process.
* @return the current autofocus reset delay in milliseconds.
*/
public long getAutoFocusResetDelay() { return mCameraController.getAutoFocusResetDelay(); }
@SuppressWarnings("unused")
public long getAutoFocusResetDelay() { return mCameraEngine.getAutoFocusResetDelay(); }
/**
@ -1080,7 +1070,7 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
public void startAutoFocus(float x, float y) {
if (x < 0 || x > getWidth()) throw new IllegalArgumentException("x should be >= 0 and <= getWidth()");
if (y < 0 || y > getHeight()) throw new IllegalArgumentException("y should be >= 0 and <= getHeight()");
mCameraController.startAutoFocus(null, new PointF(x, y));
mCameraEngine.startAutoFocus(null, new PointF(x, y));
}
@ -1102,7 +1092,7 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
* @param selector a size selector
*/
public void setPreviewStreamSize(@NonNull SizeSelector selector) {
mCameraController.setPreviewStreamSizeSelector(selector);
mCameraEngine.setPreviewStreamSizeSelector(selector);
}
@ -1115,7 +1105,7 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
* @param mode desired session type.
*/
public void setMode(@NonNull Mode mode) {
mCameraController.setMode(mode);
mCameraEngine.setMode(mode);
}
@ -1125,7 +1115,7 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
*/
@NonNull
public Mode getMode() {
return mCameraController.getMode();
return mCameraEngine.getMode();
}
@ -1138,7 +1128,7 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
* @param selector a size selector
*/
public void setPictureSize(@NonNull SizeSelector selector) {
mCameraController.setPictureSizeSelector(selector);
mCameraEngine.setPictureSizeSelector(selector);
}
@ -1151,7 +1141,7 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
* @param selector a size selector
*/
public void setVideoSize(@NonNull SizeSelector selector) {
mCameraController.setVideoSizeSelector(selector);
mCameraEngine.setVideoSizeSelector(selector);
}
/**
@ -1161,15 +1151,16 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
* @param bitRate desired bit rate
*/
public void setVideoBitRate(int bitRate) {
mCameraController.setVideoBitRate(bitRate);
mCameraEngine.setVideoBitRate(bitRate);
}
/**
* Returns the current video bit rate.
* @return current bit rate
*/
@SuppressWarnings("unused")
public int getVideoBitRate() {
return mCameraController.getVideoBitRate();
return mCameraEngine.getVideoBitRate();
}
/**
@ -1179,15 +1170,16 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
* @param bitRate desired bit rate
*/
public void setAudioBitRate(int bitRate) {
mCameraController.setAudioBitRate(bitRate);
mCameraEngine.setAudioBitRate(bitRate);
}
/**
* Returns the current audio bit rate.
* @return current bit rate
*/
@SuppressWarnings("unused")
public int getAudioBitRate() {
return mCameraController.getAudioBitRate();
return mCameraEngine.getAudioBitRate();
}
/**
@ -1265,7 +1257,8 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
* @see #takePictureSnapshot()
*/
public void takePicture() {
mCameraController.takePicture();
PictureResult.Stub stub = new PictureResult.Stub();
mCameraEngine.takePicture(stub);
}
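Caller-side the Stub refactor is invisible: capture is still requested on the view and results come back through CameraListener. A sketch, with the output file and context as placeholder names:

    cameraView.takePicture();                 // -> CameraListener#onPictureTaken(PictureResult)
    cameraView.takeVideo(new File(context.getFilesDir(), "video.mp4"));
    // ... later, or when a configured max duration / size is reached:
    cameraView.stopVideo();                   // -> CameraListener#onVideoTaken(VideoResult)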
@ -1281,7 +1274,8 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
*/
public void takePictureSnapshot() {
if (getWidth() == 0 || getHeight() == 0) return;
mCameraController.takePictureSnapshot(AspectRatio.of(getWidth(), getHeight()));
PictureResult.Stub stub = new PictureResult.Stub();
mCameraEngine.takePictureSnapshot(stub, AspectRatio.of(getWidth(), getHeight()));
}
@ -1292,7 +1286,8 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
* @param file a file where the video will be saved
*/
public void takeVideo(@NonNull File file) {
mCameraController.takeVideo(file);
VideoResult.Stub stub = new VideoResult.Stub();
mCameraEngine.takeVideo(stub, file);
mUiHandler.post(new Runnable() {
@Override
public void run() {
@ -1313,7 +1308,8 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
*/
public void takeVideoSnapshot(@NonNull File file) {
if (getWidth() == 0 || getHeight() == 0) return;
mCameraController.takeVideoSnapshot(file, AspectRatio.of(getWidth(), getHeight()));
VideoResult.Stub stub = new VideoResult.Stub();
mCameraEngine.takeVideoSnapshot(stub, file, AspectRatio.of(getWidth(), getHeight()));
mUiHandler.post(new Runnable() {
@Override
public void run() {
@ -1400,7 +1396,7 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
* This will fire {@link CameraListener#onVideoTaken(VideoResult)}.
*/
public void stopVideo() {
mCameraController.stopVideo();
mCameraEngine.stopVideo();
mUiHandler.post(new Runnable() {
@Override
public void run() {
@ -1417,7 +1413,7 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
* @param maxWidth max width for snapshots
*/
public void setSnapshotMaxWidth(int maxWidth) {
mCameraController.setSnapshotMaxWidth(maxWidth);
mCameraEngine.setSnapshotMaxWidth(maxWidth);
}
/**
@ -1428,7 +1424,7 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
* @param maxHeight max height for snapshots
*/
public void setSnapshotMaxHeight(int maxHeight) {
mCameraController.setSnapshotMaxHeight(maxHeight);
mCameraEngine.setSnapshotMaxHeight(maxHeight);
}
/**
@ -1444,11 +1440,12 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
// Get the preview size and crop according to the current view size.
// It's better to do calculations in the REF_VIEW reference, and then flip if needed.
Size preview = mCameraController.getUncroppedSnapshotSize(CameraController.REF_VIEW);
Size preview = mCameraEngine.getUncroppedSnapshotSize(CameraEngine.REF_VIEW);
if (preview == null) return null; // Should never happen.
AspectRatio viewRatio = AspectRatio.of(getWidth(), getHeight());
Rect crop = CropHelper.computeCrop(preview, viewRatio);
Size cropSize = new Size(crop.width(), crop.height());
if (mCameraController.flip(CameraController.REF_VIEW, CameraController.REF_OUTPUT)) {
if (mCameraEngine.flip(CameraEngine.REF_VIEW, CameraEngine.REF_OUTPUT)) {
return cropSize.flip();
} else {
return cropSize;
@ -1467,7 +1464,7 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
*/
@Nullable
public Size getPictureSize() {
return mCameraController.getPictureSize(CameraController.REF_OUTPUT);
return mCameraEngine.getPictureSize(CameraEngine.REF_OUTPUT);
}
@ -1482,7 +1479,7 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
*/
@Nullable
public Size getVideoSize() {
return mCameraController.getVideoSize(CameraController.REF_OUTPUT);
return mCameraEngine.getVideoSize(CameraEngine.REF_OUTPUT);
}
@ -1502,7 +1499,7 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
if (requestCamera) permissions.add(Manifest.permission.CAMERA);
if (requestAudio) permissions.add(Manifest.permission.RECORD_AUDIO);
if (activity != null) {
activity.requestPermissions(permissions.toArray(new String[permissions.size()]),
activity.requestPermissions(permissions.toArray(new String[0]),
PERMISSION_REQUEST_CODE);
}
}
@ -1527,7 +1524,7 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
*/
public void setPlaySounds(boolean playSounds) {
mPlaySounds = playSounds && Build.VERSION.SDK_INT >= 16;
mCameraController.setPlaySounds(playSounds);
mCameraEngine.setPlaySounds(playSounds);
}
@ -1553,7 +1550,7 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
* @param codec requested video codec
*/
public void setVideoCodec(@NonNull VideoCodec codec) {
mCameraController.setVideoCodec(codec);
mCameraEngine.setVideoCodec(codec);
}
@ -1563,7 +1560,7 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
*/
@NonNull
public VideoCodec getVideoCodec() {
return mCameraController.getVideoCodec();
return mCameraEngine.getVideoCodec();
}
@ -1575,7 +1572,7 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
* @param videoMaxSizeInBytes The maximum video size in bytes
*/
public void setVideoMaxSize(long videoMaxSizeInBytes) {
mCameraController.setVideoMaxSize(videoMaxSizeInBytes);
mCameraEngine.setVideoMaxSize(videoMaxSizeInBytes);
}
@ -1587,7 +1584,7 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
* @return the maximum size in bytes
*/
public long getVideoMaxSize() {
return mCameraController.getVideoMaxSize();
return mCameraEngine.getVideoMaxSize();
}
@ -1599,7 +1596,7 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
* @param videoMaxDurationMillis The maximum video duration in milliseconds
*/
public void setVideoMaxDuration(int videoMaxDurationMillis) {
mCameraController.setVideoMaxDuration(videoMaxDurationMillis);
mCameraEngine.setVideoMaxDuration(videoMaxDurationMillis);
}
@ -1611,7 +1608,7 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
* @return the maximum duration in milliseconds
*/
public int getVideoMaxDuration() {
return mCameraController.getVideoMaxDuration();
return mCameraEngine.getVideoMaxDuration();
}
@ -1620,7 +1617,7 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
* @return boolean indicating if the camera is recording a video
*/
public boolean isTakingVideo() {
return mCameraController.isTakingVideo();
return mCameraEngine.isTakingVideo();
}
@ -1629,34 +1626,18 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
* @return boolean indicating if the camera is capturing a picture
*/
public boolean isTakingPicture() {
return mCameraController.isTakingPicture();
return mCameraEngine.isTakingPicture();
}
//endregion
//region Callbacks and dispatching
interface CameraCallbacks extends OrientationHelper.Callback {
void dispatchOnCameraOpened(CameraOptions options);
void dispatchOnCameraClosed();
void onCameraPreviewStreamSizeChanged();
void onShutter(boolean shouldPlaySound);
void dispatchOnVideoTaken(VideoResult result);
void dispatchOnPictureTaken(PictureResult result);
void dispatchOnFocusStart(@Nullable Gesture trigger, @NonNull PointF where);
void dispatchOnFocusEnd(@Nullable Gesture trigger, boolean success, @NonNull PointF where);
void dispatchOnZoomChanged(final float newValue, @Nullable final PointF[] fingers);
void dispatchOnExposureCorrectionChanged(float newValue, @NonNull float[] bounds, @Nullable PointF[] fingers);
void dispatchFrame(Frame frame);
void dispatchError(CameraException exception);
}
private class Callbacks implements CameraCallbacks {
@VisibleForTesting
class CameraCallbacks implements CameraEngine.Callback, OrientationHelper.Callback {
private CameraLogger mLogger = CameraLogger.create(CameraCallbacks.class.getSimpleName());
Callbacks() {}
@Override
public void dispatchOnCameraOpened(final CameraOptions options) {
mLogger.i("dispatchOnCameraOpened", options);
@ -1707,11 +1688,12 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
}
@Override
public void dispatchOnPictureTaken(final PictureResult result) {
mLogger.i("dispatchOnPictureTaken");
public void dispatchOnPictureTaken(final PictureResult.Stub stub) {
mLogger.i("dispatchOnPictureTaken", stub);
mUiHandler.post(new Runnable() {
@Override
public void run() {
PictureResult result = new PictureResult(stub);
for (CameraListener listener : mListeners) {
listener.onPictureTaken(result);
}
@ -1720,13 +1702,14 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
}
@Override
public void dispatchOnVideoTaken(final VideoResult video) {
mLogger.i("dispatchOnVideoTaken", video);
public void dispatchOnVideoTaken(final VideoResult.Stub stub) {
mLogger.i("dispatchOnVideoTaken", stub);
mUiHandler.post(new Runnable() {
@Override
public void run() {
VideoResult result = new VideoResult(stub);
for (CameraListener listener : mListeners) {
listener.onVideoTaken(video);
listener.onVideoTaken(result);
}
}
});
@ -1775,7 +1758,7 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
@Override
public void onDeviceOrientationChanged(int deviceOrientation) {
mLogger.i("onDeviceOrientationChanged", deviceOrientation);
mCameraController.setDeviceOrientation(deviceOrientation);
mCameraEngine.setDeviceOrientation(deviceOrientation);
int displayOffset = mOrientationHelper.getDisplayOffset();
final int value = (deviceOrientation + displayOffset) % 360;
mUiHandler.post(new Runnable() {
@ -1850,8 +1833,4 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
}
//endregion
//region deprecated APIs
//endregion
}

@ -1,7 +1,5 @@
package com.otaliastudios.cameraview;
import android.graphics.Bitmap;
import java.io.File;
import androidx.annotation.Nullable;

@ -1,19 +0,0 @@
package com.otaliastudios.cameraview;
import android.hardware.Camera;
import android.media.MediaRecorder;
import android.os.Build;
import java.util.HashMap;
abstract class Mapper {
abstract <T> T map(Flash flash);
abstract <T> T map(Facing facing);
abstract <T> T map(WhiteBalance whiteBalance);
abstract <T> T map(Hdr hdr);
abstract <T> Flash unmapFlash(T cameraConstant);
abstract <T> Facing unmapFacing(T cameraConstant);
abstract <T> WhiteBalance unmapWhiteBalance(T cameraConstant);
abstract <T> Hdr unmapHdr(T cameraConstant);
}

@ -1,85 +0,0 @@
package com.otaliastudios.cameraview;
import android.hardware.Camera;
import android.os.Build;
import java.util.HashMap;
@SuppressWarnings("unchecked")
class Mapper1 extends Mapper {
private static final HashMap<Flash, String> FLASH = new HashMap<>();
private static final HashMap<WhiteBalance, String> WB = new HashMap<>();
private static final HashMap<Facing, Integer> FACING = new HashMap<>();
private static final HashMap<Hdr, String> HDR = new HashMap<>();
static {
FLASH.put(Flash.OFF, Camera.Parameters.FLASH_MODE_OFF);
FLASH.put(Flash.ON, Camera.Parameters.FLASH_MODE_ON);
FLASH.put(Flash.AUTO, Camera.Parameters.FLASH_MODE_AUTO);
FLASH.put(Flash.TORCH, Camera.Parameters.FLASH_MODE_TORCH);
FACING.put(Facing.BACK, Camera.CameraInfo.CAMERA_FACING_BACK);
FACING.put(Facing.FRONT, Camera.CameraInfo.CAMERA_FACING_FRONT);
WB.put(WhiteBalance.AUTO, Camera.Parameters.WHITE_BALANCE_AUTO);
WB.put(WhiteBalance.INCANDESCENT, Camera.Parameters.WHITE_BALANCE_INCANDESCENT);
WB.put(WhiteBalance.FLUORESCENT, Camera.Parameters.WHITE_BALANCE_FLUORESCENT);
WB.put(WhiteBalance.DAYLIGHT, Camera.Parameters.WHITE_BALANCE_DAYLIGHT);
WB.put(WhiteBalance.CLOUDY, Camera.Parameters.WHITE_BALANCE_CLOUDY_DAYLIGHT);
HDR.put(Hdr.OFF, Camera.Parameters.SCENE_MODE_AUTO);
if (Build.VERSION.SDK_INT >= 17) {
HDR.put(Hdr.ON, Camera.Parameters.SCENE_MODE_HDR);
} else {
HDR.put(Hdr.ON, "hdr");
}
}
@Override
<T> T map(Flash flash) {
return (T) FLASH.get(flash);
}
@Override
<T> T map(Facing facing) {
return (T) FACING.get(facing);
}
@Override
<T> T map(WhiteBalance whiteBalance) {
return (T) WB.get(whiteBalance);
}
@Override
<T> T map(Hdr hdr) {
return (T) HDR.get(hdr);
}
private <T> T reverseLookup(HashMap<T, ?> map, Object object) {
for (T value : map.keySet()) {
if (map.get(value).equals(object)) {
return value;
}
}
return null;
}
@Override
<T> Flash unmapFlash(T cameraConstant) {
return reverseLookup(FLASH, cameraConstant);
}
@Override
<T> Facing unmapFacing(T cameraConstant) {
return reverseLookup(FACING, cameraConstant);
}
@Override
<T> WhiteBalance unmapWhiteBalance(T cameraConstant) {
return reverseLookup(WB, cameraConstant);
}
@Override
<T> Hdr unmapHdr(T cameraConstant) {
return reverseLookup(HDR, cameraConstant);
}
}
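The mapping contract that these classes implement (now relocated to the engine package), sketched as a round trip; Mapper.get() is the accessor the rest of this diff uses in place of new Mapper1():

    Mapper mapper = Mapper.get();
    String flashMode = mapper.map(Flash.TORCH);      // Camera.Parameters.FLASH_MODE_TORCH
    Flash flash = mapper.unmapFlash(flashMode);      // back to Flash.TORCH
    int facingConstant = mapper.map(Facing.BACK);    // Camera.CameraInfo.CAMERA_FACING_BACK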

@ -1,40 +0,0 @@
package com.otaliastudios.cameraview;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
/**
 * Base class for picture capturing.
 * Don't call take() more than once.
* Don't reuse.
*/
abstract class PictureRecorder {
/* tests */ PictureResult mResult;
/* tests */ PictureResultListener mListener;
PictureRecorder(@NonNull PictureResult stub, @Nullable PictureResultListener listener) {
mResult = stub;
mListener = listener;
}
abstract void take();
@SuppressWarnings("WeakerAccess")
protected void dispatchOnShutter(boolean didPlaySound) {
if (mListener != null) mListener.onPictureShutter(didPlaySound);
}
protected void dispatchResult() {
if (mListener != null) {
mListener.onPictureResult(mResult);
mListener = null;
mResult = null;
}
}
interface PictureResultListener {
void onPictureShutter(boolean didPlaySound);
void onPictureResult(@Nullable PictureResult result);
}
}

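For orientation, a minimal sketch of a recorder built on the contract above. It is purely illustrative (FakePictureRecorder does not exist in the library); the relocated class in the picture package keeps the same shape but works with PictureResult.Stub instead of PictureResult.

    // Hypothetical subclass, mirroring the (removed) base class shown above.
    class FakePictureRecorder extends PictureRecorder {
        FakePictureRecorder(PictureResult stub, PictureResultListener listener) {
            super(stub, listener);
        }

        @Override
        void take() {
            dispatchOnShutter(false);       // false: no shutter sound was played
            mResult.data = new byte[0];     // pretend capture (field is package-visible here)
            dispatchResult();               // forwards mResult to the listener, then clears both fields
        }
    }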
@ -3,6 +3,9 @@ package com.otaliastudios.cameraview;
import android.graphics.BitmapFactory;
import android.location.Location;
import com.otaliastudios.cameraview.controls.Facing;
import com.otaliastudios.cameraview.size.Size;
import java.io.File;
import androidx.annotation.NonNull;
@ -12,20 +15,45 @@ import androidx.annotation.Nullable;
* Wraps the picture captured by {@link CameraView#takePicture()} or
* {@link CameraView#takePictureSnapshot()}.
*/
@SuppressWarnings("unused")
public class PictureResult {
/**
* A result stub, for internal use only.
*/
public static class Stub {
Stub() {}
public boolean isSnapshot;
public Location location;
public int rotation;
public Size size;
public Facing facing;
public byte[] data;
public int format;
}
public final static int FORMAT_JPEG = 0;
// public final static int FORMAT_PNG = 1;
boolean isSnapshot;
Location location;
int rotation;
Size size;
Facing facing;
byte[] data;
int format;
PictureResult() {}
private final boolean isSnapshot;
private final Location location;
private final int rotation;
private final Size size;
private final Facing facing;
private final byte[] data;
private final int format;
PictureResult(@NonNull Stub builder) {
isSnapshot = builder.isSnapshot;
location = builder.location;
rotation = builder.rotation;
size = builder.size;
facing = builder.facing;
data = builder.data;
format = builder.format;
}
/**
* Returns whether this result comes from a snapshot.

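To make the new builder flow concrete, a small hedged sketch (field values are arbitrary). Both the Stub and the PictureResult(Stub) constructors are package-private, so only code in com.otaliastudios.cameraview — in practice CameraView, which hands the stub down to the engine and receives it back through the engine Callback — can run this; jpegBytes is a hypothetical byte array.

    PictureResult.Stub stub = new PictureResult.Stub();
    stub.isSnapshot = false;
    stub.rotation = 90;                               // arbitrary example value
    stub.format = PictureResult.FORMAT_JPEG;
    stub.data = jpegBytes;                            // hypothetical byte[] from the camera
    PictureResult result = new PictureResult(stub);   // copies every field into a final member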
@ -1,40 +0,0 @@
package com.otaliastudios.cameraview;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
/**
* Interface for video recording.
* Don't call start if already started. Don't call stop if already stopped.
* Don't reuse.
*/
abstract class VideoRecorder {
/* tests */ VideoResult mResult;
/* tests */ VideoResultListener mListener;
protected Exception mError;
VideoRecorder(@NonNull VideoResult stub, @Nullable VideoResultListener listener) {
mResult = stub;
mListener = listener;
}
abstract void start();
abstract void stop();
@SuppressWarnings("WeakerAccess")
protected void dispatchResult() {
if (mListener != null) {
mListener.onVideoResult(mResult, mError);
mListener = null;
mResult = null;
mError = null;
}
}
interface VideoResultListener {
void onVideoResult(@Nullable VideoResult result, @Nullable Exception exception);
}
}

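As with pictures, a minimal hedged sketch of the contract above (FakeVideoRecorder is illustrative only; the relocated class in the video package works with VideoResult.Stub):

    class FakeVideoRecorder extends VideoRecorder {
        FakeVideoRecorder(VideoResult stub, VideoResultListener listener) {
            super(stub, listener);
        }

        @Override
        void start() {
            // A real recorder would begin writing to mResult.file here.
        }

        @Override
        void stop() {
            // A real recorder would set mError before dispatching if something failed;
            // dispatchResult() forwards (mResult, mError) and then clears its state.
            dispatchResult();
        }
    }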
@ -1,6 +1,12 @@
package com.otaliastudios.cameraview;
import android.location.Location;
import com.otaliastudios.cameraview.controls.Audio;
import com.otaliastudios.cameraview.controls.Facing;
import com.otaliastudios.cameraview.controls.VideoCodec;
import com.otaliastudios.cameraview.size.Size;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
@ -9,8 +15,32 @@ import java.io.File;
/**
* Wraps the result of a video recording started by {@link CameraView#takeVideo(File)}.
*/
@SuppressWarnings("WeakerAccess")
public class VideoResult {
/**
* A result stub, for internal use only.
*/
public static class Stub {
Stub() {}
public boolean isSnapshot;
public Location location;
public int rotation;
public Size size;
public File file;
public Facing facing;
public VideoCodec videoCodec;
public Audio audio;
public long maxSize;
public int maxDuration;
public int endReason;
public int videoBitRate;
public int videoFrameRate;
public int audioBitRate;
}
@SuppressWarnings({"WeakerAccess", "unused"})
public static final int REASON_USER = 0;
@ -20,22 +50,37 @@ public class VideoResult {
@SuppressWarnings("WeakerAccess")
public static final int REASON_MAX_DURATION_REACHED = 2;
boolean isSnapshot;
Location location;
int rotation;
Size size;
File file;
Facing facing;
VideoCodec codec;
Audio audio;
long maxSize;
int maxDuration;
int endReason;
int videoBitRate;
int videoFrameRate;
int audioBitRate;
VideoResult() {}
private final boolean isSnapshot;
private final Location location;
private final int rotation;
private final Size size;
private final File file;
private final Facing facing;
private final VideoCodec videoCodec;
private final Audio audio;
private final long maxSize;
private final int maxDuration;
private final int endReason;
private final int videoBitRate;
private final int videoFrameRate;
private final int audioBitRate;
VideoResult(@NonNull Stub builder) {
isSnapshot = builder.isSnapshot;
location = builder.location;
rotation = builder.rotation;
size = builder.size;
file = builder.file;
facing = builder.facing;
videoCodec = builder.videoCodec;
audio = builder.audio;
maxSize = builder.maxSize;
maxDuration = builder.maxDuration;
endReason = builder.endReason;
videoBitRate = builder.videoBitRate;
videoFrameRate = builder.videoFrameRate;
audioBitRate = builder.audioBitRate;
}
/**
* Returns whether this result comes from a snapshot.
@ -105,7 +150,7 @@ public class VideoResult {
*/
@NonNull
public VideoCodec getVideoCodec() {
return codec;
return videoCodec;
}
/**

@ -1,6 +1,8 @@
package com.otaliastudios.cameraview;
package com.otaliastudios.cameraview.controls;
import com.otaliastudios.cameraview.CameraView;
import androidx.annotation.Nullable;
/**

@ -1,4 +1,4 @@
package com.otaliastudios.cameraview;
package com.otaliastudios.cameraview.controls;
/**
* Base interface for controls like {@link Audio},

@ -0,0 +1,72 @@
package com.otaliastudios.cameraview.controls;
import android.content.Context;
import android.content.res.TypedArray;
import com.otaliastudios.cameraview.R;
import androidx.annotation.NonNull;
/**
* Parses controls from XML attributes.
*/
public class ControlParser {
private int preview;
private int facing;
private int flash;
private int grid;
private int whiteBalance;
private int mode;
private int hdr;
private int audio;
private int videoCodec;
public ControlParser(@NonNull Context context, @NonNull TypedArray array) {
this.preview = array.getInteger(R.styleable.CameraView_cameraPreview, Preview.DEFAULT.value());
this.facing = array.getInteger(R.styleable.CameraView_cameraFacing, Facing.DEFAULT(context).value());
this.flash = array.getInteger(R.styleable.CameraView_cameraFlash, Flash.DEFAULT.value());
this.grid = array.getInteger(R.styleable.CameraView_cameraGrid, Grid.DEFAULT.value());
this.whiteBalance = array.getInteger(R.styleable.CameraView_cameraWhiteBalance, WhiteBalance.DEFAULT.value());
this.mode = array.getInteger(R.styleable.CameraView_cameraMode, Mode.DEFAULT.value());
this.hdr = array.getInteger(R.styleable.CameraView_cameraHdr, Hdr.DEFAULT.value());
this.audio = array.getInteger(R.styleable.CameraView_cameraAudio, Audio.DEFAULT.value());
this.videoCodec = array.getInteger(R.styleable.CameraView_cameraVideoCodec, VideoCodec.DEFAULT.value());
}
public Preview getPreview() {
return Preview.fromValue(preview);
}
public Facing getFacing() {
return Facing.fromValue(facing);
}
public Flash getFlash() {
return Flash.fromValue(flash);
}
public Grid getGrid() {
return Grid.fromValue(grid);
}
public Mode getMode() {
return Mode.fromValue(mode);
}
public WhiteBalance getWhiteBalance() {
return WhiteBalance.fromValue(whiteBalance);
}
public Hdr getHdr() {
return Hdr.fromValue(hdr);
}
public Audio getAudio() {
return Audio.fromValue(audio);
}
public VideoCodec getVideoCodec() {
return VideoCodec.fromValue(videoCodec);
}
}

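A hedged usage sketch for the parser above. The call site is assumed (it is not part of this diff): CameraView would obtain the styled attributes and pass the TypedArray in; since ControlParser copies the raw integers eagerly in its constructor, the array can be recycled immediately afterwards.

    TypedArray a = context.getTheme().obtainStyledAttributes(attrs, R.styleable.CameraView, 0, 0);
    ControlParser controls = new ControlParser(context, a);
    a.recycle();                               // safe: all values were read in the constructor
    Facing facing = controls.getFacing();      // e.g. Facing.BACK unless overridden in XML
    Preview preview = controls.getPreview();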
@ -1,7 +1,11 @@
package com.otaliastudios.cameraview;
package com.otaliastudios.cameraview.controls;
import android.content.Context;
import com.otaliastudios.cameraview.CameraUtils;
import com.otaliastudios.cameraview.CameraView;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;

@ -1,6 +1,9 @@
package com.otaliastudios.cameraview;
package com.otaliastudios.cameraview.controls;
import com.otaliastudios.cameraview.CameraOptions;
import com.otaliastudios.cameraview.CameraView;
import androidx.annotation.Nullable;
/**

@ -1,6 +1,8 @@
package com.otaliastudios.cameraview;
package com.otaliastudios.cameraview.controls;
import com.otaliastudios.cameraview.CameraView;
import androidx.annotation.Nullable;
/**

@ -1,6 +1,8 @@
package com.otaliastudios.cameraview;
package com.otaliastudios.cameraview.controls;
import com.otaliastudios.cameraview.CameraView;
import androidx.annotation.Nullable;
/**

@ -1,6 +1,8 @@
package com.otaliastudios.cameraview;
package com.otaliastudios.cameraview.controls;
import com.otaliastudios.cameraview.CameraView;
import androidx.annotation.Nullable;
import java.io.File;

@ -1,6 +1,8 @@
package com.otaliastudios.cameraview;
package com.otaliastudios.cameraview.controls;
import com.otaliastudios.cameraview.CameraView;
import androidx.annotation.Nullable;
/**

@ -1,7 +1,8 @@
package com.otaliastudios.cameraview;
package com.otaliastudios.cameraview.controls;
import androidx.annotation.NonNull;
import com.otaliastudios.cameraview.CameraView;
import androidx.annotation.Nullable;
/**

@ -1,6 +1,9 @@
package com.otaliastudios.cameraview;
package com.otaliastudios.cameraview.controls;
import com.otaliastudios.cameraview.CameraOptions;
import com.otaliastudios.cameraview.CameraView;
import androidx.annotation.Nullable;
/**

@ -1,4 +1,4 @@
package com.otaliastudios.cameraview;
package com.otaliastudios.cameraview.engine;
import android.annotation.SuppressLint;
import android.annotation.TargetApi;
@ -14,6 +14,31 @@ import androidx.annotation.Nullable;
import androidx.annotation.WorkerThread;
import android.view.SurfaceHolder;
import com.otaliastudios.cameraview.CameraException;
import com.otaliastudios.cameraview.CameraLogger;
import com.otaliastudios.cameraview.CameraOptions;
import com.otaliastudios.cameraview.frame.Frame;
import com.otaliastudios.cameraview.PictureResult;
import com.otaliastudios.cameraview.VideoResult;
import com.otaliastudios.cameraview.controls.Audio;
import com.otaliastudios.cameraview.controls.Facing;
import com.otaliastudios.cameraview.controls.Flash;
import com.otaliastudios.cameraview.gesture.Gesture;
import com.otaliastudios.cameraview.controls.Hdr;
import com.otaliastudios.cameraview.controls.Mode;
import com.otaliastudios.cameraview.controls.WhiteBalance;
import com.otaliastudios.cameraview.internal.utils.CropHelper;
import com.otaliastudios.cameraview.internal.utils.Task;
import com.otaliastudios.cameraview.picture.FullPictureRecorder;
import com.otaliastudios.cameraview.picture.PictureRecorder;
import com.otaliastudios.cameraview.picture.SnapshotPictureRecorder;
import com.otaliastudios.cameraview.preview.GlCameraPreview;
import com.otaliastudios.cameraview.size.AspectRatio;
import com.otaliastudios.cameraview.size.Size;
import com.otaliastudios.cameraview.video.FullVideoRecorder;
import com.otaliastudios.cameraview.video.SnapshotVideoRecorder;
import com.otaliastudios.cameraview.video.VideoRecorder;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
@ -21,11 +46,11 @@ import java.util.List;
@SuppressWarnings("deprecation")
class Camera1 extends CameraController implements Camera.PreviewCallback, Camera.ErrorCallback,
public class Camera1Engine extends CameraEngine implements Camera.PreviewCallback, Camera.ErrorCallback,
VideoRecorder.VideoResultListener,
PictureRecorder.PictureResultListener {
private static final String TAG = Camera1.class.getSimpleName();
private static final String TAG = Camera1Engine.class.getSimpleName();
private static final CameraLogger LOG = CameraLogger.create(TAG);
private Camera mCamera;
@ -46,9 +71,9 @@ class Camera1 extends CameraController implements Camera.PreviewCallback, Camera
}
};
Camera1(@NonNull CameraView.CameraCallbacks callback) {
public Camera1Engine(@NonNull Callback callback) {
super(callback);
mMapper = new Mapper1();
mMapper = Mapper.get();
}
private void schedule(@Nullable final Task<Void> task, final boolean ensureAvailable, final Runnable action) {
@ -176,14 +201,16 @@ class Camera1 extends CameraController implements Camera.PreviewCallback, Camera
// To be called when the preview size is set up or changed.
private void startPreview(String log) {
LOG.i(log, "Dispatching onCameraPreviewStreamSizeChanged.");
mCameraCallbacks.onCameraPreviewStreamSizeChanged();
mCallback.onCameraPreviewStreamSizeChanged();
Size previewSize = getPreviewStreamSize(REF_VIEW);
boolean wasFlipped = flip(REF_SENSOR, REF_VIEW);
mPreview.setStreamSize(previewSize.getWidth(), previewSize.getHeight(), wasFlipped);
if (previewSize == null) {
throw new IllegalStateException("previewStreamSize should not be null at this point.");
}
mPreview.setStreamSize(previewSize.getWidth(), previewSize.getHeight());
Camera.Parameters params = mCamera.getParameters();
mPreviewFormat = params.getPreviewFormat();
mPreviewStreamFormat = params.getPreviewFormat();
params.setPreviewSize(mPreviewStreamSize.getWidth(), mPreviewStreamSize.getHeight()); // <- not allowed during preview
if (mMode == Mode.PICTURE) {
params.setPictureSize(mCaptureSize.getWidth(), mCaptureSize.getHeight()); // <- allowed
@ -199,7 +226,7 @@ class Camera1 extends CameraController implements Camera.PreviewCallback, Camera
mCamera.setPreviewCallbackWithBuffer(null); // Release anything left
mCamera.setPreviewCallbackWithBuffer(this); // Add ourselves
mFrameManager.allocate(ImageFormat.getBitsPerPixel(mPreviewFormat), mPreviewStreamSize);
mFrameManager.allocateBuffers(ImageFormat.getBitsPerPixel(mPreviewStreamFormat), mPreviewStreamSize);
LOG.i(log, "Starting preview with startPreview().");
try {
@ -212,7 +239,7 @@ class Camera1 extends CameraController implements Camera.PreviewCallback, Camera
}
private void stopPreview() {
mPreviewFormat = 0;
mPreviewStreamFormat = 0;
mFrameManager.release();
mCamera.setPreviewCallbackWithBuffer(null); // Release anything left
try {
@ -236,10 +263,10 @@ class Camera1 extends CameraController implements Camera.PreviewCallback, Camera
Camera.Parameters params = mCamera.getParameters();
mCameraOptions = new CameraOptions(params, flip(REF_SENSOR, REF_VIEW));
applyDefaultFocus(params);
applyFlash(params, Flash.DEFAULT);
applyFlash(params, Flash.OFF);
applyLocation(params, null);
applyWhiteBalance(params, WhiteBalance.DEFAULT);
applyHdr(params, Hdr.DEFAULT);
applyWhiteBalance(params, WhiteBalance.AUTO);
applyHdr(params, Hdr.OFF);
applyPlaySounds(mPlaySounds);
params.setRecordingHint(mMode == Mode.VIDEO);
mCamera.setParameters(params);
@ -260,7 +287,7 @@ class Camera1 extends CameraController implements Camera.PreviewCallback, Camera
@WorkerThread
@Override
void onStart() {
protected void onStart() {
if (isCameraAvailable()) {
LOG.w("onStart:", "Camera not available. Should not happen.");
onStop(); // Should not happen.
@ -278,7 +305,7 @@ class Camera1 extends CameraController implements Camera.PreviewCallback, Camera
@WorkerThread
@Override
void onStop() {
protected void onStop() {
LOG.i("onStop:", "About to clean up.");
mHandler.get().removeCallbacks(mPostFocusResetRunnable);
if (mVideoRecorder != null) {
@ -336,8 +363,8 @@ class Camera1 extends CameraController implements Camera.PreviewCallback, Camera
return;
}
LOG.e("Internal Camera1 error.", error);
Exception runtime = new RuntimeException(CameraLogger.lastMessage);
String message = LOG.e("Internal Camera1 error.", error);
Exception runtime = new RuntimeException(message);
int reason;
switch (error) {
case Camera.CAMERA_ERROR_EVICTED: reason = CameraException.REASON_DISCONNECTED; break;
@ -348,7 +375,7 @@ class Camera1 extends CameraController implements Camera.PreviewCallback, Camera
}
@Override
void setMode(@NonNull Mode mode) {
public void setMode(@NonNull Mode mode) {
if (mode != mMode) {
mMode = mode;
schedule(null, true, new Runnable() {
@ -361,7 +388,7 @@ class Camera1 extends CameraController implements Camera.PreviewCallback, Camera
}
@Override
void setLocation(@Nullable Location location) {
public void setLocation(@Nullable Location location) {
final Location oldLocation = mLocation;
mLocation = location;
schedule(mLocationTask, true, new Runnable() {
@ -373,7 +400,8 @@ class Camera1 extends CameraController implements Camera.PreviewCallback, Camera
});
}
private boolean applyLocation(@NonNull Camera.Parameters params, @Nullable Location oldLocation) {
private boolean applyLocation(@NonNull Camera.Parameters params,
@SuppressWarnings("unused") @Nullable Location oldLocation) {
if (mLocation != null) {
params.setGpsLatitude(mLocation.getLatitude());
params.setGpsLongitude(mLocation.getLongitude());
@ -385,7 +413,7 @@ class Camera1 extends CameraController implements Camera.PreviewCallback, Camera
}
@Override
void setFacing(@NonNull Facing facing) {
public void setFacing(@NonNull Facing facing) {
final Facing old = mFacing;
if (facing != old) {
mFacing = facing;
@ -403,7 +431,7 @@ class Camera1 extends CameraController implements Camera.PreviewCallback, Camera
}
@Override
void setWhiteBalance(@NonNull WhiteBalance whiteBalance) {
public void setWhiteBalance(@NonNull WhiteBalance whiteBalance) {
final WhiteBalance old = mWhiteBalance;
mWhiteBalance = whiteBalance;
schedule(mWhiteBalanceTask, true, new Runnable() {
@ -425,7 +453,7 @@ class Camera1 extends CameraController implements Camera.PreviewCallback, Camera
}
@Override
void setHdr(@NonNull Hdr hdr) {
public void setHdr(@NonNull Hdr hdr) {
final Hdr old = mHdr;
mHdr = hdr;
schedule(mHdrTask, true, new Runnable() {
@ -446,6 +474,7 @@ class Camera1 extends CameraController implements Camera.PreviewCallback, Camera
return false;
}
@SuppressWarnings("UnusedReturnValue")
@TargetApi(17)
private boolean applyPlaySounds(boolean oldPlaySound) {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR1) {
@ -469,7 +498,7 @@ class Camera1 extends CameraController implements Camera.PreviewCallback, Camera
@Override
void setAudio(@NonNull Audio audio) {
public void setAudio(@NonNull Audio audio) {
if (mAudio != audio) {
if (isTakingVideo()) {
LOG.w("Audio setting was changed while recording. " +
@ -480,7 +509,7 @@ class Camera1 extends CameraController implements Camera.PreviewCallback, Camera
}
@Override
void setFlash(@NonNull Flash flash) {
public void setFlash(@NonNull Flash flash) {
final Flash old = mFlash;
mFlash = flash;
schedule(mFlashTask, true, new Runnable() {
@ -536,23 +565,23 @@ class Camera1 extends CameraController implements Camera.PreviewCallback, Camera
@Override
public void onPictureShutter(boolean didPlaySound) {
mCameraCallbacks.onShutter(!didPlaySound);
mCallback.onShutter(!didPlaySound);
}
@Override
public void onPictureResult(@Nullable PictureResult result) {
public void onPictureResult(@Nullable PictureResult.Stub result) {
mPictureRecorder = null;
if (result != null) {
mCameraCallbacks.dispatchOnPictureTaken(result);
mCallback.dispatchOnPictureTaken(result);
} else {
// Something went wrong.
mCameraCallbacks.dispatchError(new CameraException(CameraException.REASON_PICTURE_FAILED));
mCallback.dispatchError(new CameraException(CameraException.REASON_PICTURE_FAILED));
LOG.e("onPictureResult", "result is null: something went wrong.");
}
}
@Override
void takePicture() {
public void takePicture(final @NonNull PictureResult.Stub stub) {
LOG.v("takePicture: scheduling");
schedule(null, true, new Runnable() {
@Override
@ -565,13 +594,12 @@ class Camera1 extends CameraController implements Camera.PreviewCallback, Camera
LOG.v("takePicture: performing.", isTakingPicture());
if (isTakingPicture()) return;
PictureResult result = new PictureResult();
result.isSnapshot = false;
result.location = mLocation;
result.rotation = offset(REF_SENSOR, REF_OUTPUT);
result.size = getPictureSize(REF_OUTPUT);
result.facing = mFacing;
mPictureRecorder = new FullPictureRecorder(result, Camera1.this, mCamera);
stub.isSnapshot = false;
stub.location = mLocation;
stub.rotation = offset(REF_SENSOR, REF_OUTPUT);
stub.size = getPictureSize(REF_OUTPUT);
stub.facing = mFacing;
mPictureRecorder = new FullPictureRecorder(stub, Camera1Engine.this, mCamera);
mPictureRecorder.take();
}
});
@ -582,7 +610,7 @@ class Camera1 extends CameraController implements Camera.PreviewCallback, Camera
* @param viewAspectRatio the view aspect ratio
*/
@Override
void takePictureSnapshot(@NonNull final AspectRatio viewAspectRatio) {
public void takePictureSnapshot(final @NonNull PictureResult.Stub stub, @NonNull final AspectRatio viewAspectRatio) {
LOG.v("takePictureSnapshot: scheduling");
schedule(null, true, new Runnable() {
@Override
@ -590,13 +618,12 @@ class Camera1 extends CameraController implements Camera.PreviewCallback, Camera
LOG.v("takePictureSnapshot: performing.", isTakingPicture());
if (isTakingPicture()) return;
PictureResult result = new PictureResult();
result.location = mLocation;
result.isSnapshot = true;
result.facing = mFacing;
result.size = getUncroppedSnapshotSize(REF_OUTPUT); // Not the real size: it will be cropped to match the view ratio
result.rotation = offset(REF_SENSOR, REF_OUTPUT); // Actually it will be rotated and set to 0.
AspectRatio outputRatio = flip(REF_OUTPUT, REF_VIEW) ? viewAspectRatio.inverse() : viewAspectRatio;
stub.location = mLocation;
stub.isSnapshot = true;
stub.facing = mFacing;
stub.size = getUncroppedSnapshotSize(REF_OUTPUT); // Not the real size: it will be cropped to match the view ratio
stub.rotation = offset(REF_SENSOR, REF_OUTPUT); // Actually it will be rotated and set to 0.
AspectRatio outputRatio = flip(REF_OUTPUT, REF_VIEW) ? viewAspectRatio.flip() : viewAspectRatio;
// LOG.e("ROTBUG_pic", "aspectRatio (REF_VIEW):", viewAspectRatio);
// LOG.e("ROTBUG_pic", "aspectRatio (REF_OUTPUT):", outputRatio);
// LOG.e("ROTBUG_pic", "sizeUncropped (REF_OUTPUT):", result.size);
@ -606,7 +633,7 @@ class Camera1 extends CameraController implements Camera.PreviewCallback, Camera
LOG.v("Rotations", "SO", offset(REF_SENSOR, REF_OUTPUT), "OS", offset(REF_OUTPUT, REF_SENSOR));
LOG.v("Rotations", "VO", offset(REF_VIEW, REF_OUTPUT), "OV", offset(REF_OUTPUT, REF_VIEW));
mPictureRecorder = new SnapshotPictureRecorder(result, Camera1.this, mCamera, outputRatio);
mPictureRecorder = new SnapshotPictureRecorder(stub, Camera1Engine.this, mPreview, mCamera, outputRatio);
mPictureRecorder.take();
}
});
@ -618,8 +645,8 @@ class Camera1 extends CameraController implements Camera.PreviewCallback, Camera
System.currentTimeMillis(),
offset(REF_SENSOR, REF_OUTPUT),
mPreviewStreamSize,
mPreviewFormat);
mCameraCallbacks.dispatchFrame(frame);
mPreviewStreamFormat);
mCallback.dispatchFrame(frame);
}
private boolean isCameraAvailable() {
@ -646,19 +673,19 @@ class Camera1 extends CameraController implements Camera.PreviewCallback, Camera
// Video recording stuff.
@Override
public void onVideoResult(@Nullable VideoResult result, @Nullable Exception exception) {
public void onVideoResult(@Nullable VideoResult.Stub result, @Nullable Exception exception) {
mVideoRecorder = null;
if (result != null) {
mCameraCallbacks.dispatchOnVideoTaken(result);
mCallback.dispatchOnVideoTaken(result);
} else {
// Something went wrong, lock the camera again.
mCameraCallbacks.dispatchError(new CameraException(exception, CameraException.REASON_VIDEO_FAILED));
mCallback.dispatchError(new CameraException(exception, CameraException.REASON_VIDEO_FAILED));
mCamera.lock();
}
}
@Override
void takeVideo(@NonNull final File videoFile) {
public void takeVideo(final @NonNull VideoResult.Stub stub, @NonNull final File videoFile) {
schedule(mStartVideoTask, true, new Runnable() {
@Override
public void run() {
@ -669,19 +696,18 @@ class Camera1 extends CameraController implements Camera.PreviewCallback, Camera
if (isTakingVideo()) return;
// Create the video result stub
VideoResult videoResult = new VideoResult();
videoResult.file = videoFile;
videoResult.isSnapshot = false;
videoResult.codec = mVideoCodec;
videoResult.location = mLocation;
videoResult.facing = mFacing;
videoResult.rotation = offset(REF_SENSOR, REF_OUTPUT);
videoResult.size = flip(REF_SENSOR, REF_OUTPUT) ? mCaptureSize.flip() : mCaptureSize;
videoResult.audio = mAudio;
videoResult.maxSize = mVideoMaxSize;
videoResult.maxDuration = mVideoMaxDuration;
videoResult.videoBitRate = mVideoBitRate;
videoResult.audioBitRate = mAudioBitRate;
stub.file = videoFile;
stub.isSnapshot = false;
stub.videoCodec = mVideoCodec;
stub.location = mLocation;
stub.facing = mFacing;
stub.rotation = offset(REF_SENSOR, REF_OUTPUT);
stub.size = flip(REF_SENSOR, REF_OUTPUT) ? mCaptureSize.flip() : mCaptureSize;
stub.audio = mAudio;
stub.maxSize = mVideoMaxSize;
stub.maxDuration = mVideoMaxDuration;
stub.videoBitRate = mVideoBitRate;
stub.audioBitRate = mAudioBitRate;
// Unlock the camera and start recording.
try {
@ -692,8 +718,8 @@ class Camera1 extends CameraController implements Camera.PreviewCallback, Camera
onVideoResult(null, e);
return;
}
mVideoRecorder = new FullVideoRecorder(videoResult, Camera1.this,
Camera1.this, mCamera, mCameraId);
mVideoRecorder = new FullVideoRecorder(stub, Camera1Engine.this,
Camera1Engine.this, mCamera, mCameraId);
mVideoRecorder.start();
}
});
@ -705,7 +731,7 @@ class Camera1 extends CameraController implements Camera.PreviewCallback, Camera
*/
@SuppressLint("NewApi")
@Override
void takeVideoSnapshot(@NonNull final File file, @NonNull final AspectRatio viewAspectRatio) {
public void takeVideoSnapshot(final @NonNull VideoResult.Stub stub, @NonNull final File file, @NonNull final AspectRatio viewAspectRatio) {
if (!(mPreview instanceof GlCameraPreview)) {
throw new IllegalStateException("Video snapshots are only supported with GlCameraPreview.");
}
@ -718,17 +744,16 @@ class Camera1 extends CameraController implements Camera.PreviewCallback, Camera
if (isTakingVideo()) return;
// Create the video result stub
VideoResult videoResult = new VideoResult();
videoResult.file = file;
videoResult.isSnapshot = true;
videoResult.codec = mVideoCodec;
videoResult.location = mLocation;
videoResult.facing = mFacing;
videoResult.videoBitRate = mVideoBitRate;
videoResult.audioBitRate = mAudioBitRate;
videoResult.audio = mAudio;
videoResult.maxSize = mVideoMaxSize;
videoResult.maxDuration = mVideoMaxDuration;
stub.file = file;
stub.isSnapshot = true;
stub.videoCodec = mVideoCodec;
stub.location = mLocation;
stub.facing = mFacing;
stub.videoBitRate = mVideoBitRate;
stub.audioBitRate = mAudioBitRate;
stub.audio = mAudio;
stub.maxSize = mVideoMaxSize;
stub.maxDuration = mVideoMaxDuration;
// Size and rotation turned out to be extremely tricky. In case of SnapshotPictureRecorder
// we use the preview size in REF_OUTPUT (cropped) and offset(REF_SENSOR, REF_OUTPUT) as rotation.
@ -766,11 +791,14 @@ class Camera1 extends CameraController implements Camera.PreviewCallback, Camera
// and maybe we can improve. The reason why this happens is beyond my understanding.
Size outputSize = getUncroppedSnapshotSize(REF_OUTPUT);
AspectRatio outputRatio = flip(REF_OUTPUT, REF_VIEW) ? viewAspectRatio.inverse() : viewAspectRatio;
if (outputSize == null) {
throw new IllegalStateException("outputSize should not be null.");
}
AspectRatio outputRatio = flip(REF_OUTPUT, REF_VIEW) ? viewAspectRatio.flip() : viewAspectRatio;
Rect outputCrop = CropHelper.computeCrop(outputSize, outputRatio);
outputSize = new Size(outputCrop.width(), outputCrop.height());
videoResult.size = outputSize;
videoResult.rotation = offset(REF_VIEW, REF_OUTPUT);
stub.size = outputSize;
stub.rotation = offset(REF_VIEW, REF_OUTPUT);
// LOG.e("ROTBUG_video", "aspectRatio (REF_VIEW):", viewAspectRatio);
// LOG.e("ROTBUG_video", "aspectRatio (REF_OUTPUT):", outputRatio);
// LOG.e("ROTBUG_video", "sizeUncropped (REF_OUTPUT):", outputSize);
@ -780,14 +808,15 @@ class Camera1 extends CameraController implements Camera.PreviewCallback, Camera
// Reset facing and start.
mFacing = realFacing;
GlCameraPreview cameraPreview = (GlCameraPreview) mPreview;
mVideoRecorder = new SnapshotVideoRecorder(videoResult, Camera1.this, cameraPreview);
mVideoRecorder = new SnapshotVideoRecorder(stub,
Camera1Engine.this, Camera1Engine.this, cameraPreview);
mVideoRecorder.start();
}
});
}
@Override
void stopVideo() {
public void stopVideo() {
schedule(null, false, new Runnable() {
@Override
public void run() {
@ -805,7 +834,7 @@ class Camera1 extends CameraController implements Camera.PreviewCallback, Camera
@Override
void setZoom(final float zoom, @Nullable final PointF[] points, final boolean notify) {
public void setZoom(final float zoom, @Nullable final PointF[] points, final boolean notify) {
schedule(mZoomTask, true, new Runnable() {
@Override
public void run() {
@ -818,14 +847,14 @@ class Camera1 extends CameraController implements Camera.PreviewCallback, Camera
mCamera.setParameters(params);
if (notify) {
mCameraCallbacks.dispatchOnZoomChanged(zoom, points);
mCallback.dispatchOnZoomChanged(zoom, points);
}
}
});
}
@Override
void setExposureCorrection(final float EVvalue, @NonNull final float[] bounds,
public void setExposureCorrection(final float EVvalue, @NonNull final float[] bounds,
@Nullable final PointF[] points, final boolean notify) {
schedule(mExposureCorrectionTask, true, new Runnable() {
@Override
@ -843,7 +872,7 @@ class Camera1 extends CameraController implements Camera.PreviewCallback, Camera
mCamera.setParameters(params);
if (notify) {
mCameraCallbacks.dispatchOnExposureCorrectionChanged(value, bounds, points);
mCallback.dispatchOnExposureCorrectionChanged(value, bounds, points);
}
}
});
@ -854,7 +883,7 @@ class Camera1 extends CameraController implements Camera.PreviewCallback, Camera
@Override
void startAutoFocus(@Nullable final Gesture gesture, @NonNull final PointF point) {
public void startAutoFocus(@Nullable final Gesture gesture, @NonNull final PointF point) {
// Must get width and height from the UI thread.
int viewWidth = 0, viewHeight = 0;
if (mPreview != null && mPreview.hasSurface()) {
@ -881,14 +910,14 @@ class Camera1 extends CameraController implements Camera.PreviewCallback, Camera
if (maxAE > 0) params.setMeteringAreas(maxAE > 1 ? meteringAreas2 : meteringAreas1);
params.setFocusMode(Camera.Parameters.FOCUS_MODE_AUTO);
mCamera.setParameters(params);
mCameraCallbacks.dispatchOnFocusStart(gesture, p);
mCallback.dispatchOnFocusStart(gesture, p);
// TODO this is not guaranteed to be called... Fix.
try {
mCamera.autoFocus(new Camera.AutoFocusCallback() {
@Override
public void onAutoFocus(boolean success, Camera camera) {
// TODO lock auto exposure and white balance for a while
mCameraCallbacks.dispatchOnFocusEnd(gesture, success, p);
mCallback.dispatchOnFocusEnd(gesture, success, p);
mHandler.get().removeCallbacks(mPostFocusResetRunnable);
if (shouldResetAutoFocus()) {
mHandler.get().postDelayed(mPostFocusResetRunnable, getAutoFocusResetDelay());
@ -899,7 +928,7 @@ class Camera1 extends CameraController implements Camera.PreviewCallback, Camera
// Handling random auto-focus exception on some devices
// See https://github.com/natario1/CameraView/issues/181
LOG.e("startAutoFocus:", "Error calling autoFocus", e);
mCameraCallbacks.dispatchOnFocusEnd(gesture, false, p);
mCallback.dispatchOnFocusEnd(gesture, false, p);
}
}
});
@ -952,9 +981,8 @@ class Camera1 extends CameraController implements Camera.PreviewCallback, Camera
// Size stuff.
@Nullable
private List<Size> sizesFromList(@Nullable List<Camera.Size> sizes) {
if (sizes == null) return null;
@NonNull
private List<Size> sizesFromList(@NonNull List<Camera.Size> sizes) {
List<Size> result = new ArrayList<>(sizes.size());
for (Camera.Size size : sizes) {
Size add = new Size(size.width, size.height);
@ -965,7 +993,7 @@ class Camera1 extends CameraController implements Camera.PreviewCallback, Camera
}
@Override
void setPlaySounds(boolean playSounds) {
public void setPlaySounds(boolean playSounds) {
final boolean old = mPlaySounds;
mPlaySounds = playSounds;
schedule(mPlaySoundsTask, true, new Runnable() {

@ -1,4 +1,4 @@
package com.otaliastudios.cameraview;
package com.otaliastudios.cameraview.engine;
import android.graphics.PointF;
import android.location.Location;
@ -6,6 +6,32 @@ import android.location.Location;
import android.os.Handler;
import android.os.Looper;
import com.otaliastudios.cameraview.CameraException;
import com.otaliastudios.cameraview.CameraLogger;
import com.otaliastudios.cameraview.CameraOptions;
import com.otaliastudios.cameraview.PictureResult;
import com.otaliastudios.cameraview.VideoResult;
import com.otaliastudios.cameraview.frame.Frame;
import com.otaliastudios.cameraview.frame.FrameManager;
import com.otaliastudios.cameraview.internal.utils.Task;
import com.otaliastudios.cameraview.internal.utils.WorkerHandler;
import com.otaliastudios.cameraview.picture.PictureRecorder;
import com.otaliastudios.cameraview.preview.CameraPreview;
import com.otaliastudios.cameraview.controls.Audio;
import com.otaliastudios.cameraview.controls.Facing;
import com.otaliastudios.cameraview.controls.Flash;
import com.otaliastudios.cameraview.gesture.Gesture;
import com.otaliastudios.cameraview.controls.Hdr;
import com.otaliastudios.cameraview.controls.Mode;
import com.otaliastudios.cameraview.controls.VideoCodec;
import com.otaliastudios.cameraview.controls.WhiteBalance;
import com.otaliastudios.cameraview.size.AspectRatio;
import com.otaliastudios.cameraview.size.Size;
import com.otaliastudios.cameraview.size.SizeSelector;
import com.otaliastudios.cameraview.size.SizeSelectors;
import com.otaliastudios.cameraview.video.VideoRecorder;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.annotation.VisibleForTesting;
@ -16,27 +42,43 @@ import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
abstract class CameraController implements
public abstract class CameraEngine implements
CameraPreview.SurfaceCallback,
FrameManager.BufferCallback,
Thread.UncaughtExceptionHandler {
private static final String TAG = CameraController.class.getSimpleName();
public interface Callback {
void dispatchOnCameraOpened(CameraOptions options);
void dispatchOnCameraClosed();
void onCameraPreviewStreamSizeChanged();
void onShutter(boolean shouldPlaySound);
void dispatchOnVideoTaken(VideoResult.Stub stub);
void dispatchOnPictureTaken(PictureResult.Stub stub);
void dispatchOnFocusStart(@Nullable Gesture trigger, @NonNull PointF where);
void dispatchOnFocusEnd(@Nullable Gesture trigger, boolean success, @NonNull PointF where);
void dispatchOnZoomChanged(final float newValue, @Nullable final PointF[] fingers);
void dispatchOnExposureCorrectionChanged(float newValue, @NonNull float[] bounds, @Nullable PointF[] fingers);
void dispatchFrame(Frame frame);
void dispatchError(CameraException exception);
}
private static final String TAG = CameraEngine.class.getSimpleName();
private static final CameraLogger LOG = CameraLogger.create(TAG);
static final int STATE_STOPPING = -1; // Camera is about to be stopped.
static final int STATE_STOPPED = 0; // Camera is stopped.
public static final int STATE_STOPPED = 0; // Camera is stopped.
static final int STATE_STARTING = 1; // Camera is about to start.
static final int STATE_STARTED = 2; // Camera is available and we can set parameters.
public static final int STATE_STARTED = 2; // Camera is available and we can set parameters.
static final int REF_SENSOR = 0;
static final int REF_VIEW = 1;
static final int REF_OUTPUT = 2;
public static final int REF_SENSOR = 0;
public static final int REF_VIEW = 1;
public static final int REF_OUTPUT = 2;
protected final CameraView.CameraCallbacks mCameraCallbacks;
protected final Callback mCallback;
protected final FrameManager mFrameManager;
protected CameraPreview mPreview;
protected WorkerHandler mHandler;
/* for tests */ Handler mCrashHandler;
@VisibleForTesting Handler mCrashHandler;
protected Facing mFacing;
protected Flash mFlash;
@ -54,15 +96,12 @@ abstract class CameraController implements
private SizeSelector mPictureSizeSelector;
private SizeSelector mVideoSizeSelector;
@VisibleForTesting(otherwise = VisibleForTesting.PRIVATE)
int mSnapshotMaxWidth = Integer.MAX_VALUE; // in REF_VIEW for consistency with SizeSelectors
@VisibleForTesting(otherwise = VisibleForTesting.PRIVATE)
int mSnapshotMaxHeight = Integer.MAX_VALUE; // in REF_VIEW for consistency with SizeSelectors
@VisibleForTesting int mSnapshotMaxWidth = Integer.MAX_VALUE; // in REF_VIEW for consistency with SizeSelectors
@VisibleForTesting int mSnapshotMaxHeight = Integer.MAX_VALUE; // in REF_VIEW for consistency with SizeSelectors
protected int mCameraId;
protected CameraOptions mCameraOptions;
protected Mapper mMapper;
protected FrameManager mFrameManager;
protected PictureRecorder mPictureRecorder;
protected VideoRecorder mVideoRecorder;
protected long mVideoMaxSize;
@ -71,7 +110,7 @@ abstract class CameraController implements
protected int mAudioBitRate;
protected Size mCaptureSize;
protected Size mPreviewStreamSize;
protected int mPreviewFormat;
protected int mPreviewStreamFormat;
protected long mAutoFocusResetDelayMillis;
protected int mSensorOffset;
@ -81,24 +120,24 @@ abstract class CameraController implements
protected int mState = STATE_STOPPED;
// Used for testing.
Task<Void> mZoomTask = new Task<>();
Task<Void> mExposureCorrectionTask = new Task<>();
Task<Void> mFlashTask = new Task<>();
Task<Void> mWhiteBalanceTask = new Task<>();
Task<Void> mHdrTask = new Task<>();
Task<Void> mLocationTask = new Task<>();
Task<Void> mStartVideoTask = new Task<>();
Task<Void> mPlaySoundsTask = new Task<>();
CameraController(CameraView.CameraCallbacks callback) {
mCameraCallbacks = callback;
@VisibleForTesting(otherwise = VisibleForTesting.PROTECTED) Task<Void> mZoomTask = new Task<>();
@VisibleForTesting(otherwise = VisibleForTesting.PROTECTED) Task<Void> mExposureCorrectionTask = new Task<>();
@VisibleForTesting(otherwise = VisibleForTesting.PROTECTED) Task<Void> mFlashTask = new Task<>();
@VisibleForTesting(otherwise = VisibleForTesting.PROTECTED) Task<Void> mWhiteBalanceTask = new Task<>();
@VisibleForTesting(otherwise = VisibleForTesting.PROTECTED) Task<Void> mHdrTask = new Task<>();
@VisibleForTesting(otherwise = VisibleForTesting.PROTECTED) Task<Void> mLocationTask = new Task<>();
@VisibleForTesting(otherwise = VisibleForTesting.PROTECTED) Task<Void> mStartVideoTask = new Task<>();
@VisibleForTesting(otherwise = VisibleForTesting.PROTECTED) Task<Void> mPlaySoundsTask = new Task<>();
protected CameraEngine(Callback callback) {
mCallback = callback;
mCrashHandler = new Handler(Looper.getMainLooper());
mHandler = WorkerHandler.get("CameraViewController");
mHandler.getThread().setUncaughtExceptionHandler(this);
mFrameManager = new FrameManager(2, this);
}
void setPreview(@NonNull CameraPreview cameraPreview) {
public void setPreview(@NonNull CameraPreview cameraPreview) {
mPreview = cameraPreview;
mPreview.setSurfaceCallback(this);
}
@ -146,7 +185,7 @@ abstract class CameraController implements
@Override
public void run() {
stopImmediately();
mCameraCallbacks.dispatchError(error);
mCallback.dispatchError(error);
}
});
}
@ -155,7 +194,7 @@ abstract class CameraController implements
// Public & not final so we can verify with mockito in CameraViewTest
public void destroy() {
LOG.i("destroy:", "state:", ss());
// Prevent CameraController leaks. Don't set to null, or exceptions
// Prevent CameraEngine leaks. Don't set to null, or exceptions
// inside the standard stop() method might crash the main thread.
mHandler.getThread().setUncaughtExceptionHandler(new NoOpExceptionHandler());
// Stop if needed.
@ -178,7 +217,7 @@ abstract class CameraController implements
}
// Starts the preview asynchronously.
final void start() {
public final void start() {
LOG.i("Start:", "posting runnable. State:", ss());
mHandler.post(new Runnable() {
@Override
@ -190,7 +229,7 @@ abstract class CameraController implements
onStart();
LOG.i("Start:", "returned from onStart().", "Dispatching.", ss());
mState = STATE_STARTED;
mCameraCallbacks.dispatchOnCameraOpened(mCameraOptions);
mCallback.dispatchOnCameraOpened(mCameraOptions);
}
});
}
@ -209,7 +248,7 @@ abstract class CameraController implements
onStop();
LOG.i("Stop:", "returned from onStop().", "Dispatching.");
mState = STATE_STOPPED;
mCameraCallbacks.dispatchOnCameraClosed();
mCallback.dispatchOnCameraClosed();
}
});
}
@ -245,7 +284,7 @@ abstract class CameraController implements
onStop();
mState = STATE_STOPPED;
LOG.i("Restart:", "stopped. Dispatching.", ss());
mCameraCallbacks.dispatchOnCameraClosed();
mCallback.dispatchOnCameraClosed();
}
LOG.i("Restart: about to start. State:", ss());
@ -253,7 +292,7 @@ abstract class CameraController implements
onStart();
mState = STATE_STARTED;
LOG.i("Restart: returned from start. Dispatching. State:", ss());
mCameraCallbacks.dispatchOnCameraOpened(mCameraOptions);
mCallback.dispatchOnCameraOpened(mCameraOptions);
}
});
}
@ -261,14 +300,14 @@ abstract class CameraController implements
// Starts the preview.
// At the end of this method camera must be available, e.g. for setting parameters.
@WorkerThread
abstract void onStart();
protected abstract void onStart();
// Stops the preview.
@WorkerThread
abstract void onStop();
protected abstract void onStop();
// Returns current state.
final int getState() {
public final int getState() {
return mState;
}
@ -277,198 +316,213 @@ abstract class CameraController implements
//region Simple setters
// This is called before start() and never again.
final void setDisplayOffset(int displayOffset) {
public final void setDisplayOffset(int displayOffset) {
mDisplayOffset = displayOffset;
}
// This can be called multiple times.
final void setDeviceOrientation(int deviceOrientation) {
public final void setDeviceOrientation(int deviceOrientation) {
mDeviceOrientation = deviceOrientation;
}
final void setPreviewStreamSizeSelector(@Nullable SizeSelector selector) {
public final void setPreviewStreamSizeSelector(@Nullable SizeSelector selector) {
mPreviewStreamSizeSelector = selector;
}
final void setPictureSizeSelector(@NonNull SizeSelector selector) {
public final void setPictureSizeSelector(@NonNull SizeSelector selector) {
mPictureSizeSelector = selector;
}
final void setVideoSizeSelector(@NonNull SizeSelector selector) {
public final void setVideoSizeSelector(@NonNull SizeSelector selector) {
mVideoSizeSelector = selector;
}
final void setVideoMaxSize(long videoMaxSizeBytes) {
public final void setVideoMaxSize(long videoMaxSizeBytes) {
mVideoMaxSize = videoMaxSizeBytes;
}
final void setVideoMaxDuration(int videoMaxDurationMillis) {
public final void setVideoMaxDuration(int videoMaxDurationMillis) {
mVideoMaxDuration = videoMaxDurationMillis;
}
final void setVideoCodec(@NonNull VideoCodec codec) {
public final void setVideoCodec(@NonNull VideoCodec codec) {
mVideoCodec = codec;
}
final void setVideoBitRate(int videoBitRate) {
public final void setVideoBitRate(int videoBitRate) {
mVideoBitRate = videoBitRate;
}
final void setAudioBitRate(int audioBitRate) {
public final void setAudioBitRate(int audioBitRate) {
mAudioBitRate = audioBitRate;
}
final void setSnapshotMaxWidth(int maxWidth) {
public final void setSnapshotMaxWidth(int maxWidth) {
mSnapshotMaxWidth = maxWidth;
}
final void setSnapshotMaxHeight(int maxHeight) {
public final void setSnapshotMaxHeight(int maxHeight) {
mSnapshotMaxHeight = maxHeight;
}
final void setAutoFocusResetDelay(long delayMillis) { mAutoFocusResetDelayMillis = delayMillis; }
public final void setAutoFocusResetDelay(long delayMillis) { mAutoFocusResetDelayMillis = delayMillis; }
//endregion
//region Abstract setters and APIs
// Should restart the session if active.
abstract void setMode(@NonNull Mode mode);
public abstract void setMode(@NonNull Mode mode);
// Should restart the session if active.
abstract void setFacing(@NonNull Facing facing);
public abstract void setFacing(@NonNull Facing facing);
// If closed, no-op. If opened, check supported and apply.
abstract void setZoom(float zoom, @Nullable PointF[] points, boolean notify);
public abstract void setZoom(float zoom, @Nullable PointF[] points, boolean notify);
// If closed, no-op. If opened, check supported and apply.
abstract void setExposureCorrection(float EVvalue, @NonNull float[] bounds, @Nullable PointF[] points, boolean notify);
public abstract void setExposureCorrection(float EVvalue, @NonNull float[] bounds, @Nullable PointF[] points, boolean notify);
// If closed, keep. If opened, check supported and apply.
abstract void setFlash(@NonNull Flash flash);
public abstract void setFlash(@NonNull Flash flash);
// If closed, keep. If opened, check supported and apply.
abstract void setWhiteBalance(@NonNull WhiteBalance whiteBalance);
public abstract void setWhiteBalance(@NonNull WhiteBalance whiteBalance);
// If closed, keep. If opened, check supported and apply.
abstract void setHdr(@NonNull Hdr hdr);
public abstract void setHdr(@NonNull Hdr hdr);
// If closed, keep. If opened, check supported and apply.
abstract void setLocation(@Nullable Location location);
public abstract void setLocation(@Nullable Location location);
// Just set.
abstract void setAudio(@NonNull Audio audio);
public abstract void setAudio(@NonNull Audio audio);
abstract void takePicture();
public abstract void takePicture(@NonNull PictureResult.Stub stub);
abstract void takePictureSnapshot(@NonNull AspectRatio viewAspectRatio);
public abstract void takePictureSnapshot(@NonNull PictureResult.Stub stub, @NonNull AspectRatio viewAspectRatio);
abstract void takeVideo(@NonNull File file);
public abstract void takeVideo(@NonNull VideoResult.Stub stub, @NonNull File file);
abstract void takeVideoSnapshot(@NonNull File file, @NonNull AspectRatio viewAspectRatio);
public abstract void takeVideoSnapshot(@NonNull VideoResult.Stub stub, @NonNull File file, @NonNull AspectRatio viewAspectRatio);
abstract void stopVideo();
public abstract void stopVideo();
abstract void startAutoFocus(@Nullable Gesture gesture, @NonNull PointF point);
public abstract void startAutoFocus(@Nullable Gesture gesture, @NonNull PointF point);
abstract void setPlaySounds(boolean playSounds);
public abstract void setPlaySounds(boolean playSounds);
//endregion
//region final getters
@NonNull
public final FrameManager getFrameManager() {
return mFrameManager;
}
public final int getPreviewStreamFormat() {
return mPreviewStreamFormat;
}
@Nullable
final CameraOptions getCameraOptions() {
public final CameraOptions getCameraOptions() {
return mCameraOptions;
}
@NonNull
final Facing getFacing() {
public final Facing getFacing() {
return mFacing;
}
@NonNull
final Flash getFlash() {
public final Flash getFlash() {
return mFlash;
}
@NonNull
final WhiteBalance getWhiteBalance() {
public final WhiteBalance getWhiteBalance() {
return mWhiteBalance;
}
final VideoCodec getVideoCodec() {
public final VideoCodec getVideoCodec() {
return mVideoCodec;
}
final int getVideoBitRate() {
public final int getVideoBitRate() {
return mVideoBitRate;
}
final long getVideoMaxSize() {
public final long getVideoMaxSize() {
return mVideoMaxSize;
}
final int getVideoMaxDuration() {
public final int getVideoMaxDuration() {
return mVideoMaxDuration;
}
@NonNull
final Mode getMode() {
public final Mode getMode() {
return mMode;
}
@NonNull
final Hdr getHdr() {
public final Hdr getHdr() {
return mHdr;
}
@Nullable
final Location getLocation() {
public final Location getLocation() {
return mLocation;
}
@NonNull
final Audio getAudio() {
public final Audio getAudio() {
return mAudio;
}
final int getAudioBitRate() {
public final int getAudioBitRate() {
return mAudioBitRate;
}
@SuppressWarnings("unused")
@Nullable
/* for tests */ final SizeSelector getPreviewStreamSizeSelector() {
@VisibleForTesting
final SizeSelector getPreviewStreamSizeSelector() {
return mPreviewStreamSizeSelector;
}
@SuppressWarnings("unused")
@NonNull
/* for tests */ final SizeSelector getPictureSizeSelector() {
@VisibleForTesting
final SizeSelector getPictureSizeSelector() {
return mPictureSizeSelector;
}
@SuppressWarnings("unused")
@NonNull
/* for tests */ final SizeSelector getVideoSizeSelector() {
@VisibleForTesting
final SizeSelector getVideoSizeSelector() {
return mVideoSizeSelector;
}
final float getZoomValue() {
public final float getZoomValue() {
return mZoomValue;
}
final float getExposureCorrectionValue() {
public final float getExposureCorrectionValue() {
return mExposureCorrectionValue;
}
final boolean isTakingVideo() {
public final boolean isTakingVideo() {
return mVideoRecorder != null;
}
final boolean isTakingPicture() {
public final boolean isTakingPicture() {
return mPictureRecorder != null;
}
final long getAutoFocusResetDelay() { return mAutoFocusResetDelayMillis; }
public final long getAutoFocusResetDelay() { return mAutoFocusResetDelayMillis; }
final boolean shouldResetAutoFocus() {
return mAutoFocusResetDelayMillis > 0 && mAutoFocusResetDelayMillis != Long.MAX_VALUE;
@ -512,24 +566,24 @@ abstract class CameraController implements
return (offset(REF_SENSOR, toReference) - offset(REF_SENSOR, fromReference) + 360) % 360;
}
final boolean flip(int reference1, int reference2) {
public final boolean flip(int reference1, int reference2) {
return offset(reference1, reference2) % 180 != 0;
}
@Nullable
final Size getPictureSize(@SuppressWarnings("SameParameterValue") int reference) {
public final Size getPictureSize(@SuppressWarnings("SameParameterValue") int reference) {
if (mCaptureSize == null || mMode == Mode.VIDEO) return null;
return flip(REF_SENSOR, reference) ? mCaptureSize.flip() : mCaptureSize;
}
@Nullable
final Size getVideoSize(@SuppressWarnings("SameParameterValue") int reference) {
public final Size getVideoSize(@SuppressWarnings("SameParameterValue") int reference) {
if (mCaptureSize == null || mMode == Mode.PICTURE) return null;
return flip(REF_SENSOR, reference) ? mCaptureSize.flip() : mCaptureSize;
}
@Nullable
final Size getPreviewStreamSize(int reference) {
public final Size getPreviewStreamSize(int reference) {
if (mPreviewStreamSize == null) return null;
return flip(REF_SENSOR, reference) ? mPreviewStreamSize.flip() : mPreviewStreamSize;
}
@ -562,7 +616,7 @@ abstract class CameraController implements
* apply, despite the capturing mechanism being different.
*/
@Nullable
final Size getUncroppedSnapshotSize(int reference) {
public final Size getUncroppedSnapshotSize(int reference) {
Size baseSize = getPreviewStreamSize(reference);
if (baseSize == null) return null;
boolean flip = flip(reference, REF_VIEW);
@ -618,6 +672,9 @@ abstract class CameraController implements
selector = SizeSelectors.or(selector, SizeSelectors.biggest());
List<Size> list = new ArrayList<>(sizes);
Size result = selector.select(list).get(0);
if (!list.contains(result)) {
throw new RuntimeException("SizeSelectors must not return Sizes other than those in the input list.");
}
LOG.i("computeCaptureSize:", "result:", result, "flip:", flip, "mode:", mode);
if (flip) result = result.flip(); // Go back to REF_SENSOR
return result;
@ -637,8 +694,9 @@ abstract class CameraController implements
// Create our own default selector, which will be used if the external mPreviewStreamSizeSelector
// is null, or if it fails in finding a size.
Size targetMinSize = getPreviewSurfaceSize(REF_VIEW);
if (targetMinSize == null) throw new IllegalStateException("targetMinSize should not be null here.");
AspectRatio targetRatio = AspectRatio.of(mCaptureSize.getWidth(), mCaptureSize.getHeight());
if (flip) targetRatio = targetRatio.inverse();
if (flip) targetRatio = targetRatio.flip();
LOG.i("size:", "computePreviewStreamSize:", "targetRatio:", targetRatio, "targetMinSize:", targetMinSize);
SizeSelector matchRatio = SizeSelectors.and( // Match this aspect ratio and sort by biggest
SizeSelectors.aspectRatio(targetRatio, 0),
@ -663,6 +721,9 @@ abstract class CameraController implements
selector = matchAll;
}
Size result = selector.select(sizes).get(0);
if (!sizes.contains(result)) {
throw new RuntimeException("SizeSelectors must not return Sizes other than those in the input list.");
}
if (flip) result = result.flip();
LOG.i("computePreviewStreamSize:", "result:", result, "flip:", flip);
return result;

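Since the Callback interface above is the new seam between CameraView and the engine (replacing CameraView.CameraCallbacks), a no-op implementation is enough to drive an engine in isolation. This sketch is illustrative only, e.g. as a test double:

    CameraEngine.Callback callback = new CameraEngine.Callback() {
        @Override public void dispatchOnCameraOpened(CameraOptions options) {}
        @Override public void dispatchOnCameraClosed() {}
        @Override public void onCameraPreviewStreamSizeChanged() {}
        @Override public void onShutter(boolean shouldPlaySound) {}
        @Override public void dispatchOnVideoTaken(VideoResult.Stub stub) {}
        @Override public void dispatchOnPictureTaken(PictureResult.Stub stub) {}
        @Override public void dispatchOnFocusStart(Gesture trigger, PointF where) {}
        @Override public void dispatchOnFocusEnd(Gesture trigger, boolean success, PointF where) {}
        @Override public void dispatchOnZoomChanged(float newValue, PointF[] fingers) {}
        @Override public void dispatchOnExposureCorrectionChanged(float newValue, float[] bounds, PointF[] fingers) {}
        @Override public void dispatchFrame(Frame frame) {}
        @Override public void dispatchError(CameraException exception) {}
    };
    CameraEngine engine = new Camera1Engine(callback);   // constructor shown in the Camera1Engine diff above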
@ -0,0 +1,123 @@
package com.otaliastudios.cameraview.engine;
import android.hardware.Camera;
import android.os.Build;
import com.otaliastudios.cameraview.controls.Facing;
import com.otaliastudios.cameraview.controls.Flash;
import com.otaliastudios.cameraview.controls.Hdr;
import com.otaliastudios.cameraview.controls.WhiteBalance;
import java.util.HashMap;
/**
 * A Mapper maps CameraView constants (like {@link Flash} or {@link Facing}) to camera engine constants, and back.
*/
public abstract class Mapper {
private Mapper() {}
public abstract <T> T map(Flash flash);
public abstract <T> T map(Facing facing);
public abstract <T> T map(WhiteBalance whiteBalance);
public abstract <T> T map(Hdr hdr);
public abstract <T> Flash unmapFlash(T cameraConstant);
public abstract <T> Facing unmapFacing(T cameraConstant);
public abstract <T> WhiteBalance unmapWhiteBalance(T cameraConstant);
public abstract <T> Hdr unmapHdr(T cameraConstant);
@SuppressWarnings("WeakerAccess")
protected <T> T reverseLookup(HashMap<T, ?> map, Object object) {
for (T value : map.keySet()) {
if (object.equals(map.get(value))) {
return value;
}
}
return null;
}
private static Mapper CAMERA1;
public static Mapper get() {
if (CAMERA1 == null) {
CAMERA1 = new Camera1Mapper();
}
return CAMERA1;
}
@SuppressWarnings("unchecked")
private static class Camera1Mapper extends Mapper {
private static final HashMap<Flash, String> FLASH = new HashMap<>();
private static final HashMap<WhiteBalance, String> WB = new HashMap<>();
private static final HashMap<Facing, Integer> FACING = new HashMap<>();
private static final HashMap<Hdr, String> HDR = new HashMap<>();
static {
FLASH.put(Flash.OFF, Camera.Parameters.FLASH_MODE_OFF);
FLASH.put(Flash.ON, Camera.Parameters.FLASH_MODE_ON);
FLASH.put(Flash.AUTO, Camera.Parameters.FLASH_MODE_AUTO);
FLASH.put(Flash.TORCH, Camera.Parameters.FLASH_MODE_TORCH);
FACING.put(Facing.BACK, Camera.CameraInfo.CAMERA_FACING_BACK);
FACING.put(Facing.FRONT, Camera.CameraInfo.CAMERA_FACING_FRONT);
WB.put(WhiteBalance.AUTO, Camera.Parameters.WHITE_BALANCE_AUTO);
WB.put(WhiteBalance.INCANDESCENT, Camera.Parameters.WHITE_BALANCE_INCANDESCENT);
WB.put(WhiteBalance.FLUORESCENT, Camera.Parameters.WHITE_BALANCE_FLUORESCENT);
WB.put(WhiteBalance.DAYLIGHT, Camera.Parameters.WHITE_BALANCE_DAYLIGHT);
WB.put(WhiteBalance.CLOUDY, Camera.Parameters.WHITE_BALANCE_CLOUDY_DAYLIGHT);
HDR.put(Hdr.OFF, Camera.Parameters.SCENE_MODE_AUTO);
if (Build.VERSION.SDK_INT >= 17) {
HDR.put(Hdr.ON, Camera.Parameters.SCENE_MODE_HDR);
} else {
HDR.put(Hdr.ON, "hdr");
}
}
@Override
public <T> T map(Flash flash) {
return (T) FLASH.get(flash);
}
@Override
public <T> T map(Facing facing) {
return (T) FACING.get(facing);
}
@Override
public <T> T map(WhiteBalance whiteBalance) {
return (T) WB.get(whiteBalance);
}
@Override
public <T> T map(Hdr hdr) {
return (T) HDR.get(hdr);
}
@Override
public <T> Flash unmapFlash(T cameraConstant) {
return reverseLookup(FLASH, cameraConstant);
}
@Override
public <T> Facing unmapFacing(T cameraConstant) {
return reverseLookup(FACING, cameraConstant);
}
@Override
public <T> WhiteBalance unmapWhiteBalance(T cameraConstant) {
return reverseLookup(WB, cameraConstant);
}
@Override
public <T> Hdr unmapHdr(T cameraConstant) {
return reverseLookup(HDR, cameraConstant);
}
}
}

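A short hedged usage sketch for the new static factory (return values follow the lookup tables above; target-typing resolves the generic T):

    Mapper mapper = Mapper.get();                    // lazily creates the Camera1 mapper
    String flashMode = mapper.map(Flash.TORCH);      // Camera.Parameters.FLASH_MODE_TORCH
    Integer facingIndex = mapper.map(Facing.FRONT);  // Camera.CameraInfo.CAMERA_FACING_FRONT
    Flash flash = mapper.unmapFlash(Camera.Parameters.FLASH_MODE_AUTO);  // Flash.AUTO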
@ -1,14 +1,16 @@
package com.otaliastudios.cameraview;
package com.otaliastudios.cameraview.frame;
import com.otaliastudios.cameraview.size.Size;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.annotation.VisibleForTesting;
/**
* A preview frame to be processed by {@link FrameProcessor}s.
*/
public class Frame {
/* for tests */ FrameManager mManager;
@VisibleForTesting FrameManager mManager;
private byte[] mData = null;
private long mTime = -1;
@ -20,7 +22,8 @@ public class Frame {
mManager = manager;
}
boolean isAlive() {
@SuppressWarnings("BooleanMethodIsAlwaysInverted")
private boolean isAlive() {
return mData != null;
}

@ -1,6 +1,8 @@
package com.otaliastudios.cameraview;
package com.otaliastudios.cameraview.frame;
import com.otaliastudios.cameraview.size.Size;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
@ -21,9 +23,13 @@ import java.util.concurrent.LinkedBlockingQueue;
* - Frame pool:
* We keep a list of mPoolSize recycled instances, to be reused when a new buffer is available.
*/
class FrameManager {
public class FrameManager {
interface BufferCallback {
/**
* Receives callbacks on buffer availability
* (when a Frame is released, we reuse its buffer).
*/
public interface BufferCallback {
void onBufferAvailable(@NonNull byte[] buffer);
}
@ -32,14 +38,45 @@ class FrameManager {
private BufferCallback mCallback;
private LinkedBlockingQueue<Frame> mQueue;
FrameManager(int poolSize, @Nullable BufferCallback callback) {
/**
* Construct a new frame manager.
* The construction must be followed by an {@link #allocateBuffers(int, Size)} call
* as soon as the parameters are known.
*
* @param poolSize the size of the backing pool.
* @param callback a callback for buffer availability
*/
public FrameManager(int poolSize, @Nullable BufferCallback callback) {
mPoolSize = poolSize;
mCallback = callback;
mQueue = new LinkedBlockingQueue<>(mPoolSize);
mBufferSize = -1;
}
void release() {
/**
* Allocates {@link #mPoolSize} buffers. Should be called once
* the preview size and the bitsPerPixel value are known.
*
* This method can be called again after {@link #release()} has been called.
*
* @param bitsPerPixel bits per pixel, depends on image format
* @param previewSize the preview size
* @return the buffer size
*/
public int allocateBuffers(int bitsPerPixel, @NonNull Size previewSize) {
// TODO throw if called twice without release?
mBufferSize = getBufferSize(bitsPerPixel, previewSize);
for (int i = 0; i < mPoolSize; i++) {
mCallback.onBufferAvailable(new byte[mBufferSize]);
}
return mBufferSize;
}
/**
* Releases all frames controlled by this manager and
* clears the pool.
*/
public void release() {
for (Frame frame : mQueue) {
frame.releaseManager();
frame.release();
@ -67,8 +104,8 @@ class FrameManager {
/**
* Returns a new Frame for the given data. This must be called
* - after {@link #allocate(int, Size)}, which sets the buffer size
* - after the byte buffer given by allocate() has been filled.
* - after {@link #allocateBuffers(int, Size)}, which sets the buffer size
* - after the byte buffer given by allocateBuffers() has been filled.
* If this is called X times in a row without releasing frames, it will allocate
* X frames and that's bad. Callers must wait for the preview buffer to be available.
*
@ -76,21 +113,13 @@ class FrameManager {
*
* @return a new frame
*/
Frame getFrame(@NonNull byte[] data, long time, int rotation, @NonNull Size previewSize, int previewFormat) {
public Frame getFrame(@NonNull byte[] data, long time, int rotation, @NonNull Size previewSize, int previewFormat) {
Frame frame = mQueue.poll();
if (frame == null) frame = new Frame(this);
frame.set(data, time, rotation, previewSize, previewFormat);
return frame;
}
int allocate(int bitsPerPixel, @NonNull Size previewSize) {
mBufferSize = getBufferSize(bitsPerPixel, previewSize);
for (int i = 0; i < mPoolSize; i++) {
mCallback.onBufferAvailable(new byte[mBufferSize]);
}
return mBufferSize;
}
private int getBufferSize(int bitsPerPixel, @NonNull Size previewSize) {
long sizeInBits = previewSize.getHeight() * previewSize.getWidth() * bitsPerPixel;
return (int) Math.ceil(sizeInBits / 8.0d);

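To make the pool lifecycle above concrete, here is a minimal sketch assuming an NV21 preview (12 bits per pixel) at 640x480, so getBufferSize() yields ceil(640 * 480 * 12 / 8) = 460,800 bytes per buffer. Frame.release() is assumed to be public in the refactored frame package.

import android.graphics.ImageFormat;

import com.otaliastudios.cameraview.frame.Frame;
import com.otaliastudios.cameraview.frame.FrameManager;
import com.otaliastudios.cameraview.size.Size;

class FrameManagerSketch {
    static void run() {
        Size previewSize = new Size(640, 480);
        FrameManager manager = new FrameManager(2, new FrameManager.BufferCallback() {
            @Override
            public void onBufferAvailable(byte[] buffer) {
                // The real engine passes this buffer to Camera#addCallbackBuffer().
            }
        });

        // Allocate the pool once preview size and format are known:
        int bufferSize = manager.allocateBuffers(
                ImageFormat.getBitsPerPixel(ImageFormat.NV21), previewSize); // 460800

        // Wrap an incoming, filled preview buffer into a reusable Frame:
        byte[] data = new byte[bufferSize];
        Frame frame = manager.getFrame(data, System.currentTimeMillis(),
                90, previewSize, ImageFormat.NV21);
        frame.release(); // assumed public: hands the frame and its buffer back to the pool

        manager.release();
    }
}
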
@ -1,4 +1,6 @@
package com.otaliastudios.cameraview;
package com.otaliastudios.cameraview.frame;
import com.otaliastudios.cameraview.CameraView;
import androidx.annotation.NonNull;
import androidx.annotation.WorkerThread;

@ -1,10 +1,9 @@
package com.otaliastudios.cameraview;
package com.otaliastudios.cameraview.gesture;
import androidx.annotation.NonNull;
import com.otaliastudios.cameraview.CameraView;
import java.util.Arrays;
import java.util.List;
import androidx.annotation.NonNull;
/**
@ -19,65 +18,69 @@ public enum Gesture {
/**
* Pinch gesture, typically assigned to the zoom control.
* This gesture can be mapped to:
* This gesture can be mapped to continuous actions:
*
* - {@link GestureAction#ZOOM}
* - {@link GestureAction#EXPOSURE_CORRECTION}
* - {@link GestureAction#NONE}
*/
PINCH(GestureAction.ZOOM, GestureAction.EXPOSURE_CORRECTION),
PINCH(GestureType.CONTINUOUS),
/**
* Single tap gesture, typically assigned to the focus control.
* This gesture can be mapped to:
* This gesture can be mapped to one shot actions:
*
* - {@link GestureAction#FOCUS}
* - {@link GestureAction#FOCUS_WITH_MARKER}
* - {@link GestureAction#CAPTURE}
* - {@link GestureAction#NONE}
*/
TAP(GestureAction.FOCUS, GestureAction.FOCUS_WITH_MARKER, GestureAction.CAPTURE),
// DOUBLE_TAP(GestureAction.FOCUS, GestureAction.FOCUS_WITH_MARKER, GestureAction.CAPTURE),
TAP(GestureType.ONE_SHOT),
/**
* Long tap gesture.
* This gesture can be mapped to:
* This gesture can be mapped to one shot actions:
*
* - {@link GestureAction#FOCUS}
* - {@link GestureAction#FOCUS_WITH_MARKER}
* - {@link GestureAction#CAPTURE}
* - {@link GestureAction#NONE}
*/
LONG_TAP(GestureAction.FOCUS, GestureAction.FOCUS_WITH_MARKER, GestureAction.CAPTURE),
LONG_TAP(GestureType.ONE_SHOT),
/**
* Horizontal scroll gesture.
* This gesture can be mapped to:
* This gesture can be mapped to continuous actions:
*
* - {@link GestureAction#ZOOM}
* - {@link GestureAction#EXPOSURE_CORRECTION}
* - {@link GestureAction#NONE}
*/
SCROLL_HORIZONTAL(GestureAction.ZOOM, GestureAction.EXPOSURE_CORRECTION),
SCROLL_HORIZONTAL(GestureType.CONTINUOUS),
/**
* Vertical scroll gesture.
* This gesture can be mapped to:
* This gesture can be mapped to continuous actions:
*
* - {@link GestureAction#ZOOM}
* - {@link GestureAction#EXPOSURE_CORRECTION}
* - {@link GestureAction#NONE}
*/
SCROLL_VERTICAL(GestureAction.ZOOM, GestureAction.EXPOSURE_CORRECTION);
SCROLL_VERTICAL(GestureType.CONTINUOUS);
Gesture(GestureAction... controls) {
mControls = Arrays.asList(controls);
Gesture(@NonNull GestureType type) {
this.type = type;
}
private List<GestureAction> mControls;
private GestureType type;
boolean isAssignableTo(@NonNull GestureAction control) {
return control == GestureAction.NONE || mControls.contains(control);
/**
* Whether this gesture can be assigned to the given {@link GestureAction}.
* @param action the action to be checked
* @return true if assignable
*/
public boolean isAssignableTo(@NonNull GestureAction action) {
return action == GestureAction.NONE || action.type() == type;
}
}

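A quick sanity sketch (not part of the patch) of the new type-based rule implemented by isAssignableTo(): continuous gestures accept continuous actions, one-shot gestures accept one-shot actions, and NONE is always allowed.

import com.otaliastudios.cameraview.gesture.Gesture;
import com.otaliastudios.cameraview.gesture.GestureAction;

class GestureMappingSketch {
    static void check() {
        boolean zoomOnPinch = Gesture.PINCH.isAssignableTo(GestureAction.ZOOM);            // true: both CONTINUOUS
        boolean captureOnTap = Gesture.TAP.isAssignableTo(GestureAction.CAPTURE);          // true: both ONE_SHOT
        boolean captureOnPinch = Gesture.PINCH.isAssignableTo(GestureAction.CAPTURE);      // false: type mismatch
        boolean noneOnScroll = Gesture.SCROLL_VERTICAL.isAssignableTo(GestureAction.NONE); // true: NONE always allowed
    }
}
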
@ -1,6 +1,9 @@
package com.otaliastudios.cameraview;
package com.otaliastudios.cameraview.gesture;
import com.otaliastudios.cameraview.CameraView;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
/**
@ -16,55 +19,55 @@ public enum GestureAction {
/**
* No action. This can be mapped to any gesture to disable it.
*/
NONE(0),
NONE(0, GestureType.ONE_SHOT),
/**
* Auto focus control, typically assigned to the tap gesture.
* This action can be mapped to:
* This action can be mapped to one shot gestures:
*
* - {@link Gesture#TAP}
* - {@link Gesture#LONG_TAP}
*/
FOCUS(1),
FOCUS(1, GestureType.ONE_SHOT),
/**
* Auto focus control, typically assigned to the tap gesture.
* On top of {@link #FOCUS}, this will draw a default marker on screen.
* This action can be mapped to:
* This action can be mapped to one shot gestures:
*
* - {@link Gesture#TAP}
* - {@link Gesture#LONG_TAP}
*/
FOCUS_WITH_MARKER(2),
FOCUS_WITH_MARKER(2, GestureType.ONE_SHOT),
/**
* When triggered, this action will fire a picture shoot.
* This action can be mapped to:
* This action can be mapped to one shot gestures:
*
* - {@link Gesture#TAP}
* - {@link Gesture#LONG_TAP}
*/
CAPTURE(3),
CAPTURE(3, GestureType.ONE_SHOT),
/**
* Zoom control, typically assigned to the pinch gesture.
* This action can be mapped to:
* This action can be mapped to continuous gestures:
*
* - {@link Gesture#PINCH}
* - {@link Gesture#SCROLL_HORIZONTAL}
* - {@link Gesture#SCROLL_VERTICAL}
*/
ZOOM(4),
ZOOM(4, GestureType.CONTINUOUS),
/**
* Exposure correction control.
* This action can be mapped to:
* This action can be mapped to continuous gestures:
*
* - {@link Gesture#PINCH}
* - {@link Gesture#SCROLL_HORIZONTAL}
* - {@link Gesture#SCROLL_VERTICAL}
*/
EXPOSURE_CORRECTION(5);
EXPOSURE_CORRECTION(5, GestureType.CONTINUOUS);
final static GestureAction DEFAULT_PINCH = NONE;
@ -74,15 +77,22 @@ public enum GestureAction {
final static GestureAction DEFAULT_SCROLL_VERTICAL = NONE;
private int value;
private GestureType type;
GestureAction(int value) {
GestureAction(int value, @NonNull GestureType type) {
this.value = value;
this.type = type;
}
int value() {
return value;
}
@NonNull
GestureType type() {
return type;
}
@Nullable
static GestureAction fromValue(int value) {
GestureAction[] list = GestureAction.values();

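(The GestureType values referenced above are defined a few hunks below, together with the type-based assignment rule.)
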
@ -0,0 +1,159 @@
package com.otaliastudios.cameraview.gesture;
import android.content.Context;
import android.graphics.PointF;
import androidx.annotation.NonNull;
import android.view.MotionEvent;
import android.widget.FrameLayout;
/**
* Base class for gesture layouts, that is, layouts that will capture
* gestures.
*/
public abstract class GestureLayout extends FrameLayout {
// The number of possible values between minValue and maxValue, for the getValue method.
// We could make this non-static (e.g. larger granularity for exposure correction).
private final static int GRANULARITY = 50;
private boolean mActive;
private Gesture mType;
private PointF[] mPoints;
GestureLayout(@NonNull Context context, int points) {
super(context);
mPoints = new PointF[points];
for (int i = 0; i < points; i++) {
mPoints[i] = new PointF(0, 0);
}
}
/**
* Makes this instance active, that is, listening to events.
* @param active whether this should be active or not
*/
public void setActive(boolean active) {
mActive = active;
}
/**
* Whether this instance is active, that is, whether it is listening
* to events and identifying new gestures.
* @return true if active
*/
public boolean isActive() {
return mActive;
}
/**
* Called when new events are available.
* If true is returned, users will call {@link #getGesture()}, {@link #getPoints()}
* and maybe {@link #getValue(float, float, float)} to know more about the gesture.
*
* @param event the new event
* @return true if a gesture was detected
*/
public final boolean onTouchEvent(@NonNull MotionEvent event) {
if (!mActive) return false;
return handleTouchEvent(event);
}
/**
* Called when new events are available.
* If true is returned, users will call {@link #getGesture()}, {@link #getPoints()}
* and maybe {@link #getValue(float, float, float)} to know more about the gesture.
*
* @param event the new event
* @return true if a gesture was detected
*/
protected abstract boolean handleTouchEvent(@NonNull MotionEvent event);
/**
* Returns the gesture that this instance is currently detecting.
* This is mutable - for instance, a scroll layout can detect both
* horizontal and vertical scroll gestures.
*
* @return the current gesture
*/
@NonNull
public final Gesture getGesture() {
return mType;
}
/**
* Sets the currently detected gesture.
* @see #getGesture()
*
* @param gesture the current gesture
*/
protected final void setGesture(Gesture gesture) {
mType = gesture;
}
/**
* Returns an array of points that identify the currently
* detected gesture. If no gesture was detected, this returns
* an array of points with x and y set to 0.
*
* @return array of gesture points
*/
@NonNull
public final PointF[] getPoints() {
return mPoints;
}
/**
* Utility function to access an item in the
* {@link #getPoints()} array.
*
* @param which the array position
* @return the point
*/
@NonNull
protected final PointF getPoint(int which) {
return mPoints[which];
}
/**
* For {@link GestureType#CONTINUOUS} gestures, returns the float value at the current
* gesture state. This means, for example, scaling the old value with a pinch factor,
* taking into account the minimum and maximum values.
*
* @param currValue the last value
* @param minValue the min possible value
* @param maxValue the max possible value
* @return the new continuous value
*/
public final float computeValue(float currValue, float minValue, float maxValue) {
return capValue(currValue, getValue(currValue, minValue, maxValue), minValue, maxValue);
}
/**
* For {@link GestureType#CONTINUOUS} gestures, returns the float value at the current
* gesture state. This means, for example, scaling the old value with a pinch factor,
* taking into account the minimum and maximum values.
*
* @param currValue the last value
* @param minValue the min possible value
* @param maxValue the max possible value
* @return the new continuous value
*/
protected abstract float getValue(float currValue, float minValue, float maxValue);
/**
* Checks for newValue to be between minValue and maxValue,
* and checks that it is 'far enough' from the oldValue, in order
* to reduce useless updates.
*/
private static float capValue(float oldValue, float newValue, float minValue, float maxValue) {
if (newValue < minValue) newValue = minValue;
if (newValue > maxValue) newValue = maxValue;
float distance = (maxValue - minValue) / (float) GRANULARITY;
float half = distance / 2;
if (newValue >= oldValue - half && newValue <= oldValue + half) {
// Too close! Return the oldValue.
return oldValue;
}
return newValue;
}
}

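A hypothetical subclass, only to illustrate the contract above: detect the gesture in handleTouchEvent(), report it through setGesture() and getPoint(), and let computeValue() clamp the continuous value (changes closer than half a granularity step, i.e. 1/100 of the range, to the old value are dropped). It must live in the gesture package because the GestureLayout constructor is package-private.

package com.otaliastudios.cameraview.gesture;

import android.content.Context;
import android.view.MotionEvent;

import androidx.annotation.NonNull;

class SwipeUpGestureLayout extends GestureLayout {

    private float mDelta;

    SwipeUpGestureLayout(@NonNull Context context) {
        super(context, 2); // two tracked points: gesture start and current position
        setGesture(Gesture.SCROLL_VERTICAL);
    }

    @Override
    protected boolean handleTouchEvent(@NonNull MotionEvent event) {
        if (event.getAction() == MotionEvent.ACTION_MOVE && event.getHistorySize() > 0) {
            getPoint(0).set(event.getHistoricalX(0), event.getHistoricalY(0));
            getPoint(1).set(event.getX(), event.getY());
            mDelta = (getPoint(0).y - getPoint(1).y) / getHeight(); // up = positive
            return true;
        }
        return false;
    }

    @Override
    protected float getValue(float currValue, float minValue, float maxValue) {
        // Scale the normalized delta into the caller's range; computeValue()
        // then clamps the result between minValue and maxValue.
        return currValue + mDelta * (maxValue - minValue);
    }
}
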
@ -0,0 +1,52 @@
package com.otaliastudios.cameraview.gesture;
import android.content.res.TypedArray;
import com.otaliastudios.cameraview.R;
import androidx.annotation.NonNull;
/**
* Parses gestures from XML attributes.
*/
public class GestureParser {
private int tapAction;
private int longTapAction;
private int pinchAction;
private int horizontalScrollAction;
private int verticalScrollAction;
public GestureParser(@NonNull TypedArray array) {
this.tapAction = array.getInteger(R.styleable.CameraView_cameraGestureTap, GestureAction.DEFAULT_TAP.value());
this.longTapAction = array.getInteger(R.styleable.CameraView_cameraGestureLongTap, GestureAction.DEFAULT_LONG_TAP.value());
this.pinchAction = array.getInteger(R.styleable.CameraView_cameraGesturePinch, GestureAction.DEFAULT_PINCH.value());
this.horizontalScrollAction = array.getInteger(R.styleable.CameraView_cameraGestureScrollHorizontal, GestureAction.DEFAULT_SCROLL_HORIZONTAL.value());
this.verticalScrollAction = array.getInteger(R.styleable.CameraView_cameraGestureScrollVertical, GestureAction.DEFAULT_SCROLL_VERTICAL.value());
}
private GestureAction get(int which) {
return GestureAction.fromValue(which);
}
public GestureAction getTapAction() {
return get(tapAction);
}
public GestureAction getLongTapAction() {
return get(longTapAction);
}
public GestureAction getPinchAction() {
return get(pinchAction);
}
public GestureAction getHorizontalScrollAction() {
return get(horizontalScrollAction);
}
public GestureAction getVerticalScrollAction() {
return get(verticalScrollAction);
}
}

@ -0,0 +1,23 @@
package com.otaliastudios.cameraview.gesture;
/**
* Gestures and gesture actions can both have a type. For a gesture to be mapped to
* a certain {@link GestureAction}, both of them must be of the same type.
*/
public enum GestureType {
/**
* Defines gestures or gesture actions that consist of a single operation.
* Gesture example: a tap.
* Gesture action example: taking a picture.
*/
ONE_SHOT,
/**
* Defines gestures or gesture actions that consist of a continuous operation.
* Gesture example: pinching.
* Gesture action example: controlling zoom.
*/
CONTINUOUS
}

@ -1,29 +1,25 @@
package com.otaliastudios.cameraview;
package com.otaliastudios.cameraview.gesture;
import android.annotation.SuppressLint;
import android.content.Context;
import android.graphics.PointF;
import android.os.Build;
import androidx.annotation.NonNull;
import android.view.MotionEvent;
import android.view.ScaleGestureDetector;
class PinchGestureLayout extends GestureLayout {
/**
* A {@link GestureLayout} that detects {@link Gesture#PINCH} gestures.
*/
public class PinchGestureLayout extends GestureLayout {
private final static float ADD_SENSITIVITY = 2f;
ScaleGestureDetector mDetector;
private boolean mNotify;
/* tests */ float mFactor = 0;
private float mFactor = 0;
public PinchGestureLayout(@NonNull Context context) {
super(context);
}
@Override
protected void onInitialize(@NonNull Context context) {
super.onInitialize(context);
mPoints = new PointF[]{ new PointF(0, 0), new PointF(0, 0) };
super(context, 2);
setGesture(Gesture.PINCH);
mDetector = new ScaleGestureDetector(context, new ScaleGestureDetector.SimpleOnScaleGestureListener() {
@Override
public boolean onScale(ScaleGestureDetector detector) {
@ -36,17 +32,10 @@ class PinchGestureLayout extends GestureLayout {
if (Build.VERSION.SDK_INT >= 19) {
mDetector.setQuickScaleEnabled(false);
}
// We listen only to the pinch type.
mType = Gesture.PINCH;
}
@SuppressLint("ClickableViewAccessibility")
@Override
public boolean onTouchEvent(MotionEvent event) {
if (!mEnabled) return false;
protected boolean handleTouchEvent(@NonNull MotionEvent event) {
// Reset the mNotify flag on a new gesture.
// This is to ensure that the mNotify flag stays on until the
// previous gesture ends.
@ -59,11 +48,11 @@ class PinchGestureLayout extends GestureLayout {
// Keep notifying CameraView as long as the gesture goes.
if (mNotify) {
mPoints[0].x = event.getX(0);
mPoints[0].y = event.getY(0);
getPoint(0).x = event.getX(0);
getPoint(0).y = event.getY(0);
if (event.getPointerCount() > 1) {
mPoints[1].x = event.getX(1);
mPoints[1].y = event.getY(1);
getPoint(1).x = event.getX(1);
getPoint(1).y = event.getY(1);
}
return true;
}
@ -71,8 +60,8 @@ class PinchGestureLayout extends GestureLayout {
}
@Override
public float scaleValue(float currValue, float minValue, float maxValue) {
float add = mFactor;
public float getValue(float currValue, float minValue, float maxValue) {
float add = getFactor();
// ^ This works well if minValue = 0, maxValue = 1.
// Account for the different range:
add *= (maxValue - minValue);
@ -84,6 +73,11 @@ class PinchGestureLayout extends GestureLayout {
} else if (add < 0) {
add *= (currValue - minValue);
} Nope, I don't like this, it slows everything down. */
return capValue(currValue, currValue + add, minValue, maxValue);
return currValue + add;
}
/* for tests */ protected float getFactor() {
return mFactor;
}
}

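For scale, a sketch of how a CameraView-side dispatcher might consume a continuous gesture such as the pinch above (hypothetical dispatcher, not the actual CameraView code): with getFactor() = 0.1 and a [0, 1] zoom range, getValue() returns currValue + 0.1 and computeValue() clamps it.

import com.otaliastudios.cameraview.gesture.GestureLayout;

class ContinuousGestureDispatchSketch {

    private float mZoom = 0.5f;

    void onGestureDetected(GestureLayout layout) {
        // e.g. 0.5 + 0.1 * (1 - 0) = 0.6, clamped into [0, 1]:
        mZoom = layout.computeValue(mZoom, 0f, 1f);
        // A real dispatcher would now forward mZoom to the engine (hypothetical call):
        // engine.setZoom(mZoom);
    }
}
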
@ -1,29 +1,27 @@
package com.otaliastudios.cameraview;
package com.otaliastudios.cameraview.gesture;
import android.annotation.SuppressLint;
import android.content.Context;
import android.graphics.PointF;
import androidx.annotation.NonNull;
import android.view.GestureDetector;
import android.view.MotionEvent;
class ScrollGestureLayout extends GestureLayout {
import com.otaliastudios.cameraview.CameraLogger;
/**
* A {@link GestureLayout} that detects {@link Gesture#SCROLL_HORIZONTAL}
* and {@link Gesture#SCROLL_VERTICAL} gestures.
*/
public class ScrollGestureLayout extends GestureLayout {
private static final String TAG = ScrollGestureLayout.class.getSimpleName();
private static final CameraLogger LOG = CameraLogger.create(TAG);
private GestureDetector mDetector;
private boolean mNotify;
/* tests */ float mFactor;
private float mFactor;
public ScrollGestureLayout(@NonNull Context context) {
super(context);
}
@Override
protected void onInitialize(@NonNull Context context) {
super.onInitialize(context);
mPoints = new PointF[]{ new PointF(0, 0), new PointF(0, 0) };
super(context, 2);
mDetector = new GestureDetector(context, new GestureDetector.SimpleOnGestureListener() {
@Override
@ -31,17 +29,17 @@ class ScrollGestureLayout extends GestureLayout {
boolean horizontal;
LOG.i("onScroll:", "distanceX="+distanceX, "distanceY="+distanceY);
if (e1 == null || e2 == null) return false; // Got some crashes about this.
if (e1.getX() != mPoints[0].x || e1.getY() != mPoints[0].y) {
if (e1.getX() != getPoint(0).x || e1.getY() != getPoint(0).y) {
// First step. We choose now if it's a vertical or horizontal scroll, and
// stick to it for the whole gesture.
horizontal = Math.abs(distanceX) >= Math.abs(distanceY);
mType = horizontal ? Gesture.SCROLL_HORIZONTAL : Gesture.SCROLL_VERTICAL;
mPoints[0].set(e1.getX(), e1.getY());
setGesture(horizontal ? Gesture.SCROLL_HORIZONTAL : Gesture.SCROLL_VERTICAL);
getPoint(0).set(e1.getX(), e1.getY());
} else {
// Not the first step. We already defined the type.
horizontal = mType == Gesture.SCROLL_HORIZONTAL;
horizontal = getGesture() == Gesture.SCROLL_HORIZONTAL;
}
mPoints[1].set(e2.getX(), e2.getY());
getPoint(1).set(e2.getX(), e2.getY());
mFactor = horizontal ? (distanceX / getWidth()) : (distanceY / getHeight());
mFactor = horizontal ? -mFactor : mFactor; // When vertical, up = positive
mNotify = true;
@ -52,11 +50,8 @@ class ScrollGestureLayout extends GestureLayout {
mDetector.setIsLongpressEnabled(false); // Looks important.
}
@SuppressLint("ClickableViewAccessibility")
@Override
public boolean onTouchEvent(MotionEvent event) {
if (!mEnabled) return false;
protected boolean handleTouchEvent(@NonNull MotionEvent event) {
// Reset the mNotify flag on a new gesture.
// This is to ensure that the mNotify flag stays on until the
// previous gesture ends.
@ -68,20 +63,23 @@ class ScrollGestureLayout extends GestureLayout {
mDetector.onTouchEvent(event);
// Keep notifying CameraView as long as the gesture goes.
if (mNotify) LOG.i("Notifying a gesture of type", mType.name());
if (mNotify) LOG.i("Notifying a gesture of type", getGesture().name());
return mNotify;
}
@Override
public float scaleValue(float currValue, float minValue, float maxValue) {
float delta = mFactor; // -1 ... 1
public float getValue(float currValue, float minValue, float maxValue) {
float delta = getFactor(); // -1 ... 1
// ^ This works well if minValue = 0, maxValue = 1.
// Account for the different range:
delta *= (maxValue - minValue); // -(max-min) ... (max-min)
delta *= 2; // Add some sensitivity.
return capValue(currValue, currValue + delta, minValue, maxValue);
return currValue + delta;
}
/* for tests */ protected float getFactor() {
return mFactor;
}
}

@ -1,8 +1,7 @@
package com.otaliastudios.cameraview;
package com.otaliastudios.cameraview.gesture;
import android.animation.Animator;
import android.animation.AnimatorListenerAdapter;
import android.annotation.SuppressLint;
import android.content.Context;
import android.graphics.PointF;
import androidx.annotation.NonNull;
@ -14,7 +13,13 @@ import android.view.View;
import android.widget.FrameLayout;
import android.widget.ImageView;
class TapGestureLayout extends GestureLayout {
import com.otaliastudios.cameraview.R;
/**
* A {@link GestureLayout} that detects {@link Gesture#TAP}
* and {@link Gesture#LONG_TAP} gestures.
*/
public class TapGestureLayout extends GestureLayout {
private GestureDetector mDetector;
private boolean mNotify;
@ -23,19 +28,13 @@ class TapGestureLayout extends GestureLayout {
private ImageView mFocusMarkerFill;
public TapGestureLayout(@NonNull Context context) {
super(context);
}
@Override
protected void onInitialize(@NonNull Context context) {
super.onInitialize(context);
mPoints = new PointF[]{ new PointF(0, 0) };
super(context, 1);
mDetector = new GestureDetector(context, new GestureDetector.SimpleOnGestureListener() {
@Override
public boolean onSingleTapUp(MotionEvent e) {
mNotify = true;
mType = Gesture.TAP;
setGesture(Gesture.TAP);
return true;
}
@ -50,7 +49,7 @@ class TapGestureLayout extends GestureLayout {
@Override
public void onLongPress(MotionEvent e) {
mNotify = true;
mType = Gesture.LONG_TAP;
setGesture(Gesture.LONG_TAP);
}
});
@ -63,11 +62,8 @@ class TapGestureLayout extends GestureLayout {
mFocusMarkerFill = findViewById(R.id.fill);
}
@SuppressLint("ClickableViewAccessibility")
@Override
public boolean onTouchEvent(MotionEvent event) {
if (!mEnabled) return false;
protected boolean handleTouchEvent(@NonNull MotionEvent event) {
// Reset the mNotify flag on a new gesture.
// This is to ensure that the mNotify flag stays on until the
// previous gesture ends.
@ -80,15 +76,15 @@ class TapGestureLayout extends GestureLayout {
// Keep notifying CameraView as long as the gesture goes.
if (mNotify) {
mPoints[0].x = event.getX();
mPoints[0].y = event.getY();
getPoint(0).x = event.getX();
getPoint(0).y = event.getY();
return true;
}
return false;
}
@Override
public float scaleValue(float currValue, float minValue, float maxValue) {
public float getValue(float currValue, float minValue, float maxValue) {
return 0;
}

@ -1,4 +1,4 @@
package com.otaliastudios.cameraview;
package com.otaliastudios.cameraview.internal;
import android.content.Context;
import android.graphics.Canvas;
@ -8,11 +8,19 @@ import android.graphics.drawable.ColorDrawable;
import androidx.annotation.ColorInt;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.annotation.VisibleForTesting;
import android.util.AttributeSet;
import android.util.TypedValue;
import android.view.View;
class GridLinesLayout extends View {
import com.otaliastudios.cameraview.controls.Grid;
import com.otaliastudios.cameraview.internal.utils.Task;
/**
* A layout overlay that draws grid lines based on the {@link Grid} parameter.
*/
public class GridLinesLayout extends View {
private final static float GOLDEN_RATIO_INV = 0.61803398874989f;
public final static int DEFAULT_COLOR = Color.argb(160, 255, 255, 255);
@ -24,7 +32,7 @@ class GridLinesLayout extends View {
private ColorDrawable vert;
private final float width;
Task<Integer> drawTask = new Task<>();
@VisibleForTesting Task<Integer> drawTask = new Task<>();
public GridLinesLayout(@NonNull Context context) {
this(context, null);
@ -44,16 +52,36 @@ class GridLinesLayout extends View {
vert.setBounds(0, top, (int) width, bottom);
}
/**
* Returns the current grid value.
* @return the grid mode
*/
@NonNull
public Grid getGridMode() {
return gridMode;
}
/**
* Sets a new grid value
* @param gridMode the new value
*/
public void setGridMode(@NonNull Grid gridMode) {
this.gridMode = gridMode;
postInvalidate();
}
/**
* Returns the current grid color.
* @return the grid color
*/
public int getGridColor() {
return gridColor;
}
/**
* Sets a new grid color.
* @param gridColor the new color
*/
public void setGridColor(@ColorInt int gridColor) {
this.gridColor = gridColor;
horiz.setColor(gridColor);
@ -61,10 +89,6 @@ class GridLinesLayout extends View {
postInvalidate();
}
public int getGridColor() {
return gridColor;
}
private int getLineCount() {
switch (gridMode) {
case OFF: return 0;

@ -14,7 +14,7 @@
* limitations under the License.
*/
package com.otaliastudios.cameraview;
package com.otaliastudios.cameraview.internal.egl;
import android.graphics.Bitmap;
import android.opengl.EGL14;
@ -24,6 +24,8 @@ import android.os.Build;
import androidx.annotation.RequiresApi;
import android.util.Log;
import com.otaliastudios.cameraview.CameraLogger;
import java.io.BufferedOutputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
@ -38,7 +40,7 @@ import java.nio.ByteOrder;
* There can be multiple surfaces associated with a single context.
*/
@RequiresApi(api = Build.VERSION_CODES.JELLY_BEAN_MR2)
class EglBaseSurface extends EglElement {
public class EglBaseSurface extends EglElement {
protected static final String TAG = EglBaseSurface.class.getSimpleName();
private final static CameraLogger LOG = CameraLogger.create(TAG);

@ -14,7 +14,7 @@
* limitations under the License.
*/
package com.otaliastudios.cameraview;
package com.otaliastudios.cameraview.internal.egl;
import android.graphics.SurfaceTexture;
import android.opengl.EGL14;
@ -36,7 +36,7 @@ import android.view.Surface;
* The EGLContext must only be attached to one thread at a time. This class is not thread-safe.
*/
@RequiresApi(api = Build.VERSION_CODES.JELLY_BEAN_MR2)
final class EglCore {
public final class EglCore {
private static final String TAG = EglCore.class.getSimpleName();
/**

@ -1,9 +1,11 @@
package com.otaliastudios.cameraview;
package com.otaliastudios.cameraview.internal.egl;
import android.opengl.GLES20;
import android.opengl.Matrix;
import com.otaliastudios.cameraview.CameraLogger;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
@ -22,16 +24,16 @@ class EglElement {
protected static void check(String opName) {
int error = GLES20.glGetError();
if (error != GLES20.GL_NO_ERROR) {
LOG.e("Error during", opName, "glError 0x", Integer.toHexString(error));
throw new RuntimeException(CameraLogger.lastMessage);
String message = LOG.e("Error during", opName, "glError 0x", Integer.toHexString(error));
throw new RuntimeException(message);
}
}
// Check for valid location.
protected static void checkLocation(int location, String label) {
if (location < 0) {
LOG.e("Unable to locate", label, "in program");
throw new RuntimeException(CameraLogger.lastMessage);
String message = LOG.e("Unable to locate", label, "in program");
throw new RuntimeException(message);
}
}

@ -1,16 +1,17 @@
package com.otaliastudios.cameraview;
package com.otaliastudios.cameraview.internal.egl;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import android.opengl.Matrix;
import com.otaliastudios.cameraview.CameraLogger;
import java.nio.FloatBuffer;
/**
* This is a mix of 3 grafika classes, FullFrameRect, Texture2dProgram, Drawable2d.
*/
class EglViewport extends EglElement {
public class EglViewport extends EglElement {
private final static CameraLogger LOG = CameraLogger.create(EglViewport.class.getSimpleName());
@ -73,7 +74,7 @@ class EglViewport extends EglElement {
// private int muTexOffsetLoc; // Used for filtering
// private int muColorAdjustLoc; // Used for filtering
EglViewport() {
public EglViewport() {
mTextureTarget = GLES11Ext.GL_TEXTURE_EXTERNAL_OES;
mProgramHandle = createProgram(SIMPLE_VERTEX_SHADER, SIMPLE_FRAGMENT_SHADER);
maPositionLocation = GLES20.glGetAttribLocation(mProgramHandle, "aPosition");
@ -89,16 +90,16 @@ class EglViewport extends EglElement {
}
void release(boolean doEglCleanup) {
public void release(boolean doEglCleanup) {
if (doEglCleanup) GLES20.glDeleteProgram(mProgramHandle);
mProgramHandle = -1;
}
void release() {
public void release() {
release(true);
}
int createTexture() {
public int createTexture() {
int[] textures = new int[1];
GLES20.glGenTextures(1, textures, 0);
check("glGenTextures");
@ -116,7 +117,7 @@ class EglViewport extends EglElement {
return texId;
}
void drawFrame(int textureId, float[] textureMatrix) {
public void drawFrame(int textureId, float[] textureMatrix) {
drawFrame(textureId, textureMatrix,
mVertexCoordinatesArray,
mTextureCoordinatesArray);

@ -14,7 +14,7 @@
* limitations under the License.
*/
package com.otaliastudios.cameraview;
package com.otaliastudios.cameraview.internal.egl;
import android.graphics.SurfaceTexture;
import android.os.Build;
@ -27,7 +27,7 @@ import android.view.Surface;
* It's good practice to explicitly release() the surface, preferably from a "finally" block.
*/
@RequiresApi(api = Build.VERSION_CODES.JELLY_BEAN_MR2)
class EglWindowSurface extends EglBaseSurface {
public class EglWindowSurface extends EglBaseSurface {
private Surface mSurface;
private boolean mReleaseSurface;

@ -1,9 +1,11 @@
package com.otaliastudios.cameraview;
package com.otaliastudios.cameraview.internal.utils;
import android.annotation.SuppressLint;
import android.media.CamcorderProfile;
import android.os.Build;
import com.otaliastudios.cameraview.size.Size;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
@ -16,7 +18,7 @@ import androidx.annotation.NonNull;
/**
* Wraps the {@link android.media.CamcorderProfile} static utilities.
*/
class CamcorderProfiles {
public class CamcorderProfiles {
@SuppressLint("UseSparseArrays")
private static Map<Size, Integer> sizeToProfileMap = new HashMap<>();
@ -42,7 +44,7 @@ class CamcorderProfiles {
* @return a profile
*/
@NonNull
static CamcorderProfile get(int cameraId, @NonNull Size targetSize) {
public static CamcorderProfile get(int cameraId, @NonNull Size targetSize) {
final int targetArea = targetSize.getWidth() * targetSize.getHeight();
List<Size> sizes = new ArrayList<>(sizeToProfileMap.keySet());
Collections.sort(sizes, new Comparator<Size>() {

@ -1,13 +1,20 @@
package com.otaliastudios.cameraview;
package com.otaliastudios.cameraview.internal.utils;
import android.graphics.Rect;
import com.otaliastudios.cameraview.size.AspectRatio;
import com.otaliastudios.cameraview.size.Size;
import androidx.annotation.NonNull;
class CropHelper {
/**
* Simply computes the crop between a full size and a desired aspect ratio.
*/
public class CropHelper {
// It's important that size and aspect ratio belong to the same reference.
@NonNull
static Rect computeCrop(@NonNull Size currentSize, @NonNull AspectRatio targetRatio) {
public static Rect computeCrop(@NonNull Size currentSize, @NonNull AspectRatio targetRatio) {
int currentWidth = currentSize.getWidth();
int currentHeight = currentSize.getHeight();
if (targetRatio.matches(currentSize)) {

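A worked example of the crop (only the aspect-ratio check is visible in this hunk; the centered crop below is the usual behavior and is an assumption): cropping a 4:3 preview of 1600x1200 to 16:9 keeps the full width and trims the height to 1600 * 9 / 16 = 900, i.e. 150 pixels from top and bottom.

import android.graphics.Rect;

import com.otaliastudios.cameraview.internal.utils.CropHelper;
import com.otaliastudios.cameraview.size.AspectRatio;
import com.otaliastudios.cameraview.size.Size;

class CropSketch {
    static Rect cropTo16by9() {
        Size current = new Size(1600, 1200);
        // Expected (assuming a centered crop): new Rect(0, 150, 1600, 1050).
        return CropHelper.computeCrop(current, AspectRatio.of(16, 9));
    }
}
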
@ -0,0 +1,38 @@
package com.otaliastudios.cameraview.internal.utils;
import androidx.exifinterface.media.ExifInterface;
/**
* Super basic exif utilities.
*/
public class ExifHelper {
/**
* Maps an {@link ExifInterface} orientation value
* to the actual degrees.
*/
public static int readExifOrientation(int exifOrientation) {
int orientation;
switch (exifOrientation) {
case ExifInterface.ORIENTATION_NORMAL:
case ExifInterface.ORIENTATION_FLIP_HORIZONTAL:
orientation = 0; break;
case ExifInterface.ORIENTATION_ROTATE_180:
case ExifInterface.ORIENTATION_FLIP_VERTICAL:
orientation = 180; break;
case ExifInterface.ORIENTATION_ROTATE_90:
case ExifInterface.ORIENTATION_TRANSPOSE:
orientation = 90; break;
case ExifInterface.ORIENTATION_ROTATE_270:
case ExifInterface.ORIENTATION_TRANSVERSE:
orientation = 270; break;
default: orientation = 0;
}
return orientation;
}
}

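This mirrors how FullPictureRecorder uses it later in this diff: read TAG_ORIENTATION from the JPEG bytes with ExifInterface and convert it to degrees.

import androidx.exifinterface.media.ExifInterface;

import com.otaliastudios.cameraview.internal.utils.ExifHelper;

import java.io.ByteArrayInputStream;
import java.io.IOException;

class ExifSketch {
    static int jpegRotation(byte[] jpeg) {
        try {
            ExifInterface exif = new ExifInterface(new ByteArrayInputStream(jpeg));
            int exifOrientation = exif.getAttributeInt(
                    ExifInterface.TAG_ORIENTATION, ExifInterface.ORIENTATION_NORMAL);
            return ExifHelper.readExifOrientation(exifOrientation); // 0, 90, 180 or 270
        } catch (IOException e) {
            return 0;
        }
    }
}
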
@ -1,26 +1,39 @@
package com.otaliastudios.cameraview;
package com.otaliastudios.cameraview.internal.utils;
import android.content.Context;
import android.hardware.SensorManager;
import androidx.annotation.NonNull;
import androidx.annotation.VisibleForTesting;
import android.view.Display;
import android.view.OrientationEventListener;
import android.view.Surface;
import android.view.WindowManager;
class OrientationHelper {
/**
* Helps with keeping track of both the device orientation (which changes when the device is rotated)
* and the display offset (which depends on the activity orientation with respect to the device default orientation).
*/
public class OrientationHelper {
final OrientationEventListener mListener;
/**
* Receives callbacks about device orientation changes.
*/
public interface Callback {
void onDeviceOrientationChanged(int deviceOrientation);
}
@VisibleForTesting final OrientationEventListener mListener;
private final Callback mCallback;
private int mDeviceOrientation = -1;
private int mDisplayOffset = -1;
interface Callback {
void onDeviceOrientationChanged(int deviceOrientation);
}
OrientationHelper(@NonNull Context context, @NonNull Callback callback) {
/**
* Creates a new orientation helper.
* @param context a valid context
* @param callback a {@link Callback}
*/
public OrientationHelper(@NonNull Context context, @NonNull Callback callback) {
mCallback = callback;
mListener = new OrientationEventListener(context.getApplicationContext(), SensorManager.SENSOR_DELAY_NORMAL) {
@ -48,7 +61,11 @@ class OrientationHelper {
};
}
void enable(@NonNull Context context) {
/**
* Enables this listener.
* @param context a context
*/
public void enable(@NonNull Context context) {
Display display = ((WindowManager) context.getSystemService(Context.WINDOW_SERVICE)).getDefaultDisplay();
switch (display.getRotation()) {
case Surface.ROTATION_0: mDisplayOffset = 0; break;
@ -60,17 +77,28 @@ class OrientationHelper {
mListener.enable();
}
void disable() {
/**
* Disables this listener.
*/
public void disable() {
mListener.disable();
mDisplayOffset = -1;
mDeviceOrientation = -1;
}
int getDeviceOrientation() {
/**
* Returns the current device orientation.
* @return device orientation
*/
public int getDeviceOrientation() {
return mDeviceOrientation;
}
int getDisplayOffset() {
/**
* Returns the current display offset.
* @return display offset
*/
public int getDisplayOffset() {
return mDisplayOffset;
}
}

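A minimal usage sketch (not part of the patch): create the helper with a callback, enable it with a Context, read the display offset, and disable it when done.

import android.content.Context;

import androidx.annotation.NonNull;

import com.otaliastudios.cameraview.internal.utils.OrientationHelper;

class OrientationSketch {
    static OrientationHelper attach(@NonNull Context context) {
        OrientationHelper helper = new OrientationHelper(context, new OrientationHelper.Callback() {
            @Override
            public void onDeviceOrientationChanged(int deviceOrientation) {
                // Device orientation in degrees, used to compute output rotations.
            }
        });
        helper.enable(context);
        int displayOffset = helper.getDisplayOffset(); // 0, 90, 180 or 270
        return helper; // callers should invoke helper.disable() when stopping
    }
}
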
@ -0,0 +1,145 @@
package com.otaliastudios.cameraview.internal.utils;
import com.otaliastudios.cameraview.CameraLogger;
import java.util.concurrent.LinkedBlockingQueue;
import androidx.annotation.CallSuper;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
/**
* Base class for pools of recycleable objects.
* @param <T> the object type
*/
public class Pool<T> {
private static final String TAG = Pool.class.getSimpleName();
private static final CameraLogger LOG = CameraLogger.create(TAG);
private int maxPoolSize;
private int activeCount;
private LinkedBlockingQueue<T> mQueue;
private Factory<T> factory;
/**
* Used to create new instances of objects when needed.
* @param <T> object type
*/
public interface Factory<T> {
T create();
}
/**
* Creates a new pool with the given pool size and factory.
* @param maxPoolSize the max pool size
* @param factory the factory
*/
public Pool(int maxPoolSize, @NonNull Factory<T> factory) {
this.maxPoolSize = maxPoolSize;
this.mQueue = new LinkedBlockingQueue<>(maxPoolSize);
this.factory = factory;
}
/**
* Whether the pool is empty. This means that {@link #get()} will return
* a null item, because all objects were reclaimed and not recycled yet.
*
* @return whether the pool is empty
*/
public boolean isEmpty() {
return count() >= maxPoolSize;
}
/**
* Returns a new item, from the recycled pool if possible (if there are recycled items),
* or instantiating one through the factory (if we can respect the pool size).
* If these conditions are not met, this returns null.
*
* @return an item or null
*/
@Nullable
public T get() {
T item = mQueue.poll();
if (item != null) {
activeCount++; // poll decreases, this fixes
LOG.v("GET: Reusing recycled item.", this);
return item;
}
if (isEmpty()) {
LOG.v("GET: Returning null. Too many items requested.", this);
return null;
}
activeCount++;
LOG.v("GET: Creating a new item.", this);
return factory.create();
}
/**
* Recycles an item after it has been used. The item should come from a previous
* {@link #get()} call.
*
* @param item used item
*/
public void recycle(@NonNull T item) {
LOG.v("RECYCLE: Recycling item.", this);
if (--activeCount < 0) {
throw new IllegalStateException("Trying to recycle an item which makes activeCount < 0. " +
"This means that this or some previous items being recycled were not coming from " +
"this pool, or some item was recycled more than once. " + this);
}
if (!mQueue.offer(item)) {
throw new IllegalStateException("Trying to recycle an item while the queue is full. " +
"This means that this or some previous items being recycled were not coming from " +
"this pool, or some item was recycled more than once. " + this);
}
}
/**
* Clears the pool of recycled items.
*/
@CallSuper
public void clear() {
mQueue.clear();
}
/**
* Returns the count of all items managed by this pool. Includes
* - active items: currently being used
* - recycled items: used and recycled, available for second use
*
* @return count
*/
public final int count() {
return activeCount() + recycledCount();
}
/**
* Returns the active items managed by this pool, that is, items
* currently being used.
*
* @return active count
*/
public final int activeCount() {
return activeCount;
}
/**
* Returns the recycled items managed by this pool, that is, items
* that were used and later recycled, and are currently available for
* second use.
*
* @return recycled count
*/
public final int recycledCount() {
return mQueue.size();
}
@NonNull
@Override
public String toString() {
return getClass().getSimpleName() + " -- count:" + count() + ", active:" + activeCount() + ", recycled:" + recycledCount();
}
}

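A usage sketch with a hypothetical byte-array factory: get() hands out at most maxPoolSize items at a time, and recycle() makes them available again.

import com.otaliastudios.cameraview.internal.utils.Pool;

class PoolSketch {
    static void run() {
        Pool<byte[]> buffers = new Pool<>(2, new Pool.Factory<byte[]>() {
            @Override
            public byte[] create() {
                return new byte[1024];
            }
        });

        byte[] first = buffers.get();   // created by the factory
        byte[] second = buffers.get();  // created by the factory
        byte[] third = buffers.get();   // null: all maxPoolSize items are active

        buffers.recycle(first);         // now recycledCount() == 1
        byte[] reused = buffers.get();  // same instance as 'first'
    }
}
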
@ -1,4 +1,6 @@
package com.otaliastudios.cameraview;
package com.otaliastudios.cameraview.internal.utils;
import com.otaliastudios.cameraview.size.Size;
import androidx.annotation.NonNull;
@ -8,9 +10,16 @@ import androidx.annotation.NonNull;
*/
@SuppressWarnings("DeprecatedIsStillUsed")
@Deprecated
class RotationHelper {
static byte[] rotate(@NonNull final byte[] yuv, @NonNull final Size size, final int rotation) {
public class RotationHelper {
/**
* Rotates the given yuv image into another yuv array, by the given angle.
* @param yuv image
* @param size image size
* @param rotation desired angle
* @return a new yuv array
*/
public static byte[] rotate(@NonNull final byte[] yuv, @NonNull final Size size, final int rotation) {
if (rotation == 0) return yuv;
if (rotation % 90 != 0 || rotation < 0 || rotation > 270) {
throw new IllegalArgumentException("0 <= rotation < 360, rotation % 90 == 0");

@ -0,0 +1,111 @@
package com.otaliastudios.cameraview.internal.utils;
import androidx.annotation.NonNull;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
/**
* A naive implementation of {@link java.util.concurrent.CountDownLatch}
* to help in testing.
*/
public class Task<T> {
private CountDownLatch mLatch;
private T mResult;
private int mCount;
/**
* Creates an empty task.
*
* Listeners should:
* - call {@link #listen()} to notify they are interested in the next action
* - call {@link #await()} to know when the action is performed.
*
* Task owners should:
* - call {@link #start()} when task started
* - call {@link #end(Object)} when task ends
*/
public Task() { }
/**
* Creates an empty task, optionally starting to listen right away.
* @param startListening whether to call {@link #listen()}
*/
public Task(boolean startListening) {
if (startListening) listen();
}
private boolean isListening() {
return mLatch != null;
}
/**
* Task owner method: notifies the action started.
*/
public void start() {
if (!isListening()) mCount++;
}
/**
* Task owner method: notifies the action ended.
* @param result the action result
*/
public void end(T result) {
if (mCount > 0) {
mCount--;
return;
}
if (isListening()) { // Should be always true.
mResult = result;
mLatch.countDown();
}
}
/**
* Listener method: notifies we are interested in the next action.
*/
public void listen() {
if (isListening()) throw new RuntimeException("Should not happen.");
mResult = null;
mLatch = new CountDownLatch(1);
}
/**
* Listener method: waits for next task action to end.
* @param millis milliseconds
* @return the action result
*/
public T await(long millis) {
return await(millis, TimeUnit.MILLISECONDS);
}
/**
* Listener method: waits 1 minute for next task action to end.
* @return the action result
*/
public T await() {
return await(1, TimeUnit.MINUTES);
}
/**
* Listener method: waits for next task action to end.
* @param time time
* @param unit the time unit
* @return the action result
*/
private T await(long time, @NonNull TimeUnit unit) {
try {
mLatch.await(time, unit);
} catch (Exception e) {
e.printStackTrace();
}
T result = mResult;
mResult = null;
mLatch = null;
return result;
}
}

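A usage sketch, roughly how the tests use it: the listener side constructs the task with listening enabled and awaits, while the owner side signals start() and end().

import com.otaliastudios.cameraview.internal.utils.Task;

class TaskSketch {
    static String run() {
        final Task<String> task = new Task<>(true); // create and listen()

        new Thread(new Runnable() {
            @Override
            public void run() {
                task.start();
                // ... do the work ...
                task.end("done");
            }
        }).start();

        return task.await(500); // "done", or null if nothing ended within 500 ms
    }
}
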
@ -1,9 +1,11 @@
package com.otaliastudios.cameraview;
package com.otaliastudios.cameraview.internal.utils;
import android.os.Handler;
import android.os.HandlerThread;
import android.os.Looper;
import com.otaliastudios.cameraview.CameraLogger;
import androidx.annotation.NonNull;
import java.lang.ref.WeakReference;
@ -13,14 +15,20 @@ import java.util.concurrent.ConcurrentHashMap;
* Class holding a background handler.
* We want them to survive configuration changes if there's still work to do.
*/
class WorkerHandler {
public class WorkerHandler {
private final static CameraLogger LOG = CameraLogger.create(WorkerHandler.class.getSimpleName());
private final static ConcurrentHashMap<String, WeakReference<WorkerHandler>> sCache = new ConcurrentHashMap<>(4);
/**
* Gets a possibly cached handler with the given name.
* @param name the handler name
* @return a handler
*/
@NonNull
public static WorkerHandler get(@NonNull String name) {
if (sCache.containsKey(name)) {
//noinspection ConstantConditions
WorkerHandler cached = sCache.get(name).get();
if (cached != null) {
HandlerThread thread = cached.mThread;
@ -39,9 +47,13 @@ class WorkerHandler {
return handler;
}
// Handy util to perform action in a fallback thread.
// Not to be used for long-running operations since they will
// block the fallback thread.
/**
* Handy utility to perform an action in a fallback thread.
* Not to be used for long-running operations since they will block
* the fallback thread.
*
* @param action the action
*/
public static void run(@NonNull Runnable action) {
get("FallbackCameraThread").post(action);
}
@ -56,27 +68,48 @@ class WorkerHandler {
mHandler = new Handler(mThread.getLooper());
}
public Handler get() {
return mHandler;
}
/**
* Post an action on this handler.
* @param runnable the action
*/
public void post(@NonNull Runnable runnable) {
mHandler.post(runnable);
}
/**
* Returns the android backing {@link Handler}.
* @return the handler
*/
public Handler get() {
return mHandler;
}
/**
* Returns the android backing {@link HandlerThread}.
* @return the thread
*/
@NonNull
public HandlerThread getThread() {
return mThread;
}
/**
* Returns the android backing {@link Looper}.
* @return the looper
*/
@NonNull
public Looper getLooper() {
return mThread.getLooper();
}
static void destroy() {
/**
* Destroys all handlers, interrupting their work and
* removing them from our cache.
*/
public static void destroy() {
for (String key : sCache.keySet()) {
WeakReference<WorkerHandler> ref = sCache.get(key);
//noinspection ConstantConditions
WorkerHandler handler = ref.get();
if (handler != null && handler.getThread().isAlive()) {
handler.getThread().interrupt();

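A usage sketch (the handler name below is made up): get() returns a cached handler per name, post() queues work on its thread, and the static run() uses the shared fallback thread for short actions.

import com.otaliastudios.cameraview.internal.utils.WorkerHandler;

class WorkerHandlerSketch {
    static void run() {
        WorkerHandler camera = WorkerHandler.get("CameraViewEngine"); // hypothetical name, cached by name
        camera.post(new Runnable() {
            @Override
            public void run() {
                // Background work that survives configuration changes.
            }
        });

        // One-off, short action on the shared fallback thread:
        WorkerHandler.run(new Runnable() {
            @Override
            public void run() {
                // Quick action; must not block for long.
            }
        });
    }
}
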
@ -1,6 +1,11 @@
package com.otaliastudios.cameraview;
package com.otaliastudios.cameraview.picture;
import android.hardware.Camera;
import com.otaliastudios.cameraview.CameraLogger;
import com.otaliastudios.cameraview.PictureResult;
import com.otaliastudios.cameraview.internal.utils.ExifHelper;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.exifinterface.media.ExifInterface;
@ -11,14 +16,15 @@ import java.io.IOException;
/**
* A {@link PictureRecorder} that uses standard APIs.
*/
class FullPictureRecorder extends PictureRecorder {
public class FullPictureRecorder extends PictureRecorder {
private static final String TAG = FullPictureRecorder.class.getSimpleName();
@SuppressWarnings("unused")
private static final CameraLogger LOG = CameraLogger.create(TAG);
private Camera mCamera;
FullPictureRecorder(@NonNull PictureResult stub, @Nullable PictureResultListener listener, @NonNull Camera camera) {
public FullPictureRecorder(@NonNull PictureResult.Stub stub, @Nullable PictureResultListener listener, @NonNull Camera camera) {
super(stub, listener);
mCamera = camera;
@ -29,10 +35,8 @@ class FullPictureRecorder extends PictureRecorder {
mCamera.setParameters(params);
}
// Camera2 constructor here...
@Override
void take() {
public void take() {
mCamera.takePicture(
new Camera.ShutterCallback() {
@Override
@ -49,7 +53,7 @@ class FullPictureRecorder extends PictureRecorder {
try {
ExifInterface exif = new ExifInterface(new ByteArrayInputStream(data));
int exifOrientation = exif.getAttributeInt(ExifInterface.TAG_ORIENTATION, ExifInterface.ORIENTATION_NORMAL);
exifRotation = CameraUtils.readExifOrientation(exifOrientation);
exifRotation = ExifHelper.readExifOrientation(exifOrientation);
} catch (IOException e) {
exifRotation = 0;
}

@ -0,0 +1,75 @@
package com.otaliastudios.cameraview.picture;
import com.otaliastudios.cameraview.PictureResult;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.annotation.VisibleForTesting;
/**
* Interface for picture capturing.
* Don't call start if already started. Don't call stop if already stopped.
* Don't reuse.
*/
public abstract class PictureRecorder {
/**
* Listens for picture recorder events.
*/
public interface PictureResultListener {
/**
* The shutter was activated.
* @param didPlaySound whether a sound was played
*/
void onPictureShutter(boolean didPlaySound);
/**
* Picture was taken, or some error occurred, in which case
* the result is null.
* @param result the result, or null if there was some error
*/
void onPictureResult(@Nullable PictureResult.Stub result);
}
@VisibleForTesting(otherwise = VisibleForTesting.PROTECTED) PictureResult.Stub mResult;
@VisibleForTesting PictureResultListener mListener;
/**
* Creates a new picture recorder.
* @param stub a picture stub
* @param listener a listener
*/
@SuppressWarnings("WeakerAccess")
public PictureRecorder(@NonNull PictureResult.Stub stub, @Nullable PictureResultListener listener) {
mResult = stub;
mListener = listener;
}
/**
* Takes a picture.
*/
public abstract void take();
/**
* Subclasses can call this to notify that the shutter was activated,
* and whether it did play some sound or not.
* @param didPlaySound whether it played sounds
*/
@SuppressWarnings("WeakerAccess")
protected void dispatchOnShutter(boolean didPlaySound) {
if (mListener != null) mListener.onPictureShutter(didPlaySound);
}
/**
* Subclasses can call this to notify that the result was obtained,
* either with some error (null result) or with the actual stub, filled.
*/
protected void dispatchResult() {
if (mListener != null) {
mListener.onPictureResult(mResult);
mListener = null;
mResult = null;
}
}
}

@ -1,6 +1,5 @@
package com.otaliastudios.cameraview;
package com.otaliastudios.cameraview.picture;
import android.annotation.SuppressLint;
import android.annotation.TargetApi;
import android.graphics.Bitmap;
import android.graphics.ImageFormat;
@ -13,6 +12,24 @@ import android.opengl.EGLContext;
import android.opengl.Matrix;
import android.os.Build;
import com.otaliastudios.cameraview.CameraLogger;
import com.otaliastudios.cameraview.PictureResult;
import com.otaliastudios.cameraview.controls.Facing;
import com.otaliastudios.cameraview.engine.Camera1Engine;
import com.otaliastudios.cameraview.engine.CameraEngine;
import com.otaliastudios.cameraview.internal.egl.EglCore;
import com.otaliastudios.cameraview.internal.egl.EglViewport;
import com.otaliastudios.cameraview.internal.egl.EglWindowSurface;
import com.otaliastudios.cameraview.internal.utils.CropHelper;
import com.otaliastudios.cameraview.internal.utils.RotationHelper;
import com.otaliastudios.cameraview.internal.utils.WorkerHandler;
import com.otaliastudios.cameraview.preview.CameraPreview;
import com.otaliastudios.cameraview.preview.GlCameraPreview;
import com.otaliastudios.cameraview.preview.RendererFrameCallback;
import com.otaliastudios.cameraview.preview.RendererThread;
import com.otaliastudios.cameraview.size.AspectRatio;
import com.otaliastudios.cameraview.size.Size;
import androidx.annotation.NonNull;
import java.io.ByteArrayOutputStream;
@ -20,31 +37,36 @@ import java.io.ByteArrayOutputStream;
/**
* A {@link PictureRecorder} that takes a snapshot of the camera preview.
*/
class SnapshotPictureRecorder extends PictureRecorder {
public class SnapshotPictureRecorder extends PictureRecorder {
private static final String TAG = SnapshotPictureRecorder.class.getSimpleName();
private static final CameraLogger LOG = CameraLogger.create(TAG);
private Camera1 mController;
private Camera1Engine mEngine1;
private Camera mCamera;
private CameraPreview mPreview;
private AspectRatio mOutputRatio;
private Size mSensorPreviewSize;
private int mFormat;
SnapshotPictureRecorder(@NonNull PictureResult stub, @NonNull Camera1 controller,
@NonNull Camera camera, @NonNull AspectRatio outputRatio) {
super(stub, controller);
mController = controller;
mPreview = controller.mPreview;
/**
* Camera1 constructor.
*/
public SnapshotPictureRecorder(
@NonNull PictureResult.Stub stub,
@NonNull Camera1Engine engine,
@NonNull CameraPreview preview,
@NonNull Camera camera,
@NonNull AspectRatio outputRatio) {
super(stub, engine);
mEngine1 = engine;
mPreview = preview;
mCamera = camera;
mOutputRatio = outputRatio;
mFormat = mController.mPreviewFormat;
mSensorPreviewSize = mController.mPreviewStreamSize;
mFormat = engine.getPreviewStreamFormat();
}
@Override
void take() {
public void take() {
if (mPreview instanceof GlCameraPreview && Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT) {
takeGl((GlCameraPreview) mPreview);
} else {
@ -54,7 +76,7 @@ class SnapshotPictureRecorder extends PictureRecorder {
@TargetApi(Build.VERSION_CODES.KITKAT)
private void takeGl(@NonNull final GlCameraPreview preview) {
preview.addRendererFrameCallback(new GlCameraPreview.RendererFrameCallback() {
preview.addRendererFrameCallback(new RendererFrameCallback() {
int mTextureId;
SurfaceTexture mSurfaceTexture;
@ -73,7 +95,7 @@ class SnapshotPictureRecorder extends PictureRecorder {
@RendererThread
@Override
public void onRendererFrame(SurfaceTexture surfaceTexture, final float scaleX, final float scaleY) {
public void onRendererFrame(@NonNull SurfaceTexture surfaceTexture, final float scaleX, final float scaleY) {
preview.removeRendererFrameCallback(this);
// This kinda work but has drawbacks:
@ -108,7 +130,7 @@ class SnapshotPictureRecorder extends PictureRecorder {
// Apply scale and crop:
// NOTE: scaleX and scaleY are in REF_VIEW, while our input appears to be in REF_SENSOR.
boolean flip = mController.flip(CameraController.REF_VIEW, CameraController.REF_SENSOR);
boolean flip = mEngine1.flip(CameraEngine.REF_VIEW, CameraEngine.REF_SENSOR);
float realScaleX = flip ? scaleY : scaleX;
float realScaleY = flip ? scaleX : scaleY;
float scaleTranslX = (1F - realScaleX) / 2F;
@ -168,12 +190,17 @@ class SnapshotPictureRecorder extends PictureRecorder {
// Adding EXIF to a byte array, unfortunately, is hard.
final int sensorToOutput = mResult.rotation;
final Size outputSize = mResult.size;
final Size previewStreamSize = mEngine1.getPreviewStreamSize(CameraEngine.REF_SENSOR);
if (previewStreamSize == null) {
throw new IllegalStateException("Preview stream size should never be null here.");
}
WorkerHandler.run(new Runnable() {
@Override
public void run() {
// Rotate the picture, because no one will write EXIF data,
// then crop if needed. In both cases, transform yuv to jpeg.
byte[] data = RotationHelper.rotate(yuv, mSensorPreviewSize, sensorToOutput);
//noinspection deprecation
byte[] data = RotationHelper.rotate(yuv, previewStreamSize, sensorToOutput);
YuvImage yuv = new YuvImage(data, mFormat, outputSize.getWidth(), outputSize.getHeight(), null);
ByteArrayOutputStream stream = new ByteArrayOutputStream();
@ -191,19 +218,18 @@ class SnapshotPictureRecorder extends PictureRecorder {
// It seems that the buffers are already cleared here, so we need to allocate again.
camera.setPreviewCallbackWithBuffer(null); // Release anything left
camera.setPreviewCallbackWithBuffer(mController); // Add ourselves
mController.mFrameManager.allocate(ImageFormat.getBitsPerPixel(mFormat), mController.mPreviewStreamSize);
camera.setPreviewCallbackWithBuffer(mEngine1); // Add ourselves
mEngine1.getFrameManager().allocateBuffers(ImageFormat.getBitsPerPixel(mFormat), previewStreamSize);
}
});
}
@Override
protected void dispatchResult() {
mController = null;
mEngine1 = null;
mCamera = null;
mOutputRatio = null;
mFormat = 0;
mSensorPreviewSize = null;
super.dispatchResult();
}
}

@ -0,0 +1,266 @@
package com.otaliastudios.cameraview.preview;
import android.content.Context;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.annotation.VisibleForTesting;
import android.view.View;
import android.view.ViewGroup;
import com.otaliastudios.cameraview.CameraLogger;
import com.otaliastudios.cameraview.engine.CameraEngine;
import com.otaliastudios.cameraview.internal.utils.Task;
import com.otaliastudios.cameraview.size.Size;
/**
* A CameraPreview takes the input stream from the {@link CameraEngine} and streams it
* into an output surface that belongs to the view hierarchy.
*
* @param <T> the type of view which hosts the content surface
* @param <Output> the type of output, either {@link android.view.SurfaceHolder} or {@link android.graphics.SurfaceTexture}
*/
public abstract class CameraPreview<T extends View, Output> {
protected final static CameraLogger LOG = CameraLogger.create(CameraPreview.class.getSimpleName());
/**
* This is used to notify CameraEngine to recompute its camera Preview size.
* After that, CameraView will need a new layout pass to adapt to the Preview size.
*/
public interface SurfaceCallback {
/**
* Called when the surface is available.
*/
void onSurfaceAvailable();
/**
* Called when the surface has changed.
*/
void onSurfaceChanged();
/**
* Called when the surface was destroyed.
*/
void onSurfaceDestroyed();
}
@VisibleForTesting Task<Void> mCropTask = new Task<>();
private SurfaceCallback mSurfaceCallback;
private T mView;
boolean mCropping;
// These are the surface dimensions in REF_VIEW.
int mOutputSurfaceWidth;
int mOutputSurfaceHeight;
// These are the preview stream dimensions, in REF_VIEW.
int mInputStreamWidth;
int mInputStreamHeight;
/**
* Creates a new preview.
* @param context a context
* @param parent where to inflate our view
* @param callback the callback
*/
public CameraPreview(@NonNull Context context, @NonNull ViewGroup parent, @Nullable SurfaceCallback callback) {
mView = onCreateView(context, parent);
mSurfaceCallback = callback;
}
/**
* Sets a callback to be notified of surface events (creation, change, destruction)
* @param callback a callback
*/
public final void setSurfaceCallback(@NonNull SurfaceCallback callback) {
mSurfaceCallback = callback;
// If surface already available, dispatch.
if (mOutputSurfaceWidth != 0 || mOutputSurfaceHeight != 0) {
mSurfaceCallback.onSurfaceAvailable();
}
}
/**
* Called at creation time. Implementors should inflate the hierarchy into the
* parent ViewGroup, and return the View that actually hosts the surface.
*
* @param context a context
* @param parent where to inflate
* @return the view hosting the Surface
*/
@NonNull
protected abstract T onCreateView(@NonNull Context context, @NonNull ViewGroup parent);
/**
* Returns the view hosting the Surface.
* @return the view
*/
@NonNull
public final T getView() {
return mView;
}
/**
* For testing purposes, should return the root view that was inflated into the
* parent during {@link #onCreateView(Context, ViewGroup)}.
* @return the root view
*/
@SuppressWarnings("unused")
@VisibleForTesting
@NonNull
abstract View getRootView();
/**
* Returns the output surface object (for example a SurfaceHolder
* or a SurfaceTexture).
* @return the surface object
*/
@NonNull
public abstract Output getOutput();
/**
* Returns the type of the output returned by {@link #getOutput()}.
* @return the output type
*/
@NonNull
public abstract Class<Output> getOutputClass();
/**
* Called to notify the preview of the input stream size. The width and height must be
* rotated before calling this, if needed, to be consistent with the VIEW reference.
*
* @param width width of the preview stream, in view coordinates
* @param height height of the preview stream, in view coordinates
*/
public void setStreamSize(int width, int height) {
LOG.i("setStreamSize:", "desiredW=", width, "desiredH=", height);
mInputStreamWidth = width;
mInputStreamHeight = height;
if (mInputStreamWidth > 0 && mInputStreamHeight > 0) {
crop(mCropTask);
}
}
/**
* Returns the current input stream size, in view coordinates.
* @return the current input stream size
*/
@SuppressWarnings("unused")
@NonNull
final Size getStreamSize() {
return new Size(mInputStreamWidth, mInputStreamHeight);
}
/**
* Returns the current output surface size, in view coordinates.
* @return the current output surface size.
*/
@NonNull
public final Size getSurfaceSize() {
return new Size(mOutputSurfaceWidth, mOutputSurfaceHeight);
}
/**
* Whether we have a valid surface already.
* @return whether we have a surface
*/
public final boolean hasSurface() {
return mOutputSurfaceWidth > 0 && mOutputSurfaceHeight > 0;
}
/**
* Subclasses can call this to notify that the surface is available.
* @param width surface width
* @param height surface height
*/
@SuppressWarnings("WeakerAccess")
protected final void dispatchOnSurfaceAvailable(int width, int height) {
LOG.i("dispatchOnSurfaceAvailable:", "w=", width, "h=", height);
mOutputSurfaceWidth = width;
mOutputSurfaceHeight = height;
if (mOutputSurfaceWidth > 0 && mOutputSurfaceHeight > 0) {
crop(mCropTask);
}
mSurfaceCallback.onSurfaceAvailable();
}
/**
* Subclasses can call this to notify that the surface has changed.
* @param width surface width
* @param height surface height
*/
@SuppressWarnings("WeakerAccess")
protected final void dispatchOnSurfaceSizeChanged(int width, int height) {
LOG.i("dispatchOnSurfaceSizeChanged:", "w=", width, "h=", height);
if (width != mOutputSurfaceWidth || height != mOutputSurfaceHeight) {
mOutputSurfaceWidth = width;
mOutputSurfaceHeight = height;
if (width > 0 && height > 0) {
crop(mCropTask);
}
mSurfaceCallback.onSurfaceChanged();
}
}
/**
* Subclasses can call this to notify that the surface has been destroyed.
*/
@SuppressWarnings("WeakerAccess")
protected final void dispatchOnSurfaceDestroyed() {
mOutputSurfaceWidth = 0;
mOutputSurfaceHeight = 0;
mSurfaceCallback.onSurfaceDestroyed();
}
/**
* Called by the hosting {@link com.otaliastudios.cameraview.CameraView},
* this is a lifecycle event.
*/
public void onResume() {}
/**
* Called by the hosting {@link com.otaliastudios.cameraview.CameraView},
* this is a lifecycle event.
*/
public void onPause() {}
/**
* Called by the hosting {@link com.otaliastudios.cameraview.CameraView},
* this is a lifecycle event.
*/
public void onDestroy() {}
/**
* Here we must crop the visible part by applying a > 1 scale to one of our
* dimensions. This way our internal aspect ratio (mOutputSurfaceWidth / mOutputSurfaceHeight)
* will match the preview size aspect ratio (mInputStreamWidth / mInputStreamHeight).
*
* There might still be some absolute difference (e.g. same ratio but bigger / smaller).
* However, that should already be managed by the framework.
*/
protected void crop(@NonNull Task<Void> task) {
// The base implementation does not support cropping.
task.start();
task.end(null);
}
/**
* Whether this preview implementation supports cropping.
* The base implementation does not, but it is strongly recommended that subclasses do.
* @return true if cropping is supported
*/
public boolean supportsCropping() {
return false;
}
/**
* Whether we are currently cropping the output.
* If false, this means that the output image will match the visible bounds.
* @return true if cropping
*/
public boolean isCropping() {
return mCropping;
}
}
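To make the contract above concrete, a bare-bones subclass could look like the hypothetical sketch below (illustration only, not a class that exists in the library; it assumes the same package as CameraPreview so that the package-private getRootView() can be overridden):
// Hypothetical minimal CameraPreview implementation, for illustration only.
class SimpleSurfacePreview extends CameraPreview<SurfaceView, SurfaceHolder> {

    SimpleSurfacePreview(@NonNull Context context, @NonNull ViewGroup parent, @Nullable SurfaceCallback callback) {
        super(context, parent, callback);
    }

    @NonNull
    @Override
    protected SurfaceView onCreateView(@NonNull Context context, @NonNull ViewGroup parent) {
        SurfaceView view = new SurfaceView(context);
        view.getHolder().addCallback(new SurfaceHolder.Callback() {
            @Override public void surfaceCreated(SurfaceHolder holder) { /* wait for a size */ }

            @Override public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
                if (!hasSurface()) {
                    dispatchOnSurfaceAvailable(width, height);   // first real size: the engine can start
                } else {
                    dispatchOnSurfaceSizeChanged(width, height); // subsequent changes
                }
            }

            @Override public void surfaceDestroyed(SurfaceHolder holder) {
                dispatchOnSurfaceDestroyed();
            }
        });
        parent.addView(view, 0);
        return view;
    }

    @NonNull @Override View getRootView() { return getView(); }

    @NonNull @Override public SurfaceHolder getOutput() { return getView().getHolder(); }

    @NonNull @Override public Class<SurfaceHolder> getOutputClass() { return SurfaceHolder.class; }
}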

@ -1,4 +1,4 @@
package com.otaliastudios.cameraview;
package com.otaliastudios.cameraview.preview;
import android.content.Context;
import android.graphics.SurfaceTexture;
@ -6,12 +6,18 @@ import android.opengl.GLSurfaceView;
import android.opengl.Matrix;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.annotation.VisibleForTesting;
import android.view.LayoutInflater;
import android.view.SurfaceHolder;
import android.view.View;
import android.view.ViewGroup;
import com.otaliastudios.cameraview.R;
import com.otaliastudios.cameraview.internal.egl.EglViewport;
import com.otaliastudios.cameraview.internal.utils.Task;
import com.otaliastudios.cameraview.size.AspectRatio;
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;
@ -24,8 +30,8 @@ import javax.microedition.khronos.opengles.GL10;
*
* - in the SurfaceTexture constructor we pass the GL texture handle that we have created.
*
* - The SurfaceTexture is linked to the Camera1 object. The camera will pass down buffers of data with
* a specified size (that is, the Camera1 preview size). For this reason we don't have to specify
* - The SurfaceTexture is linked to the Camera1Engine object. The camera will pass down buffers of data with
* a specified size (that is, the Camera1Engine preview size). For this reason we don't have to specify
* surfaceTexture.setDefaultBufferSize() (like we do, for example, in SnapshotPictureRecorder).
*
* - When SurfaceTexture.updateTexImage() is called, it will fetch the latest texture image from the
@ -51,7 +57,7 @@ import javax.microedition.khronos.opengles.GL10;
* Callbacks are guaranteed to be called on the renderer thread, which means that we can fetch
* the GL context that was created and is managed by the {@link GLSurfaceView}.
*/
class GlCameraPreview extends CameraPreview<GLSurfaceView, SurfaceTexture> implements GLSurfaceView.Renderer {
public class GlCameraPreview extends CameraPreview<GLSurfaceView, SurfaceTexture> implements GLSurfaceView.Renderer {
private boolean mDispatched;
private final float[] mTransformMatrix = new float[16];
@ -59,12 +65,35 @@ class GlCameraPreview extends CameraPreview<GLSurfaceView, SurfaceTexture> imple
private SurfaceTexture mInputSurfaceTexture;
private EglViewport mOutputViewport;
private Set<RendererFrameCallback> mRendererFrameCallbacks = Collections.synchronizedSet(new HashSet<RendererFrameCallback>());
/* for tests */ float mScaleX = 1F;
/* for tests */ float mScaleY = 1F;
@VisibleForTesting float mScaleX = 1F;
@VisibleForTesting float mScaleY = 1F;
private View mRootView;
GlCameraPreview(@NonNull Context context, @NonNull ViewGroup parent, @Nullable SurfaceCallback callback) {
/**
* Method specific to the GL preview. Adds a {@link RendererFrameCallback}
* to receive renderer frame events.
* @param callback a callback
*/
public void addRendererFrameCallback(@NonNull final RendererFrameCallback callback) {
getView().queueEvent(new Runnable() {
@Override
public void run() {
mRendererFrameCallbacks.add(callback);
if (mOutputTextureId != 0) callback.onRendererTextureCreated(mOutputTextureId);
}
});
}
/**
* Method specific to the GL preview. Removes a {@link RendererFrameCallback}
* that was previously added to receive renderer frame events.
* @param callback a callback
*/
public void removeRendererFrameCallback(@NonNull final RendererFrameCallback callback) {
mRendererFrameCallbacks.remove(callback);
}
public GlCameraPreview(@NonNull Context context, @NonNull ViewGroup parent, @Nullable SurfaceCallback callback) {
super(context, parent, callback);
}
@ -199,18 +228,18 @@ class GlCameraPreview extends CameraPreview<GLSurfaceView, SurfaceTexture> imple
@NonNull
@Override
Class<SurfaceTexture> getOutputClass() {
public Class<SurfaceTexture> getOutputClass() {
return SurfaceTexture.class;
}
@NonNull
@Override
SurfaceTexture getOutput() {
public SurfaceTexture getOutput() {
return mInputSurfaceTexture;
}
@Override
boolean supportsCropping() {
public boolean supportsCropping() {
return true;
}
@ -227,8 +256,8 @@ class GlCameraPreview extends CameraPreview<GLSurfaceView, SurfaceTexture> imple
* then drawing it with a scaled transformation matrix. See {@link #onDrawFrame(GL10)}.
*/
@Override
protected void crop() {
mCropTask.start();
protected void crop(@NonNull Task<Void> task) {
task.start();
if (mInputStreamWidth > 0 && mInputStreamHeight > 0 && mOutputSurfaceWidth > 0 && mOutputSurfaceHeight > 0) {
float scaleX = 1f, scaleY = 1f;
AspectRatio current = AspectRatio.of(mOutputSurfaceWidth, mOutputSurfaceHeight);
@ -245,44 +274,6 @@ class GlCameraPreview extends CameraPreview<GLSurfaceView, SurfaceTexture> imple
mScaleY = 1F / scaleY;
getView().requestRender();
}
mCropTask.end(null);
}
interface RendererFrameCallback {
/**
* Called on the renderer thread, hopefully only once, to notify that
* the texture was created (or to inform a new callback of the old texture).
*
* @param textureId the GL texture linked to the image stream
*/
@RendererThread
void onRendererTextureCreated(int textureId);
/**
* Called on the renderer thread after each frame was drawn.
* You are not supposed to hold for too long onto this thread, because
* well, it is the rendering thread.
*
* @param surfaceTexture the texture to get transformation
* @param scaleX the scaleX (in REF_VIEW) value
* @param scaleY the scaleY (in REF_VIEW) value
*/
@RendererThread
void onRendererFrame(SurfaceTexture surfaceTexture, float scaleX, float scaleY);
}
void addRendererFrameCallback(@NonNull final RendererFrameCallback callback) {
getView().queueEvent(new Runnable() {
@Override
public void run() {
mRendererFrameCallbacks.add(callback);
if (mOutputTextureId != 0) callback.onRendererTextureCreated(mOutputTextureId);
}
});
}
void removeRendererFrameCallback(@NonNull final RendererFrameCallback callback) {
mRendererFrameCallbacks.remove(callback);
task.end(null);
}
}
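The crop in the GL (and Texture) preview boils down to comparing the two aspect ratios and stretching exactly one axis by a factor greater than or equal to 1. A plain-arithmetic sketch of that step (illustrative, not the exact library code):
// Illustrative only: compute the per-axis scale that the crop applies.
static float[] cropScale(int surfaceW, int surfaceH, int streamW, int streamH) {
    float surfaceRatio = (float) surfaceW / surfaceH;
    float streamRatio = (float) streamW / streamH;
    float scaleX = 1f, scaleY = 1f;
    if (streamRatio > surfaceRatio) {
        scaleX = streamRatio / surfaceRatio; // stream wider than surface: crop left/right
    } else {
        scaleY = surfaceRatio / streamRatio; // stream taller than surface: crop top/bottom
    }
    return new float[] { scaleX, scaleY };
}
The GL renderer then stores the inverse (mScaleX = 1 / scaleX, mScaleY = 1 / scaleY) and applies it to the texture transform, so only the visible region fills the surface.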

@ -0,0 +1,32 @@
package com.otaliastudios.cameraview.preview;
import android.graphics.SurfaceTexture;
import androidx.annotation.NonNull;
/**
* Callback for renderer frames.
*/
public interface RendererFrameCallback {
/**
* Called on the renderer thread, hopefully only once, to notify that
* the texture was created (or to inform a new callback of the old texture).
*
* @param textureId the GL texture linked to the image stream
*/
@RendererThread
void onRendererTextureCreated(int textureId);
/**
* Called on the renderer thread after each frame is drawn.
* You should not hold onto this thread for too long: it is the
* rendering thread.
*
* @param surfaceTexture the texture to read the transformation from
* @param scaleX the scaleX (in REF_VIEW) value
* @param scaleY the scaleY (in REF_VIEW) value
*/
@RendererThread
void onRendererFrame(@NonNull SurfaceTexture surfaceTexture, float scaleX, float scaleY);
}
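A hedged usage sketch of this callback, relying only on the addRendererFrameCallback/removeRendererFrameCallback methods shown above and assuming an already-constructed GlCameraPreview named glPreview (a placeholder):
// Placeholder usage: glPreview is assumed to be a GlCameraPreview created elsewhere.
RendererFrameCallback callback = new RendererFrameCallback() {
    @Override
    public void onRendererTextureCreated(int textureId) {
        // Runs on the renderer thread, with the GL texture backing the preview stream.
    }

    @Override
    public void onRendererFrame(@NonNull SurfaceTexture surfaceTexture, float scaleX, float scaleY) {
        // Runs on the renderer thread after every drawn frame. Keep this fast.
    }
};
glPreview.addRendererFrameCallback(callback);
// ... later, when frames are no longer needed:
glPreview.removeRendererFrameCallback(callback);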

@ -0,0 +1,6 @@
package com.otaliastudios.cameraview.preview;
/**
* Indicates that some action is being executed on the renderer thread.
*/
public @interface RendererThread {}

@ -1,4 +1,4 @@
package com.otaliastudios.cameraview;
package com.otaliastudios.cameraview.preview;
import android.content.Context;
import androidx.annotation.NonNull;
@ -9,18 +9,24 @@ import android.view.SurfaceView;
import android.view.View;
import android.view.ViewGroup;
// Fallback preview when hardware acceleration is off.
// Currently this does NOT support cropping (e. g. the crop inside behavior),
// so we return false in supportsCropping() in order to have proper measuring.
// This means that CameraView is forced to be wrap_content.
class SurfaceCameraPreview extends CameraPreview<SurfaceView, SurfaceHolder> {
import com.otaliastudios.cameraview.CameraLogger;
import com.otaliastudios.cameraview.R;
/**
* This is the fallback preview when hardware acceleration is off, and is the last resort.
* It currently does not support cropping, which means that {@link com.otaliastudios.cameraview.CameraView}
* is forced to be wrap_content.
*
* Do not use.
*/
public class SurfaceCameraPreview extends CameraPreview<SurfaceView, SurfaceHolder> {
private final static CameraLogger LOG = CameraLogger.create(SurfaceCameraPreview.class.getSimpleName());
private boolean mDispatched;
private View mRootView;
SurfaceCameraPreview(@NonNull Context context, @NonNull ViewGroup parent, @Nullable SurfaceCallback callback) {
public SurfaceCameraPreview(@NonNull Context context, @NonNull ViewGroup parent, @Nullable SurfaceCallback callback) {
super(context, parent, callback);
}
@ -31,6 +37,7 @@ class SurfaceCameraPreview extends CameraPreview<SurfaceView, SurfaceHolder> {
parent.addView(root, 0);
SurfaceView surfaceView = root.findViewById(R.id.surface_view);
final SurfaceHolder holder = surfaceView.getHolder();
//noinspection deprecation
holder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
holder.addCallback(new SurfaceHolder.Callback() {
@ -70,13 +77,13 @@ class SurfaceCameraPreview extends CameraPreview<SurfaceView, SurfaceHolder> {
@NonNull
@Override
SurfaceHolder getOutput() {
public SurfaceHolder getOutput() {
return getView().getHolder();
}
@NonNull
@Override
Class<SurfaceHolder> getOutputClass() {
public Class<SurfaceHolder> getOutputClass() {
return SurfaceHolder.class;
}

@ -1,4 +1,4 @@
package com.otaliastudios.cameraview;
package com.otaliastudios.cameraview.preview;
import android.annotation.TargetApi;
import android.content.Context;
@ -10,11 +10,19 @@ import android.view.TextureView;
import android.view.View;
import android.view.ViewGroup;
class TextureCameraPreview extends CameraPreview<TextureView, SurfaceTexture> {
import com.otaliastudios.cameraview.R;
import com.otaliastudios.cameraview.internal.utils.Task;
import com.otaliastudios.cameraview.size.AspectRatio;
/**
* A preview implementation based on {@link TextureView}.
* Better than {@link SurfaceCameraPreview} but much less powerful than {@link GlCameraPreview}.
*/
public class TextureCameraPreview extends CameraPreview<TextureView, SurfaceTexture> {
private View mRootView;
TextureCameraPreview(@NonNull Context context, @NonNull ViewGroup parent, @Nullable SurfaceCallback callback) {
public TextureCameraPreview(@NonNull Context context, @NonNull ViewGroup parent, @Nullable SurfaceCallback callback) {
super(context, parent, callback);
}
@ -58,39 +66,39 @@ class TextureCameraPreview extends CameraPreview<TextureView, SurfaceTexture> {
@NonNull
@Override
Class<SurfaceTexture> getOutputClass() {
public Class<SurfaceTexture> getOutputClass() {
return SurfaceTexture.class;
}
@NonNull
@Override
SurfaceTexture getOutput() {
public SurfaceTexture getOutput() {
return getView().getSurfaceTexture();
}
@TargetApi(15)
@Override
void setStreamSize(int width, int height, boolean wasFlipped) {
super.setStreamSize(width, height, wasFlipped);
public void setStreamSize(int width, int height) {
super.setStreamSize(width, height);
if (getView().getSurfaceTexture() != null) {
getView().getSurfaceTexture().setDefaultBufferSize(width, height);
}
}
@Override
boolean supportsCropping() {
public boolean supportsCropping() {
return true;
}
@Override
protected void crop() {
mCropTask.start();
protected void crop(final @NonNull Task<Void> task) {
task.start();
getView().post(new Runnable() {
@Override
public void run() {
if (mInputStreamHeight == 0 || mInputStreamWidth == 0 ||
mOutputSurfaceHeight == 0 || mOutputSurfaceWidth == 0) {
mCropTask.end(null);
task.end(null);
return;
}
float scaleX = 1f, scaleY = 1f;
@ -110,7 +118,7 @@ class TextureCameraPreview extends CameraPreview<TextureView, SurfaceTexture> {
mCropping = scaleX > 1.02f || scaleY > 1.02f;
LOG.i("crop:", "applied scaleX=", scaleX);
LOG.i("crop:", "applied scaleY=", scaleY);
mCropTask.end(null);
task.end(null);
}
});
}

@ -1,6 +1,7 @@
package com.otaliastudios.cameraview;
package com.otaliastudios.cameraview.size;
import androidx.annotation.NonNull;
import androidx.annotation.VisibleForTesting;
import java.util.HashMap;
@ -10,7 +11,7 @@ import java.util.HashMap;
*/
public class AspectRatio implements Comparable<AspectRatio> {
final static HashMap<String, AspectRatio> sCache = new HashMap<>(16);
@VisibleForTesting final static HashMap<String, AspectRatio> sCache = new HashMap<>(16);
/**
* Creates an aspect ratio for the given size.
@ -100,6 +101,7 @@ public class AspectRatio implements Comparable<AspectRatio> {
return false;
}
@NonNull
@Override
public String toString() {
return mX + ":" + mY;
@ -125,9 +127,13 @@ public class AspectRatio implements Comparable<AspectRatio> {
return -1;
}
/**
* Returns a flipped aspect ratio, which means inverting its dimensions.
* @return a flipped aspect ratio
*/
@SuppressWarnings("SuspiciousNameCombination")
@NonNull
public AspectRatio inverse() {
public AspectRatio flip() {
return AspectRatio.of(mY, mX);
}
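In short, the renamed flip() just swaps the two terms; for example (using only the of/flip/toString members shown above):
AspectRatio ratio = AspectRatio.of(16, 9);
AspectRatio flipped = ratio.flip(); // equivalent to AspectRatio.of(9, 16)
String label = flipped.toString();  // "9:16"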

@ -1,4 +1,4 @@
package com.otaliastudios.cameraview;
package com.otaliastudios.cameraview.size;
import androidx.annotation.NonNull;
@ -10,7 +10,7 @@ public class Size implements Comparable<Size> {
private final int mWidth;
private final int mHeight;
Size(int width, int height) {
public Size(int width, int height) {
mWidth = width;
mHeight = height;
}
@ -23,8 +23,14 @@ public class Size implements Comparable<Size> {
return mHeight;
}
/**
* Returns a flipped size, with height equal to this size's width
* and width equal to this size's height.
*
* @return a flipped size
*/
@SuppressWarnings("SuspiciousNameCombination")
Size flip() {
public Size flip() {
return new Size(mHeight, mWidth);
}
@ -43,6 +49,7 @@ public class Size implements Comparable<Size> {
return false;
}
@NonNull
@Override
public String toString() {
return mWidth + "x" + mHeight;
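Size.flip() works the same way; FullVideoRecorder further below uses it to swap dimensions when a 90 or 270 degree rotation is involved, roughly like this (the rotation variable is a placeholder):
Size size = new Size(1920, 1080);
Size oriented = (rotation % 180 != 0) ? size.flip() : size; // 1080x1920 when rotated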

@ -1,4 +1,4 @@
package com.otaliastudios.cameraview;
package com.otaliastudios.cameraview.size;
import androidx.annotation.NonNull;

@ -0,0 +1,92 @@
package com.otaliastudios.cameraview.size;
import android.content.res.TypedArray;
import com.otaliastudios.cameraview.R;
import java.util.ArrayList;
import java.util.List;
import androidx.annotation.NonNull;
/**
* Parses size selectors from XML attributes.
*/
public class SizeSelectorParser {
private SizeSelector pictureSizeSelector;
private SizeSelector videoSizeSelector;
public SizeSelectorParser(@NonNull TypedArray array) {
List<SizeSelector> pictureConstraints = new ArrayList<>(3);
if (array.hasValue(R.styleable.CameraView_cameraPictureSizeMinWidth)) {
pictureConstraints.add(SizeSelectors.minWidth(array.getInteger(R.styleable.CameraView_cameraPictureSizeMinWidth, 0)));
}
if (array.hasValue(R.styleable.CameraView_cameraPictureSizeMaxWidth)) {
pictureConstraints.add(SizeSelectors.maxWidth(array.getInteger(R.styleable.CameraView_cameraPictureSizeMaxWidth, 0)));
}
if (array.hasValue(R.styleable.CameraView_cameraPictureSizeMinHeight)) {
pictureConstraints.add(SizeSelectors.minHeight(array.getInteger(R.styleable.CameraView_cameraPictureSizeMinHeight, 0)));
}
if (array.hasValue(R.styleable.CameraView_cameraPictureSizeMaxHeight)) {
pictureConstraints.add(SizeSelectors.maxHeight(array.getInteger(R.styleable.CameraView_cameraPictureSizeMaxHeight, 0)));
}
if (array.hasValue(R.styleable.CameraView_cameraPictureSizeMinArea)) {
pictureConstraints.add(SizeSelectors.minArea(array.getInteger(R.styleable.CameraView_cameraPictureSizeMinArea, 0)));
}
if (array.hasValue(R.styleable.CameraView_cameraPictureSizeMaxArea)) {
pictureConstraints.add(SizeSelectors.maxArea(array.getInteger(R.styleable.CameraView_cameraPictureSizeMaxArea, 0)));
}
if (array.hasValue(R.styleable.CameraView_cameraPictureSizeAspectRatio)) {
//noinspection ConstantConditions
pictureConstraints.add(SizeSelectors.aspectRatio(AspectRatio.parse(array.getString(R.styleable.CameraView_cameraPictureSizeAspectRatio)), 0));
}
if (array.getBoolean(R.styleable.CameraView_cameraPictureSizeSmallest, false)) pictureConstraints.add(SizeSelectors.smallest());
if (array.getBoolean(R.styleable.CameraView_cameraPictureSizeBiggest, false)) pictureConstraints.add(SizeSelectors.biggest());
pictureSizeSelector = !pictureConstraints.isEmpty() ?
SizeSelectors.and(pictureConstraints.toArray(new SizeSelector[0])) :
SizeSelectors.biggest();
// Video size selector
List<SizeSelector> videoConstraints = new ArrayList<>(3);
if (array.hasValue(R.styleable.CameraView_cameraVideoSizeMinWidth)) {
videoConstraints.add(SizeSelectors.minWidth(array.getInteger(R.styleable.CameraView_cameraVideoSizeMinWidth, 0)));
}
if (array.hasValue(R.styleable.CameraView_cameraVideoSizeMaxWidth)) {
videoConstraints.add(SizeSelectors.maxWidth(array.getInteger(R.styleable.CameraView_cameraVideoSizeMaxWidth, 0)));
}
if (array.hasValue(R.styleable.CameraView_cameraVideoSizeMinHeight)) {
videoConstraints.add(SizeSelectors.minHeight(array.getInteger(R.styleable.CameraView_cameraVideoSizeMinHeight, 0)));
}
if (array.hasValue(R.styleable.CameraView_cameraVideoSizeMaxHeight)) {
videoConstraints.add(SizeSelectors.maxHeight(array.getInteger(R.styleable.CameraView_cameraVideoSizeMaxHeight, 0)));
}
if (array.hasValue(R.styleable.CameraView_cameraVideoSizeMinArea)) {
videoConstraints.add(SizeSelectors.minArea(array.getInteger(R.styleable.CameraView_cameraVideoSizeMinArea, 0)));
}
if (array.hasValue(R.styleable.CameraView_cameraVideoSizeMaxArea)) {
videoConstraints.add(SizeSelectors.maxArea(array.getInteger(R.styleable.CameraView_cameraVideoSizeMaxArea, 0)));
}
if (array.hasValue(R.styleable.CameraView_cameraVideoSizeAspectRatio)) {
//noinspection ConstantConditions
videoConstraints.add(SizeSelectors.aspectRatio(AspectRatio.parse(array.getString(R.styleable.CameraView_cameraVideoSizeAspectRatio)), 0));
}
if (array.getBoolean(R.styleable.CameraView_cameraVideoSizeSmallest, false)) videoConstraints.add(SizeSelectors.smallest());
if (array.getBoolean(R.styleable.CameraView_cameraVideoSizeBiggest, false)) videoConstraints.add(SizeSelectors.biggest());
videoSizeSelector = !videoConstraints.isEmpty() ?
SizeSelectors.and(videoConstraints.toArray(new SizeSelector[0])) :
SizeSelectors.biggest();
}
@NonNull
public SizeSelector getPictureSizeSelector() {
return pictureSizeSelector;
}
@NonNull
public SizeSelector getVideoSizeSelector() {
return videoSizeSelector;
}
}
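The XML parsing above mirrors what can be built programmatically with SizeSelectors; a hedged sketch of an equivalent picture selector (the numeric values are arbitrary examples, not defaults):
// Programmatic counterpart of a few of the XML constraints parsed above.
SizeSelector pictureSelector = SizeSelectors.and(
        SizeSelectors.minWidth(1000),
        SizeSelectors.maxArea(1920 * 1080),
        SizeSelectors.aspectRatio(AspectRatio.of(16, 9), 0)
);
// Like the parser, fall back to the biggest available size when no constraint is set:
SizeSelector fallback = SizeSelectors.biggest();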

@ -1,4 +1,4 @@
package com.otaliastudios.cameraview;
package com.otaliastudios.cameraview.size;
import androidx.annotation.NonNull;

@ -1,27 +1,35 @@
package com.otaliastudios.cameraview;
package com.otaliastudios.cameraview.video;
import android.hardware.Camera;
import android.media.CamcorderProfile;
import android.media.MediaRecorder;
import com.otaliastudios.cameraview.CameraLogger;
import com.otaliastudios.cameraview.VideoResult;
import com.otaliastudios.cameraview.controls.Audio;
import com.otaliastudios.cameraview.engine.Camera1Engine;
import com.otaliastudios.cameraview.internal.utils.CamcorderProfiles;
import com.otaliastudios.cameraview.size.Size;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
/**
* A {@link VideoRecorder} that uses {@link android.media.MediaRecorder} APIs.
*/
class FullVideoRecorder extends VideoRecorder {
public class FullVideoRecorder extends VideoRecorder {
private static final String TAG = FullVideoRecorder.class.getSimpleName();
private static final CameraLogger LOG = CameraLogger.create(TAG);
private MediaRecorder mMediaRecorder;
private CamcorderProfile mProfile;
private Camera1 mController;
private Camera1Engine mController;
private Camera mCamera;
private Size mSize;
FullVideoRecorder(@NonNull VideoResult stub, @Nullable VideoResultListener listener,
@NonNull Camera1 controller, @NonNull Camera camera, int cameraId) {
public FullVideoRecorder(@NonNull VideoResult.Stub stub, @Nullable VideoResultListener listener,
@NonNull Camera1Engine controller, @NonNull Camera camera, int cameraId) {
super(stub, listener);
mCamera = camera;
mController = controller;
@ -30,15 +38,13 @@ class FullVideoRecorder extends VideoRecorder {
mMediaRecorder.setVideoSource(MediaRecorder.VideoSource.CAMERA);
// Get a profile of quality compatible with the chosen size.
mSize = mResult.getRotation() % 180 != 0 ? mResult.getSize().flip() : mResult.getSize();
mSize = mResult.rotation % 180 != 0 ? mResult.size.flip() : mResult.size;
mProfile = CamcorderProfiles.get(cameraId, mSize);
}
// Camera2 constructor here...
@Override
void start() {
if (mResult.getAudio() == Audio.ON) {
public void start() {
if (mResult.audio == Audio.ON) {
// Must be called before setOutputFormat.
mMediaRecorder.setAudioSource(MediaRecorder.AudioSource.DEFAULT);
}
@ -51,7 +57,7 @@ class FullVideoRecorder extends VideoRecorder {
mMediaRecorder.setVideoFrameRate(mResult.videoFrameRate);
}
mMediaRecorder.setVideoSize(mSize.getWidth(), mSize.getHeight());
switch (mResult.getVideoCodec()) {
switch (mResult.videoCodec) {
case H_263: mMediaRecorder.setVideoEncoder(MediaRecorder.VideoEncoder.H263); break;
case H_264: mMediaRecorder.setVideoEncoder(MediaRecorder.VideoEncoder.H264); break;
case DEVICE_DEFAULT: mMediaRecorder.setVideoEncoder(mProfile.videoCodec); break;
@ -62,7 +68,7 @@ class FullVideoRecorder extends VideoRecorder {
} else {
mMediaRecorder.setVideoEncodingBitRate(mResult.videoBitRate);
}
if (mResult.getAudio() == Audio.ON) {
if (mResult.audio == Audio.ON) {
mMediaRecorder.setAudioChannels(mProfile.audioChannels);
mMediaRecorder.setAudioSamplingRate(mProfile.audioSampleRate);
mMediaRecorder.setAudioEncoder(mProfile.audioCodec);
@ -73,15 +79,15 @@ class FullVideoRecorder extends VideoRecorder {
mMediaRecorder.setAudioEncodingBitRate(mResult.audioBitRate);
}
}
if (mResult.getLocation() != null) {
if (mResult.location != null) {
mMediaRecorder.setLocation(
(float) mResult.getLocation().getLatitude(),
(float) mResult.getLocation().getLongitude());
(float) mResult.location.getLatitude(),
(float) mResult.location.getLongitude());
}
mMediaRecorder.setOutputFile(mResult.getFile().getAbsolutePath());
mMediaRecorder.setOrientationHint(mResult.getRotation());
mMediaRecorder.setMaxFileSize(mResult.getMaxSize());
mMediaRecorder.setMaxDuration(mResult.getMaxDuration());
mMediaRecorder.setOutputFile(mResult.file.getAbsolutePath());
mMediaRecorder.setOrientationHint(mResult.rotation);
mMediaRecorder.setMaxFileSize(mResult.maxSize);
mMediaRecorder.setMaxDuration(mResult.maxDuration);
mMediaRecorder.setOnInfoListener(new MediaRecorder.OnInfoListener() {
@Override
public void onInfo(MediaRecorder mediaRecorder, int what, int extra) {
@ -111,7 +117,7 @@ class FullVideoRecorder extends VideoRecorder {
}
@Override
void stop() {
public void stop() {
if (mMediaRecorder != null) {
try {
mMediaRecorder.stop();
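For orientation, the MediaRecorder call order that FullVideoRecorder relies on is roughly the following condensed sketch (not the full implementation; camera, profile, size, file and rotation stand in for the recorder's own fields, and error handling is omitted):
MediaRecorder recorder = new MediaRecorder();
recorder.setCamera(camera);                                  // Camera1: the camera must be unlocked first
recorder.setVideoSource(MediaRecorder.VideoSource.CAMERA);
recorder.setAudioSource(MediaRecorder.AudioSource.DEFAULT);  // audio only; must precede setOutputFormat
recorder.setOutputFormat(profile.fileFormat);                // from the CamcorderProfile
recorder.setVideoFrameRate(profile.videoFrameRate);
recorder.setVideoSize(size.getWidth(), size.getHeight());
recorder.setVideoEncoder(profile.videoCodec);
recorder.setVideoEncodingBitRate(profile.videoBitRate);
recorder.setAudioChannels(profile.audioChannels);
recorder.setAudioSamplingRate(profile.audioSampleRate);
recorder.setAudioEncoder(profile.audioCodec);
recorder.setAudioEncodingBitRate(profile.audioBitRate);
recorder.setOutputFile(file.getAbsolutePath());
recorder.setOrientationHint(rotation);
recorder.prepare(); // may throw IOException; handling omitted in this sketch
recorder.start();
// ... and eventually:
recorder.stop();
recorder.release();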
