CameraController integration tests

pull/37/head
Mattia Iavarone 8 years ago
parent 70447d1cbf
commit fbd381c40f
  1. .travis.yml (2)
  2. README.md (1)
  3. cameraview/src/androidTest/AndroidManifest.xml (1)
  4. cameraview/src/androidTest/java/com/otaliastudios/cameraview/BaseTest.java (36)
  5. cameraview/src/androidTest/java/com/otaliastudios/cameraview/CameraControllerIntegrationTest.java (305)
  6. cameraview/src/androidTest/java/com/otaliastudios/cameraview/CameraViewTest.java (3)
  7. cameraview/src/androidTest/java/com/otaliastudios/cameraview/MockCameraController.java (3)
  8. cameraview/src/androidTest/java/com/otaliastudios/cameraview/WorkerHandlerTest.java (16)
  9. cameraview/src/main/java/com/otaliastudios/cameraview/Camera1.java (52)
  10. cameraview/src/main/java/com/otaliastudios/cameraview/Camera2.java (16)
  11. cameraview/src/main/java/com/otaliastudios/cameraview/CameraController.java (35)
  12. cameraview/src/main/java/com/otaliastudios/cameraview/CameraView.java (116)
  13. cameraview/src/main/utils/com/otaliastudios/cameraview/CameraUtils.java (4)
  14. cameraview/src/main/utils/com/otaliastudios/cameraview/WorkerHandler.java (7)
  15. codecov.yml (2)

@ -30,7 +30,7 @@ android:
before_script:
- echo no | android create avd --force --name test --target android-$EMULATOR_API --abi $EMULATOR_ABI
- emulator -avd test -no-audio -no-window &
- emulator -avd test -no-audio -no-window -camera-back emulated -camera-front emulated &
- android-wait-for-emulator
- adb shell input keyevent 82 &

@ -422,6 +422,7 @@ Other APIs not mentioned above are provided, and are well documented and comment
|`toggleFlash()`|Toggles the flash value between `Flash.OFF`, `Flash.ON`, and `Flash.AUTO`.|
|`setLocation(Location)`|Sets location data to be appended to picture/video metadata.|
|`setLocation(double, double)`|Sets latitude and longitude to be appended to picture/video metadata.|
|`getLocation()`|Retrieves location data previously applied with setLocation().|
|`startAutoFocus(float, float)`|Starts an autofocus process at the given coordinates, with respect to the view dimensions.|
|`getPreviewSize()`|Returns the size of the preview surface. If CameraView was not constrained in its layout phase (e.g. it was `wrap_content`), the returned size will have the same aspect ratio as CameraView.|
|`getSnapshotSize()`|Returns `getPreviewSize()`, since a snapshot is a preview frame.|
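
For reference, here is a minimal usage sketch combining some of the calls above. It assumes a `CameraView` instance named `camera` that has already been started; the variable names are illustrative, only the documented APIs come from the table.

```java
// Assumes `camera` is a CameraView that has already been started.
camera.setLocation(45.4642, 9.1900);     // appended to picture/video metadata
Location location = camera.getLocation(); // retrieves what was set above

camera.toggleFlash();                     // cycles Flash.OFF -> ON -> AUTO

// Coordinates are relative to the view dimensions.
camera.startAutoFocus(camera.getWidth() / 2f, camera.getHeight() / 2f);

Size previewSize = camera.getPreviewSize();
Size snapshotSize = camera.getSnapshotSize(); // same as getPreviewSize()
```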

@ -6,6 +6,7 @@
<application>
<activity
android:configChanges="orientation|screenLayout|keyboardHidden"
android:hardwareAccelerated="true"
android:name=".TestActivity"/>
</application>

@ -9,6 +9,7 @@ import android.os.Handler;
import android.os.Looper;
import android.support.test.InstrumentationRegistry;
import android.support.test.annotation.UiThreadTest;
import android.support.test.espresso.core.internal.deps.guava.collect.ObjectArrays;
import android.support.test.rule.ActivityTestRule;
import android.view.View;
@ -16,12 +17,15 @@ import org.junit.Before;
import org.junit.Rule;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import org.mockito.stubbing.Stubber;
import java.io.ByteArrayOutputStream;
import java.io.OutputStream;
import java.util.concurrent.CountDownLatch;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyInt;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
@ -75,4 +79,36 @@ public class BaseTest {
});
return y;
}
public static Stubber doCountDown(final CountDownLatch latch) {
return doAnswer(new Answer<Object>() {
@Override
public Object answer(InvocationOnMock invocation) throws Throwable {
latch.countDown();
return null;
}
});
}
public static <T> Stubber doEndTask(final Task<T> task, final T response) {
return doAnswer(new Answer<Object>() {
@Override
public Object answer(InvocationOnMock invocation) throws Throwable {
task.end(response);
return null;
}
});
}
public static Stubber doEndTask(final Task task, final int withReturnArgument) {
return doAnswer(new Answer<Object>() {
@Override
public Object answer(InvocationOnMock invocation) throws Throwable {
Object o = invocation.getArguments()[withReturnArgument];
//noinspection unchecked
task.end(o);
return null;
}
});
}
}
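The Stubber helpers above are meant to be chained with Mockito's do-style stubbing: doCountDown releases a CountDownLatch when the stubbed callback fires, doEndTask(task, response) ends a Task with a fixed value, and doEndTask(task, withReturnArgument) ends it with one of the invocation's arguments. A small sketch of the intended pairing with a mocked CameraListener, following the same pattern the integration test below relies on:
// Sketch: complete `open` with the CameraOptions argument (index 0) passed to onCameraOpened().
Task<CameraOptions> open = new Task<>();
open.listen();
doEndTask(open, 0).when(listener).onCameraOpened(any(CameraOptions.class));
CameraOptions options = open.await(1000); // null if the callback never fired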

@ -0,0 +1,305 @@
package com.otaliastudios.cameraview;
import android.content.Context;
import android.support.test.filters.MediumTest;
import android.support.test.rule.ActivityTestRule;
import android.support.test.runner.AndroidJUnit4;
import android.view.ViewGroup;
import android.widget.FrameLayout;
import org.junit.After;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import static org.mockito.Mockito.*;
import static org.junit.Assert.*;
@RunWith(AndroidJUnit4.class)
@MediumTest
public class CameraControllerIntegrationTest extends BaseTest {
@Rule
public ActivityTestRule<TestActivity> rule = new ActivityTestRule<>(TestActivity.class);
private CameraView camera;
private Camera1 controller;
private CameraListener listener;
@Before
public void setUp() {
ui(new Runnable() {
@Override
public void run() {
camera = new CameraView(rule.getActivity()) {
@Override
protected CameraController instantiateCameraController(CameraCallbacks callbacks, Preview preview) {
controller = new Camera1(callbacks, preview);
return controller;
}
};
listener = mock(CameraListener.class);
camera.addCameraListener(listener);
rule.getActivity().inflate(camera);
}
});
}
@After
public void tearDown() throws Exception {
camera.stopCapturingVideo();
camera.stop();
Thread.sleep(800); // Just to be sure it's released before next test.
}
private CameraOptions waitForOpen(boolean expectSuccess) {
final Task<CameraOptions> open = new Task<>();
open.listen();
doEndTask(open, 0).when(listener).onCameraOpened(any(CameraOptions.class));
CameraOptions result = open.await(1000);
if (expectSuccess) {
assertNotNull("Can open", result);
} else {
assertNull("Should not open", result);
}
return result;
}
private Boolean waitForClose(boolean expectSuccess) {
final Task<Boolean> close = new Task<>();
close.listen();
doEndTask(close, true).when(listener).onCameraClosed();
Boolean result = close.await(1000);
if (expectSuccess) {
assertNotNull("Can close", result);
} else {
assertNull("Should not close", result);
}
return result;
}
//region test open/close
@Test
public void testOpenClose() {
assertFalse(controller.isCameraAvailable());
camera.start();
waitForOpen(true);
assertTrue(controller.isCameraAvailable());
camera.stop();
waitForClose(true);
assertFalse(controller.isCameraAvailable());
}
@Test
public void testOpenTwice() {
camera.start();
waitForOpen(true);
waitForOpen(false);
}
@Test
public void testCloseTwice() {
camera.stop();
waitForClose(false);
}
@Test
public void testConcurrentCalls() throws Exception {
final CountDownLatch latch = new CountDownLatch(4);
doCountDown(latch).when(listener).onCameraOpened(any(CameraOptions.class));
doCountDown(latch).when(listener).onCameraClosed();
camera.start();
camera.stop();
camera.start();
camera.stop();
boolean did = latch.await(4, TimeUnit.SECONDS);
assertTrue("Handles concurrent calls to start & stop", did);
}
@Test
public void testStartInitializesOptions() {
assertNull(camera.getCameraOptions());
assertNull(camera.getExtraProperties());
camera.start();
waitForOpen(true);
assertNotNull(camera.getCameraOptions());
assertNotNull(camera.getExtraProperties());
}
//endregion
//region test Facing/SessionType
// Test things that should reset the camera.
@Test
public void testSetFacing() throws Exception {
camera.setFacing(Facing.BACK);
camera.start();
waitForOpen(true);
// set facing should call stop and start again.
final CountDownLatch latch = new CountDownLatch(2);
doCountDown(latch).when(listener).onCameraOpened(any(CameraOptions.class));
doCountDown(latch).when(listener).onCameraClosed();
camera.setFacing(Facing.FRONT);
boolean did = latch.await(2, TimeUnit.SECONDS);
assertTrue("Handles setFacing while active", did);
assertEquals(camera.getFacing(), Facing.FRONT);
}
@Test
public void testSetSessionType() throws Exception {
camera.setSessionType(SessionType.PICTURE);
camera.start();
waitForOpen(true);
// set session type should call stop and start again.
final CountDownLatch latch = new CountDownLatch(2);
doCountDown(latch).when(listener).onCameraOpened(any(CameraOptions.class));
doCountDown(latch).when(listener).onCameraClosed();
camera.setSessionType(SessionType.VIDEO);
boolean did = latch.await(2, TimeUnit.SECONDS);
assertTrue("Handles setSessionType while active", did);
assertEquals(camera.getSessionType(), SessionType.VIDEO);
}
//endregion
//region test Set Parameters
// When camera is open, parameters will be set only if supported.
@Test
public void testSetZoom() {
camera.start();
CameraOptions options = waitForOpen(true);
boolean can = options.isZoomSupported();
float oldValue = camera.getZoom();
float newValue = 0.65f;
camera.setZoom(newValue);
assertEquals(can ? newValue : oldValue, camera.getZoom(), 0f);
}
@Test
public void testSetExposureCorrection() {
camera.start();
CameraOptions options = waitForOpen(true);
boolean can = options.isExposureCorrectionSupported();
float oldValue = camera.getExposureCorrection();
float newValue = options.getExposureCorrectionMaxValue();
camera.setExposureCorrection(newValue);
assertEquals(can ? newValue : oldValue, camera.getExposureCorrection(), 0f);
}
@Test
public void testSetFlash() {
camera.start();
CameraOptions options = waitForOpen(true);
Flash[] values = Flash.values();
Flash oldValue = camera.getFlash();
for (Flash value : values) {
camera.setFlash(value);
if (options.supports(value)) {
assertEquals(camera.getFlash(), value);
} else {
assertEquals(camera.getFlash(), oldValue);
}
}
}
@Test
public void testSetWhiteBalance() {
camera.start();
CameraOptions options = waitForOpen(true);
WhiteBalance[] values = WhiteBalance.values();
WhiteBalance oldValue = camera.getWhiteBalance();
for (WhiteBalance value : values) {
camera.setWhiteBalance(value);
if (options.supports(value)) {
assertEquals(camera.getWhiteBalance(), value);
} else {
assertEquals(camera.getWhiteBalance(), oldValue);
}
}
}
@Test
public void testSetHdr() {
camera.start();
CameraOptions options = waitForOpen(true);
Hdr[] values = Hdr.values();
Hdr oldValue = camera.getHdr();
for (Hdr value : values) {
camera.setHdr(value);
if (options.supports(value)) {
assertEquals(camera.getHdr(), value);
} else {
assertEquals(camera.getHdr(), oldValue);
}
}
}
//endregion
//region testSetVideoQuality
// This can be tricky because it can trigger layout changes.
@Test(expected = IllegalStateException.class)
public void testSetVideoQuality_whileRecording() {
camera.setSessionType(SessionType.VIDEO);
camera.setVideoQuality(VideoQuality.HIGHEST);
camera.start();
waitForOpen(true);
camera.startCapturingVideo(null);
camera.setVideoQuality(VideoQuality.LOWEST);
}
@Test
public void testSetVideoQuality_whileInPictureSessionType() {
camera.setSessionType(SessionType.PICTURE);
camera.setVideoQuality(VideoQuality.HIGHEST);
camera.start();
waitForOpen(true);
camera.setVideoQuality(VideoQuality.LOWEST);
assertEquals(camera.getVideoQuality(), VideoQuality.LOWEST);
}
@Test
public void testSetVideoQuality_whileNotStarted() {
camera.setVideoQuality(VideoQuality.HIGHEST);
assertEquals(camera.getVideoQuality(), VideoQuality.HIGHEST);
camera.setVideoQuality(VideoQuality.LOWEST);
assertEquals(camera.getVideoQuality(), VideoQuality.LOWEST);
}
@Test
public void testSetVideoQuality_shouldRecompute() {
// If a video quality change leads to a new capture size,
// this might lead to a new aspect ratio,
// which might lead to a new preview size. No idea how to test.
assertTrue(true);
}
//endregion
}

@ -93,6 +93,7 @@ public class CameraViewTest extends BaseTest {
assertEquals(cameraView.getSessionType(), SessionType.DEFAULT);
assertEquals(cameraView.getHdr(), Hdr.DEFAULT);
assertEquals(cameraView.getVideoQuality(), VideoQuality.DEFAULT);
assertEquals(cameraView.getLocation(), null);
// Self managed
assertEquals(cameraView.getExposureCorrection(), 0f, 0f);
@ -410,7 +411,7 @@ public class CameraViewTest extends BaseTest {
source.setLongitude(-10d);
source.setAltitude(50d);
cameraView.setLocation(source);
Location other = mockController.mLocation;
Location other = cameraView.getLocation();
assertEquals(10d, other.getLatitude(), 0d);
assertEquals(-10d, other.getLongitude(), 0d);
assertEquals(50d, other.getAltitude(), 0d);

@ -10,7 +10,6 @@ import java.io.File;
public class MockCameraController extends CameraController {
Location mLocation;
boolean mPictureCaptured;
boolean mFocusStarted;
boolean mZoomChanged;
@ -110,7 +109,7 @@ public class MockCameraController extends CameraController {
}
@Override
boolean isCameraOpened() {
boolean isCameraAvailable() {
return true;
}

@ -21,4 +21,20 @@ public class WorkerHandlerTest {
assertTrue(w1 == w1a);
assertFalse(w1 == w2);
}
@Test
public void testStaticRun() {
final Task<Boolean> task = new Task<>();
task.listen();
Runnable action = new Runnable() {
@Override
public void run() {
task.end(true);
}
};
WorkerHandler.run(action);
Boolean result = task.await(500);
assertNotNull(result);
assertTrue(result);
}
}

@ -9,11 +9,9 @@ import android.location.Location;
import android.media.CamcorderProfile;
import android.media.MediaRecorder;
import android.os.Build;
import android.os.Handler;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import android.support.annotation.WorkerThread;
import android.util.Log;
import android.view.SurfaceHolder;
import java.io.File;
@ -36,13 +34,11 @@ class Camera1 extends CameraController {
private int mSensorOffset;
private Location mLocation;
private final int mPostFocusResetDelay = 3000;
private Runnable mPostFocusResetRunnable = new Runnable() {
@Override
public void run() {
if (!isCameraOpened()) return;
if (!isCameraAvailable()) return;
mCamera.cancelAutoFocus();
synchronized (mLock) {
Camera.Parameters params = mCamera.getParameters();
@ -104,7 +100,7 @@ class Camera1 extends CameraController {
}
private boolean shouldSetup() {
return isCameraOpened() && mPreview.isReady() && !mIsSetup;
return isCameraAvailable() && mPreview.isReady() && !mIsSetup;
}
// The act of binding an "open" camera to a "ready" preview.
@ -144,7 +140,7 @@ class Camera1 extends CameraController {
@WorkerThread
@Override
void onStart() {
if (isCameraOpened()) onStop();
if (isCameraAvailable()) onStop();
if (collectCameraId()) {
mCamera = Camera.open(mCameraId);
@ -172,7 +168,7 @@ class Camera1 extends CameraController {
@Override
void onStop() {
mHandler.get().removeCallbacks(mPostFocusResetRunnable);
if (isCameraOpened()) {
if (isCameraAvailable()) {
if (mIsCapturingVideo) endVideo();
mCamera.stopPreview();
mCamera.release();
@ -205,7 +201,7 @@ class Camera1 extends CameraController {
void setSessionType(SessionType sessionType) {
if (sessionType != mSessionType) {
mSessionType = sessionType;
if (isCameraOpened()) {
if (isCameraAvailable()) {
start();
}
}
@ -215,7 +211,7 @@ class Camera1 extends CameraController {
void setLocation(Location location) {
Location oldLocation = mLocation;
mLocation = location;
if (isCameraOpened()) {
if (isCameraAvailable()) {
synchronized (mLock) {
Camera.Parameters params = mCamera.getParameters();
if (mergeLocation(params, oldLocation)) mCamera.setParameters(params);
@ -243,7 +239,7 @@ class Camera1 extends CameraController {
void setFacing(Facing facing) {
if (facing != mFacing) {
mFacing = facing;
if (collectCameraId() && isCameraOpened()) {
if (collectCameraId() && isCameraAvailable()) {
start();
}
}
@ -253,7 +249,7 @@ class Camera1 extends CameraController {
void setWhiteBalance(WhiteBalance whiteBalance) {
WhiteBalance old = mWhiteBalance;
mWhiteBalance = whiteBalance;
if (isCameraOpened()) {
if (isCameraAvailable()) {
synchronized (mLock) {
Camera.Parameters params = mCamera.getParameters();
if (mergeWhiteBalance(params, old)) mCamera.setParameters(params);
@ -274,7 +270,7 @@ class Camera1 extends CameraController {
void setHdr(Hdr hdr) {
Hdr old = mHdr;
mHdr = hdr;
if (isCameraOpened()) {
if (isCameraAvailable()) {
synchronized (mLock) {
Camera.Parameters params = mCamera.getParameters();
if (mergeHdr(params, old)) mCamera.setParameters(params);
@ -295,7 +291,7 @@ class Camera1 extends CameraController {
void setFlash(Flash flash) {
Flash old = mFlash;
mFlash = flash;
if (isCameraOpened()) {
if (isCameraAvailable()) {
synchronized (mLock) {
Camera.Parameters params = mCamera.getParameters();
if (mergeFlash(params, old)) mCamera.setParameters(params);
@ -348,7 +344,7 @@ class Camera1 extends CameraController {
}
mVideoQuality = videoQuality;
if (isCameraOpened() && mSessionType == SessionType.VIDEO) {
if (isCameraAvailable() && mSessionType == SessionType.VIDEO) {
// Change capture size to a size that fits the video aspect ratio.
Size oldSize = mCaptureSize;
mCaptureSize = computeCaptureSize();
@ -370,7 +366,7 @@ class Camera1 extends CameraController {
@Override
boolean capturePicture() {
if (mIsCapturingImage) return false;
if (!isCameraOpened()) return false;
if (!isCameraAvailable()) return false;
if (mSessionType == SessionType.VIDEO && mIsCapturingVideo) {
if (!mOptions.isVideoSnapshotSupported()) return false;
}
@ -410,7 +406,7 @@ class Camera1 extends CameraController {
@Override
boolean captureSnapshot() {
if (!isCameraOpened()) return false;
if (!isCameraAvailable()) return false;
if (mIsCapturingImage) return false;
// This won't work while capturing a video.
// Switch to capturePicture.
@ -435,7 +431,7 @@ class Camera1 extends CameraController {
final int postWidth = flip ? preHeight : preWidth;
final int postHeight = flip ? preWidth : preHeight;
final int format = params.getPreviewFormat();
new Thread(new Runnable() {
WorkerHandler.run(new Runnable() {
@Override
public void run() {
@ -445,7 +441,7 @@ class Camera1 extends CameraController {
mCameraCallbacks.processSnapshot(yuv, consistentWithView, exifFlip);
mIsCapturingImage = false;
}
}).start();
});
}
});
return true;
@ -460,7 +456,7 @@ class Camera1 extends CameraController {
}
@Override
boolean isCameraOpened() {
boolean isCameraAvailable() {
return mCamera != null;
}
@ -548,7 +544,7 @@ class Camera1 extends CameraController {
boolean startVideo(@NonNull File videoFile) {
mVideoFile = videoFile;
if (mIsCapturingVideo) return false;
if (!isCameraOpened()) return false;
if (!isCameraAvailable()) return false;
Camera.Parameters params = mCamera.getParameters();
params.setVideoStabilization(false);
if (mSessionType == SessionType.VIDEO) {
@ -573,9 +569,17 @@ class Camera1 extends CameraController {
boolean endVideo() {
if (mIsCapturingVideo) {
mIsCapturingVideo = false;
if (mMediaRecorder != null) {
try {
mMediaRecorder.stop();
mMediaRecorder.release();
} catch (Exception e) {
// This can happen if endVideo() is called right after startVideo().
// We don't care.
LOG.w("Error while closing media recorder.", e);
}
mMediaRecorder = null;
}
if (mVideoFile != null) {
mCameraCallbacks.dispatchOnVideoTaken(mVideoFile);
mVideoFile = null;
@ -655,7 +659,7 @@ class Camera1 extends CameraController {
@Override
boolean setZoom(float zoom) {
if (!isCameraOpened()) return false;
if (!isCameraAvailable()) return false;
if (!mOptions.isZoomSupported()) return false;
synchronized (mLock) {
Camera.Parameters params = mCamera.getParameters();
@ -669,7 +673,7 @@ class Camera1 extends CameraController {
@Override
boolean setExposureCorrection(float EVvalue) {
if (!isCameraOpened()) return false;
if (!isCameraAvailable()) return false;
if (!mOptions.isExposureCorrectionSupported()) return false;
float max = mOptions.getExposureCorrectionMaxValue();
float min = mOptions.getExposureCorrectionMinValue();
@ -689,7 +693,7 @@ class Camera1 extends CameraController {
@Override
boolean startAutoFocus(@Nullable final Gesture gesture, PointF point) {
if (!isCameraOpened()) return false;
if (!isCameraAvailable()) return false;
if (!mOptions.isAutoFocusSupported()) return false;
final PointF p = new PointF(point.x, point.y); // copy.
List<Camera.Area> meteringAreas2 = computeMeteringAreas(p.x, p.y);

@ -11,15 +11,12 @@ import android.hardware.camera2.CameraManager;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.location.Location;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import android.support.annotation.WorkerThread;
import android.util.Log;
import java.io.File;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.TreeSet;
@TargetApi(21)
class Camera2 extends CameraController {
@ -90,15 +87,6 @@ class Camera2 extends CameraController {
}
@Override
void onDisplayOffset(int displayOrientation) {
}
@Override
void onDeviceOrientation(int deviceOrientation) {
}
@Override
void setFacing(Facing facing) {
@ -131,7 +119,7 @@ class Camera2 extends CameraController {
// }
// }
if (mFacing == facing && isCameraOpened()) {
if (mFacing == facing && isCameraAvailable()) {
stop();
start();
}
@ -194,7 +182,7 @@ class Camera2 extends CameraController {
}
@Override
boolean isCameraOpened() {
boolean isCameraAvailable() {
return mCamera != null;
}

@ -19,6 +19,7 @@ abstract class CameraController implements Preview.SurfaceCallback {
protected VideoQuality mVideoQuality;
protected SessionType mSessionType;
protected Hdr mHdr;
protected Location mLocation;
protected Size mCaptureSize;
protected Size mPreviewSize;
@ -68,6 +69,11 @@ abstract class CameraController implements Preview.SurfaceCallback {
@WorkerThread
abstract void onStop();
// Returns whether the camera is available (started),
// so we can start setting parameters on it.
// Preview surface might still be off at this point.
abstract boolean isCameraAvailable();
//endregion
//region Rotation callbacks
@ -85,24 +91,34 @@ abstract class CameraController implements Preview.SurfaceCallback {
//region Abstract setParameters
// Should restart the session if active.
abstract void setSessionType(SessionType sessionType);
// Should restart the session if active.
abstract void setFacing(Facing facing);
// If opened and supported, apply and return true.
abstract boolean setZoom(float zoom);
// If opened and supported, apply and return true.
abstract boolean setExposureCorrection(float EVvalue);
abstract void setFacing(Facing facing);
// If closed, keep. If opened, check supported and apply.
abstract void setFlash(Flash flash);
// If closed, keep. If opened, check supported and apply.
abstract void setWhiteBalance(WhiteBalance whiteBalance);
abstract void setVideoQuality(VideoQuality videoQuality);
abstract void setSessionType(SessionType sessionType);
// If closed, keep. If opened, check supported and apply.
abstract void setHdr(Hdr hdr);
// If closed, keep. If opened, check supported and apply.
abstract void setLocation(Location location);
// Throw if capturing. If in video session, recompute capture size, and, if needed, preview size.
abstract void setVideoQuality(VideoQuality videoQuality);
//endregion
//region APIs
@ -115,11 +131,8 @@ abstract class CameraController implements Preview.SurfaceCallback {
abstract boolean endVideo();
abstract boolean shouldFlipSizes(); // Whether the Sizes should be flipped to match the view orientation.
abstract boolean isCameraOpened();
abstract boolean startAutoFocus(@Nullable Gesture gesture, PointF point);
//endregion
@ -160,6 +173,10 @@ abstract class CameraController implements Preview.SurfaceCallback {
return mHdr;
}
final Location getLocation() {
return mLocation;
}
final Size getCaptureSize() {
return mCaptureSize;
}

@ -16,16 +16,12 @@ import android.location.Location;
import android.media.MediaActionSound;
import android.os.Build;
import android.os.Handler;
import android.os.HandlerThread;
import android.os.Looper;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import android.util.AttributeSet;
import android.util.Log;
import android.view.Display;
import android.view.MotionEvent;
import android.view.ViewGroup;
import android.view.WindowManager;
import android.widget.FrameLayout;
import java.io.ByteArrayOutputStream;
@ -59,17 +55,19 @@ public class CameraView extends FrameLayout {
private HashMap<Gesture, GestureAction> mGestureMap = new HashMap<>(4);
// Components
CameraCallbacks mCameraCallbacks;
/* for tests */ CameraCallbacks mCameraCallbacks;
private OrientationHelper mOrientationHelper;
private CameraController mCameraController;
private Preview mPreviewImpl;
private ArrayList<CameraListener> mListeners = new ArrayList<>(2);
// Views
GridLinesLayout mGridLinesLayout;
PinchGestureLayout mPinchGestureLayout;
TapGestureLayout mTapGestureLayout;
ScrollGestureLayout mScrollGestureLayout;
private boolean mIsStarted;
private boolean mStartCalled;
private boolean mKeepScreenOn;
// Threading
@ -113,7 +111,7 @@ public class CameraView extends FrameLayout {
a.recycle();
// Components
mCameraCallbacks = new CameraCallbacks();
mCameraCallbacks = new Callbacks();
mPreviewImpl = instantiatePreview(context, this);
mCameraController = instantiateCameraController(mCameraCallbacks, mPreviewImpl);
mUiHandler = new Handler(Looper.getMainLooper());
@ -129,7 +127,7 @@ public class CameraView extends FrameLayout {
addView(mTapGestureLayout);
addView(mScrollGestureLayout);
mIsStarted = false;
mStartCalled = false;
// Apply self managed
setCropOutput(cropOutput);
@ -408,7 +406,7 @@ public class CameraView extends FrameLayout {
@Override
public boolean onTouchEvent(MotionEvent event) {
if (!mCameraController.isCameraOpened()) return true;
if (!mCameraController.isCameraAvailable()) return true;
// Pass to our own GestureLayouts
CameraOptions options = mCameraController.getCameraOptions(); // Non null
@ -478,7 +476,7 @@ public class CameraView extends FrameLayout {
* @return whether the camera has started
*/
public boolean isStarted() {
return mIsStarted;
return mStartCalled;
}
@ -487,13 +485,13 @@ public class CameraView extends FrameLayout {
* This should be called onResume(), or when you are ready with permissions.
*/
public void start() {
if (mIsStarted || !isEnabled()) {
if (mStartCalled || !isEnabled()) {
// Already started, do nothing.
return;
}
if (checkPermissions(getSessionType())) {
mIsStarted = true;
mStartCalled = true;
// Update display orientation for current CameraController
mOrientationHelper.enable(getContext());
mCameraController.start();
@ -567,17 +565,17 @@ public class CameraView extends FrameLayout {
* This should be called onPause().
*/
public void stop() {
if (!mIsStarted) {
if (!mStartCalled) {
// Already stopped, do nothing.
return;
}
mIsStarted = false;
mStartCalled = false;
mCameraController.stop();
}
public void destroy() {
// TODO: this is not strictly needed
mCameraCallbacks.clearListeners(); // Release inner listener.
clearCameraListeners(); // Release
}
//endregion
@ -745,6 +743,16 @@ public class CameraView extends FrameLayout {
}
/**
* Retrieves the location previously applied with setLocation().
*
* @return the current location, if any.
*/
@Nullable
public Location getLocation() {
return mCameraController.getLocation();
}
/**
* Sets desired white balance to current camera session.
*
@ -892,7 +900,7 @@ public class CameraView extends FrameLayout {
*/
public void setSessionType(SessionType sessionType) {
if (sessionType == getSessionType() || !mIsStarted) {
if (sessionType == getSessionType() || !mStartCalled) {
// The check already took place, or will happen on start().
mCameraController.setSessionType(sessionType);
@ -1003,10 +1011,8 @@ public class CameraView extends FrameLayout {
*/
@Deprecated
public void setCameraListener(CameraListener cameraListener) {
mCameraCallbacks.clearListeners();
if (cameraListener != null) {
mCameraCallbacks.addListener(cameraListener);
}
mListeners.clear();
addCameraListener(cameraListener);
}
@ -1018,7 +1024,7 @@ public class CameraView extends FrameLayout {
*/
public void addCameraListener(CameraListener cameraListener) {
if (cameraListener != null) {
mCameraCallbacks.addListener(cameraListener);
mListeners.add(cameraListener);
}
}
@ -1030,7 +1036,7 @@ public class CameraView extends FrameLayout {
*/
public void removeCameraListener(CameraListener cameraListener) {
if (cameraListener != null) {
mCameraCallbacks.removeListener(cameraListener);
mListeners.remove(cameraListener);
}
}
@ -1040,7 +1046,7 @@ public class CameraView extends FrameLayout {
* to camera events.
*/
public void clearCameraListeners() {
mCameraCallbacks.clearListeners();
mListeners.clear();
}
@ -1103,9 +1109,14 @@ public class CameraView extends FrameLayout {
file = new File(getContext().getExternalFilesDir(null), "video.mp4");
}
if (mCameraController.startVideo(file)) {
mUiHandler.post(new Runnable() {
@Override
public void run() {
mKeepScreenOn = getKeepScreenOn();
if (!mKeepScreenOn) setKeepScreenOn(true);
}
});
}
}
@ -1143,8 +1154,13 @@ public class CameraView extends FrameLayout {
*/
public void stopCapturingVideo() {
if (mCameraController.endVideo()) {
mUiHandler.post(new Runnable() {
@Override
public void run() {
if (getKeepScreenOn() != mKeepScreenOn) setKeepScreenOn(mKeepScreenOn);
}
});
}
}
@ -1218,20 +1234,31 @@ public class CameraView extends FrameLayout {
}
}
interface CameraCallbacks extends OrientationHelper.Callbacks {
void dispatchOnCameraOpened(CameraOptions options);
void dispatchOnCameraClosed();
void onCameraPreviewSizeChanged();
void processImage(byte[] jpeg, boolean consistentWithView, boolean flipHorizontally);
void processSnapshot(YuvImage image, boolean consistentWithView, boolean flipHorizontally);
void dispatchOnVideoTaken(File file);
void dispatchOnFocusStart(@Nullable Gesture trigger, PointF where);
void dispatchOnFocusEnd(@Nullable Gesture trigger, boolean success, PointF where);
void dispatchOnZoomChanged(final float newValue, final PointF[] fingers);
void dispatchOnExposureCorrectionChanged(float newValue, float[] bounds, PointF[] fingers);
}
class CameraCallbacks implements OrientationHelper.Callbacks {
private class Callbacks implements CameraCallbacks {
// Outer listeners
private ArrayList<CameraListener> mListeners = new ArrayList<>(2);
private CameraLogger mLogger = CameraLogger.create(CameraCallbacks.class.getSimpleName());
// Orientation TODO: move this logic into OrientationHelper
private Integer mDisplayOffset;
private Integer mDeviceOrientation;
CameraCallbacks() {}
Callbacks() {}
@Override
public void dispatchOnCameraOpened(final CameraOptions options) {
mLogger.i("dispatchOnCameraOpened", options);
mUiHandler.post(new Runnable() {
@ -1244,7 +1271,7 @@ public class CameraView extends FrameLayout {
});
}
@Override
public void dispatchOnCameraClosed() {
mLogger.i("dispatchOnCameraClosed");
mUiHandler.post(new Runnable() {
@ -1257,7 +1284,7 @@ public class CameraView extends FrameLayout {
});
}
@Override
public void onCameraPreviewSizeChanged() {
mLogger.i("onCameraPreviewSizeChanged");
// Camera preview size, as returned by getPreviewSize(), has changed.
@ -1290,6 +1317,7 @@ public class CameraView extends FrameLayout {
* @param flipHorizontally whether this picture should be flipped horizontally after decoding,
* because it was taken with the front camera.
*/
@Override
public void processImage(final byte[] jpeg, final boolean consistentWithView, final boolean flipHorizontally) {
mLogger.i("processImage");
mWorkerHandler.post(new Runnable() {
@ -1311,7 +1339,7 @@ public class CameraView extends FrameLayout {
});
}
@Override
public void processSnapshot(final YuvImage yuv, final boolean consistentWithView, boolean flipHorizontally) {
mLogger.i("processSnapshot");
mWorkerHandler.post(new Runnable() {
@ -1349,7 +1377,7 @@ public class CameraView extends FrameLayout {
});
}
@Override
public void dispatchOnVideoTaken(final File video) {
mLogger.i("dispatchOnVideoTaken", video);
mUiHandler.post(new Runnable() {
@ -1362,7 +1390,7 @@ public class CameraView extends FrameLayout {
});
}
@Override
public void dispatchOnFocusStart(@Nullable final Gesture gesture, final PointF point) {
mLogger.i("dispatchOnFocusStart", gesture, point);
mUiHandler.post(new Runnable() {
@ -1379,7 +1407,7 @@ public class CameraView extends FrameLayout {
});
}
@Override
public void dispatchOnFocusEnd(@Nullable final Gesture gesture, final boolean success,
final PointF point) {
mLogger.i("dispatchOnFocusEnd", gesture, success, point);
@ -1437,7 +1465,7 @@ public class CameraView extends FrameLayout {
});
}
@Override
public void dispatchOnZoomChanged(final float newValue, final PointF[] fingers) {
mLogger.i("dispatchOnZoomChanged", newValue);
mUiHandler.post(new Runnable() {
@ -1450,7 +1478,7 @@ public class CameraView extends FrameLayout {
});
}
@Override
public void dispatchOnExposureCorrectionChanged(final float newValue,
final float[] bounds,
final PointF[] fingers) {
@ -1464,24 +1492,6 @@ public class CameraView extends FrameLayout {
}
});
}
private void addListener(@NonNull CameraListener cameraListener) {
mLogger.i("addListener");
mListeners.add(cameraListener);
}
private void removeListener(@NonNull CameraListener cameraListener) {
mLogger.i("removeListener");
mListeners.remove(cameraListener);
}
private void clearListeners() {
mLogger.i("clearListeners");
mListeners.clear();
}
}
//endregion

@ -68,7 +68,7 @@ public class CameraUtils {
*/
public static void decodeBitmap(final byte[] source, final BitmapCallback callback) {
final Handler ui = new Handler();
new Thread(new Runnable() {
WorkerHandler.run(new Runnable() {
@Override
public void run() {
final Bitmap bitmap = decodeBitmap(source);
@ -79,7 +79,7 @@ public class CameraUtils {
}
});
}
}).start();
});
}

@ -36,6 +36,13 @@ class WorkerHandler {
return handler;
}
// Handy util to perform an action in a fallback thread.
// Not to be used for long-running operations since they will
// block the fallback thread.
public static void run(Runnable action) {
get("FallbackCameraThread").post(action);
}
private HandlerThread mThread;
private Handler mHandler;

@ -6,7 +6,7 @@ coverage:
status:
project:
default:
target: 40%
target: 50%
patch:
default:
target: 60%
