Make CameraView thread-safe (#97)

* Make CameraController fully async, fix shutter sounds, add tests

* Schedule everything to the same handler

* Ignore integration tests on Travis

* Add basic error handling

* Revert useless logs
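For context, a minimal sketch of the "schedule everything to the same handler" idea above, using a plain HandlerThread. The class and method names below are illustrative only and are not the library's actual WorkerHandler/CameraController code; the point is that every camera operation is posted to a single background thread, so calls run serially and never block (or race) the UI thread.

import android.os.Handler;
import android.os.HandlerThread;

// Illustrative sketch: serialize all camera work on one background thread.
class SingleThreadScheduler {
    private final HandlerThread mThread = new HandlerThread("CameraWorkerSketch");
    private final Handler mHandler;

    SingleThreadScheduler() {
        mThread.start(); // must be started before asking for its Looper
        mHandler = new Handler(mThread.getLooper());
    }

    // Every public camera call delegates here, so operations execute in order,
    // off the UI thread, and can safely assume no other camera call is running.
    void schedule(Runnable action) {
        mHandler.post(action);
    }
}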
Author: Mattia Iavarone (committed via GitHub)
parent 2e9715fa89
commit 6b9affc435
  1. README.md (13 changes)
  2. cameraview/src/androidTest/java/com/otaliastudios/cameraview/CameraUtilsTest.java (3 changes)
  3. cameraview/src/androidTest/java/com/otaliastudios/cameraview/CameraViewCallbacksTest.java (19 changes)
  4. cameraview/src/androidTest/java/com/otaliastudios/cameraview/CameraViewTest.java (4 changes)
  5. cameraview/src/androidTest/java/com/otaliastudios/cameraview/IntegrationTest.java (173 changes)
  6. cameraview/src/androidTest/java/com/otaliastudios/cameraview/MockCameraController.java (30 changes)
  7. cameraview/src/androidTest/java/com/otaliastudios/cameraview/PreviewTest.java (3 changes)
  8. cameraview/src/androidTest/java/com/otaliastudios/cameraview/WorkerHandlerTest.java (3 changes)
  9. cameraview/src/main/java/com/otaliastudios/cameraview/Camera1.java (470 changes)
  10. cameraview/src/main/java/com/otaliastudios/cameraview/Camera2.java (32 changes)
  11. cameraview/src/main/java/com/otaliastudios/cameraview/CameraController.java (149 changes)
  12. cameraview/src/main/java/com/otaliastudios/cameraview/CameraException.java (12 changes)
  13. cameraview/src/main/java/com/otaliastudios/cameraview/CameraListener.java (25 changes)
  14. cameraview/src/main/java/com/otaliastudios/cameraview/CameraView.java (82 changes)
  15. cameraview/src/main/utils/com/otaliastudios/cameraview/Task.java (4 changes)
  16. cameraview/src/test/java/com/otaliastudios/cameraview/CameraExceptionTest.java (16 changes)

@ -44,6 +44,7 @@ See below for a [list of what was done](#roadmap) and [licensing info](#contribu
- Automatically detected orientation tags
- Plug in location tags with `setLocation()` API
- `CameraUtils` to help with Bitmaps and orientations
- Error handling
- **Lightweight**, no dependencies, just support `ExifInterface`
- Works down to API level 15
@ -169,6 +170,16 @@ camera.addCameraListener(new CameraListener() {
@Override
public void onCameraClosed() {}
/**
* Notifies about an error during the camera setup or configuration.
* At the moment, errors that are passed here are unrecoverable. When this is called,
* the camera has been released and is presumably showing a black preview.
*
* This is the right moment to show an error dialog to the user.
*/
@Override
public void onCameraError(CameraException error) {}
/**
* Notifies that a picture previously captured with capturePicture()
* or captureSnapshot() is ready to be shown or saved.
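A hedged usage sketch of the new onCameraError callback documented above (the dialog code and the `context` reference are illustrative assumptions, not part of the library; `context` stands for an Activity in scope): since the camera has already been released when the callback fires, the app can simply inform the user.

camera.addCameraListener(new CameraListener() {
    @Override
    public void onCameraError(CameraException error) {
        // Camera is already released here; just tell the user what happened.
        new AlertDialog.Builder(context)
                .setTitle("Camera error")
                .setMessage("Something went wrong: " + error)
                .setPositiveButton(android.R.string.ok, null)
                .show();
    }
});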
@ -605,6 +616,7 @@ all the code was changed.
- *Better threading, start() in worker thread and callbacks in UI*
- *Frame processor support*
- *inject external loggers*
- *error handling*
These are still things that need to be done, off the top of my head:
@ -612,7 +624,6 @@ These are still things that need to be done, off the top of my head:
- [ ] add a `setPreferredAspectRatio` API to choose the capture size. Preview size will adapt, and then, if left free, the CameraView will adapt as well
- [ ] animate grid lines similar to stock camera app
- [ ] add onRequestPermissionResults for easy permission callback
- [ ] better error handling, maybe with an onError(e) method in the public listener, or have each public method return a boolean
- [ ] decent code coverage
# Contributing and licenses

@ -48,8 +48,7 @@ public class CameraUtilsTest extends BaseTest {
source.compress(Bitmap.CompressFormat.PNG, 100, os);
final byte[] data = os.toByteArray();
final Task<Bitmap> decode = new Task<>();
decode.listen();
final Task<Bitmap> decode = new Task<>(true);
final CameraUtils.BitmapCallback callback = new CameraUtils.BitmapCallback() {
@Override
public void onBitmapReady(Bitmap bitmap) {

@ -34,7 +34,7 @@ import static org.mockito.Mockito.verify;
@RunWith(AndroidJUnit4.class)
@MediumTest
public class CameraCallbacksTest extends BaseTest {
public class CameraViewCallbacksTest extends BaseTest {
private CameraView camera;
private CameraListener listener;
@ -73,8 +73,7 @@ public class CameraCallbacksTest extends BaseTest {
camera.instantiatePreview();
camera.addCameraListener(listener);
camera.addFrameProcessor(processor);
task = new Task<>();
task.listen();
task = new Task<>(true);
}
});
}
@ -225,6 +224,17 @@ public class CameraCallbacksTest extends BaseTest {
verify(listener, times(1)).onOrientationChanged(anyInt());
}
// TODO: test onShutter, here or elsewhere
@Test
public void testCameraError() {
CameraException error = new CameraException(new RuntimeException("Error"));
completeTask().when(listener).onCameraError(error);
camera.mCameraCallbacks.dispatchError(error);
assertNotNull(task.await(200));
verify(listener, times(1)).onCameraError(error);
}
@Test
public void testProcessJpeg() {
@ -264,8 +274,7 @@ public class CameraCallbacksTest extends BaseTest {
private int[] testProcessImage(boolean jpeg, boolean crop, int[] viewDim, int[] imageDim) {
// End our task when onPictureTaken is called. Take note of the result.
final Task<byte[]> jpegTask = new Task<>();
jpegTask.listen();
final Task<byte[]> jpegTask = new Task<>(true);
doAnswer(new Answer() {
@Override
public Object answer(InvocationOnMock invocation) throws Throwable {

@ -92,10 +92,10 @@ public class CameraViewTest extends BaseTest {
assertEquals(cameraView.getAudio(), Audio.DEFAULT);
assertEquals(cameraView.getVideoQuality(), VideoQuality.DEFAULT);
assertEquals(cameraView.getLocation(), null);
// Self managed
assertEquals(cameraView.getExposureCorrection(), 0f, 0f);
assertEquals(cameraView.getZoom(), 0f, 0f);
// Self managed
assertEquals(cameraView.getPlaySounds(), CameraView.DEFAULT_PLAY_SOUNDS);
assertEquals(cameraView.getCropOutput(), CameraView.DEFAULT_CROP_OUTPUT);
assertEquals(cameraView.getJpegQuality(), CameraView.DEFAULT_JPEG_QUALITY);

@ -1,13 +1,12 @@
package com.otaliastudios.cameraview;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.PointF;
import android.media.MediaRecorder;
import android.support.test.filters.MediumTest;
import android.support.test.rule.ActivityTestRule;
import android.support.test.runner.AndroidJUnit4;
import android.view.ViewGroup;
import org.junit.After;
import org.junit.Before;
@ -42,6 +41,7 @@ public class IntegrationTest extends BaseTest {
private CameraView camera;
private Camera1 controller;
private CameraListener listener;
private Task<Throwable> uiExceptionTask;
@BeforeClass
public static void grant() {
@ -51,6 +51,7 @@ public class IntegrationTest extends BaseTest {
@Before
public void setUp() {
WorkerHandler.destroy();
ui(new Runnable() {
@Override
public void run() {
@ -67,6 +68,17 @@ public class IntegrationTest extends BaseTest {
rule.getActivity().inflate(camera);
}
});
// Ensure that controller exceptions are thrown on this thread (not on the UI thread).
uiExceptionTask = new Task<>(true);
WorkerHandler crashThread = WorkerHandler.get("CrashThread");
crashThread.getThread().setUncaughtExceptionHandler(new Thread.UncaughtExceptionHandler() {
@Override
public void uncaughtException(Thread t, Throwable e) {
uiExceptionTask.end(e);
}
});
controller.mCrashHandler = crashThread.get();
}
@After
@ -76,9 +88,14 @@ public class IntegrationTest extends BaseTest {
WorkerHandler.destroy();
}
private void waitForUiException() throws Throwable {
Throwable throwable = uiExceptionTask.await(2500);
if (throwable != null) throw throwable;
}
private CameraOptions waitForOpen(boolean expectSuccess) {
final Task<CameraOptions> open = new Task<>();
open.listen();
camera.start();
final Task<CameraOptions> open = new Task<>(true);
doEndTask(open, 0).when(listener).onCameraOpened(any(CameraOptions.class));
CameraOptions result = open.await(4000);
if (expectSuccess) {
@ -89,9 +106,9 @@ public class IntegrationTest extends BaseTest {
return result;
}
private Boolean waitForClose(boolean expectSuccess) {
final Task<Boolean> close = new Task<>();
close.listen();
private void waitForClose(boolean expectSuccess) {
camera.stop();
final Task<Boolean> close = new Task<>(true);
doEndTask(close, true).when(listener).onCameraClosed();
Boolean result = close.await(4000);
if (expectSuccess) {
@ -99,25 +116,21 @@ public class IntegrationTest extends BaseTest {
} else {
assertNull("Should not close", result);
}
return result;
}
private Boolean waitForVideo(boolean expectSuccess) {
final Task<Boolean> video = new Task<>();
video.listen();
private void waitForVideoEnd(boolean expectSuccess) {
final Task<Boolean> video = new Task<>(true);
doEndTask(video, true).when(listener).onVideoTaken(any(File.class));
Boolean result = video.await(2000);
Boolean result = video.await(8000);
if (expectSuccess) {
assertNotNull("Can take video", result);
} else {
assertNull("Should not take video", result);
}
return result;
}
private byte[] waitForPicture(boolean expectSuccess) {
final Task<byte[]> pic = new Task<>();
pic.listen();
final Task<byte[]> pic = new Task<>(true);
doEndTask(pic, 0).when(listener).onPictureTaken(any(byte[].class));
byte[] result = pic.await(5000);
if (expectSuccess) {
@ -128,33 +141,40 @@ public class IntegrationTest extends BaseTest {
return result;
}
private void waitForVideoStart() {
controller.mStartVideoTask.listen();
camera.startCapturingVideo(null);
controller.mStartVideoTask.await(400);
}
private void waitForVideoQuality(VideoQuality quality) {
controller.mVideoQualityTask.listen();
camera.setVideoQuality(quality);
controller.mVideoQualityTask.await(400);
}
//region test open/close
//-@Test
@Test
public void testOpenClose() throws Exception {
// Starting and stopping are hard to get since they happen on another thread.
assertEquals(controller.getState(), CameraController.STATE_STOPPED);
camera.start();
waitForOpen(true);
assertEquals(controller.getState(), CameraController.STATE_STARTED);
camera.stop();
waitForClose(true);
assertEquals(controller.getState(), CameraController.STATE_STOPPED);
}
//-@Test
@Test
public void testOpenTwice() {
camera.start();
waitForOpen(true);
camera.start();
waitForOpen(false);
}
//-@Test
@Test
public void testCloseTwice() {
camera.stop();
waitForClose(false);
}
@ -180,7 +200,6 @@ public class IntegrationTest extends BaseTest {
public void testStartInitializesOptions() {
assertNull(camera.getCameraOptions());
assertNull(camera.getExtraProperties());
camera.start();
waitForOpen(true);
assertNotNull(camera.getCameraOptions());
assertNotNull(camera.getExtraProperties());
@ -193,7 +212,6 @@ public class IntegrationTest extends BaseTest {
@Test
public void testSetFacing() throws Exception {
camera.start();
CameraOptions o = waitForOpen(true);
int size = o.getSupportedFacing().size();
if (size > 1) {
@ -212,7 +230,6 @@ public class IntegrationTest extends BaseTest {
@Test
public void testSetSessionType() throws Exception {
camera.setSessionType(SessionType.PICTURE);
camera.start();
waitForOpen(true);
// set session type should call stop and start again.
@ -234,34 +251,47 @@ public class IntegrationTest extends BaseTest {
@Test
public void testSetZoom() {
camera.start();
CameraOptions options = waitForOpen(true);
boolean can = options.isZoomSupported();
controller.mZoomTask.listen();
float oldValue = camera.getZoom();
float newValue = 0.65f;
camera.setZoom(newValue);
assertEquals(can ? newValue : oldValue, camera.getZoom(), 0f);
controller.mZoomTask.await(500);
if (options.isZoomSupported()) {
assertEquals(newValue, camera.getZoom(), 0f);
} else {
assertEquals(oldValue, camera.getZoom(), 0f);
}
}
@Test
public void testSetExposureCorrection() {
camera.start();
CameraOptions options = waitForOpen(true);
boolean can = options.isExposureCorrectionSupported();
controller.mExposureCorrectionTask.listen();
float oldValue = camera.getExposureCorrection();
float newValue = options.getExposureCorrectionMaxValue();
camera.setExposureCorrection(newValue);
assertEquals(can ? newValue : oldValue, camera.getExposureCorrection(), 0f);
controller.mExposureCorrectionTask.await(300);
if (options.isExposureCorrectionSupported()) {
assertEquals(newValue, camera.getExposureCorrection(), 0f);
} else {
assertEquals(oldValue, camera.getExposureCorrection(), 0f);
}
}
@Test
public void testSetFlash() {
camera.start();
CameraOptions options = waitForOpen(true);
Flash[] values = Flash.values();
Flash oldValue = camera.getFlash();
for (Flash value : values) {
controller.mFlashTask.listen();
camera.setFlash(value);
controller.mFlashTask.await(300);
if (options.supports(value)) {
assertEquals(camera.getFlash(), value);
oldValue = value;
@ -273,12 +303,13 @@ public class IntegrationTest extends BaseTest {
@Test
public void testSetWhiteBalance() {
camera.start();
CameraOptions options = waitForOpen(true);
WhiteBalance[] values = WhiteBalance.values();
WhiteBalance oldValue = camera.getWhiteBalance();
for (WhiteBalance value : values) {
controller.mWhiteBalanceTask.listen();
camera.setWhiteBalance(value);
controller.mWhiteBalanceTask.await(300);
if (options.supports(value)) {
assertEquals(camera.getWhiteBalance(), value);
oldValue = value;
@ -290,12 +321,13 @@ public class IntegrationTest extends BaseTest {
@Test
public void testSetHdr() {
camera.start();
CameraOptions options = waitForOpen(true);
Hdr[] values = Hdr.values();
Hdr oldValue = camera.getHdr();
for (Hdr value : values) {
controller.mHdrTask.listen();
camera.setHdr(value);
controller.mHdrTask.await(300);
if (options.supports(value)) {
assertEquals(camera.getHdr(), value);
oldValue = value;
@ -308,7 +340,6 @@ public class IntegrationTest extends BaseTest {
@Test
public void testSetAudio() {
// TODO: when permissions are managed, check that Audio.ON triggers the audio permission
camera.start();
waitForOpen(true);
Audio[] values = Audio.values();
for (Audio value : values) {
@ -319,9 +350,10 @@ public class IntegrationTest extends BaseTest {
@Test
public void testSetLocation() {
camera.start();
waitForOpen(true);
controller.mLocationTask.listen();
camera.setLocation(10d, 2d);
controller.mLocationTask.await(300);
assertNotNull(camera.getLocation());
assertEquals(camera.getLocation().getLatitude(), 10d, 0d);
assertEquals(camera.getLocation().getLongitude(), 2d, 0d);
@ -333,38 +365,39 @@ public class IntegrationTest extends BaseTest {
//region testSetVideoQuality
// This can be tricky because it can trigger layout changes.
// TODO: @Test(expected = IllegalStateException.class)
@Test(expected = RuntimeException.class)
public void testSetVideoQuality_whileRecording() throws Throwable {
// Can't run on Travis, MediaRecorder not supported.
// Error while starting MediaRecorder. java.lang.RuntimeException: start failed.
public void testSetVideoQuality_whileRecording() {
camera.setSessionType(SessionType.VIDEO);
camera.setVideoQuality(VideoQuality.HIGHEST);
camera.start();
waitForVideoQuality(VideoQuality.HIGHEST);
waitForOpen(true);
camera.startCapturingVideo(null);
camera.setVideoQuality(VideoQuality.LOWEST);
waitForVideoStart();
waitForVideoQuality(VideoQuality.LOWEST);
waitForUiException();
}
@Test
public void testSetVideoQuality_whileInPictureSessionType() {
camera.setSessionType(SessionType.PICTURE);
camera.setVideoQuality(VideoQuality.HIGHEST);
camera.start();
waitForVideoQuality(VideoQuality.HIGHEST);
waitForOpen(true);
camera.setVideoQuality(VideoQuality.LOWEST);
waitForVideoQuality(VideoQuality.LOWEST);
assertEquals(camera.getVideoQuality(), VideoQuality.LOWEST);
}
@Test
public void testSetVideoQuality_whileNotStarted() {
camera.setVideoQuality(VideoQuality.HIGHEST);
waitForVideoQuality(VideoQuality.HIGHEST);
assertEquals(camera.getVideoQuality(), VideoQuality.HIGHEST);
camera.setVideoQuality(VideoQuality.LOWEST);
waitForVideoQuality(VideoQuality.LOWEST);
assertEquals(camera.getVideoQuality(), VideoQuality.LOWEST);
}
@Test
public void testSetVideoQuality_shouldRecompute() {
// TODO:
// If a video quality change leads to a new capture size,
// this might lead to a new aspect ratio,
// which might lead to a new preview size. No idea how to test.
@ -375,36 +408,34 @@ public class IntegrationTest extends BaseTest {
//region test startVideo
// TODO: @Test(expected = IllegalStateException.class)
@Test(expected = RuntimeException.class)
public void testStartVideo_whileInPictureMode() throws Throwable {
// Fails on Travis. Some emulators can't deal with MediaRecorder
// Error while starting MediaRecorder. java.lang.RuntimeException: start failed.
// as documented. This works locally though.
public void testStartVideo_whileInPictureMode() {
camera.setSessionType(SessionType.PICTURE);
camera.start();
waitForOpen(true);
camera.startCapturingVideo(null);
waitForVideoStart();
waitForUiException();
}
// TODO: @Test
@Test
public void testStartEndVideo() {
// Fails on Travis. Some emulators can't deal with MediaRecorder,
// Error while starting MediaRecorder. java.lang.RuntimeException: start failed.
// as documented. This works locally though.
public void testStartEndVideo() {
camera.setSessionType(SessionType.VIDEO);
camera.start();
waitForOpen(true);
camera.startCapturingVideo(null, 1000);
waitForVideo(true); // waits 2000
camera.startCapturingVideo(null, 4000);
waitForVideoEnd(true);
}
@Test
public void testEndVideo_withoutStarting() {
camera.setSessionType(SessionType.VIDEO);
camera.start();
waitForOpen(true);
camera.stopCapturingVideo();
waitForVideo(false);
waitForVideoEnd(false);
}
//endregion
@ -414,13 +445,18 @@ public class IntegrationTest extends BaseTest {
@Test
public void testStartAutoFocus() {
camera.start();
CameraOptions o = waitForOpen(true);
final Task<PointF> focus = new Task<>(true);
doEndTask(focus, 0).when(listener).onFocusStart(any(PointF.class));
camera.startAutoFocus(1, 1);
PointF point = focus.await(300);
if (o.isAutoFocusSupported()) {
verify(listener, times(1)).onFocusStart(new PointF(1, 1));
assertNotNull(point);
assertEquals(point, new PointF(1, 1));
} else {
verify(listener, never()).onFocusStart(any(PointF.class));
assertNull(point);
}
}
@ -437,7 +473,6 @@ public class IntegrationTest extends BaseTest {
@Test
public void testCapturePicture_concurrentCalls() throws Exception {
// Second take should fail.
camera.start();
waitForOpen(true);
CountDownLatch latch = new CountDownLatch(2);
@ -453,7 +488,6 @@ public class IntegrationTest extends BaseTest {
@Test
public void testCapturePicture_size() throws Exception {
camera.setCropOutput(false);
camera.start();
waitForOpen(true);
Size size = camera.getCaptureSize();
@ -475,7 +509,6 @@ public class IntegrationTest extends BaseTest {
@Test
public void testCaptureSnapshot_concurrentCalls() throws Exception {
// Second take should fail.
camera.start();
waitForOpen(true);
CountDownLatch latch = new CountDownLatch(2);
@ -483,15 +516,14 @@ public class IntegrationTest extends BaseTest {
camera.captureSnapshot();
camera.captureSnapshot();
boolean did = latch.await(4, TimeUnit.SECONDS);
boolean did = latch.await(6, TimeUnit.SECONDS);
assertFalse(did);
assertEquals(latch.getCount(), 1);
assertEquals(1, latch.getCount());
}
@Test
public void testCaptureSnapshot_size() throws Exception {
camera.setCropOutput(false);
camera.start();
waitForOpen(true);
Size size = camera.getPreviewSize();
@ -520,7 +552,6 @@ public class IntegrationTest extends BaseTest {
public void testFrameProcessing_simple() throws Exception {
FrameProcessor processor = mock(FrameProcessor.class);
camera.addFrameProcessor(processor);
camera.start();
waitForOpen(true);
assert30Frames(processor);
@ -530,7 +561,6 @@ public class IntegrationTest extends BaseTest {
public void testFrameProcessing_afterSnapshot() throws Exception {
FrameProcessor processor = mock(FrameProcessor.class);
camera.addFrameProcessor(processor);
camera.start();
waitForOpen(true);
// In Camera1, snapshots will clear the preview callback
@ -545,11 +575,8 @@ public class IntegrationTest extends BaseTest {
public void testFrameProcessing_afterRestart() throws Exception {
FrameProcessor processor = mock(FrameProcessor.class);
camera.addFrameProcessor(processor);
camera.start();
waitForOpen(true);
camera.stop();
waitForClose(true);
camera.start();
waitForOpen(true);
assert30Frames(processor);

@ -32,23 +32,23 @@ public class MockCameraController extends CameraController {
}
@Override
void onStart() throws Exception {
void onStart() {
}
@Override
void onStop() throws Exception {
void onStop() {
}
@Override
boolean setZoom(float zoom) {
void setZoom(float zoom, PointF[] points, boolean notify) {
mZoomValue = zoom;
mZoomChanged = true;
return true;
}
@Override
boolean setExposureCorrection(float EVvalue) {
void setExposureCorrection(float EVvalue, float[] bounds, PointF[] points, boolean notify) {
mExposureCorrectionValue = EVvalue;
mExposureCorrectionChanged = true;
return true;
}
@Override
@ -92,24 +92,20 @@ public class MockCameraController extends CameraController {
}
@Override
boolean capturePicture() {
void capturePicture() {
mPictureCaptured = true;
return true;
}
@Override
boolean captureSnapshot() {
return true;
void captureSnapshot() {
}
@Override
boolean startVideo(@NonNull File file) {
return true;
void startVideo(@NonNull File file) {
}
@Override
boolean endVideo() {
return true;
void endVideo() {
}
@Override
@ -120,23 +116,19 @@ public class MockCameraController extends CameraController {
@Override
boolean startAutoFocus(@Nullable Gesture gesture, PointF point) {
void startAutoFocus(@Nullable Gesture gesture, PointF point) {
mFocusStarted = true;
return true;
}
@Override
public void onSurfaceChanged() {
}
@Override
public void onSurfaceAvailable() {
}
@Override
public void onBufferAvailable(byte[] buffer) {
}
}

@ -31,8 +31,7 @@ public abstract class PreviewTest extends BaseTest {
@Before
public void setUp() {
availability = new Task<>();
availability.listen();
availability = new Task<>(true);
ui(new Runnable() {
@Override

@ -24,8 +24,7 @@ public class WorkerHandlerTest extends BaseTest {
@Test
public void testStaticRun() {
final Task<Boolean> task = new Task<>();
task.listen();
final Task<Boolean> task = new Task<>(true);
Runnable action = new Runnable() {
@Override
public void run() {

@ -16,13 +16,14 @@ import android.support.annotation.WorkerThread;
import android.view.SurfaceHolder;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
@SuppressWarnings("deprecation")
class Camera1 extends CameraController implements Camera.PreviewCallback {
class Camera1 extends CameraController implements Camera.PreviewCallback, Camera.ErrorCallback {
private static final String TAG = Camera1.class.getSimpleName();
private static final CameraLogger LOG = CameraLogger.create(TAG);
@ -40,144 +41,145 @@ class Camera1 extends CameraController implements Camera.PreviewCallback {
public void run() {
if (!isCameraAvailable()) return;
mCamera.cancelAutoFocus();
synchronized (mLock) {
Camera.Parameters params = mCamera.getParameters();
params.setFocusAreas(null);
params.setMeteringAreas(null);
applyDefaultFocus(params); // Revert to internal focus.
mCamera.setParameters(params);
}
}
};
private Mapper mMapper = new Mapper.Mapper1();
private boolean mIsSetup = false;
private Mapper mMapper;
private boolean mIsBound = false;
private boolean mIsCapturingImage = false;
private boolean mIsCapturingVideo = false;
private final Object mLock = new Object();
Camera1(CameraView.CameraCallbacks callback) {
super(callback);
mMapper = new Mapper.Mapper1();
}
/**
* Preview surface is now available. If camera is open, set up.
*/
private void schedule(@Nullable final Task<Void> task, final boolean ensureAvailable, final Runnable action) {
mHandler.post(new Runnable() {
@Override
public void run() {
if (ensureAvailable && !isCameraAvailable()) {
if (task != null) task.end(null);
} else {
action.run();
if (task != null) task.end(null);
}
}
});
}
// Preview surface is now available. If camera is open, set up.
@Override
public void onSurfaceAvailable() {
LOG.i("onSurfaceAvailable:", "Size is", mPreview.getSurfaceSize());
if (!shouldSetup()) return;
mHandler.post(new Runnable() {
schedule(null, false, new Runnable() {
@Override
public void run() {
if (!shouldSetup()) return;
if (shouldBindToSurface()) {
LOG.i("onSurfaceAvailable:", "Inside handler. About to bind.");
try {
setup();
bindToSurface();
} catch (Exception e) {
LOG.w("onSurfaceAvailable:", "Exception while binding camera to preview.", e);
throw new RuntimeException(e);
LOG.e("onSurfaceAvailable:", "Exception while binding camera to preview.", e);
throw new CameraException(e);
}
}
}
});
}
/**
* Preview surface did change its size. Compute a new preview size.
* This requires stopping and restarting the preview.
*/
// Preview surface did change its size. Compute a new preview size.
// This requires stopping and restarting the preview.
@Override
public void onSurfaceChanged() {
LOG.i("onSurfaceChanged, size is", mPreview.getSurfaceSize());
if (mIsSetup) {
schedule(null, true, new Runnable() {
@Override
public void run() {
if (!mIsBound) return;
// Compute a new camera preview size.
Size newSize = computePreviewSize();
if (!newSize.equals(mPreviewSize)) {
LOG.i("onSurfaceChanged:", "Computed a new preview size. Dispatching.");
if (newSize.equals(mPreviewSize)) return;
// Apply.
LOG.i("onSurfaceChanged:", "Computed a new preview size. Going on.");
mPreviewSize = newSize;
mCameraCallbacks.onCameraPreviewSizeChanged();
synchronized (mLock) {
LOG.i("onSurfaceChanged:", "Stopping preview.");
mCamera.stopPreview();
LOG.i("onSurfaceChanged:", "Stopped preview.");
Camera.Parameters params = mCamera.getParameters();
params.setPreviewSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());
mCamera.setParameters(params);
}
boolean invertPreviewSizes = shouldFlipSizes();
mPreview.setDesiredSize(
invertPreviewSizes ? mPreviewSize.getHeight() : mPreviewSize.getWidth(),
invertPreviewSizes ? mPreviewSize.getWidth() : mPreviewSize.getHeight()
);
mCamera.setPreviewCallbackWithBuffer(null); // This clears the buffers
mCamera.setPreviewCallbackWithBuffer(this); // Reset
mFrameManager.allocate(ImageFormat.getBitsPerPixel(mPreviewFormat), mPreviewSize);
LOG.i("onSurfaceChanged:", "Restarting preview.");
mCamera.startPreview();
LOG.i("onSurfaceChanged:", "Restarted preview.");
}
applySizesAndStartPreview("onSurfaceChanged:");
}
});
}
private boolean shouldSetup() {
return isCameraAvailable() && mPreview != null && mPreview.isReady() && !mIsSetup;
private boolean shouldBindToSurface() {
return isCameraAvailable() && mPreview != null && mPreview.isReady() && !mIsBound;
}
// The act of binding an "open" camera to a "ready" preview.
// These can happen at different times but we want to end up here.
@WorkerThread
private void setup() throws Exception {
LOG.i("setup:", "Started");
private void bindToSurface() {
LOG.i("bindToSurface:", "Started");
Object output = mPreview.getOutput();
try {
if (mPreview.getOutputClass() == SurfaceHolder.class) {
mCamera.setPreviewDisplay((SurfaceHolder) output);
} else {
mCamera.setPreviewTexture((SurfaceTexture) output);
}
} catch (IOException e) {
throw new CameraException(e);
}
boolean invertPreviewSizes = shouldFlipSizes();
mCaptureSize = computeCaptureSize();
mPreviewSize = computePreviewSize();
LOG.i("setup:", "Dispatching onCameraPreviewSizeChanged.");
applySizesAndStartPreview("bindToSurface:");
mIsBound = true;
}
// To be called when the preview size is setup or changed.
private void applySizesAndStartPreview(String log) {
LOG.i(log, "Dispatching onCameraPreviewSizeChanged.");
mCameraCallbacks.onCameraPreviewSizeChanged();
boolean invertPreviewSizes = shouldFlipSizes();
mPreview.setDesiredSize(
invertPreviewSizes ? mPreviewSize.getHeight() : mPreviewSize.getWidth(),
invertPreviewSizes ? mPreviewSize.getWidth() : mPreviewSize.getHeight()
);
synchronized (mLock) {
Camera.Parameters params = mCamera.getParameters();
mPreviewFormat = params.getPreviewFormat();
params.setPreviewSize(mPreviewSize.getWidth(), mPreviewSize.getHeight()); // <- not allowed during preview
params.setPictureSize(mCaptureSize.getWidth(), mCaptureSize.getHeight()); // <- allowed
mCamera.setParameters(params);
}
mCamera.setPreviewCallbackWithBuffer(null); // Release anything left
mCamera.setPreviewCallbackWithBuffer(this); // Add ourselves
mFrameManager.allocate(ImageFormat.getBitsPerPixel(mPreviewFormat), mPreviewSize);
LOG.i("setup:", "Starting preview with startPreview().");
LOG.i(log, "Starting preview with startPreview().");
mCamera.startPreview();
LOG.i("setup:", "Started preview with startPreview().");
mIsSetup = true;
LOG.i(log, "Started preview.");
}
@WorkerThread
@Override
void onStart() throws Exception {
void onStart() {
if (isCameraAvailable()) {
LOG.w("onStart:", "Camera not available. Should not happen.");
onStop(); // Should not happen.
}
if (collectCameraId()) {
mCamera = Camera.open(mCameraId);
mCamera.setErrorCallback(this);
// Set parameters that might have been set before the camera was opened.
synchronized (mLock) {
LOG.i("onStart:", "Applying default parameters.");
Camera.Parameters params = mCamera.getParameters();
mExtraProperties = new ExtraProperties(params);
@ -186,29 +188,28 @@ class Camera1 extends CameraController implements Camera.PreviewCallback {
mergeFlash(params, Flash.DEFAULT);
mergeLocation(params, null);
mergeWhiteBalance(params, WhiteBalance.DEFAULT);
mergeHdr(params, Hdr.DEFAULT);
params.setRecordingHint(mSessionType == SessionType.VIDEO);
mCamera.setParameters(params);
}
// Try starting preview.
mCamera.setDisplayOrientation(computeSensorToDisplayOffset()); // <- not allowed during preview
if (shouldSetup()) setup();
if (shouldBindToSurface()) bindToSurface();
LOG.i("onStart:", "Ended");
}
}
@WorkerThread
@Override
void onStop() throws Exception {
void onStop() {
Exception error = null;
LOG.i("onStop:", "About to clean up.");
mHandler.get().removeCallbacks(mPostFocusResetRunnable);
mFrameManager.release();
if (mCamera != null) {
LOG.i("onStop:", "Clean up.", "Ending video?", mIsCapturingVideo);
if (mIsCapturingVideo) endVideo();
LOG.i("onStop:", "Clean up.", "Ending video.");
endVideoImmediately();
try {
LOG.i("onStop:", "Clean up.", "Stopping preview.");
@ -216,7 +217,7 @@ class Camera1 extends CameraController implements Camera.PreviewCallback {
mCamera.setPreviewCallbackWithBuffer(null);
LOG.i("onStop:", "Clean up.", "Stopped preview.");
} catch (Exception e) {
LOG.w("onStop:", "Clean up.", "Exception while stopping preview.");
LOG.w("onStop:", "Clean up.", "Exception while stopping preview.", e);
error = e;
}
@ -225,7 +226,7 @@ class Camera1 extends CameraController implements Camera.PreviewCallback {
mCamera.release();
LOG.i("onStop:", "Clean up.", "Released camera.");
} catch (Exception e) {
LOG.w("onStop:", "Clean up.", "Exception while releasing camera.");
LOG.w("onStop:", "Clean up.", "Exception while releasing camera.", e);
error = e;
}
}
@ -234,9 +235,9 @@ class Camera1 extends CameraController implements Camera.PreviewCallback {
mCamera = null;
mPreviewSize = null;
mCaptureSize = null;
mIsSetup = false;
mIsBound = false;
if (error != null) throw error;
if (error != null) throw new CameraException(error);
}
private boolean collectCameraId() {
@ -255,31 +256,50 @@ class Camera1 extends CameraController implements Camera.PreviewCallback {
@Override
public void onBufferAvailable(byte[] buffer) {
// TODO: sync with handler?
if (isCameraAvailable()) {
mCamera.addCallbackBuffer(buffer);
}
}
@Override
public void onError(int error, Camera camera) {
if (error == Camera.CAMERA_ERROR_SERVER_DIED) {
// Looks like this is recoverable.
LOG.w("Recoverable error inside the onError callback.", "CAMERA_ERROR_SERVER_DIED");
stopImmediately();
start();
return;
}
LOG.e("Error inside the onError callback.", error);
throw new CameraException(new RuntimeException(CameraLogger.lastMessage));
}
@Override
void setSessionType(SessionType sessionType) {
if (sessionType != mSessionType) {
mSessionType = sessionType;
if (isCameraAvailable()) {
schedule(null, true, new Runnable() {
@Override
public void run() {
restart();
}
});
}
}
@Override
void setLocation(Location location) {
Location oldLocation = mLocation;
final Location oldLocation = mLocation;
mLocation = location;
if (isCameraAvailable()) {
synchronized (mLock) {
schedule(mLocationTask, true, new Runnable() {
@Override
public void run() {
Camera.Parameters params = mCamera.getParameters();
if (mergeLocation(params, oldLocation)) mCamera.setParameters(params);
}
}
});
}
private boolean mergeLocation(Camera.Parameters params, Location oldLocation) {
@ -302,22 +322,28 @@ class Camera1 extends CameraController implements Camera.PreviewCallback {
void setFacing(Facing facing) {
if (facing != mFacing) {
mFacing = facing;
if (collectCameraId() && isCameraAvailable()) {
schedule(null, true, new Runnable() {
@Override
public void run() {
if (collectCameraId()) {
restart();
}
}
});
}
}
@Override
void setWhiteBalance(WhiteBalance whiteBalance) {
WhiteBalance old = mWhiteBalance;
final WhiteBalance old = mWhiteBalance;
mWhiteBalance = whiteBalance;
if (isCameraAvailable()) {
synchronized (mLock) {
schedule(mWhiteBalanceTask, true, new Runnable() {
@Override
public void run() {
Camera.Parameters params = mCamera.getParameters();
if (mergeWhiteBalance(params, old)) mCamera.setParameters(params);
}
}
});
}
private boolean mergeWhiteBalance(Camera.Parameters params, WhiteBalance oldWhiteBalance) {
@ -331,14 +357,15 @@ class Camera1 extends CameraController implements Camera.PreviewCallback {
@Override
void setHdr(Hdr hdr) {
Hdr old = mHdr;
final Hdr old = mHdr;
mHdr = hdr;
if (isCameraAvailable()) {
synchronized (mLock) {
schedule(mHdrTask, true, new Runnable() {
@Override
public void run() {
Camera.Parameters params = mCamera.getParameters();
if (mergeHdr(params, old)) mCamera.setParameters(params);
}
}
});
}
private boolean mergeHdr(Camera.Parameters params, Hdr oldHdr) {
@ -355,7 +382,8 @@ class Camera1 extends CameraController implements Camera.PreviewCallback {
void setAudio(Audio audio) {
if (mAudio != audio) {
if (mIsCapturingVideo) {
LOG.w("Changing audio mode while recording. Changes will take place starting from next video");
LOG.w("Audio setting was changed while recording. " +
"Changes will take place starting from next video");
}
mAudio = audio;
}
@ -363,14 +391,15 @@ class Camera1 extends CameraController implements Camera.PreviewCallback {
@Override
void setFlash(Flash flash) {
Flash old = mFlash;
final Flash old = mFlash;
mFlash = flash;
if (isCameraAvailable()) {
synchronized (mLock) {
schedule(mFlashTask, true, new Runnable() {
@Override
public void run() {
Camera.Parameters params = mCamera.getParameters();
if (mergeFlash(params, old)) mCamera.setParameters(params);
}
}
});
}
@ -413,87 +442,102 @@ class Camera1 extends CameraController implements Camera.PreviewCallback {
@Override
void setVideoQuality(VideoQuality videoQuality) {
final VideoQuality old = mVideoQuality;
mVideoQuality = videoQuality;
schedule(mVideoQualityTask, true, new Runnable() {
@Override
public void run() {
if (mIsCapturingVideo) {
// TODO: actually any call to getParameters() could fail while recording a video.
// See. https://stackoverflow.com/questions/14941625/correct-handling-of-exception-getparameters-failed-empty-parameters
// See. https://stackoverflow.com/questions/14941625/
mVideoQuality = old;
throw new IllegalStateException("Can't change video quality while recording a video.");
}
mVideoQuality = videoQuality;
if (isCameraAvailable() && mSessionType == SessionType.VIDEO) {
if (mSessionType == SessionType.VIDEO) {
// Change capture size to a size that fits the video aspect ratio.
Size oldSize = mCaptureSize;
mCaptureSize = computeCaptureSize();
if (!mCaptureSize.equals(oldSize)) {
// New video quality triggers a new aspect ratio.
// Go on and see if preview size should change also.
synchronized (mLock) {
Camera.Parameters params = mCamera.getParameters();
params.setPictureSize(mCaptureSize.getWidth(), mCaptureSize.getHeight());
mCamera.setParameters(params);
}
onSurfaceChanged();
}
LOG.i("setVideoQuality:", "captureSize:", mCaptureSize);
LOG.i("setVideoQuality:", "previewSize:", mPreviewSize);
}
}
});
}
@Override
boolean capturePicture() {
if (mIsCapturingImage) return false;
if (!isCameraAvailable()) return false;
if (mSessionType == SessionType.VIDEO && mIsCapturingVideo) {
if (!mOptions.isVideoSnapshotSupported()) return false;
}
void capturePicture() {
LOG.v("capturePicture: scheduling");
schedule(null, true, new Runnable() {
@Override
public void run() {
LOG.v("capturePicture: performing.", mIsCapturingImage);
if (mIsCapturingImage) return;
if (mIsCapturingVideo && !mOptions.isVideoSnapshotSupported()) return;
// Set boolean to wait for image callback
mIsCapturingImage = true;
final int exifRotation = computeExifRotation();
final boolean exifFlip = computeExifFlip();
final int sensorToDisplay = computeSensorToDisplayOffset();
synchronized (mLock) {
Camera.Parameters params = mCamera.getParameters();
params.setRotation(exifRotation);
mCamera.setParameters(params);
}
// Is the final picture (decoded respecting EXIF) consistent with CameraView orientation?
// We must consider exifOrientation to bring back the picture in the sensor world.
// Then use sensorToDisplay to move to the display world, where CameraView lives.
final boolean consistentWithView = (exifRotation + sensorToDisplay + 180) % 180 == 0;
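// Worked example with illustrative numbers (not taken from this diff): if exifRotation
// is 90 and sensorToDisplay is 90, then (90 + 90 + 180) % 180 == 0 and the decoded
// picture is already aligned with the view. If exifRotation is 90 and sensorToDisplay
// is 0, then (90 + 0 + 180) % 180 == 90, so the picture is rotated relative to the
// view and the processing step has to account for it.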
mCamera.takePicture(null, null, null,
mCamera.takePicture(
new Camera.ShutterCallback() {
@Override
public void onShutter() {
mCameraCallbacks.onShutter(false);
}
},
null,
null,
new Camera.PictureCallback() {
@Override
public void onPictureTaken(byte[] data, final Camera camera) {
mIsCapturingImage = false;
mHandler.post(new Runnable() {
@Override
public void run() {
// This is needed, read somewhere in the docs.
camera.startPreview();
}
});
mCameraCallbacks.processImage(data, consistentWithView, exifFlip);
camera.startPreview(); // This is needed, read somewhere in the docs.
}
}
);
}
});
return true;
}
@Override
boolean captureSnapshot() {
if (!isCameraAvailable()) return false;
if (mIsCapturingImage) return false;
void captureSnapshot() {
LOG.v("captureSnapshot: scheduling");
schedule(null, true, new Runnable() {
@Override
public void run() {
LOG.v("captureSnapshot: performing.", mIsCapturingImage);
if (mIsCapturingImage) return;
// This won't work while capturing a video.
// Switch to capturePicture.
if (mIsCapturingVideo) {
capturePicture();
return false;
return;
}
mIsCapturingImage = true;
mCamera.setOneShotPreviewCallback(new Camera.PreviewCallback() {
@Override
public void onPreviewFrame(final byte[] data, Camera camera) {
mCameraCallbacks.onShutter(true);
// Got to rotate the preview frame, since byte[] data here does not include
// EXIF tags automatically set by camera. So either we add EXIF, or we rotate.
// Adding EXIF to a byte array, unfortunately, is hard.
@ -510,8 +554,10 @@ class Camera1 extends CameraController implements Camera.PreviewCallback {
@Override
public void run() {
LOG.v("captureSnapshot: rotating.");
final boolean consistentWithView = (sensorToDevice + sensorToDisplay + 180) % 180 == 0;
byte[] rotatedData = RotationHelper.rotate(data, preWidth, preHeight, sensorToDevice);
LOG.v("captureSnapshot: rotated.");
YuvImage yuv = new YuvImage(rotatedData, format, postWidth, postHeight, null);
mCameraCallbacks.processSnapshot(yuv, consistentWithView, exifFlip);
mIsCapturingImage = false;
@ -520,11 +566,12 @@ class Camera1 extends CameraController implements Camera.PreviewCallback {
// It seems that the buffers are already cleared here, so we need to allocate again.
mCamera.setPreviewCallbackWithBuffer(null); // Release anything left
mCamera.setPreviewCallbackWithBuffer(this); // Add ourselves
mFrameManager.allocate(ImageFormat.getBitsPerPixel(mPreviewFormat), mPreviewSize);
mCamera.setPreviewCallbackWithBuffer(Camera1.this); // Add ourselves
mFrameManager.allocate(ImageFormat.getBitsPerPixel(mPreviewFormat), mPreviewSize);
}
});
}
});
return true;
}
@Override
@ -552,12 +599,11 @@ class Camera1 extends CameraController implements Camera.PreviewCallback {
// If we are going to be closed, don't act on camera.
// Even if mCamera != null, it might have been released.
case STATE_STOPPING: return false;
// If we are started, act as long as there is no stop/restart scheduled.
// At this point mCamera should never be null.
case STATE_STARTED: return !mScheduledForStop && !mScheduledForRestart;
// If we are started, mCamera should never be null.
case STATE_STARTED: return true;
// If we are starting, theoretically we could act.
// Just check that camera is available.
case STATE_STARTING: return mCamera != null && !mScheduledForStop && !mScheduledForRestart;
case STATE_STARTING: return mCamera != null;
}
return false;
}
@ -621,7 +667,7 @@ class Camera1 extends CameraController implements Camera.PreviewCallback {
// The Camcorder internally checks for cameraParameters.getSupportedVideoSizes() etc.
// We want the picture size to be the max picture consistent with the video aspect ratio.
List<Size> captureSizes = sizesFromList(params.getSupportedPictureSizes());
CamcorderProfile profile = getCamcorderProfile(mVideoQuality);
CamcorderProfile profile = getCamcorderProfile(mCameraId, mVideoQuality);
AspectRatio targetRatio = AspectRatio.of(profile.videoFrameWidth, profile.videoFrameHeight);
LOG.i("size:", "computeCaptureSize:", "videoQuality:", mVideoQuality, "targetRatio:", targetRatio);
return matchSize(captureSizes, targetRatio, new Size(0, 0), true);
@ -643,9 +689,11 @@ class Camera1 extends CameraController implements Camera.PreviewCallback {
@Override
boolean startVideo(@NonNull File videoFile) {
if (mIsCapturingVideo) return false;
if (!isCameraAvailable()) return false;
void startVideo(@NonNull final File videoFile) {
schedule(mStartVideoTask, true, new Runnable() {
@Override
public void run() {
if (mIsCapturingVideo) return;
if (mSessionType == SessionType.VIDEO) {
mVideoFile = videoFile;
mIsCapturingVideo = true;
@ -653,62 +701,71 @@ class Camera1 extends CameraController implements Camera.PreviewCallback {
try {
mMediaRecorder.prepare();
mMediaRecorder.start();
return true;
} catch (Exception e) {
LOG.e("Error while starting MediaRecorder. Swallowing.", e);
mVideoFile = null;
mCamera.lock();
endVideo();
return false;
endVideoImmediately();
}
} else {
throw new IllegalStateException("Can't record video while session type is picture");
}
}
});
}
@Override
boolean endVideo() {
if (mIsCapturingVideo) {
void endVideo() {
schedule(null, false, new Runnable() {
@Override
public void run() {
endVideoImmediately();
}
});
}
@WorkerThread
private void endVideoImmediately() {
LOG.i("endVideoImmediately:", "is capturing:", mIsCapturingVideo);
mIsCapturingVideo = false;
if (mMediaRecorder != null) {
try {
mMediaRecorder.stop();
mMediaRecorder.release();
} catch (Exception e) {
// This can happen if endVideo() is called right after startVideo().
// We don't care.
LOG.w("Error while closing media recorder. Swallowing", e);
// This can happen if endVideo() is called right after startVideo(). We don't care.
LOG.w("endVideoImmediately:", "Error while closing media recorder. Swallowing", e);
}
mMediaRecorder.release();
mMediaRecorder = null;
}
if (mVideoFile != null) {
mCameraCallbacks.dispatchOnVideoTaken(mVideoFile);
mVideoFile = null;
}
return true;
}
return false;
}
@WorkerThread
private void initMediaRecorder() {
mMediaRecorder = new MediaRecorder();
mCamera.unlock();
mMediaRecorder.setCamera(mCamera);
mMediaRecorder.setVideoSource(MediaRecorder.VideoSource.CAMERA);
CamcorderProfile profile = getCamcorderProfile(mVideoQuality);
if (mAudio == Audio.ON) {
mMediaRecorder.setAudioSource(MediaRecorder.AudioSource.CAMCORDER);
mMediaRecorder.setProfile(profile);
} else {
// Set all values contained in profile except audio settings
// Must be called before setOutputFormat.
mMediaRecorder.setAudioSource(MediaRecorder.AudioSource.DEFAULT);
}
CamcorderProfile profile = getCamcorderProfile(mCameraId, mVideoQuality);
mMediaRecorder.setOutputFormat(profile.fileFormat);
mMediaRecorder.setVideoEncoder(profile.videoCodec);
mMediaRecorder.setVideoEncodingBitRate(profile.videoBitRate);
mMediaRecorder.setVideoFrameRate(profile.videoFrameRate);
mMediaRecorder.setVideoSize(profile.videoFrameWidth, profile.videoFrameHeight);
mMediaRecorder.setVideoEncoder(profile.videoCodec);
mMediaRecorder.setVideoEncodingBitRate(profile.videoBitRate);
if (mAudio == Audio.ON) {
mMediaRecorder.setAudioChannels(profile.audioChannels);
mMediaRecorder.setAudioSamplingRate(profile.audioSampleRate);
mMediaRecorder.setAudioEncoder(profile.audioCodec);
mMediaRecorder.setAudioEncodingBitRate(profile.audioBitRate);
}
if (mLocation != null) {
@ -721,48 +778,47 @@ class Camera1 extends CameraController implements Camera.PreviewCallback {
// Not needed. mMediaRecorder.setPreviewDisplay(mPreview.getSurface());
}
@NonNull
private CamcorderProfile getCamcorderProfile(VideoQuality videoQuality) {
private static CamcorderProfile getCamcorderProfile(int cameraId, VideoQuality videoQuality) {
switch (videoQuality) {
case HIGHEST:
return CamcorderProfile.get(mCameraId, CamcorderProfile.QUALITY_HIGH);
return CamcorderProfile.get(cameraId, CamcorderProfile.QUALITY_HIGH);
case MAX_2160P:
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP &&
CamcorderProfile.hasProfile(CamcorderProfile.QUALITY_2160P)) {
return CamcorderProfile.get(mCameraId, CamcorderProfile.QUALITY_2160P);
return CamcorderProfile.get(cameraId, CamcorderProfile.QUALITY_2160P);
}
// Don't break.
case MAX_1080P:
if (CamcorderProfile.hasProfile(mCameraId, CamcorderProfile.QUALITY_1080P)) {
return CamcorderProfile.get(mCameraId, CamcorderProfile.QUALITY_1080P);
if (CamcorderProfile.hasProfile(cameraId, CamcorderProfile.QUALITY_1080P)) {
return CamcorderProfile.get(cameraId, CamcorderProfile.QUALITY_1080P);
}
// Don't break.
case MAX_720P:
if (CamcorderProfile.hasProfile(mCameraId, CamcorderProfile.QUALITY_720P)) {
return CamcorderProfile.get(mCameraId, CamcorderProfile.QUALITY_720P);
if (CamcorderProfile.hasProfile(cameraId, CamcorderProfile.QUALITY_720P)) {
return CamcorderProfile.get(cameraId, CamcorderProfile.QUALITY_720P);
}
// Don't break.
case MAX_480P:
if (CamcorderProfile.hasProfile(mCameraId, CamcorderProfile.QUALITY_480P)) {
return CamcorderProfile.get(mCameraId, CamcorderProfile.QUALITY_480P);
if (CamcorderProfile.hasProfile(cameraId, CamcorderProfile.QUALITY_480P)) {
return CamcorderProfile.get(cameraId, CamcorderProfile.QUALITY_480P);
}
// Don't break.
case MAX_QVGA:
if (CamcorderProfile.hasProfile(mCameraId, CamcorderProfile.QUALITY_QVGA)) {
return CamcorderProfile.get(mCameraId, CamcorderProfile.QUALITY_QVGA);
if (CamcorderProfile.hasProfile(cameraId, CamcorderProfile.QUALITY_QVGA)) {
return CamcorderProfile.get(cameraId, CamcorderProfile.QUALITY_QVGA);
}
// Don't break.
case LOWEST:
default:
// Fallback to lowest.
return CamcorderProfile.get(mCameraId, CamcorderProfile.QUALITY_LOW);
return CamcorderProfile.get(cameraId, CamcorderProfile.QUALITY_LOW);
}
}
@ -771,33 +827,48 @@ class Camera1 extends CameraController implements Camera.PreviewCallback {
@Override
boolean setZoom(float zoom) {
if (!isCameraAvailable()) return false;
if (!mOptions.isZoomSupported()) return false;
synchronized (mLock) {
void setZoom(final float zoom, final PointF[] points, final boolean notify) {
schedule(mZoomTask, true, new Runnable() {
@Override
public void run() {
if (!mOptions.isZoomSupported()) return;
mZoomValue = zoom;
Camera.Parameters params = mCamera.getParameters();
float max = params.getMaxZoom();
params.setZoom((int) (zoom * max));
mCamera.setParameters(params);
if (notify) {
mCameraCallbacks.dispatchOnZoomChanged(zoom, points);
}
return true;
}
});
}
@Override
boolean setExposureCorrection(float EVvalue) {
if (!isCameraAvailable()) return false;
if (!mOptions.isExposureCorrectionSupported()) return false;
void setExposureCorrection(final float EVvalue, final float[] bounds,
final PointF[] points, final boolean notify) {
schedule(mExposureCorrectionTask, true, new Runnable() {
@Override
public void run() {
if (!mOptions.isExposureCorrectionSupported()) return;
float value = EVvalue;
float max = mOptions.getExposureCorrectionMaxValue();
float min = mOptions.getExposureCorrectionMinValue();
EVvalue = EVvalue < min ? min : EVvalue > max ? max : EVvalue; // cap
synchronized (mLock) {
value = value < min ? min : value > max ? max : value; // cap
mExposureCorrectionValue = value;
Camera.Parameters params = mCamera.getParameters();
int indexValue = (int) (EVvalue / params.getExposureCompensationStep());
int indexValue = (int) (value / params.getExposureCompensationStep());
params.setExposureCompensation(indexValue);
mCamera.setParameters(params);
if (notify) {
mCameraCallbacks.dispatchOnExposureCorrectionChanged(value, bounds, points);
}
return true;
}
});
}
// -----------------
@ -805,13 +876,25 @@ class Camera1 extends CameraController implements Camera.PreviewCallback {
@Override
boolean startAutoFocus(@Nullable final Gesture gesture, PointF point) {
if (!isCameraAvailable()) return false;
if (!mOptions.isAutoFocusSupported()) return false;
void startAutoFocus(@Nullable final Gesture gesture, final PointF point) {
// Must get width and height from the UI thread.
int viewWidth = 0, viewHeight = 0;
if (mPreview != null && mPreview.isReady()) {
viewWidth = mPreview.getView().getWidth();
viewHeight = mPreview.getView().getHeight();
}
final int viewWidthF = viewWidth;
final int viewHeightF = viewHeight;
// Schedule.
schedule(null, true, new Runnable() {
@Override
public void run() {
if (!mOptions.isAutoFocusSupported()) return;
final PointF p = new PointF(point.x, point.y); // copy.
List<Camera.Area> meteringAreas2 = computeMeteringAreas(p.x, p.y);
List<Camera.Area> meteringAreas2 = computeMeteringAreas(p.x, p.y,
viewWidthF, viewHeightF, computeSensorToDisplayOffset());
List<Camera.Area> meteringAreas1 = meteringAreas2.subList(0, 1);
synchronized (mLock) {
// At this point we are sure that camera supports auto focus... right? Look at CameraView.onTouchEvent().
Camera.Parameters params = mCamera.getParameters();
int maxAF = params.getMaxNumFocusAreas();
@ -832,18 +915,19 @@ class Camera1 extends CameraController implements Camera.PreviewCallback {
}
});
}
return true;
});
}
private List<Camera.Area> computeMeteringAreas(double viewClickX, double viewClickY) {
@WorkerThread
private static List<Camera.Area> computeMeteringAreas(double viewClickX, double viewClickY,
int viewWidth, int viewHeight,
int sensorToDisplay) {
// Event came in view coordinates. We must rotate to sensor coordinates.
// First, rescale to the -1000 ... 1000 range.
int displayToSensor = -computeSensorToDisplayOffset();
double viewWidth = mPreview.getView().getWidth();
double viewHeight = mPreview.getView().getHeight();
viewClickX = -1000d + (viewClickX / viewWidth) * 2000d;
viewClickY = -1000d + (viewClickY / viewHeight) * 2000d;
int displayToSensor = -sensorToDisplay;
viewClickX = -1000d + (viewClickX / (double) viewWidth) * 2000d;
viewClickY = -1000d + (viewClickY / (double) viewHeight) * 2000d;
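// Worked example with illustrative numbers: on a 1080x1920 view, a tap at (540, 480)
// maps to x = -1000 + (540 / 1080) * 2000 = 0 and y = -1000 + (480 / 1920) * 2000 = -500,
// i.e. the horizontal center, a quarter of the way down, expressed in the Camera API's
// -1000..1000 coordinate space (before the rotation applied below).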
// Apply rotation to this point.
// https://academo.org/demos/rotation-about-point/
@ -866,7 +950,7 @@ class Camera1 extends CameraController implements Camera.PreviewCallback {
}
private Rect computeMeteringArea(double centerX, double centerY, double size) {
private static Rect computeMeteringArea(double centerX, double centerY, double size) {
double delta = size / 2d;
int top = (int) Math.max(centerY - delta, -1000);
int bottom = (int) Math.min(centerY + delta, 1000);
@ -924,23 +1008,15 @@ class Camera1 extends CameraController implements Camera.PreviewCallback {
LOG.i("size:", "matchSize:", "found consistent:", consistent.size());
LOG.i("size:", "matchSize:", "found big enough and consistent:", bigEnoughAndConsistent.size());
Size result;
if (biggestPossible) {
if (bigEnoughAndConsistent.size() > 0) {
result = Collections.max(bigEnoughAndConsistent);
result = biggestPossible ?
Collections.max(bigEnoughAndConsistent) :
Collections.min(bigEnoughAndConsistent);
} else if (consistent.size() > 0) {
result = Collections.max(consistent);
} else {
result = Collections.max(sizes);
}
} else {
if (bigEnoughAndConsistent.size() > 0) {
result = Collections.min(bigEnoughAndConsistent);
} else if (consistent.size() > 0) {
result = Collections.max(consistent);
} else {
result = Collections.max(sizes);
}
}
LOG.i("size", "matchSize:", "returning result", result);
return result;
}

@ -37,12 +37,12 @@ class Camera2 extends CameraController {
}
@Override
void onStart() throws Exception {
void onStart() {
}
@Override
void onStop() throws Exception {
void onStop() {
}
@ -57,13 +57,13 @@ class Camera2 extends CameraController {
}
@Override
boolean setZoom(float zoom) {
return false;
void setZoom(float zoom, PointF[] points, boolean notify) {
}
@Override
boolean setExposureCorrection(float EVvalue) {
return false;
void setExposureCorrection(float EVvalue, float[] bounds, PointF[] points, boolean notify) {
}
@Override
@ -97,23 +97,23 @@ class Camera2 extends CameraController {
}
@Override
boolean capturePicture() {
return false;
void capturePicture() {
}
@Override
boolean captureSnapshot() {
return false;
void captureSnapshot() {
}
@Override
boolean startVideo(@NonNull File file) {
return false;
void startVideo(@NonNull File file) {
}
@Override
boolean endVideo() {
return false;
void endVideo() {
}
@Override
@ -122,8 +122,8 @@ class Camera2 extends CameraController {
}
@Override
boolean startAutoFocus(@Nullable Gesture gesture, PointF point) {
return false;
void startAutoFocus(@Nullable Gesture gesture, PointF point) {
}
@Override

@ -2,13 +2,18 @@ package com.otaliastudios.cameraview;
import android.graphics.PointF;
import android.location.Location;
import android.os.Handler;
import android.os.Looper;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import android.support.annotation.WorkerThread;
import java.io.File;
abstract class CameraController implements CameraPreview.SurfaceCallback, FrameManager.BufferCallback {
abstract class CameraController implements
CameraPreview.SurfaceCallback,
FrameManager.BufferCallback,
Thread.UncaughtExceptionHandler {
private static final String TAG = CameraController.class.getSimpleName();
private static final CameraLogger LOG = CameraLogger.create(TAG);
@ -20,6 +25,8 @@ abstract class CameraController implements CameraPreview.SurfaceCallback, FrameM
protected final CameraView.CameraCallbacks mCameraCallbacks;
protected CameraPreview mPreview;
protected WorkerHandler mHandler;
/* for tests */ Handler mCrashHandler;
protected Facing mFacing;
protected Flash mFlash;
@ -30,6 +37,9 @@ abstract class CameraController implements CameraPreview.SurfaceCallback, FrameM
protected Location mLocation;
protected Audio mAudio;
protected float mZoomValue;
protected float mExposureCorrectionValue;
protected Size mCaptureSize;
protected Size mPreviewSize;
protected int mPreviewFormat;
@ -40,42 +50,83 @@ abstract class CameraController implements CameraPreview.SurfaceCallback, FrameM
protected int mDisplayOffset;
protected int mDeviceOrientation;
protected boolean mScheduledForStart = false;
protected boolean mScheduledForStop = false;
protected boolean mScheduledForRestart = false;
protected int mState = STATE_STOPPED;
protected WorkerHandler mHandler;
// Used for testing.
Task<Void> mZoomTask = new Task<>();
Task<Void> mExposureCorrectionTask = new Task<>();
Task<Void> mFlashTask = new Task<>();
Task<Void> mWhiteBalanceTask = new Task<>();
Task<Void> mHdrTask = new Task<>();
Task<Void> mLocationTask = new Task<>();
Task<Void> mVideoQualityTask = new Task<>();
Task<Void> mStartVideoTask = new Task<>();
CameraController(CameraView.CameraCallbacks callback) {
mCameraCallbacks = callback;
+ mCrashHandler = new Handler(Looper.getMainLooper());
mHandler = WorkerHandler.get("CameraViewController");
- mHandler.getThread().setUncaughtExceptionHandler(new Thread.UncaughtExceptionHandler() {
+ mHandler.getThread().setUncaughtExceptionHandler(this);
+ mFrameManager = new FrameManager(2, this);
+ }
+ void setPreview(CameraPreview cameraPreview) {
+ mPreview = cameraPreview;
+ mPreview.setSurfaceCallback(this);
+ }
+ //region Error handling
@Override
- public void uncaughtException(Thread thread, Throwable throwable) {
+ public void uncaughtException(final Thread thread, final Throwable throwable) {
// Something went wrong. Thread is terminated (about to?).
- // Move to other thread and stop resources.
- LOG.w("Interrupting thread, due to exception.", throwable);
+ // Move to other thread and release resources.
+ if (!(throwable instanceof CameraException)) {
+ // This is unexpected, either a bug or something the developer should know.
+ // Release and crash the UI thread so we get bug reports.
+ LOG.e("uncaughtException:", "Unexpected exception:", throwable);
+ destroy();
+ mCrashHandler.post(new Runnable() {
+ @Override
+ public void run() {
+ RuntimeException exception;
+ if (throwable instanceof RuntimeException) {
+ exception = (RuntimeException) throwable;
+ } else {
+ exception = new RuntimeException(throwable);
+ }
+ throw exception;
+ }
+ });
+ } else {
+ // At the moment all CameraExceptions are unrecoverable, there was something
+ // wrong when starting, stopping, or binding the camera to the preview.
+ final CameraException error = (CameraException) throwable;
+ LOG.e("uncaughtException:", "Interrupting thread with state:", ss(), "due to CameraException:", error);
thread.interrupt();
- LOG.w("Interrupted thread. Posting a stopImmediately.", ss());
mHandler = WorkerHandler.get("CameraViewController");
mHandler.getThread().setUncaughtExceptionHandler(this);
+ LOG.i("uncaughtException:", "Calling stopImmediately and notifying.");
mHandler.post(new Runnable() {
@Override
public void run() {
stopImmediately();
+ mCameraCallbacks.dispatchError(error);
}
});
}
- });
- mFrameManager = new FrameManager(2, this);
}
- void setPreview(CameraPreview cameraPreview) {
- mPreview = cameraPreview;
- mPreview.setSurfaceCallback(this);
+ final void destroy() {
+ LOG.i("destroy:", "state:", ss());
+ // Prevent CameraController leaks.
+ mHandler.getThread().setUncaughtExceptionHandler(null);
+ // Stop if needed.
+ stopImmediately();
}
+ //endregion
//region Start&Stop
private String ss() {
@ -91,13 +142,10 @@ abstract class CameraController implements CameraPreview.SurfaceCallback, FrameM
// Starts the preview asynchronously.
final void start() {
LOG.i("Start:", "posting runnable. State:", ss());
- mScheduledForStart = true;
mHandler.post(new Runnable() {
@Override
public void run() {
- try {
LOG.i("Start:", "executing. State:", ss());
- mScheduledForStart = false;
if (mState >= STATE_STARTING) return;
mState = STATE_STARTING;
LOG.i("Start:", "about to call onStart()", ss());
@ -105,11 +153,6 @@ abstract class CameraController implements CameraPreview.SurfaceCallback, FrameM
LOG.i("Start:", "returned from onStart().", "Dispatching.", ss());
mState = STATE_STARTED;
mCameraCallbacks.dispatchOnCameraOpened(mOptions);
- } catch (Exception e) {
- LOG.e("Error while starting the camera engine.", e);
- throw new RuntimeException(e);
- }
}
});
}
@ -117,13 +160,10 @@ abstract class CameraController implements CameraPreview.SurfaceCallback, FrameM
// Stops the preview asynchronously.
final void stop() {
LOG.i("Stop:", "posting runnable. State:", ss());
- mScheduledForStop = true;
mHandler.post(new Runnable() {
@Override
public void run() {
- try {
LOG.i("Stop:", "executing. State:", ss());
- mScheduledForStop = false;
if (mState <= STATE_STOPPED) return;
mState = STATE_STOPPING;
LOG.i("Stop:", "about to call onStop()");
@ -131,29 +171,23 @@ abstract class CameraController implements CameraPreview.SurfaceCallback, FrameM
LOG.i("Stop:", "returned from onStop().", "Dispatching.");
mState = STATE_STOPPED;
mCameraCallbacks.dispatchOnCameraClosed();
- } catch (Exception e) {
- LOG.e("Error while stopping the camera engine.", e);
- throw new RuntimeException(e);
- }
}
});
}
// Stops the preview synchronously, ensuring no exceptions are thrown.
- void stopImmediately() {
+ final void stopImmediately() {
try {
// Don't check, try stop again.
- LOG.i("Stop immediately. State was:", ss());
+ LOG.i("stopImmediately:", "State was:", ss());
if (mState == STATE_STOPPED) return;
mState = STATE_STOPPING;
- // Prevent leaking CameraController.
- mHandler.getThread().setUncaughtExceptionHandler(null);
onStop();
mState = STATE_STOPPED;
- LOG.i("Stop immediately. Stopped. State is:", ss());
+ LOG.i("stopImmediately:", "Stopped. State is:", ss());
} catch (Exception e) {
// Do nothing.
- LOG.i("Stop immediately. Exception while stopping.", e);
+ LOG.i("stopImmediately:", "Swallowing exception while stopping.", e);
mState = STATE_STOPPED;
}
}
@ -161,13 +195,10 @@ abstract class CameraController implements CameraPreview.SurfaceCallback, FrameM
// Forces a restart.
protected final void restart() {
LOG.i("Restart:", "posting runnable");
- mScheduledForRestart = true;
mHandler.post(new Runnable() {
@Override
public void run() {
- try {
LOG.i("Restart:", "executing. Needs stopping:", mState > STATE_STOPPED, ss());
- mScheduledForRestart = false;
// Don't stop if stopped.
if (mState > STATE_STOPPED) {
mState = STATE_STOPPING;
@ -183,12 +214,6 @@ abstract class CameraController implements CameraPreview.SurfaceCallback, FrameM
mState = STATE_STARTED;
LOG.i("Restart: returned from start. Dispatching. State:", ss());
mCameraCallbacks.dispatchOnCameraOpened(mOptions);
- } catch (Exception e) {
- LOG.e("Error while restarting the camera engine.", e);
- throw new RuntimeException(e);
- }
}
});
}
@ -196,11 +221,11 @@ abstract class CameraController implements CameraPreview.SurfaceCallback, FrameM
// Starts the preview.
// At the end of this method camera must be available, e.g. for setting parameters.
@WorkerThread
- abstract void onStart() throws Exception;
+ abstract void onStart();
// Stops the preview.
@WorkerThread
- abstract void onStop() throws Exception;
+ abstract void onStop();
// Returns current state.
final int getState() {
@ -231,11 +256,11 @@ abstract class CameraController implements CameraPreview.SurfaceCallback, FrameM
// Should restart the session if active.
abstract void setFacing(Facing facing);
- // If opened and supported, apply and return true.
- abstract boolean setZoom(float zoom);
+ // If closed, no-op. If opened, check supported and apply.
+ abstract void setZoom(float zoom, PointF[] points, boolean notify);
- // If opened and supported, apply and return true.
- abstract boolean setExposureCorrection(float EVvalue);
+ // If closed, no-op. If opened, check supported and apply.
+ abstract void setExposureCorrection(float EVvalue, float[] bounds, PointF[] points, boolean notify);
// If closed, keep. If opened, check supported and apply.
abstract void setFlash(Flash flash);
@ -260,17 +285,17 @@ abstract class CameraController implements CameraPreview.SurfaceCallback, FrameM
//region APIs
- abstract boolean capturePicture();
+ abstract void capturePicture();
- abstract boolean captureSnapshot();
+ abstract void captureSnapshot();
- abstract boolean startVideo(@NonNull File file);
+ abstract void startVideo(@NonNull File file);
- abstract boolean endVideo();
+ abstract void endVideo();
abstract boolean shouldFlipSizes(); // Whether the Sizes should be flipped to match the view orientation.
- abstract boolean startAutoFocus(@Nullable Gesture gesture, PointF point);
+ abstract void startAutoFocus(@Nullable Gesture gesture, PointF point);
//endregion
@ -318,6 +343,14 @@ abstract class CameraController implements CameraPreview.SurfaceCallback, FrameM
return mAudio;
}
final float getZoomValue() {
return mZoomValue;
}
final float getExposureCorrectionValue() {
return mExposureCorrectionValue;
}
final Size getCaptureSize() {
return mCaptureSize;
}

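To make the new onStart() / onStop() contract above concrete: engine implementations no longer return booleans or declare checked exceptions; they throw a CameraException on the worker thread and let uncaughtException() release the camera and notify listeners. A minimal, hypothetical sketch of an implementation's onStart() under this contract (the Camera.open call and the mCamera / mPreviewSurfaceHolder fields are illustrative, not the actual Camera1 code):

@WorkerThread
@Override
void onStart() {
    try {
        // Illustrative only: acquire the device and bind it to the preview surface.
        mCamera = android.hardware.Camera.open(0);
        mCamera.setPreviewDisplay(mPreviewSurfaceHolder); // hypothetical field
        mCamera.startPreview();
    } catch (java.io.IOException e) {
        // Wrapping the failure routes it to uncaughtException() above, which calls
        // stopImmediately() and dispatches the error to the UI thread.
        throw new CameraException(e);
    }
}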
@ -0,0 +1,12 @@
package com.otaliastudios.cameraview;
/**
* Holds an error with the camera configuration.
*/
public class CameraException extends RuntimeException {
CameraException(Throwable cause) {
super(cause);
}
}

@ -1,6 +1,7 @@
package com.otaliastudios.cameraview;
import android.graphics.PointF;
import android.support.annotation.NonNull;
import android.support.annotation.UiThread;
import java.io.File;
@ -29,6 +30,25 @@ public abstract class CameraListener {
}
/**
* Notifies about an error during the camera setup or configuration.
* At the moment, errors that are passed here are unrecoverable. When this is called,
* the camera has been released and is presumably showing a black preview.
*
* This is the right moment to show an error dialog to the user.
* You can try calling start() again, but that is not guaranteed to work - if it doesn't,
* this callback will be invoked again.
*
* In the future, more information will be passed through the {@link CameraException} instance.
*
* @param exception the error
*/
@UiThread
public void onCameraError(@NonNull CameraException exception) {
}
/**
* Notifies that a picture previously captured with {@link CameraView#capturePicture()}
* or {@link CameraView#captureSnapshot()} is ready to be shown or saved.
@ -78,8 +98,8 @@ public abstract class CameraListener {
/**
* Notifies that user interacted with the screen and started focus with a gesture,
- * and the autofocus is trying to focus around that area.
- * This can be used to draw things on screen.
+ * and the autofocus is trying to focus around that area. This can be used to draw things on screen.
+ * Can also be triggered by {@link CameraView#startAutoFocus(float, float)}.
*
* @param point coordinates with respect to CameraView.getWidth() and CameraView.getHeight()
*/
@ -93,6 +113,7 @@ public abstract class CameraListener {
* Notifies that a gesture focus event just ended, and the camera converged
* to a new focus (and possibly exposure and white balance).
* This might succeed or not.
* Can also be triggered by {@link CameraView#startAutoFocus(float, float)}.
*
* @param successful whether camera succeeded
* @param point coordinates with respect to CameraView.getWidth() and CameraView.getHeight()

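On the application side, onCameraError() is received on the UI thread after the camera has been released, so it is safe to update views from it. A small, hypothetical usage sketch (the dialog wiring and the context reference belong to the app, not the library):

cameraView.addCameraListener(new CameraListener() {
    @Override
    public void onCameraError(@NonNull CameraException exception) {
        // Log the cause and tell the user the camera could not be started.
        Log.e("MyApp", "Camera error", exception);
        new AlertDialog.Builder(context)
                .setTitle("Camera error")
                .setMessage("The camera could not be started. Please try again.")
                .setPositiveButton(android.R.string.ok, null)
                .show();
    }
});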
@ -52,8 +52,6 @@ public class CameraView extends FrameLayout {
// Self managed parameters
private int mJpegQuality;
private boolean mCropOutput;
- private float mZoomValue;
- private float mExposureCorrectionValue;
private boolean mPlaySounds;
private HashMap<Gesture, GestureAction> mGestureMap = new HashMap<>(4);
@ -448,7 +446,7 @@ public class CameraView extends FrameLayout {
// Some gesture layout detected a gesture. It's not known at this moment:
// (1) if it was mapped to some action (we check here)
// (2) if it's supported by the camera (CameraController checks)
- private boolean onGesture(GestureLayout source, @NonNull CameraOptions options) {
+ private void onGesture(GestureLayout source, @NonNull CameraOptions options) {
Gesture gesture = source.getGestureType();
GestureAction action = mGestureMap.get(gesture);
PointF[] points = source.getPoints();
@ -456,36 +454,29 @@ public class CameraView extends FrameLayout {
switch (action) {
case CAPTURE:
- return mCameraController.capturePicture();
+ mCameraController.capturePicture();
+ break;
case FOCUS:
case FOCUS_WITH_MARKER:
- return mCameraController.startAutoFocus(gesture, points[0]);
+ mCameraController.startAutoFocus(gesture, points[0]);
+ break;
case ZOOM:
- oldValue = mZoomValue;
+ oldValue = mCameraController.getZoomValue();
newValue = source.scaleValue(oldValue, 0, 1);
- if (mCameraController.setZoom(newValue)) {
- mZoomValue = newValue;
- mCameraCallbacks.dispatchOnZoomChanged(newValue, points);
- return true;
- }
+ mCameraController.setZoom(newValue, points, true);
break;
case EXPOSURE_CORRECTION:
- oldValue = mExposureCorrectionValue;
+ oldValue = mCameraController.getExposureCorrectionValue();
float minValue = options.getExposureCorrectionMinValue();
float maxValue = options.getExposureCorrectionMaxValue();
newValue = source.scaleValue(oldValue, minValue, maxValue);
float[] bounds = new float[]{minValue, maxValue};
- if (mCameraController.setExposureCorrection(newValue)) {
- mExposureCorrectionValue = newValue;
- mCameraCallbacks.dispatchOnExposureCorrectionChanged(newValue, bounds, points);
- return true;
- }
+ mCameraController.setExposureCorrection(newValue, bounds, points, true);
break;
}
- return false;
}
//endregion
@ -589,7 +580,7 @@ public class CameraView extends FrameLayout {
public void destroy() {
clearCameraListeners();
clearFrameProcessors();
- mCameraController.stopImmediately();
+ mCameraController.destroy();
}
//endregion
@ -640,9 +631,7 @@ public class CameraView extends FrameLayout {
float max = options.getExposureCorrectionMaxValue();
if (EVvalue < min) EVvalue = min;
if (EVvalue > max) EVvalue = max;
- if (mCameraController.setExposureCorrection(EVvalue)) {
- mExposureCorrectionValue = EVvalue;
- }
+ mCameraController.setExposureCorrection(EVvalue, null, null, false);
}
}
@ -653,7 +642,7 @@ public class CameraView extends FrameLayout {
* @return the current exposure correction value
*/
public float getExposureCorrection() {
- return mExposureCorrectionValue;
+ return mCameraController.getExposureCorrectionValue();
}
@ -670,9 +659,7 @@ public class CameraView extends FrameLayout {
public void setZoom(float zoom) {
if (zoom < 0) zoom = 0;
if (zoom > 1) zoom = 1;
- if (mCameraController.setZoom(zoom)) {
- mZoomValue = zoom;
- }
+ mCameraController.setZoom(zoom, null, false);
}
@ -681,7 +668,7 @@ public class CameraView extends FrameLayout {
* @return the current zoom value
*/
public float getZoom() {
- return mZoomValue;
+ return mCameraController.getZoomValue();
}
@ -1146,9 +1133,7 @@ public class CameraView extends FrameLayout {
* @see #captureSnapshot()
*/
public void capturePicture() {
- if (mCameraController.capturePicture() && mPlaySounds) {
- // TODO: playSound on Camera2
- }
+ mCameraController.capturePicture();
}
@ -1163,10 +1148,7 @@ public class CameraView extends FrameLayout {
* @see #capturePicture()
*/
public void captureSnapshot() {
- if (mCameraController.captureSnapshot() && mPlaySounds) {
- //noinspection all
- playSound(MediaActionSound.SHUTTER_CLICK);
- }
+ mCameraController.captureSnapshot();
}
@ -1193,7 +1175,7 @@ public class CameraView extends FrameLayout {
if (file == null) {
file = new File(getContext().getFilesDir(), "video.mp4");
}
- if (mCameraController.startVideo(file)) {
+ mCameraController.startVideo(file);
mUiHandler.post(new Runnable() {
@Override
public void run() {
@ -1202,7 +1184,6 @@ public class CameraView extends FrameLayout {
}
});
- }
}
/**
@ -1238,7 +1219,7 @@ public class CameraView extends FrameLayout {
* This will fire {@link CameraListener#onVideoTaken(File)}.
*/
public void stopCapturingVideo() {
- if (mCameraController.endVideo()) {
+ mCameraController.endVideo();
mUiHandler.post(new Runnable() {
@Override
public void run() {
@ -1246,7 +1227,6 @@ public class CameraView extends FrameLayout {
}
});
- }
}
/**
@ -1346,6 +1326,7 @@ public class CameraView extends FrameLayout {
void dispatchOnCameraOpened(CameraOptions options);
void dispatchOnCameraClosed();
void onCameraPreviewSizeChanged();
void onShutter(boolean shouldPlaySound);
void processImage(byte[] jpeg, boolean consistentWithView, boolean flipHorizontally);
void processSnapshot(YuvImage image, boolean consistentWithView, boolean flipHorizontally);
void dispatchOnVideoTaken(File file);
@ -1354,6 +1335,7 @@ public class CameraView extends FrameLayout {
void dispatchOnZoomChanged(final float newValue, final PointF[] fingers);
void dispatchOnExposureCorrectionChanged(float newValue, float[] bounds, PointF[] fingers);
void dispatchFrame(Frame frame);
void dispatchError(CameraException exception);
}
private class Callbacks implements CameraCallbacks {
@ -1408,6 +1390,13 @@ public class CameraView extends FrameLayout {
});
}
@Override
public void onShutter(boolean shouldPlaySound) {
if (shouldPlaySound && mPlaySounds) {
//noinspection all
playSound(MediaActionSound.SHUTTER_CLICK);
}
}
/**
* What would be great here is to ensure the EXIF tag in the jpeg is consistent with what we expect,
@ -1620,11 +1609,20 @@ public class CameraView extends FrameLayout {
});
}
}
}
//endregion
//region Deprecated
@Override
public void dispatchError(final CameraException exception) {
mLogger.i("dispatchError", exception);
mUiHandler.post(new Runnable() {
@Override
public void run() {
for (CameraListener listener : mListeners) {
listener.onCameraError(exception);
}
}
});
}
}
//endregion
}

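The net effect on the public CameraView API: the methods that used to return booleans are now fire-and-forget, the work is scheduled on the controller's worker thread, and results arrive through CameraListener callbacks on the UI thread, while getters read back the controller's stored values. A short, hypothetical usage sketch (note that setters are applied asynchronously, so a getter read immediately afterwards may still return the previous value):

cameraView.setZoom(0.75f);            // clamped to 0..1, applied on the worker thread
float zoom = cameraView.getZoom();    // reads the value stored by the controller
cameraView.capturePicture();          // result is delivered to onPictureTaken(byte[])
cameraView.startCapturingVideo(null); // defaults the file internally; onVideoTaken(File) when done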
@ -18,6 +18,10 @@ class Task<T> {
Task() {
}
Task(boolean startListening) {
if (startListening) listen();
}
private boolean listening() {
return mLatch != null;
}

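The Task fields added to CameraController ("Used for testing") rely on this helper to block a test until work scheduled on the camera thread completes. Purely as an illustration of the same pattern with plain JDK types, not the library's Task API (executor stands in for whatever runs the async work):

@Test
public void waitsForAsyncWork() throws InterruptedException {
    final java.util.concurrent.CountDownLatch latch = new java.util.concurrent.CountDownLatch(1);
    // Arm the latch ("start listening") before triggering the operation.
    executor.execute(new Runnable() {
        @Override
        public void run() {
            // ... perform the operation under test ...
            latch.countDown();
        }
    });
    // Block until the worker signals completion, failing the test after a timeout.
    assertTrue(latch.await(2, java.util.concurrent.TimeUnit.SECONDS));
}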
@ -0,0 +1,16 @@
package com.otaliastudios.cameraview;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
public class CameraExceptionTest {
@Test
public void testConstructor() {
RuntimeException cause = new RuntimeException("Error");
CameraException camera = new CameraException(cause);
assertEquals(cause, camera.getCause());
}
}