Reorder Camera1Engine

Branch: pull/494/head
Author: Mattia Iavarone
Parent: 98741c2bfa
Commit: a63d446012
  1. cameraview/src/main/java/com/otaliastudios/cameraview/engine/Camera1Engine.java (727 lines changed)
  2. cameraview/src/main/java/com/otaliastudios/cameraview/engine/Camera2Engine.java (1 line changed)
  3. cameraview/src/main/java/com/otaliastudios/cameraview/internal/egl/EglViewport.java (4 lines changed)
  4. cameraview/src/main/java/com/otaliastudios/cameraview/picture/Snapshot1PictureRecorder.java (2 lines changed)

cameraview/src/main/java/com/otaliastudios/cameraview/engine/Camera1Engine.java

@@ -3,6 +3,7 @@ package com.otaliastudios.cameraview.engine;
 import android.annotation.SuppressLint;
 import android.annotation.TargetApi;
 import android.graphics.ImageFormat;
+import android.graphics.PixelFormat;
 import android.graphics.PointF;
 import android.graphics.Rect;
 import android.graphics.SurfaceTexture;

@@ -57,51 +58,83 @@ public class Camera1Engine extends CameraEngine implements
     private static final String TAG = Camera1Engine.class.getSimpleName();
     private static final CameraLogger LOG = CameraLogger.create(TAG);
+    private static final int PREVIEW_FORMAT = ImageFormat.NV21;
     @VisibleForTesting static final int AUTOFOCUS_END_DELAY_MILLIS = 2500;
     private Camera mCamera;
     @VisibleForTesting int mCameraId;
-    private int mPreviewStreamFormat;
     private Runnable mFocusEndRunnable;
-    private final Runnable mFocusResetRunnable = new Runnable() {
-        @Override
-        public void run() {
-            if (!isCameraAvailable()) return;
-            mCamera.cancelAutoFocus();
-            Camera.Parameters params = mCamera.getParameters();
-            int maxAF = params.getMaxNumFocusAreas();
-            int maxAE = params.getMaxNumMeteringAreas();
-            if (maxAF > 0) params.setFocusAreas(null);
-            if (maxAE > 0) params.setMeteringAreas(null);
-            applyDefaultFocus(params); // Revert to internal focus.
-            mCamera.setParameters(params);
-        }
-    };
     public Camera1Engine(@NonNull Callback callback) {
         super(callback);
         mMapper = Mapper.get(Engine.CAMERA1);
     }
-    private boolean isCameraAvailable() {
-        return getEngineState() == STATE_STARTED;
-    }
-    private void schedule(@Nullable final Op<Void> op, final boolean ensureAvailable, final Runnable action) {
-        mHandler.run(new Runnable() {
-            @Override
-            public void run() {
-                if (ensureAvailable && !isCameraAvailable()) {
-                    if (op != null) op.end(null);
-                } else {
-                    action.run();
-                    if (op != null) op.end(null);
-                }
-            }
-        });
-    }
+    //region Utilities
+
+    @Override
+    public void onError(int error, Camera camera) {
+        if (error == Camera.CAMERA_ERROR_SERVER_DIED) {
+            // Looks like this is recoverable.
+            LOG.w("Recoverable error inside the onError callback.", "CAMERA_ERROR_SERVER_DIED");
+            restart();
+            return;
+        }
+        String message = LOG.e("Internal Camera1 error.", error);
+        Exception runtime = new RuntimeException(message);
+        int reason;
+        switch (error) {
+            case Camera.CAMERA_ERROR_EVICTED: reason = CameraException.REASON_DISCONNECTED; break;
+            case Camera.CAMERA_ERROR_UNKNOWN: reason = CameraException.REASON_UNKNOWN; break;
+            default: reason = CameraException.REASON_UNKNOWN;
+        }
+        throw new CameraException(runtime, reason);
+    }
+
+    //endregion
+
+    //region Protected APIs
+
+    @NonNull
+    @Override
+    protected List<Size> getPreviewStreamAvailableSizes() {
+        List<Camera.Size> sizes = mCamera.getParameters().getSupportedPreviewSizes();
+        List<Size> result = new ArrayList<>(sizes.size());
+        for (Camera.Size size : sizes) {
+            Size add = new Size(size.width, size.height);
+            if (!result.contains(add)) result.add(add);
+        }
+        LOG.i("getPreviewStreamAvailableSizes:", result);
+        return result;
+    }
+
+    @WorkerThread
+    @Override
+    protected void onPreviewStreamSizeChanged() {
+        restartPreview();
+    }
+
+    @Override
+    protected boolean collectCameraInfo(@NonNull Facing facing) {
+        int internalFacing = mMapper.map(facing);
+        LOG.i("collectCameraInfo", "Facing:", facing, "Internal:", internalFacing, "Cameras:", Camera.getNumberOfCameras());
+        Camera.CameraInfo cameraInfo = new Camera.CameraInfo();
+        for (int i = 0, count = Camera.getNumberOfCameras(); i < count; i++) {
+            Camera.getCameraInfo(i, cameraInfo);
+            if (cameraInfo.facing == internalFacing) {
+                mSensorOffset = cameraInfo.orientation;
+                mCameraId = i;
+                return true;
+            }
+        }
+        return false;
+    }
+
+    //endregion
+
+    //region Start
+
     @NonNull
     @WorkerThread

@@ -119,13 +152,7 @@ public class Camera1Engine extends CameraEngine implements
         LOG.i("onStartEngine:", "Applying default parameters.");
         Camera.Parameters params = mCamera.getParameters();
         mCameraOptions = new CameraOptions(params, flip(REF_SENSOR, REF_VIEW));
-        applyDefaultFocus(params);
-        applyFlash(params, Flash.OFF);
-        applyLocation(params, null);
-        applyWhiteBalance(params, WhiteBalance.AUTO);
-        applyHdr(params, Hdr.OFF);
-        applyPlaySounds(mPlaySounds);
-        params.setRecordingHint(getMode() == Mode.VIDEO);
+        applyAllParameters(params);
         mCamera.setParameters(params);
         mCamera.setDisplayOrientation(offset(REF_SENSOR, REF_VIEW)); // <- not allowed during preview
         LOG.i("onStartEngine:", "Ended");

@@ -168,10 +195,10 @@ public class Camera1Engine extends CameraEngine implements
         mPreview.setStreamSize(previewSize.getWidth(), previewSize.getHeight());
         Camera.Parameters params = mCamera.getParameters();
-        mPreviewStreamFormat = params.getPreviewFormat();
-        params.setPreviewSize(mPreviewStreamSize.getWidth(), mPreviewStreamSize.getHeight()); // <- not allowed during preview
+        params.setPreviewFormat(ImageFormat.NV21); // should be the default, but let's make sure, since YuvImage will only support this & a few others
+        params.setPreviewSize(mPreviewStreamSize.getWidth(), mPreviewStreamSize.getHeight()); // not allowed during preview
         if (getMode() == Mode.PICTURE) {
-            params.setPictureSize(mCaptureSize.getWidth(), mCaptureSize.getHeight()); // <- allowed
+            params.setPictureSize(mCaptureSize.getWidth(), mCaptureSize.getHeight()); // allowed during preview
         } else {
             // mCaptureSize in this case is a video size. The available video sizes are not necessarily
             // a subset of the picture sizes, so we can't use the mCaptureSize value: it might crash.

@@ -184,7 +211,7 @@ public class Camera1Engine extends CameraEngine implements
         mCamera.setPreviewCallbackWithBuffer(null); // Release anything left
         mCamera.setPreviewCallbackWithBuffer(this); // Add ourselves
-        getFrameManager().setUp(ImageFormat.getBitsPerPixel(mPreviewStreamFormat), mPreviewStreamSize);
+        getFrameManager().setUp(ImageFormat.getBitsPerPixel(PREVIEW_FORMAT), mPreviewStreamSize);
         LOG.i("onStartPreview", "Starting preview with startPreview().");
         try {

@@ -197,6 +224,10 @@ public class Camera1Engine extends CameraEngine implements
         return Tasks.forResult(null);
     }
+
+    //endregion
+
+    //region Stop
     @NonNull
     @Override
     protected Task<Void> onStopPreview() {

@@ -205,7 +236,6 @@ public class Camera1Engine extends CameraEngine implements
             mVideoRecorder = null;
         }
         mPictureRecorder = null;
-        mPreviewStreamFormat = 0;
         getFrameManager().release();
         mCamera.setPreviewCallbackWithBuffer(null); // Release anything left
         try {

@@ -216,7 +246,6 @@ public class Camera1Engine extends CameraEngine implements
         return Tasks.forResult(null);
     }
-
     @NonNull
     @Override
     protected Task<Void> onStopBind() {

@@ -263,178 +292,147 @@ public class Camera1Engine extends CameraEngine implements
         return Tasks.forResult(null);
     }
-    @WorkerThread
-    @Override
-    protected void onPreviewStreamSizeChanged() {
-        restartPreview();
-    }
-    @Override
-    protected boolean collectCameraInfo(@NonNull Facing facing) {
-        int internalFacing = mMapper.map(facing);
-        LOG.i("collectCameraInfo", "Facing:", facing, "Internal:", internalFacing, "Cameras:", Camera.getNumberOfCameras());
-        Camera.CameraInfo cameraInfo = new Camera.CameraInfo();
-        for (int i = 0, count = Camera.getNumberOfCameras(); i < count; i++) {
-            Camera.getCameraInfo(i, cameraInfo);
-            if (cameraInfo.facing == internalFacing) {
-                mSensorOffset = cameraInfo.orientation;
-                mCameraId = i;
-                return true;
-            }
-        }
-        return false;
-    }
-    @NonNull
-    @Override
-    protected FrameManager instantiateFrameManager() {
-        return new FrameManager(2, this);
-    }
-    @Override
-    public void onBufferAvailable(@NonNull byte[] buffer) {
-        // TODO: sync with handler?
-        if (isCameraAvailable()) {
-            mCamera.addCallbackBuffer(buffer);
-        }
-    }
-    @Override
-    public void onError(int error, Camera camera) {
-        if (error == Camera.CAMERA_ERROR_SERVER_DIED) {
-            // Looks like this is recoverable.
-            LOG.w("Recoverable error inside the onError callback.", "CAMERA_ERROR_SERVER_DIED");
-            restart();
-            return;
-        }
-        String message = LOG.e("Internal Camera1 error.", error);
-        Exception runtime = new RuntimeException(message);
-        int reason;
-        switch (error) {
-            case Camera.CAMERA_ERROR_EVICTED: reason = CameraException.REASON_DISCONNECTED; break;
-            case Camera.CAMERA_ERROR_UNKNOWN: reason = CameraException.REASON_UNKNOWN; break;
-            default: reason = CameraException.REASON_UNKNOWN;
-        }
-        throw new CameraException(runtime, reason);
-    }
-    @Override
-    public void setLocation(@Nullable Location location) {
-        final Location oldLocation = mLocation;
-        mLocation = location;
-        schedule(mLocationOp, true, new Runnable() {
-            @Override
-            public void run() {
-                Camera.Parameters params = mCamera.getParameters();
-                if (applyLocation(params, oldLocation)) mCamera.setParameters(params);
-            }
-        });
-    }
-    private boolean applyLocation(@NonNull Camera.Parameters params,
-                                  @SuppressWarnings("unused") @Nullable Location oldLocation) {
-        if (mLocation != null) {
-            params.setGpsLatitude(mLocation.getLatitude());
-            params.setGpsLongitude(mLocation.getLongitude());
-            params.setGpsAltitude(mLocation.getAltitude());
-            params.setGpsTimestamp(mLocation.getTime());
-            params.setGpsProcessingMethod(mLocation.getProvider());
-        }
-        return true;
-    }
-    @Override
-    public void setWhiteBalance(@NonNull WhiteBalance whiteBalance) {
-        final WhiteBalance old = mWhiteBalance;
-        mWhiteBalance = whiteBalance;
-        schedule(mWhiteBalanceOp, true, new Runnable() {
-            @Override
-            public void run() {
-                Camera.Parameters params = mCamera.getParameters();
-                if (applyWhiteBalance(params, old)) mCamera.setParameters(params);
-            }
-        });
-    }
-    private boolean applyWhiteBalance(@NonNull Camera.Parameters params, @NonNull WhiteBalance oldWhiteBalance) {
-        if (mCameraOptions.supports(mWhiteBalance)) {
-            params.setWhiteBalance((String) mMapper.map(mWhiteBalance));
-            return true;
-        }
-        mWhiteBalance = oldWhiteBalance;
-        return false;
-    }
-    @Override
-    public void setHdr(@NonNull Hdr hdr) {
-        final Hdr old = mHdr;
-        mHdr = hdr;
-        schedule(mHdrOp, true, new Runnable() {
-            @Override
-            public void run() {
-                Camera.Parameters params = mCamera.getParameters();
-                if (applyHdr(params, old)) mCamera.setParameters(params);
-            }
-        });
-    }
-    private boolean applyHdr(@NonNull Camera.Parameters params, @NonNull Hdr oldHdr) {
-        if (mCameraOptions.supports(mHdr)) {
-            params.setSceneMode((String) mMapper.map(mHdr));
-            return true;
-        }
-        mHdr = oldHdr;
-        return false;
-    }
-    @SuppressWarnings("UnusedReturnValue")
-    @TargetApi(17)
-    private boolean applyPlaySounds(boolean oldPlaySound) {
-        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR1) {
-            Camera.CameraInfo info = new Camera.CameraInfo();
-            Camera.getCameraInfo(mCameraId, info);
-            if (info.canDisableShutterSound) {
-                try {
-                    // this method is documented to throw on some occasions. #377
-                    return mCamera.enableShutterSound(mPlaySounds);
-                } catch (RuntimeException exception) {
-                    return false;
-                }
-            }
-        }
-        if (mPlaySounds) {
-            return true;
-        }
-        mPlaySounds = oldPlaySound;
-        return false;
-    }
-    @Override
-    public void setFlash(@NonNull Flash flash) {
-        final Flash old = mFlash;
-        mFlash = flash;
-        schedule(mFlashOp, true, new Runnable() {
-            @Override
-            public void run() {
-                Camera.Parameters params = mCamera.getParameters();
-                if (applyFlash(params, old)) mCamera.setParameters(params);
-            }
-        });
-    }
-    private boolean applyFlash(@NonNull Camera.Parameters params, @NonNull Flash oldFlash) {
-        if (mCameraOptions.supports(mFlash)) {
-            params.setFlashMode((String) mMapper.map(mFlash));
-            return true;
-        }
-        mFlash = oldFlash;
-        return false;
-    }
-    // Choose the best default focus, based on session type.
+    //endregion
+
+    //region Pictures
+
+    @WorkerThread
+    @Override
+    protected void onTakePicture(@NonNull PictureResult.Stub stub) {
+        stub.rotation = offset(REF_SENSOR, REF_OUTPUT);
+        stub.size = getPictureSize(REF_OUTPUT);
+        mPictureRecorder = new Full1PictureRecorder(stub, Camera1Engine.this, mCamera);
+        mPictureRecorder.take();
+    }
+
+    @WorkerThread
+    @Override
+    protected void onTakePictureSnapshot(@NonNull PictureResult.Stub stub, @NonNull AspectRatio viewAspectRatio) {
+        stub.size = getUncroppedSnapshotSize(REF_OUTPUT); // Not the real size: it will be cropped to match the view ratio
+        stub.rotation = offset(REF_SENSOR, REF_OUTPUT); // Actually it will be rotated and set to 0.
+        AspectRatio outputRatio = flip(REF_OUTPUT, REF_VIEW) ? viewAspectRatio.flip() : viewAspectRatio;
+        if (mPreview instanceof GlCameraPreview && Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT) {
+            mPictureRecorder = new SnapshotGlPictureRecorder(stub, this, (GlCameraPreview) mPreview, outputRatio);
+        } else {
+            mPictureRecorder = new Snapshot1PictureRecorder(stub, this, mCamera, outputRatio);
+        }
+        mPictureRecorder.take();
+    }
+
+    //endregion
+
+    //region Videos
+
+    @Override
+    protected void onTakeVideo(@NonNull VideoResult.Stub stub) {
+        stub.rotation = offset(REF_SENSOR, REF_OUTPUT);
+        stub.size = flip(REF_SENSOR, REF_OUTPUT) ? mCaptureSize.flip() : mCaptureSize;
+        // Unlock the camera and start recording.
+        try {
+            mCamera.unlock();
+        } catch (Exception e) {
+            // If this failed, we are unlikely able to record the video.
+            // Dispatch an error.
+            onVideoResult(null, e);
+            return;
+        }
+        if (!(mVideoRecorder instanceof Full1VideoRecorder)) {
+            mVideoRecorder = new Full1VideoRecorder(Camera1Engine.this, mCamera, mCameraId);
+        }
+        mVideoRecorder.start(stub);
+    }
+
+    @SuppressLint("NewApi")
+    @WorkerThread
+    @Override
+    protected void onTakeVideoSnapshot(@NonNull VideoResult.Stub stub, @NonNull AspectRatio viewAspectRatio) {
+        if (!(mPreview instanceof GlCameraPreview)) {
+            throw new IllegalStateException("Video snapshots are only supported with GlCameraPreview.");
+        }
+        if (Build.VERSION.SDK_INT < Build.VERSION_CODES.JELLY_BEAN_MR2) {
+            throw new IllegalStateException("Video snapshots are only supported starting from API 18.");
+        }
+        GlCameraPreview glPreview = (GlCameraPreview) mPreview;
+        // Size and rotation turned out to be extremely tricky. In case of Snapshot1PictureRecorder
+        // we use the preview size in REF_OUTPUT (cropped) and offset(REF_SENSOR, REF_OUTPUT) as rotation.
+        // These values mean that we expect input to be in the REF_SENSOR system.
+        // Here everything seems different. We would expect a difference because the two snapshot
+        // recorders have different mechanics (the picture one uses a SurfaceTexture with setBufferSize,
+        // the video one here uses the MediaCodec input surface which we can't control).
+        // The strangest thing is the fact that the correct angle seems to be the same for FRONT and
+        // BACK sensor, which means that our sensor correction actually screws things up. For this reason
+        // facing value is temporarily set to BACK.
+        Facing realFacing = mFacing;
+        mFacing = Facing.BACK;
+        // These are the angles that make it work on a Nexus5X, compared to the offset() results.
+        // For instance, SV means offset(REF_SENSOR, REF_VIEW). The rest should be clear.
+        // CONFIG      | WANTED | SV     | VS     | VO     | OV     | SO     | OS     |
+        // ------------|--------|--------|--------|--------|--------|--------|--------|
+        // Vertical    | 0      | 270    | 90     | 0      | 0      | 270    | 90     |
+        // Left        | 270    | 270    | 90     | 270    | 90     | 180    | 180    |
+        // Right       | 90     | 270    | 90     | 90     | 270    | 0      | 0      |
+        // Upside down | 180    | 270    | 90     | 180    | 180    | 90     | 270    |
+        // The VO is the only correct value. Things change when using FRONT camera, in which case,
+        // no value is actually correct, and the needed values are the same of BACK!
+        // CONFIG      | WANTED | SV     | VS     | VO     | OV     | SO     | OS     |
+        // ------------|--------|--------|--------|--------|--------|--------|--------|
+        // Vertical    | 0      | 90     | 270    | 180    | 180    | 270    | 90     |
+        // Left        | 270    | 90     | 270    | 270    | 90     | 0      | 0      |
+        // Right       | 90     | 90     | 270    | 90     | 270    | 180    | 180    |
+        // Upside down | 180    | 90     | 270    | 0      | 0      | 90     | 270    |
+        // Based on this we will use VO for everything. See if we get issues about distortion
+        // and maybe we can improve. The reason why this happen is beyond my understanding.
+        Size outputSize = getUncroppedSnapshotSize(REF_OUTPUT);
+        if (outputSize == null) {
+            throw new IllegalStateException("outputSize should not be null.");
+        }
+        AspectRatio outputRatio = flip(REF_OUTPUT, REF_VIEW) ? viewAspectRatio.flip() : viewAspectRatio;
+        Rect outputCrop = CropHelper.computeCrop(outputSize, outputRatio);
+        outputSize = new Size(outputCrop.width(), outputCrop.height());
+        stub.size = outputSize;
+        stub.rotation = offset(REF_VIEW, REF_OUTPUT);
+        // Reset facing and start.
+        mFacing = realFacing;
+        if (!(mVideoRecorder instanceof SnapshotVideoRecorder)) {
+            mVideoRecorder = new SnapshotVideoRecorder(Camera1Engine.this, glPreview);
+        }
+        mVideoRecorder.start(stub);
+    }
+
+    @Override
+    public void onVideoResult(@Nullable VideoResult.Stub result, @Nullable Exception exception) {
+        super.onVideoResult(result, exception);
+        if (result == null) {
+            // Something went wrong, lock the camera again.
+            mCamera.lock();
+        }
+    }
+
+    //endregion
+
+    //region Parameters
+
+    private void applyAllParameters(@NonNull Camera.Parameters params) {
+        params.setRecordingHint(getMode() == Mode.VIDEO);
+        applyDefaultFocus(params);
+        applyFlash(params, Flash.OFF);
+        applyLocation(params, null);
+        applyWhiteBalance(params, WhiteBalance.AUTO);
+        applyHdr(params, Hdr.OFF);
+        applyZoom(params, 0F);
+        applyExposureCorrection(params, 0F);
+        applyPlaySounds(mPlaySounds);
+    }
     private void applyDefaultFocus(@NonNull Camera.Parameters params) {
         List<String> modes = params.getSupportedFocusModes();

@@ -461,195 +459,252 @@ public class Camera1Engine extends CameraEngine implements
         }
     }
-    // -----------------
-    // Picture recording stuff.
-    @WorkerThread
-    @Override
-    protected void onTakePicture(@NonNull PictureResult.Stub stub) {
-        stub.rotation = offset(REF_SENSOR, REF_OUTPUT);
-        stub.size = getPictureSize(REF_OUTPUT);
-        mPictureRecorder = new Full1PictureRecorder(stub, Camera1Engine.this, mCamera);
-        mPictureRecorder.take();
-    }
-    @WorkerThread
-    @Override
-    protected void onTakePictureSnapshot(@NonNull PictureResult.Stub stub, @NonNull AspectRatio viewAspectRatio) {
-        stub.size = getUncroppedSnapshotSize(REF_OUTPUT); // Not the real size: it will be cropped to match the view ratio
-        stub.rotation = offset(REF_SENSOR, REF_OUTPUT); // Actually it will be rotated and set to 0.
-        AspectRatio outputRatio = flip(REF_OUTPUT, REF_VIEW) ? viewAspectRatio.flip() : viewAspectRatio;
-        if (mPreview instanceof GlCameraPreview && Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT) {
-            mPictureRecorder = new SnapshotGlPictureRecorder(stub, this, (GlCameraPreview) mPreview, outputRatio);
-        } else {
-            mPictureRecorder = new Snapshot1PictureRecorder(stub, this, mCamera, outputRatio);
-        }
-        mPictureRecorder.take();
-    }
-    @Override
-    public void onPreviewFrame(@NonNull byte[] data, Camera camera) {
-        Frame frame = getFrameManager().getFrame(data,
-                System.currentTimeMillis(),
-                offset(REF_SENSOR, REF_OUTPUT),
-                mPreviewStreamSize,
-                mPreviewStreamFormat);
-        mCallback.dispatchFrame(frame);
-    }
-    // -----------------
-    // Video recording stuff.
-    @Override
-    public void onVideoResult(@Nullable VideoResult.Stub result, @Nullable Exception exception) {
-        super.onVideoResult(result, exception);
-        if (result == null) {
-            // Something went wrong, lock the camera again.
-            mCamera.lock();
-        }
-    }
-    @Override
-    protected void onTakeVideo(@NonNull VideoResult.Stub stub) {
-        stub.rotation = offset(REF_SENSOR, REF_OUTPUT);
-        stub.size = flip(REF_SENSOR, REF_OUTPUT) ? mCaptureSize.flip() : mCaptureSize;
-        // Unlock the camera and start recording.
-        try {
-            mCamera.unlock();
-        } catch (Exception e) {
-            // If this failed, we are unlikely able to record the video.
-            // Dispatch an error.
-            onVideoResult(null, e);
-            return;
-        }
-        if (!(mVideoRecorder instanceof Full1VideoRecorder)) {
-            mVideoRecorder = new Full1VideoRecorder(Camera1Engine.this, mCamera, mCameraId);
-        }
-        mVideoRecorder.start(stub);
-    }
-    @SuppressLint("NewApi")
-    @WorkerThread
-    @Override
-    protected void onTakeVideoSnapshot(@NonNull VideoResult.Stub stub, @NonNull AspectRatio viewAspectRatio) {
-        if (!(mPreview instanceof GlCameraPreview)) {
-            throw new IllegalStateException("Video snapshots are only supported with GlCameraPreview.");
-        }
-        if (Build.VERSION.SDK_INT < Build.VERSION_CODES.JELLY_BEAN_MR2) {
-            throw new IllegalStateException("Video snapshots are only supported starting from API 18.");
-        }
-        GlCameraPreview glPreview = (GlCameraPreview) mPreview;
-        // Size and rotation turned out to be extremely tricky. In case of Snapshot1PictureRecorder
-        // we use the preview size in REF_OUTPUT (cropped) and offset(REF_SENSOR, REF_OUTPUT) as rotation.
-        // These values mean that we expect input to be in the REF_SENSOR system.
-        // Here everything seems different. We would expect a difference because the two snapshot
-        // recorders have different mechanics (the picture one uses a SurfaceTexture with setBufferSize,
-        // the video one here uses the MediaCodec input surface which we can't control).
-        // The strangest thing is the fact that the correct angle seems to be the same for FRONT and
-        // BACK sensor, which means that our sensor correction actually screws things up. For this reason
-        // facing value is temporarily set to BACK.
-        Facing realFacing = mFacing;
-        mFacing = Facing.BACK;
-        // These are the angles that make it work on a Nexus5X, compared to the offset() results.
-        // For instance, SV means offset(REF_SENSOR, REF_VIEW). The rest should be clear.
-        // CONFIG      | WANTED | SV     | VS     | VO     | OV     | SO     | OS     |
-        // ------------|--------|--------|--------|--------|--------|--------|--------|
-        // Vertical    | 0      | 270    | 90     | 0      | 0      | 270    | 90     |
-        // Left        | 270    | 270    | 90     | 270    | 90     | 180    | 180    |
-        // Right       | 90     | 270    | 90     | 90     | 270    | 0      | 0      |
-        // Upside down | 180    | 270    | 90     | 180    | 180    | 90     | 270    |
-        // The VO is the only correct value. Things change when using FRONT camera, in which case,
-        // no value is actually correct, and the needed values are the same of BACK!
-        // CONFIG      | WANTED | SV     | VS     | VO     | OV     | SO     | OS     |
-        // ------------|--------|--------|--------|--------|--------|--------|--------|
-        // Vertical    | 0      | 90     | 270    | 180    | 180    | 270    | 90     |
-        // Left        | 270    | 90     | 270    | 270    | 90     | 0      | 0      |
-        // Right       | 90     | 90     | 270    | 90     | 270    | 180    | 180    |
-        // Upside down | 180    | 90     | 270    | 0      | 0      | 90     | 270    |
-        // Based on this we will use VO for everything. See if we get issues about distortion
-        // and maybe we can improve. The reason why this happen is beyond my understanding.
-        Size outputSize = getUncroppedSnapshotSize(REF_OUTPUT);
-        if (outputSize == null) {
-            throw new IllegalStateException("outputSize should not be null.");
-        }
-        AspectRatio outputRatio = flip(REF_OUTPUT, REF_VIEW) ? viewAspectRatio.flip() : viewAspectRatio;
-        Rect outputCrop = CropHelper.computeCrop(outputSize, outputRatio);
-        outputSize = new Size(outputCrop.width(), outputCrop.height());
-        stub.size = outputSize;
-        stub.rotation = offset(REF_VIEW, REF_OUTPUT);
-        // Reset facing and start.
-        mFacing = realFacing;
-        if (!(mVideoRecorder instanceof SnapshotVideoRecorder)) {
-            mVideoRecorder = new SnapshotVideoRecorder(Camera1Engine.this, glPreview);
-        }
-        mVideoRecorder.start(stub);
-    }
-    // -----------------
-    // Zoom and simpler stuff.
-    @Override
-    public void setZoom(final float zoom, @Nullable final PointF[] points, final boolean notify) {
-        schedule(mZoomOp, true, new Runnable() {
-            @Override
-            public void run() {
-                if (!mCameraOptions.isZoomSupported()) return;
-                mZoomValue = zoom;
-                Camera.Parameters params = mCamera.getParameters();
-                float max = params.getMaxZoom();
-                params.setZoom((int) (zoom * max));
-                mCamera.setParameters(params);
-                if (notify) {
-                    mCallback.dispatchOnZoomChanged(zoom, points);
-                }
-            }
-        });
-    }
-    @Override
-    public void setExposureCorrection(final float EVvalue, @NonNull final float[] bounds,
-                                      @Nullable final PointF[] points, final boolean notify) {
-        schedule(mExposureCorrectionOp, true, new Runnable() {
-            @Override
-            public void run() {
-                if (!mCameraOptions.isExposureCorrectionSupported()) return;
-                float value = EVvalue;
-                float max = mCameraOptions.getExposureCorrectionMaxValue();
-                float min = mCameraOptions.getExposureCorrectionMinValue();
-                value = value < min ? min : value > max ? max : value; // cap
-                mExposureCorrectionValue = value;
-                Camera.Parameters params = mCamera.getParameters();
-                int indexValue = (int) (value / params.getExposureCompensationStep());
-                params.setExposureCompensation(indexValue);
-                mCamera.setParameters(params);
-                if (notify) {
-                    mCallback.dispatchOnExposureCorrectionChanged(value, bounds, points);
-                }
-            }
-        });
-    }
-    // -----------------
-    // Tap to focus stuff.
+    @Override
+    public void setFlash(@NonNull Flash flash) {
+        final Flash old = mFlash;
+        mFlash = flash;
+        mHandler.run(new Runnable() {
+            @Override
+            public void run() {
+                if (getEngineState() == STATE_STARTED) {
+                    Camera.Parameters params = mCamera.getParameters();
+                    if (applyFlash(params, old)) mCamera.setParameters(params);
+                }
+                mFlashOp.end(null);
+            }
+        });
+    }
+    private boolean applyFlash(@NonNull Camera.Parameters params, @NonNull Flash oldFlash) {
+        if (mCameraOptions.supports(mFlash)) {
+            params.setFlashMode((String) mMapper.map(mFlash));
+            return true;
+        }
+        mFlash = oldFlash;
+        return false;
+    }
+    @Override
+    public void setLocation(@Nullable Location location) {
+        final Location oldLocation = mLocation;
+        mLocation = location;
+        mHandler.run(new Runnable() {
+            @Override
+            public void run() {
+                if (getEngineState() == STATE_STARTED) {
+                    Camera.Parameters params = mCamera.getParameters();
+                    if (applyLocation(params, oldLocation)) mCamera.setParameters(params);
+                }
+                mLocationOp.end(null);
+            }
+        });
+    }
+    private boolean applyLocation(@NonNull Camera.Parameters params,
+                                  @SuppressWarnings("unused") @Nullable Location oldLocation) {
+        if (mLocation != null) {
+            params.setGpsLatitude(mLocation.getLatitude());
+            params.setGpsLongitude(mLocation.getLongitude());
+            params.setGpsAltitude(mLocation.getAltitude());
+            params.setGpsTimestamp(mLocation.getTime());
+            params.setGpsProcessingMethod(mLocation.getProvider());
+        }
+        return true;
+    }
+    @Override
+    public void setWhiteBalance(@NonNull WhiteBalance whiteBalance) {
+        final WhiteBalance old = mWhiteBalance;
+        mWhiteBalance = whiteBalance;
+        mHandler.run(new Runnable() {
+            @Override
+            public void run() {
+                if (getEngineState() == STATE_STARTED) {
+                    Camera.Parameters params = mCamera.getParameters();
+                    if (applyWhiteBalance(params, old)) mCamera.setParameters(params);
+                }
+                mWhiteBalanceOp.end(null);
+            }
+        });
+    }
+    private boolean applyWhiteBalance(@NonNull Camera.Parameters params, @NonNull WhiteBalance oldWhiteBalance) {
+        if (mCameraOptions.supports(mWhiteBalance)) {
+            params.setWhiteBalance((String) mMapper.map(mWhiteBalance));
+            return true;
+        }
+        mWhiteBalance = oldWhiteBalance;
+        return false;
+    }
+    @Override
+    public void setHdr(@NonNull Hdr hdr) {
+        final Hdr old = mHdr;
+        mHdr = hdr;
+        mHandler.run(new Runnable() {
+            @Override
+            public void run() {
+                if (getEngineState() == STATE_STARTED) {
+                    Camera.Parameters params = mCamera.getParameters();
+                    if (applyHdr(params, old)) mCamera.setParameters(params);
+                }
+                mHdrOp.end(null);
+            }
+        });
+    }
+    private boolean applyHdr(@NonNull Camera.Parameters params, @NonNull Hdr oldHdr) {
+        if (mCameraOptions.supports(mHdr)) {
+            params.setSceneMode((String) mMapper.map(mHdr));
+            return true;
+        }
+        mHdr = oldHdr;
+        return false;
+    }
+    @Override
+    public void setZoom(final float zoom, @Nullable final PointF[] points, final boolean notify) {
+        final float old = mZoomValue;
+        mZoomValue = zoom;
+        mHandler.run(new Runnable() {
+            @Override
+            public void run() {
+                if (getEngineState() == STATE_STARTED) {
+                    Camera.Parameters params = mCamera.getParameters();
+                    if (applyZoom(params, old)) {
+                        mCamera.setParameters(params);
+                        if (notify) {
+                            mCallback.dispatchOnZoomChanged(mZoomValue, points);
+                        }
+                    }
+                }
+                mZoomOp.end(null);
+            }
+        });
+    }
+    private boolean applyZoom(@NonNull Camera.Parameters params, float oldZoom) {
+        if (mCameraOptions.isZoomSupported()) {
+            float max = params.getMaxZoom();
+            params.setZoom((int) (mZoomValue * max));
+            mCamera.setParameters(params);
+            return true;
+        }
+        mZoomValue = oldZoom;
+        return false;
+    }
+    @Override
+    public void setExposureCorrection(final float EVvalue, @NonNull final float[] bounds,
+                                      @Nullable final PointF[] points, final boolean notify) {
+        final float old = mExposureCorrectionValue;
+        mExposureCorrectionValue = EVvalue;
+        mHandler.run(new Runnable() {
+            @Override
+            public void run() {
+                if (getEngineState() == STATE_STARTED) {
+                    Camera.Parameters params = mCamera.getParameters();
+                    if (applyExposureCorrection(params, old)) {
+                        mCamera.setParameters(params);
+                        if (notify) {
+                            mCallback.dispatchOnExposureCorrectionChanged(mExposureCorrectionValue, bounds, points);
+                        }
+                    }
+                }
+                mExposureCorrectionOp.end(null);
+            }
+        });
+    }
+    private boolean applyExposureCorrection(@NonNull Camera.Parameters params, float oldExposureCorrection) {
+        if (mCameraOptions.isExposureCorrectionSupported()) {
+            // Just make sure we're inside boundaries.
+            float max = mCameraOptions.getExposureCorrectionMaxValue();
+            float min = mCameraOptions.getExposureCorrectionMinValue();
+            float val = mExposureCorrectionValue;
+            val = val < min ? min : val > max ? max : val; // cap
+            mExposureCorrectionValue = val;
+            // Apply.
+            int indexValue = (int) (mExposureCorrectionValue / params.getExposureCompensationStep());
+            params.setExposureCompensation(indexValue);
+            return true;
+        }
+        mExposureCorrectionValue = oldExposureCorrection;
+        return false;
+    }
+    @Override
+    public void setPlaySounds(boolean playSounds) {
+        final boolean old = mPlaySounds;
+        mPlaySounds = playSounds;
+        mHandler.run(new Runnable() {
+            @Override
+            public void run() {
+                if (getEngineState() == STATE_STARTED) {
+                    applyPlaySounds(old);
+                }
+                mPlaySoundsOp.end(null);
+            }
+        });
+    }
+    @SuppressWarnings("UnusedReturnValue")
+    @TargetApi(17)
+    private boolean applyPlaySounds(boolean oldPlaySound) {
+        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR1) {
+            Camera.CameraInfo info = new Camera.CameraInfo();
+            Camera.getCameraInfo(mCameraId, info);
+            if (info.canDisableShutterSound) {
+                try {
+                    // this method is documented to throw on some occasions. #377
+                    return mCamera.enableShutterSound(mPlaySounds);
+                } catch (RuntimeException exception) {
+                    return false;
+                }
+            }
+        }
+        if (mPlaySounds) {
+            return true;
+        }
+        mPlaySounds = oldPlaySound;
+        return false;
+    }
+
+    //endregion
+
+    //region Frame Processing
+
+    @NonNull
+    @Override
+    protected FrameManager instantiateFrameManager() {
+        return new FrameManager(2, this);
+    }
+    @Override
+    public void onBufferAvailable(@NonNull byte[] buffer) {
+        if (getEngineState() == STATE_STARTED) {
+            mCamera.addCallbackBuffer(buffer);
+        }
+    }
+    @Override
+    public void onPreviewFrame(@NonNull byte[] data, Camera camera) {
+        Frame frame = getFrameManager().getFrame(data,
+                System.currentTimeMillis(),
+                offset(REF_SENSOR, REF_OUTPUT),
+                mPreviewStreamSize,
+                PREVIEW_FORMAT);
+        mCallback.dispatchFrame(frame);
+    }
+
+    //endregion
+
+    //region Auto Focus
+
     @Override
     public void startAutoFocus(@Nullable final Gesture gesture, @NonNull final PointF point) {
         // Must get width and height from the UI thread.
+        // TODO could take mPreview.surfaceSize like Camera2 does?
         int viewWidth = 0, viewHeight = 0;
         if (mPreview != null && mPreview.hasSurface()) {
             viewWidth = mPreview.getView().getWidth();

@@ -657,10 +712,10 @@ public class Camera1Engine extends CameraEngine implements
         }
         final int viewWidthF = viewWidth;
         final int viewHeightF = viewHeight;
-        // Schedule.
-        schedule(null, true, new Runnable() {
+        mHandler.run(new Runnable() {
             @Override
             public void run() {
+                if (getEngineState() < STATE_STARTED) return;
                 if (!mCameraOptions.isAutoFocusSupported()) return;
                 final PointF p = new PointF(point.x, point.y); // copy.
                 List<Camera.Area> meteringAreas2 = computeMeteringAreas(p.x, p.y,

@@ -683,10 +738,8 @@ public class Camera1Engine extends CameraEngine implements
                 mFocusEndRunnable = new Runnable() {
                     @Override
                     public void run() {
-                        if (isCameraAvailable()) {
-                            mCallback.dispatchOnFocusEnd(gesture, false, p);
-                        }
+                        mCallback.dispatchOnFocusEnd(gesture, false, p);
                     }
                 };
                 mHandler.post(AUTOFOCUS_END_DELAY_MILLIS, mFocusEndRunnable);

@@ -712,6 +765,7 @@ public class Camera1Engine extends CameraEngine implements
                     // Let the mFocusEndRunnable do its job. (could remove it and quickly dispatch
                     // onFocusEnd here, but let's make it simpler).
                 }
            }
        });
    }

@@ -758,38 +812,21 @@ public class Camera1Engine extends CameraEngine implements
         return new Rect(left, top, right, bottom);
     }
-    // -----------------
-    // Size stuff.
-    public final int getPreviewStreamFormat() {
-        return mPreviewStreamFormat;
-    }
-    @NonNull
-    @Override
-    protected List<Size> getPreviewStreamAvailableSizes() {
-        List<Camera.Size> sizes = mCamera.getParameters().getSupportedPreviewSizes();
-        List<Size> result = new ArrayList<>(sizes.size());
-        for (Camera.Size size : sizes) {
-            Size add = new Size(size.width, size.height);
-            if (!result.contains(add)) result.add(add);
-        }
-        LOG.i("getPreviewStreamAvailableSizes:", result);
-        return result;
-    }
-    @Override
-    public void setPlaySounds(boolean playSounds) {
-        final boolean old = mPlaySounds;
-        mPlaySounds = playSounds;
-        schedule(mPlaySoundsOp, true, new Runnable() {
-            @Override
-            public void run() {
-                applyPlaySounds(old);
-            }
-        });
-    }
+    private final Runnable mFocusResetRunnable = new Runnable() {
+        @Override
+        public void run() {
+            if (getEngineState() < STATE_STARTED) return;
+            mCamera.cancelAutoFocus();
+            Camera.Parameters params = mCamera.getParameters();
+            int maxAF = params.getMaxNumFocusAreas();
+            int maxAE = params.getMaxNumMeteringAreas();
+            if (maxAF > 0) params.setFocusAreas(null);
+            if (maxAE > 0) params.setMeteringAreas(null);
+            applyDefaultFocus(params); // Revert to internal focus.
+            mCamera.setParameters(params);
+        }
+    };
+
+    //endregion
 }
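Note on the recurring pattern above: every setter used to funnel through the removed schedule(op, ensureAvailable, action) helper, which silently skipped the action when the camera was not available. The reordered class inlines that logic instead: each setter caches the new value up front, posts to mHandler, touches Camera.Parameters only while getEngineState() == STATE_STARTED, and ends its Op on every path. A minimal sketch of the shape, taken from the setFlash/applyFlash pair in this diff (mHandler, mFlashOp, mCameraOptions, mMapper, STATE_STARTED and the Op type are inherited from CameraEngine, so this compiles only inside that class):

    @Override
    public void setFlash(@NonNull Flash flash) {
        final Flash old = mFlash;
        mFlash = flash; // cache immediately, even before the value is applied
        mHandler.run(new Runnable() {
            @Override
            public void run() {
                if (getEngineState() == STATE_STARTED) {
                    Camera.Parameters params = mCamera.getParameters();
                    // applyFlash() rolls mFlash back to `old` when unsupported.
                    if (applyFlash(params, old)) mCamera.setParameters(params);
                }
                mFlashOp.end(null); // unlike the old helper, the Op always completes
            }
        });
    }

The same set/apply split (setXXX caches and schedules, applyXXX mutates Parameters and reports support) is what lets onStartEngine() collapse its seven parameter lines into the single applyAllParameters(params) call.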

cameraview/src/main/java/com/otaliastudios/cameraview/engine/Camera2Engine.java

@@ -647,7 +647,6 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAvailableListener,
     public void onPictureResult(@Nullable PictureResult.Stub result, @Nullable Exception error) {
         boolean fullPicture = mPictureRecorder instanceof Full2PictureRecorder;
         super.onPictureResult(result, error);
-        //noinspection StatementWithEmptyBody
         if (fullPicture && mPictureCaptureStopsPreview) {
             // See comments in Full2PictureRecorder.
             applyRepeatingRequestBuilder();

cameraview/src/main/java/com/otaliastudios/cameraview/internal/egl/EglViewport.java

@@ -40,7 +40,7 @@ public class EglViewport extends EglElement {
     // Stuff from Drawable2d.FULL_RECTANGLE
     // A full square, extending from -1 to +1 in both dimensions.
     // When the model/view/projection matrix is identity, this will exactly cover the viewport.
-    private static final float FULL_RECTANGLE_COORDS[] = {
+    private static final float[] FULL_RECTANGLE_COORDS = {
             -1.0f, -1.0f,   // 0 bottom left
              1.0f, -1.0f,   // 1 bottom right
             -1.0f,  1.0f,   // 2 top left

@@ -49,7 +49,7 @@ public class EglViewport extends EglElement {
     // Stuff from Drawable2d.FULL_RECTANGLE
     // A full square, extending from -1 to +1 in both dimensions.
-    private static final float FULL_RECTANGLE_TEX_COORDS[] = {
+    private static final float[] FULL_RECTANGLE_TEX_COORDS = {
             0.0f, 0.0f,     // 0 bottom left
             1.0f, 0.0f,     // 1 bottom right
             0.0f, 1.0f,     // 2 top left

cameraview/src/main/java/com/otaliastudios/cameraview/picture/Snapshot1PictureRecorder.java

@@ -59,7 +59,7 @@ public class Snapshot1PictureRecorder extends PictureRecorder {
         mEngine1 = engine;
         mCamera = camera;
         mOutputRatio = outputRatio;
-        mFormat = engine.getPreviewStreamFormat();
+        mFormat = camera.getParameters().getPreviewFormat();
     }

     @Override
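A side effect of dropping mPreviewStreamFormat: the snapshot recorder now reads the format straight off the live Camera.Parameters instead of a cached engine getter. Since onStartPreview() pins the preview format to NV21, both ends agree on a format that YuvImage can actually compress. A hypothetical sketch of that NV21-to-JPEG step (illustrative only, not code from this commit; the recorder does something along these lines with mFormat and a crop Rect):

    // Compress an NV21 preview buffer to JPEG. YuvImage only accepts NV21 and
    // YUY2 here, which is why Camera1Engine now forces NV21 on the preview stream.
    static byte[] nv21ToJpeg(byte[] nv21, int width, int height, int quality) {
        android.graphics.YuvImage yuv = new android.graphics.YuvImage(
                nv21, android.graphics.ImageFormat.NV21, width, height, null);
        java.io.ByteArrayOutputStream out = new java.io.ByteArrayOutputStream();
        // Compress the full frame; the real recorder passes the computed crop here.
        yuv.compressToJpeg(new android.graphics.Rect(0, 0, width, height), quality, out);
        return out.toByteArray();
    }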
