Camera2 support for picture and video snapshots

pull/493/head
Mattia Iavarone, 5 years ago
parent e2d5aa1941
commit 1dabf4db01
15 changed files (changed line counts in parentheses):

1. cameraview/src/androidTest/java/com/otaliastudios/cameraview/engine/MockCameraEngine.java (9)
2. cameraview/src/androidTest/java/com/otaliastudios/cameraview/internal/utils/WorkerHandlerTest.java (2)
3. cameraview/src/main/java/com/otaliastudios/cameraview/CameraUtils.java (4)
4. cameraview/src/main/java/com/otaliastudios/cameraview/CameraView.java (2)
5. cameraview/src/main/java/com/otaliastudios/cameraview/engine/Camera1Engine.java (299)
6. cameraview/src/main/java/com/otaliastudios/cameraview/engine/Camera2Engine.java (145)
7. cameraview/src/main/java/com/otaliastudios/cameraview/engine/CameraEngine.java (199)
8. cameraview/src/main/java/com/otaliastudios/cameraview/internal/utils/WorkerHandler.java (14)
9. cameraview/src/main/java/com/otaliastudios/cameraview/picture/Snapshot1PictureRecorder.java (119)
10. cameraview/src/main/java/com/otaliastudios/cameraview/picture/SnapshotGlPictureRecorder.java (99)
11. cameraview/src/main/java/com/otaliastudios/cameraview/preview/GlCameraPreview.java (2)
12. cameraview/src/main/java/com/otaliastudios/cameraview/video/SnapshotVideoRecorder.java (15)
13. cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/MediaEncoder.java (12)
14. cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/TextureMediaEncoder.java (5)
15. cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/VideoMediaEncoder.java (7)

@@ -160,7 +160,8 @@ public class MockCameraEngine extends CameraEngine {
     }

     @Override
-    public void takePictureSnapshot(@NonNull PictureResult.Stub stub, @NonNull AspectRatio viewAspectRatio) {
+    protected void onTakePictureSnapshot(@NonNull PictureResult.Stub stub, @NonNull AspectRatio viewAspectRatio) {
     }

     @Override
@@ -168,12 +169,8 @@ public class MockCameraEngine extends CameraEngine {
     }

     @Override
-    public void takeVideoSnapshot(@NonNull VideoResult.Stub stub, @NonNull File file, @NonNull AspectRatio viewAspectRatio) {
-    }
-
-    @Override
-    public void stopVideo() {
+    protected void onTakeVideoSnapshot(@NonNull VideoResult.Stub stub, @NonNull File file, @NonNull AspectRatio viewAspectRatio) {
     }

     @Override

@@ -33,7 +33,7 @@ public class WorkerHandlerTest extends BaseTest {
                 op.end(true);
             }
         };
-        WorkerHandler.run(action);
+        WorkerHandler.execute(action);
         Boolean result = op.await(500);
         assertNotNull(result);
         assertTrue(result);

@@ -111,7 +111,7 @@ public class CameraUtils {
     @SuppressWarnings("WeakerAccess")
     public static void writeToFile(@NonNull final byte[] data, @NonNull final File file, @NonNull final FileCallback callback) {
         final Handler ui = new Handler();
-        WorkerHandler.run(new Runnable() {
+        WorkerHandler.execute(new Runnable() {
             @Override
             public void run() {
                 final File result = writeToFile(data, file);
@@ -194,7 +194,7 @@ public class CameraUtils {
     @SuppressWarnings("WeakerAccess")
     static void decodeBitmap(@NonNull final byte[] source, final int maxWidth, final int maxHeight, @NonNull final BitmapFactory.Options options, final int rotation, @NonNull final BitmapCallback callback) {
         final Handler ui = new Handler();
-        WorkerHandler.run(new Runnable() {
+        WorkerHandler.execute(new Runnable() {
             @Override
             public void run() {
                 final Bitmap bitmap = decodeBitmap(source, maxWidth, maxHeight, options, rotation);

@@ -1897,7 +1897,7 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
                 frame.release();
             } else {
                 mLogger.v("dispatchFrame:", frame.getTime(), "processors:", mFrameProcessors.size());
-                mFrameProcessorsHandler.post(new Runnable() {
+                mFrameProcessorsHandler.run(new Runnable() {
                     @Override
                     public void run() {
                         for (FrameProcessor processor : mFrameProcessors) {

@@ -35,7 +35,8 @@ import com.otaliastudios.cameraview.internal.utils.CropHelper;
 import com.otaliastudios.cameraview.internal.utils.Op;
 import com.otaliastudios.cameraview.picture.FullPictureRecorder;
 import com.otaliastudios.cameraview.picture.PictureRecorder;
-import com.otaliastudios.cameraview.picture.SnapshotPictureRecorder;
+import com.otaliastudios.cameraview.picture.Snapshot1PictureRecorder;
+import com.otaliastudios.cameraview.picture.SnapshotGlPictureRecorder;
 import com.otaliastudios.cameraview.preview.GlCameraPreview;
 import com.otaliastudios.cameraview.size.AspectRatio;
 import com.otaliastudios.cameraview.size.Size;
@@ -50,9 +51,7 @@ import java.util.List;
 @SuppressWarnings("deprecation")
-public class Camera1Engine extends CameraEngine implements Camera.PreviewCallback, Camera.ErrorCallback,
-        VideoRecorder.VideoResultListener,
-        PictureRecorder.PictureResultListener {
+public class Camera1Engine extends CameraEngine implements Camera.PreviewCallback, Camera.ErrorCallback {

     private static final String TAG = Camera1Engine.class.getSimpleName();
     private static final CameraLogger LOG = CameraLogger.create(TAG);
@@ -87,7 +86,7 @@ public class Camera1Engine extends CameraEngine implements Camera.PreviewCallback {
     }

     private void schedule(@Nullable final Op<Void> op, final boolean ensureAvailable, final Runnable action) {
-        mHandler.post(new Runnable() {
+        mHandler.run(new Runnable() {
             @Override
             public void run() {
                 if (ensureAvailable && !isCameraAvailable()) {
@@ -104,34 +103,29 @@ public class Camera1Engine extends CameraEngine implements Camera.PreviewCallback {
     @WorkerThread
     @Override
     protected Task<Void> onStartEngine() {
-        if (collectCameraId()) {
-            try {
-                mCamera = Camera.open(mCameraId);
-            } catch (Exception e) {
-                LOG.e("onStartEngine:", "Failed to connect. Maybe in use by another app?");
-                throw new CameraException(e, CameraException.REASON_FAILED_TO_CONNECT);
-            }
-            mCamera.setErrorCallback(this);
-            // Set parameters that might have been set before the camera was opened.
-            LOG.i("onStartEngine:", "Applying default parameters.");
-            Camera.Parameters params = mCamera.getParameters();
-            mCameraOptions = new CameraOptions(params, flip(REF_SENSOR, REF_VIEW));
-            applyDefaultFocus(params);
-            applyFlash(params, Flash.OFF);
-            applyLocation(params, null);
-            applyWhiteBalance(params, WhiteBalance.AUTO);
-            applyHdr(params, Hdr.OFF);
-            applyPlaySounds(mPlaySounds);
-            params.setRecordingHint(mMode == Mode.VIDEO);
-            mCamera.setParameters(params);
-            mCamera.setDisplayOrientation(offset(REF_SENSOR, REF_VIEW)); // <- not allowed during preview
-            LOG.i("onStartEngine:", "Ended");
-            return Tasks.forResult(null);
-        } else {
-            LOG.e("onStartEngine:", "No camera available for facing", mFacing);
-            throw new CameraException(CameraException.REASON_NO_CAMERA);
+        try {
+            mCamera = Camera.open(mCameraId);
+        } catch (Exception e) {
+            LOG.e("onStartEngine:", "Failed to connect. Maybe in use by another app?");
+            throw new CameraException(e, CameraException.REASON_FAILED_TO_CONNECT);
         }
+        mCamera.setErrorCallback(this);
+        // Set parameters that might have been set before the camera was opened.
+        LOG.i("onStartEngine:", "Applying default parameters.");
+        Camera.Parameters params = mCamera.getParameters();
+        mCameraOptions = new CameraOptions(params, flip(REF_SENSOR, REF_VIEW));
+        applyDefaultFocus(params);
+        applyFlash(params, Flash.OFF);
+        applyLocation(params, null);
+        applyWhiteBalance(params, WhiteBalance.AUTO);
+        applyHdr(params, Hdr.OFF);
+        applyPlaySounds(mPlaySounds);
+        params.setRecordingHint(mMode == Mode.VIDEO);
+        mCamera.setParameters(params);
+        mCamera.setDisplayOrientation(offset(REF_SENSOR, REF_VIEW)); // <- not allowed during preview
+        LOG.i("onStartEngine:", "Ended");
+        return Tasks.forResult(null);
     }

     @NonNull
@@ -269,9 +263,10 @@ public class Camera1Engine extends CameraEngine implements Camera.PreviewCallback {
         restartPreview();
     }

-    private boolean collectCameraId() {
-        int internalFacing = mMapper.map(mFacing);
-        LOG.i("collectCameraId", "Facing:", mFacing, "Internal:", internalFacing, "Cameras:", Camera.getNumberOfCameras());
+    @Override
+    protected boolean collectCameraInfo(@NonNull Facing facing) {
+        int internalFacing = mMapper.map(facing);
+        LOG.i("collectCameraInfo", "Facing:", facing, "Internal:", internalFacing, "Cameras:", Camera.getNumberOfCameras());
         Camera.CameraInfo cameraInfo = new Camera.CameraInfo();
         for (int i = 0, count = Camera.getNumberOfCameras(); i < count; i++) {
             Camera.getCameraInfo(i, cameraInfo);
@@ -284,6 +279,7 @@ public class Camera1Engine extends CameraEngine implements Camera.PreviewCallback {
         return false;
     }

+
     @Override
     public void onBufferAvailable(@NonNull byte[] buffer) {
         // TODO: sync with handler?
@@ -350,24 +346,6 @@ public class Camera1Engine extends CameraEngine implements Camera.PreviewCallback {
         return true;
     }

-    @Override
-    public void setFacing(@NonNull Facing facing) {
-        final Facing old = mFacing;
-        if (facing != old) {
-            mFacing = facing;
-            schedule(null, true, new Runnable() {
-                @Override
-                public void run() {
-                    if (collectCameraId()) {
-                        restart();
-                    } else {
-                        mFacing = old;
-                    }
-                }
-            });
-        }
-    }
-
     @Override
     public void setWhiteBalance(@NonNull WhiteBalance whiteBalance) {
         final WhiteBalance old = mWhiteBalance;
@@ -434,18 +412,6 @@ public class Camera1Engine extends CameraEngine implements Camera.PreviewCallback {
         return false;
     }

-    @Override
-    public void setAudio(@NonNull Audio audio) {
-        if (mAudio != audio) {
-            if (isTakingVideo()) {
-                LOG.w("Audio setting was changed while recording. " +
-                        "Changes will take place starting from next video");
-            }
-            mAudio = audio;
-        }
-    }
-
     @Override
     public void setFlash(@NonNull Flash flash) {
         final Flash old = mFlash;
@@ -500,24 +466,6 @@ public class Camera1Engine extends CameraEngine implements Camera.PreviewCallback {
     // -----------------
     // Picture recording stuff.

-    @Override
-    public void onPictureShutter(boolean didPlaySound) {
-        mCallback.onShutter(!didPlaySound);
-    }
-
-    @Override
-    public void onPictureResult(@Nullable PictureResult.Stub result) {
-        mPictureRecorder = null;
-        if (result != null) {
-            mCallback.dispatchOnPictureTaken(result);
-        } else {
-            // Something went wrong.
-            mCallback.dispatchError(new CameraException(CameraException.REASON_PICTURE_FAILED));
-            LOG.e("onPictureResult", "result is null: something went wrong.");
-        }
-    }
-
     @Override
     public void takePicture(final @NonNull PictureResult.Stub stub) {
         LOG.v("takePicture: scheduling");
@@ -543,34 +491,19 @@ public class Camera1Engine extends CameraEngine implements Camera.PreviewCallback {
         });
     }

-    /**
-     * Just a note about the snapshot size - it is the PreviewStreamSize, cropped with the view ratio.
-     * @param viewAspectRatio the view aspect ratio
-     */
+    @WorkerThread
     @Override
-    public void takePictureSnapshot(final @NonNull PictureResult.Stub stub, @NonNull final AspectRatio viewAspectRatio) {
-        LOG.v("takePictureSnapshot: scheduling");
-        schedule(null, true, new Runnable() {
-            @Override
-            public void run() {
-                LOG.v("takePictureSnapshot: performing.", isTakingPicture());
-                if (isTakingPicture()) return;
-                stub.location = mLocation;
-                stub.isSnapshot = true;
-                stub.facing = mFacing;
-                stub.size = getUncroppedSnapshotSize(REF_OUTPUT); // Not the real size: it will be cropped to match the view ratio
-                stub.rotation = offset(REF_SENSOR, REF_OUTPUT); // Actually it will be rotated and set to 0.
-                AspectRatio outputRatio = flip(REF_OUTPUT, REF_VIEW) ? viewAspectRatio.flip() : viewAspectRatio;
-                LOG.v("Rotations", "SV", offset(REF_SENSOR, REF_VIEW), "VS", offset(REF_VIEW, REF_SENSOR));
-                LOG.v("Rotations", "SO", offset(REF_SENSOR, REF_OUTPUT), "OS", offset(REF_OUTPUT, REF_SENSOR));
-                LOG.v("Rotations", "VO", offset(REF_VIEW, REF_OUTPUT), "OV", offset(REF_OUTPUT, REF_VIEW));
-                mPictureRecorder = new SnapshotPictureRecorder(stub, Camera1Engine.this, mPreview, mCamera, outputRatio);
-                mPictureRecorder.take();
-            }
-        });
+    protected void onTakePictureSnapshot(@NonNull PictureResult.Stub stub, @NonNull AspectRatio viewAspectRatio) {
+        stub.size = getUncroppedSnapshotSize(REF_OUTPUT); // Not the real size: it will be cropped to match the view ratio
+        stub.rotation = offset(REF_SENSOR, REF_OUTPUT); // Actually it will be rotated and set to 0.
+        AspectRatio outputRatio = flip(REF_OUTPUT, REF_VIEW) ? viewAspectRatio.flip() : viewAspectRatio;
+        if (mPreview instanceof GlCameraPreview && Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT) {
+            mPictureRecorder = new SnapshotGlPictureRecorder(stub, this, (GlCameraPreview) mPreview, outputRatio);
+        } else {
+            mPictureRecorder = new Snapshot1PictureRecorder(stub, this, mCamera, outputRatio);
+        }
+        mPictureRecorder.take();
     }

     @Override
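
For orientation, this is how the refactored path is reached from the public API. A minimal usage sketch (CameraView, CameraListener and PictureResult are the library's public v2 classes; the listener shape is assumed from that API, not from this diff):

    // Trigger the snapshot path above from a client.
    CameraView camera = findViewById(R.id.camera);
    camera.addCameraListener(new CameraListener() {
        @Override
        public void onPictureTaken(@NonNull PictureResult result) {
            // result.isSnapshot() is true; size and rotation were set in onTakePictureSnapshot()
        }
    });
    camera.takePictureSnapshot(); // -> CameraEngine.takePictureSnapshot -> onTakePictureSnapshot
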
@@ -588,12 +521,8 @@ public class Camera1Engine extends CameraEngine implements Camera.PreviewCallback {

     @Override
     public void onVideoResult(@Nullable VideoResult.Stub result, @Nullable Exception exception) {
-        mVideoRecorder = null;
-        if (result != null) {
-            mCallback.dispatchOnVideoTaken(result);
-        } else {
+        if (result == null) {
             // Something went wrong, lock the camera again.
-            mCallback.dispatchError(new CameraException(exception, CameraException.REASON_VIDEO_FAILED));
             mCamera.lock();
         }
     }
@@ -639,103 +568,67 @@ public class Camera1Engine extends CameraEngine implements Camera.PreviewCallback {
         });
     }

-    /**
-     * @param file the output file
-     * @param viewAspectRatio the view aspect ratio
-     */
     @SuppressLint("NewApi")
+    @WorkerThread
     @Override
-    public void takeVideoSnapshot(final @NonNull VideoResult.Stub stub, @NonNull final File file, @NonNull final AspectRatio viewAspectRatio) {
+    protected void onTakeVideoSnapshot(@NonNull VideoResult.Stub stub, @NonNull File file, @NonNull AspectRatio viewAspectRatio) {
         if (!(mPreview instanceof GlCameraPreview)) {
             throw new IllegalStateException("Video snapshots are only supported with GlCameraPreview.");
         }
         if (Build.VERSION.SDK_INT < Build.VERSION_CODES.JELLY_BEAN_MR2) {
             throw new IllegalStateException("Video snapshots are only supported starting from API 18.");
         }
-        schedule(mStartVideoOp, true, new Runnable() {
-            @Override
-            public void run() {
-                if (isTakingVideo()) return;
-
-                // Create the video result stub
-                stub.file = file;
-                stub.isSnapshot = true;
-                stub.videoCodec = mVideoCodec;
-                stub.location = mLocation;
-                stub.facing = mFacing;
-                stub.videoBitRate = mVideoBitRate;
-                stub.audioBitRate = mAudioBitRate;
-                stub.audio = mAudio;
-                stub.maxSize = mVideoMaxSize;
-                stub.maxDuration = mVideoMaxDuration;
-
-                // Size and rotation turned out to be extremely tricky. In case of SnapshotPictureRecorder
-                // we use the preview size in REF_OUTPUT (cropped) and offset(REF_SENSOR, REF_OUTPUT) as rotation.
-                // These values mean that we expect input to be in the REF_SENSOR system.
-
-                // Here everything seems different. We would expect a difference because the two snapshot
-                // recorders have different mechanics (the picture one uses a SurfaceTexture with setBufferSize,
-                // the video one here uses the MediaCodec input surface which we can't control).
-
-                // The strangest thing is the fact that the correct angle seems to be the same for FRONT and
-                // BACK sensor, which means that our sensor correction actually screws things up. For this reason
-                // the facing value is temporarily set to BACK.
-                Facing realFacing = mFacing;
-                mFacing = Facing.BACK;
-
-                // These are the angles that make it work on a Nexus5X, compared to the offset() results.
-                // For instance, SV means offset(REF_SENSOR, REF_VIEW). The rest should be clear.
-                // CONFIG      | WANTED | SV  | VS  | VO  | OV  | SO  | OS  |
-                // ------------|--------|-----|-----|-----|-----|-----|-----|
-                // Vertical    | 0      | 270 | 90  | 0   | 0   | 270 | 90  |
-                // Left        | 270    | 270 | 90  | 270 | 90  | 180 | 180 |
-                // Right       | 90     | 270 | 90  | 90  | 270 | 0   | 0   |
-                // Upside down | 180    | 270 | 90  | 180 | 180 | 90  | 270 |
-
-                // The VO is the only correct value. Things change when using the FRONT camera, in which case
-                // no value is actually correct, and the needed values are the same as BACK!
-                // CONFIG      | WANTED | SV  | VS  | VO  | OV  | SO  | OS  |
-                // ------------|--------|-----|-----|-----|-----|-----|-----|
-                // Vertical    | 0      | 90  | 270 | 180 | 180 | 270 | 90  |
-                // Left        | 270    | 90  | 270 | 270 | 90  | 0   | 0   |
-                // Right       | 90     | 90  | 270 | 90  | 270 | 180 | 180 |
-                // Upside down | 180    | 90  | 270 | 0   | 0   | 90  | 270 |
-
-                // Based on this we will use VO for everything. See if we get issues about distortion
-                // and maybe we can improve. The reason why this happens is beyond my understanding.
-
-                Size outputSize = getUncroppedSnapshotSize(REF_OUTPUT);
-                if (outputSize == null) {
-                    throw new IllegalStateException("outputSize should not be null.");
-                }
-                AspectRatio outputRatio = flip(REF_OUTPUT, REF_VIEW) ? viewAspectRatio.flip() : viewAspectRatio;
-                Rect outputCrop = CropHelper.computeCrop(outputSize, outputRatio);
-                outputSize = new Size(outputCrop.width(), outputCrop.height());
-                stub.size = outputSize;
-                stub.rotation = offset(REF_VIEW, REF_OUTPUT);
-
-                // Reset facing and start.
-                mFacing = realFacing;
-                GlCameraPreview cameraPreview = (GlCameraPreview) mPreview;
-                mVideoRecorder = new SnapshotVideoRecorder(stub,
-                        Camera1Engine.this, Camera1Engine.this, cameraPreview);
-                mVideoRecorder.start();
-            }
-        });
-    }
-
-    @Override
-    public void stopVideo() {
-        schedule(null, false, new Runnable() {
-            @Override
-            public void run() {
-                LOG.i("stopVideo", "mVideoRecorder is null?", mVideoRecorder == null);
-                if (mVideoRecorder != null) {
-                    mVideoRecorder.stop();
-                    mVideoRecorder = null;
-                }
-            }
-        });
+        GlCameraPreview glPreview = (GlCameraPreview) mPreview;
+
+        // Size and rotation turned out to be extremely tricky. In case of Snapshot1PictureRecorder
+        // we use the preview size in REF_OUTPUT (cropped) and offset(REF_SENSOR, REF_OUTPUT) as rotation.
+        // These values mean that we expect input to be in the REF_SENSOR system.
+
+        // Here everything seems different. We would expect a difference because the two snapshot
+        // recorders have different mechanics (the picture one uses a SurfaceTexture with setBufferSize,
+        // the video one here uses the MediaCodec input surface which we can't control).
+
+        // The strangest thing is the fact that the correct angle seems to be the same for FRONT and
+        // BACK sensor, which means that our sensor correction actually screws things up. For this reason
+        // the facing value is temporarily set to BACK.
+        Facing realFacing = mFacing;
+        mFacing = Facing.BACK;
+
+        // These are the angles that make it work on a Nexus5X, compared to the offset() results.
+        // For instance, SV means offset(REF_SENSOR, REF_VIEW). The rest should be clear.
+        // CONFIG      | WANTED | SV  | VS  | VO  | OV  | SO  | OS  |
+        // ------------|--------|-----|-----|-----|-----|-----|-----|
+        // Vertical    | 0      | 270 | 90  | 0   | 0   | 270 | 90  |
+        // Left        | 270    | 270 | 90  | 270 | 90  | 180 | 180 |
+        // Right       | 90     | 270 | 90  | 90  | 270 | 0   | 0   |
+        // Upside down | 180    | 270 | 90  | 180 | 180 | 90  | 270 |
+
+        // The VO is the only correct value. Things change when using the FRONT camera, in which case
+        // no value is actually correct, and the needed values are the same as BACK!
+        // CONFIG      | WANTED | SV  | VS  | VO  | OV  | SO  | OS  |
+        // ------------|--------|-----|-----|-----|-----|-----|-----|
+        // Vertical    | 0      | 90  | 270 | 180 | 180 | 270 | 90  |
+        // Left        | 270    | 90  | 270 | 270 | 90  | 0   | 0   |
+        // Right       | 90     | 90  | 270 | 90  | 270 | 180 | 180 |
+        // Upside down | 180    | 90  | 270 | 0   | 0   | 90  | 270 |
+
+        // Based on this we will use VO for everything. See if we get issues about distortion
+        // and maybe we can improve. The reason why this happens is beyond my understanding.
+
+        Size outputSize = getUncroppedSnapshotSize(REF_OUTPUT);
+        if (outputSize == null) {
+            throw new IllegalStateException("outputSize should not be null.");
+        }
+        AspectRatio outputRatio = flip(REF_OUTPUT, REF_VIEW) ? viewAspectRatio.flip() : viewAspectRatio;
+        Rect outputCrop = CropHelper.computeCrop(outputSize, outputRatio);
+        outputSize = new Size(outputCrop.width(), outputCrop.height());
+        stub.size = outputSize;
+        stub.rotation = offset(REF_VIEW, REF_OUTPUT);

+        // Reset facing and start.
+        mFacing = realFacing;
+        mVideoRecorder = new SnapshotVideoRecorder(stub, Camera1Engine.this, glPreview);
+        mVideoRecorder.start();
     }

     // -----------------
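
Both snapshot paths lean on CropHelper.computeCrop for the final aspect ratio. Its effect is a plain center-crop, presumably equivalent to this sketch (a reconstruction under that assumption, not the library source):

    static Rect computeCenterCrop(@NonNull Size source, @NonNull AspectRatio target) {
        int width = source.getWidth();
        int height = source.getHeight();
        if (AspectRatio.of(width, height).toFloat() > target.toFloat()) {
            // Source is wider than requested: trim the width, keep it centered.
            int newWidth = Math.round(height * target.toFloat());
            return new Rect((width - newWidth) / 2, 0, (width + newWidth) / 2, height);
        } else {
            // Source is taller than requested: trim the height, keep it centered.
            int newHeight = Math.round(width / target.toFloat());
            return new Rect(0, (height - newHeight) / 2, width, (height + newHeight) / 2);
        }
    }
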

@@ -3,6 +3,7 @@ package com.otaliastudios.cameraview.engine;

 import android.annotation.SuppressLint;
 import android.content.Context;
 import android.graphics.PointF;
+import android.graphics.Rect;
 import android.graphics.SurfaceTexture;
 import android.hardware.camera2.CameraAccessException;
 import android.hardware.camera2.CameraCaptureSession;
@@ -32,9 +33,14 @@ import com.otaliastudios.cameraview.controls.Hdr;
 import com.otaliastudios.cameraview.controls.Mode;
 import com.otaliastudios.cameraview.controls.WhiteBalance;
 import com.otaliastudios.cameraview.gesture.Gesture;
+import com.otaliastudios.cameraview.internal.utils.CropHelper;
 import com.otaliastudios.cameraview.internal.utils.Op;
+import com.otaliastudios.cameraview.picture.Snapshot1PictureRecorder;
+import com.otaliastudios.cameraview.picture.SnapshotGlPictureRecorder;
+import com.otaliastudios.cameraview.preview.GlCameraPreview;
 import com.otaliastudios.cameraview.size.AspectRatio;
 import com.otaliastudios.cameraview.size.Size;
+import com.otaliastudios.cameraview.video.SnapshotVideoRecorder;

 import java.io.File;
 import java.util.ArrayList;
@@ -48,7 +54,9 @@ import androidx.annotation.Nullable;
 import androidx.annotation.RequiresApi;
 import androidx.annotation.WorkerThread;

+// TODO parameters
+// TODO pictures
+// TODO videos
 @RequiresApi(Build.VERSION_CODES.LOLLIPOP)
 public class Camera2Engine extends CameraEngine {
@@ -68,23 +76,7 @@ public class Camera2Engine extends CameraEngine {
         mManager = (CameraManager) mCallback.getContext().getSystemService(Context.CAMERA_SERVICE);
     }

-    private boolean isCameraAvailable() {
-        return getEngineState() == STATE_STARTED;
-    }
-
-    private void schedule(@Nullable final Op<Void> op, final boolean ensureAvailable, final Runnable action) {
-        mHandler.post(new Runnable() {
-            @Override
-            public void run() {
-                if (ensureAvailable && !isCameraAvailable()) {
-                    if (op != null) op.end(null);
-                } else {
-                    action.run();
-                    if (op != null) op.end(null);
-                }
-            }
-        });
-    }
+    //region Utilities

     @NonNull
     private <T> T readCharacteristic(@NonNull CameraCharacteristics characteristics,
@@ -122,6 +114,10 @@ public class Camera2Engine extends CameraEngine {
         return new CameraException(reason);
     }

+    //endregion
+
+    //region Protected APIs
+
     @NonNull
     @Override
     protected List<Size> getPreviewStreamAvailableSizes() {
@@ -144,8 +140,15 @@ public class Camera2Engine extends CameraEngine {
         }
     }

-    private boolean collectCameraId() {
-        int internalFacing = mMapper.map(mFacing);
+    @WorkerThread
+    @Override
+    protected void onPreviewStreamSizeChanged() {
+        restartBind();
+    }
+
+    @Override
+    protected boolean collectCameraInfo(@NonNull Facing facing) {
+        int internalFacing = mMapper.map(facing);
         String[] cameraIds = null;
         try {
             cameraIds = mManager.getCameraIdList();
@@ -154,7 +157,7 @@ public class Camera2Engine extends CameraEngine {
             // However, let's launch an unrecoverable exception.
             throw createCameraException(e);
         }
-        LOG.i("collectCameraId", "Facing:", mFacing, "Internal:", internalFacing, "Cameras:", cameraIds.length);
+        LOG.i("collectCameraInfo", "Facing:", facing, "Internal:", internalFacing, "Cameras:", cameraIds.length);
         for (String cameraId : cameraIds) {
             try {
                 CameraCharacteristics characteristics = mManager.getCameraCharacteristics(cameraId);
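
The hunk ends before the comparison itself. Presumably the loop matches internalFacing against the LENS_FACING characteristic and records the chosen camera, along these lines (a sketch built on standard camera2 calls, not lines from this diff):

    Integer lensFacing = characteristics.get(CameraCharacteristics.LENS_FACING);
    if (lensFacing != null && lensFacing == internalFacing) {
        mCameraId = cameraId; // remember which camera to open in onStartEngine()
        Integer orientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
        mSensorOffset = orientation == null ? 0 : orientation; // feeds the offset()/flip() math
        return true;
    }
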
@@ -171,6 +174,9 @@ public class Camera2Engine extends CameraEngine {
         return false;
     }

+    //endregion
+
+    //region Start
     @SuppressLint("MissingPermission")
     @NonNull
@@ -178,12 +184,6 @@ public class Camera2Engine extends CameraEngine {
     protected Task<Void> onStartEngine() {
         final TaskCompletionSource<Void> task = new TaskCompletionSource<>();
         try {
-            boolean hasCamera = collectCameraId();
-            if (!hasCamera) {
-                LOG.e("onStartEngine:", "No camera available for facing", mFacing);
-                throw new CameraException(CameraException.REASON_NO_CAMERA);
-            }
-
             // We have a valid camera for this Facing. Go on.
             mManager.openCamera(mCameraId, new CameraDevice.StateCallback() {
                 @Override
@@ -340,6 +340,10 @@ public class Camera2Engine extends CameraEngine {
         return Tasks.forResult(null);
     }

+    //endregion
+
+    //region Stop
+
     @NonNull
     @Override
     protected Task<Void> onStopPreview() {
@@ -394,12 +398,58 @@ public class Camera2Engine extends CameraEngine {
         return Tasks.forResult(null);
     }

-    @WorkerThread
-    @Override
-    protected void onPreviewStreamSizeChanged() {
-        restartBind();
-    }
+    //endregion
+
+    //region Pictures
+
+    @WorkerThread
+    @Override
+    protected void onTakePictureSnapshot(@NonNull PictureResult.Stub stub, @NonNull AspectRatio viewAspectRatio) {
+        stub.size = getUncroppedSnapshotSize(REF_OUTPUT); // Not the real size: it will be cropped to match the view ratio
+        stub.rotation = offset(REF_SENSOR, REF_OUTPUT); // Actually it will be rotated and set to 0.
+        AspectRatio outputRatio = flip(REF_OUTPUT, REF_VIEW) ? viewAspectRatio.flip() : viewAspectRatio;
+        if (mPreview instanceof GlCameraPreview) {
+            mPictureRecorder = new SnapshotGlPictureRecorder(stub, this, (GlCameraPreview) mPreview, outputRatio);
+        } else {
+            throw new RuntimeException("takePictureSnapshot with Camera2 is only supported with Preview.GL_SURFACE");
+        }
+        mPictureRecorder.take();
+    }
+
+    //endregion
+
+    //region Videos
+
+    /**
+     * See {@link Camera1Engine#onTakeVideoSnapshot(VideoResult.Stub, File, AspectRatio)}
+     * to read about the size and rotation computation.
+     */
+    @WorkerThread
+    @Override
+    protected void onTakeVideoSnapshot(@NonNull VideoResult.Stub stub, @NonNull File file, @NonNull AspectRatio viewAspectRatio) {
+        if (!(mPreview instanceof GlCameraPreview)) {
+            throw new IllegalStateException("Video snapshots are only supported with GlCameraPreview.");
+        }
+        GlCameraPreview glPreview = (GlCameraPreview) mPreview;
+        Facing realFacing = mFacing;
+        mFacing = Facing.BACK;
+        Size outputSize = getUncroppedSnapshotSize(REF_OUTPUT);
+        if (outputSize == null) {
+            throw new IllegalStateException("outputSize should not be null.");
+        }
+        AspectRatio outputRatio = flip(REF_OUTPUT, REF_VIEW) ? viewAspectRatio.flip() : viewAspectRatio;
+        Rect outputCrop = CropHelper.computeCrop(outputSize, outputRatio);
+        outputSize = new Size(outputCrop.width(), outputCrop.height());
+        stub.size = outputSize;
+        stub.rotation = offset(REF_VIEW, REF_OUTPUT);
+
+        // Reset facing and start.
+        mFacing = realFacing;
+        mVideoRecorder = new SnapshotVideoRecorder(stub, this, glPreview);
+        mVideoRecorder.start();
+    }
+
+    //endregion
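
As with pictures, the entry point is the public CameraView API. A usage sketch (assumes an Activity whose layout inflates the view with app:cameraPreview="glSurface", the GL preview both engines require above):

    CameraView camera = findViewById(R.id.camera);
    final File file = new File(getFilesDir(), "video-snapshot.mp4");
    camera.addCameraListener(new CameraListener() {
        @Override
        public void onVideoTaken(@NonNull VideoResult result) {
            // result.getFile() is the same file, with size/rotation from onTakeVideoSnapshot()
        }
    });
    camera.takeVideoSnapshot(file); // -> CameraEngine.takeVideoSnapshot -> onTakeVideoSnapshot
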
@@ -422,24 +472,6 @@ public class Camera2Engine extends CameraEngine {
     }

-    @Override
-    public void setFacing(@NonNull Facing facing) {
-        final Facing old = mFacing;
-        if (facing != old) {
-            mFacing = facing;
-            schedule(null, true, new Runnable() {
-                @Override
-                public void run() {
-                    if (collectCameraId()) {
-                        restart();
-                    } else {
-                        mFacing = old;
-                    }
-                }
-            });
-        }
-    }
-
     @Override
     public void setZoom(float zoom, @Nullable PointF[] points, boolean notify) {
@@ -470,36 +502,17 @@ public class Camera2Engine extends CameraEngine {
     }

-    @Override
-    public void setAudio(@NonNull Audio audio) {
-    }
-
     @Override
     public void takePicture(@NonNull PictureResult.Stub stub) {
     }

-    @Override
-    public void takePictureSnapshot(@NonNull PictureResult.Stub stub, @NonNull AspectRatio viewAspectRatio) {
-    }
-
     @Override
     public void takeVideo(@NonNull VideoResult.Stub stub, @NonNull File file) {
     }

-    @Override
-    public void takeVideoSnapshot(@NonNull VideoResult.Stub stub, @NonNull File file, @NonNull AspectRatio viewAspectRatio) {
-    }
-
-    @Override
-    public void stopVideo() {
-    }
-
     @Override
     public void startAutoFocus(@Nullable Gesture gesture, @NonNull PointF point) {

@@ -1,10 +1,13 @@
 package com.otaliastudios.cameraview.engine;

+import android.annotation.SuppressLint;
 import android.content.Context;
 import android.graphics.PointF;
+import android.graphics.Rect;
 import android.location.Location;
+import android.os.Build;
 import android.os.Handler;
 import android.os.Looper;
@@ -22,9 +25,12 @@ import com.otaliastudios.cameraview.PictureResult;
 import com.otaliastudios.cameraview.VideoResult;
 import com.otaliastudios.cameraview.frame.Frame;
 import com.otaliastudios.cameraview.frame.FrameManager;
+import com.otaliastudios.cameraview.internal.utils.CropHelper;
 import com.otaliastudios.cameraview.internal.utils.Op;
 import com.otaliastudios.cameraview.internal.utils.WorkerHandler;
 import com.otaliastudios.cameraview.picture.PictureRecorder;
+import com.otaliastudios.cameraview.picture.Snapshot1PictureRecorder;
+import com.otaliastudios.cameraview.picture.SnapshotGlPictureRecorder;
 import com.otaliastudios.cameraview.preview.CameraPreview;
 import com.otaliastudios.cameraview.controls.Audio;
 import com.otaliastudios.cameraview.controls.Facing;
@@ -34,10 +40,12 @@ import com.otaliastudios.cameraview.controls.Hdr;
 import com.otaliastudios.cameraview.controls.Mode;
 import com.otaliastudios.cameraview.controls.VideoCodec;
 import com.otaliastudios.cameraview.controls.WhiteBalance;
+import com.otaliastudios.cameraview.preview.GlCameraPreview;
 import com.otaliastudios.cameraview.size.AspectRatio;
 import com.otaliastudios.cameraview.size.Size;
 import com.otaliastudios.cameraview.size.SizeSelector;
 import com.otaliastudios.cameraview.size.SizeSelectors;
+import com.otaliastudios.cameraview.video.SnapshotVideoRecorder;
 import com.otaliastudios.cameraview.video.VideoRecorder;

 import androidx.annotation.NonNull;
@@ -115,7 +123,9 @@ import java.util.concurrent.Executor;
  */
 public abstract class CameraEngine implements
         CameraPreview.SurfaceCallback,
-        FrameManager.BufferCallback {
+        FrameManager.BufferCallback,
+        PictureRecorder.PictureResultListener,
+        VideoRecorder.VideoResultListener {

     public interface Callback {
         @NonNull Context getContext();
@@ -213,8 +223,7 @@ public abstract class CameraEngine implements
     Op<Void> mHdrOp = new Op<>();
     @VisibleForTesting(otherwise = VisibleForTesting.PROTECTED)
     Op<Void> mLocationOp = new Op<>();
-    @VisibleForTesting(otherwise = VisibleForTesting.PROTECTED)
-    Op<Void> mStartVideoOp = new Op<>();
+    @VisibleForTesting Op<Void> mStartVideoOp = new Op<>();
     @VisibleForTesting(otherwise = VisibleForTesting.PROTECTED)
     Op<Void> mPlaySoundsOp = new Op<>();
@@ -368,6 +377,10 @@ public abstract class CameraEngine implements
         mEngineStep.doStart(false, new Callable<Task<Void>>() {
             @Override
             public Task<Void> call() {
+                if (!collectCameraInfo(mFacing)) {
+                    LOG.e("onStartEngine:", "No camera available for facing", mFacing);
+                    throw new CameraException(CameraException.REASON_NO_CAMERA);
+                }
                 return onStartEngine();
             }
         }, new Runnable() {
@@ -470,7 +483,7 @@ public abstract class CameraEngine implements
     @SuppressWarnings("WeakerAccess")
     protected void restartBind() {
         LOG.i("restartPreviewAndBind", "posting.");
-        mHandler.post(new Runnable() {
+        mHandler.run(new Runnable() {
             @Override
             public void run() {
                 LOG.i("restartPreviewAndBind", "executing.");
@@ -533,7 +546,7 @@ public abstract class CameraEngine implements
     @SuppressWarnings("WeakerAccess")
     protected void restartPreview() {
         LOG.i("restartPreview", "posting.");
-        mHandler.post(new Runnable() {
+        mHandler.run(new Runnable() {
             @Override
             public void run() {
                 LOG.i("restartPreview", "executing.");
@@ -572,7 +585,7 @@ public abstract class CameraEngine implements
     @Override
     public final void onSurfaceAvailable() {
         LOG.i("onSurfaceAvailable:", "Size is", getPreviewSurfaceSize(REF_VIEW));
-        mHandler.post(new Runnable() {
+        mHandler.run(new Runnable() {
             @Override
             public void run() {
                 startBind().onSuccessTask(mHandler.getExecutor(), new SuccessContinuation<Void, Void>() {
@@ -589,7 +602,7 @@ public abstract class CameraEngine implements
     @Override
     public final void onSurfaceChanged() {
         LOG.i("onSurfaceChanged:", "Size is", getPreviewSurfaceSize(REF_VIEW), "Posting.");
-        mHandler.post(new Runnable() {
+        mHandler.run(new Runnable() {
             @Override
             public void run() {
                 LOG.i("onSurfaceChanged:",
@@ -620,7 +633,7 @@ public abstract class CameraEngine implements
     @Override
     public final void onSurfaceDestroyed() {
         LOG.i("onSurfaceDestroyed");
-        mHandler.post(new Runnable() {
+        mHandler.run(new Runnable() {
             @Override
             public void run() {
                 stopPreview(false).onSuccessTask(mHandler.getExecutor(), new SuccessContinuation<Void, Void>() {
@@ -675,7 +688,7 @@ public abstract class CameraEngine implements
     public Task<Void> start() {
         LOG.i("Start:", "posting runnable. State:", getEngineStateName());
         final TaskCompletionSource<Void> outTask = new TaskCompletionSource<>();
-        mHandler.post(new Runnable() {
+        mHandler.run(new Runnable() {
             @Override
             public void run() {
                 LOG.i("Start:", "executing runnable. State:", getEngineStateName());
@@ -722,7 +735,7 @@ public abstract class CameraEngine implements
     private Task<Void> stop(final boolean swallowExceptions) {
         LOG.i("Stop:", "posting runnable. State:", getEngineStateName());
         final TaskCompletionSource<Void> outTask = new TaskCompletionSource<>();
-        mHandler.post(new Runnable() {
+        mHandler.run(new Runnable() {
             @Override
             public void run() {
                 LOG.i("Stop:", "executing runnable. State:", getEngineStateName());
@@ -765,7 +778,7 @@ public abstract class CameraEngine implements

     //endregion

-    //region Simple setters
+    //region final setters

     // This is called before start() and never again.
     public final void setDisplayOffset(int displayOffset) {
@@ -819,15 +832,61 @@ public abstract class CameraEngine implements
     public final void setAutoFocusResetDelay(long delayMillis) { mAutoFocusResetDelayMillis = delayMillis; }

+    /**
+     * Sets a new facing value. This will restart the session (if there's any)
+     * so that we can open the new facing camera.
+     * @param facing facing
+     */
+    public final void setFacing(final @NonNull Facing facing) {
+        final Facing old = mFacing;
+        if (facing != old) {
+            mFacing = facing;
+            mHandler.run(new Runnable() {
+                @Override
+                public void run() {
+                    if (getEngineState() < STATE_STARTED) return;
+                    if (collectCameraInfo(facing)) {
+                        restart();
+                    } else {
+                        mFacing = old;
+                    }
+                }
+            });
+        }
+    }
+
+    /**
+     * Sets a new audio value that will be used for video recordings.
+     * @param audio desired audio
+     */
+    public final void setAudio(@NonNull Audio audio) {
+        if (mAudio != audio) {
+            if (isTakingVideo()) {
+                LOG.w("Audio setting was changed while recording. " +
+                        "Changes will take place starting from next video");
+            }
+            mAudio = audio;
+        }
+    }
+
     //endregion

     //region Abstract setters and APIs

-    // Should restart the session if active.
-    public abstract void setMode(@NonNull Mode mode);
+    /**
+     * Camera is about to be opened. Implementors should look into available cameras
+     * and see if any matches the given {@link Facing} value.
+     *
+     * If so, implementors should set {@link #mSensorOffset} and any other information
+     * (like the camera ID) needed to start the engine.
+     *
+     * @param facing the facing value
+     * @return true if we have one
+     */
+    protected abstract boolean collectCameraInfo(@NonNull Facing facing);

     // Should restart the session if active.
-    public abstract void setFacing(@NonNull Facing facing);
+    public abstract void setMode(@NonNull Mode mode);

     // If closed, no-op. If opened, check supported and apply.
     public abstract void setZoom(float zoom, @Nullable PointF[] points, boolean notify);
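
A minimal hypothetical implementor of the new hook, to make the contract above concrete (illustration only; the real implementations are in Camera1Engine and Camera2Engine, and listCameras()/CameraDescriptor are invented names):

    @Override
    protected boolean collectCameraInfo(@NonNull Facing facing) {
        int internalFacing = mMapper.map(facing);          // engine-specific facing constant
        for (CameraDescriptor descriptor : listCameras()) { // hypothetical enumeration
            if (descriptor.facing == internalFacing) {
                mCameraId = descriptor.id;                  // which camera onStartEngine() opens
                mSensorOffset = descriptor.orientation;     // needed by the offset()/flip() math
                return true;
            }
        }
        return false; // start() throws REASON_NO_CAMERA; setFacing() reverts to the old value
    }
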
@@ -847,22 +906,120 @@ public abstract class CameraEngine implements
     // If closed, keep. If opened, check supported and apply.
     public abstract void setLocation(@Nullable Location location);

-    // Just set.
-    public abstract void setAudio(@NonNull Audio audio);
+    public abstract void startAutoFocus(@Nullable Gesture gesture, @NonNull PointF point);
+
+    public abstract void setPlaySounds(boolean playSounds);
+
+    //endregion
+
+    //region picture and video control

     public abstract void takePicture(@NonNull PictureResult.Stub stub);

-    public abstract void takePictureSnapshot(@NonNull PictureResult.Stub stub, @NonNull AspectRatio viewAspectRatio);
+    /**
+     * The snapshot size is the {@link #getPreviewStreamSize(int)}, but cropped based on the
+     * view/surface aspect ratio.
+     * @param stub a picture stub
+     * @param viewAspectRatio the view aspect ratio
+     */
+    public final void takePictureSnapshot(final @NonNull PictureResult.Stub stub, @NonNull final AspectRatio viewAspectRatio) {
+        LOG.v("takePictureSnapshot", "scheduling");
+        mHandler.run(new Runnable() {
+            @Override
+            public void run() {
+                LOG.v("takePictureSnapshot", "performing. Engine:", getEngineStateName(), "isTakingPicture:", isTakingPicture());
+                if (getEngineState() < STATE_STARTED) return;
+                if (isTakingPicture()) return;
+                stub.location = mLocation;
+                stub.isSnapshot = true;
+                stub.facing = mFacing;
+                // Leave the other parameters to subclasses.
+                LOG.v("takePictureSnapshot", "Rotations", "SV", offset(REF_SENSOR, REF_VIEW), "VS", offset(REF_VIEW, REF_SENSOR));
+                LOG.v("takePictureSnapshot", "Rotations", "SO", offset(REF_SENSOR, REF_OUTPUT), "OS", offset(REF_OUTPUT, REF_SENSOR));
+                LOG.v("takePictureSnapshot", "Rotations", "VO", offset(REF_VIEW, REF_OUTPUT), "OV", offset(REF_OUTPUT, REF_VIEW));
+                onTakePictureSnapshot(stub, viewAspectRatio);
+            }
+        });
+    }
+
+    @Override
+    public void onPictureShutter(boolean didPlaySound) {
+        mCallback.onShutter(!didPlaySound);
+    }
+
+    @Override
+    public void onPictureResult(@Nullable PictureResult.Stub result) {
+        mPictureRecorder = null;
+        if (result != null) {
+            mCallback.dispatchOnPictureTaken(result);
+        } else {
+            // Something went wrong.
+            LOG.e("onPictureResult", "result is null: something went wrong.");
+            mCallback.dispatchError(new CameraException(CameraException.REASON_PICTURE_FAILED));
+        }
+    }

     public abstract void takeVideo(@NonNull VideoResult.Stub stub, @NonNull File file);

-    public abstract void takeVideoSnapshot(@NonNull VideoResult.Stub stub, @NonNull File file, @NonNull AspectRatio viewAspectRatio);
-
-    public abstract void stopVideo();
+    /**
+     * @param stub a video stub
+     * @param file the output file
+     * @param viewAspectRatio the view aspect ratio
+     */
+    public final void takeVideoSnapshot(final @NonNull VideoResult.Stub stub, @NonNull final File file, @NonNull final AspectRatio viewAspectRatio) {
+        LOG.v("takeVideoSnapshot", "scheduling");
+        mHandler.run(new Runnable() {
+            @Override
+            public void run() {
+                LOG.v("takeVideoSnapshot", "performing. Engine:", getEngineStateName(), "isTakingVideo:", isTakingVideo());
+                if (getEngineState() < STATE_STARTED) { mStartVideoOp.end(null); return; }
+                if (isTakingVideo()) { mStartVideoOp.end(null); return; }
+                stub.file = file;
+                stub.isSnapshot = true;
+                stub.videoCodec = mVideoCodec;
+                stub.location = mLocation;
+                stub.facing = mFacing;
+                stub.videoBitRate = mVideoBitRate;
+                stub.audioBitRate = mAudioBitRate;
+                stub.audio = mAudio;
+                stub.maxSize = mVideoMaxSize;
+                stub.maxDuration = mVideoMaxDuration;
+                onTakeVideoSnapshot(stub, file, viewAspectRatio);
+                mStartVideoOp.end(null);
+            }
+        });
+    }
+
+    public final void stopVideo() {
+        LOG.i("stopVideo", "posting");
+        mHandler.run(new Runnable() {
+            @Override
+            public void run() {
+                LOG.i("stopVideo", "executing.", "has recorder?", mVideoRecorder != null);
+                if (mVideoRecorder != null) {
+                    mVideoRecorder.stop();
+                    mVideoRecorder = null;
+                }
+            }
+        });
+    }
+
+    @Override
+    public void onVideoResult(@Nullable VideoResult.Stub result, @Nullable Exception exception) {
+        mVideoRecorder = null;
+        if (result != null) {
+            mCallback.dispatchOnVideoTaken(result);
+        } else {
+            LOG.e("onVideoResult", "result is null: something went wrong.", exception);
+            mCallback.dispatchError(new CameraException(exception, CameraException.REASON_VIDEO_FAILED));
+        }
+    }

-    public abstract void startAutoFocus(@Nullable Gesture gesture, @NonNull PointF point);
-
-    public abstract void setPlaySounds(boolean playSounds);
+    @WorkerThread
+    protected abstract void onTakePictureSnapshot(@NonNull PictureResult.Stub stub, @NonNull AspectRatio viewAspectRatio);
+
+    @WorkerThread
+    protected abstract void onTakeVideoSnapshot(@NonNull VideoResult.Stub stub, @NonNull File file, @NonNull AspectRatio viewAspectRatio);

     //endregion
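
To see the round-trip these base methods now own, here is a skeletal recorder (hypothetical; the real ones are Snapshot1PictureRecorder and SnapshotGlPictureRecorder) showing which calls flow back into the engine through the listener interfaces it implements above:

    class NoopPictureRecorder extends PictureRecorder {
        NoopPictureRecorder(@NonNull PictureResult.Stub stub,
                            @Nullable PictureRecorder.PictureResultListener listener) {
            super(stub, listener); // the engine registers itself as the listener
        }
        @Override
        public void take() {
            dispatchOnShutter(false);   // -> CameraEngine.onPictureShutter -> Callback.onShutter
            mResult.data = new byte[0]; // a real recorder fills data, size, rotation, format
            dispatchResult();           // -> CameraEngine.onPictureResult -> dispatchOnPictureTaken
        }
    }
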

@@ -55,7 +55,7 @@ public class WorkerHandler {
      *
      * @param action the action
      */
-    public static void run(@NonNull Runnable action) {
+    public static void execute(@NonNull Runnable action) {
         get("FallbackCameraThread").post(action);
     }
@@ -71,7 +71,7 @@ public class WorkerHandler {
         mExecutor = new Executor() {
             @Override
             public void execute(Runnable command) {
-                post(command);
+                WorkerHandler.this.run(command);
             }
         };
     }
@@ -80,7 +80,7 @@ public class WorkerHandler {
      * Post an action on this handler.
      * @param runnable the action
      */
-    public void post(@NonNull Runnable runnable) {
+    public void run(@NonNull Runnable runnable) {
         if (Thread.currentThread() == getThread()) {
             runnable.run();
         } else {
@@ -88,6 +88,14 @@ public class WorkerHandler {
         }
     }

+    /**
+     * Post an action on this handler.
+     * @param runnable the action
+     */
+    public void post(@NonNull Runnable runnable) {
+        mHandler.post(runnable);
+    }
+
     /**
      * Post an action on this handler.
      * @param delay the delay in millis
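
The rename leaves three distinct dispatch flavors, whose semantics follow directly from the bodies above (the thread name below is hypothetical):

    Runnable task = new Runnable() { public void run() { /* work */ } };
    WorkerHandler handler = WorkerHandler.get("SomeThread");
    handler.run(task);           // executes inline if already on the handler's thread, else posts
    handler.post(task);          // always enqueues, even from the handler's own thread
    WorkerHandler.execute(task); // static: posts to the shared "FallbackCameraThread"
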

@@ -0,0 +1,119 @@
package com.otaliastudios.cameraview.picture;

import android.annotation.TargetApi;
import android.graphics.Bitmap;
import android.graphics.ImageFormat;
import android.graphics.Rect;
import android.graphics.SurfaceTexture;
import android.graphics.YuvImage;
import android.hardware.Camera;
import android.opengl.EGL14;
import android.opengl.EGLContext;
import android.opengl.Matrix;
import android.os.Build;

import com.otaliastudios.cameraview.CameraLogger;
import com.otaliastudios.cameraview.PictureResult;
import com.otaliastudios.cameraview.controls.Facing;
import com.otaliastudios.cameraview.engine.Camera1Engine;
import com.otaliastudios.cameraview.engine.CameraEngine;
import com.otaliastudios.cameraview.internal.egl.EglCore;
import com.otaliastudios.cameraview.internal.egl.EglViewport;
import com.otaliastudios.cameraview.internal.egl.EglWindowSurface;
import com.otaliastudios.cameraview.internal.utils.CropHelper;
import com.otaliastudios.cameraview.internal.utils.RotationHelper;
import com.otaliastudios.cameraview.internal.utils.WorkerHandler;
import com.otaliastudios.cameraview.preview.CameraPreview;
import com.otaliastudios.cameraview.preview.GlCameraPreview;
import com.otaliastudios.cameraview.preview.RendererFrameCallback;
import com.otaliastudios.cameraview.preview.RendererThread;
import com.otaliastudios.cameraview.size.AspectRatio;
import com.otaliastudios.cameraview.size.Size;

import androidx.annotation.NonNull;

import java.io.ByteArrayOutputStream;

/**
 * A {@link PictureRecorder} that uses standard APIs.
 */
public class Snapshot1PictureRecorder extends PictureRecorder {

    private static final String TAG = Snapshot1PictureRecorder.class.getSimpleName();
    private static final CameraLogger LOG = CameraLogger.create(TAG);

    private Camera1Engine mEngine1;
    private Camera mCamera;
    private AspectRatio mOutputRatio;
    private int mFormat;

    /**
     * Camera1 constructor.
     */
    public Snapshot1PictureRecorder(
            @NonNull PictureResult.Stub stub,
            @NonNull Camera1Engine engine,
            @NonNull Camera camera,
            @NonNull AspectRatio outputRatio) {
        super(stub, engine);
        mEngine1 = engine;
        mCamera = camera;
        mOutputRatio = outputRatio;
        mFormat = engine.getPreviewStreamFormat();
    }

    @Override
    public void take() {
        mCamera.setOneShotPreviewCallback(new Camera.PreviewCallback() {
            @Override
            public void onPreviewFrame(@NonNull final byte[] yuv, Camera camera) {
                dispatchOnShutter(false);

                // Got to rotate the preview frame, since byte[] data here does not include
                // EXIF tags automatically set by camera. So either we add EXIF, or we rotate.
                // Adding EXIF to a byte array, unfortunately, is hard.
                final int sensorToOutput = mResult.rotation;
                final Size outputSize = mResult.size;
                final Size previewStreamSize = mEngine1.getPreviewStreamSize(CameraEngine.REF_SENSOR);
                if (previewStreamSize == null) {
                    throw new IllegalStateException("Preview stream size should never be null here.");
                }
                WorkerHandler.execute(new Runnable() {
                    @Override
                    public void run() {
                        // Rotate the picture, because no one will write EXIF data,
                        // then crop if needed. In both cases, transform yuv to jpeg.
                        //noinspection deprecation
                        byte[] data = RotationHelper.rotate(yuv, previewStreamSize, sensorToOutput);
                        // Note: this local shadows the captured byte[] yuv parameter from here on.
                        YuvImage yuv = new YuvImage(data, mFormat, outputSize.getWidth(), outputSize.getHeight(), null);
                        ByteArrayOutputStream stream = new ByteArrayOutputStream();
                        Rect outputRect = CropHelper.computeCrop(outputSize, mOutputRatio);
                        yuv.compressToJpeg(outputRect, 90, stream);
                        data = stream.toByteArray();

                        mResult.data = data;
                        mResult.size = new Size(outputRect.width(), outputRect.height());
                        mResult.rotation = 0;
                        mResult.format = PictureResult.FORMAT_JPEG;
                        dispatchResult();
                    }
                });

                // It seems that the buffers are already cleared here, so we need to allocate again.
                camera.setPreviewCallbackWithBuffer(null); // Release anything left
                camera.setPreviewCallbackWithBuffer(mEngine1); // Add ourselves
                mEngine1.getFrameManager().setUp(ImageFormat.getBitsPerPixel(mFormat), previewStreamSize);
            }
        });
    }

    @Override
    protected void dispatchResult() {
        mEngine1 = null;
        mCamera = null;
        mOutputRatio = null;
        mFormat = 0;
        super.dispatchResult();
    }
}
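
A worked example of the rotate-then-crop path in take(), assuming a 1600x1200 preview stream, a 90 degree sensor-to-output rotation, and a 1:1 view ratio (the numbers are illustrative):

    Size previewStreamSize = new Size(1600, 1200); // REF_SENSOR
    int sensorToOutput = 90;                       // mResult.rotation before it is zeroed
    // RotationHelper.rotate turns the 1600x1200 NV21 buffer into a 1200x1600 one.
    Size outputSize = new Size(1200, 1600);        // matches mResult.size in REF_OUTPUT
    Rect outputRect = CropHelper.computeCrop(outputSize, AspectRatio.of(1, 1));
    // outputRect is a centered 1200x1200 square; compressToJpeg encodes only that
    // region, so mResult.size becomes 1200x1200 and mResult.rotation becomes 0.
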

@@ -2,10 +2,8 @@ package com.otaliastudios.cameraview.picture;

 import android.annotation.TargetApi;
 import android.graphics.Bitmap;
-import android.graphics.ImageFormat;
 import android.graphics.Rect;
 import android.graphics.SurfaceTexture;
-import android.graphics.YuvImage;
 import android.hardware.Camera;
 import android.opengl.EGL14;
 import android.opengl.EGLContext;
@@ -21,7 +19,6 @@ import com.otaliastudios.cameraview.internal.egl.EglCore;
 import com.otaliastudios.cameraview.internal.egl.EglViewport;
 import com.otaliastudios.cameraview.internal.egl.EglWindowSurface;
 import com.otaliastudios.cameraview.internal.utils.CropHelper;
-import com.otaliastudios.cameraview.internal.utils.RotationHelper;
 import com.otaliastudios.cameraview.internal.utils.WorkerHandler;
 import com.otaliastudios.cameraview.preview.CameraPreview;
 import com.otaliastudios.cameraview.preview.GlCameraPreview;
@@ -32,51 +29,31 @@ import com.otaliastudios.cameraview.size.Size;

 import androidx.annotation.NonNull;

-import java.io.ByteArrayOutputStream;
-
-/**
- * A {@link PictureResult} that uses standard APIs.
- */
-public class SnapshotPictureRecorder extends PictureRecorder {
+public class SnapshotGlPictureRecorder extends PictureRecorder {

-    private static final String TAG = SnapshotPictureRecorder.class.getSimpleName();
+    private static final String TAG = SnapshotGlPictureRecorder.class.getSimpleName();
     private static final CameraLogger LOG = CameraLogger.create(TAG);

-    private Camera1Engine mEngine1;
-    private Camera mCamera;
-    private CameraPreview mPreview;
+    private CameraEngine mEngine;
+    private GlCameraPreview mPreview;
     private AspectRatio mOutputRatio;
-    private int mFormat;

-    /**
-     * Camera1 constructor.
-     */
-    public SnapshotPictureRecorder(
+    public SnapshotGlPictureRecorder(
             @NonNull PictureResult.Stub stub,
-            @NonNull Camera1Engine engine,
-            @NonNull CameraPreview preview,
-            @NonNull Camera camera,
+            @NonNull CameraEngine engine,
+            @NonNull GlCameraPreview preview,
             @NonNull AspectRatio outputRatio) {
         super(stub, engine);
-        mEngine1 = engine;
+        mEngine = engine;
         mPreview = preview;
-        mCamera = camera;
         mOutputRatio = outputRatio;
-        mFormat = engine.getPreviewStreamFormat();
     }

+    @TargetApi(Build.VERSION_CODES.KITKAT)
     @Override
     public void take() {
-        if (mPreview instanceof GlCameraPreview && Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT) {
-            takeGl((GlCameraPreview) mPreview);
-        } else {
-            takeLegacy();
-        }
-    }
-
-    @TargetApi(Build.VERSION_CODES.KITKAT)
-    private void takeGl(@NonNull final GlCameraPreview preview) {
-        preview.addRendererFrameCallback(new RendererFrameCallback() {
+        mPreview.addRendererFrameCallback(new RendererFrameCallback() {

             int mTextureId;
             SurfaceTexture mSurfaceTexture;
@ -96,7 +73,7 @@ public class SnapshotPictureRecorder extends PictureRecorder {
@RendererThread @RendererThread
@Override @Override
public void onRendererFrame(@NonNull SurfaceTexture surfaceTexture, final float scaleX, final float scaleY) { public void onRendererFrame(@NonNull SurfaceTexture surfaceTexture, final float scaleX, final float scaleY) {
preview.removeRendererFrameCallback(this); mPreview.removeRendererFrameCallback(this);
// This kind of works but has drawbacks: // This kind of works but has drawbacks:
// - output is upside down due to coordinates in GL: we need to flip the byte[] somehow // - output is upside down due to coordinates in GL: we need to flip the byte[] somehow
@ -119,7 +96,7 @@ public class SnapshotPictureRecorder extends PictureRecorder {
final EglCore core = new EglCore(eglContext, EglCore.FLAG_RECORDABLE); final EglCore core = new EglCore(eglContext, EglCore.FLAG_RECORDABLE);
// final EGLSurface oldSurface = EGL14.eglGetCurrentSurface(EGL14.EGL_DRAW); // final EGLSurface oldSurface = EGL14.eglGetCurrentSurface(EGL14.EGL_DRAW);
// final EGLDisplay oldDisplay = EGL14.eglGetCurrentDisplay(); // final EGLDisplay oldDisplay = EGL14.eglGetCurrentDisplay();
WorkerHandler.run(new Runnable() { WorkerHandler.execute(new Runnable() {
@Override @Override
public void run() { public void run() {
EglWindowSurface surface = new EglWindowSurface(core, mSurfaceTexture); EglWindowSurface surface = new EglWindowSurface(core, mSurfaceTexture);
@ -130,7 +107,7 @@ public class SnapshotPictureRecorder extends PictureRecorder {
// Apply scale and crop: // Apply scale and crop:
// NOTE: scaleX and scaleY are in REF_VIEW, while our input appears to be in REF_SENSOR. // NOTE: scaleX and scaleY are in REF_VIEW, while our input appears to be in REF_SENSOR.
boolean flip = mEngine1.flip(CameraEngine.REF_VIEW, CameraEngine.REF_SENSOR); boolean flip = mEngine.flip(CameraEngine.REF_VIEW, CameraEngine.REF_SENSOR);
float realScaleX = flip ? scaleY : scaleX; float realScaleX = flip ? scaleY : scaleX;
float realScaleY = flip ? scaleX : scaleY; float realScaleY = flip ? scaleX : scaleY;
float scaleTranslX = (1F - realScaleX) / 2F; float scaleTranslX = (1F - realScaleX) / 2F;
@ -178,58 +155,10 @@ public class SnapshotPictureRecorder extends PictureRecorder {
}); });
} }
private void takeLegacy() {
mCamera.setOneShotPreviewCallback(new Camera.PreviewCallback() {
@Override
public void onPreviewFrame(@NonNull final byte[] yuv, Camera camera) {
dispatchOnShutter(false);
// We have to rotate the preview frame, since the byte[] data here does not include
// the EXIF tags automatically set by the camera. So either we add EXIF, or we rotate.
// Adding EXIF to a byte array, unfortunately, is hard.
final int sensorToOutput = mResult.rotation;
final Size outputSize = mResult.size;
final Size previewStreamSize = mEngine1.getPreviewStreamSize(CameraEngine.REF_SENSOR);
if (previewStreamSize == null) {
throw new IllegalStateException("Preview stream size should never be null here.");
}
WorkerHandler.run(new Runnable() {
@Override
public void run() {
// Rotate the picture, because no one will write EXIF data,
// then crop if needed. In both cases, transform yuv to jpeg.
//noinspection deprecation
byte[] data = RotationHelper.rotate(yuv, previewStreamSize, sensorToOutput);
YuvImage yuvImage = new YuvImage(data, mFormat, outputSize.getWidth(), outputSize.getHeight(), null);
ByteArrayOutputStream stream = new ByteArrayOutputStream();
Rect outputRect = CropHelper.computeCrop(outputSize, mOutputRatio);
yuvImage.compressToJpeg(outputRect, 90, stream);
data = stream.toByteArray();
mResult.data = data;
mResult.size = new Size(outputRect.width(), outputRect.height());
mResult.rotation = 0;
mResult.format = PictureResult.FORMAT_JPEG;
dispatchResult();
}
});
// It seems that the buffers are already cleared here, so we need to allocate again.
camera.setPreviewCallbackWithBuffer(null); // Release anything left
camera.setPreviewCallbackWithBuffer(mEngine1); // Add ourselves
mEngine1.getFrameManager().setUp(ImageFormat.getBitsPerPixel(mFormat), previewStreamSize);
}
});
}
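
For reference, the YUV-to-JPEG step that the legacy path above performs can be reproduced with the framework's YuvImage class alone. A minimal sketch, assuming NV21 preview data and a precomputed crop rectangle (the class and method names are illustrative, not part of the library):

    import android.graphics.ImageFormat;
    import android.graphics.Rect;
    import android.graphics.YuvImage;
    import java.io.ByteArrayOutputStream;

    class YuvToJpegSketch {
        // Compress an NV21 preview frame to JPEG, cropped to cropRect.
        // Rotation is not handled here: as the comments above explain, the raw
        // preview byte[] carries no EXIF, so the pixels themselves must be
        // rotated (e.g. with RotationHelper) before this step.
        static byte[] toJpeg(byte[] nv21, int width, int height, Rect cropRect) {
            YuvImage image = new YuvImage(nv21, ImageFormat.NV21, width, height, null);
            ByteArrayOutputStream stream = new ByteArrayOutputStream();
            image.compressToJpeg(cropRect, 90, stream); // 90 = JPEG quality, as above
            return stream.toByteArray();
        }
    }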
@Override @Override
protected void dispatchResult() { protected void dispatchResult() {
mEngine1 = null; mEngine = null;
mCamera = null;
mOutputRatio = null; mOutputRatio = null;
mFormat = 0;
super.dispatchResult(); super.dispatchResult();
} }
} }

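The scale-and-crop note in the GL recorder above is dense; stated on its own, the idea is the following sketch, under the assumption that scaleX/scaleY describe the fraction of the texture that survives the view crop, expressed in REF_VIEW:

    class CropTransformSketch {
        // When REF_VIEW and REF_SENSOR are rotated 90/270 degrees apart, the two
        // scale factors swap axes. The (1 - scale) / 2 translation then centers
        // the cropped region, because GL scaling happens about the origin.
        static float[] viewCropToSensor(boolean flip, float scaleX, float scaleY) {
            float realScaleX = flip ? scaleY : scaleX;
            float realScaleY = flip ? scaleX : scaleY;
            float scaleTranslX = (1F - realScaleX) / 2F;
            float scaleTranslY = (1F - realScaleY) / 2F;
            return new float[] { realScaleX, realScaleY, scaleTranslX, scaleTranslY };
        }
    }
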
@ -32,7 +32,7 @@ import javax.microedition.khronos.opengles.GL10;
* *
* - The SurfaceTexture is linked to the Camera1Engine object. The camera will pass down buffers of data with * - The SurfaceTexture is linked to the Camera1Engine object. The camera will pass down buffers of data with
* a specified size (that is, the Camera1Engine preview size). For this reason we don't have to specify * a specified size (that is, the Camera1Engine preview size). For this reason we don't have to specify
* surfaceTexture.setDefaultBufferSize() (like we do, for example, in SnapshotPictureRecorder). * surfaceTexture.setDefaultBufferSize() (like we do, for example, in Snapshot1PictureRecorder).
* *
* - When SurfaceTexture.updateTexImage() is called, it will fetch the latest texture image from the * - When SurfaceTexture.updateTexImage() is called, it will fetch the latest texture image from the
* camera stream and assign it to the GL texture that was passed. * camera stream and assign it to the GL texture that was passed.

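As a rough illustration of the consumer side that comment describes, assuming a dedicated GL thread drives rendering (the listener and method names below are illustrative):

    import android.graphics.SurfaceTexture;

    class SurfaceTextureConsumerSketch implements SurfaceTexture.OnFrameAvailableListener {
        private final float[] mTransform = new float[16];
        private SurfaceTexture mSurfaceTexture; // attached to a GL_TEXTURE_EXTERNAL_OES texture

        @Override
        public void onFrameAvailable(SurfaceTexture surfaceTexture) {
            // Called on an arbitrary thread: just schedule a render pass.
        }

        // Called on the GL thread, once per render pass.
        void onDrawFrame() {
            mSurfaceTexture.updateTexImage();               // latch the newest camera buffer
            mSurfaceTexture.getTransformMatrix(mTransform); // texture coordinate transform
            // ... draw the external texture using mTransform ...
        }
    }
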
@ -46,9 +46,10 @@ public class SnapshotVideoRecorder extends VideoRecorder implements RendererFram
private int mDesiredState = STATE_NOT_RECORDING; private int mDesiredState = STATE_NOT_RECORDING;
private int mTextureId = 0; private int mTextureId = 0;
public SnapshotVideoRecorder(@NonNull VideoResult.Stub stub, @Nullable VideoResultListener listener, public SnapshotVideoRecorder(@NonNull VideoResult.Stub stub,
@NonNull CameraEngine engine, @NonNull GlCameraPreview preview) { @NonNull CameraEngine engine,
super(stub, listener); @NonNull GlCameraPreview preview) {
super(stub, engine);
mPreview = preview; mPreview = preview;
mPreview.addRendererFrameCallback(this); mPreview.addRendererFrameCallback(this);
mFlipped = engine.flip(CameraEngine.REF_SENSOR, CameraEngine.REF_VIEW); mFlipped = engine.flip(CameraEngine.REF_SENSOR, CameraEngine.REF_VIEW);
@ -74,6 +75,8 @@ public class SnapshotVideoRecorder extends VideoRecorder implements RendererFram
@Override @Override
public void onRendererFrame(@NonNull SurfaceTexture surfaceTexture, float scaleX, float scaleY) { public void onRendererFrame(@NonNull SurfaceTexture surfaceTexture, float scaleX, float scaleY) {
if (mCurrentState == STATE_NOT_RECORDING && mDesiredState == STATE_RECORDING) { if (mCurrentState == STATE_NOT_RECORDING && mDesiredState == STATE_RECORDING) {
LOG.i("Starting the encoder engine.");
// Set default options // Set default options
if (mResult.videoBitRate <= 0) mResult.videoBitRate = DEFAULT_VIDEO_BITRATE; if (mResult.videoBitRate <= 0) mResult.videoBitRate = DEFAULT_VIDEO_BITRATE;
if (mResult.videoFrameRate <= 0) mResult.videoFrameRate = DEFAULT_VIDEO_FRAMERATE; if (mResult.videoFrameRate <= 0) mResult.videoFrameRate = DEFAULT_VIDEO_FRAMERATE;
@ -118,6 +121,7 @@ public class SnapshotVideoRecorder extends VideoRecorder implements RendererFram
} }
if (mCurrentState == STATE_RECORDING) { if (mCurrentState == STATE_RECORDING) {
LOG.v("dispatching frame.");
TextureMediaEncoder textureEncoder = (TextureMediaEncoder) mEncoderEngine.getVideoEncoder(); TextureMediaEncoder textureEncoder = (TextureMediaEncoder) mEncoderEngine.getVideoEncoder();
TextureMediaEncoder.TextureFrame textureFrame = textureEncoder.acquireFrame(); TextureMediaEncoder.TextureFrame textureFrame = textureEncoder.acquireFrame();
textureFrame.timestamp = surfaceTexture.getTimestamp(); textureFrame.timestamp = surfaceTexture.getTimestamp();
@ -126,6 +130,7 @@ public class SnapshotVideoRecorder extends VideoRecorder implements RendererFram
} }
if (mCurrentState == STATE_RECORDING && mDesiredState == STATE_NOT_RECORDING) { if (mCurrentState == STATE_RECORDING && mDesiredState == STATE_NOT_RECORDING) {
LOG.i("Stopping the encoder engine.");
mCurrentState = STATE_NOT_RECORDING; // before nulling encoderEngine! mCurrentState = STATE_NOT_RECORDING; // before nulling encoderEngine!
mEncoderEngine.stop(); mEncoderEngine.stop();
mEncoderEngine = null; mEncoderEngine = null;
@ -146,9 +151,13 @@ public class SnapshotVideoRecorder extends VideoRecorder implements RendererFram
mError = e; mError = e;
} else { } else {
if (stopReason == MediaEncoderEngine.STOP_BY_MAX_DURATION) { if (stopReason == MediaEncoderEngine.STOP_BY_MAX_DURATION) {
LOG.i("onEncoderStop because of max duration.");
mResult.endReason = VideoResult.REASON_MAX_DURATION_REACHED; mResult.endReason = VideoResult.REASON_MAX_DURATION_REACHED;
} else if (stopReason == MediaEncoderEngine.STOP_BY_MAX_SIZE) { } else if (stopReason == MediaEncoderEngine.STOP_BY_MAX_SIZE) {
LOG.i("onEncoderStop because of max size.");
mResult.endReason = VideoResult.REASON_MAX_SIZE_REACHED; mResult.endReason = VideoResult.REASON_MAX_SIZE_REACHED;
} else {
LOG.i("onEncoderStop because of user.");
} }
} }
// Cleanup // Cleanup

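The recorder above coordinates start/stop through two state variables reconciled on the renderer thread. Reduced to a sketch (the real class also fills in bitrates, sizes and the encoder engine itself):

    class RecorderStateSketch {
        private static final int STATE_RECORDING = 0;
        private static final int STATE_NOT_RECORDING = 1;
        private int mCurrentState = STATE_NOT_RECORDING;
        private int mDesiredState = STATE_NOT_RECORDING;

        // start()/stop() can be called from any thread: they only flip the desire.
        void start() { mDesiredState = STATE_RECORDING; }
        void stop()  { mDesiredState = STATE_NOT_RECORDING; }

        // The renderer thread reconciles desire with reality once per frame, so
        // the encoder engine always starts and stops with a valid frame in hand.
        void onRendererFrame() {
            if (mCurrentState == STATE_NOT_RECORDING && mDesiredState == STATE_RECORDING) {
                // create and start the encoder engine, then:
                mCurrentState = STATE_RECORDING;
            }
            if (mCurrentState == STATE_RECORDING) {
                // dispatch the current frame to the video encoder
            }
            if (mCurrentState == STATE_RECORDING && mDesiredState == STATE_NOT_RECORDING) {
                mCurrentState = STATE_NOT_RECORDING; // before nulling the engine!
                // stop the encoder engine
            }
        }
    }
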
@ -59,6 +59,11 @@ abstract class MediaEncoder {
/** /**
* This encoder was attached to the engine. Keep the controller * This encoder was attached to the engine. Keep the controller
* and run the internal thread. * and run the internal thread.
*
* NOTE: it's important to call {@link WorkerHandler#post(Runnable)} instead of run()!
 * The internal actions can cause a stop/release, and due to how {@link WorkerHandler#run(Runnable)}
 * works, {@link #onStop()} or {@link #onRelease()} might execute before
 * the previous step has completed.
*/ */
final void prepare(@NonNull final MediaEncoderEngine.Controller controller, final long maxLengthMillis) { final void prepare(@NonNull final MediaEncoderEngine.Controller controller, final long maxLengthMillis) {
mController = controller; mController = controller;
@ -79,6 +84,8 @@ abstract class MediaEncoder {
* Start recording. This might be a lightweight operation * Start recording. This might be a lightweight operation
* in case the encoder needs to wait for a certain event * in case the encoder needs to wait for a certain event
* like a "frame available". * like a "frame available".
*
* NOTE: it's important to call {@link WorkerHandler#post(Runnable)} instead of run()!
*/ */
final void start() { final void start() {
LOG.i(getName(), "Start was called. Posting."); LOG.i(getName(), "Start was called. Posting.");
@ -94,6 +101,9 @@ abstract class MediaEncoder {
/** /**
* The caller notifying of a certain event occurring. * The caller notifying of a certain event occurring.
* Should analyze the string and see if the event is important. * Should analyze the string and see if the event is important.
*
* NOTE: it's important to call {@link WorkerHandler#post(Runnable)} instead of run()!
*
* @param event what happened * @param event what happened
* @param data object * @param data object
*/ */
@ -110,6 +120,8 @@ abstract class MediaEncoder {
/** /**
* Stop recording. * Stop recording.
*
* NOTE: it's important to call {@link WorkerHandler#post(Runnable)} instead of run()!
*/ */
final void stop() { final void stop() {
LOG.i(getName(), "Stop was called. Posting."); LOG.i(getName(), "Stop was called. Posting.");

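The ordering hazard these notes describe is easier to see in isolation. A hedged reconstruction, assuming run() executes inline when already on the worker thread while post() always enqueues (an assumption about WorkerHandler's behavior, not a quote of its implementation):

    import android.os.Handler;
    import android.os.HandlerThread;
    import android.os.Looper;

    class WorkerSketch {
        private final HandlerThread mThread = new HandlerThread("WorkerSketch");
        private final Handler mHandler;

        WorkerSketch() {
            mThread.start();
            mHandler = new Handler(mThread.getLooper());
        }

        // Always enqueues: if a running task triggers stop(), onStop() still
        // waits for the current step to finish before it runs.
        void post(Runnable action) {
            mHandler.post(action);
        }

        // Executes inline when already on the worker thread: a stop() triggered
        // mid-task would run onStop() before the enclosing step completes.
        void run(Runnable action) {
            if (Looper.myLooper() == mHandler.getLooper()) {
                action.run();
            } else {
                mHandler.post(action);
            }
        }
    }
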
@ -112,7 +112,8 @@ public class TextureMediaEncoder extends VideoMediaEncoder<TextureMediaEncoder.C
} }
mFrameNum++; mFrameNum++;
LOG.v("Incoming frame timestamp:", frame.timestamp); int thisFrameNum = mFrameNum;
LOG.v("onEvent", "frameNum:", thisFrameNum, "realFrameNum:", mFrameNum, "timestamp:", frame.timestamp);
// We must scale this matrix like GlCameraPreview does, because it might have some cropping. // We must scale this matrix like GlCameraPreview does, because it might have some cropping.
// Scaling takes place with respect to the (0, 0, 0) point, so we must apply a Translation to compensate. // Scaling takes place with respect to the (0, 0, 0) point, so we must apply a Translation to compensate.
float[] transform = frame.transform; float[] transform = frame.transform;
@ -132,9 +133,11 @@ public class TextureMediaEncoder extends VideoMediaEncoder<TextureMediaEncoder.C
Matrix.rotateM(transform, 0, mConfig.transformRotation, 0, 0, 1); Matrix.rotateM(transform, 0, mConfig.transformRotation, 0, 0, 1);
Matrix.translateM(transform, 0, -0.5F, -0.5F, 0); Matrix.translateM(transform, 0, -0.5F, -0.5F, 0);
LOG.v("onEvent", "frameNum:", thisFrameNum, "realFrameNum:", mFrameNum, "calling drainOutput.");
drainOutput(false); drainOutput(false);
// Future note: passing scale values to the viewport? They are scaleX and scaleY, // Future note: passing scale values to the viewport? They are scaleX and scaleY,
// but flipped based on the mConfig.scaleFlipped boolean. // but flipped based on the mConfig.scaleFlipped boolean.
LOG.v("onEvent", "frameNum:", thisFrameNum, "realFrameNum:", mFrameNum, "calling drawFrame.");
mViewport.drawFrame(mConfig.textureId, transform); mViewport.drawFrame(mConfig.textureId, transform);
mWindow.setPresentationTime(frame.timestamp); mWindow.setPresentationTime(frame.timestamp);
mWindow.swapBuffers(); mWindow.swapBuffers();

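The rotate/translate tail above exists because android.opengl.Matrix operations act about the origin. A sketch of the standard center-pivot sequence the comment describes (the leading +0.5 translation is the compensation it refers to):

    import android.opengl.Matrix;

    class CenterTransformSketch {
        // Scale and rotate a SurfaceTexture transform about the texture center.
        // Matrix ops post-multiply and act about (0, 0, 0), so the center is
        // moved to the origin, transformed, and moved back.
        static void scaleRotateAboutCenter(float[] transform, float scaleX, float scaleY, int rotation) {
            Matrix.translateM(transform, 0, 0.5F, 0.5F, 0);
            Matrix.scaleM(transform, 0, scaleX, scaleY, 1);
            Matrix.rotateM(transform, 0, rotation, 0, 0, 1);
            Matrix.translateM(transform, 0, -0.5F, -0.5F, 0);
        }
    }
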
@ -9,6 +9,8 @@ import androidx.annotation.RequiresApi;
import android.view.Surface; import android.view.Surface;
import com.otaliastudios.cameraview.CameraLogger;
import java.io.IOException; import java.io.IOException;
/** /**
@ -20,6 +22,9 @@ import java.io.IOException;
@RequiresApi(api = Build.VERSION_CODES.JELLY_BEAN_MR2) @RequiresApi(api = Build.VERSION_CODES.JELLY_BEAN_MR2)
abstract class VideoMediaEncoder<C extends VideoMediaEncoder.Config> extends MediaEncoder { abstract class VideoMediaEncoder<C extends VideoMediaEncoder.Config> extends MediaEncoder {
private static final String TAG = VideoMediaEncoder.class.getSimpleName();
private static final CameraLogger LOG = CameraLogger.create(TAG);
@SuppressWarnings("WeakerAccess") @SuppressWarnings("WeakerAccess")
protected C mConfig; protected C mConfig;
@ -67,7 +72,6 @@ abstract class VideoMediaEncoder<C extends VideoMediaEncoder.Config> extends Med
format.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface); format.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
format.setInteger(MediaFormat.KEY_BIT_RATE, mConfig.bitRate); format.setInteger(MediaFormat.KEY_BIT_RATE, mConfig.bitRate);
format.setInteger(MediaFormat.KEY_FRAME_RATE, mConfig.frameRate); format.setInteger(MediaFormat.KEY_FRAME_RATE, mConfig.frameRate);
format.setInteger(MediaFormat.KEY_FRAME_RATE, 6); // TODO
format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 2); format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 2);
format.setInteger("rotation-degrees", mConfig.rotation); format.setInteger("rotation-degrees", mConfig.rotation);
@ -93,6 +97,7 @@ abstract class VideoMediaEncoder<C extends VideoMediaEncoder.Config> extends Med
@EncoderThread @EncoderThread
@Override @Override
void onStop() { void onStop() {
LOG.i("onStop", "setting mFrameNum to 1 and signaling the end of input stream.");
mFrameNum = -1; mFrameNum = -1;
signalEndOfInputStream(); signalEndOfInputStream();
drainOutput(true); drainOutput(true);
