Capture the correct frame based on timestamp

Branch: pull/580/head
Author: Mattia Iavarone, 6 years ago
Parent: c405f4cd1a
Commit: a40c91bff9

Changed files:
1. cameraview/src/main/java/com/otaliastudios/cameraview/engine/Camera1Engine.java (2 changed lines)
2. cameraview/src/main/java/com/otaliastudios/cameraview/picture/Snapshot2PictureRecorder.java (85 changed lines)
3. cameraview/src/main/java/com/otaliastudios/cameraview/picture/SnapshotGlPictureRecorder.java (52 changed lines)
4. cameraview/src/main/java/com/otaliastudios/cameraview/preview/GlCameraPreview.java (3 changed lines)

cameraview/src/main/java/com/otaliastudios/cameraview/engine/Camera1Engine.java

@@ -311,7 +311,7 @@ public class Camera1Engine extends CameraEngine implements
         stub.rotation = getAngles().offset(Reference.SENSOR, Reference.OUTPUT, Axis.RELATIVE_TO_SENSOR); // Actually it will be rotated and set to 0.
         if (mPreview instanceof GlCameraPreview && Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT) {
-            mPictureRecorder = new SnapshotGlPictureRecorder(stub, this, (GlCameraPreview) mPreview, outputRatio, getOverlay());
+            mPictureRecorder = new SnapshotGlPictureRecorder(stub, this, (GlCameraPreview) mPreview, outputRatio);
         } else {
             mPictureRecorder = new Snapshot1PictureRecorder(stub, this, mCamera, outputRatio);
         }

cameraview/src/main/java/com/otaliastudios/cameraview/picture/Snapshot2PictureRecorder.java

@@ -1,11 +1,15 @@
 package com.otaliastudios.cameraview.picture;

+import android.graphics.SurfaceTexture;
 import android.hardware.camera2.CameraAccessException;
 import android.hardware.camera2.CameraCaptureSession;
 import android.hardware.camera2.CaptureRequest;
 import android.hardware.camera2.CaptureResult;
 import android.hardware.camera2.TotalCaptureResult;
+import android.opengl.EGL14;
+import android.opengl.EGLContext;
 import android.os.Build;
 import android.util.Log;

 import androidx.annotation.NonNull;
 import androidx.annotation.Nullable;
@@ -14,22 +18,24 @@ import androidx.annotation.RequiresApi;
 import com.otaliastudios.cameraview.CameraLogger;
 import com.otaliastudios.cameraview.PictureResult;
 import com.otaliastudios.cameraview.engine.Camera2Engine;
 import com.otaliastudios.cameraview.engine.CameraEngine;
 import com.otaliastudios.cameraview.overlay.Overlay;
 import com.otaliastudios.cameraview.filter.Filter;
 import com.otaliastudios.cameraview.preview.GlCameraPreview;
 import com.otaliastudios.cameraview.preview.RendererFrameCallback;
 import com.otaliastudios.cameraview.size.AspectRatio;

 @RequiresApi(Build.VERSION_CODES.LOLLIPOP)
-public class Snapshot2PictureRecorder extends SnapshotGlPictureRecorder {
+public class Snapshot2PictureRecorder extends SnapshotGlPictureRecorder implements RendererFrameCallback {

     private final static String TAG = Snapshot2PictureRecorder.class.getSimpleName();
     private final static CameraLogger LOG = CameraLogger.create(TAG);

     private final static int STATE_IDLE = 0;
     private final static int STATE_WAITING_CAPTURE = 1;
-    private final static int STATE_WAITING_IMAGE = 2;
+    private final static int STATE_WAITING_CORRECT_FRAME = 2;
+    private final static int STATE_WAITING_IMAGE = 3;

     private final Camera2Engine mEngine;
+    private final GlCameraPreview mPreview;
     private final CameraCaptureSession mSession;
     private final CameraCaptureSession.CaptureCallback mCallback;
     private final CaptureRequest.Builder mBuilder;
@@ -37,6 +43,12 @@ public class Snapshot2PictureRecorder extends SnapshotGlPictureRecorder {
     private Integer mOldCaptureIntent;
     private int mSequenceId;
+    private SurfaceTexture mLastFrameSurfaceTexture;
+    private float mLastFrameScaleX;
+    private float mLastFrameScaleY;
+    private EGLContext mLastFrameScaleEGLContext;
+    private Long mDesiredTimestamp = null;

     public Snapshot2PictureRecorder(@NonNull PictureResult.Stub stub,
                                     @NonNull Camera2Engine engine,
                                     @NonNull GlCameraPreview preview,
@@ -44,8 +56,9 @@ public class Snapshot2PictureRecorder extends SnapshotGlPictureRecorder {
                                     @NonNull CameraCaptureSession session,
                                     @NonNull CameraCaptureSession.CaptureCallback callback,
                                     @NonNull CaptureRequest.Builder builder) {
-        super(stub, engine, preview, outputRatio, engine.getOverlay());
+        super(stub, engine, preview, outputRatio);
         mEngine = engine;
+        mPreview = preview;
         mSession = session;
         mCallback = callback;
         mBuilder = builder;
@@ -53,10 +66,15 @@ public class Snapshot2PictureRecorder extends SnapshotGlPictureRecorder {
     @Override
     public void take() {
-        if (mEngine.getPictureSnapshotMetering()) {
-            try {
+        if (!mEngine.getPictureSnapshotMetering()) {
+            super.take();
+            return;
+        }
+        LOG.i("take:", "Engine does metering, adding our CONTROL_CAPTURE_INTENT.");
+        mPreview.addRendererFrameCallback(this);
+        mState = STATE_WAITING_CAPTURE;
+        try {
             mOldCaptureIntent = mBuilder.get(CaptureRequest.CONTROL_CAPTURE_INTENT);
             mBuilder.set(CaptureRequest.CONTROL_CAPTURE_INTENT, CaptureRequest.CONTROL_CAPTURE_INTENT_STILL_CAPTURE);
             mSequenceId = mSession.setRepeatingRequest(mBuilder.build(), mCallback, null);
@@ -66,9 +84,25 @@ public class Snapshot2PictureRecorder extends SnapshotGlPictureRecorder {
             mError = e;
             dispatchResult();
         }
-        } else {
-            super.take();
-        }
     }

+    @Override
+    public void onRendererTextureCreated(int textureId) {
+        super.onRendererTextureCreated(textureId);
+    }
+
+    @Override
+    public void onRendererFilterChanged(@NonNull Filter filter) {
+        super.onRendererFilterChanged(filter);
+    }
+
+    @Override
+    public void onRendererFrame(@NonNull SurfaceTexture surfaceTexture, float scaleX, float scaleY) {
+        mLastFrameSurfaceTexture = surfaceTexture;
+        mLastFrameScaleX = scaleX;
+        mLastFrameScaleY = scaleY;
+        mLastFrameScaleEGLContext = EGL14.eglGetCurrentContext();
+        maybeTakeFrame();
+    }
+
     public void onCaptureCompleted(@NonNull TotalCaptureResult result) {
@@ -81,13 +115,39 @@ public class Snapshot2PictureRecorder extends SnapshotGlPictureRecorder {
             LOG.w("onCaptureCompleted:",
                     "aeState:", result.get(CaptureResult.CONTROL_AE_STATE),
                     "flashState:", result.get(CaptureResult.FLASH_STATE));
-            mState = STATE_WAITING_IMAGE;
-            LOG.i("onCaptureCompleted:", "Got first result! Calling the GL recorder.");
-            super.take();
+            mState = STATE_WAITING_CORRECT_FRAME;
+            mDesiredTimestamp = result.get(CaptureResult.SENSOR_TIMESTAMP);
+            if (mDesiredTimestamp == null) mDesiredTimestamp = 0L;
+            LOG.i("onCaptureCompleted:", "Got timestamp:", mDesiredTimestamp);
+            maybeTakeFrame();
         }
     }
 }

+    private void maybeTakeFrame() {
+        if (mState != STATE_WAITING_CORRECT_FRAME) {
+            LOG.w("maybeTakeFrame:", "we're not waiting for a frame. Ignoring.", mState);
+            return;
+        }
+        if (mDesiredTimestamp == null || mLastFrameSurfaceTexture == null) {
+            LOG.w("maybeTakeFrame:", "either timestamp or surfaceTexture are null.", mDesiredTimestamp);
+            return;
+        }
+        long currentTimestamp = mLastFrameSurfaceTexture.getTimestamp();
+        if (currentTimestamp == mDesiredTimestamp) {
+            LOG.i("maybeTakeFrame:", "taking frame with exact timestamp:", currentTimestamp);
+            mState = STATE_WAITING_IMAGE;
+            takeFrame(mLastFrameSurfaceTexture, mLastFrameScaleX, mLastFrameScaleY, mLastFrameScaleEGLContext);
+        } else if (currentTimestamp > mDesiredTimestamp) {
+            LOG.w("maybeTakeFrame:", "taking frame with some delay. Flash might not be respected.");
+            mState = STATE_WAITING_IMAGE;
+            takeFrame(mLastFrameSurfaceTexture, mLastFrameScaleX, mLastFrameScaleY, mLastFrameScaleEGLContext);
+        } else {
+            LOG.i("maybeTakeFrame:", "Waiting...", mDesiredTimestamp - currentTimestamp);
+        }
+    }

     @Override
     protected void dispatchResult() {
         if (mState == STATE_WAITING_IMAGE) {
@@ -98,6 +158,7 @@ public class Snapshot2PictureRecorder extends SnapshotGlPictureRecorder {
                 mSession.setRepeatingRequest(mBuilder.build(), mCallback, null);
             } catch (CameraAccessException ignore) {}
         }
+        mPreview.removeRendererFrameCallback(this);
         mState = STATE_IDLE;
         super.dispatchResult();
     }

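Note on the logic above: onCaptureCompleted() stores the SENSOR_TIMESTAMP of the metered capture, and every renderer frame reported through onRendererFrame() is checked against it in maybeTakeFrame(); the snapshot is taken on the exact frame when possible, on a later frame as a fallback, and never on an earlier one. Below is a minimal standalone sketch of that matching rule; the class and method names (FrameTimestampMatcher, onCaptureCompleted, onRendererFrame) are illustrative only, not library API, and it assumes the SurfaceTexture and CaptureResult timestamps share the same nanosecond timebase, as this commit does.

    import android.graphics.SurfaceTexture;

    // Sketch of the frame/timestamp matching rule used above (illustrative, not library code).
    class FrameTimestampMatcher {

        private Long desiredTimestampNs;   // SENSOR_TIMESTAMP of the metered capture
        private boolean taken = false;

        // Called when the metered CaptureResult arrives.
        void onCaptureCompleted(Long sensorTimestampNs) {
            desiredTimestampNs = (sensorTimestampNs != null) ? sensorTimestampNs : 0L;
        }

        // Called for each rendered frame; returns true when this frame should be captured.
        boolean onRendererFrame(SurfaceTexture surfaceTexture) {
            if (taken || desiredTimestampNs == null) return false;
            long frameTimestampNs = surfaceTexture.getTimestamp();
            if (frameTimestampNs == desiredTimestampNs) {
                taken = true;              // exact frame: best case
                return true;
            }
            if (frameTimestampNs > desiredTimestampNs) {
                taken = true;              // late frame: acceptable fallback, flash may be missed
                return true;
            }
            return false;                  // early frame: keep waiting
        }
    }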
cameraview/src/main/java/com/otaliastudios/cameraview/picture/SnapshotGlPictureRecorder.java

@@ -31,7 +31,7 @@ import com.otaliastudios.cameraview.size.AspectRatio;
 import com.otaliastudios.cameraview.size.Size;

 import androidx.annotation.NonNull;
-import androidx.annotation.Nullable;
+import androidx.annotation.WorkerThread;

 import android.view.Surface;
@@ -75,14 +75,13 @@ public class SnapshotGlPictureRecorder extends PictureRecorder {
             @NonNull PictureResult.Stub stub,
             @NonNull CameraEngine engine,
             @NonNull GlCameraPreview preview,
-            @NonNull AspectRatio outputRatio,
-            @Nullable Overlay overlay) {
+            @NonNull AspectRatio outputRatio) {
         super(stub, engine);
         mEngine = engine;
         mPreview = preview;
         mOutputRatio = outputRatio;
-        mOverlay = overlay;
-        mHasOverlay = overlay != null && overlay.drawsOn(Overlay.Target.PICTURE_SNAPSHOT);
+        mOverlay = engine.getOverlay();
+        mHasOverlay = mOverlay != null && mOverlay.drawsOn(Overlay.Target.PICTURE_SNAPSHOT);
     }

     @TargetApi(Build.VERSION_CODES.KITKAT)
@@ -113,7 +112,7 @@ public class SnapshotGlPictureRecorder extends PictureRecorder {
     @RendererThread
     @TargetApi(Build.VERSION_CODES.KITKAT)
-    private void onRendererTextureCreated(int textureId) {
+    protected void onRendererTextureCreated(int textureId) {
         mTextureId = textureId;
         mViewport = new EglViewport();
         // Need to crop the size.
@@ -129,10 +128,30 @@ public class SnapshotGlPictureRecorder extends PictureRecorder {
     @RendererThread
     @TargetApi(Build.VERSION_CODES.KITKAT)
-    private void onRendererFilterChanged(@NonNull Filter filter) {
+    protected void onRendererFilterChanged(@NonNull Filter filter) {
         mViewport.setFilter(filter.copy());
     }

+    @RendererThread
+    @TargetApi(Build.VERSION_CODES.KITKAT)
+    protected void onRendererFrame(@SuppressWarnings("unused") @NonNull final SurfaceTexture surfaceTexture,
+                                   final float scaleX,
+                                   final float scaleY) {
+        // Get egl context from the RendererThread, which is the one in which we have created
+        // the textureId and the overlayTextureId, managed by the GlSurfaceView.
+        // Next operations can then be performed on different threads using this handle.
+        final EGLContext eglContext = EGL14.eglGetCurrentContext();
+        // Calling this invalidates the rotation/scale logic below:
+        // surfaceTexture.getTransformMatrix(mTransform); // TODO activate and fix the logic.
+        WorkerHandler.execute(new Runnable() {
+            @Override
+            public void run() {
+                takeFrame(surfaceTexture, scaleX, scaleY, eglContext);
+            }
+        });
+    }
+
     /**
      * The tricky part here is the EGL surface creation.
      *
@@ -156,20 +175,10 @@ public class SnapshotGlPictureRecorder extends PictureRecorder {
      * @param scaleX frame scale x in {@link Reference#VIEW}
      * @param scaleY frame scale y in {@link Reference#VIEW}
      */
-    @RendererThread
+    @WorkerThread
     @TargetApi(Build.VERSION_CODES.KITKAT)
-    private void onRendererFrame(@SuppressWarnings("unused") @NonNull SurfaceTexture surfaceTexture,
-                                 final float scaleX,
-                                 final float scaleY) {
-        // Get egl context from the RendererThread, which is the one in which we have created
-        // the textureId and the overlayTextureId, managed by the GlSurfaceView.
-        // Next operations can then be performed on different threads using this handle.
-        final EGLContext eglContext = EGL14.eglGetCurrentContext();
-        // Calling this invalidates the rotation/scale logic below:
-        // surfaceTexture.getTransformMatrix(mTransform); // TODO activate and fix the logic.
-        WorkerHandler.execute(new Runnable() {
-            @Override
-            public void run() {
+    protected void takeFrame(@NonNull SurfaceTexture surfaceTexture, float scaleX, float scaleY, @NonNull EGLContext eglContext) {
         // 0. EGL window will need an output.
         // We create a fake one as explained in javadocs.
         final int fakeOutputTextureId = 9999;
@@ -214,6 +223,7 @@ public class SnapshotGlPictureRecorder extends PictureRecorder {
         }

         // 5. Draw and save
+        LOG.i("takeFrame:", "timestamp:", surfaceTexture.getTimestamp());
         mViewport.drawFrame(mTextureId, mTransform);
         if (mHasOverlay) mOverlayDrawer.render();
         mResult.format = PictureResult.FORMAT_JPEG;
@@ -227,8 +237,6 @@ public class SnapshotGlPictureRecorder extends PictureRecorder {
         core.release();
         dispatchResult();
     }
-        });
-    }

     @Override
     protected void dispatchResult() {

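The refactor above turns the private renderer hooks into protected ones and splits the GL capture into a separate takeFrame() step, so a subclass can decide when a frame is actually captured (as Snapshot2PictureRecorder now does). A rough sketch of a hypothetical subclass using these hooks follows; DelayedSnapshotRecorder and its frames-to-skip behavior are invented for illustration and assume the constructor is accessible from the same package.

    package com.otaliastudios.cameraview.picture; // hypothetical, same package as the recorder

    import android.graphics.SurfaceTexture;

    import com.otaliastudios.cameraview.PictureResult;
    import com.otaliastudios.cameraview.engine.CameraEngine;
    import com.otaliastudios.cameraview.preview.GlCameraPreview;
    import com.otaliastudios.cameraview.size.AspectRatio;

    // Hypothetical subclass: lets a few renderer frames go by before capturing.
    class DelayedSnapshotRecorder extends SnapshotGlPictureRecorder {

        private int mFramesToSkip;

        DelayedSnapshotRecorder(PictureResult.Stub stub, CameraEngine engine,
                                GlCameraPreview preview, AspectRatio outputRatio,
                                int framesToSkip) {
            super(stub, engine, preview, outputRatio);
            mFramesToSkip = framesToSkip;
        }

        @Override
        protected void onRendererFrame(SurfaceTexture surfaceTexture, float scaleX, float scaleY) {
            if (mFramesToSkip-- > 0) return;                         // not this frame
            super.onRendererFrame(surfaceTexture, scaleX, scaleY);   // parent posts takeFrame() to a worker
        }
    }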
cameraview/src/main/java/com/otaliastudios/cameraview/preview/GlCameraPreview.java

@@ -2,6 +2,7 @@ package com.otaliastudios.cameraview.preview;
 import android.content.Context;
 import android.graphics.SurfaceTexture;
+import android.hardware.camera2.CaptureResult;
 import android.opengl.GLSurfaceView;
 import android.opengl.Matrix;

 import androidx.annotation.NonNull;
@@ -194,6 +195,8 @@ public class GlCameraPreview extends FilterCameraPreview<GLSurfaceView, SurfaceT
             return;
         }
         mInputSurfaceTexture.getTransformMatrix(mTransformMatrix);
+        LOG.v("onDrawFrame:", "timestamp:", mInputSurfaceTexture.getTimestamp());
+
         // For Camera2, apply the draw rotation.
         // See TextureCameraPreview.setDrawRotation() for info.