New rotation approach

pull/816/head
Mattia Iavarone 5 years ago
parent fa03baf27b
commit 10bb655ab1
  1. cameraview/src/main/java/com/otaliastudios/cameraview/engine/Camera1Engine.java (11 lines changed)
  2. cameraview/src/main/java/com/otaliastudios/cameraview/engine/Camera2Engine.java (21 lines changed)
  3. cameraview/src/main/java/com/otaliastudios/cameraview/picture/Snapshot2PictureRecorder.java (2 lines changed)
  4. cameraview/src/main/java/com/otaliastudios/cameraview/picture/SnapshotGlPictureRecorder.java (55 lines changed)
  5. cameraview/src/main/java/com/otaliastudios/cameraview/preview/CameraPreview.java (7 lines changed)
  6. cameraview/src/main/java/com/otaliastudios/cameraview/preview/GlCameraPreview.java (2 lines changed)
  7. cameraview/src/main/java/com/otaliastudios/cameraview/preview/RendererFrameCallback.java (6 lines changed)
  8. cameraview/src/main/java/com/otaliastudios/cameraview/video/SnapshotVideoRecorder.java (15 lines changed)
  9. cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/TextureMediaEncoder.java (4 lines changed)

@@ -209,6 +209,7 @@ public class Camera1Engine extends CameraBaseEngine implements
throw new IllegalStateException("previewStreamSize should not be null at this point.");
}
mPreview.setStreamSize(previewSize.getWidth(), previewSize.getHeight());
mPreview.setDrawRotation(0);
Camera.Parameters params = mCamera.getParameters();
// NV21 should be the default, but let's make sure, since YuvImage will only support this
@@ -357,11 +358,12 @@ public class Camera1Engine extends CameraBaseEngine implements
LOG.i("onTakePictureSnapshot:", "executing.");
// Not the real size: it will be cropped to match the view ratio
stub.size = getUncroppedSnapshotSize(Reference.OUTPUT);
// Actually it will be rotated and set to 0.
stub.rotation = getAngles().offset(Reference.SENSOR, Reference.OUTPUT, Axis.RELATIVE_TO_SENSOR);
if (mPreview instanceof RendererCameraPreview && Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT) {
mPictureRecorder = new SnapshotGlPictureRecorder(stub, this, (RendererCameraPreview) mPreview, outputRatio);
stub.rotation = getAngles().offset(Reference.VIEW, Reference.OUTPUT, Axis.ABSOLUTE);
mPictureRecorder = new SnapshotGlPictureRecorder(stub, this,
(RendererCameraPreview) mPreview, outputRatio, getOverlay());
} else {
stub.rotation = getAngles().offset(Reference.SENSOR, Reference.OUTPUT, Axis.RELATIVE_TO_SENSOR);
mPictureRecorder = new Snapshot1PictureRecorder(stub, this, mCamera, outputRatio);
}
mPictureRecorder.take();
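
The branch above captures the new picture-snapshot contract: the GL recorder now receives the VIEW-to-OUTPUT rotation, bakes it into the texture transform and dispatches a result whose rotation is 0 (see the takeFrame() hunk further down), while the legacy branch keeps the SENSOR-to-OUTPUT value on the stub. A minimal consumer-side sketch of what that difference means; the helper class below is hypothetical and not part of CameraView:

import android.graphics.Bitmap;
import android.graphics.Matrix;

// Hypothetical consumer helper, for illustration only.
// GL snapshots arrive with the pixels already rotated (reported rotation == 0);
// the legacy YUV/JPEG branch keeps a rotation value on the result, which a consumer
// decoding the bytes itself would typically still need to apply.
public final class SnapshotConsumer {
    public static Bitmap toUpright(Bitmap decoded, int reportedRotation) {
        if (reportedRotation == 0) return decoded;      // GL path: nothing to do
        Matrix m = new Matrix();
        m.postRotate(reportedRotation);                 // legacy path: apply the reported rotation
        return Bitmap.createBitmap(decoded, 0, 0,
                decoded.getWidth(), decoded.getHeight(), m, true);
    }
}
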
@@ -425,8 +427,7 @@ public class Camera1Engine extends CameraBaseEngine implements
LOG.i("onTakeVideoSnapshot", "rotation:", stub.rotation, "size:", stub.size);
// Start.
mVideoRecorder = new SnapshotVideoRecorder(Camera1Engine.this, glPreview,
getOverlay(), stub.rotation);
mVideoRecorder = new SnapshotVideoRecorder(Camera1Engine.this, glPreview, getOverlay());
mVideoRecorder.start(stub);
}

@@ -792,8 +792,7 @@ public class Camera2Engine extends CameraBaseEngine implements
// stub.size is not the real size: it will be cropped to the given ratio
// stub.rotation will be set to 0 - we rotate the texture instead.
stub.size = getUncroppedSnapshotSize(Reference.OUTPUT);
stub.rotation = getAngles().offset(Reference.SENSOR, Reference.OUTPUT,
Axis.RELATIVE_TO_SENSOR);
stub.rotation = getAngles().offset(Reference.VIEW, Reference.OUTPUT, Axis.ABSOLUTE);
mPictureRecorder = new Snapshot2PictureRecorder(stub, this,
(RendererCameraPreview) mPreview, outputRatio);
mPictureRecorder.take();
@@ -921,24 +920,10 @@ public class Camera2Engine extends CameraBaseEngine implements
Rect outputCrop = CropHelper.computeCrop(outputSize, outputRatio);
outputSize = new Size(outputCrop.width(), outputCrop.height());
stub.size = outputSize;
// Vertical: 0 (270-0-0)
// Left (unlocked): 270 (270-90-270)
// Right (unlocked): 90 (270-270-90)
// Upside down (unlocked): 180 (270-180-180)
// Left (locked): 270 (270-0-270)
// Right (locked): 90 (270-0-90)
// Upside down (locked): 180 (270-0-180)
// Unlike Camera1, the correct formula seems to be deviceOrientation,
// which means offset(Reference.BASE, Reference.OUTPUT, Axis.ABSOLUTE).
stub.rotation = getAngles().offset(Reference.BASE, Reference.OUTPUT, Axis.ABSOLUTE);
stub.rotation = getAngles().offset(Reference.VIEW, Reference.OUTPUT, Axis.ABSOLUTE);
stub.videoFrameRate = Math.round(mPreviewFrameRate);
LOG.i("onTakeVideoSnapshot", "rotation:", stub.rotation, "size:", stub.size);
// Start.
// The overlay rotation should always be VIEW-OUTPUT, just like Camera1Engine.
int overlayRotation = getAngles().offset(Reference.VIEW, Reference.OUTPUT, Axis.ABSOLUTE);
mVideoRecorder = new SnapshotVideoRecorder(this, glPreview, getOverlay(),
overlayRotation);
mVideoRecorder = new SnapshotVideoRecorder(this, glPreview, getOverlay());
mVideoRecorder.start(stub);
}

@@ -109,7 +109,7 @@ public class Snapshot2PictureRecorder extends SnapshotGlPictureRecorder {
@NonNull Camera2Engine engine,
@NonNull RendererCameraPreview preview,
@NonNull AspectRatio outputRatio) {
super(stub, engine, preview, outputRatio);
super(stub, engine, preview, outputRatio, engine.getOverlay());
mHolder = engine;
mAction = Actions.sequence(

@@ -12,9 +12,6 @@ import android.os.Build;
import com.otaliastudios.cameraview.PictureResult;
import com.otaliastudios.cameraview.internal.GlTextureDrawer;
import com.otaliastudios.cameraview.overlay.Overlay;
import com.otaliastudios.cameraview.controls.Facing;
import com.otaliastudios.cameraview.engine.CameraEngine;
import com.otaliastudios.cameraview.engine.offset.Axis;
import com.otaliastudios.cameraview.engine.offset.Reference;
import com.otaliastudios.cameraview.internal.CropHelper;
import com.otaliastudios.cameraview.internal.WorkerHandler;
@@ -30,6 +27,7 @@ import com.otaliastudios.opengl.surface.EglSurface;
import com.otaliastudios.opengl.surface.EglWindowSurface;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.annotation.WorkerThread;
import android.view.Surface;
@@ -55,7 +53,6 @@ import android.view.Surface;
*/
public class SnapshotGlPictureRecorder extends SnapshotPictureRecorder {
private CameraEngine mEngine;
private RendererCameraPreview mPreview;
private AspectRatio mOutputRatio;
@@ -66,14 +63,14 @@ public class SnapshotGlPictureRecorder extends SnapshotPictureRecorder {
public SnapshotGlPictureRecorder(
@NonNull PictureResult.Stub stub,
@NonNull CameraEngine engine,
@Nullable PictureResultListener listener,
@NonNull RendererCameraPreview preview,
@NonNull AspectRatio outputRatio) {
super(stub, engine);
mEngine = engine;
@NonNull AspectRatio outputRatio,
@Nullable Overlay overlay) {
super(stub, listener);
mPreview = preview;
mOutputRatio = outputRatio;
mOverlay = engine.getOverlay();
mOverlay = overlay;
mHasOverlay = mOverlay != null && mOverlay.drawsOn(Overlay.Target.PICTURE_SNAPSHOT);
}
@@ -96,10 +93,10 @@ public class SnapshotGlPictureRecorder extends SnapshotPictureRecorder {
@RendererThread
@Override
public void onRendererFrame(@NonNull SurfaceTexture surfaceTexture,
final float scaleX,
final float scaleY) {
int rotation, float scaleX, float scaleY) {
mPreview.removeRendererFrameCallback(this);
SnapshotGlPictureRecorder.this.onRendererFrame(surfaceTexture, scaleX, scaleY);
SnapshotGlPictureRecorder.this.onRendererFrame(surfaceTexture,
rotation, scaleX, scaleY);
}
});
@@ -129,18 +126,17 @@ public class SnapshotGlPictureRecorder extends SnapshotPictureRecorder {
@RendererThread
@TargetApi(Build.VERSION_CODES.KITKAT)
protected void onRendererFrame(@SuppressWarnings("unused") @NonNull final SurfaceTexture surfaceTexture,
final int rotation,
final float scaleX,
final float scaleY) {
// Get egl context from the RendererThread, which is the one in which we have created
// the textureId and the overlayTextureId, managed by the GlSurfaceView.
// Next operations can then be performed on different threads using this handle.
final EGLContext eglContext = EGL14.eglGetCurrentContext();
// Calling this invalidates the rotation/scale logic below:
// surfaceTexture.getTransformMatrix(mTransform); // TODO activate and fix the logic.
WorkerHandler.execute(new Runnable() {
@Override
public void run() {
takeFrame(surfaceTexture, scaleX, scaleY, eglContext);
takeFrame(surfaceTexture, rotation, scaleX, scaleY, eglContext);
}
});
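
The comment above is the key to this hunk: the EGL context handle is captured on the renderer thread, and the heavy work (building a surface, drawing, reading pixels back) moves to a worker thread that shares that context. A condensed sketch of the hand-off, using a plain single-thread executor in place of CameraView's WorkerHandler (assumed equivalent for this purpose):

import android.opengl.EGL14;
import android.opengl.EGLContext;
import java.util.concurrent.Executor;
import java.util.concurrent.Executors;

// Sketch of the renderer-thread to worker-thread hand-off; assumptions noted inline.
public final class EglHandOffSketch {
    private final Executor worker = Executors.newSingleThreadExecutor();

    // Must run on the GL/renderer thread, where an EGL context is current.
    public void onFrameAvailableOnRendererThread() {
        final EGLContext shared = EGL14.eglGetCurrentContext();
        worker.execute(new Runnable() {
            @Override
            public void run() {
                // Here one would create an offscreen EGL surface that shares "shared",
                // make it current on this thread, draw the texture and read pixels back.
                // The shared context must never be current on two threads at once.
            }
        });
    }
}
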
@@ -173,6 +169,7 @@ public class SnapshotGlPictureRecorder extends SnapshotPictureRecorder {
@WorkerThread
@TargetApi(Build.VERSION_CODES.KITKAT)
protected void takeFrame(@NonNull SurfaceTexture surfaceTexture,
int rotation,
float scaleX,
float scaleY,
@NonNull EGLContext eglContext) {
@@ -189,22 +186,18 @@ public class SnapshotGlPictureRecorder extends SnapshotPictureRecorder {
eglSurface.makeCurrent();
final float[] transform = mTextureDrawer.getTextureTransform();
// 2. Apply scale and crop
boolean flip = mEngine.getAngles().flip(Reference.VIEW, Reference.SENSOR);
float realScaleX = flip ? scaleY : scaleX;
float realScaleY = flip ? scaleX : scaleY;
float scaleTranslX = (1F - realScaleX) / 2F;
float scaleTranslY = (1F - realScaleY) / 2F;
// 2. Apply preview transformations
surfaceTexture.getTransformMatrix(transform);
float scaleTranslX = (1F - scaleX) / 2F;
float scaleTranslY = (1F - scaleY) / 2F;
Matrix.translateM(transform, 0, scaleTranslX, scaleTranslY, 0);
Matrix.scaleM(transform, 0, realScaleX, realScaleY, 1);
Matrix.scaleM(transform, 0, scaleX, scaleY, 1);
// 3. Apply rotation and flip
// If this doesn't work, rotate "rotation" before scaling, like GlCameraPreview does.
Matrix.translateM(transform, 0, 0.5F, 0.5F, 0); // Go back to 0,0
Matrix.rotateM(transform, 0, -mResult.rotation, 0, 0, 1); // Rotate (not sure why we need the minus)
mResult.rotation = 0;
if (mResult.facing == Facing.FRONT) { // 5. Flip horizontally for front camera
Matrix.scaleM(transform, 0, -1, 1, 1);
}
Matrix.rotateM(transform, 0, rotation + mResult.rotation, 0, 0, 1); // Rotate to OUTPUT
Matrix.scaleM(transform, 0, 1, -1, 1); // Vertical flip because we'll use glReadPixels
Matrix.translateM(transform, 0, -0.5F, -0.5F, 0); // Go back to old position
// 4. Do pretty much the same for overlays
@@ -213,13 +206,12 @@ public class SnapshotGlPictureRecorder extends SnapshotPictureRecorder {
mOverlayDrawer.draw(Overlay.Target.PICTURE_SNAPSHOT);
// 2. Then we can apply the transformations
int rotation = mEngine.getAngles().offset(Reference.VIEW, Reference.OUTPUT, Axis.ABSOLUTE);
Matrix.translateM(mOverlayDrawer.getTransform(), 0, 0.5F, 0.5F, 0);
Matrix.rotateM(mOverlayDrawer.getTransform(), 0, rotation, 0, 0, 1);
// No need to flip the x axis for front camera, but need to flip the y axis always.
Matrix.scaleM(mOverlayDrawer.getTransform(), 0, 1, -1, 1);
Matrix.rotateM(mOverlayDrawer.getTransform(), 0, mResult.rotation, 0, 0, 1);
Matrix.scaleM(mOverlayDrawer.getTransform(), 0, 1, -1, 1); // Vertical flip because we'll use glReadPixels
Matrix.translateM(mOverlayDrawer.getTransform(), 0, -0.5F, -0.5F, 0);
}
mResult.rotation = 0;
// 5. Draw and save
long timestampUs = surfaceTexture.getTimestamp() / 1000L;
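
Both transform hunks above use the same matrix idiom: translate the pivot to the center of texture coordinates, rotate around Z (plus a vertical flip because glReadPixels returns rows bottom-up), then translate back, since android.opengl.Matrix rotates around the origin. A standalone sketch of that center-pivot pattern; the angle and flip arguments are examples, not values taken from this diff:

import android.opengl.Matrix;

// Self-contained illustration of the translate-rotate-translate idiom used in takeFrame().
public final class CenterPivotTransform {
    public static float[] build(int rotationDegrees, boolean flipVertically) {
        float[] transform = new float[16];
        Matrix.setIdentityM(transform, 0);
        Matrix.translateM(transform, 0, 0.5F, 0.5F, 0);          // move the pivot to the texture center
        Matrix.rotateM(transform, 0, rotationDegrees, 0, 0, 1);  // rotate around the Z axis
        if (flipVertically) {
            Matrix.scaleM(transform, 0, 1, -1, 1);               // glReadPixels reads bottom-up
        }
        Matrix.translateM(transform, 0, -0.5F, -0.5F, 0);        // restore the pivot
        return transform;
    }
}
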
@@ -239,7 +231,6 @@ public class SnapshotGlPictureRecorder extends SnapshotPictureRecorder {
@Override
protected void dispatchResult() {
mEngine = null;
mOutputRatio = null;
super.dispatchResult();
}

@@ -18,6 +18,7 @@ import com.google.android.gms.tasks.TaskCompletionSource;
import com.google.android.gms.tasks.Tasks;
import com.otaliastudios.cameraview.CameraLogger;
import com.otaliastudios.cameraview.engine.CameraEngine;
import com.otaliastudios.cameraview.engine.offset.Reference;
import com.otaliastudios.cameraview.size.Size;
/**
@@ -331,11 +332,15 @@ public abstract class CameraPreview<T extends View, Output> {
* Sometimes we don't need this:
* - In Camera1, the buffer producer sets our Surface size and rotates it based on the value
* that we pass to {@link android.hardware.Camera.Parameters#setDisplayOrientation(int)},
* so the stream that comes in is already rotated.
* so the stream that comes in is already rotated (if we apply SurfaceTexture transform).
* - In Camera2, for {@link android.view.SurfaceView} based previews, apparently it just works
* out of the box. The producer might be doing something similar.
*
* But in all the other Camera2 cases, we need to apply this rotation when drawing the surface.
* It seems that Camera1 can correctly rotate the stream/transform to {@link Reference#VIEW},
* while Camera2, which does not have any rotation API, will only rotate to {@link Reference#BASE}.
* That's why in Camera2 this angle is set as the offset between BASE and VIEW.
*
* @param drawRotation the rotation in degrees
*/
public void setDrawRotation(int drawRotation) {
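
Since Camera2 only delivers the stream rotated to the BASE reference, the missing piece is the offset between BASE and VIEW, which the preview applies while drawing. A hedged fragment of what the engine side could look like; it is not an excerpt from Camera2Engine and the method name is made up, but the getAngles()/offset() call reuses the same Reference and Axis names seen elsewhere in this diff:

// Illustrative fragment only: Camera2 streams arrive rotated to Reference.BASE,
// so the preview itself must add the BASE -> VIEW offset when drawing.
private void applyCamera2DrawRotation() {
    mPreview.setDrawRotation(getAngles().offset(Reference.BASE, Reference.VIEW, Axis.ABSOLUTE));
}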

@@ -233,7 +233,7 @@ public class GlCameraPreview extends CameraPreview<GLSurfaceView, SurfaceTexture
mOutputTextureDrawer.draw(mInputSurfaceTexture.getTimestamp() / 1000L);
for (RendererFrameCallback callback : mRendererFrameCallbacks) {
callback.onRendererFrame(mInputSurfaceTexture, mCropScaleX, mCropScaleY);
callback.onRendererFrame(mInputSurfaceTexture, mDrawRotation, mCropScaleX, mCropScaleY);
}
}
}

@@ -24,17 +24,17 @@ public interface RendererFrameCallback {
* Called on the renderer thread after each frame was drawn.
* You are not supposed to hold onto this thread for too long, because,
* well, it is the rendering thread.
*
* @param surfaceTexture the texture to read the transformation from
* @param rotation the rotation (to reach REF_VIEW)
* @param scaleX the scaleX (in REF_VIEW) value
* @param scaleY the scaleY (in REF_VIEW) value
*/
@RendererThread
void onRendererFrame(@NonNull SurfaceTexture surfaceTexture, float scaleX, float scaleY);
void onRendererFrame(@NonNull SurfaceTexture surfaceTexture, int rotation, float scaleX, float scaleY);
/**
* Called when the renderer filter changes. This is guaranteed to be called at least once
* before the first {@link #onRendererFrame(SurfaceTexture, float, float)}.
* before the first {@link #onRendererFrame(SurfaceTexture, int, float, float)}.
*
* @param filter the new filter
*/
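
With the new signature, every RendererFrameCallback receives the draw rotation next to the crop scales. A minimal logging implementation follows, assuming the interface's other two callbacks (texture created, filter changed) keep their existing shape; the class itself is hypothetical:

import android.graphics.SurfaceTexture;
import android.util.Log;

import androidx.annotation.NonNull;

import com.otaliastudios.cameraview.filter.Filter;
import com.otaliastudios.cameraview.preview.RendererFrameCallback;

// Hypothetical implementation, shown only to illustrate the new parameter.
public class LoggingRendererFrameCallback implements RendererFrameCallback {

    @Override
    public void onRendererTextureCreated(int textureId) {
        Log.d("Renderer", "texture created: " + textureId);
    }

    @Override
    public void onRendererFrame(@NonNull SurfaceTexture surfaceTexture,
                                int rotation, float scaleX, float scaleY) {
        // rotation: degrees still needed to reach the VIEW reference.
        // scaleX/scaleY: crop scales, already in the VIEW reference.
        Log.d("Renderer", "frame: rotation=" + rotation
                + " scaleX=" + scaleX + " scaleY=" + scaleY);
    }

    @Override
    public void onRendererFilterChanged(@NonNull Filter filter) {
        Log.d("Renderer", "filter changed: " + filter.getClass().getSimpleName());
    }
}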

@@ -61,19 +61,16 @@ public class SnapshotVideoRecorder extends VideoRecorder implements RendererFram
private Overlay mOverlay;
private OverlayDrawer mOverlayDrawer;
private boolean mHasOverlay;
private int mOverlayRotation;
private Filter mCurrentFilter;
public SnapshotVideoRecorder(@NonNull CameraEngine engine,
@NonNull RendererCameraPreview preview,
@Nullable Overlay overlay,
int overlayRotation) {
@Nullable Overlay overlay) {
super(engine);
mPreview = preview;
mOverlay = overlay;
mHasOverlay = overlay != null && overlay.drawsOn(Overlay.Target.VIDEO_SNAPSHOT);
mOverlayRotation = overlayRotation;
}
@Override
@@ -126,9 +123,8 @@ public class SnapshotVideoRecorder extends VideoRecorder implements RendererFram
@RendererThread
@Override
public void onRendererFrame(@NonNull SurfaceTexture surfaceTexture,
float scaleX,
float scaleY) {
public void onRendererFrame(@NonNull SurfaceTexture surfaceTexture, int rotation,
float scaleX, float scaleY) {
if (mCurrentState == STATE_NOT_RECORDING && mDesiredState == STATE_RECORDING) {
LOG.i("Starting the encoder engine.");
@@ -219,7 +215,7 @@ public class SnapshotVideoRecorder extends VideoRecorder implements RendererFram
videoConfig.height = mResult.size.getHeight();
videoConfig.bitRate = mResult.videoBitRate;
videoConfig.frameRate = mResult.videoFrameRate;
videoConfig.rotation = mResult.rotation;
videoConfig.rotation = rotation + mResult.rotation;
videoConfig.mimeType = videoType;
videoConfig.encoder = deviceEncoders.getVideoEncoder();
videoConfig.textureId = mTextureId;
@@ -232,7 +228,8 @@ public class SnapshotVideoRecorder extends VideoRecorder implements RendererFram
if (mHasOverlay) {
videoConfig.overlayTarget = Overlay.Target.VIDEO_SNAPSHOT;
videoConfig.overlayDrawer = mOverlayDrawer;
videoConfig.overlayRotation = mOverlayRotation;
videoConfig.overlayRotation = mResult.rotation;
// ^ no "rotation" here! Overlays are already in VIEW ref.
}
TextureMediaEncoder videoEncoder = new TextureMediaEncoder(videoConfig);
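
The last two hunks state the rotation rule for video snapshots: the encoder must add whatever rotation the renderer still needs to reach VIEW (the draw rotation) on top of the VIEW-to-OUTPUT rotation carried by the result, while overlays are drawn directly in VIEW and only need the latter. A standalone sketch of that split; the concrete angle values are hypothetical:

// Self-contained illustration of the rotation split above; angle values are made up.
public final class VideoSnapshotRotationSketch {
    public static void main(String[] args) {
        int drawRotation = 90;    // assumed: rotation passed in by the renderer (towards VIEW)
        int resultRotation = 180; // assumed: mResult.rotation, the VIEW -> OUTPUT offset

        // Camera frames: both offsets are combined (normalized here only for printing).
        int videoRotation = (drawRotation + resultRotation) % 360;

        // Overlays: drawn in VIEW already, so only the result rotation applies.
        int overlayRotation = resultRotation;

        System.out.println("videoConfig.rotation = " + videoRotation
                + ", videoConfig.overlayRotation = " + overlayRotation);
    }
}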

@@ -218,8 +218,8 @@ public class TextureMediaEncoder extends VideoMediaEncoder<TextureConfig> {
mConfig.overlayDrawer.draw(mConfig.overlayTarget);
Matrix.translateM(mConfig.overlayDrawer.getTransform(),
0, 0.5F, 0.5F, 0);
Matrix.rotateM(mConfig.overlayDrawer.getTransform(), 0, mConfig.overlayRotation,
0, 0, 1);
Matrix.rotateM(mConfig.overlayDrawer.getTransform(),
0, mConfig.overlayRotation, 0, 0, 1);
Matrix.translateM(mConfig.overlayDrawer.getTransform(),
0, -0.5F, -0.5F, 0);
}
