From 66ad422a19f4d216486942f969fdc381c81c091b Mon Sep 17 00:00:00 2001
From: Suneet Agrawal
Date: Mon, 5 Aug 2019 17:24:43 +0530
Subject: [PATCH] changed filter implementation for image and video

---
 .../cameraview/internal/egl/EglViewport.java  |   2 +-
 .../picture/SnapshotGlPictureRecorder.java    |  11 +-
 .../cameraview/preview/GlCameraPreview.java   |   7 +-
 .../preview/RendererFrameCallback.java        |   8 +
 .../video/SnapshotVideoRecorder.java          |  18 +++
 .../video/encoding/TextureMediaEncoder.java   | 153 +++++++++---------
 6 files changed, 119 insertions(+), 80 deletions(-)

diff --git a/cameraview/src/main/java/com/otaliastudios/cameraview/internal/egl/EglViewport.java b/cameraview/src/main/java/com/otaliastudios/cameraview/internal/egl/EglViewport.java
index 300bc80a..d1be7c14 100644
--- a/cameraview/src/main/java/com/otaliastudios/cameraview/internal/egl/EglViewport.java
+++ b/cameraview/src/main/java/com/otaliastudios/cameraview/internal/egl/EglViewport.java
@@ -112,7 +112,7 @@ public class EglViewport extends EglElement {
         return texId;
     }
 
-    public void changeShaderEffect(@NonNull Filter shaderEffect){
+    public void changeShaderFilter(@NonNull Filter shaderEffect){
         this.mShaderEffect = shaderEffect;
         mIsShaderChanged = true;
     }
diff --git a/cameraview/src/main/java/com/otaliastudios/cameraview/picture/SnapshotGlPictureRecorder.java b/cameraview/src/main/java/com/otaliastudios/cameraview/picture/SnapshotGlPictureRecorder.java
index 2bd95048..9e88c955 100644
--- a/cameraview/src/main/java/com/otaliastudios/cameraview/picture/SnapshotGlPictureRecorder.java
+++ b/cameraview/src/main/java/com/otaliastudios/cameraview/picture/SnapshotGlPictureRecorder.java
@@ -2,9 +2,6 @@ package com.otaliastudios.cameraview.picture;
 
 import android.annotation.TargetApi;
 import android.graphics.Bitmap;
-import android.graphics.Canvas;
-import android.graphics.Color;
-import android.graphics.PorterDuff;
 import android.graphics.Rect;
 import android.graphics.SurfaceTexture;
 import android.opengl.EGL14;
@@ -14,7 +11,6 @@ import android.os.Build;
 
 import com.otaliastudios.cameraview.CameraLogger;
 import com.otaliastudios.cameraview.PictureResult;
-import com.otaliastudios.cameraview.internal.Issue514Workaround;
 import com.otaliastudios.cameraview.internal.egl.EglBaseSurface;
 import com.otaliastudios.cameraview.overlay.Overlay;
 import com.otaliastudios.cameraview.controls.Facing;
@@ -105,6 +101,12 @@ public class SnapshotGlPictureRecorder extends PictureRecorder {
                 mPreview.removeRendererFrameCallback(this);
                 SnapshotGlPictureRecorder.this.onRendererFrame(surfaceTexture, scaleX, scaleY, shaderEffect);
             }
+
+            @Override
+            public void
+            onFilterChanged(@NonNull Filter filter) {
+                mViewport.changeShaderFilter(filter);
+            }
         });
     }
 
@@ -150,7 +152,6 @@ public class SnapshotGlPictureRecorder extends PictureRecorder {
     @RendererThread
     @TargetApi(Build.VERSION_CODES.KITKAT)
     private void onRendererFrame(final @NonNull SurfaceTexture surfaceTexture, final float scaleX, final float scaleY, @NonNull Filter filter) {
-        mViewport.changeShaderEffect(filter);
        // Get egl context from the RendererThread, which is the one in which we have created
        // the textureId and the overlayTextureId, managed by the GlSurfaceView.
        // Next operations can then be performed on different threads using this handle.
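
Note on the EglViewport change above: changeShaderFilter() (renamed from changeShaderEffect()) does not touch any GL state itself. It only stores the new Filter and raises a flag, and the GL program is swapped lazily on the drawing thread the next time the viewport draws, which is why SnapshotGlPictureRecorder can call it from its onFilterChanged() callback. The sketch below illustrates that latch pattern in isolation; it is not the actual EglViewport code: the class name, the consumePendingFilter() helper and the synchronization are illustrative, and only the Filter type is taken from the patch (import omitted).

    // Illustrative sketch of the latch behind changeShaderFilter(); not part of the patch.
    final class PendingFilterLatch {

        private Filter mPendingFilter;
        private boolean mIsChanged;

        // Called from whatever thread requests the filter change (UI thread, callbacks, ...).
        synchronized void changeShaderFilter(Filter filter) {
            mPendingFilter = filter;
            mIsChanged = true;
        }

        // Called on the GL thread just before drawing; returns the new filter exactly once.
        synchronized Filter consumePendingFilter() {
            if (!mIsChanged) return null;
            mIsChanged = false;
            return mPendingFilter;
        }
    }
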
diff --git a/cameraview/src/main/java/com/otaliastudios/cameraview/preview/GlCameraPreview.java b/cameraview/src/main/java/com/otaliastudios/cameraview/preview/GlCameraPreview.java
index 1afa4faa..28142c89 100644
--- a/cameraview/src/main/java/com/otaliastudios/cameraview/preview/GlCameraPreview.java
+++ b/cameraview/src/main/java/com/otaliastudios/cameraview/preview/GlCameraPreview.java
@@ -284,6 +284,7 @@ public class GlCameraPreview extends CameraPreview {
     private static final CameraLogger LOG = CameraLogger.create(TAG);
 
     public final static String FRAME_EVENT = "frame";
+    public final static String FILTER_EVENT = "filter";
 
     private int mTransformRotation;
     private EglCore mEglCore;
@@ -132,80 +134,85 @@ public class TextureMediaEncoder extends VideoMediaEncoder {
 
     @EncoderThread
     @Override
     protected void onEvent(@NonNull String event, @Nullable Object data) {
-        if (!event.equals(FRAME_EVENT)) return;
-        Frame frame = (Frame) data;
-        if (frame == null) {
-            throw new IllegalArgumentException("Got null frame for FRAME_EVENT.");
-        }
-        if (!shouldRenderFrame(frame.timestampUs())) {
-            mFramePool.recycle(frame);
-            return;
-        }
-        // Notify we're got the first frame and its absolute time.
-        if (mFrameNumber == 1) {
-            notifyFirstFrameMillis(frame.timestampMillis);
-        }
+        if (event.equals(FILTER_EVENT)) {
+            Filter filter = (Filter) data;
+            mViewport.changeShaderFilter(filter);
+        } else if (event.equals(FRAME_EVENT)) {
+            Frame frame = (Frame) data;
+            if (frame == null) {
+                throw new IllegalArgumentException("Got null frame for FRAME_EVENT.");
+            }
+            if (!shouldRenderFrame(frame.timestampUs())) {
+                mFramePool.recycle(frame);
+                return;
+            }
 
-        // Notify we have reached the max length value.
-        if (mFirstTimeUs == Long.MIN_VALUE) mFirstTimeUs = frame.timestampUs();
-        if (!hasReachedMaxLength()) {
-            boolean didReachMaxLength = (frame.timestampUs() - mFirstTimeUs) > getMaxLengthMillis() * 1000L;
-            if (didReachMaxLength) {
-                LOG.w("onEvent -",
-                        "frameNumber:", mFrameNumber,
-                        "timestampUs:", frame.timestampUs(),
-                        "firstTimeUs:", mFirstTimeUs,
-                        "- reached max length! deltaUs:", frame.timestampUs() - mFirstTimeUs);
-                notifyMaxLengthReached();
+            // Notify we're got the first frame and its absolute time.
+            if (mFrameNumber == 1) {
+                notifyFirstFrameMillis(frame.timestampMillis);
             }
-        }
 
-        // First, drain any previous data.
-        LOG.i("onEvent -",
-                "frameNumber:", mFrameNumber,
-                "timestampUs:", frame.timestampUs(),
-                "- draining.");
-        drainOutput(false);
-
-        // Then draw on the surface.
-        LOG.i("onEvent -",
-                "frameNumber:", mFrameNumber,
-                "timestampUs:", frame.timestampUs(),
-                "- rendering.");
-
-        // 1. We must scale this matrix like GlCameraPreview does, because it might have some cropping.
-        // Scaling takes place with respect to the (0, 0, 0) point, so we must apply a Translation to compensate.
-        float[] transform = frame.transform;
-        float scaleX = mConfig.scaleX;
-        float scaleY = mConfig.scaleY;
-        float scaleTranslX = (1F - scaleX) / 2F;
-        float scaleTranslY = (1F - scaleY) / 2F;
-        Matrix.translateM(transform, 0, scaleTranslX, scaleTranslY, 0);
-        Matrix.scaleM(transform, 0, scaleX, scaleY, 1);
-
-        // 2. We also must rotate this matrix. In GlCameraPreview it is not needed because it is a live
-        // stream, but the output video, must be correctly rotated based on the device rotation at the moment.
-        // Rotation also takes place with respect to the origin (the Z axis), so we must
-        // translate to origin, rotate, then back to where we were.
-        Matrix.translateM(transform, 0, 0.5F, 0.5F, 0);
-        Matrix.rotateM(transform, 0, mTransformRotation, 0, 0, 1);
-        Matrix.translateM(transform, 0, -0.5F, -0.5F, 0);
-
-        // 3. Do the same for overlays with their own rotation.
-        if (mConfig.hasOverlay()) {
-            mConfig.overlayDrawer.draw(mConfig.overlayTarget);
-            Matrix.translateM(mConfig.overlayDrawer.getTransform(), 0, 0.5F, 0.5F, 0);
-            Matrix.rotateM(mConfig.overlayDrawer.getTransform(), 0, mConfig.overlayRotation, 0, 0, 1);
-            Matrix.translateM(mConfig.overlayDrawer.getTransform(), 0, -0.5F, -0.5F, 0);
-        }
-        mViewport.drawFrame(mConfig.textureId, transform);
-        if (mConfig.hasOverlay()) {
-            mConfig.overlayDrawer.render();
+            // Notify we have reached the max length value.
+            if (mFirstTimeUs == Long.MIN_VALUE) mFirstTimeUs = frame.timestampUs();
+            if (!hasReachedMaxLength()) {
+                boolean didReachMaxLength = (frame.timestampUs() - mFirstTimeUs) > getMaxLengthMillis() * 1000L;
+                if (didReachMaxLength) {
+                    LOG.w("onEvent -",
+                            "frameNumber:", mFrameNumber,
+                            "timestampUs:", frame.timestampUs(),
+                            "firstTimeUs:", mFirstTimeUs,
+                            "- reached max length! deltaUs:", frame.timestampUs() - mFirstTimeUs);
+                    notifyMaxLengthReached();
+                }
+            }
+
+            // First, drain any previous data.
+            LOG.i("onEvent -",
+                    "frameNumber:", mFrameNumber,
+                    "timestampUs:", frame.timestampUs(),
+                    "- draining.");
+            drainOutput(false);
+
+            // Then draw on the surface.
+            LOG.i("onEvent -",
+                    "frameNumber:", mFrameNumber,
+                    "timestampUs:", frame.timestampUs(),
+                    "- rendering.");
+
+            // 1. We must scale this matrix like GlCameraPreview does, because it might have some cropping.
+            // Scaling takes place with respect to the (0, 0, 0) point, so we must apply a Translation to compensate.
+            float[] transform = frame.transform;
+            float scaleX = mConfig.scaleX;
+            float scaleY = mConfig.scaleY;
+            float scaleTranslX = (1F - scaleX) / 2F;
+            float scaleTranslY = (1F - scaleY) / 2F;
+            Matrix.translateM(transform, 0, scaleTranslX, scaleTranslY, 0);
+            Matrix.scaleM(transform, 0, scaleX, scaleY, 1);
+
+            // 2. We also must rotate this matrix. In GlCameraPreview it is not needed because it is a live
+            // stream, but the output video, must be correctly rotated based on the device rotation at the moment.
+            // Rotation also takes place with respect to the origin (the Z axis), so we must
+            // translate to origin, rotate, then back to where we were.
+            Matrix.translateM(transform, 0, 0.5F, 0.5F, 0);
+            Matrix.rotateM(transform, 0, mTransformRotation, 0, 0, 1);
+            Matrix.translateM(transform, 0, -0.5F, -0.5F, 0);
+
+            // 3. Do the same for overlays with their own rotation.
+            if (mConfig.hasOverlay()) {
+                mConfig.overlayDrawer.draw(mConfig.overlayTarget);
+                Matrix.translateM(mConfig.overlayDrawer.getTransform(), 0, 0.5F, 0.5F, 0);
+                Matrix.rotateM(mConfig.overlayDrawer.getTransform(), 0, mConfig.overlayRotation, 0, 0, 1);
+                Matrix.translateM(mConfig.overlayDrawer.getTransform(), 0, -0.5F, -0.5F, 0);
+            }
+            mViewport.drawFrame(mConfig.textureId, transform);
+            if (mConfig.hasOverlay()) {
+                mConfig.overlayDrawer.render();
+            }
+            mWindow.setPresentationTime(frame.timestampNanos);
+            mWindow.swapBuffers();
+            mFramePool.recycle(frame);
         }
-        mWindow.setPresentationTime(frame.timestampNanos);
-        mWindow.swapBuffers();
-        mFramePool.recycle(frame);
     }
 
     @Override
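
The SnapshotVideoRecorder part of this patch (18 added lines in the diffstat) is not reproduced above, but its only job is to forward filter changes onto the encoder thread, where the new FILTER_EVENT branch of onEvent() applies them via changeShaderFilter(). A possible forwarding callback is sketched below. It is illustrative only: it assumes the MediaEncoder base class exposes the notify(String, Object) counterpart of onEvent() that already carries FRAME_EVENT, mCurrentFilter and mEncoder are hypothetical field names, and FILTER_EVENT is referenced through TextureMediaEncoder because that is where onEvent() consumes it.

    // Illustrative sketch only; the real SnapshotVideoRecorder hunk is not shown above.
    @RendererThread
    @Override
    public void onFilterChanged(@NonNull Filter filter) {
        // Keep the latest filter so a recorder started later can pick it up.
        mCurrentFilter = filter;
        if (mEncoder != null) {
            // Hop onto the encoder thread; onEvent() handles FILTER_EVENT there.
            mEncoder.notify(TextureMediaEncoder.FILTER_EVENT, filter);
        }
    }
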
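
The transform handling inside the FRAME_EVENT branch is unchanged by this patch apart from its extra indentation, but the two matrix tricks it relies on are easy to miss in the diff noise: cropping is applied as a scale about the centre of the texture, and rotation is applied about the centre rather than the origin. A small self-contained helper that performs the same math with android.opengl.Matrix is shown below; it is an illustration of the pattern, not code from the patch, and the class and method names are made up for the example.

    import android.opengl.Matrix;

    // Illustration of the centre-scale and centre-rotate steps used in onEvent().
    final class TextureTransforms {

        private TextureTransforms() {}

        // Crop by scaling about the centre: scaleM() scales about (0, 0, 0), so translate
        // by half of the removed size first to keep the visible region centred.
        static void scaleAboutCenter(float[] transform, float scaleX, float scaleY) {
            float translX = (1F - scaleX) / 2F;
            float translY = (1F - scaleY) / 2F;
            Matrix.translateM(transform, 0, translX, translY, 0);
            Matrix.scaleM(transform, 0, scaleX, scaleY, 1);
        }

        // Rotate about the texture centre (0.5, 0.5): rotation happens about the origin
        // and the Z axis, so translate to the origin, rotate, then translate back.
        static void rotateAboutCenter(float[] transform, float degrees) {
            Matrix.translateM(transform, 0, 0.5F, 0.5F, 0);
            Matrix.rotateM(transform, 0, degrees, 0, 0, 1);
            Matrix.translateM(transform, 0, -0.5F, -0.5F, 0);
        }
    }
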