changed filter implementation for image and video

pull/527/head
Suneet Agrawal 6 years ago
parent ef962ff269
commit 66ad422a19
1. cameraview/src/main/java/com/otaliastudios/cameraview/internal/egl/EglViewport.java (2 changes)
2. cameraview/src/main/java/com/otaliastudios/cameraview/picture/SnapshotGlPictureRecorder.java (11 changes)
3. cameraview/src/main/java/com/otaliastudios/cameraview/preview/GlCameraPreview.java (7 changes)
4. cameraview/src/main/java/com/otaliastudios/cameraview/preview/RendererFrameCallback.java (8 changes)
5. cameraview/src/main/java/com/otaliastudios/cameraview/video/SnapshotVideoRecorder.java (18 changes)
6. cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/TextureMediaEncoder.java (153 changes)

@@ -112,7 +112,7 @@ public class EglViewport extends EglElement {
         return texId;
     }

-    public void changeShaderEffect(@NonNull Filter shaderEffect){
+    public void changeShaderFilter(@NonNull Filter shaderEffect){
         this.mShaderEffect = shaderEffect;
         mIsShaderChanged = true;
     }
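
Note that changeShaderFilter() only records the new filter and raises mIsShaderChanged; a GL program can only be rebuilt on a thread that holds the EGL context, so the actual shader swap presumably happens on the next draw call. A minimal sketch of that deferred-swap pattern (the class, the nested Filter stand-in and buildProgram() are hypothetical, not the library's code):

    import android.opengl.GLES20;
    import androidx.annotation.NonNull;

    // Hypothetical sketch: the setter may run on any thread and only records
    // the request; drawFrame() runs on the GL thread and performs the swap.
    class DeferredFilterViewport {

        // Minimal stand-in for the library's Filter type.
        interface Filter {
            String getVertexShader();
            String getFragmentShader();
        }

        private Filter mShaderEffect;
        private boolean mIsShaderChanged;
        private int mProgram = 0;

        public void changeShaderFilter(@NonNull Filter shaderEffect) {
            mShaderEffect = shaderEffect;
            mIsShaderChanged = true; // no GL calls here: likely the wrong thread
        }

        public void drawFrame(int textureId, float[] transform) {
            if (mIsShaderChanged) {
                if (mProgram != 0) GLES20.glDeleteProgram(mProgram);
                mProgram = buildProgram(mShaderEffect.getVertexShader(),
                        mShaderEffect.getFragmentShader());
                mIsShaderChanged = false;
            }
            // ... bind mProgram, upload transform, draw the textured quad ...
        }

        private int buildProgram(String vertex, String fragment) {
            // compile and link the two shaders; omitted for brevity
            return 1;
        }
    }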

@@ -2,9 +2,6 @@ package com.otaliastudios.cameraview.picture;

 import android.annotation.TargetApi;
 import android.graphics.Bitmap;
 import android.graphics.Canvas;
-import android.graphics.Color;
-import android.graphics.PorterDuff;
-import android.graphics.Rect;
 import android.graphics.SurfaceTexture;
 import android.opengl.EGL14;
@@ -14,7 +11,6 @@ import android.os.Build;

 import com.otaliastudios.cameraview.CameraLogger;
 import com.otaliastudios.cameraview.PictureResult;
 import com.otaliastudios.cameraview.internal.Issue514Workaround;
-import com.otaliastudios.cameraview.internal.egl.EglBaseSurface;
 import com.otaliastudios.cameraview.overlay.Overlay;
 import com.otaliastudios.cameraview.controls.Facing;
@@ -105,6 +101,12 @@ public class SnapshotGlPictureRecorder extends PictureRecorder {
                 mPreview.removeRendererFrameCallback(this);
                 SnapshotGlPictureRecorder.this.onRendererFrame(surfaceTexture, scaleX, scaleY, shaderEffect);
             }
+
+            @Override
+            public void onFilterChanged(@NonNull Filter filter) {
+                mViewport.changeShaderFilter(filter);
+            }
         });
     }
@@ -150,7 +152,6 @@ public class SnapshotGlPictureRecorder extends PictureRecorder {
     @RendererThread
     @TargetApi(Build.VERSION_CODES.KITKAT)
     private void onRendererFrame(final @NonNull SurfaceTexture surfaceTexture, final float scaleX, final float scaleY, @NonNull Filter filter) {
-        mViewport.changeShaderEffect(filter);
         // Get egl context from the RendererThread, which is the one in which we have created
         // the textureId and the overlayTextureId, managed by the GlSurfaceView.
         // Next operations can then be performed on different threads using this handle.

@@ -284,6 +284,7 @@ public class GlCameraPreview extends CameraPreview<GLSurfaceView, SurfaceTexture
             public void run() {
                 mRendererFrameCallbacks.add(callback);
                 if (mOutputTextureId != 0) callback.onRendererTextureCreated(mOutputTextureId);
+                callback.onFilterChanged(mCurrentShaderEffect);
             }
         });
     }
@@ -320,6 +321,10 @@ public class GlCameraPreview extends CameraPreview<GLSurfaceView, SurfaceTexture
         shaderEffect.setPreviewingViewSize(getView().getWidth(), getView().getHeight());
         mCurrentShaderEffect = shaderEffect;
-        mOutputViewport.changeShaderEffect(shaderEffect);
+        mOutputViewport.changeShaderFilter(shaderEffect);
+        for (RendererFrameCallback callback : mRendererFrameCallbacks) {
+            callback.onFilterChanged(shaderEffect);
+        }
     }
 }
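
These two hunks wire up both delivery paths: a callback registered mid-session immediately receives the active filter, and every registered callback is notified on each subsequent change. This is the usual "replay the latest value to new subscribers" pattern; a self-contained sketch with illustrative names:

    import java.util.Set;
    import java.util.concurrent.CopyOnWriteArraySet;

    // Illustrative sketch, not library code: late subscribers get the current
    // value right away, and everyone is notified on each subsequent change.
    final class FilterBroadcaster<F> {

        interface Listener<F> {
            void onFilterChanged(F filter);
        }

        private final Set<Listener<F>> listeners = new CopyOnWriteArraySet<>();
        private volatile F current;

        void addListener(Listener<F> listener) {
            listeners.add(listener);
            F value = current;
            if (value != null) listener.onFilterChanged(value); // replay latest
        }

        void setFilter(F filter) {
            current = filter;
            for (Listener<F> listener : listeners) listener.onFilterChanged(filter);
        }
    }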

@@ -31,4 +31,12 @@ public interface RendererFrameCallback {
      */
     @RendererThread
     void onRendererFrame(@NonNull SurfaceTexture surfaceTexture, float scaleX, float scaleY, Filter shaderEffect);
+
+    /**
+     * Called when the shader filter changes. Implementors should update
+     * their EglViewport with the new filter when this event is received.
+     *
+     * @param filter the new filter that was applied
+     */
+    void onFilterChanged(@NonNull Filter filter);
 }
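
A typical implementor forwards the new filter to the EglViewport it renders with, as SnapshotGlPictureRecorder does above. A sketch, assuming an enclosing class holding an EglViewport in mViewport and the callback signatures visible in this diff:

    // Sketch of an implementor, using the interface members shown in this diff.
    RendererFrameCallback callback = new RendererFrameCallback() {

        @Override
        public void onRendererTextureCreated(int textureId) {
            // allocate per-texture resources here
        }

        @Override
        public void onRendererFrame(@NonNull SurfaceTexture surfaceTexture,
                                    float scaleX, float scaleY, Filter shaderEffect) {
            // draw the frame with mViewport here
        }

        @Override
        public void onFilterChanged(@NonNull Filter filter) {
            mViewport.changeShaderFilter(filter); // picked up on the next draw
        }
    };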

@@ -61,6 +61,8 @@ public class SnapshotVideoRecorder extends VideoRecorder implements RendererFram
     private boolean mHasOverlay;
     private int mOverlayRotation;
+    private Filter mCurrentFilter;
+
     public SnapshotVideoRecorder(@NonNull CameraEngine engine,
                                  @NonNull GlCameraPreview preview,
                                  @Nullable Overlay overlay,
@@ -158,6 +160,13 @@ public class SnapshotVideoRecorder extends VideoRecorder implements RendererFram
             // Engine
             mEncoderEngine = new MediaEncoderEngine(mResult.file, videoEncoder, audioEncoder,
                     mResult.maxDuration, mResult.maxSize, SnapshotVideoRecorder.this);
+
+            // Set the current filter on the freshly created engine.
+            if (mEncoderEngine != null) {
+                mEncoderEngine.notify(TextureMediaEncoder.FILTER_EVENT, mCurrentFilter);
+            }
+
             mEncoderEngine.start();
             mResult.rotation = 0; // We will rotate the result instead.
             mCurrentState = STATE_RECORDING;
@@ -183,6 +192,15 @@ public class SnapshotVideoRecorder extends VideoRecorder implements RendererFram
     }
+
+    @Override
+    public void onFilterChanged(@NonNull Filter filter) {
+        mCurrentFilter = filter;
+        if (mEncoderEngine != null) {
+            mEncoderEngine.notify(TextureMediaEncoder.FILTER_EVENT, filter);
+        }
+    }
+
     @Override
     public void onEncodingStart() {
         dispatchVideoRecordingStart();
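
onFilterChanged() covers both lifecycles: if the engine already exists the filter is forwarded right away; otherwise it is cached in mCurrentFilter and replayed just after the engine is constructed (previous hunk). The notify() call hands the event over to the encoder thread, where TextureMediaEncoder.onEvent() consumes it. A self-contained sketch of that handshake (class and method names here are illustrative, not the library's API):

    import android.os.Handler;
    import android.os.HandlerThread;

    // Illustrative sketch of the notify()/onEvent() handshake: events are
    // posted from the camera side and consumed serially on the encoder thread.
    final class TinyEncoder {

        static final String FILTER_EVENT = "filter";

        private final HandlerThread mThread = new HandlerThread("encoder");
        private final Handler mHandler;

        TinyEncoder() {
            mThread.start();
            mHandler = new Handler(mThread.getLooper());
        }

        // Mirrors MediaEncoderEngine.notify(event, data): hop threads, never block.
        void notifyEvent(final String event, final Object data) {
            mHandler.post(new Runnable() {
                @Override
                public void run() {
                    onEvent(event, data);
                }
            });
        }

        // Mirrors TextureMediaEncoder.onEvent(): dispatch on the event name.
        void onEvent(String event, Object data) {
            if (FILTER_EVENT.equals(event)) {
                // e.g. mViewport.changeShaderFilter((Filter) data);
            }
        }
    }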

@@ -4,16 +4,17 @@ import android.graphics.SurfaceTexture;
 import android.opengl.Matrix;
 import android.os.Build;

+import androidx.annotation.NonNull;
+import androidx.annotation.Nullable;
+import androidx.annotation.RequiresApi;
+
 import com.otaliastudios.cameraview.CameraLogger;
+import com.otaliastudios.cameraview.filters.Filter;
 import com.otaliastudios.cameraview.internal.egl.EglCore;
 import com.otaliastudios.cameraview.internal.egl.EglViewport;
 import com.otaliastudios.cameraview.internal.egl.EglWindowSurface;
 import com.otaliastudios.cameraview.internal.utils.Pool;

-import androidx.annotation.NonNull;
-import androidx.annotation.Nullable;
-import androidx.annotation.RequiresApi;
-
 /**
  * Default implementation for video encoding.
  */
@@ -24,6 +25,7 @@ public class TextureMediaEncoder extends VideoMediaEncoder<TextureConfig> {
     private static final CameraLogger LOG = CameraLogger.create(TAG);

     public final static String FRAME_EVENT = "frame";
+    public final static String FILTER_EVENT = "filter";

     private int mTransformRotation;
     private EglCore mEglCore;
@ -132,80 +134,85 @@ public class TextureMediaEncoder extends VideoMediaEncoder<TextureConfig> {
@EncoderThread
@Override
protected void onEvent(@NonNull String event, @Nullable Object data) {
if (!event.equals(FRAME_EVENT)) return;
Frame frame = (Frame) data;
if (frame == null) {
throw new IllegalArgumentException("Got null frame for FRAME_EVENT.");
}
if (!shouldRenderFrame(frame.timestampUs())) {
mFramePool.recycle(frame);
return;
}
// Notify we're got the first frame and its absolute time.
if (mFrameNumber == 1) {
notifyFirstFrameMillis(frame.timestampMillis);
}
if (event.equals(FILTER_EVENT)) {
Filter filter = (Filter) data;
mViewport.changeShaderFilter(filter);
} else if (event.equals(FRAME_EVENT)) {
Frame frame = (Frame) data;
if (frame == null) {
throw new IllegalArgumentException("Got null frame for FRAME_EVENT.");
}
if (!shouldRenderFrame(frame.timestampUs())) {
mFramePool.recycle(frame);
return;
}
// Notify we have reached the max length value.
if (mFirstTimeUs == Long.MIN_VALUE) mFirstTimeUs = frame.timestampUs();
if (!hasReachedMaxLength()) {
boolean didReachMaxLength = (frame.timestampUs() - mFirstTimeUs) > getMaxLengthMillis() * 1000L;
if (didReachMaxLength) {
LOG.w("onEvent -",
"frameNumber:", mFrameNumber,
"timestampUs:", frame.timestampUs(),
"firstTimeUs:", mFirstTimeUs,
"- reached max length! deltaUs:", frame.timestampUs() - mFirstTimeUs);
notifyMaxLengthReached();
// Notify we're got the first frame and its absolute time.
if (mFrameNumber == 1) {
notifyFirstFrameMillis(frame.timestampMillis);
}
}
// First, drain any previous data.
LOG.i("onEvent -",
"frameNumber:", mFrameNumber,
"timestampUs:", frame.timestampUs(),
"- draining.");
drainOutput(false);
// Then draw on the surface.
LOG.i("onEvent -",
"frameNumber:", mFrameNumber,
"timestampUs:", frame.timestampUs(),
"- rendering.");
// 1. We must scale this matrix like GlCameraPreview does, because it might have some cropping.
// Scaling takes place with respect to the (0, 0, 0) point, so we must apply a Translation to compensate.
float[] transform = frame.transform;
float scaleX = mConfig.scaleX;
float scaleY = mConfig.scaleY;
float scaleTranslX = (1F - scaleX) / 2F;
float scaleTranslY = (1F - scaleY) / 2F;
Matrix.translateM(transform, 0, scaleTranslX, scaleTranslY, 0);
Matrix.scaleM(transform, 0, scaleX, scaleY, 1);
// 2. We also must rotate this matrix. In GlCameraPreview it is not needed because it is a live
// stream, but the output video, must be correctly rotated based on the device rotation at the moment.
// Rotation also takes place with respect to the origin (the Z axis), so we must
// translate to origin, rotate, then back to where we were.
Matrix.translateM(transform, 0, 0.5F, 0.5F, 0);
Matrix.rotateM(transform, 0, mTransformRotation, 0, 0, 1);
Matrix.translateM(transform, 0, -0.5F, -0.5F, 0);
// 3. Do the same for overlays with their own rotation.
if (mConfig.hasOverlay()) {
mConfig.overlayDrawer.draw(mConfig.overlayTarget);
Matrix.translateM(mConfig.overlayDrawer.getTransform(), 0, 0.5F, 0.5F, 0);
Matrix.rotateM(mConfig.overlayDrawer.getTransform(), 0, mConfig.overlayRotation, 0, 0, 1);
Matrix.translateM(mConfig.overlayDrawer.getTransform(), 0, -0.5F, -0.5F, 0);
}
mViewport.drawFrame(mConfig.textureId, transform);
if (mConfig.hasOverlay()) {
mConfig.overlayDrawer.render();
// Notify we have reached the max length value.
if (mFirstTimeUs == Long.MIN_VALUE) mFirstTimeUs = frame.timestampUs();
if (!hasReachedMaxLength()) {
boolean didReachMaxLength = (frame.timestampUs() - mFirstTimeUs) > getMaxLengthMillis() * 1000L;
if (didReachMaxLength) {
LOG.w("onEvent -",
"frameNumber:", mFrameNumber,
"timestampUs:", frame.timestampUs(),
"firstTimeUs:", mFirstTimeUs,
"- reached max length! deltaUs:", frame.timestampUs() - mFirstTimeUs);
notifyMaxLengthReached();
}
}
// First, drain any previous data.
LOG.i("onEvent -",
"frameNumber:", mFrameNumber,
"timestampUs:", frame.timestampUs(),
"- draining.");
drainOutput(false);
// Then draw on the surface.
LOG.i("onEvent -",
"frameNumber:", mFrameNumber,
"timestampUs:", frame.timestampUs(),
"- rendering.");
// 1. We must scale this matrix like GlCameraPreview does, because it might have some cropping.
// Scaling takes place with respect to the (0, 0, 0) point, so we must apply a Translation to compensate.
float[] transform = frame.transform;
float scaleX = mConfig.scaleX;
float scaleY = mConfig.scaleY;
float scaleTranslX = (1F - scaleX) / 2F;
float scaleTranslY = (1F - scaleY) / 2F;
Matrix.translateM(transform, 0, scaleTranslX, scaleTranslY, 0);
Matrix.scaleM(transform, 0, scaleX, scaleY, 1);
// 2. We also must rotate this matrix. In GlCameraPreview it is not needed because it is a live
// stream, but the output video, must be correctly rotated based on the device rotation at the moment.
// Rotation also takes place with respect to the origin (the Z axis), so we must
// translate to origin, rotate, then back to where we were.
Matrix.translateM(transform, 0, 0.5F, 0.5F, 0);
Matrix.rotateM(transform, 0, mTransformRotation, 0, 0, 1);
Matrix.translateM(transform, 0, -0.5F, -0.5F, 0);
// 3. Do the same for overlays with their own rotation.
if (mConfig.hasOverlay()) {
mConfig.overlayDrawer.draw(mConfig.overlayTarget);
Matrix.translateM(mConfig.overlayDrawer.getTransform(), 0, 0.5F, 0.5F, 0);
Matrix.rotateM(mConfig.overlayDrawer.getTransform(), 0, mConfig.overlayRotation, 0, 0, 1);
Matrix.translateM(mConfig.overlayDrawer.getTransform(), 0, -0.5F, -0.5F, 0);
}
mViewport.drawFrame(mConfig.textureId, transform);
if (mConfig.hasOverlay()) {
mConfig.overlayDrawer.render();
}
mWindow.setPresentationTime(frame.timestampNanos);
mWindow.swapBuffers();
mFramePool.recycle(frame);
}
mWindow.setPresentationTime(frame.timestampNanos);
mWindow.swapBuffers();
mFramePool.recycle(frame);
}
@Override
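
The numbered comments in this hunk rely on two standard matrix tricks: scaling about the center via a compensating translation of (1 - scale) / 2, and rotating about the center by translating to the origin, rotating around Z, and translating back. A standalone illustration (not library code) using android.opengl.Matrix:

    import android.opengl.Matrix;

    // Worked example of the two transforms applied above. Matrix.scaleM()
    // scales about the origin, so a translation of (1 - scale) / 2 keeps the
    // crop centered; the rotation is wrapped in translate(+0.5)/translate(-0.5)
    // so it pivots about the texture center instead of its corner.
    final class TransformDemo {

        static float[] buildTransform(float scaleX, float scaleY, float rotationDeg) {
            float[] transform = new float[16];
            Matrix.setIdentityM(transform, 0);

            // 1. Crop-scale, compensated so the crop stays centered.
            Matrix.translateM(transform, 0, (1F - scaleX) / 2F, (1F - scaleY) / 2F, 0);
            Matrix.scaleM(transform, 0, scaleX, scaleY, 1);

            // 2. Rotate about the center around the Z axis.
            Matrix.translateM(transform, 0, 0.5F, 0.5F, 0);
            Matrix.rotateM(transform, 0, rotationDeg, 0, 0, 1);
            Matrix.translateM(transform, 0, -0.5F, -0.5F, 0);
            return transform;
        }
    }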
