diff --git a/cameraview/src/main/java/com/otaliastudios/cameraview/internal/egl/EglViewport.java b/cameraview/src/main/java/com/otaliastudios/cameraview/internal/egl/EglViewport.java
index f6f72080..ad43de64 100644
--- a/cameraview/src/main/java/com/otaliastudios/cameraview/internal/egl/EglViewport.java
+++ b/cameraview/src/main/java/com/otaliastudios/cameraview/internal/egl/EglViewport.java
@@ -67,6 +67,8 @@ public class EglViewport {
     }
 
     public void setFilter(@NonNull Filter filter) {
+        // TODO see if this is needed. If setFilter is always called from the correct GL thread,
+        // we don't need to wait for a new draw call (which might not even happen).
         mPendingFilter = filter;
     }
 
diff --git a/cameraview/src/main/java/com/otaliastudios/cameraview/picture/SnapshotGlPictureRecorder.java b/cameraview/src/main/java/com/otaliastudios/cameraview/picture/SnapshotGlPictureRecorder.java
index a9939f0f..5e42d13e 100644
--- a/cameraview/src/main/java/com/otaliastudios/cameraview/picture/SnapshotGlPictureRecorder.java
+++ b/cameraview/src/main/java/com/otaliastudios/cameraview/picture/SnapshotGlPictureRecorder.java
@@ -95,6 +95,12 @@ public class SnapshotGlPictureRecorder extends PictureRecorder {
                 SnapshotGlPictureRecorder.this.onRendererTextureCreated(textureId);
             }
 
+            @RendererThread
+            @Override
+            public void onRendererFilterChanged(@NonNull Filter filter) {
+                SnapshotGlPictureRecorder.this.onRendererFilterChanged(filter);
+            }
+
             @RendererThread
             @Override
             public void onRendererFrame(@NonNull SurfaceTexture surfaceTexture, final float scaleX, final float scaleY) {
@@ -102,10 +108,6 @@ public class SnapshotGlPictureRecorder extends PictureRecorder {
                 SnapshotGlPictureRecorder.this.onRendererFrame(surfaceTexture, scaleX, scaleY);
             }
 
-            @Override
-            public void onFilterChanged(@NonNull Filter filter) {
-                mViewport.setFilter(filter.copy());
-            }
         });
     }
 
@@ -125,6 +127,12 @@ public class SnapshotGlPictureRecorder extends PictureRecorder {
         }
     }
 
+    @RendererThread
+    @TargetApi(Build.VERSION_CODES.KITKAT)
+    private void onRendererFilterChanged(@NonNull Filter filter) {
+        mViewport.setFilter(filter.copy());
+    }
+
     /**
      * The tricky part here is the EGL surface creation.
     *
@@ -150,7 +158,9 @@ public class SnapshotGlPictureRecorder extends PictureRecorder {
      */
     @RendererThread
     @TargetApi(Build.VERSION_CODES.KITKAT)
-    private void onRendererFrame(final @NonNull SurfaceTexture surfaceTexture, final float scaleX, final float scaleY) {
+    private void onRendererFrame(@SuppressWarnings("unused") @NonNull SurfaceTexture surfaceTexture,
+                                 final float scaleX,
+                                 final float scaleY) {
         // Get egl context from the RendererThread, which is the one in which we have created
         // the textureId and the overlayTextureId, managed by the GlSurfaceView.
         // Next operations can then be performed on different threads using this handle.
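For context on the TODO in the EglViewport hunk above, a minimal sketch of the deferred pattern that setFilter relies on: the pending filter is only picked up on the GL thread, at draw time. Only mPendingFilter and the two-argument drawFrame shape come from this patch; the class name and the rest are illustrative, not the library's exact code.

    // Assumed sketch of EglViewport's deferred filter swap. Filter is the library's own type.
    class DeferredFilterViewport {
        private Filter mFilter;
        private Filter mPendingFilter;

        public void setFilter(Filter filter) {
            // May be called from any thread; nothing GL-related happens here.
            mPendingFilter = filter;
        }

        public void drawFrame(int textureId, float[] textureTransform) {
            if (mPendingFilter != null) {
                // We are on the GL thread now: safe to swap the filter and rebuild its program.
                mFilter = mPendingFilter;
                mPendingFilter = null;
                // ... recreate the GL program for mFilter here (omitted) ...
            }
            // ... draw textureId with mFilter and textureTransform here (omitted) ...
        }
    }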
diff --git a/cameraview/src/main/java/com/otaliastudios/cameraview/preview/GlCameraPreview.java b/cameraview/src/main/java/com/otaliastudios/cameraview/preview/GlCameraPreview.java
index d65e23ef..b85785df 100644
--- a/cameraview/src/main/java/com/otaliastudios/cameraview/preview/GlCameraPreview.java
+++ b/cameraview/src/main/java/com/otaliastudios/cameraview/preview/GlCameraPreview.java
@@ -141,7 +141,9 @@ public class GlCameraPreview extends FilterCameraPreview {
     @EncoderThread
     @Override
     protected void onEvent(@NonNull String event, @Nullable Object data) {
+        switch (event) {
+            case FILTER_EVENT:
+                //noinspection ConstantConditions
+                onFilter((Filter) data);
+                break;
+            case FRAME_EVENT:
+                //noinspection ConstantConditions
+                onFrame((Frame) data);
+                break;
+        }
+    }
-        if (event.equals(FILTER_EVENT)) {
-            Filter filter = (Filter) data;
-            mViewport.setFilter(filter);
-        } else if (event.equals(FRAME_EVENT)) {
-            Frame frame = (Frame) data;
-            if (frame == null) {
-                throw new IllegalArgumentException("Got null frame for FRAME_EVENT.");
-            }
-            if (!shouldRenderFrame(frame.timestampUs())) {
-                mFramePool.recycle(frame);
-                return;
-            }
+
+    private void onFilter(@NonNull Filter filter) {
+        mViewport.setFilter(filter);
+    }
-
-            // Notify we're got the first frame and its absolute time.
-            if (mFrameNumber == 1) {
-                notifyFirstFrameMillis(frame.timestampMillis);
-            }
+
+    private void onFrame(@NonNull Frame frame) {
+        if (!shouldRenderFrame(frame.timestampUs())) {
+            mFramePool.recycle(frame);
+            return;
+        }
-
-            // Notify we have reached the max length value.
-            if (mFirstTimeUs == Long.MIN_VALUE) mFirstTimeUs = frame.timestampUs();
-            if (!hasReachedMaxLength()) {
-                boolean didReachMaxLength = (frame.timestampUs() - mFirstTimeUs) > getMaxLengthUs();
-                if (didReachMaxLength) {
-                    LOG.w("onEvent -",
-                            "frameNumber:", mFrameNumber,
-                            "timestampUs:", frame.timestampUs(),
-                            "firstTimeUs:", mFirstTimeUs,
-                            "- reached max length! deltaUs:", frame.timestampUs() - mFirstTimeUs);
-                    notifyMaxLengthReached();
-                }
-            }
+
+        // Notify we're got the first frame and its absolute time.
+        if (mFrameNumber == 1) {
+            notifyFirstFrameMillis(frame.timestampMillis);
+        }
-
-            // First, drain any previous data.
-            LOG.i("onEvent -",
-                    "frameNumber:", mFrameNumber,
-                    "timestampUs:", frame.timestampUs(),
-                    "- draining.");
-            drainOutput(false);
-
-            // Then draw on the surface.
-            LOG.i("onEvent -",
-                    "frameNumber:", mFrameNumber,
-                    "timestampUs:", frame.timestampUs(),
-                    "- rendering.");
-
-            // 1. We must scale this matrix like GlCameraPreview does, because it might have some cropping.
-            // Scaling takes place with respect to the (0, 0, 0) point, so we must apply a Translation to compensate.
-            float[] transform = frame.transform;
-            float scaleX = mConfig.scaleX;
-            float scaleY = mConfig.scaleY;
-            float scaleTranslX = (1F - scaleX) / 2F;
-            float scaleTranslY = (1F - scaleY) / 2F;
-            Matrix.translateM(transform, 0, scaleTranslX, scaleTranslY, 0);
-            Matrix.scaleM(transform, 0, scaleX, scaleY, 1);
-
-            // 2. We also must rotate this matrix. In GlCameraPreview it is not needed because it is a live
-            // stream, but the output video, must be correctly rotated based on the device rotation at the moment.
-            // Rotation also takes place with respect to the origin (the Z axis), so we must
-            // translate to origin, rotate, then back to where we were.
-            Matrix.translateM(transform, 0, 0.5F, 0.5F, 0);
-            Matrix.rotateM(transform, 0, mTransformRotation, 0, 0, 1);
-            Matrix.translateM(transform, 0, -0.5F, -0.5F, 0);
-
-            // 3. Do the same for overlays with their own rotation.
-            if (mConfig.hasOverlay()) {
-                mConfig.overlayDrawer.draw(mConfig.overlayTarget);
-                Matrix.translateM(mConfig.overlayDrawer.getTransform(), 0, 0.5F, 0.5F, 0);
-                Matrix.rotateM(mConfig.overlayDrawer.getTransform(), 0, mConfig.overlayRotation, 0, 0, 1);
-                Matrix.translateM(mConfig.overlayDrawer.getTransform(), 0, -0.5F, -0.5F, 0);
+        // Notify we have reached the max length value.
+        if (mFirstTimeUs == Long.MIN_VALUE) mFirstTimeUs = frame.timestampUs();
+        if (!hasReachedMaxLength()) {
+            boolean didReachMaxLength = (frame.timestampUs() - mFirstTimeUs) > getMaxLengthUs();
+            if (didReachMaxLength) {
+                LOG.w("onEvent -",
+                        "frameNumber:", mFrameNumber,
+                        "timestampUs:", frame.timestampUs(),
+                        "firstTimeUs:", mFirstTimeUs,
+                        "- reached max length! deltaUs:", frame.timestampUs() - mFirstTimeUs);
+                notifyMaxLengthReached();
             }
-            mViewport.drawFrame(mConfig.textureId, transform);
-            if (mConfig.hasOverlay()) {
-                mConfig.overlayDrawer.render();
-            }
-            mWindow.setPresentationTime(frame.timestampNanos);
-            mWindow.swapBuffers();
-            mFramePool.recycle(frame);
         }
+
+        // First, drain any previous data.
+        LOG.i("onEvent -",
+                "frameNumber:", mFrameNumber,
+                "timestampUs:", frame.timestampUs(),
+                "- draining.");
+        drainOutput(false);
+
+        // Then draw on the surface.
+        LOG.i("onEvent -",
+                "frameNumber:", mFrameNumber,
+                "timestampUs:", frame.timestampUs(),
+                "- rendering.");
+
+        // 1. We must scale this matrix like GlCameraPreview does, because it might have some cropping.
+        // Scaling takes place with respect to the (0, 0, 0) point, so we must apply a Translation to compensate.
+        float[] transform = frame.transform;
+        float scaleX = mConfig.scaleX;
+        float scaleY = mConfig.scaleY;
+        float scaleTranslX = (1F - scaleX) / 2F;
+        float scaleTranslY = (1F - scaleY) / 2F;
+        Matrix.translateM(transform, 0, scaleTranslX, scaleTranslY, 0);
+        Matrix.scaleM(transform, 0, scaleX, scaleY, 1);
+
+        // 2. We also must rotate this matrix. In GlCameraPreview it is not needed because it is a live
+        // stream, but the output video, must be correctly rotated based on the device rotation at the moment.
+        // Rotation also takes place with respect to the origin (the Z axis), so we must
+        // translate to origin, rotate, then back to where we were.
+        Matrix.translateM(transform, 0, 0.5F, 0.5F, 0);
+        Matrix.rotateM(transform, 0, mTransformRotation, 0, 0, 1);
+        Matrix.translateM(transform, 0, -0.5F, -0.5F, 0);
+
+        // 3. Do the same for overlays with their own rotation.
+        if (mConfig.hasOverlay()) {
+            mConfig.overlayDrawer.draw(mConfig.overlayTarget);
+            Matrix.translateM(mConfig.overlayDrawer.getTransform(), 0, 0.5F, 0.5F, 0);
+            Matrix.rotateM(mConfig.overlayDrawer.getTransform(), 0, mConfig.overlayRotation, 0, 0, 1);
+            Matrix.translateM(mConfig.overlayDrawer.getTransform(), 0, -0.5F, -0.5F, 0);
+        }
+        mViewport.drawFrame(mConfig.textureId, transform);
+        if (mConfig.hasOverlay()) {
+            mConfig.overlayDrawer.render();
+        }
+        mWindow.setPresentationTime(frame.timestampNanos);
+        mWindow.swapBuffers();
+        mFramePool.recycle(frame);
     }
 
     @Override
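The translate/rotate/translate sequence in steps 2 and 3 above is the usual way to rotate a texture transform about its center, because android.opengl.Matrix rotates about the origin. A standalone sketch of that step (the helper and its name are illustrative, not part of the patch):

    import android.opengl.Matrix;

    final class TransformHelpers {
        private TransformHelpers() {}

        // Illustrative helper: rotate a 4x4 texture transform about the texture center
        // (0.5, 0.5) instead of the origin, as the patch does with mTransformRotation
        // and mConfig.overlayRotation.
        static void rotateAroundCenter(float[] transform, float degrees) {
            Matrix.translateM(transform, 0, 0.5F, 0.5F, 0);   // move the center to the origin
            Matrix.rotateM(transform, 0, degrees, 0, 0, 1);   // rotate about the Z axis
            Matrix.translateM(transform, 0, -0.5F, -0.5F, 0); // move back
        }
    }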