@@ -4,16 +4,17 @@ import android.graphics.SurfaceTexture;
 import android.opengl.Matrix;
 import android.os.Build;
 
+import androidx.annotation.NonNull;
+import androidx.annotation.Nullable;
+import androidx.annotation.RequiresApi;
+
 import com.otaliastudios.cameraview.CameraLogger;
+import com.otaliastudios.cameraview.filters.Filter;
 import com.otaliastudios.cameraview.internal.egl.EglCore;
 import com.otaliastudios.cameraview.internal.egl.EglViewport;
 import com.otaliastudios.cameraview.internal.egl.EglWindowSurface;
 import com.otaliastudios.cameraview.internal.utils.Pool;
-
-import androidx.annotation.NonNull;
-import androidx.annotation.Nullable;
-import androidx.annotation.RequiresApi;
 
 /**
  * Default implementation for video encoding.
  */
@@ -24,6 +25,7 @@ public class TextureMediaEncoder extends VideoMediaEncoder<TextureConfig> {
     private static final CameraLogger LOG = CameraLogger.create(TAG);
 
     public final static String FRAME_EVENT = "frame";
+    public final static String FILTER_EVENT = "filter";
 
     private int mTransformRotation;
     private EglCore mEglCore;
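Note on the new constant: FILTER_EVENT is the key a caller uses to push a shader filter into the running encoder, mirroring how FRAME_EVENT already delivers frames. A rough caller-side sketch, assuming the base encoder exposes a notify(String, Object)-style dispatcher like the one used for frames; the `encoder`, `newFilter` and `frame` names below are placeholders and are not taken from this patch:

    // Hypothetical usage sketch; the dispatch path and names are assumptions.
    encoder.notify(TextureMediaEncoder.FILTER_EVENT, newFilter); // swap the shader filter
    encoder.notify(TextureMediaEncoder.FRAME_EVENT, frame);      // frames keep flowing as before

The handling of both events is shown in the next hunk.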
@@ -132,80 +134,85 @@ public class TextureMediaEncoder extends VideoMediaEncoder<TextureConfig> {
     @EncoderThread
     @Override
     protected void onEvent(@NonNull String event, @Nullable Object data) {
-        if (!event.equals(FRAME_EVENT)) return;
-        Frame frame = (Frame) data;
-        if (frame == null) {
-            throw new IllegalArgumentException("Got null frame for FRAME_EVENT.");
-        }
-        if (!shouldRenderFrame(frame.timestampUs())) {
-            mFramePool.recycle(frame);
-            return;
-        }
+        if (event.equals(FILTER_EVENT)) {
+            Filter filter = (Filter) data;
+            mViewport.changeShaderFilter(filter);
+        } else if (event.equals(FRAME_EVENT)) {
+            Frame frame = (Frame) data;
+            if (frame == null) {
+                throw new IllegalArgumentException("Got null frame for FRAME_EVENT.");
+            }
+            if (!shouldRenderFrame(frame.timestampUs())) {
+                mFramePool.recycle(frame);
+                return;
+            }
 
-        // Notify we're got the first frame and its absolute time.
-        if (mFrameNumber == 1) {
-            notifyFirstFrameMillis(frame.timestampMillis);
-        }
+            // Notify we're got the first frame and its absolute time.
+            if (mFrameNumber == 1) {
+                notifyFirstFrameMillis(frame.timestampMillis);
+            }
 
-        // Notify we have reached the max length value.
-        if (mFirstTimeUs == Long.MIN_VALUE) mFirstTimeUs = frame.timestampUs();
-        if (!hasReachedMaxLength()) {
-            boolean didReachMaxLength = (frame.timestampUs() - mFirstTimeUs) > getMaxLengthMillis() * 1000L;
-            if (didReachMaxLength) {
-                LOG.w("onEvent -",
-                        "frameNumber:", mFrameNumber,
-                        "timestampUs:", frame.timestampUs(),
-                        "firstTimeUs:", mFirstTimeUs,
-                        "- reached max length! deltaUs:", frame.timestampUs() - mFirstTimeUs);
-                notifyMaxLengthReached();
-            }
-        }
+            // Notify we have reached the max length value.
+            if (mFirstTimeUs == Long.MIN_VALUE) mFirstTimeUs = frame.timestampUs();
+            if (!hasReachedMaxLength()) {
+                boolean didReachMaxLength = (frame.timestampUs() - mFirstTimeUs) > getMaxLengthMillis() * 1000L;
+                if (didReachMaxLength) {
+                    LOG.w("onEvent -",
+                            "frameNumber:", mFrameNumber,
+                            "timestampUs:", frame.timestampUs(),
+                            "firstTimeUs:", mFirstTimeUs,
+                            "- reached max length! deltaUs:", frame.timestampUs() - mFirstTimeUs);
+                    notifyMaxLengthReached();
+                }
+            }
 
-        // First, drain any previous data.
-        LOG.i("onEvent -",
-                "frameNumber:", mFrameNumber,
-                "timestampUs:", frame.timestampUs(),
-                "- draining.");
-        drainOutput(false);
+            // First, drain any previous data.
+            LOG.i("onEvent -",
+                    "frameNumber:", mFrameNumber,
+                    "timestampUs:", frame.timestampUs(),
+                    "- draining.");
+            drainOutput(false);
 
-        // Then draw on the surface.
-        LOG.i("onEvent -",
-                "frameNumber:", mFrameNumber,
-                "timestampUs:", frame.timestampUs(),
-                "- rendering.");
+            // Then draw on the surface.
+            LOG.i("onEvent -",
+                    "frameNumber:", mFrameNumber,
+                    "timestampUs:", frame.timestampUs(),
+                    "- rendering.");
 
-        // 1. We must scale this matrix like GlCameraPreview does, because it might have some cropping.
-        // Scaling takes place with respect to the (0, 0, 0) point, so we must apply a Translation to compensate.
-        float[] transform = frame.transform;
-        float scaleX = mConfig.scaleX;
-        float scaleY = mConfig.scaleY;
-        float scaleTranslX = (1F - scaleX) / 2F;
-        float scaleTranslY = (1F - scaleY) / 2F;
-        Matrix.translateM(transform, 0, scaleTranslX, scaleTranslY, 0);
-        Matrix.scaleM(transform, 0, scaleX, scaleY, 1);
+            // 1. We must scale this matrix like GlCameraPreview does, because it might have some cropping.
+            // Scaling takes place with respect to the (0, 0, 0) point, so we must apply a Translation to compensate.
+            float[] transform = frame.transform;
+            float scaleX = mConfig.scaleX;
+            float scaleY = mConfig.scaleY;
+            float scaleTranslX = (1F - scaleX) / 2F;
+            float scaleTranslY = (1F - scaleY) / 2F;
+            Matrix.translateM(transform, 0, scaleTranslX, scaleTranslY, 0);
+            Matrix.scaleM(transform, 0, scaleX, scaleY, 1);
 
-        // 2. We also must rotate this matrix. In GlCameraPreview it is not needed because it is a live
-        // stream, but the output video, must be correctly rotated based on the device rotation at the moment.
-        // Rotation also takes place with respect to the origin (the Z axis), so we must
-        // translate to origin, rotate, then back to where we were.
-        Matrix.translateM(transform, 0, 0.5F, 0.5F, 0);
-        Matrix.rotateM(transform, 0, mTransformRotation, 0, 0, 1);
-        Matrix.translateM(transform, 0, -0.5F, -0.5F, 0);
+            // 2. We also must rotate this matrix. In GlCameraPreview it is not needed because it is a live
+            // stream, but the output video, must be correctly rotated based on the device rotation at the moment.
+            // Rotation also takes place with respect to the origin (the Z axis), so we must
+            // translate to origin, rotate, then back to where we were.
+            Matrix.translateM(transform, 0, 0.5F, 0.5F, 0);
+            Matrix.rotateM(transform, 0, mTransformRotation, 0, 0, 1);
+            Matrix.translateM(transform, 0, -0.5F, -0.5F, 0);
 
-        // 3. Do the same for overlays with their own rotation.
-        if (mConfig.hasOverlay()) {
-            mConfig.overlayDrawer.draw(mConfig.overlayTarget);
-            Matrix.translateM(mConfig.overlayDrawer.getTransform(), 0, 0.5F, 0.5F, 0);
-            Matrix.rotateM(mConfig.overlayDrawer.getTransform(), 0, mConfig.overlayRotation, 0, 0, 1);
-            Matrix.translateM(mConfig.overlayDrawer.getTransform(), 0, -0.5F, -0.5F, 0);
-        }
+            // 3. Do the same for overlays with their own rotation.
+            if (mConfig.hasOverlay()) {
+                mConfig.overlayDrawer.draw(mConfig.overlayTarget);
+                Matrix.translateM(mConfig.overlayDrawer.getTransform(), 0, 0.5F, 0.5F, 0);
+                Matrix.rotateM(mConfig.overlayDrawer.getTransform(), 0, mConfig.overlayRotation, 0, 0, 1);
+                Matrix.translateM(mConfig.overlayDrawer.getTransform(), 0, -0.5F, -0.5F, 0);
+            }
 
-        mViewport.drawFrame(mConfig.textureId, transform);
-        if (mConfig.hasOverlay()) {
-            mConfig.overlayDrawer.render();
-        }
-        mWindow.setPresentationTime(frame.timestampNanos);
-        mWindow.swapBuffers();
-        mFramePool.recycle(frame);
+            mViewport.drawFrame(mConfig.textureId, transform);
+            if (mConfig.hasOverlay()) {
+                mConfig.overlayDrawer.render();
+            }
+            mWindow.setPresentationTime(frame.timestampNanos);
+            mWindow.swapBuffers();
+            mFramePool.recycle(frame);
+        }
     }
 
     @Override
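A side note on steps 1 and 2 of the hunk above: both transforms have to pivot around the center of the frame, but android.opengl.Matrix scales and rotates around the origin, which is why the code wraps each operation in compensating translations. A minimal self-contained sketch of that pattern, assuming texture coordinates in the [0, 1] range so the center sits at (0.5, 0.5); the class and method names are hypothetical and not part of this change:

    import android.opengl.Matrix;

    public class CenterTransformDemo {
        // Builds a transform that scales and rotates around the frame center,
        // mirroring steps 1 and 2 of the patch but starting from the identity
        // instead of the SurfaceTexture transform.
        public static float[] buildTransform(float scaleX, float scaleY, float rotationDeg) {
            float[] m = new float[16];
            Matrix.setIdentityM(m, 0);

            // Scale about the center: plain scaling pulls content toward (0, 0, 0),
            // so translate by half of the removed size first to keep the crop centered.
            Matrix.translateM(m, 0, (1F - scaleX) / 2F, (1F - scaleY) / 2F, 0);
            Matrix.scaleM(m, 0, scaleX, scaleY, 1);

            // Rotate about the center (Z axis): move the center to the origin,
            // rotate, then move back.
            Matrix.translateM(m, 0, 0.5F, 0.5F, 0);
            Matrix.rotateM(m, 0, rotationDeg, 0, 0, 1);
            Matrix.translateM(m, 0, -0.5F, -0.5F, 0);
            return m;
        }
    }

The patch applies the same translate/transform/translate-back sequence both to frame.transform and to the overlay drawer's transform.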