From be37dabacafa0288349ff026937f2a7a68cc722d Mon Sep 17 00:00:00 2001
From: xufuji456
Date: Tue, 18 Oct 2022 15:26:59 +0800
Subject: [PATCH] Feature: adjust recorder params

---
 ...Recorder.java => CameraVideoRecorder.java} | 155 +++++++++---------
 .../recorder/video/VideoRecorderCore.java     |   4 +-
 .../camerafilter/widget/CameraRender.java     |  12 +-
 .../src/main/res/raw/default_fragment.glsl    |   2 +-
 4 files changed, 87 insertions(+), 86 deletions(-)
 rename CameraFilter/src/main/java/com/frank/camerafilter/recorder/video/{TextureVideoRecorder.java => CameraVideoRecorder.java} (96%)

diff --git a/CameraFilter/src/main/java/com/frank/camerafilter/recorder/video/TextureVideoRecorder.java b/CameraFilter/src/main/java/com/frank/camerafilter/recorder/video/CameraVideoRecorder.java
similarity index 96%
rename from CameraFilter/src/main/java/com/frank/camerafilter/recorder/video/TextureVideoRecorder.java
rename to CameraFilter/src/main/java/com/frank/camerafilter/recorder/video/CameraVideoRecorder.java
index 7535e9e..f2aabc8 100644
--- a/CameraFilter/src/main/java/com/frank/camerafilter/recorder/video/TextureVideoRecorder.java
+++ b/CameraFilter/src/main/java/com/frank/camerafilter/recorder/video/CameraVideoRecorder.java
@@ -43,9 +43,9 @@ import java.nio.FloatBuffer;
  * call TextureMovieEncoder#frameAvailable().
  *
  */
-public class TextureVideoRecorder implements Runnable {
+public class CameraVideoRecorder implements Runnable {
 
-    private final static String TAG = TextureVideoRecorder.class.getSimpleName();
+    private final static String TAG = CameraVideoRecorder.class.getSimpleName();
 
     private final static int MSG_START_RECORDING = 0;
     private final static int MSG_STOP_RECORDING = 1;
@@ -80,7 +80,7 @@ public class TextureVideoRecorder implements Runnable {
 
     private BeautyFilterType type = BeautyFilterType.NONE;
 
-    public TextureVideoRecorder(Context context) {
+    public CameraVideoRecorder(Context context) {
         mContext = context;
     }
 
@@ -101,88 +101,17 @@ public class TextureVideoRecorder implements Runnable {
     }
 
-    public void startRecording(RecorderConfig config) {
-        synchronized (mReadyFence) {
-            if (mRunning) {
-                return;
-            }
-            mRunning = true;
-            new Thread(this, TAG).start();
-            while (!mReady) {
-                try {
-                    mReadyFence.wait();
-                } catch (InterruptedException e) {
-                    e.printStackTrace();
-                }
-            }
-        }
-        mHandler.sendMessage(mHandler.obtainMessage(MSG_START_RECORDING, config));
-    }
-
-    public void stopRecording() {
-        mHandler.sendMessage(mHandler.obtainMessage(MSG_STOP_RECORDING));
-        mHandler.sendMessage(mHandler.obtainMessage(MSG_QUIT_RECORDING));
-    }
-
-    public boolean isRecording() {
-        synchronized (mReadyFence) {
-            return mRunning;
-        }
-    }
-
-    public void updateSharedContext(EGLContext eglContext) {
-        mHandler.sendMessage(mHandler.obtainMessage(MSG_UPDATE_SHARED_CONTEXT, eglContext));
-    }
-
-    public void frameAvailable(SurfaceTexture surfaceTexture) {
-        synchronized (mReadyFence) {
-            if (!mReady)
-                return;
-        }
-        float[] transform = new float[16];
-        surfaceTexture.getTransformMatrix(transform);
-        long timestamp = surfaceTexture.getTimestamp();
-        if (timestamp == 0) {
-            return;
-        }
-        mHandler.sendMessage(mHandler.obtainMessage(MSG_FRAME_AVAILABLE, (int) (timestamp >> 32), (int) timestamp, transform));
-    }
-
-    public void setTextureId(int id) {
-        synchronized (mReadyFence) {
-            if (!mReady)
-                return;
-        }
-        mHandler.sendMessage(mHandler.obtainMessage(MSG_SET_TEXTURE_ID, id, 0, null));
-    }
-
-    @Override
-    public void run() {
-        Looper.prepare();
-        synchronized (mReadyFence) {
-            mHandler = new RecorderHandler(this);
-            mReady = true;
-            mReadyFence.notify();
-        }
-        Looper.loop();
-        synchronized (mReadyFence) {
-            mReady = false;
-            mRunning = false;
-            mHandler = null;
-        }
-    }
-
     private static class RecorderHandler extends Handler {
-        private final WeakReference<TextureVideoRecorder> mWeakRecorder;
+        private final WeakReference<CameraVideoRecorder> mWeakRecorder;
 
-        public RecorderHandler(TextureVideoRecorder recorder) {
+        public RecorderHandler(CameraVideoRecorder recorder) {
             mWeakRecorder = new WeakReference<>(recorder);
         }
 
         @Override
         public void handleMessage(@NonNull Message msg) {
             Object obj = msg.obj;
-            TextureVideoRecorder recorder = mWeakRecorder.get();
+            CameraVideoRecorder recorder = mWeakRecorder.get();
             if (recorder == null) {
                 return;
             }
@@ -305,6 +234,78 @@ public class TextureVideoRecorder implements Runnable {
             mEglCore = null;
         }
     }
+
+    public void startRecording(RecorderConfig config) {
+        synchronized (mReadyFence) {
+            if (mRunning) {
+                return;
+            }
+            mRunning = true;
+            new Thread(this, TAG).start();
+            while (!mReady) {
+                try {
+                    mReadyFence.wait();
+                } catch (InterruptedException e) {
+                    e.printStackTrace();
+                }
+            }
+        }
+        mHandler.sendMessage(mHandler.obtainMessage(MSG_START_RECORDING, config));
+    }
+
+    public void stopRecording() {
+        mHandler.sendMessage(mHandler.obtainMessage(MSG_STOP_RECORDING));
+        mHandler.sendMessage(mHandler.obtainMessage(MSG_QUIT_RECORDING));
+    }
+
+    public boolean isRecording() {
+        synchronized (mReadyFence) {
+            return mRunning;
+        }
+    }
+
+    public void updateSharedContext(EGLContext eglContext) {
+        mHandler.sendMessage(mHandler.obtainMessage(MSG_UPDATE_SHARED_CONTEXT, eglContext));
+    }
+
+    public void frameAvailable(SurfaceTexture surfaceTexture) {
+        synchronized (mReadyFence) {
+            if (!mReady)
+                return;
+        }
+        float[] transform = new float[16];
+        surfaceTexture.getTransformMatrix(transform);
+        long timestamp = surfaceTexture.getTimestamp();
+        if (timestamp == 0) {
+            return;
+        }
+        mHandler.sendMessage(mHandler.obtainMessage(MSG_FRAME_AVAILABLE, (int) (timestamp >> 32), (int) timestamp, transform));
+    }
+
+    public void setTextureId(int id) {
+        synchronized (mReadyFence) {
+            if (!mReady)
+                return;
+        }
+        mHandler.sendMessage(mHandler.obtainMessage(MSG_SET_TEXTURE_ID, id, 0, null));
+    }
+
+    @Override
+    public void run() {
+        Looper.prepare();
+        synchronized (mReadyFence) {
+            mHandler = new RecorderHandler(this);
+            mReady = true;
+            mReadyFence.notify();
+        }
+        Looper.loop();
+        synchronized (mReadyFence) {
+            mReady = false;
+            mRunning = false;
+            mHandler = null;
+        }
+    }
+
     public void setFilter(BeautyFilterType type) {
         this.type = type;
     }
diff --git a/CameraFilter/src/main/java/com/frank/camerafilter/recorder/video/VideoRecorderCore.java b/CameraFilter/src/main/java/com/frank/camerafilter/recorder/video/VideoRecorderCore.java
index 91b47e5..80ff154 100644
--- a/CameraFilter/src/main/java/com/frank/camerafilter/recorder/video/VideoRecorderCore.java
+++ b/CameraFilter/src/main/java/com/frank/camerafilter/recorder/video/VideoRecorderCore.java
@@ -26,9 +26,9 @@ public class VideoRecorderCore {
     private final static String TAG = VideoRecorderCore.class.getSimpleName();
 
     private final static int FRAME_RATE = 30;
-    private final static int IFRAME_INTERVAL = 5;
+    private final static int IFRAME_INTERVAL = 30;
     private final static String MIME_TYPE = "video/avc";
-    private final static int TIMEOUT_USEC = 10000;
+    private final static int TIMEOUT_USEC = 20000;
 
     private int mTrackIndex;
     private boolean mMuxerStarted;
diff --git a/CameraFilter/src/main/java/com/frank/camerafilter/widget/CameraRender.java b/CameraFilter/src/main/java/com/frank/camerafilter/widget/CameraRender.java
index 9b989a4..5955969 100644
--- a/CameraFilter/src/main/java/com/frank/camerafilter/widget/CameraRender.java
+++ b/CameraFilter/src/main/java/com/frank/camerafilter/widget/CameraRender.java
@@ -8,11 +8,11 @@ import android.opengl.GLSurfaceView;
 import android.os.Environment;
 
 import com.frank.camerafilter.camera.CameraManager;
-import com.frank.camerafilter.filter.BeautyCameraFilter;
-import com.frank.camerafilter.filter.BaseFilter;
 import com.frank.camerafilter.factory.BeautyFilterFactory;
 import com.frank.camerafilter.factory.BeautyFilterType;
-import com.frank.camerafilter.recorder.video.TextureVideoRecorder;
+import com.frank.camerafilter.filter.BeautyCameraFilter;
+import com.frank.camerafilter.filter.BaseFilter;
+import com.frank.camerafilter.recorder.video.CameraVideoRecorder;
 import com.frank.camerafilter.util.OpenGLUtil;
 import com.frank.camerafilter.util.Rotation;
 import com.frank.camerafilter.util.TextureRotateUtil;
@@ -50,7 +50,7 @@ public class CameraRender implements GLSurfaceView.Renderer, SurfaceTexture.OnFr
     private final File outputFile;
     private int recordStatus;
     protected boolean recordEnable;
-    private final TextureVideoRecorder videoRecorder;
+    private final CameraVideoRecorder videoRecorder;
 
     private final static int RECORDING_OFF = 0;
     private final static int RECORDING_ON = 1;
@@ -75,7 +75,7 @@ public class CameraRender implements GLSurfaceView.Renderer, SurfaceTexture.OnFr
 
         recordEnable = false;
         recordStatus = RECORDING_OFF;
-        videoRecorder = new TextureVideoRecorder(mCameraView.getContext());
+        videoRecorder = new CameraVideoRecorder(mCameraView.getContext());
         outputFile = new File(videoPath, videoName);
     }
 
@@ -178,7 +178,7 @@ public class CameraRender implements GLSurfaceView.Renderer, SurfaceTexture.OnFr
                 videoRecorder.setPreviewSize(mImageWidth, mImageHeight);
                 videoRecorder.setTextureBuffer(mTextureBuffer);
                 videoRecorder.setCubeBuffer(mVertexBuffer);
-                videoRecorder.startRecording(new TextureVideoRecorder.RecorderConfig(
+                videoRecorder.startRecording(new CameraVideoRecorder.RecorderConfig(
                         mImageWidth,
                         mImageHeight,
                         videoBitrate,
diff --git a/CameraFilter/src/main/res/raw/default_fragment.glsl b/CameraFilter/src/main/res/raw/default_fragment.glsl
index 677fb07..c3c80cb 100644
--- a/CameraFilter/src/main/res/raw/default_fragment.glsl
+++ b/CameraFilter/src/main/res/raw/default_fragment.glsl
@@ -9,5 +9,5 @@ uniform samplerExternalOES inputImageTexture;
 
 void main(){
     vec3 centralColor = texture2D(inputImageTexture, textureCoordinate).rgb;
-    gl_FragColor = vec4(centralColor.rgb,1.0);;
+    gl_FragColor = vec4(centralColor.rgb,1.0);
 }
\ No newline at end of file