From cfc46afaf21bf8f111b25d4c18a25743f60ae2b1 Mon Sep 17 00:00:00 2001 From: Mattia Iavarone Date: Mon, 17 Sep 2018 20:34:42 -0300 Subject: [PATCH] Create new MediaEncoder interfaces to support audio --- MIGRATION.md | 2 +- .../cameraview/AudioMediaEncoder.java | 44 ++++++ .../cameraview/MediaEncoder.java | 112 +++++++++++++++ .../cameraview/MediaEncoderEngine.java | 87 ++++++++++++ ...xtureEncoder.java => OldMediaEncoder.java} | 22 +-- ...eEncoder.java => OldMediaEncoderCore.java} | 76 ++++++----- .../cameraview/TextureMediaEncoder.java | 127 ++++++++++++++++++ .../cameraview/VideoMediaEncoder.java | 90 +++++++++++++ .../cameraview/SnapshotVideoRecorder.java | 89 ++++++++---- 9 files changed, 580 insertions(+), 69 deletions(-) create mode 100644 cameraview/src/main/gles/com/otaliastudios/cameraview/AudioMediaEncoder.java create mode 100644 cameraview/src/main/gles/com/otaliastudios/cameraview/MediaEncoder.java create mode 100644 cameraview/src/main/gles/com/otaliastudios/cameraview/MediaEncoderEngine.java rename cameraview/src/main/gles/com/otaliastudios/cameraview/{VideoTextureEncoder.java => OldMediaEncoder.java} (94%) rename cameraview/src/main/gles/com/otaliastudios/cameraview/{VideoCoreEncoder.java => OldMediaEncoderCore.java} (72%) create mode 100644 cameraview/src/main/gles/com/otaliastudios/cameraview/TextureMediaEncoder.java create mode 100644 cameraview/src/main/gles/com/otaliastudios/cameraview/VideoMediaEncoder.java diff --git a/MIGRATION.md b/MIGRATION.md index 1e399200..04a59184 100644 --- a/MIGRATION.md +++ b/MIGRATION.md @@ -1,4 +1,4 @@ -# Migrating to v2 +# Migrating to v2.X.X - JpegQuality: both cameraJpegQuality and setJpegQuality() have been removed, because they were working only with specific setups. We'll use the default quality provided diff --git a/cameraview/src/main/gles/com/otaliastudios/cameraview/AudioMediaEncoder.java b/cameraview/src/main/gles/com/otaliastudios/cameraview/AudioMediaEncoder.java new file mode 100644 index 00000000..717c6882 --- /dev/null +++ b/cameraview/src/main/gles/com/otaliastudios/cameraview/AudioMediaEncoder.java @@ -0,0 +1,44 @@ +package com.otaliastudios.cameraview; + +import android.media.MediaMuxer; +import android.os.Build; +import android.support.annotation.NonNull; +import android.support.annotation.RequiresApi; + +@RequiresApi(api = Build.VERSION_CODES.JELLY_BEAN_MR2) +class AudioMediaEncoder extends MediaEncoder { + + + static class Config { + Config() { } + } + + AudioMediaEncoder(@NonNull Config config) { + + } + + @Override + void prepare(MediaMuxer muxer) { + super.prepare(muxer); + } + + @Override + void start() { + + } + + @Override + void notify(String event, Object data) { + + } + + @Override + void stop() { + + } + + @Override + void release() { + + } +} diff --git a/cameraview/src/main/gles/com/otaliastudios/cameraview/MediaEncoder.java b/cameraview/src/main/gles/com/otaliastudios/cameraview/MediaEncoder.java new file mode 100644 index 00000000..d7c39d5e --- /dev/null +++ b/cameraview/src/main/gles/com/otaliastudios/cameraview/MediaEncoder.java @@ -0,0 +1,112 @@ +package com.otaliastudios.cameraview; + +import android.media.MediaCodec; +import android.media.MediaFormat; +import android.media.MediaMuxer; +import android.os.Build; +import android.support.annotation.RequiresApi; +import android.util.Log; + +import java.nio.ByteBuffer; + +// https://github.com/saki4510t/AudioVideoRecordingSample/blob/master/app/src/main/java/com/serenegiant/encoder/MediaEncoder.java +@RequiresApi(api = Build.VERSION_CODES.JELLY_BEAN_MR2) 
+abstract class MediaEncoder { + + private final static int TIMEOUT_USEC = 10000; // 10 msec + + protected MediaCodec mMediaCodec; + + private MediaCodec.BufferInfo mBufferInfo; + private MediaMuxer mMuxer; + private int mTrackIndex; + private boolean mTrackStarted; + + MediaEncoder() { + } + + void prepare(MediaMuxer muxer) { + mMuxer = muxer; + mBufferInfo = new MediaCodec.BufferInfo(); + } + + abstract void start(); + abstract void notify(String event, Object data); + abstract void stop(); + abstract void release(); + + /** + * Extracts all pending data from the encoder and forwards it to the muxer. + *
<p>
+ * If endOfStream is not set, this returns when there is no more data to drain. If it + * is set, we send EOS to the encoder, and then iterate until we see EOS on the output. + * Calling this with endOfStream set should be done once, right before stopping the muxer. + *
<p>
+ * We're just using the muxer to get a .mp4 file (instead of a raw H.264 stream). We're + * not recording audio. + */ + protected void drain(boolean endOfStream) { + if (endOfStream) { + mMediaCodec.signalEndOfInputStream(); + } + + ByteBuffer[] encoderOutputBuffers = mMediaCodec.getOutputBuffers(); + while (true) { + int encoderStatus = mMediaCodec.dequeueOutputBuffer(mBufferInfo, TIMEOUT_USEC); + if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) { + // no output available yet + if (!endOfStream) { + break; // out of while + } + } else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) { + // not expected for an encoder + encoderOutputBuffers = mMediaCodec.getOutputBuffers(); + } else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) { + // should happen before receiving buffers, and should only happen once + if (mTrackStarted) { + throw new RuntimeException("format changed twice"); + } + MediaFormat newFormat = mMediaCodec.getOutputFormat(); + + // now that we have the Magic Goodies, start the muxer + mTrackIndex = mMuxer.addTrack(newFormat); + // TODO this is wrong. Look at the Github project: it is a muxer wrapper. + // If you have multiple encoders this breaks. + mMuxer.start(); + mTrackStarted = true; + } else if (encoderStatus < 0) { + Log.w("VideoMediaEncoder", "unexpected result from encoder.dequeueOutputBuffer: " + encoderStatus); + // let's ignore it + } else { + ByteBuffer encodedData = encoderOutputBuffers[encoderStatus]; + if (encodedData == null) { + throw new RuntimeException("encoderOutputBuffer " + encoderStatus + " was null"); + } + + if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) { + // The codec config data was pulled out and fed to the muxer when we got + // the INFO_OUTPUT_FORMAT_CHANGED status. Ignore it. + mBufferInfo.size = 0; + } + + if (mBufferInfo.size != 0) { + if (!mTrackStarted) { + throw new RuntimeException("muxer hasn't started"); + } + + // adjust the ByteBuffer values to match BufferInfo (not needed?) 
+ encodedData.position(mBufferInfo.offset); + encodedData.limit(mBufferInfo.offset + mBufferInfo.size); + mMuxer.writeSampleData(mTrackIndex, encodedData, mBufferInfo); + } + mMediaCodec.releaseOutputBuffer(encoderStatus, false); + if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) { + if (!endOfStream) { + Log.w("VideoMediaEncoder", "reached end of stream unexpectedly"); + } + break; // out of while + } + } + } + } +} diff --git a/cameraview/src/main/gles/com/otaliastudios/cameraview/MediaEncoderEngine.java b/cameraview/src/main/gles/com/otaliastudios/cameraview/MediaEncoderEngine.java new file mode 100644 index 00000000..9ef1679f --- /dev/null +++ b/cameraview/src/main/gles/com/otaliastudios/cameraview/MediaEncoderEngine.java @@ -0,0 +1,87 @@ +package com.otaliastudios.cameraview; + +import android.graphics.SurfaceTexture; +import android.media.MediaMuxer; +import android.os.Build; +import android.support.annotation.NonNull; +import android.support.annotation.Nullable; +import android.support.annotation.RequiresApi; + +import java.io.File; +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; + +@RequiresApi(api = Build.VERSION_CODES.JELLY_BEAN_MR2) +class MediaEncoderEngine { + + private WorkerHandler mWorker; + private ArrayList mEncoders; + private MediaMuxer mMediaMuxer; + + MediaEncoderEngine(@NonNull File file, @NonNull VideoMediaEncoder videoEncoder, @Nullable AudioMediaEncoder audioEncoder) { + mWorker = WorkerHandler.get("EncoderEngine"); + mEncoders = new ArrayList<>(); + mEncoders.add(videoEncoder); + if (audioEncoder != null) { + mEncoders.add(audioEncoder); + } + try { + mMediaMuxer = new MediaMuxer(file.toString(), MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4); + } catch (IOException e) { + throw new RuntimeException(e); + } + mWorker.post(new Runnable() { + @Override + public void run() { + for (MediaEncoder encoder : mEncoders) { + encoder.prepare(mMediaMuxer); + } + } + }); + } + + void start() { + mWorker.post(new Runnable() { + @Override + public void run() { + for (MediaEncoder encoder : mEncoders) { + encoder.start(); + } + } + }); + } + + void notify(final String event, final Object data) { + mWorker.post(new Runnable() { + @Override + public void run() { + for (MediaEncoder encoder : mEncoders) { + encoder.notify(event, data); + } + } + }); + } + + void stop(final Runnable onStop) { + mWorker.post(new Runnable() { + @Override + public void run() { + for (MediaEncoder encoder : mEncoders) { + encoder.stop(); + } + onStop.run(); + for (MediaEncoder encoder : mEncoders) { + encoder.release(); + } + if (mMediaMuxer != null) { + // TODO: stop() throws an exception if you haven't fed it any data. Keep track + // of frames submitted, and don't call stop() if we haven't written anything. + mMediaMuxer.stop(); + mMediaMuxer.release(); + mMediaMuxer = null; + } + } + }); + } +} diff --git a/cameraview/src/main/gles/com/otaliastudios/cameraview/VideoTextureEncoder.java b/cameraview/src/main/gles/com/otaliastudios/cameraview/OldMediaEncoder.java similarity index 94% rename from cameraview/src/main/gles/com/otaliastudios/cameraview/VideoTextureEncoder.java rename to cameraview/src/main/gles/com/otaliastudios/cameraview/OldMediaEncoder.java index cdc0f197..91dc8d93 100644 --- a/cameraview/src/main/gles/com/otaliastudios/cameraview/VideoTextureEncoder.java +++ b/cameraview/src/main/gles/com/otaliastudios/cameraview/OldMediaEncoder.java @@ -58,8 +58,8 @@ import java.lang.ref.WeakReference; * TODO: tweak the API (esp. 
textureId) so it's less awkward for simple use cases. */ @RequiresApi(api = Build.VERSION_CODES.JELLY_BEAN_MR2) -class VideoTextureEncoder implements Runnable { - private static final String TAG = VideoTextureEncoder.class.getSimpleName(); +class OldMediaEncoder implements Runnable { + private static final String TAG = OldMediaEncoder.class.getSimpleName(); private static final int MSG_START_RECORDING = 0; private static final int MSG_STOP_RECORDING = 1; @@ -73,7 +73,7 @@ class VideoTextureEncoder implements Runnable { private EglViewport mFullScreen; private int mTextureId; private int mFrameNum = -1; // Important - private VideoCoreEncoder mVideoEncoder; + private OldMediaEncoderCore mVideoEncoder; private float mTransformationScaleX = 1F; private float mTransformationScaleY = 1F; private int mTransformationRotation = 0; @@ -131,12 +131,12 @@ class VideoTextureEncoder implements Runnable { } private void prepareEncoder(Config config) { + OldMediaEncoderCore.VideoConfig videoConfig = new OldMediaEncoderCore.VideoConfig( + config.mWidth, config.mHeight, config.mBitRate, config.mFrameRate, + 0, // The video encoder rotation does not work, so we apply it here using Matrix.rotateM(). + config.mMimeType); try { - mVideoEncoder = new VideoCoreEncoder(config.mWidth, config.mHeight, - config.mBitRate, config.mFrameRate, - 0, // The video encoder rotation does not work, so we apply it here using Matrix.rotateM(). - config.mOutputFile, - config.mMimeType); + mVideoEncoder = new OldMediaEncoderCore(videoConfig, config.mOutputFile); } catch (IOException ioe) { throw new RuntimeException(ioe); } @@ -292,9 +292,9 @@ class VideoTextureEncoder implements Runnable { * Handles encoder state change requests. The handler is created on the encoder thread. */ private static class EncoderHandler extends Handler { - private WeakReference mWeakEncoder; + private WeakReference mWeakEncoder; - public EncoderHandler(VideoTextureEncoder encoder) { + public EncoderHandler(OldMediaEncoder encoder) { mWeakEncoder = new WeakReference<>(encoder); } @@ -303,7 +303,7 @@ class VideoTextureEncoder implements Runnable { int what = inputMessage.what; Object obj = inputMessage.obj; - VideoTextureEncoder encoder = mWeakEncoder.get(); + OldMediaEncoder encoder = mWeakEncoder.get(); if (encoder == null) { Log.w(TAG, "EncoderHandler.handleMessage: encoder is null"); return; diff --git a/cameraview/src/main/gles/com/otaliastudios/cameraview/VideoCoreEncoder.java b/cameraview/src/main/gles/com/otaliastudios/cameraview/OldMediaEncoderCore.java similarity index 72% rename from cameraview/src/main/gles/com/otaliastudios/cameraview/VideoCoreEncoder.java rename to cameraview/src/main/gles/com/otaliastudios/cameraview/OldMediaEncoderCore.java index bcf3286c..3123e460 100644 --- a/cameraview/src/main/gles/com/otaliastudios/cameraview/VideoCoreEncoder.java +++ b/cameraview/src/main/gles/com/otaliastudios/cameraview/OldMediaEncoderCore.java @@ -43,40 +43,55 @@ import java.nio.ByteBuffer; * on one thread, and drain the output on a different thread. 
*/ @RequiresApi(api = Build.VERSION_CODES.JELLY_BEAN_MR2) -class VideoCoreEncoder { +class OldMediaEncoderCore { - private Surface mInputSurface; private MediaMuxer mMuxer; - private MediaCodec mEncoder; + private boolean mMuxerStarted; + private MediaCodec mVideoEncoder; + private Surface mVideoInputSurface; private MediaCodec.BufferInfo mBufferInfo; private int mTrackIndex; - private boolean mMuxerStarted; + static class VideoConfig { + int width; + int height; + int bitRate; + int frameRate; + int rotation; + String mimeType; + + VideoConfig(int width, int height, int bitRate, int frameRate, int rotation, String mimeType) { + this.width = width; + this.height = height; + this.bitRate = bitRate; + this.frameRate = frameRate; + this.rotation = rotation; + this.mimeType = mimeType; + } + } /** * Configures encoder and muxer state, and prepares the input Surface. */ - public VideoCoreEncoder(int width, int height, int bitRate, int frameRate, int rotation, File outputFile, String mimeType) - throws IOException { + OldMediaEncoderCore(VideoConfig videoConfig, File outputFile) throws IOException { mBufferInfo = new MediaCodec.BufferInfo(); - MediaFormat format = MediaFormat.createVideoFormat(mimeType, width, height); + MediaFormat format = MediaFormat.createVideoFormat(videoConfig.mimeType, videoConfig.width, videoConfig.height); // Set some properties. Failing to specify some of these can cause the MediaCodec // configure() call to throw an unhelpful exception. - format.setInteger(MediaFormat.KEY_COLOR_FORMAT, - MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface); - format.setInteger(MediaFormat.KEY_BIT_RATE, bitRate); - format.setInteger(MediaFormat.KEY_FRAME_RATE, frameRate); + format.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface); + format.setInteger(MediaFormat.KEY_BIT_RATE, videoConfig.bitRate); + format.setInteger(MediaFormat.KEY_FRAME_RATE, videoConfig.frameRate); format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 5); - format.setInteger("rotation-degrees", rotation); + format.setInteger("rotation-degrees", videoConfig.rotation); // Create a MediaCodec encoder, and configure it with our format. Get a Surface // we can use for input and wrap it with a class that handles the EGL work. - mEncoder = MediaCodec.createEncoderByType(mimeType); - mEncoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE); - mInputSurface = mEncoder.createInputSurface(); - mEncoder.start(); + mVideoEncoder = MediaCodec.createEncoderByType(videoConfig.mimeType); + mVideoEncoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE); + mVideoInputSurface = mVideoEncoder.createInputSurface(); + mVideoEncoder.start(); // Create a MediaMuxer. We can't add the video track and start() the muxer here, // because our MediaFormat doesn't have the Magic Goodies. These can only be @@ -84,8 +99,7 @@ class VideoCoreEncoder { // // We're not actually interested in multiplexing audio. We just want to convert // the raw H.264 elementary stream we get from MediaCodec into a .mp4 file. - mMuxer = new MediaMuxer(outputFile.toString(), - MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4); + mMuxer = new MediaMuxer(outputFile.toString(), MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4); mTrackIndex = -1; mMuxerStarted = false; @@ -95,17 +109,17 @@ class VideoCoreEncoder { * Returns the encoder's input surface. */ public Surface getInputSurface() { - return mInputSurface; + return mVideoInputSurface; } /** * Releases encoder resources. 
*/ public void release() { - if (mEncoder != null) { - mEncoder.stop(); - mEncoder.release(); - mEncoder = null; + if (mVideoEncoder != null) { + mVideoEncoder.stop(); + mVideoEncoder.release(); + mVideoEncoder = null; } if (mMuxer != null) { // TODO: stop() throws an exception if you haven't fed it any data. Keep track @@ -130,12 +144,12 @@ class VideoCoreEncoder { final int TIMEOUT_USEC = 10000; if (endOfStream) { - mEncoder.signalEndOfInputStream(); + mVideoEncoder.signalEndOfInputStream(); } - ByteBuffer[] encoderOutputBuffers = mEncoder.getOutputBuffers(); + ByteBuffer[] encoderOutputBuffers = mVideoEncoder.getOutputBuffers(); while (true) { - int encoderStatus = mEncoder.dequeueOutputBuffer(mBufferInfo, TIMEOUT_USEC); + int encoderStatus = mVideoEncoder.dequeueOutputBuffer(mBufferInfo, TIMEOUT_USEC); if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) { // no output available yet if (!endOfStream) { @@ -143,20 +157,20 @@ class VideoCoreEncoder { } } else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) { // not expected for an encoder - encoderOutputBuffers = mEncoder.getOutputBuffers(); + encoderOutputBuffers = mVideoEncoder.getOutputBuffers(); } else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) { // should happen before receiving buffers, and should only happen once if (mMuxerStarted) { throw new RuntimeException("format changed twice"); } - MediaFormat newFormat = mEncoder.getOutputFormat(); + MediaFormat newFormat = mVideoEncoder.getOutputFormat(); // now that we have the Magic Goodies, start the muxer mTrackIndex = mMuxer.addTrack(newFormat); mMuxer.start(); mMuxerStarted = true; } else if (encoderStatus < 0) { - Log.w("VideoCoreEncoder", "unexpected result from encoder.dequeueOutputBuffer: " + encoderStatus); + Log.w("OldMediaEncoderCore", "unexpected result from encoder.dequeueOutputBuffer: " + encoderStatus); // let's ignore it } else { ByteBuffer encodedData = encoderOutputBuffers[encoderStatus]; @@ -182,11 +196,11 @@ class VideoCoreEncoder { mMuxer.writeSampleData(mTrackIndex, encodedData, mBufferInfo); } - mEncoder.releaseOutputBuffer(encoderStatus, false); + mVideoEncoder.releaseOutputBuffer(encoderStatus, false); if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) { if (!endOfStream) { - Log.w("VideoCoreEncoder", "reached end of stream unexpectedly"); + Log.w("OldMediaEncoderCore", "reached end of stream unexpectedly"); } break; // out of while } diff --git a/cameraview/src/main/gles/com/otaliastudios/cameraview/TextureMediaEncoder.java b/cameraview/src/main/gles/com/otaliastudios/cameraview/TextureMediaEncoder.java new file mode 100644 index 00000000..63d341f0 --- /dev/null +++ b/cameraview/src/main/gles/com/otaliastudios/cameraview/TextureMediaEncoder.java @@ -0,0 +1,127 @@ +package com.otaliastudios.cameraview; + +import android.graphics.SurfaceTexture; +import android.media.MediaCodec; +import android.media.MediaCodecInfo; +import android.media.MediaFormat; +import android.media.MediaMuxer; +import android.opengl.EGLContext; +import android.opengl.Matrix; +import android.os.Build; +import android.support.annotation.NonNull; +import android.support.annotation.RequiresApi; +import android.util.Log; +import android.view.Surface; + +import java.io.IOException; + +@RequiresApi(api = Build.VERSION_CODES.JELLY_BEAN_MR2) +class TextureMediaEncoder extends VideoMediaEncoder { + + public final static String FRAME_EVENT = "frame"; + + static class Frame { + float[] transform; + long timestamp; + } + static class Config extends 
VideoMediaEncoder.Config { + int textureId; + float scaleX; + float scaleY; + EGLContext eglContext; + + Config(int width, int height, int bitRate, int frameRate, int rotation, String mimeType, + int textureId, float scaleX, float scaleY, EGLContext eglContext) { + super(width, height, bitRate, frameRate, rotation, mimeType); + this.textureId = textureId; + this.scaleX = scaleX; + this.scaleY = scaleY; + this.eglContext = eglContext; + } + } + + private EglCore mEglCore; + private EglWindowSurface mWindow; + private EglViewport mViewport; + + public TextureMediaEncoder(@NonNull Config config) { + super(config); + } + + @Override + void prepare(MediaMuxer muxer) { + super.prepare(muxer); + mEglCore = new EglCore(mConfig.eglContext, EglCore.FLAG_RECORDABLE); + mWindow = new EglWindowSurface(mEglCore, mSurface, true); + mWindow.makeCurrent(); // drawing will happen on the InputWindowSurface, which + // is backed by mVideoEncoder.getInputSurface() + mViewport = new EglViewport(); + } + + @Override + void release() { + super.release(); + if (mWindow != null) { + mWindow.release(); + mWindow = null; + } + if (mViewport != null) { + mViewport.release(true); + mViewport = null; + } + if (mEglCore != null) { + mEglCore.release(); + mEglCore = null; + } + } + + @Override + void start() { + super.start(); + // Nothing to do here. Waiting for the first frame. + } + + @Override + void notify(String event, Object data) { + if (event.equals(FRAME_EVENT)) { + Frame frame = (Frame) data; + + // Seeing this after device is toggled off/on with power button. The + // first frame back has a zero timestamp. + // MPEG4Writer thinks this is cause to abort() in native code, so it's very + // important that we just ignore the frame. + if (frame.timestamp == 0) return; + if (mFrameNum < 0) return; + mFrameNum++; + + int arg1 = (int) (frame.timestamp >> 32); + int arg2 = (int) frame.timestamp; + long timestamp = (((long) arg1) << 32) | (((long) arg2) & 0xffffffffL); + float[] transform = frame.transform; + + // We must scale this matrix like GLCameraPreview does, because it might have some cropping. + // Scaling takes place with respect to the (0, 0, 0) point, so we must apply a Translation to compensate. + + float scaleX = mConfig.scaleX; + float scaleY = mConfig.scaleY; + float scaleTranslX = (1F - scaleX) / 2F; + float scaleTranslY = (1F - scaleY) / 2F; + Matrix.translateM(transform, 0, scaleTranslX, scaleTranslY, 0); + Matrix.scaleM(transform, 0, scaleX, scaleY, 1); + + // We also must rotate this matrix. In GLCameraPreview it is not needed because it is a live + // stream, but the output video, must be correctly rotated based on the device rotation at the moment. + // Rotation also takes place with respect to the origin (the Z axis), so we must + // translate to origin, rotate, then back to where we were. 
+ + Matrix.translateM(transform, 0, 0.5F, 0.5F, 0); + Matrix.rotateM(transform, 0, mConfig.rotation, 0, 0, 1); + Matrix.translateM(transform, 0, -0.5F, -0.5F, 0); + + drain(false); + mViewport.drawFrame(mConfig.textureId, transform); + mWindow.setPresentationTime(timestamp); + mWindow.swapBuffers(); + } + } +} diff --git a/cameraview/src/main/gles/com/otaliastudios/cameraview/VideoMediaEncoder.java b/cameraview/src/main/gles/com/otaliastudios/cameraview/VideoMediaEncoder.java new file mode 100644 index 00000000..948ee7f8 --- /dev/null +++ b/cameraview/src/main/gles/com/otaliastudios/cameraview/VideoMediaEncoder.java @@ -0,0 +1,90 @@ +package com.otaliastudios.cameraview; + +import android.media.MediaCodec; +import android.media.MediaCodecInfo; +import android.media.MediaFormat; +import android.media.MediaMuxer; +import android.os.Build; +import android.support.annotation.NonNull; +import android.support.annotation.RequiresApi; +import android.util.Log; +import android.view.Surface; + +import java.io.IOException; +import java.nio.ByteBuffer; + +@RequiresApi(api = Build.VERSION_CODES.JELLY_BEAN_MR2) +abstract class VideoMediaEncoder extends MediaEncoder { + + protected C mConfig; + protected Surface mSurface; + protected int mFrameNum = -1; + + static class Config { + int width; + int height; + int bitRate; + int frameRate; + int rotation; + String mimeType; + + Config(int width, int height, int bitRate, int frameRate, int rotation, String mimeType) { + this.width = width; + this.height = height; + this.bitRate = bitRate; + this.frameRate = frameRate; + this.rotation = rotation; + this.mimeType = mimeType; + } + } + + VideoMediaEncoder(@NonNull C config) { + mConfig = config; + } + + @Override + void prepare(MediaMuxer muxer) { + super.prepare(muxer); + MediaFormat format = MediaFormat.createVideoFormat(mConfig.mimeType, mConfig.width, mConfig.height); + + // Set some properties. Failing to specify some of these can cause the MediaCodec + // configure() call to throw an unhelpful exception. + format.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface); + format.setInteger(MediaFormat.KEY_BIT_RATE, mConfig.bitRate); + format.setInteger(MediaFormat.KEY_FRAME_RATE, mConfig.frameRate); + format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 5); + format.setInteger("rotation-degrees", mConfig.rotation); + + // Create a MediaCodec encoder, and configure it with our format. Get a Surface + // we can use for input and wrap it with a class that handles the EGL work. + try { + mMediaCodec = MediaCodec.createEncoderByType(mConfig.mimeType); + } catch (IOException e) { + throw new RuntimeException(e); + } + mMediaCodec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE); + mSurface = mMediaCodec.createInputSurface(); + mMediaCodec.start(); + } + + @Override + void start() { + // Nothing to do here. Waiting for the first frame. 
+ mFrameNum = 0; + } + + @Override + void stop() { + mFrameNum = -1; + drain(true); + } + + @Override + void release() { + if (mMediaCodec != null) { + mMediaCodec.stop(); + mMediaCodec.release(); + mMediaCodec = null; + } + } +} diff --git a/cameraview/src/main/java/com/otaliastudios/cameraview/SnapshotVideoRecorder.java b/cameraview/src/main/java/com/otaliastudios/cameraview/SnapshotVideoRecorder.java index b56711fc..8b1efa3d 100644 --- a/cameraview/src/main/java/com/otaliastudios/cameraview/SnapshotVideoRecorder.java +++ b/cameraview/src/main/java/com/otaliastudios/cameraview/SnapshotVideoRecorder.java @@ -18,11 +18,13 @@ class SnapshotVideoRecorder extends VideoRecorder implements GLCameraPreview.Ren private static final String TAG = SnapshotVideoRecorder.class.getSimpleName(); private static final CameraLogger LOG = CameraLogger.create(TAG); + private static final boolean USE_OLD_ENCODER = false; private static final int STATE_RECORDING = 0; private static final int STATE_NOT_RECORDING = 1; - private VideoTextureEncoder mEncoder; + private OldMediaEncoder mEncoder; + private MediaEncoderEngine mEncoderEngine; private GLCameraPreview mPreview; private int mCurrentState = STATE_NOT_RECORDING; @@ -31,7 +33,9 @@ class SnapshotVideoRecorder extends VideoRecorder implements GLCameraPreview.Ren SnapshotVideoRecorder(VideoResult stub, VideoResultListener listener, GLCameraPreview preview) { super(stub, listener); - mEncoder = new VideoTextureEncoder(); + if (USE_OLD_ENCODER) { + mEncoder = new OldMediaEncoder(); + } mPreview = preview; mPreview.addRendererFrameCallback(this); } @@ -76,39 +80,72 @@ class SnapshotVideoRecorder extends VideoRecorder implements GLCameraPreview.Ren case H_264: type = "video/avc"; break; // MediaFormat.MIMETYPE_VIDEO_AVC: case DEVICE_DEFAULT: type = "video/avc"; break; } - VideoTextureEncoder.Config configuration = new VideoTextureEncoder.Config( - mResult.getFile(), - width, - height, - 1000000, - 30, - mResult.getRotation(), - scaleX, - scaleY, - type, - EGL14.eglGetCurrentContext() - ); + if (USE_OLD_ENCODER) { + OldMediaEncoder.Config configuration = new OldMediaEncoder.Config( + mResult.getFile(), + width, + height, + 1000000, + 30, + mResult.getRotation(), + scaleX, + scaleY, + type, + EGL14.eglGetCurrentContext() + ); + mEncoder.startRecording(configuration); + mEncoder.setTextureId(mTextureId); + } else { + TextureMediaEncoder.Config config = new TextureMediaEncoder.Config( + width, height, + 1000000, + 30, + mResult.getRotation(), + type, mTextureId, + scaleX, scaleY, + EGL14.eglGetCurrentContext() + ); + TextureMediaEncoder videoEncoder = new TextureMediaEncoder(config); + mEncoderEngine = new MediaEncoderEngine(mResult.file, videoEncoder, null); + mEncoderEngine.start(); + } mResult.rotation = 0; // We will rotate the result instead. 
- mEncoder.startRecording(configuration); - mEncoder.setTextureId(mTextureId); mCurrentState = STATE_RECORDING; } if (mCurrentState == STATE_RECORDING) { - mEncoder.frameAvailable(surfaceTexture); + if (USE_OLD_ENCODER) { + mEncoder.frameAvailable(surfaceTexture); + } else { + TextureMediaEncoder.Frame frame = new TextureMediaEncoder.Frame(); + frame.timestamp = surfaceTexture.getTimestamp(); + frame.transform = new float[16]; + surfaceTexture.getTransformMatrix(frame.transform); + mEncoderEngine.notify(TextureMediaEncoder.FRAME_EVENT, frame); + } } if (mCurrentState == STATE_RECORDING && mDesiredState == STATE_NOT_RECORDING) { - mEncoder.stopRecording(new Runnable() { - @Override - public void run() { - // We are in the encoder thread. - dispatchResult(); - } - }); + if (USE_OLD_ENCODER) { + mEncoder.stopRecording(new Runnable() { + @Override + public void run() { + // We are in the encoder thread. + dispatchResult(); + } + }); + mEncoder = null; + } else { + mEncoderEngine.stop(new Runnable() { + @Override + public void run() { + // We are in the encoder thread. + dispatchResult(); + } + }); + mEncoderEngine = null; + } mCurrentState = STATE_NOT_RECORDING; - - mEncoder = null; mPreview.removeRendererFrameCallback(SnapshotVideoRecorder.this); mPreview = null; }
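Note on the two muxer TODOs above (the mMuxer.start() call inside MediaEncoder.drain() and the unguarded mMediaMuxer.stop() in MediaEncoderEngine.stop()): once an AudioMediaEncoder is actually wired in, whichever encoder reaches INFO_OUTPUT_FORMAT_CHANGED first would start the muxer before the other track has been added. Both TODOs point at the same fix, a shared muxer wrapper that starts the MediaMuxer only after every encoder has registered its track and skips stop() when nothing was written. The sketch below shows one possible shape for such a wrapper; MediaMuxerController and all of its member names are hypothetical and do not appear in this patch.

import android.media.MediaCodec;
import android.media.MediaFormat;
import android.media.MediaMuxer;
import android.os.Build;
import android.support.annotation.NonNull;
import android.support.annotation.RequiresApi;

import java.io.File;
import java.io.IOException;
import java.nio.ByteBuffer;

@RequiresApi(api = Build.VERSION_CODES.JELLY_BEAN_MR2)
class MediaMuxerController {

    private final MediaMuxer mMuxer;
    private final int mTrackCount; // 1 for video only, 2 for video + audio
    private int mAddedTracks = 0;
    private boolean mStarted = false;
    private boolean mWroteData = false;

    MediaMuxerController(@NonNull File file, int trackCount) throws IOException {
        mMuxer = new MediaMuxer(file.toString(), MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
        mTrackCount = trackCount;
    }

    // Each MediaEncoder would call this from its INFO_OUTPUT_FORMAT_CHANGED branch
    // instead of calling mMuxer.addTrack() and mMuxer.start() directly.
    synchronized int addTrack(@NonNull MediaFormat format) {
        if (mStarted) throw new IllegalStateException("Muxer already started");
        int index = mMuxer.addTrack(format);
        mAddedTracks++;
        if (mAddedTracks == mTrackCount) {
            // Only start once every encoder has registered its track.
            mMuxer.start();
            mStarted = true;
            notifyAll();
        }
        return index;
    }

    // Encoders wait here until all tracks are registered, then write their sample.
    synchronized void writeSampleData(int trackIndex, @NonNull ByteBuffer data, @NonNull MediaCodec.BufferInfo info) {
        while (!mStarted) {
            try {
                wait();
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
                return;
            }
        }
        mMuxer.writeSampleData(trackIndex, data, info);
        mWroteData = true;
    }

    // Called from MediaEncoderEngine.stop(): stop() only runs if data was actually written,
    // so it cannot throw for an empty recording.
    synchronized void release() {
        if (mStarted && mWroteData) mMuxer.stop();
        mMuxer.release();
    }
}

Blocking inside writeSampleData() assumes each encoder drains on its own thread, as in the AudioVideoRecordingSample project linked at the top of MediaEncoder.java; with the single WorkerHandler that MediaEncoderEngine currently posts to, pending samples would have to be buffered rather than blocked on.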