diff --git a/README.md b/README.md
index 8f34a85b..9ad40d1f 100644
--- a/README.md
+++ b/README.md
@@ -16,7 +16,7 @@ CameraView is a well documented, high-level library that makes capturing pictures and videos easy,
 addressing most of the common issues and needs, and still leaving you with flexibility where needed.
 
 ```groovy
-api 'com.otaliastudios:cameraview:2.0.0-rc1'
+api 'com.otaliastudios:cameraview:2.0.0-rc2'
 ```
 
 - Fast & reliable
diff --git a/cameraview/src/androidTest/java/com/otaliastudios/cameraview/video/VideoRecorderTest.java b/cameraview/src/androidTest/java/com/otaliastudios/cameraview/video/VideoRecorderTest.java
index 73175225..edafd44c 100644
--- a/cameraview/src/androidTest/java/com/otaliastudios/cameraview/video/VideoRecorderTest.java
+++ b/cameraview/src/androidTest/java/com/otaliastudios/cameraview/video/VideoRecorderTest.java
@@ -29,7 +29,7 @@ public class VideoRecorderTest extends BaseTest {
         }
 
         @Override
-        protected void onStop() {
+        protected void onStop(boolean isCameraShutdown) {
            dispatchVideoRecordingEnd();
            dispatchResult();
         }
@@ -37,7 +37,7 @@ public class VideoRecorderTest extends BaseTest {
         recorder.start(result);
         Mockito.verify(listener, Mockito.times(1))
                .onVideoRecordingStart();
-        recorder.stop();
+        recorder.stop(false);
         Mockito.verify(listener, Mockito.times(1))
                .onVideoRecordingEnd();
         Mockito.verify(listener, Mockito.times(1))
diff --git a/cameraview/src/main/java/com/otaliastudios/cameraview/engine/Camera1Engine.java b/cameraview/src/main/java/com/otaliastudios/cameraview/engine/Camera1Engine.java
index be9f9009..5689fcf9 100644
--- a/cameraview/src/main/java/com/otaliastudios/cameraview/engine/Camera1Engine.java
+++ b/cameraview/src/main/java/com/otaliastudios/cameraview/engine/Camera1Engine.java
@@ -231,7 +231,7 @@ public class Camera1Engine extends CameraEngine implements
     @Override
     protected Task<Void> onStopPreview() {
         if (mVideoRecorder != null) {
-            mVideoRecorder.stop();
+            mVideoRecorder.stop(true);
             mVideoRecorder = null;
         }
         mPictureRecorder = null;
diff --git a/cameraview/src/main/java/com/otaliastudios/cameraview/engine/Camera2Engine.java b/cameraview/src/main/java/com/otaliastudios/cameraview/engine/Camera2Engine.java
index 4839303c..2b471bf7 100644
--- a/cameraview/src/main/java/com/otaliastudios/cameraview/engine/Camera2Engine.java
+++ b/cameraview/src/main/java/com/otaliastudios/cameraview/engine/Camera2Engine.java
@@ -535,7 +535,7 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAvailableListener,
         if (mVideoRecorder != null) {
             // This should synchronously call onVideoResult that will reset the repeating builder
             // to the PREVIEW template. This is very important.
-            mVideoRecorder.stop();
+            mVideoRecorder.stop(true);
             mVideoRecorder = null;
         }
         mPictureRecorder = null;
diff --git a/cameraview/src/main/java/com/otaliastudios/cameraview/engine/CameraEngine.java b/cameraview/src/main/java/com/otaliastudios/cameraview/engine/CameraEngine.java
index a39907b2..8a05f7d3 100644
--- a/cameraview/src/main/java/com/otaliastudios/cameraview/engine/CameraEngine.java
+++ b/cameraview/src/main/java/com/otaliastudios/cameraview/engine/CameraEngine.java
@@ -1189,7 +1189,7 @@ public abstract class CameraEngine implements
             public void run() {
                 LOG.i("stopVideo", "executing.", "isTakingVideo?", isTakingVideo());
                 if (mVideoRecorder != null) {
-                    mVideoRecorder.stop();
+                    mVideoRecorder.stop(false);
                     mVideoRecorder = null;
                 }
             }
diff --git a/cameraview/src/main/java/com/otaliastudios/cameraview/internal/utils/WorkerHandler.java b/cameraview/src/main/java/com/otaliastudios/cameraview/internal/utils/WorkerHandler.java
index fe33edbd..6d0a2f49 100644
--- a/cameraview/src/main/java/com/otaliastudios/cameraview/internal/utils/WorkerHandler.java
+++ b/cameraview/src/main/java/com/otaliastudios/cameraview/internal/utils/WorkerHandler.java
@@ -14,6 +14,7 @@ import androidx.annotation.NonNull;
 import java.lang.ref.WeakReference;
 import java.util.concurrent.Callable;
 import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.Executor;
 
 /**
@@ -100,6 +101,20 @@ public class WorkerHandler {
                 WorkerHandler.this.run(command);
             }
         };
+
+        // HandlerThreads/Handlers sometimes have a significant warmup time.
+        // We want to spend this time here so when this object is built, it
+        // is fully operational.
+        final CountDownLatch latch = new CountDownLatch(1);
+        post(new Runnable() {
+            @Override
+            public void run() {
+                latch.countDown();
+            }
+        });
+        try {
+            latch.await();
+        } catch (InterruptedException ignore) {}
     }
 
     /**
@@ -219,7 +234,6 @@ public class WorkerHandler {
      * interrupt it, so the next {@link #get(String)} call will remove it.
      * In any case, we only store weak references.
      */
-    @SuppressWarnings("WeakerAccess")
     public void destroy() {
         HandlerThread thread = getThread();
         if (thread.isAlive()) {
diff --git a/cameraview/src/main/java/com/otaliastudios/cameraview/video/FullVideoRecorder.java b/cameraview/src/main/java/com/otaliastudios/cameraview/video/FullVideoRecorder.java
index abd73fcd..9c92deb3 100644
--- a/cameraview/src/main/java/com/otaliastudios/cameraview/video/FullVideoRecorder.java
+++ b/cameraview/src/main/java/com/otaliastudios/cameraview/video/FullVideoRecorder.java
@@ -103,11 +103,11 @@ public abstract class FullVideoRecorder extends VideoRecorder {
             switch (what) {
                 case MediaRecorder.MEDIA_RECORDER_INFO_MAX_DURATION_REACHED:
                     mResult.endReason = VideoResult.REASON_MAX_DURATION_REACHED;
-                    stop();
+                    stop(false);
                     break;
                 case MediaRecorder.MEDIA_RECORDER_INFO_MAX_FILESIZE_REACHED:
                     mResult.endReason = VideoResult.REASON_MAX_SIZE_REACHED;
-                    stop();
+                    stop(false);
                     break;
             }
         }
@@ -130,7 +130,7 @@ public abstract class FullVideoRecorder extends VideoRecorder {
     protected void onStart() {
         if (!prepareMediaRecorder(mResult)) {
             mResult = null;
-            stop();
+            stop(false);
             return;
         }
 
@@ -141,12 +141,12 @@
             LOG.w("start:", "Error while starting media recorder.", e);
             mResult = null;
             mError = e;
-            stop();
+            stop(false);
         }
     }
 
     @Override
-    protected void onStop() {
+    protected void onStop(boolean isCameraShutdown) {
         if (mMediaRecorder != null) {
             dispatchVideoRecordingEnd();
             try {
diff --git a/cameraview/src/main/java/com/otaliastudios/cameraview/video/SnapshotVideoRecorder.java b/cameraview/src/main/java/com/otaliastudios/cameraview/video/SnapshotVideoRecorder.java
index e1eaba7a..37cbb4ba 100644
--- a/cameraview/src/main/java/com/otaliastudios/cameraview/video/SnapshotVideoRecorder.java
+++ b/cameraview/src/main/java/com/otaliastudios/cameraview/video/SnapshotVideoRecorder.java
@@ -1,20 +1,14 @@
 package com.otaliastudios.cameraview.video;
 
-import android.graphics.Canvas;
-import android.graphics.Color;
-import android.graphics.PorterDuff;
 import android.graphics.SurfaceTexture;
 import android.opengl.EGL14;
 import android.os.Build;
-import android.view.Surface;
 
 import com.otaliastudios.cameraview.CameraLogger;
-import com.otaliastudios.cameraview.internal.Issue514Workaround;
 import com.otaliastudios.cameraview.overlay.Overlay;
 import com.otaliastudios.cameraview.VideoResult;
 import com.otaliastudios.cameraview.controls.Audio;
 import com.otaliastudios.cameraview.engine.CameraEngine;
-import com.otaliastudios.cameraview.internal.egl.EglViewport;
 import com.otaliastudios.cameraview.overlay.OverlayDrawer;
 import com.otaliastudios.cameraview.preview.GlCameraPreview;
 import com.otaliastudios.cameraview.preview.RendererFrameCallback;
@@ -84,8 +78,16 @@ public class SnapshotVideoRecorder extends VideoRecorder implements RendererFrameCallback,
     }
 
     @Override
-    protected void onStop() {
-        mDesiredState = STATE_NOT_RECORDING;
+    protected void onStop(boolean isCameraShutdown) {
+        if (isCameraShutdown) {
+            // The renderer callback might never be called. In my tests, it was not.
+ LOG.i("Stopping the encoder engine from isCameraShutdown."); + mDesiredState = STATE_NOT_RECORDING; + mCurrentState = STATE_NOT_RECORDING; + mEncoderEngine.stop(); + } else { + mDesiredState = STATE_NOT_RECORDING; + } } @RendererThread @@ -164,7 +166,7 @@ public class SnapshotVideoRecorder extends VideoRecorder implements RendererFram LOG.v("dispatching frame."); TextureMediaEncoder textureEncoder = (TextureMediaEncoder) mEncoderEngine.getVideoEncoder(); TextureMediaEncoder.Frame frame = textureEncoder.acquireFrame(); - frame.timestamp = surfaceTexture.getTimestamp(); + frame.timestampNanos = surfaceTexture.getTimestamp(); frame.timestampMillis = System.currentTimeMillis(); // NOTE: this is an approximation but it seems to work. surfaceTexture.getTransformMatrix(frame.transform); if (mEncoderEngine != null) { // Can happen on teardown. At least it used to. diff --git a/cameraview/src/main/java/com/otaliastudios/cameraview/video/VideoRecorder.java b/cameraview/src/main/java/com/otaliastudios/cameraview/video/VideoRecorder.java index 02b52876..394cfbbe 100644 --- a/cameraview/src/main/java/com/otaliastudios/cameraview/video/VideoRecorder.java +++ b/cameraview/src/main/java/com/otaliastudios/cameraview/video/VideoRecorder.java @@ -64,9 +64,10 @@ public abstract class VideoRecorder { /** * Stops recording. + * @param isCameraShutdown whether this is a full shutdown, camera is being closed */ - public final void stop() { - onStop(); + public final void stop(boolean isCameraShutdown) { + onStop(isCameraShutdown); } /** @@ -79,13 +80,12 @@ public abstract class VideoRecorder { protected abstract void onStart(); - protected abstract void onStop(); + protected abstract void onStop(boolean isCameraShutdown); /** * Subclasses can call this to notify that the result was obtained, * either with some error (null result) or with the actual stub, filled. */ - @SuppressWarnings("WeakerAccess") @CallSuper protected void dispatchResult() { mIsRecording = false; @@ -112,6 +112,7 @@ public abstract class VideoRecorder { * Subclasses can call this to notify that the video recording has ended, * although the video result might still be processed. */ + @SuppressWarnings("WeakerAccess") @CallSuper protected void dispatchVideoRecordingEnd() { if (mListener != null) { diff --git a/cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/AudioConfig.java b/cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/AudioConfig.java index 3d09b51e..fb663566 100644 --- a/cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/AudioConfig.java +++ b/cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/AudioConfig.java @@ -20,7 +20,7 @@ public class AudioConfig { final int encoding = AudioFormat.ENCODING_PCM_16BIT; // Determines the sampleSizePerChannel // The 44.1KHz frequency is the only setting guaranteed to be available on all devices. final int samplingFrequency = 44100; // samples/sec - final int sampleSizePerChannel = 2; // byte/sample/channel [16bit] + final int sampleSizePerChannel = 2; // byte/sample/channel [16bit]. 
     final int byteRatePerChannel = samplingFrequency * sampleSizePerChannel; // byte/sec/channel
 
     @NonNull
@@ -75,7 +75,7 @@
      * @return the number of frames
      */
     int audioRecordBufferFrames() {
-        return 25;
+        return 50;
     }
 
     /**
@@ -91,6 +91,6 @@
      * @return the buffer pool max size
      */
     int bufferPoolMaxSize() {
-        return 80;
+        return 500;
     }
 }
diff --git a/cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/AudioMediaEncoder.java b/cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/AudioMediaEncoder.java
index 62a060f1..9e9dac09 100644
--- a/cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/AudioMediaEncoder.java
+++ b/cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/AudioMediaEncoder.java
@@ -1,6 +1,5 @@
 package com.otaliastudios.cameraview.video.encoding;
 
-import android.media.AudioFormat;
 import android.media.AudioRecord;
 import android.media.MediaCodec;
 import android.media.MediaCodecInfo;
@@ -15,8 +14,10 @@ import androidx.annotation.RequiresApi;
 
 import java.io.IOException;
 import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
 import java.util.HashMap;
 import java.util.Map;
+import java.util.Random;
 import java.util.concurrent.LinkedBlockingQueue;
 
 /**
@@ -30,23 +31,24 @@ public class AudioMediaEncoder extends MediaEncoder {
 
     private static final boolean PERFORMANCE_DEBUG = false;
     private static final boolean PERFORMANCE_FILL_GAPS = true;
+    private static final int PERFORMANCE_MAX_GAPS = 8;
 
     private boolean mRequestStop = false;
     private AudioEncodingThread mEncoder;
     private AudioRecordingThread mRecorder;
     private ByteBufferPool mByteBufferPool;
-    private ByteBuffer mZeroBuffer;
     private final AudioTimestamp mTimestamp;
     private AudioConfig mConfig;
     private InputBufferPool mInputBufferPool = new InputBufferPool();
     private final LinkedBlockingQueue<InputBuffer> mInputBufferQueue = new LinkedBlockingQueue<>();
+    private AudioNoise mAudioNoise;
 
     // Just to debug performance.
-    private int mSendCount = 0;
-    private int mExecuteCount = 0;
-    private long mAvgSendDelay = 0;
-    private long mAvgExecuteDelay = 0;
-    private Map<Long, Long> mSendStartMap = new HashMap<>();
+    private int mDebugSendCount = 0;
+    private int mDebugExecuteCount = 0;
+    private long mDebugSendAvgDelay = 0;
+    private long mDebugExecuteAvgDelay = 0;
+    private Map<Long, Long> mDebugSendStartMap = new HashMap<>();
 
     public AudioMediaEncoder(@NonNull AudioConfig config) {
         super("AudioEncoder");
@@ -76,7 +78,7 @@
         mMediaCodec.configure(audioFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
         mMediaCodec.start();
         mByteBufferPool = new ByteBufferPool(mConfig.frameSize(), mConfig.bufferPoolMaxSize());
-        mZeroBuffer = ByteBuffer.allocateDirect(mConfig.frameSize());
+        mAudioNoise = new AudioNoise(mConfig);
     }
 
     @EncoderThread
@@ -130,11 +132,13 @@
 
         private AudioRecord mAudioRecord;
         private ByteBuffer mCurrentBuffer;
-        private int mReadBytes;
+        private int mCurrentReadBytes;
+
         private long mLastTimeUs;
         private long mFirstTimeUs = Long.MIN_VALUE;
 
         private AudioRecordingThread() {
+            setPriority(Thread.MAX_PRIORITY);
             final int minBufferSize = AudioRecord.getMinBufferSize(
                     mConfig.samplingFrequency,
                     mConfig.audioFormatChannels(),
@@ -152,14 +156,22 @@
                     mConfig.audioFormatChannels(),
                     mConfig.encoding,
                     bufferSize);
-            setPriority(Thread.MAX_PRIORITY);
         }
 
         @Override
         public void run() {
             mAudioRecord.startRecording();
             while (!mRequestStop) {
-                read(false);
+                if (!hasReachedMaxLength()) {
+                    read(false);
+                } else {
+                    // We have reached the max length, so stop reading.
+                    // However, do not get out of the loop - the controller
+                    // will call stop() on us soon. It's not our responsibility
+                    // to stop ourselves.
+                    //noinspection UnnecessaryContinue
+                    continue;
+                }
             }
             LOG.w("Stop was requested. We're out of the loop. Will post an endOfStream.");
             // Last input with 0 length. This will signal the endOfStream.
@@ -192,25 +204,25 @@
             // with left and right bytes. https://stackoverflow.com/q/20594750/4288782
             if (PERFORMANCE_DEBUG) {
                 long before = System.nanoTime();
-                mReadBytes = mAudioRecord.read(mCurrentBuffer, mConfig.frameSize());
+                mCurrentReadBytes = mAudioRecord.read(mCurrentBuffer, mConfig.frameSize());
                 long after = System.nanoTime();
                 float delayMillis = (after - before) / 1000000F;
-                float durationMillis = AudioTimestamp.bytesToMillis(mReadBytes, mConfig.byteRate());
+                float durationMillis = AudioTimestamp.bytesToMillis(mCurrentReadBytes, mConfig.byteRate());
                 LOG.v("read thread - reading took:", delayMillis, "should be:", durationMillis, "delay:", delayMillis - durationMillis);
             } else {
-                mReadBytes = mAudioRecord.read(mCurrentBuffer, mConfig.frameSize());
+                mCurrentReadBytes = mAudioRecord.read(mCurrentBuffer, mConfig.frameSize());
             }
-            LOG.i("read thread - eos:", endOfStream, "- Read new audio frame. Bytes:", mReadBytes);
-            if (mReadBytes > 0) { // Good read: increase PTS.
+            LOG.i("read thread - eos:", endOfStream, "- Read new audio frame. Bytes:", mCurrentReadBytes);
+            if (mCurrentReadBytes > 0) { // Good read: increase PTS.
+                increaseTime(mCurrentReadBytes, endOfStream);
                 LOG.i("read thread - eos:", endOfStream, "- mLastTimeUs:", mLastTimeUs);
-                mCurrentBuffer.limit(mReadBytes);
+                mCurrentBuffer.limit(mCurrentReadBytes);
                 enqueue(mCurrentBuffer, mLastTimeUs, endOfStream);
-            } else if (mReadBytes == AudioRecord.ERROR_INVALID_OPERATION) {
+            } else if (mCurrentReadBytes == AudioRecord.ERROR_INVALID_OPERATION) {
                 LOG.e("read thread - eos:", endOfStream, "- Got AudioRecord.ERROR_INVALID_OPERATION");
-            } else if (mReadBytes == AudioRecord.ERROR_BAD_VALUE) {
+            } else if (mCurrentReadBytes == AudioRecord.ERROR_BAD_VALUE) {
                 LOG.e("read thread - eos:", endOfStream, "- Got AudioRecord.ERROR_BAD_VALUE");
             }
         }
@@ -235,43 +247,21 @@
             }
 
             // See if we reached the max length value.
-            boolean didReachMaxLength = (mLastTimeUs - mFirstTimeUs) > getMaxLengthMillis() * 1000L;
-            if (didReachMaxLength && !endOfStream) {
-                LOG.w("read thread - this frame reached the maxLength! deltaUs:", mLastTimeUs - mFirstTimeUs);
-                notifyMaxLengthReached();
-            }
-
-            // Add zeroes if we have huge gaps. Even if timestamps are correct, if we have gaps between
-            // them, the encoder might shrink all timestamps to have a continuous audio. This results
-            // in a video that is fast-forwarded.
-            // Adding zeroes does not solve the gaps issue - audio will still be distorted. But at
-            // least we get a video that has the correct playback speed.
-            if (PERFORMANCE_FILL_GAPS) {
-                int gaps = mTimestamp.getGapCount(mConfig.frameSize());
-                if (gaps > 0) {
-                    long gapStart = mTimestamp.getGapStartUs(mLastTimeUs);
-                    long frameUs = AudioTimestamp.bytesToUs(mConfig.frameSize(), mConfig.byteRate());
-                    LOG.w("read thread - GAPS: trying to add", gaps, "zeroed buffers");
-                    for (int i = 0; i < gaps; i++) {
-                        ByteBuffer zeroBuffer = mByteBufferPool.get();
-                        if (zeroBuffer == null) {
-                            LOG.e("read thread - GAPS: aborting because we have no free buffer.");
-                            break;
-                        }
-                        ;
-                        zeroBuffer.position(0);
-                        zeroBuffer.put(mZeroBuffer);
-                        zeroBuffer.clear();
-                        enqueue(zeroBuffer, gapStart, false);
-                        gapStart += frameUs;
-                    }
+            if (!hasReachedMaxLength()) {
+                boolean didReachMaxLength = (mLastTimeUs - mFirstTimeUs) > getMaxLengthMillis() * 1000L;
+                if (didReachMaxLength && !endOfStream) {
+                    LOG.w("read thread - this frame reached the maxLength! deltaUs:", mLastTimeUs - mFirstTimeUs);
+                    notifyMaxLengthReached();
                 }
             }
+
+            // Maybe add noise.
+            maybeAddNoise();
         }
 
         private void enqueue(@NonNull ByteBuffer byteBuffer, long timestamp, boolean isEndOfStream) {
             if (PERFORMANCE_DEBUG) {
-                mSendStartMap.put(timestamp, System.nanoTime() / 1000000);
+                mDebugSendStartMap.put(timestamp, System.nanoTime() / 1000000);
             }
             int readBytes = byteBuffer.remaining();
             InputBuffer inputBuffer = mInputBufferPool.get();
@@ -283,6 +273,45 @@
             mInputBufferQueue.add(inputBuffer);
         }
 
+        /**
+         * If our {@link AudioTimestamp} detects huge gaps and the performance flag is enabled,
+         * we can add noise to fill them.
+         *
+         * Even if we always pass the correct timestamps, if there are big gaps between frames,
+         * the encoder implementation might shrink all timestamps to get continuous audio.
+         * This results in a video that is fast-forwarded.
+         *
+         * Adding noise does not solve the gap issue; we'll still have distorted audio, but
+         * at least we get a video that has the correct playback speed.
+         *
+         * NOTE: this MUST be fast!
+         * If this operation is slow, we make the {@link AudioRecordingThread} busy: we'll
+         * read the next frame with a delay, which creates even more gaps at the next call
+         * and makes us spend even more time here. The result might be that we record no
+         * audio at all, just random noise.
+         * This is why we cap the number of gaps we fill with {@link #PERFORMANCE_MAX_GAPS}.
+         */
+        private void maybeAddNoise() {
+            if (!PERFORMANCE_FILL_GAPS) return;
+            int gaps = mTimestamp.getGapCount(mConfig.frameSize());
+            if (gaps <= 0) return;
+
+            long gapStart = mTimestamp.getGapStartUs(mLastTimeUs);
+            long frameUs = AudioTimestamp.bytesToUs(mConfig.frameSize(), mConfig.byteRate());
+            LOG.w("read thread - GAPS: trying to add", gaps, "noise buffers. PERFORMANCE_MAX_GAPS:", PERFORMANCE_MAX_GAPS);
+            for (int i = 0; i < Math.min(gaps, PERFORMANCE_MAX_GAPS); i++) {
+                ByteBuffer noiseBuffer = mByteBufferPool.get();
+                if (noiseBuffer == null) {
+                    LOG.e("read thread - GAPS: aborting because we have no free buffer.");
+                    break;
+                }
+                noiseBuffer.clear();
+                mAudioNoise.fill(noiseBuffer);
+                noiseBuffer.rewind();
+                enqueue(noiseBuffer, gapStart, false);
+                gapStart += frameUs;
+            }
+        }
     }
 
     /**
@@ -311,10 +340,11 @@
                     // Performance logging
                     if (PERFORMANCE_DEBUG) {
                         long sendEnd = System.nanoTime() / 1000000;
-                        Long sendStart = mSendStartMap.remove(inputBuffer.timestamp);
+                        Long sendStart = mDebugSendStartMap.remove(inputBuffer.timestamp);
+                        //noinspection StatementWithEmptyBody
                         if (sendStart != null) {
-                            mAvgSendDelay = ((mAvgSendDelay * mSendCount) + (sendEnd - sendStart)) / (++mSendCount);
-                            LOG.v("send delay millis:", sendEnd - sendStart, "average:", mAvgSendDelay);
+                            mDebugSendAvgDelay = ((mDebugSendAvgDelay * mDebugSendCount) + (sendEnd - sendStart)) / (++mDebugSendCount);
+                            LOG.v("send delay millis:", sendEnd - sendStart, "average:", mDebugSendAvgDelay);
                         } else {
                             // This input buffer was already processed (but tryAcquire failed for now).
                         }
@@ -338,8 +368,8 @@
             if (PERFORMANCE_DEBUG) {
                 // After latest changes, the count here is not so different between MONO and STEREO.
                 // We get about 400 frames in both cases (430 for MONO, but doesn't seem like a big issue).
-                LOG.e("EXECUTE DELAY MILLIS:", mAvgExecuteDelay, "COUNT:", mExecuteCount);
-                LOG.e("SEND DELAY MILLIS:", mAvgSendDelay, "COUNT:", mSendCount);
+                LOG.e("EXECUTE DELAY MILLIS:", mDebugExecuteAvgDelay, "COUNT:", mDebugExecuteCount);
+                LOG.e("SEND DELAY MILLIS:", mDebugSendAvgDelay, "COUNT:", mDebugSendCount);
             }
         }
 
@@ -357,12 +387,12 @@
             // NOTE: can consider calling this drainOutput on yet another thread, which would let us
             // use an even smaller BUFFER_POOL_MAX_SIZE without losing audio frames. But this way
             // we can accumulate delay on this new thread without noticing (no pool getting empty).
-            drainOutput(buffer.isEndOfStream);
+            drainOutput(eos);
 
             if (PERFORMANCE_DEBUG) {
                 long executeEnd = System.nanoTime() / 1000000;
-                mAvgExecuteDelay = ((mAvgExecuteDelay * mExecuteCount) + (executeEnd - executeStart)) / (++mExecuteCount);
-                LOG.v("execute delay millis:", executeEnd - executeStart, "average:", mAvgExecuteDelay);
+                mDebugExecuteAvgDelay = ((mDebugExecuteAvgDelay * mDebugExecuteCount) + (executeEnd - executeStart)) / (++mDebugExecuteCount);
+                LOG.v("execute delay millis:", executeEnd - executeStart, "average:", mDebugExecuteAvgDelay);
             }
         }
     }
diff --git a/cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/AudioNoise.java b/cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/AudioNoise.java
new file mode 100644
index 00000000..ec60645c
--- /dev/null
+++ b/cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/AudioNoise.java
@@ -0,0 +1,59 @@
+package com.otaliastudios.cameraview.video.encoding;
+
+import androidx.annotation.NonNull;
+
+import java.nio.Buffer;
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+import java.nio.ShortBuffer;
+import java.util.Random;
+
+/**
+ * An AudioNoise instance offers buffers of noise that we can use when recording
+ * some samples fails for some reason.
+ *
+ * Since we can't create noise whenever it's needed (that would be expensive and
+ * would slow down the recording thread), we create a big noise buffer at start time.
+ *
+ * We'd like to work with {@link ShortBuffer}s, but this requires converting the
+ * input buffer to ShortBuffer each time, and this can be expensive.
+ */
+class AudioNoise {
+
+    private final static int FRAMES = 1; // After testing, it looks like this is the best setup
+    private final static Random RANDOM = new Random();
+
+    private final ByteBuffer mNoiseBuffer;
+
+    AudioNoise(@NonNull AudioConfig config) {
+        //noinspection ConstantConditions
+        if (config.sampleSizePerChannel != 2) {
+            throw new IllegalArgumentException("AudioNoise expects 2bytes-1short samples.");
+        }
+        mNoiseBuffer = ByteBuffer
+                .allocateDirect(config.frameSize() * FRAMES)
+                .order(ByteOrder.nativeOrder());
+        double i = 0;
+        double frequency = config.frameSize() / 2D; // each X samples, the signal repeats
+        double step = Math.PI / frequency; // the increase in radians
+        double max = 10; // might choose this from 0 to Short.MAX_VALUE
+        while (mNoiseBuffer.hasRemaining()) {
+            short noise = (short) (Math.sin(++i * step) * max);
+            mNoiseBuffer.put((byte) noise);
+            mNoiseBuffer.put((byte) (noise >> 8));
+        }
+        mNoiseBuffer.rewind();
+    }
+
+    void fill(@NonNull ByteBuffer outBuffer) {
+        mNoiseBuffer.clear();
+        if (mNoiseBuffer.capacity() == outBuffer.remaining()) {
+            mNoiseBuffer.position(0); // Happens if FRAMES = 1.
+        } else {
+            mNoiseBuffer.position(RANDOM.nextInt(mNoiseBuffer.capacity()
+                    - outBuffer.remaining()));
+        }
+        mNoiseBuffer.limit(mNoiseBuffer.position() + outBuffer.remaining());
+        outBuffer.put(mNoiseBuffer);
+    }
+}
diff --git a/cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/MediaEncoder.java b/cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/MediaEncoder.java
index d3efef0d..12a52743 100644
--- a/cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/MediaEncoder.java
+++ b/cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/MediaEncoder.java
@@ -14,6 +14,9 @@ import com.otaliastudios.cameraview.CameraLogger;
 import com.otaliastudios.cameraview.internal.utils.WorkerHandler;
 
 import java.nio.ByteBuffer;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.concurrent.atomic.AtomicInteger;
 
 /**
  * Base class for single-track encoders, coordinated by a {@link MediaEncoderEngine}.
@@ -117,12 +120,13 @@ public abstract class MediaEncoder {
     private OutputBufferPool mOutputBufferPool;
     private MediaCodec.BufferInfo mBufferInfo;
     private MediaCodecBuffers mBuffers;
+    private final Map<String, AtomicInteger> mPendingEvents = new HashMap<>();
 
     private long mMaxLengthMillis;
     private boolean mMaxLengthReached;
 
     private long mStartTimeMillis = 0; // In System.currentTimeMillis()
-    private long mStartTimeUs = Long.MIN_VALUE; // In unknown reference
+    private long mFirstTimeUs = Long.MIN_VALUE; // In unknown reference
     private long mLastTimeUs = 0;
 
     private long mDebugSetStateTimestamp = Long.MIN_VALUE;
@@ -176,6 +180,7 @@ public abstract class MediaEncoder {
         mBufferInfo = new MediaCodec.BufferInfo();
         mMaxLengthMillis = maxLengthMillis;
         mWorker = WorkerHandler.get(mName);
+        mWorker.getThread().setPriority(Thread.MAX_PRIORITY);
         LOG.i(mName, "Prepare was called. Posting.");
         mWorker.post(new Runnable() {
             @Override
@@ -223,13 +228,18 @@ public abstract class MediaEncoder {
      * @param event what happened
      * @param data object
      */
+    @SuppressWarnings("ConstantConditions")
     final void notify(final @NonNull String event, final @Nullable Object data) {
-        LOG.v(mName, "Notify was called. Posting.");
+        if (!mPendingEvents.containsKey(event)) mPendingEvents.put(event, new AtomicInteger(0));
+        final AtomicInteger pendingEvents = mPendingEvents.get(event);
+        pendingEvents.incrementAndGet();
+        LOG.v(mName, "Notify was called. Posting. pendingEvents:", pendingEvents.intValue());
         mWorker.post(new Runnable() {
             @Override
             public void run() {
-                LOG.v(mName, "Notify was called. Executing.");
+                LOG.v(mName, "Notify was called. Executing. pendingEvents:", pendingEvents.intValue());
pendingEvents:", pendingEvents.intValue()); onEvent(event, data); + pendingEvents.decrementAndGet(); } }); } @@ -315,6 +325,7 @@ public abstract class MediaEncoder { mOutputBufferPool = null; mBuffers = null; setState(STATE_STOPPED); + mWorker.destroy(); } /** @@ -357,7 +368,9 @@ public abstract class MediaEncoder { */ @SuppressWarnings("WeakerAccess") protected void encodeInputBuffer(InputBuffer buffer) { - LOG.v(mName, "ENCODING - Buffer:", buffer.index, "Bytes:", buffer.length, "Presentation:", buffer.timestamp); + LOG.v(mName, "ENCODING - Buffer:", buffer.index, + "Bytes:", buffer.length, + "Presentation:", buffer.timestamp); if (buffer.isEndOfStream) { // send EOS mMediaCodec.queueInputBuffer(buffer.index, 0, 0, buffer.timestamp, MediaCodec.BUFFER_FLAG_END_OF_STREAM); @@ -379,8 +392,8 @@ public abstract class MediaEncoder { */ @SuppressLint("LogNotTimber") @SuppressWarnings("WeakerAccess") - protected void drainOutput(boolean drainAll) { - LOG.v(mName, "DRAINING - EOS:", drainAll); + protected final void drainOutput(boolean drainAll) { + LOG.i(mName, "DRAINING - EOS:", drainAll); if (mMediaCodec == null) { LOG.e("drain() was called before prepare() or after releasing."); return; @@ -422,9 +435,9 @@ public abstract class MediaEncoder { // Store mStartTimeUs and mLastTimeUs, useful to detect the max length // reached and stop recording when needed. - if (mStartTimeUs == Long.MIN_VALUE) { - mStartTimeUs = mBufferInfo.presentationTimeUs; - LOG.w(mName, "DRAINING - Got the first presentation time:", mStartTimeUs); + if (mFirstTimeUs == Long.MIN_VALUE) { + mFirstTimeUs = mBufferInfo.presentationTimeUs; + LOG.w(mName, "DRAINING - Got the first presentation time:", mFirstTimeUs); } mLastTimeUs = mBufferInfo.presentationTimeUs; @@ -434,16 +447,16 @@ public abstract class MediaEncoder { // To address this, encoders are required to call notifyFirstFrameMillis // so we can adjust here - moving to 1970 reference. // Extra benefit: we never pass a pts equal to 0, which some encoders refuse. - mBufferInfo.presentationTimeUs = (mStartTimeMillis * 1000) + mLastTimeUs - mStartTimeUs; + mBufferInfo.presentationTimeUs = (mStartTimeMillis * 1000) + mLastTimeUs - mFirstTimeUs; // Write. - LOG.v(mName, "DRAINING - About to write(). Adjusted presentation:", mBufferInfo.presentationTimeUs); + LOG.i(mName, "DRAINING - About to write(). Adjusted presentation:", mBufferInfo.presentationTimeUs); OutputBuffer buffer = mOutputBufferPool.get(); //noinspection ConstantConditions buffer.info = mBufferInfo; buffer.trackIndex = mTrackIndex; buffer.data = encodedData; - mController.write(mOutputBufferPool, buffer); + onWriteOutput(mOutputBufferPool, buffer); } mMediaCodec.releaseOutputBuffer(encoderStatus, false); @@ -451,10 +464,11 @@ public abstract class MediaEncoder { // Not needed if drainAll because we already were asked to stop if (!drainAll && !mMaxLengthReached - && mStartTimeUs != Long.MIN_VALUE - && mLastTimeUs - mStartTimeUs > mMaxLengthMillis * 1000) { + && mFirstTimeUs != Long.MIN_VALUE + && mLastTimeUs - mFirstTimeUs > mMaxLengthMillis * 1000) { LOG.w(mName, "DRAINING - Reached maxLength! 
mLastTimeUs:", mLastTimeUs, - "mStartTimeUs:", mStartTimeUs, + "mStartTimeUs:", mFirstTimeUs, + "mDeltaUs:", mLastTimeUs - mFirstTimeUs, "mMaxLengthUs:", mMaxLengthMillis * 1000); onMaxLengthReached(); break; @@ -470,6 +484,11 @@ public abstract class MediaEncoder { } } + @CallSuper + protected void onWriteOutput(@NonNull OutputBufferPool pool, @NonNull OutputBuffer buffer) { + mController.write(pool, buffer); + } + protected abstract int getEncodedBitRate(); /** @@ -494,6 +513,11 @@ public abstract class MediaEncoder { onMaxLengthReached(); } + @SuppressWarnings("WeakerAccess") + protected boolean hasReachedMaxLength() { + return mMaxLengthReached; + } + /** * Called by us (during {@link #drainOutput(boolean)}) or by subclasses * (through {@link #notifyMaxLengthReached()}) to notify that we reached the @@ -520,7 +544,20 @@ public abstract class MediaEncoder { * @param firstFrameMillis the milliseconds of the first frame presentation */ @SuppressWarnings("WeakerAccess") - protected void notifyFirstFrameMillis(long firstFrameMillis) { + protected final void notifyFirstFrameMillis(long firstFrameMillis) { mStartTimeMillis = firstFrameMillis; } + + /** + * Returns the number of events (see {@link #onEvent(String, Object)}) that were scheduled + * but still not passed to that function. Could be used to drop some of them if this + * number is too high. + * + * @param event the event type + * @return the pending events number + */ + @SuppressWarnings({"SameParameterValue", "ConstantConditions", "WeakerAccess"}) + protected final int getPendingEvents(@NonNull String event) { + return mPendingEvents.get(event).intValue(); + } } diff --git a/cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/MediaEncoderEngine.java b/cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/MediaEncoderEngine.java index 6e2725dc..0ee9e60a 100644 --- a/cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/MediaEncoderEngine.java +++ b/cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/MediaEncoderEngine.java @@ -7,6 +7,7 @@ import android.os.Build; import android.text.format.DateFormat; import com.otaliastudios.cameraview.CameraLogger; +import com.otaliastudios.cameraview.internal.utils.WorkerHandler; import androidx.annotation.NonNull; import androidx.annotation.Nullable; @@ -68,8 +69,9 @@ public class MediaEncoderEngine { void onEncodingStart(); /** - * Called when encoding stopped. At this point the mxuer might still be processing, - * but we have stopped receiving input (recording video and audio frames). + * Called when encoding stopped. At this point the muxer or the encoders might still be + * processing data, but we have stopped receiving input (recording video and audio frames). + * Actually, we will stop very soon. * * The {@link #onEncodingEnd(int, Exception)} callback will soon be called * with the results. 
@@ -96,17 +98,18 @@ public class MediaEncoderEngine {
     public final static int END_BY_MAX_DURATION = 1;
     public final static int END_BY_MAX_SIZE = 2;
 
-    private List<MediaEncoder> mEncoders;
+    private final List<MediaEncoder> mEncoders = new ArrayList<>();
     private MediaMuxer mMediaMuxer;
-    private int mStartedEncodersCount;
-    private int mReleasedEncodersCount;
-    private boolean mMediaMuxerStarted;
+    private int mStartedEncodersCount = 0;
+    private int mStoppedEncodersCount = 0;
+    private boolean mMediaMuxerStarted = false;
     @SuppressWarnings("FieldCanBeLocal")
-    private Controller mController;
+    private final Controller mController = new Controller();
+    private final WorkerHandler mControllerThread = WorkerHandler.get("EncoderEngine");
+    private final Object mControllerLock = new Object();
     private Listener mListener;
     private int mEndReason = END_BY_USER;
     private int mPossibleEndReason;
-    private final Object mControllerLock = new Object();
 
     /**
      * Creates a new engine for the given file, with the given encoders and max limits,
@@ -126,8 +129,6 @@ public class MediaEncoderEngine {
                               final long maxSize,
                               @Nullable Listener listener) {
         mListener = listener;
-        mController = new Controller();
-        mEncoders = new ArrayList<>();
         mEncoders.add(videoEncoder);
         if (audioEncoder != null) {
             mEncoders.add(audioEncoder);
         }
         try {
             mMediaMuxer = new MediaMuxer(file.toString(), MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
         } catch (IOException e) {
             throw new RuntimeException(e);
         }
-        mStartedEncodersCount = 0;
-        mMediaMuxerStarted = false;
-        mReleasedEncodersCount = 0;
 
         // Trying to convert the size constraints to duration constraints,
         // because they are super easy to check.
@@ -203,6 +201,9 @@ public class MediaEncoderEngine {
         for (MediaEncoder encoder : mEncoders) {
             encoder.stop();
         }
+        if (mListener != null) {
+            mListener.onEncodingStop();
+        }
     }
 
     /**
@@ -218,10 +219,14 @@ public class MediaEncoderEngine {
             // went wrong, and we propagate that to the listener.
             try {
                 mMediaMuxer.stop();
-                mMediaMuxer.release();
             } catch (Exception e) {
                 error = e;
             }
+            try {
+                mMediaMuxer.release();
+            } catch (Exception e) {
+                if (error == null) error = e;
+            }
             mMediaMuxer = null;
         }
         LOG.w("end:", "Dispatching end to listener - reason:", mEndReason, "error:", error);
@@ -231,8 +236,9 @@ public class MediaEncoderEngine {
         }
         mEndReason = END_BY_USER;
         mStartedEncodersCount = 0;
-        mReleasedEncodersCount = 0;
+        mStoppedEncodersCount = 0;
         mMediaMuxerStarted = false;
+        mControllerThread.destroy();
         LOG.i("end:", "Completed.");
     }
 
@@ -281,11 +287,18 @@ public class MediaEncoderEngine {
             LOG.w("notifyStarted:", "Assigned track", track, "to format", format.getString(MediaFormat.KEY_MIME));
             if (++mStartedEncodersCount == mEncoders.size()) {
                 LOG.w("notifyStarted:", "All encoders have started. Starting muxer and dispatching onEncodingStart().");
-                mMediaMuxer.start();
-                mMediaMuxerStarted = true;
-                if (mListener != null) {
-                    mListener.onEncodingStart();
-                }
+                // Go out of this thread since it might be very important for the
+                // encoders and we don't want to perform expensive operations here.
+                mControllerThread.run(new Runnable() {
+                    @Override
+                    public void run() {
+                        mMediaMuxer.start();
+                        mMediaMuxerStarted = true;
+                        if (mListener != null) {
+                            mListener.onEncodingStart();
+                        }
+                    }
+                });
             }
             return track;
         }
@@ -322,10 +335,6 @@ public class MediaEncoderEngine {
          * large differences.
          */
         public void write(@NonNull OutputBufferPool pool, @NonNull OutputBuffer buffer) {
-            if (!mMediaMuxerStarted) {
-                throw new IllegalStateException("Trying to write before muxer started");
-            }
-
             if (DEBUG_PERFORMANCE) {
                 // When AUDIO = mono, this is called about twice the time. (200 vs 100 for 5 sec).
                 Integer count = mDebugCount.get(buffer.trackIndex);
@@ -342,7 +351,6 @@ public class MediaEncoderEngine {
                         "track:", buffer.trackIndex,
                         "presentation:", buffer.info.presentationTimeUs);
             }
-
             mMediaMuxer.writeSampleData(buffer.trackIndex, buffer.data, buffer.info);
             pool.recycle(buffer);
         }
@@ -360,7 +368,14 @@ public class MediaEncoderEngine {
             if (--mStartedEncodersCount == 0) {
                 LOG.w("requestStop:", "All encoders have requested a stop. Stopping them.");
                 mEndReason = mPossibleEndReason;
-                stop();
+                // Go out of this thread since it might be very important for the
+                // encoders and we don't want to perform expensive operations here.
+                mControllerThread.run(new Runnable() {
+                    @Override
+                    public void run() {
+                        stop();
+                    }
+                });
             }
         }
     }
@@ -372,12 +387,16 @@ public class MediaEncoderEngine {
         public void notifyStopped(int track) {
             synchronized (mControllerLock) {
                 LOG.w("notifyStopped:", "Called for track", track);
-                if (++mReleasedEncodersCount == mEncoders.size()) {
-                    LOG.w("requestStop:", "All encoders have been released. Stopping the muxer.");
-                    if (mListener != null) {
-                        mListener.onEncodingStop();
-                    }
-                    end();
+                if (++mStoppedEncodersCount == mEncoders.size()) {
+                    LOG.w("requestStop:", "All encoders have been stopped. Stopping the muxer.");
+                    // Go out of this thread since it might be very important for the
+                    // encoders and we don't want to perform expensive operations here.
+                    mControllerThread.run(new Runnable() {
+                        @Override
+                        public void run() {
+                            end();
+                        }
+                    });
                 }
             }
         }
diff --git a/cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/TextureMediaEncoder.java b/cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/TextureMediaEncoder.java
index f3bf63bc..c3ce7377 100644
--- a/cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/TextureMediaEncoder.java
+++ b/cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/TextureMediaEncoder.java
@@ -1,12 +1,10 @@
 package com.otaliastudios.cameraview.video.encoding;
 
 import android.graphics.SurfaceTexture;
-import android.media.ImageReader;
 import android.opengl.Matrix;
 import android.os.Build;
 
 import com.otaliastudios.cameraview.CameraLogger;
-import com.otaliastudios.cameraview.internal.Issue514Workaround;
 import com.otaliastudios.cameraview.internal.egl.EglCore;
 import com.otaliastudios.cameraview.internal.egl.EglViewport;
 import com.otaliastudios.cameraview.internal.egl.EglWindowSurface;
@@ -38,6 +36,8 @@
         }
     });
 
+    private long mFirstTimeUs = Long.MIN_VALUE;
+
     public TextureMediaEncoder(@NonNull TextureConfig config) {
         super(config.copy());
     }
@@ -53,7 +53,7 @@
          * Nanoseconds, in no meaningful time-base. Will be used for offsets only.
          * Typically this comes from {@link SurfaceTexture#getTimestamp()}.
          */
-        public long timestamp;
+        public long timestampNanos;
 
         /**
          * Milliseconds in the {@link System#currentTimeMillis()} reference.
@@ -65,6 +65,10 @@
         * The transformation matrix for the base texture.
         */
        public float[] transform = new float[16];
+
+        private long timestampUs() {
+            return timestampNanos / 1000L;
+        }
     }
 
     /**
@@ -95,6 +99,36 @@
         mViewport = new EglViewport();
     }
 
+    /**
+     * Any number of pending events > 1 means that we should skip this frame.
+     * To avoid skipping too many frames, we'll use 2 for now, but this just means
+     * that we'll be drawing the same frame twice.
+     *
+     * When an event is posted, the textureId data has already been updated so we're
+     * too late to draw the old one and it should be skipped.
+     *
+     * This is especially important if we perform overlay drawing here, since that
+     * keeps this thread busy and slows down the event dispatching.
+     *
+     * @param timestampUs frame timestamp
+     * @return true to render
+     */
+    @Override
+    protected boolean shouldRenderFrame(long timestampUs) {
+        if (!super.shouldRenderFrame(timestampUs)) {
+            return false;
+        } else if (mFrameNumber <= 10) {
+            // Always render the first few frames, or muxer fails.
+            return true;
+        } else if (getPendingEvents(FRAME_EVENT) > 2) {
+            LOG.w("shouldRenderFrame - Dropping frame because we already have too many pending events:",
+                    getPendingEvents(FRAME_EVENT));
+            return false;
+        } else {
+            return true;
+        }
+    }
+
     @EncoderThread
     @Override
     protected void onEvent(@NonNull String event, @Nullable Object data) {
@@ -103,25 +137,42 @@
         if (frame == null) {
             throw new IllegalArgumentException("Got null frame for FRAME_EVENT.");
         }
-        if (frame.timestamp == 0) { // grafika
-            mFramePool.recycle(frame);
-            return;
-        }
-        if (mFrameNumber < 0) { // We were asked to stop.
+        if (!shouldRenderFrame(frame.timestampUs())) {
             mFramePool.recycle(frame);
             return;
         }
-        mFrameNumber++;
+
+        // Notify we got the first frame and its absolute time.
         if (mFrameNumber == 1) {
             notifyFirstFrameMillis(frame.timestampMillis);
         }
+
+        // Check if we reached the max length value, and if so, notify.
+        if (mFirstTimeUs == Long.MIN_VALUE) mFirstTimeUs = frame.timestampUs();
+        if (!hasReachedMaxLength()) {
+            boolean didReachMaxLength = (frame.timestampUs() - mFirstTimeUs) > getMaxLengthMillis() * 1000L;
+            if (didReachMaxLength) {
+                LOG.w("onEvent -",
+                        "frameNumber:", mFrameNumber,
+                        "timestampUs:", frame.timestampUs(),
+                        "firstTimeUs:", mFirstTimeUs,
+                        "- reached max length! deltaUs:", frame.timestampUs() - mFirstTimeUs);
+                notifyMaxLengthReached();
+            }
+        }
+
         // First, drain any previous data.
-        LOG.i("onEvent", "frameNumber:", mFrameNumber, "timestamp:", frame.timestamp, "- draining.");
+        LOG.i("onEvent -",
+                "frameNumber:", mFrameNumber,
+                "timestampUs:", frame.timestampUs(),
+                "- draining.");
         drainOutput(false);
 
         // Then draw on the surface.
-        LOG.i("onEvent", "frameNumber:", mFrameNumber, "timestamp:", frame.timestamp, "- drawing.");
+        LOG.i("onEvent -",
+                "frameNumber:", mFrameNumber,
+                "timestampUs:", frame.timestampUs(),
+                "- rendering.");
 
         // 1. We must scale this matrix like GlCameraPreview does, because it might have some cropping.
         // Scaling takes place with respect to the (0, 0, 0) point, so we must apply a Translation to compensate.
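The frame-skipping policy above boils down to a small backpressure check: count the events posted to the encoder thread but not yet consumed, and drop frames when the consumer falls behind. A standalone sketch, assuming a single producer and a single consumer thread (names are illustrative; the library tracks this per event type through getPendingEvents()):

```java
import java.util.concurrent.atomic.AtomicInteger;

// Sketch of the backpressure policy: drop a frame when too many frame events
// are still waiting on the encoder thread, but always render the first few
// frames so the muxer receives a valid stream start.
class FrameBackpressure {
    private static final int MAX_PENDING = 2;    // beyond this, drop the frame
    private static final int WARMUP_FRAMES = 10; // always render early frames

    private final AtomicInteger pending = new AtomicInteger(0);
    private int frameNumber = 0;

    // Producer side: called right before posting a frame event.
    void onFramePosted() {
        pending.incrementAndGet();
    }

    // Consumer side: called on the encoder thread after handling one event.
    void onFrameConsumed() {
        pending.decrementAndGet();
    }

    // Consumer side: decide whether this frame is worth rendering.
    boolean shouldRenderFrame() {
        frameNumber++;
        if (frameNumber <= WARMUP_FRAMES) return true;
        return pending.get() <= MAX_PENDING;
    }
}
```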
@@ -152,7 +203,7 @@
         if (mConfig.hasOverlay()) {
             mConfig.overlayDrawer.render();
         }
-        mWindow.setPresentationTime(frame.timestamp);
+        mWindow.setPresentationTime(frame.timestampNanos);
         mWindow.swapBuffers();
         mFramePool.recycle(frame);
     }
diff --git a/cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/VideoMediaEncoder.java b/cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/VideoMediaEncoder.java
index 64061a37..b542a882 100644
--- a/cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/VideoMediaEncoder.java
+++ b/cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/VideoMediaEncoder.java
@@ -7,6 +7,7 @@ import android.os.Build;
 
 import androidx.annotation.NonNull;
 import androidx.annotation.RequiresApi;
+import android.os.Bundle;
 import android.view.Surface;
 
 import com.otaliastudios.cameraview.CameraLogger;
@@ -43,6 +44,8 @@ abstract class VideoMediaEncoder<C extends VideoConfig> extends MediaEncoder {
     @SuppressWarnings("WeakerAccess")
     protected int mFrameNumber = -1;
 
+    private boolean mSyncFrameFound = false;
+
     VideoMediaEncoder(@NonNull C config) {
         super("VideoEncoder");
         mConfig = config;
@@ -60,7 +63,7 @@
         format.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
         format.setInteger(MediaFormat.KEY_BIT_RATE, mConfig.bitRate);
         format.setInteger(MediaFormat.KEY_FRAME_RATE, mConfig.frameRate);
-        format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 5);
+        format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1); // seconds between key frames!
         format.setInteger("rotation-degrees", mConfig.rotation);
 
         try {
@@ -92,8 +95,52 @@
         drainOutput(true);
     }
 
+    /**
+     * The first frame that we write MUST have the BUFFER_FLAG_SYNC_FRAME flag set.
+     * It sometimes doesn't, because we might drop some frames in {@link #drainOutput(boolean)},
+     * basically when the muxer was not started yet at that point, typically because the
+     * audio setup is slow.
+     *
+     * We can't add the BUFFER_FLAG_SYNC_FRAME flag to the first frame just because we'd like to,
+     * but we can drop frames until we get a sync one.
+     *
+     * @param pool the buffer pool
+     * @param buffer the buffer
+     */
+    @Override
+    protected void onWriteOutput(@NonNull OutputBufferPool pool, @NonNull OutputBuffer buffer) {
+        if (!mSyncFrameFound) {
+            LOG.w("onWriteOutput:", "sync frame not found yet. Checking.");
+            int flag = MediaCodec.BUFFER_FLAG_SYNC_FRAME;
+            boolean hasFlag = (buffer.info.flags & flag) == flag;
+            if (hasFlag) {
+                LOG.w("onWriteOutput:", "SYNC FRAME FOUND!");
+                mSyncFrameFound = true;
+                super.onWriteOutput(pool, buffer);
+            } else {
+                LOG.w("onWriteOutput:", "DROPPING FRAME and requesting a sync frame soon.");
+                if (Build.VERSION.SDK_INT >= 19) {
+                    Bundle params = new Bundle();
+                    params.putInt(MediaCodec.PARAMETER_KEY_REQUEST_SYNC_FRAME, 0);
+                    mMediaCodec.setParameters(params);
+                }
+                pool.recycle(buffer);
+            }
+        } else {
+            super.onWriteOutput(pool, buffer);
+        }
+    }
+
     @Override
     protected int getEncodedBitRate() {
         return mConfig.bitRate;
     }
+
+    @SuppressWarnings("BooleanMethodIsAlwaysInverted")
+    protected boolean shouldRenderFrame(long timestampUs) {
+        if (timestampUs == 0) return false; // grafika said so
+        if (mFrameNumber < 0) return false; // We were asked to stop.
+        if (hasReachedMaxLength()) return false; // We were not asked to stop yet, but we will be soon.
+        mFrameNumber++;
+        return true;
+    }
 }
diff --git a/demo/src/main/java/com/otaliastudios/cameraview/demo/CameraActivity.java b/demo/src/main/java/com/otaliastudios/cameraview/demo/CameraActivity.java
index 6af85d2f..0cb652cd 100644
--- a/demo/src/main/java/com/otaliastudios/cameraview/demo/CameraActivity.java
+++ b/demo/src/main/java/com/otaliastudios/cameraview/demo/CameraActivity.java
@@ -225,6 +225,13 @@ public class CameraActivity extends AppCompatActivity implements View.OnClickListener
             super.onVideoRecordingStart();
             LOG.w("onVideoRecordingStart!");
         }
+
+        @Override
+        public void onVideoRecordingEnd() {
+            super.onVideoRecordingEnd();
+            message("Video taken. Processing...", false);
+            LOG.w("onVideoRecordingEnd!");
+        }
     }
 
     @Override
diff --git a/demo/src/main/res/layout/activity_camera.xml b/demo/src/main/res/layout/activity_camera.xml
index eb5e9092..9e58c634 100644
--- a/demo/src/main/res/layout/activity_camera.xml
+++ b/demo/src/main/res/layout/activity_camera.xml
@@ -21,7 +21,7 @@
         app:cameraPlaySounds="true"
         app:cameraGrid="off"
         app:cameraFlash="off"
-        app:cameraAudio="off"
+        app:cameraAudio="on"
         app:cameraFacing="back"
         app:cameraGestureTap="autoFocus"
         app:cameraGestureLongTap="none"
@@ -51,7 +51,7 @@
         android:layout_margin="16dp"
         android:layout_gravity="top|end"
         android:background="@drawable/background"
-        android:elevation="6dp"
+        android:elevation="3dp"
         app:srcCompat="@drawable/ic_switch" />
@@ -155,6 +155,7 @@
         app:behavior_hideable="true"
         app:behavior_peekHeight="300dp"
         app:behavior_skipCollapsed="false"
+        android:elevation="4dp"
         android:layout_width="match_parent"
         android:layout_height="wrap_content">
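For reference, the gap-filling idea behind the new AudioNoise class (see the AudioMediaEncoder hunks above) can be reduced to a standalone sketch: precompute one frame of barely audible sine wave once, then copy slices of it whenever a recording gap must be filled. Constants and names below are illustrative, not library API:

```java
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.util.Random;

// Precomputed "noise" for filling audio gaps: generating samples on the hot
// recording path would be too slow, so we build one low-amplitude sine frame
// up front and serve slices of it on demand.
class NoiseBank {
    private static final Random RANDOM = new Random();
    private final ByteBuffer noise;

    // frameSizeBytes must be even: samples are 16-bit PCM, stored low byte
    // first, matching nativeOrder() on typical little-endian Android devices.
    NoiseBank(int frameSizeBytes) {
        noise = ByteBuffer.allocateDirect(frameSizeBytes).order(ByteOrder.nativeOrder());
        double step = Math.PI / (frameSizeBytes / 2D); // radians per sample
        double amplitude = 10;                         // barely audible
        int i = 0;
        while (noise.hasRemaining()) {
            short sample = (short) (Math.sin(++i * step) * amplitude);
            noise.put((byte) sample);        // low byte
            noise.put((byte) (sample >> 8)); // high byte
        }
        noise.rewind();
    }

    // Copies out.remaining() bytes of noise into out, starting at a random
    // offset. Assumes out.remaining() <= our capacity, like the library's pool.
    void fill(ByteBuffer out) {
        noise.clear();
        int spare = noise.capacity() - out.remaining();
        noise.position(spare == 0 ? 0 : RANDOM.nextInt(spare));
        noise.limit(noise.position() + out.remaining());
        out.put(noise);
    }
}
```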