Improve video snapshots (#530)

* Fix button elevation

* Replace zero buffer with sample noise

* Small improvements

* Drop frames when we have too many pending events

* Warmup worker threads, enlarge audio buffers, use EncoderEngine thread

* Improve audio noise

* Ensure first frame is a sync one

* Request key frames when the first is lost

* Fix README

* Create AudioNoise

* Correctly recycle OutputBuffer

* Fix #526
Mattia Iavarone committed 5 years ago via GitHub
parent 42de6e30a4
commit c824c7db6c
18 changed files (changed line counts in parentheses):

  1. README.md (2)
  2. cameraview/src/androidTest/java/com/otaliastudios/cameraview/video/VideoRecorderTest.java (4)
  3. cameraview/src/main/java/com/otaliastudios/cameraview/engine/Camera1Engine.java (2)
  4. cameraview/src/main/java/com/otaliastudios/cameraview/engine/Camera2Engine.java (2)
  5. cameraview/src/main/java/com/otaliastudios/cameraview/engine/CameraEngine.java (2)
  6. cameraview/src/main/java/com/otaliastudios/cameraview/internal/utils/WorkerHandler.java (16)
  7. cameraview/src/main/java/com/otaliastudios/cameraview/video/FullVideoRecorder.java (10)
  8. cameraview/src/main/java/com/otaliastudios/cameraview/video/SnapshotVideoRecorder.java (18)
  9. cameraview/src/main/java/com/otaliastudios/cameraview/video/VideoRecorder.java (9)
  10. cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/AudioConfig.java (6)
  11. cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/AudioMediaEncoder.java (138)
  12. cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/AudioNoise.java (59)
  13. cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/MediaEncoder.java (69)
  14. cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/MediaEncoderEngine.java (69)
  15. cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/TextureMediaEncoder.java (75)
  16. cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/VideoMediaEncoder.java (49)
  17. demo/src/main/java/com/otaliastudios/cameraview/demo/CameraActivity.java (7)
  18. demo/src/main/res/layout/activity_camera.xml (5)

--- a/README.md
+++ b/README.md
@@ -16,7 +16,7 @@ CameraView is a well documented, high-level library that makes capturing picture
 addressing most of the common issues and needs, and still leaving you with flexibility where needed.
 ```groovy
-api 'com.otaliastudios:cameraview:2.0.0-rc1'
+api 'com.otaliastudios:cameraview:2.0.0-rc2'
 ```
 - Fast & reliable

--- a/cameraview/src/androidTest/java/com/otaliastudios/cameraview/video/VideoRecorderTest.java
+++ b/cameraview/src/androidTest/java/com/otaliastudios/cameraview/video/VideoRecorderTest.java
@@ -29,7 +29,7 @@ public class VideoRecorderTest extends BaseTest {
         }
         @Override
-        protected void onStop() {
+        protected void onStop(boolean isCameraShutdown) {
             dispatchVideoRecordingEnd();
             dispatchResult();
         }
@@ -37,7 +37,7 @@ public class VideoRecorderTest extends BaseTest {
         recorder.start(result);
         Mockito.verify(listener, Mockito.times(1))
                 .onVideoRecordingStart();
-        recorder.stop();
+        recorder.stop(false);
         Mockito.verify(listener, Mockito.times(1))
                 .onVideoRecordingEnd();
         Mockito.verify(listener, Mockito.times(1))

--- a/cameraview/src/main/java/com/otaliastudios/cameraview/engine/Camera1Engine.java
+++ b/cameraview/src/main/java/com/otaliastudios/cameraview/engine/Camera1Engine.java
@@ -231,7 +231,7 @@ public class Camera1Engine extends CameraEngine implements
     @Override
     protected Task<Void> onStopPreview() {
         if (mVideoRecorder != null) {
-            mVideoRecorder.stop();
+            mVideoRecorder.stop(true);
             mVideoRecorder = null;
         }
         mPictureRecorder = null;

@ -535,7 +535,7 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
if (mVideoRecorder != null) { if (mVideoRecorder != null) {
// This should synchronously call onVideoResult that will reset the repeating builder // This should synchronously call onVideoResult that will reset the repeating builder
// to the PREVIEW template. This is very important. // to the PREVIEW template. This is very important.
mVideoRecorder.stop(); mVideoRecorder.stop(true);
mVideoRecorder = null; mVideoRecorder = null;
} }
mPictureRecorder = null; mPictureRecorder = null;

--- a/cameraview/src/main/java/com/otaliastudios/cameraview/engine/CameraEngine.java
+++ b/cameraview/src/main/java/com/otaliastudios/cameraview/engine/CameraEngine.java
@@ -1189,7 +1189,7 @@ public abstract class CameraEngine implements
     public void run() {
         LOG.i("stopVideo", "executing.", "isTakingVideo?", isTakingVideo());
         if (mVideoRecorder != null) {
-            mVideoRecorder.stop();
+            mVideoRecorder.stop(false);
             mVideoRecorder = null;
         }
     }

--- a/cameraview/src/main/java/com/otaliastudios/cameraview/internal/utils/WorkerHandler.java
+++ b/cameraview/src/main/java/com/otaliastudios/cameraview/internal/utils/WorkerHandler.java
@@ -14,6 +14,7 @@ import androidx.annotation.NonNull;
 import java.lang.ref.WeakReference;
 import java.util.concurrent.Callable;
 import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.Executor;
 /**
@@ -100,6 +101,20 @@ public class WorkerHandler {
             WorkerHandler.this.run(command);
         }
     };
+    // HandlerThreads/Handlers sometimes have a significant warmup time.
+    // We want to spend this time here so when this object is built, it
+    // is fully operational.
+    final CountDownLatch latch = new CountDownLatch(1);
+    post(new Runnable() {
+        @Override
+        public void run() {
+            latch.countDown();
+        }
+    });
+    try {
+        latch.await();
+    } catch (InterruptedException ignore) {}
 }
 /**
@@ -219,7 +234,6 @@ public class WorkerHandler {
  * interrupt it, so the next {@link #get(String)} call will remove it.
  * In any case, we only store weak references.
  */
-@SuppressWarnings("WeakerAccess")
 public void destroy() {
     HandlerThread thread = getThread();
     if (thread.isAlive()) {
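The warmup trick above is worth a second look. Here is a minimal standalone sketch of the same pattern, assuming a plain HandlerThread; all names are illustrative and not part of the library:

```java
import android.os.Handler;
import android.os.HandlerThread;
import java.util.concurrent.CountDownLatch;

// Hypothetical sketch of the warmup above: block until the HandlerThread's
// looper has processed one task, so the factory only returns once the
// thread is fully operational.
class WarmHandlerSketch {
    static Handler createWarmHandler(String name) throws InterruptedException {
        HandlerThread thread = new HandlerThread(name);
        thread.start();
        Handler handler = new Handler(thread.getLooper());
        final CountDownLatch latch = new CountDownLatch(1);
        handler.post(new Runnable() {
            @Override
            public void run() {
                latch.countDown(); // runs once the looper is actually spinning
            }
        });
        latch.await(); // pay the warmup cost here, not on the first real task
        return handler;
    }
}
```

Blocking on the first posted task means later post() calls pay no thread-startup cost, which matters now that encoder threads are created on the hot path of starting a recording.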

--- a/cameraview/src/main/java/com/otaliastudios/cameraview/video/FullVideoRecorder.java
+++ b/cameraview/src/main/java/com/otaliastudios/cameraview/video/FullVideoRecorder.java
@@ -103,11 +103,11 @@ public abstract class FullVideoRecorder extends VideoRecorder {
     switch (what) {
         case MediaRecorder.MEDIA_RECORDER_INFO_MAX_DURATION_REACHED:
             mResult.endReason = VideoResult.REASON_MAX_DURATION_REACHED;
-            stop();
+            stop(false);
             break;
         case MediaRecorder.MEDIA_RECORDER_INFO_MAX_FILESIZE_REACHED:
             mResult.endReason = VideoResult.REASON_MAX_SIZE_REACHED;
-            stop();
+            stop(false);
             break;
     }
 }
@@ -130,7 +130,7 @@ public abstract class FullVideoRecorder extends VideoRecorder {
 protected void onStart() {
     if (!prepareMediaRecorder(mResult)) {
         mResult = null;
-        stop();
+        stop(false);
         return;
     }
@@ -141,12 +141,12 @@ public abstract class FullVideoRecorder extends VideoRecorder {
         LOG.w("start:", "Error while starting media recorder.", e);
         mResult = null;
         mError = e;
-        stop();
+        stop(false);
     }
 }
 @Override
-protected void onStop() {
+protected void onStop(boolean isCameraShutdown) {
     if (mMediaRecorder != null) {
         dispatchVideoRecordingEnd();
         try {

--- a/cameraview/src/main/java/com/otaliastudios/cameraview/video/SnapshotVideoRecorder.java
+++ b/cameraview/src/main/java/com/otaliastudios/cameraview/video/SnapshotVideoRecorder.java
@@ -1,20 +1,14 @@
 package com.otaliastudios.cameraview.video;
-import android.graphics.Canvas;
-import android.graphics.Color;
-import android.graphics.PorterDuff;
 import android.graphics.SurfaceTexture;
 import android.opengl.EGL14;
 import android.os.Build;
-import android.view.Surface;
 import com.otaliastudios.cameraview.CameraLogger;
-import com.otaliastudios.cameraview.internal.Issue514Workaround;
 import com.otaliastudios.cameraview.overlay.Overlay;
 import com.otaliastudios.cameraview.VideoResult;
 import com.otaliastudios.cameraview.controls.Audio;
 import com.otaliastudios.cameraview.engine.CameraEngine;
-import com.otaliastudios.cameraview.internal.egl.EglViewport;
 import com.otaliastudios.cameraview.overlay.OverlayDrawer;
 import com.otaliastudios.cameraview.preview.GlCameraPreview;
 import com.otaliastudios.cameraview.preview.RendererFrameCallback;
@@ -84,8 +78,16 @@ public class SnapshotVideoRecorder extends VideoRecorder implements RendererFram
     }
     @Override
-    protected void onStop() {
-        mDesiredState = STATE_NOT_RECORDING;
+    protected void onStop(boolean isCameraShutdown) {
+        if (isCameraShutdown) {
+            // The renderer callback might never be called. From my tests, it's not.
+            LOG.i("Stopping the encoder engine from isCameraShutdown.");
+            mDesiredState = STATE_NOT_RECORDING;
+            mCurrentState = STATE_NOT_RECORDING;
+            mEncoderEngine.stop();
+        } else {
+            mDesiredState = STATE_NOT_RECORDING;
+        }
     }
 @RendererThread
@@ -164,7 +166,7 @@ public class SnapshotVideoRecorder extends VideoRecorder implements RendererFram
     LOG.v("dispatching frame.");
     TextureMediaEncoder textureEncoder = (TextureMediaEncoder) mEncoderEngine.getVideoEncoder();
     TextureMediaEncoder.Frame frame = textureEncoder.acquireFrame();
-    frame.timestamp = surfaceTexture.getTimestamp();
+    frame.timestampNanos = surfaceTexture.getTimestamp();
     frame.timestampMillis = System.currentTimeMillis(); // NOTE: this is an approximation but it seems to work.
     surfaceTexture.getTransformMatrix(frame.transform);
     if (mEncoderEngine != null) { // Can happen on teardown. At least it used to.

--- a/cameraview/src/main/java/com/otaliastudios/cameraview/video/VideoRecorder.java
+++ b/cameraview/src/main/java/com/otaliastudios/cameraview/video/VideoRecorder.java
@@ -64,9 +64,10 @@ public abstract class VideoRecorder {
     /**
      * Stops recording.
+     * @param isCameraShutdown whether this is a full shutdown, camera is being closed
      */
-    public final void stop() {
-        onStop();
+    public final void stop(boolean isCameraShutdown) {
+        onStop(isCameraShutdown);
     }
     /**
@@ -79,13 +80,12 @@ public abstract class VideoRecorder {
     protected abstract void onStart();
-    protected abstract void onStop();
+    protected abstract void onStop(boolean isCameraShutdown);
     /**
      * Subclasses can call this to notify that the result was obtained,
      * either with some error (null result) or with the actual stub, filled.
      */
-    @SuppressWarnings("WeakerAccess")
     @CallSuper
     protected void dispatchResult() {
         mIsRecording = false;
@@ -112,6 +112,7 @@ public abstract class VideoRecorder {
      * Subclasses can call this to notify that the video recording has ended,
      * although the video result might still be processed.
      */
+    @SuppressWarnings("WeakerAccess")
     @CallSuper
     protected void dispatchVideoRecordingEnd() {
         if (mListener != null) {

--- a/cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/AudioConfig.java
+++ b/cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/AudioConfig.java
@@ -20,7 +20,7 @@ public class AudioConfig {
     final int encoding = AudioFormat.ENCODING_PCM_16BIT; // Determines the sampleSizePerChannel
     // The 44.1KHz frequency is the only setting guaranteed to be available on all devices.
     final int samplingFrequency = 44100; // samples/sec
-    final int sampleSizePerChannel = 2; // byte/sample/channel [16bit]
+    final int sampleSizePerChannel = 2; // byte/sample/channel [16bit]. If this changes, review noise introduction
     final int byteRatePerChannel = samplingFrequency * sampleSizePerChannel; // byte/sec/channel
     @NonNull
@@ -75,7 +75,7 @@ public class AudioConfig {
      * @return the number of frames
      */
     int audioRecordBufferFrames() {
-        return 25;
+        return 50;
     }
     /**
@@ -91,6 +91,6 @@ public class AudioConfig {
      * @return the buffer pool max size
      */
     int bufferPoolMaxSize() {
-        return 80;
+        return 500;
     }
 }
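To put the new pool size into perspective, a rough back-of-the-envelope sketch; the 1024-byte mono frame below is an assumption for illustration, the real value comes from AudioConfig.frameSize():

```java
// Hedged arithmetic sketch: how much audio the buffer pool can hold.
public class PoolSizeSketch {
    public static void main(String[] args) {
        int samplingFrequency = 44100;  // samples/sec, from AudioConfig
        int sampleSizePerChannel = 2;   // bytes, 16-bit PCM, from AudioConfig
        int byteRate = samplingFrequency * sampleSizePerChannel; // mono byte/sec
        int frameSize = 1024;           // bytes/frame - assumed for this demo
        double frameMillis = 1000.0 * frameSize / byteRate;      // ~11.6 ms
        System.out.printf("one frame  ~ %.1f ms%n", frameMillis);
        System.out.printf("old pool (80 buffers)  ~ %.0f ms of audio%n", 80 * frameMillis);
        System.out.printf("new pool (500 buffers) ~ %.0f ms of audio%n", 500 * frameMillis);
    }
}
```

Roughly, under these assumptions the old pool covered less than a second of audio while the new one covers several seconds, so a slow drain no longer exhausts the free buffers immediately.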

--- a/cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/AudioMediaEncoder.java
+++ b/cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/AudioMediaEncoder.java
@@ -1,6 +1,5 @@
 package com.otaliastudios.cameraview.video.encoding;
-import android.media.AudioFormat;
 import android.media.AudioRecord;
 import android.media.MediaCodec;
 import android.media.MediaCodecInfo;
@@ -15,8 +14,10 @@ import androidx.annotation.RequiresApi;
 import java.io.IOException;
 import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
 import java.util.HashMap;
 import java.util.Map;
+import java.util.Random;
 import java.util.concurrent.LinkedBlockingQueue;
 /**
@@ -30,23 +31,24 @@ public class AudioMediaEncoder extends MediaEncoder {
     private static final boolean PERFORMANCE_DEBUG = false;
     private static final boolean PERFORMANCE_FILL_GAPS = true;
+    private static final int PERFORMANCE_MAX_GAPS = 8;
     private boolean mRequestStop = false;
     private AudioEncodingThread mEncoder;
     private AudioRecordingThread mRecorder;
     private ByteBufferPool mByteBufferPool;
-    private ByteBuffer mZeroBuffer;
     private final AudioTimestamp mTimestamp;
     private AudioConfig mConfig;
     private InputBufferPool mInputBufferPool = new InputBufferPool();
     private final LinkedBlockingQueue<InputBuffer> mInputBufferQueue = new LinkedBlockingQueue<>();
+    private AudioNoise mAudioNoise;
     // Just to debug performance.
-    private int mSendCount = 0;
-    private int mExecuteCount = 0;
-    private long mAvgSendDelay = 0;
-    private long mAvgExecuteDelay = 0;
-    private Map<Long, Long> mSendStartMap = new HashMap<>();
+    private int mDebugSendCount = 0;
+    private int mDebugExecuteCount = 0;
+    private long mDebugSendAvgDelay = 0;
+    private long mDebugExecuteAvgDelay = 0;
+    private Map<Long, Long> mDebugSendStartMap = new HashMap<>();
     public AudioMediaEncoder(@NonNull AudioConfig config) {
         super("AudioEncoder");
@@ -76,7 +78,7 @@ public class AudioMediaEncoder extends MediaEncoder {
     mMediaCodec.configure(audioFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
     mMediaCodec.start();
     mByteBufferPool = new ByteBufferPool(mConfig.frameSize(), mConfig.bufferPoolMaxSize());
-    mZeroBuffer = ByteBuffer.allocateDirect(mConfig.frameSize());
+    mAudioNoise = new AudioNoise(mConfig);
 }
 @EncoderThread
@@ -130,11 +132,13 @@ public class AudioMediaEncoder extends MediaEncoder {
     private AudioRecord mAudioRecord;
     private ByteBuffer mCurrentBuffer;
-    private int mReadBytes;
+    private int mCurrentReadBytes;
     private long mLastTimeUs;
     private long mFirstTimeUs = Long.MIN_VALUE;
     private AudioRecordingThread() {
+        setPriority(Thread.MAX_PRIORITY);
         final int minBufferSize = AudioRecord.getMinBufferSize(
                 mConfig.samplingFrequency,
                 mConfig.audioFormatChannels(),
@@ -152,14 +156,22 @@ public class AudioMediaEncoder extends MediaEncoder {
                 mConfig.audioFormatChannels(),
                 mConfig.encoding,
                 bufferSize);
-        setPriority(Thread.MAX_PRIORITY);
     }
     @Override
     public void run() {
         mAudioRecord.startRecording();
         while (!mRequestStop) {
+            if (!hasReachedMaxLength()) {
                 read(false);
+            } else {
+                // We have reached the max length, so stop reading.
+                // However, do not get out of the loop - the controller
+                // will call stop() on us soon. It's not our responsibility
+                // to stop ourselves.
+                //noinspection UnnecessaryContinue
+                continue;
+            }
         }
         LOG.w("Stop was requested. We're out of the loop. Will post an endOfStream.");
         // Last input with 0 length. This will signal the endOfStream.
@@ -192,25 +204,25 @@ public class AudioMediaEncoder extends MediaEncoder {
     // with left and right bytes. https://stackoverflow.com/q/20594750/4288782
     if (PERFORMANCE_DEBUG) {
         long before = System.nanoTime();
-        mReadBytes = mAudioRecord.read(mCurrentBuffer, mConfig.frameSize());
+        mCurrentReadBytes = mAudioRecord.read(mCurrentBuffer, mConfig.frameSize());
         long after = System.nanoTime();
         float delayMillis = (after - before) / 1000000F;
-        float durationMillis = AudioTimestamp.bytesToMillis(mReadBytes, mConfig.byteRate());
+        float durationMillis = AudioTimestamp.bytesToMillis(mCurrentReadBytes, mConfig.byteRate());
         LOG.v("read thread - reading took:", delayMillis,
                 "should be:", durationMillis,
                 "delay:", delayMillis - durationMillis);
     } else {
-        mReadBytes = mAudioRecord.read(mCurrentBuffer, mConfig.frameSize());
+        mCurrentReadBytes = mAudioRecord.read(mCurrentBuffer, mConfig.frameSize());
     }
-    LOG.i("read thread - eos:", endOfStream, "- Read new audio frame. Bytes:", mReadBytes);
-    if (mReadBytes > 0) { // Good read: increase PTS.
-        increaseTime(mReadBytes, endOfStream);
+    LOG.i("read thread - eos:", endOfStream, "- Read new audio frame. Bytes:", mCurrentReadBytes);
+    if (mCurrentReadBytes > 0) { // Good read: increase PTS.
+        increaseTime(mCurrentReadBytes, endOfStream);
         LOG.i("read thread - eos:", endOfStream, "- mLastTimeUs:", mLastTimeUs);
-        mCurrentBuffer.limit(mReadBytes);
+        mCurrentBuffer.limit(mCurrentReadBytes);
         enqueue(mCurrentBuffer, mLastTimeUs, endOfStream);
-    } else if (mReadBytes == AudioRecord.ERROR_INVALID_OPERATION) {
+    } else if (mCurrentReadBytes == AudioRecord.ERROR_INVALID_OPERATION) {
         LOG.e("read thread - eos:", endOfStream, "- Got AudioRecord.ERROR_INVALID_OPERATION");
-    } else if (mReadBytes == AudioRecord.ERROR_BAD_VALUE) {
+    } else if (mCurrentReadBytes == AudioRecord.ERROR_BAD_VALUE) {
         LOG.e("read thread - eos:", endOfStream, "- Got AudioRecord.ERROR_BAD_VALUE");
     }
 }
@@ -235,43 +247,21 @@ public class AudioMediaEncoder extends MediaEncoder {
     }
     // See if we reached the max length value.
+    if (!hasReachedMaxLength()) {
         boolean didReachMaxLength = (mLastTimeUs - mFirstTimeUs) > getMaxLengthMillis() * 1000L;
         if (didReachMaxLength && !endOfStream) {
             LOG.w("read thread - this frame reached the maxLength! deltaUs:", mLastTimeUs - mFirstTimeUs);
             notifyMaxLengthReached();
         }
-    // Add zeroes if we have huge gaps. Even if timestamps are correct, if we have gaps between
-    // them, the encoder might shrink all timestamps to have a continuous audio. This results
-    // in a video that is fast-forwarded.
-    // Adding zeroes does not solve the gaps issue - audio will still be distorted. But at
-    // least we get a video that has the correct playback speed.
-    if (PERFORMANCE_FILL_GAPS) {
-        int gaps = mTimestamp.getGapCount(mConfig.frameSize());
-        if (gaps > 0) {
-            long gapStart = mTimestamp.getGapStartUs(mLastTimeUs);
-            long frameUs = AudioTimestamp.bytesToUs(mConfig.frameSize(), mConfig.byteRate());
-            LOG.w("read thread - GAPS: trying to add", gaps, "zeroed buffers");
-            for (int i = 0; i < gaps; i++) {
-                ByteBuffer zeroBuffer = mByteBufferPool.get();
-                if (zeroBuffer == null) {
-                    LOG.e("read thread - GAPS: aborting because we have no free buffer.");
-                    break;
-                }
-                zeroBuffer.position(0);
-                zeroBuffer.put(mZeroBuffer);
-                zeroBuffer.clear();
-                enqueue(zeroBuffer, gapStart, false);
-                gapStart += frameUs;
-            }
-        }
     }
+    // Maybe add noise.
+    maybeAddNoise();
 }
 private void enqueue(@NonNull ByteBuffer byteBuffer, long timestamp, boolean isEndOfStream) {
     if (PERFORMANCE_DEBUG) {
-        mSendStartMap.put(timestamp, System.nanoTime() / 1000000);
+        mDebugSendStartMap.put(timestamp, System.nanoTime() / 1000000);
     }
     int readBytes = byteBuffer.remaining();
     InputBuffer inputBuffer = mInputBufferPool.get();
@@ -283,6 +273,45 @@ public class AudioMediaEncoder extends MediaEncoder {
     mInputBufferQueue.add(inputBuffer);
 }
+/**
+ * If our {@link AudioTimestamp} detected huge gap, and the performance flag is enabled,
+ * we can add noise to fill them.
+ *
+ * Even if we always pass the correct timestamps, if there are big gaps between the frames,
+ * the encoder implementation might shrink all timestamps to have a continuous audio.
+ * This results in a video that is fast-forwarded.
+ *
+ * Adding noise does not solve the gaps issue, we'll still have distorted audio, but
+ * at least we get a video that has the correct playback speed.
+ *
+ * NOTE: this MUST be fast!
+ * If this operation is slow, we make the {@link AudioRecordingThread} busy, so we'll
+ * read the next frame with a delay, so we'll have even more gaps at the next call
+ * and spend even more time here. The result might be recording no audio at all - just
+ * random noise.
+ * This is the reason why we have a {@link #PERFORMANCE_MAX_GAPS} number.
+ */
+private void maybeAddNoise() {
+    if (!PERFORMANCE_FILL_GAPS) return;
+    int gaps = mTimestamp.getGapCount(mConfig.frameSize());
+    if (gaps <= 0) return;
+    long gapStart = mTimestamp.getGapStartUs(mLastTimeUs);
+    long frameUs = AudioTimestamp.bytesToUs(mConfig.frameSize(), mConfig.byteRate());
+    LOG.w("read thread - GAPS: trying to add", gaps, "noise buffers. PERFORMANCE_MAX_GAPS:", PERFORMANCE_MAX_GAPS);
+    for (int i = 0; i < Math.min(gaps, PERFORMANCE_MAX_GAPS); i++) {
+        ByteBuffer noiseBuffer = mByteBufferPool.get();
+        if (noiseBuffer == null) {
+            LOG.e("read thread - GAPS: aborting because we have no free buffer.");
+            break;
+        }
+        noiseBuffer.clear();
+        mAudioNoise.fill(noiseBuffer);
+        noiseBuffer.rewind();
+        enqueue(noiseBuffer, gapStart, false);
+        gapStart += frameUs;
+    }
+}
 }
 /**
@@ -311,10 +340,11 @@ public class AudioMediaEncoder extends MediaEncoder {
     // Performance logging
     if (PERFORMANCE_DEBUG) {
         long sendEnd = System.nanoTime() / 1000000;
-        Long sendStart = mSendStartMap.remove(inputBuffer.timestamp);
+        Long sendStart = mDebugSendStartMap.remove(inputBuffer.timestamp);
+        //noinspection StatementWithEmptyBody
         if (sendStart != null) {
-            mAvgSendDelay = ((mAvgSendDelay * mSendCount) + (sendEnd - sendStart)) / (++mSendCount);
-            LOG.v("send delay millis:", sendEnd - sendStart, "average:", mAvgSendDelay);
+            mDebugSendAvgDelay = ((mDebugSendAvgDelay * mDebugSendCount) + (sendEnd - sendStart)) / (++mDebugSendCount);
+            LOG.v("send delay millis:", sendEnd - sendStart, "average:", mDebugSendAvgDelay);
         } else {
             // This input buffer was already processed (but tryAcquire failed for now).
         }
@@ -338,8 +368,8 @@ public class AudioMediaEncoder extends MediaEncoder {
     if (PERFORMANCE_DEBUG) {
         // After latest changes, the count here is not so different between MONO and STEREO.
         // We get about 400 frames in both cases (430 for MONO, but doesn't seem like a big issue).
-        LOG.e("EXECUTE DELAY MILLIS:", mAvgExecuteDelay, "COUNT:", mExecuteCount);
-        LOG.e("SEND DELAY MILLIS:", mAvgSendDelay, "COUNT:", mSendCount);
+        LOG.e("EXECUTE DELAY MILLIS:", mDebugExecuteAvgDelay, "COUNT:", mDebugExecuteCount);
+        LOG.e("SEND DELAY MILLIS:", mDebugSendAvgDelay, "COUNT:", mDebugSendCount);
     }
 }
@@ -357,12 +387,12 @@ public class AudioMediaEncoder extends MediaEncoder {
     // NOTE: can consider calling this drainOutput on yet another thread, which would let us
     // use an even smaller BUFFER_POOL_MAX_SIZE without losing audio frames. But this way
     // we can accumulate delay on this new thread without noticing (no pool getting empty).
-    drainOutput(buffer.isEndOfStream);
+    drainOutput(eos);
     if (PERFORMANCE_DEBUG) {
         long executeEnd = System.nanoTime() / 1000000;
-        mAvgExecuteDelay = ((mAvgExecuteDelay * mExecuteCount) + (executeEnd - executeStart)) / (++mExecuteCount);
-        LOG.v("execute delay millis:", executeEnd - executeStart, "average:", mAvgExecuteDelay);
+        mDebugExecuteAvgDelay = ((mDebugExecuteAvgDelay * mDebugExecuteCount) + (executeEnd - executeStart)) / (++mDebugExecuteCount);
+        LOG.v("execute delay millis:", executeEnd - executeStart, "average:", mDebugExecuteAvgDelay);
     }
 }

--- /dev/null
+++ b/cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/AudioNoise.java
@@ -0,0 +1,59 @@
+package com.otaliastudios.cameraview.video.encoding;
+
+import androidx.annotation.NonNull;
+
+import java.nio.Buffer;
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+import java.nio.ShortBuffer;
+import java.util.Random;
+
+/**
+ * An AudioNoise instance offers buffers of noise that we can use when recording
+ * some samples failed for some reason.
+ *
+ * Since we can't create noise anytime it's needed - that would be expensive and
+ * slow down the recording thread - we create a big noise buffer at start time.
+ *
+ * We'd like to work with {@link ShortBuffer}s, but this requires converting the
+ * input buffer to ShortBuffer each time, and this can be expensive.
+ */
+class AudioNoise {
+
+    private final static int FRAMES = 1; // After testing, it looks like this is the best setup
+    private final static Random RANDOM = new Random();
+
+    private final ByteBuffer mNoiseBuffer;
+
+    AudioNoise(@NonNull AudioConfig config) {
+        //noinspection ConstantConditions
+        if (config.sampleSizePerChannel != 2) {
+            throw new IllegalArgumentException("AudioNoise expects 2bytes-1short samples.");
+        }
+        mNoiseBuffer = ByteBuffer
+                .allocateDirect(config.frameSize() * FRAMES)
+                .order(ByteOrder.nativeOrder());
+        double i = 0;
+        double frequency = config.frameSize() / 2D; // each X samples, the signal repeats
+        double step = Math.PI / frequency; // the increase in radians
+        double max = 10; // might choose this from 0 to Short.MAX_VALUE
+        while (mNoiseBuffer.hasRemaining()) {
+            short noise = (short) (Math.sin(++i * step) * max);
+            mNoiseBuffer.put((byte) noise);
+            mNoiseBuffer.put((byte) (noise >> 8));
+        }
+        mNoiseBuffer.rewind();
+    }
+
+    void fill(@NonNull ByteBuffer outBuffer) {
+        mNoiseBuffer.clear();
+        if (mNoiseBuffer.capacity() == outBuffer.remaining()) {
+            mNoiseBuffer.position(0); // Happens if FRAMES = 1.
+        } else {
+            mNoiseBuffer.position(RANDOM.nextInt(mNoiseBuffer.capacity()
+                    - outBuffer.remaining()));
+        }
+        mNoiseBuffer.limit(mNoiseBuffer.position() + outBuffer.remaining());
+        outBuffer.put(mNoiseBuffer);
+    }
+}
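Two details are worth noting. First, with max = 10 against Short.MAX_VALUE (32767), the generated signal sits around 20·log10(10/32767) ≈ -70 dBFS, so the filler is effectively inaudible while still giving the encoder non-silent, non-gapped input. Second, the random starting position hides the fact that the same small table is reused. A hypothetical caller-side sketch, mirroring maybeAddNoise() in AudioMediaEncoder above; `config`, `bufferPool` and `gapStartUs` are stand-ins for the encoder's own state:

```java
// Assumed usage of AudioNoise from a recording thread.
AudioNoise audioNoise = new AudioNoise(config); // precomputed at prepare time
ByteBuffer noiseBuffer = bufferPool.get();      // may be null if the pool is drained
if (noiseBuffer != null) {
    noiseBuffer.clear();              // open the whole buffer for writing
    audioNoise.fill(noiseBuffer);     // copy a random slice of the noise table
    noiseBuffer.rewind();             // rewind so the consumer reads from 0
    // enqueue(noiseBuffer, gapStartUs, false); // hand off to the encoder
}
```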

--- a/cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/MediaEncoder.java
+++ b/cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/MediaEncoder.java
@@ -14,6 +14,9 @@ import com.otaliastudios.cameraview.CameraLogger;
 import com.otaliastudios.cameraview.internal.utils.WorkerHandler;
 import java.nio.ByteBuffer;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.concurrent.atomic.AtomicInteger;
 /**
  * Base class for single-track encoders, coordinated by a {@link MediaEncoderEngine}.
@@ -117,12 +120,13 @@ public abstract class MediaEncoder {
     private OutputBufferPool mOutputBufferPool;
     private MediaCodec.BufferInfo mBufferInfo;
     private MediaCodecBuffers mBuffers;
+    private final Map<String, AtomicInteger> mPendingEvents = new HashMap<>();
     private long mMaxLengthMillis;
     private boolean mMaxLengthReached;
     private long mStartTimeMillis = 0; // In System.currentTimeMillis()
-    private long mStartTimeUs = Long.MIN_VALUE; // In unknown reference
+    private long mFirstTimeUs = Long.MIN_VALUE; // In unknown reference
     private long mLastTimeUs = 0;
     private long mDebugSetStateTimestamp = Long.MIN_VALUE;
@@ -176,6 +180,7 @@ public abstract class MediaEncoder {
     mBufferInfo = new MediaCodec.BufferInfo();
     mMaxLengthMillis = maxLengthMillis;
     mWorker = WorkerHandler.get(mName);
+    mWorker.getThread().setPriority(Thread.MAX_PRIORITY);
     LOG.i(mName, "Prepare was called. Posting.");
     mWorker.post(new Runnable() {
         @Override
@@ -223,13 +228,18 @@ public abstract class MediaEncoder {
  * @param event what happened
  * @param data object
  */
+@SuppressWarnings("ConstantConditions")
 final void notify(final @NonNull String event, final @Nullable Object data) {
-    LOG.v(mName, "Notify was called. Posting.");
+    if (!mPendingEvents.containsKey(event)) mPendingEvents.put(event, new AtomicInteger(0));
+    final AtomicInteger pendingEvents = mPendingEvents.get(event);
+    pendingEvents.incrementAndGet();
+    LOG.v(mName, "Notify was called. Posting. pendingEvents:", pendingEvents.intValue());
     mWorker.post(new Runnable() {
         @Override
         public void run() {
-            LOG.v(mName, "Notify was called. Executing.");
+            LOG.v(mName, "Notify was called. Executing. pendingEvents:", pendingEvents.intValue());
             onEvent(event, data);
+            pendingEvents.decrementAndGet();
         }
     });
 }
@@ -315,6 +325,7 @@ public abstract class MediaEncoder {
     mOutputBufferPool = null;
     mBuffers = null;
     setState(STATE_STOPPED);
+    mWorker.destroy();
 }
 /**
@@ -357,7 +368,9 @@ public abstract class MediaEncoder {
  */
 @SuppressWarnings("WeakerAccess")
 protected void encodeInputBuffer(InputBuffer buffer) {
-    LOG.v(mName, "ENCODING - Buffer:", buffer.index, "Bytes:", buffer.length, "Presentation:", buffer.timestamp);
+    LOG.v(mName, "ENCODING - Buffer:", buffer.index,
+            "Bytes:", buffer.length,
+            "Presentation:", buffer.timestamp);
     if (buffer.isEndOfStream) { // send EOS
         mMediaCodec.queueInputBuffer(buffer.index, 0, 0,
                 buffer.timestamp, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
@@ -379,8 +392,8 @@ public abstract class MediaEncoder {
  */
 @SuppressLint("LogNotTimber")
 @SuppressWarnings("WeakerAccess")
-protected void drainOutput(boolean drainAll) {
-    LOG.v(mName, "DRAINING - EOS:", drainAll);
+protected final void drainOutput(boolean drainAll) {
+    LOG.i(mName, "DRAINING - EOS:", drainAll);
     if (mMediaCodec == null) {
         LOG.e("drain() was called before prepare() or after releasing.");
         return;
@@ -422,9 +435,9 @@ public abstract class MediaEncoder {
     // Store mStartTimeUs and mLastTimeUs, useful to detect the max length
     // reached and stop recording when needed.
-    if (mStartTimeUs == Long.MIN_VALUE) {
-        mStartTimeUs = mBufferInfo.presentationTimeUs;
-        LOG.w(mName, "DRAINING - Got the first presentation time:", mStartTimeUs);
+    if (mFirstTimeUs == Long.MIN_VALUE) {
+        mFirstTimeUs = mBufferInfo.presentationTimeUs;
+        LOG.w(mName, "DRAINING - Got the first presentation time:", mFirstTimeUs);
     }
     mLastTimeUs = mBufferInfo.presentationTimeUs;
@@ -434,16 +447,16 @@ public abstract class MediaEncoder {
     // To address this, encoders are required to call notifyFirstFrameMillis
     // so we can adjust here - moving to 1970 reference.
     // Extra benefit: we never pass a pts equal to 0, which some encoders refuse.
-    mBufferInfo.presentationTimeUs = (mStartTimeMillis * 1000) + mLastTimeUs - mStartTimeUs;
+    mBufferInfo.presentationTimeUs = (mStartTimeMillis * 1000) + mLastTimeUs - mFirstTimeUs;
     // Write.
-    LOG.v(mName, "DRAINING - About to write(). Adjusted presentation:", mBufferInfo.presentationTimeUs);
+    LOG.i(mName, "DRAINING - About to write(). Adjusted presentation:", mBufferInfo.presentationTimeUs);
     OutputBuffer buffer = mOutputBufferPool.get();
     //noinspection ConstantConditions
     buffer.info = mBufferInfo;
     buffer.trackIndex = mTrackIndex;
     buffer.data = encodedData;
-    mController.write(mOutputBufferPool, buffer);
+    onWriteOutput(mOutputBufferPool, buffer);
 }
 mMediaCodec.releaseOutputBuffer(encoderStatus, false);
@@ -451,10 +464,11 @@ public abstract class MediaEncoder {
 // Not needed if drainAll because we already were asked to stop
 if (!drainAll
         && !mMaxLengthReached
-        && mStartTimeUs != Long.MIN_VALUE
-        && mLastTimeUs - mStartTimeUs > mMaxLengthMillis * 1000) {
+        && mFirstTimeUs != Long.MIN_VALUE
+        && mLastTimeUs - mFirstTimeUs > mMaxLengthMillis * 1000) {
     LOG.w(mName, "DRAINING - Reached maxLength! mLastTimeUs:", mLastTimeUs,
-            "mStartTimeUs:", mStartTimeUs,
+            "mStartTimeUs:", mFirstTimeUs,
+            "mDeltaUs:", mLastTimeUs - mFirstTimeUs,
             "mMaxLengthUs:", mMaxLengthMillis * 1000);
     onMaxLengthReached();
     break;
@@ -470,6 +484,11 @@ public abstract class MediaEncoder {
     }
 }
+@CallSuper
+protected void onWriteOutput(@NonNull OutputBufferPool pool, @NonNull OutputBuffer buffer) {
+    mController.write(pool, buffer);
+}
 protected abstract int getEncodedBitRate();
 /**
@@ -494,6 +513,11 @@ public abstract class MediaEncoder {
     onMaxLengthReached();
 }
+@SuppressWarnings("WeakerAccess")
+protected boolean hasReachedMaxLength() {
+    return mMaxLengthReached;
+}
 /**
  * Called by us (during {@link #drainOutput(boolean)}) or by subclasses
  * (through {@link #notifyMaxLengthReached()}) to notify that we reached the
@@ -520,7 +544,20 @@ public abstract class MediaEncoder {
  * @param firstFrameMillis the milliseconds of the first frame presentation
  */
 @SuppressWarnings("WeakerAccess")
-protected void notifyFirstFrameMillis(long firstFrameMillis) {
+protected final void notifyFirstFrameMillis(long firstFrameMillis) {
     mStartTimeMillis = firstFrameMillis;
 }
+
+/**
+ * Returns the number of events (see {@link #onEvent(String, Object)}) that were scheduled
+ * but still not passed to that function. Could be used to drop some of them if this
+ * number is too high.
+ *
+ * @param event the event type
+ * @return the pending events number
+ */
+@SuppressWarnings({"SameParameterValue", "ConstantConditions", "WeakerAccess"})
+protected final int getPendingEvents(@NonNull String event) {
+    return mPendingEvents.get(event).intValue();
+}
 }

--- a/cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/MediaEncoderEngine.java
+++ b/cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/MediaEncoderEngine.java
@@ -7,6 +7,7 @@ import android.os.Build;
 import android.text.format.DateFormat;
 import com.otaliastudios.cameraview.CameraLogger;
+import com.otaliastudios.cameraview.internal.utils.WorkerHandler;
 import androidx.annotation.NonNull;
 import androidx.annotation.Nullable;
@@ -68,8 +69,9 @@ public class MediaEncoderEngine {
     void onEncodingStart();
     /**
-     * Called when encoding stopped. At this point the mxuer might still be processing,
-     * but we have stopped receiving input (recording video and audio frames).
+     * Called when encoding stopped. At this point the muxer or the encoders might still be
+     * processing data, but we have stopped receiving input (recording video and audio frames).
+     * Actually, we will stop very soon.
      *
      * The {@link #onEncodingEnd(int, Exception)} callback will soon be called
      * with the results.
@@ -96,17 +98,18 @@ public class MediaEncoderEngine {
     public final static int END_BY_MAX_DURATION = 1;
     public final static int END_BY_MAX_SIZE = 2;
-    private List<MediaEncoder> mEncoders;
+    private final List<MediaEncoder> mEncoders = new ArrayList<>();
     private MediaMuxer mMediaMuxer;
-    private int mStartedEncodersCount;
-    private int mReleasedEncodersCount;
-    private boolean mMediaMuxerStarted;
+    private int mStartedEncodersCount = 0;
+    private int mStoppedEncodersCount = 0;
+    private boolean mMediaMuxerStarted = false;
     @SuppressWarnings("FieldCanBeLocal")
-    private Controller mController;
+    private final Controller mController = new Controller();
+    private final WorkerHandler mControllerThread = WorkerHandler.get("EncoderEngine");
+    private final Object mControllerLock = new Object();
     private Listener mListener;
     private int mEndReason = END_BY_USER;
     private int mPossibleEndReason;
-    private final Object mControllerLock = new Object();
     /**
      * Creates a new engine for the given file, with the given encoders and max limits,
@@ -126,8 +129,6 @@ public class MediaEncoderEngine {
         final long maxSize,
         @Nullable Listener listener) {
     mListener = listener;
-    mController = new Controller();
-    mEncoders = new ArrayList<>();
     mEncoders.add(videoEncoder);
     if (audioEncoder != null) {
         mEncoders.add(audioEncoder);
@@ -137,9 +138,6 @@ public class MediaEncoderEngine {
     } catch (IOException e) {
         throw new RuntimeException(e);
     }
-    mStartedEncodersCount = 0;
-    mMediaMuxerStarted = false;
-    mReleasedEncodersCount = 0;
     // Trying to convert the size constraints to duration constraints,
     // because they are super easy to check.
@@ -203,6 +201,9 @@ public class MediaEncoderEngine {
     for (MediaEncoder encoder : mEncoders) {
         encoder.stop();
     }
+    if (mListener != null) {
+        mListener.onEncodingStop();
+    }
 }
 /**
@@ -218,10 +219,14 @@ public class MediaEncoderEngine {
     // went wrong, and we propagate that to the listener.
     try {
         mMediaMuxer.stop();
-        mMediaMuxer.release();
     } catch (Exception e) {
         error = e;
     }
+    try {
+        mMediaMuxer.release();
+    } catch (Exception e) {
+        if (error == null) error = e;
+    }
     mMediaMuxer = null;
 }
 LOG.w("end:", "Dispatching end to listener - reason:", mEndReason, "error:", error);
@@ -231,8 +236,9 @@ public class MediaEncoderEngine {
     }
     mEndReason = END_BY_USER;
     mStartedEncodersCount = 0;
-    mReleasedEncodersCount = 0;
+    mStoppedEncodersCount = 0;
     mMediaMuxerStarted = false;
+    mControllerThread.destroy();
     LOG.i("end:", "Completed.");
 }
@@ -281,12 +287,19 @@ public class MediaEncoderEngine {
     LOG.w("notifyStarted:", "Assigned track", track, "to format", format.getString(MediaFormat.KEY_MIME));
     if (++mStartedEncodersCount == mEncoders.size()) {
         LOG.w("notifyStarted:", "All encoders have started. Starting muxer and dispatching onEncodingStart().");
+        // Go out of this thread since it might be very important for the
+        // encoders and we don't want to perform expensive operations here.
+        mControllerThread.run(new Runnable() {
+            @Override
+            public void run() {
                 mMediaMuxer.start();
                 mMediaMuxerStarted = true;
                 if (mListener != null) {
                     mListener.onEncodingStart();
                 }
+            }
+        });
     }
     return track;
 }
@@ -322,10 +335,6 @@ public class MediaEncoderEngine {
  * large differences.
  */
 public void write(@NonNull OutputBufferPool pool, @NonNull OutputBuffer buffer) {
-    if (!mMediaMuxerStarted) {
-        throw new IllegalStateException("Trying to write before muxer started");
-    }
     if (DEBUG_PERFORMANCE) {
         // When AUDIO = mono, this is called about twice the time. (200 vs 100 for 5 sec).
         Integer count = mDebugCount.get(buffer.trackIndex);
@@ -342,7 +351,6 @@ public class MediaEncoderEngine {
             "track:", buffer.trackIndex,
             "presentation:", buffer.info.presentationTimeUs);
     }
     mMediaMuxer.writeSampleData(buffer.trackIndex, buffer.data, buffer.info);
     pool.recycle(buffer);
 }
@@ -360,8 +368,15 @@ public class MediaEncoderEngine {
 if (--mStartedEncodersCount == 0) {
     LOG.w("requestStop:", "All encoders have requested a stop. Stopping them.");
     mEndReason = mPossibleEndReason;
+    // Go out of this thread since it might be very important for the
+    // encoders and we don't want to perform expensive operations here.
+    mControllerThread.run(new Runnable() {
+        @Override
+        public void run() {
             stop();
+        }
+    });
 }
@@ -372,13 +387,17 @@ public class MediaEncoderEngine {
 public void notifyStopped(int track) {
     synchronized (mControllerLock) {
         LOG.w("notifyStopped:", "Called for track", track);
-        if (++mReleasedEncodersCount == mEncoders.size()) {
-            LOG.w("requestStop:", "All encoders have been released. Stopping the muxer.");
-            if (mListener != null) {
-                mListener.onEncodingStop();
-            }
-            end();
+        if (++mStoppedEncodersCount == mEncoders.size()) {
+            LOG.w("requestStop:", "All encoders have been stopped. Stopping the muxer.");
+            // Go out of this thread since it might be very important for the
+            // encoders and we don't want to perform expensive operations here.
+            mControllerThread.run(new Runnable() {
+                @Override
+                public void run() {
+                    end();
+                }
+            });
         }
     }
 }

--- a/cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/TextureMediaEncoder.java
+++ b/cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/TextureMediaEncoder.java
@@ -1,12 +1,10 @@
 package com.otaliastudios.cameraview.video.encoding;
 import android.graphics.SurfaceTexture;
-import android.media.ImageReader;
 import android.opengl.Matrix;
 import android.os.Build;
 import com.otaliastudios.cameraview.CameraLogger;
-import com.otaliastudios.cameraview.internal.Issue514Workaround;
 import com.otaliastudios.cameraview.internal.egl.EglCore;
 import com.otaliastudios.cameraview.internal.egl.EglViewport;
 import com.otaliastudios.cameraview.internal.egl.EglWindowSurface;
@@ -38,6 +36,8 @@ public class TextureMediaEncoder extends VideoMediaEncoder<TextureConfig> {
     }
 });
+private long mFirstTimeUs = Long.MIN_VALUE;
 public TextureMediaEncoder(@NonNull TextureConfig config) {
     super(config.copy());
 }
@@ -53,7 +53,7 @@ public class TextureMediaEncoder extends VideoMediaEncoder<TextureConfig> {
  * Nanoseconds, in no meaningful time-base. Will be used for offsets only.
  * Typically this comes from {@link SurfaceTexture#getTimestamp()}.
  */
-public long timestamp;
+public long timestampNanos;
 /**
  * Milliseconds in the {@link System#currentTimeMillis()} reference.
@@ -65,6 +65,10 @@ public class TextureMediaEncoder extends VideoMediaEncoder<TextureConfig> {
  * The transformation matrix for the base texture.
  */
 public float[] transform = new float[16];
+
+private long timestampUs() {
+    return timestampNanos / 1000L;
+}
 }
 /**
@@ -95,6 +99,36 @@ public class TextureMediaEncoder extends VideoMediaEncoder<TextureConfig> {
     mViewport = new EglViewport();
 }
+
+/**
+ * Any number of pending events > 1 means that we should skip this frame.
+ * To avoid skipping too many frames, we'll use 2 for now, but this just means
+ * that we'll be drawing the same frame twice.
+ *
+ * When an event is posted, the textureId data has already been updated so we're
+ * too late to draw the old one and it should be skipped.
+ *
+ * This is especially important if we perform overlay drawing here, since that
+ * makes this class thread busy and slows down the event dispatching.
+ *
+ * @param timestampUs frame timestamp
+ * @return true to render
+ */
+@Override
+protected boolean shouldRenderFrame(long timestampUs) {
+    if (!super.shouldRenderFrame(timestampUs)) {
+        return false;
+    } else if (mFrameNumber <= 10) {
+        // Always render the first few frames, or muxer fails.
+        return true;
+    } else if (getPendingEvents(FRAME_EVENT) > 2) {
+        LOG.w("shouldRenderFrame - Dropping frame because we already have too many pending events:",
+                getPendingEvents(FRAME_EVENT));
+        return false;
+    } else {
+        return true;
+    }
+}
 @EncoderThread
 @Override
 protected void onEvent(@NonNull String event, @Nullable Object data) {
@@ -103,25 +137,42 @@ public class TextureMediaEncoder extends VideoMediaEncoder<TextureConfig> {
     if (frame == null) {
         throw new IllegalArgumentException("Got null frame for FRAME_EVENT.");
     }
-    if (frame.timestamp == 0) { // grafika
-        mFramePool.recycle(frame);
-        return;
-    }
-    if (mFrameNumber < 0) { // We were asked to stop.
+    if (!shouldRenderFrame(frame.timestampUs())) {
         mFramePool.recycle(frame);
         return;
     }
-    mFrameNumber++;
-    // Notify we're got the first frame and its absolute time.
     if (mFrameNumber == 1) {
         notifyFirstFrameMillis(frame.timestampMillis);
     }
+    // Notify we have reached the max length value.
+    if (mFirstTimeUs == Long.MIN_VALUE) mFirstTimeUs = frame.timestampUs();
+    if (!hasReachedMaxLength()) {
+        boolean didReachMaxLength = (frame.timestampUs() - mFirstTimeUs) > getMaxLengthMillis() * 1000L;
+        if (didReachMaxLength) {
+            LOG.w("onEvent -",
+                    "frameNumber:", mFrameNumber,
+                    "timestampUs:", frame.timestampUs(),
+                    "firstTimeUs:", mFirstTimeUs,
+                    "- reached max length! deltaUs:", frame.timestampUs() - mFirstTimeUs);
+            notifyMaxLengthReached();
+        }
+    }
     // First, drain any previous data.
-    LOG.i("onEvent", "frameNumber:", mFrameNumber, "timestamp:", frame.timestamp, "- draining.");
+    LOG.i("onEvent -",
+            "frameNumber:", mFrameNumber,
+            "timestampUs:", frame.timestampUs(),
+            "- draining.");
     drainOutput(false);
     // Then draw on the surface.
-    LOG.i("onEvent", "frameNumber:", mFrameNumber, "timestamp:", frame.timestamp, "- drawing.");
+    LOG.i("onEvent -",
+            "frameNumber:", mFrameNumber,
+            "timestampUs:", frame.timestampUs(),
+            "- rendering.");
     // 1. We must scale this matrix like GlCameraPreview does, because it might have some cropping.
     // Scaling takes place with respect to the (0, 0, 0) point, so we must apply a Translation to compensate.
@@ -152,7 +203,7 @@ public class TextureMediaEncoder extends VideoMediaEncoder<TextureConfig> {
     if (mConfig.hasOverlay()) {
         mConfig.overlayDrawer.render();
     }
-    mWindow.setPresentationTime(frame.timestamp);
+    mWindow.setPresentationTime(frame.timestampNanos);
     mWindow.swapBuffers();
     mFramePool.recycle(frame);
 }
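The pending-events check above is a small backpressure valve between the renderer thread and the encoder thread. Here is a self-contained sketch of the same idea with illustrative names (MAX_PENDING plays the role of the hardcoded 2 above):

```java
import java.util.concurrent.Executor;
import java.util.concurrent.atomic.AtomicInteger;

// Standalone sketch of the frame-dropping backpressure used above:
// the producer counts scheduled tasks and drops new work while the
// worker is too far behind. All names here are illustrative.
class BackpressureSketch {
    private static final int MAX_PENDING = 2; // mirrors the threshold above
    private final AtomicInteger pending = new AtomicInteger(0);

    // Called on the producer (renderer) thread for every new frame.
    boolean offerFrame(final Runnable encodeTask, Executor worker) {
        if (pending.get() > MAX_PENDING) {
            return false; // drop this frame: the encoder is falling behind
        }
        pending.incrementAndGet();
        worker.execute(new Runnable() {
            @Override
            public void run() {
                try {
                    encodeTask.run();
                } finally {
                    pending.decrementAndGet(); // one task retired
                }
            }
        });
        return true;
    }
}
```

Dropping on the producer side keeps the queue short, so the encoder never spends time rendering a texture whose contents have already been overwritten by a newer frame.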

--- a/cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/VideoMediaEncoder.java
+++ b/cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/VideoMediaEncoder.java
@@ -7,6 +7,7 @@ import android.os.Build;
 import androidx.annotation.NonNull;
 import androidx.annotation.RequiresApi;
+import android.os.Bundle;
 import android.view.Surface;
 import com.otaliastudios.cameraview.CameraLogger;
@@ -43,6 +44,8 @@ abstract class VideoMediaEncoder<C extends VideoConfig> extends MediaEncoder {
 @SuppressWarnings("WeakerAccess")
 protected int mFrameNumber = -1;
+private boolean mSyncFrameFound = false;
 VideoMediaEncoder(@NonNull C config) {
     super("VideoEncoder");
     mConfig = config;
@@ -60,7 +63,7 @@ abstract class VideoMediaEncoder<C extends VideoConfig> extends MediaEncoder {
     format.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
     format.setInteger(MediaFormat.KEY_BIT_RATE, mConfig.bitRate);
     format.setInteger(MediaFormat.KEY_FRAME_RATE, mConfig.frameRate);
-    format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 5);
+    format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1); // seconds between key frames!
     format.setInteger("rotation-degrees", mConfig.rotation);
     try {
@@ -92,8 +95,52 @@ abstract class VideoMediaEncoder<C extends VideoConfig> extends MediaEncoder {
     drainOutput(true);
 }
+
+/**
+ * The first frame that we write MUST have the BUFFER_FLAG_SYNC_FRAME flag set.
+ * It sometimes doesn't because we might drop some frames in {@link #drainOutput(boolean)},
+ * basically if, at the time, the muxer was not started yet, due to Audio setup being slow.
+ *
+ * We can't add the BUFFER_FLAG_SYNC_FRAME flag to the first frame just because we'd like to.
+ * But we can drop frames until we get a sync one.
+ *
+ * @param pool the buffer pool
+ * @param buffer the buffer
+ */
+@Override
+protected void onWriteOutput(@NonNull OutputBufferPool pool, @NonNull OutputBuffer buffer) {
+    if (!mSyncFrameFound) {
+        LOG.w("onWriteOutput:", "sync frame not found yet. Checking.");
+        int flag = MediaCodec.BUFFER_FLAG_SYNC_FRAME;
+        boolean hasFlag = (buffer.info.flags & flag) == flag;
+        if (hasFlag) {
+            LOG.w("onWriteOutput:", "SYNC FRAME FOUND!");
+            mSyncFrameFound = true;
+            super.onWriteOutput(pool, buffer);
+        } else {
+            LOG.w("onWriteOutput:", "DROPPING FRAME and requesting a sync frame soon.");
+            if (Build.VERSION.SDK_INT >= 19) {
+                Bundle params = new Bundle();
+                params.putInt(MediaCodec.PARAMETER_KEY_REQUEST_SYNC_FRAME, 0);
+                mMediaCodec.setParameters(params);
+            }
+            pool.recycle(buffer);
+        }
+    } else {
+        super.onWriteOutput(pool, buffer);
+    }
+}
 @Override
 protected int getEncodedBitRate() {
     return mConfig.bitRate;
 }
+
+@SuppressWarnings("BooleanMethodIsAlwaysInverted")
+protected boolean shouldRenderFrame(long timestampUs) {
+    if (timestampUs == 0) return false; // grafika said so
+    if (mFrameNumber < 0) return false; // We were asked to stop.
+    if (hasReachedMaxLength()) return false; // We were not asked yet, but we'll be soon.
+    mFrameNumber++;
+    return true;
+}
 }
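Some worked numbers for the KEY_I_FRAME_INTERVAL change, assuming a 30 fps stream for illustration (the real rate comes from the video config): dropping non-sync frames until the next key frame now discards at most about one second of video instead of five, and the explicit PARAMETER_KEY_REQUEST_SYNC_FRAME request shortens that wait further.

```java
// Hedged arithmetic sketch of the worst case, with an assumed frame rate.
public class KeyFrameMath {
    public static void main(String[] args) {
        int fps = 30;          // assumption for this demo
        int oldInterval = 5;   // previous KEY_I_FRAME_INTERVAL, in seconds
        int newInterval = 1;   // value set by this patch, in seconds
        System.out.println("worst-case frames dropped waiting for a sync frame:");
        System.out.println("  old: " + fps * oldInterval + " (~" + oldInterval + "s)");
        System.out.println("  new: " + fps * newInterval + " (~" + newInterval + "s)");
    }
}
```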

--- a/demo/src/main/java/com/otaliastudios/cameraview/demo/CameraActivity.java
+++ b/demo/src/main/java/com/otaliastudios/cameraview/demo/CameraActivity.java
@@ -225,6 +225,13 @@ public class CameraActivity extends AppCompatActivity implements View.OnClickLis
         super.onVideoRecordingStart();
         LOG.w("onVideoRecordingStart!");
     }
+
+    @Override
+    public void onVideoRecordingEnd() {
+        super.onVideoRecordingEnd();
+        message("Video taken. Processing...", false);
+        LOG.w("onVideoRecordingEnd!");
+    }
 }
 @Override
@Override @Override

--- a/demo/src/main/res/layout/activity_camera.xml
+++ b/demo/src/main/res/layout/activity_camera.xml
@@ -21,7 +21,7 @@
     app:cameraPlaySounds="true"
     app:cameraGrid="off"
     app:cameraFlash="off"
-    app:cameraAudio="off"
+    app:cameraAudio="on"
     app:cameraFacing="back"
     app:cameraGestureTap="autoFocus"
     app:cameraGestureLongTap="none"
@@ -51,7 +51,7 @@
     android:layout_margin="16dp"
     android:layout_gravity="top|end"
     android:background="@drawable/background"
-    android:elevation="6dp"
+    android:elevation="3dp"
     app:srcCompat="@drawable/ic_switch" />
 <!-- Controls -->
@@ -155,6 +155,7 @@
     app:behavior_hideable="true"
     app:behavior_peekHeight="300dp"
     app:behavior_skipCollapsed="false"
+    android:elevation="4dp"
     android:layout_width="match_parent"
     android:layout_height="wrap_content">
     <LinearLayout
