Small improvements

Branch: pull/530/head
Author: Mattia Iavarone, 6 years ago
Parent: 3d8838409f
Commit: 8c4c909bdc
Changed files:
  1. cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/AudioMediaEncoder.java (53)
  2. cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/MediaEncoder.java (18)
  3. cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/MediaEncoderEngine.java (26)
  4. cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/TextureMediaEncoder.java (17)
  5. demo/src/main/java/com/otaliastudios/cameraview/demo/CameraActivity.java (7)

cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/AudioMediaEncoder.java

@@ -47,11 +47,11 @@ public class AudioMediaEncoder extends MediaEncoder {
     private final LinkedBlockingQueue<InputBuffer> mInputBufferQueue = new LinkedBlockingQueue<>();
 
     // Just to debug performance.
-    private int mSendCount = 0;
-    private int mExecuteCount = 0;
-    private long mAvgSendDelay = 0;
-    private long mAvgExecuteDelay = 0;
-    private Map<Long, Long> mSendStartMap = new HashMap<>();
+    private int mDebugSendCount = 0;
+    private int mDebugExecuteCount = 0;
+    private long mDebugSendAvgDelay = 0;
+    private long mDebugExecuteAvgDelay = 0;
+    private Map<Long, Long> mDebugSendStartMap = new HashMap<>();
 
     public AudioMediaEncoder(@NonNull AudioConfig config) {
         super("AudioEncoder");

@@ -134,11 +134,14 @@ public class AudioMediaEncoder extends MediaEncoder {
         private AudioRecord mAudioRecord;
         private ByteBuffer mCurrentBuffer;
-        private int mReadBytes;
+        private int mCurrentReadBytes;
         private long mLastTimeUs;
         private long mFirstTimeUs = Long.MIN_VALUE;
+        private boolean mReachedMaxLength = false;
 
         private AudioRecordingThread() {
+            setPriority(Thread.MAX_PRIORITY);
             final int minBufferSize = AudioRecord.getMinBufferSize(
                     mConfig.samplingFrequency,
                     mConfig.audioFormatChannels(),

@@ -156,7 +159,6 @@ public class AudioMediaEncoder extends MediaEncoder {
                     mConfig.audioFormatChannels(),
                     mConfig.encoding,
                     bufferSize);
-            setPriority(Thread.MAX_PRIORITY);
         }
 
         @Override

@@ -196,25 +198,25 @@ public class AudioMediaEncoder extends MediaEncoder {
             // with left and right bytes. https://stackoverflow.com/q/20594750/4288782
             if (PERFORMANCE_DEBUG) {
                 long before = System.nanoTime();
-                mReadBytes = mAudioRecord.read(mCurrentBuffer, mConfig.frameSize());
+                mCurrentReadBytes = mAudioRecord.read(mCurrentBuffer, mConfig.frameSize());
                 long after = System.nanoTime();
                 float delayMillis = (after - before) / 1000000F;
-                float durationMillis = AudioTimestamp.bytesToMillis(mReadBytes, mConfig.byteRate());
+                float durationMillis = AudioTimestamp.bytesToMillis(mCurrentReadBytes, mConfig.byteRate());
                 LOG.v("read thread - reading took:", delayMillis,
                         "should be:", durationMillis,
                         "delay:", delayMillis - durationMillis);
             } else {
-                mReadBytes = mAudioRecord.read(mCurrentBuffer, mConfig.frameSize());
+                mCurrentReadBytes = mAudioRecord.read(mCurrentBuffer, mConfig.frameSize());
             }
-            LOG.i("read thread - eos:", endOfStream, "- Read new audio frame. Bytes:", mReadBytes);
-            if (mReadBytes > 0) { // Good read: increase PTS.
-                increaseTime(mReadBytes, endOfStream);
+            LOG.i("read thread - eos:", endOfStream, "- Read new audio frame. Bytes:", mCurrentReadBytes);
+            if (mCurrentReadBytes > 0) { // Good read: increase PTS.
+                increaseTime(mCurrentReadBytes, endOfStream);
                 LOG.i("read thread - eos:", endOfStream, "- mLastTimeUs:", mLastTimeUs);
-                mCurrentBuffer.limit(mReadBytes);
+                mCurrentBuffer.limit(mCurrentReadBytes);
                 enqueue(mCurrentBuffer, mLastTimeUs, endOfStream);
-            } else if (mReadBytes == AudioRecord.ERROR_INVALID_OPERATION) {
+            } else if (mCurrentReadBytes == AudioRecord.ERROR_INVALID_OPERATION) {
                 LOG.e("read thread - eos:", endOfStream, "- Got AudioRecord.ERROR_INVALID_OPERATION");
-            } else if (mReadBytes == AudioRecord.ERROR_BAD_VALUE) {
+            } else if (mCurrentReadBytes == AudioRecord.ERROR_BAD_VALUE) {
                 LOG.e("read thread - eos:", endOfStream, "- Got AudioRecord.ERROR_BAD_VALUE");
             }
         }

@@ -239,11 +241,14 @@ public class AudioMediaEncoder extends MediaEncoder {
             }
 
             // See if we reached the max length value.
+            if (!mReachedMaxLength) {
                 boolean didReachMaxLength = (mLastTimeUs - mFirstTimeUs) > getMaxLengthMillis() * 1000L;
                 if (didReachMaxLength && !endOfStream) {
                     LOG.w("read thread - this frame reached the maxLength! deltaUs:", mLastTimeUs - mFirstTimeUs);
+                    mReachedMaxLength = true;
                     notifyMaxLengthReached();
                 }
+            }
 
             // Add zeroes if we have huge gaps. Even if timestamps are correct, if we have gaps between
             // them, the encoder might shrink all timestamps to have a continuous audio. This results

@@ -280,7 +285,7 @@ public class AudioMediaEncoder extends MediaEncoder {
         private void enqueue(@NonNull ByteBuffer byteBuffer, long timestamp, boolean isEndOfStream) {
             if (PERFORMANCE_DEBUG) {
-                mSendStartMap.put(timestamp, System.nanoTime() / 1000000);
+                mDebugSendStartMap.put(timestamp, System.nanoTime() / 1000000);
             }
             int readBytes = byteBuffer.remaining();
             InputBuffer inputBuffer = mInputBufferPool.get();

@@ -320,11 +325,11 @@ public class AudioMediaEncoder extends MediaEncoder {
             // Performance logging
             if (PERFORMANCE_DEBUG) {
                 long sendEnd = System.nanoTime() / 1000000;
-                Long sendStart = mSendStartMap.remove(inputBuffer.timestamp);
+                Long sendStart = mDebugSendStartMap.remove(inputBuffer.timestamp);
                 //noinspection StatementWithEmptyBody
                 if (sendStart != null) {
-                    mAvgSendDelay = ((mAvgSendDelay * mSendCount) + (sendEnd - sendStart)) / (++mSendCount);
-                    LOG.v("send delay millis:", sendEnd - sendStart, "average:", mAvgSendDelay);
+                    mDebugSendAvgDelay = ((mDebugSendAvgDelay * mDebugSendCount) + (sendEnd - sendStart)) / (++mDebugSendCount);
+                    LOG.v("send delay millis:", sendEnd - sendStart, "average:", mDebugSendAvgDelay);
                 } else {
                     // This input buffer was already processed (but tryAcquire failed for now).
                 }

@@ -348,8 +353,8 @@ public class AudioMediaEncoder extends MediaEncoder {
             if (PERFORMANCE_DEBUG) {
                 // After latest changes, the count here is not so different between MONO and STEREO.
                 // We get about 400 frames in both cases (430 for MONO, but doesn't seem like a big issue).
-                LOG.e("EXECUTE DELAY MILLIS:", mAvgExecuteDelay, "COUNT:", mExecuteCount);
-                LOG.e("SEND DELAY MILLIS:", mAvgSendDelay, "COUNT:", mSendCount);
+                LOG.e("EXECUTE DELAY MILLIS:", mDebugExecuteAvgDelay, "COUNT:", mDebugExecuteCount);
+                LOG.e("SEND DELAY MILLIS:", mDebugSendAvgDelay, "COUNT:", mDebugSendCount);
             }
         }

@@ -371,8 +376,8 @@ public class AudioMediaEncoder extends MediaEncoder {
                 if (PERFORMANCE_DEBUG) {
                     long executeEnd = System.nanoTime() / 1000000;
-                    mAvgExecuteDelay = ((mAvgExecuteDelay * mExecuteCount) + (executeEnd - executeStart)) / (++mExecuteCount);
-                    LOG.v("execute delay millis:", executeEnd - executeStart, "average:", mAvgExecuteDelay);
+                    mDebugExecuteAvgDelay = ((mDebugExecuteAvgDelay * mDebugExecuteCount) + (executeEnd - executeStart)) / (++mDebugExecuteCount);
+                    LOG.v("execute delay millis:", executeEnd - executeStart, "average:", mDebugExecuteAvgDelay);
                 }
             }
         }
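Side note on the renamed mDebug* fields above: they keep an incremental running average, so the debug path never has to store per-frame samples. A minimal standalone sketch of the same pattern (class and method names are mine, not the library's):

    // Incremental mean: newAvg = (oldAvg * oldCount + sample) / (oldCount + 1).
    // Integer division keeps it cheap; the precision loss is fine for debug logging.
    class RunningAverage {
        private int count = 0;
        private long average = 0;

        long add(long sampleMillis) {
            average = ((average * count) + sampleMillis) / (++count);
            return average;
        }
    }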

cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/MediaEncoder.java

@@ -122,7 +122,7 @@ public abstract class MediaEncoder {
     private boolean mMaxLengthReached;
 
     private long mStartTimeMillis = 0; // In System.currentTimeMillis()
-    private long mStartTimeUs = Long.MIN_VALUE; // In unknown reference
+    private long mFirstTimeUs = Long.MIN_VALUE; // In unknown reference
     private long mLastTimeUs = 0;
 
     private long mDebugSetStateTimestamp = Long.MIN_VALUE;

@@ -422,9 +422,9 @@ public abstract class MediaEncoder {
                 // Store mStartTimeUs and mLastTimeUs, useful to detect the max length
                 // reached and stop recording when needed.
-                if (mStartTimeUs == Long.MIN_VALUE) {
-                    mStartTimeUs = mBufferInfo.presentationTimeUs;
-                    LOG.w(mName, "DRAINING - Got the first presentation time:", mStartTimeUs);
+                if (mFirstTimeUs == Long.MIN_VALUE) {
+                    mFirstTimeUs = mBufferInfo.presentationTimeUs;
+                    LOG.w(mName, "DRAINING - Got the first presentation time:", mFirstTimeUs);
                 }
                 mLastTimeUs = mBufferInfo.presentationTimeUs;

@@ -434,7 +434,7 @@ public abstract class MediaEncoder {
                 // To address this, encoders are required to call notifyFirstFrameMillis
                 // so we can adjust here - moving to 1970 reference.
                 // Extra benefit: we never pass a pts equal to 0, which some encoders refuse.
-                mBufferInfo.presentationTimeUs = (mStartTimeMillis * 1000) + mLastTimeUs - mStartTimeUs;
+                mBufferInfo.presentationTimeUs = (mStartTimeMillis * 1000) + mLastTimeUs - mFirstTimeUs;
 
                 // Write.
                 LOG.v(mName, "DRAINING - About to write(). Adjusted presentation:", mBufferInfo.presentationTimeUs);

@@ -451,10 +451,10 @@ public abstract class MediaEncoder {
                 // Not needed if drainAll because we already were asked to stop
                 if (!drainAll
                         && !mMaxLengthReached
-                        && mStartTimeUs != Long.MIN_VALUE
-                        && mLastTimeUs - mStartTimeUs > mMaxLengthMillis * 1000) {
+                        && mFirstTimeUs != Long.MIN_VALUE
+                        && mLastTimeUs - mFirstTimeUs > mMaxLengthMillis * 1000) {
                     LOG.w(mName, "DRAINING - Reached maxLength! mLastTimeUs:", mLastTimeUs,
-                            "mStartTimeUs:", mStartTimeUs,
+                            "mStartTimeUs:", mFirstTimeUs,
                             "mMaxLengthUs:", mMaxLengthMillis * 1000);
                     onMaxLengthReached();
                     break;

@@ -520,7 +520,7 @@ public abstract class MediaEncoder {
      * @param firstFrameMillis the milliseconds of the first frame presentation
      */
     @SuppressWarnings("WeakerAccess")
-    protected void notifyFirstFrameMillis(long firstFrameMillis) {
+    protected final void notifyFirstFrameMillis(long firstFrameMillis) {
         mStartTimeMillis = firstFrameMillis;
     }
 }
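The mStartTimeUs to mFirstTimeUs rename makes the drain logic easier to follow: encoder output timestamps arrive in an unknown reference, so they are re-based onto the 1970 epoch using the wall-clock millis of the first frame. A hedged sketch of that arithmetic (helper name and parameters are illustrative, not the library's API):

    // Re-base a pts from the encoder's unknown reference onto the 1970 epoch:
    // take the offset from the first frame, then anchor it at the wall-clock
    // millis of that first frame. As a bonus, the result is never 0, which
    // some encoders refuse.
    static long adjustedPresentationTimeUs(long ptsUs,
                                           long firstPtsUs,
                                           long firstFrameMillis) {
        return (firstFrameMillis * 1000L) + (ptsUs - firstPtsUs);
    }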

cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/MediaEncoderEngine.java

@@ -68,8 +68,9 @@ public class MediaEncoderEngine {
        void onEncodingStart();

        /**
-         * Called when encoding stopped. At this point the mxuer might still be processing,
-         * but we have stopped receiving input (recording video and audio frames).
+         * Called when encoding stopped. At this point the muxer or the encoders might still be
+         * processing data, but we have stopped receiving input (recording video and audio frames).
+         * Actually, we will stop very soon.
         *
         * The {@link #onEncodingEnd(int, Exception)} callback will soon be called
         * with the results.

@@ -98,9 +99,9 @@ public class MediaEncoderEngine {
    private List<MediaEncoder> mEncoders;
    private MediaMuxer mMediaMuxer;
-    private int mStartedEncodersCount;
-    private int mReleasedEncodersCount;
-    private boolean mMediaMuxerStarted;
+    private int mStartedEncodersCount = 0;
+    private int mStoppedEncodersCount = 0;
+    private boolean mMediaMuxerStarted = false;
    @SuppressWarnings("FieldCanBeLocal")
    private Controller mController;
    private Listener mListener;

@@ -137,9 +138,6 @@ public class MediaEncoderEngine {
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
-        mStartedEncodersCount = 0;
-        mMediaMuxerStarted = false;
-        mReleasedEncodersCount = 0;

        // Trying to convert the size constraints to duration constraints,
        // because they are super easy to check.

@@ -203,6 +201,9 @@ public class MediaEncoderEngine {
        for (MediaEncoder encoder : mEncoders) {
            encoder.stop();
        }
+        if (mListener != null) {
+            mListener.onEncodingStop();
+        }
    }

    /**

@@ -231,7 +232,7 @@ public class MediaEncoderEngine {
        }
        mEndReason = END_BY_USER;
        mStartedEncodersCount = 0;
-        mReleasedEncodersCount = 0;
+        mStoppedEncodersCount = 0;
        mMediaMuxerStarted = false;
        LOG.i("end:", "Completed.");
    }

@@ -372,11 +373,8 @@ public class MediaEncoderEngine {
        public void notifyStopped(int track) {
            synchronized (mControllerLock) {
                LOG.w("notifyStopped:", "Called for track", track);
-                if (++mReleasedEncodersCount == mEncoders.size()) {
-                    LOG.w("requestStop:", "All encoders have been released. Stopping the muxer.");
-                    if (mListener != null) {
-                        mListener.onEncodingStop();
-                    }
+                if (++mStoppedEncodersCount == mEncoders.size()) {
+                    LOG.w("requestStop:", "All encoders have been stopped. Stopping the muxer.");
                    end();
                }
            }
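The notifyStopped() change is the interesting one: each encoder reports exactly once, and only the last report tears down the muxer, while onEncodingStop now fires as soon as stop is requested rather than after the last encoder drains. A standalone sketch of the counting pattern (a hypothetical class, not the library's API):

    // Last-one-out latch: every encoder calls notifyStopped() once; only the
    // call that completes the set returns true and should trigger teardown.
    final class StopLatch {
        private final Object lock = new Object();
        private final int total;
        private int stopped = 0;

        StopLatch(int totalEncoders) {
            this.total = totalEncoders;
        }

        boolean notifyStopped() {
            synchronized (lock) {
                return ++stopped == total;
            }
        }
    }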

cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/TextureMediaEncoder.java

@@ -31,7 +31,7 @@ public class TextureMediaEncoder extends VideoMediaEncoder<TextureConfig> {
    private EglCore mEglCore;
    private EglWindowSurface mWindow;
    private EglViewport mViewport;
-    private Pool<Frame> mFramePool = new Pool<>(100, new Pool.Factory<Frame>() {
+    private Pool<Frame> mFramePool = new Pool<>(Integer.MAX_VALUE, new Pool.Factory<Frame>() {
        @Override
        public Frame create() {
            return new Frame();

@@ -103,15 +103,11 @@ public class TextureMediaEncoder extends VideoMediaEncoder<TextureConfig> {
        if (frame == null) {
            throw new IllegalArgumentException("Got null frame for FRAME_EVENT.");
        }
-        if (frame.timestamp == 0) { // grafika
+        if (!shouldRenderFrame(frame.timestamp)) {
            mFramePool.recycle(frame);
            return;
        }
-        if (mFrameNumber < 0) { // We were asked to stop.
-            mFramePool.recycle(frame);
-            return;
-        }
-        mFrameNumber++;
        if (mFrameNumber == 1) {
            notifyFirstFrameMillis(frame.timestampMillis);
        }

@@ -174,4 +170,11 @@ public class TextureMediaEncoder extends VideoMediaEncoder<TextureConfig> {
            mEglCore = null;
        }
    }
+
+    private boolean shouldRenderFrame(long timestamp) {
+        if (timestamp == 0) return false; // grafika said so
+        if (mFrameNumber < 0) return false; // We were asked to stop.
+        mFrameNumber++;
+        return true;
+    }
 }
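Raising the Frame pool cap from 100 to Integer.MAX_VALUE effectively makes the pool unbounded, so a burst of frame events can no longer exhaust it. A minimal sketch of the pool contract this code relies on (my own simplified version under assumed semantics, not the library's Pool class):

    // Bounded object pool: reuses recycled instances, creates new ones up to
    // maxSize, and returns null once the cap is hit. With Integer.MAX_VALUE
    // as the cap, get() never returns null in practice.
    class SimplePool<T> {
        interface Factory<T> { T create(); }

        private final int maxSize;
        private final Factory<T> factory;
        private final java.util.ArrayDeque<T> recycled = new java.util.ArrayDeque<>();
        private int created = 0;

        SimplePool(int maxSize, Factory<T> factory) {
            this.maxSize = maxSize;
            this.factory = factory;
        }

        synchronized T get() {
            T item = recycled.poll();
            if (item == null && created < maxSize) {
                item = factory.create();
                created++;
            }
            return item; // null when the pool is exhausted
        }

        synchronized void recycle(T item) {
            recycled.offer(item);
        }
    }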

demo/src/main/java/com/otaliastudios/cameraview/demo/CameraActivity.java

@@ -225,6 +225,13 @@ public class CameraActivity extends AppCompatActivity implements View.OnClickListener
            super.onVideoRecordingStart();
            LOG.w("onVideoRecordingStart!");
        }
+
+        @Override
+        public void onVideoRecordingEnd() {
+            super.onVideoRecordingEnd();
+            message("Video taken. Processing...", false);
+            LOG.w("onVideoRecordingEnd!");
+        }
    }

    @Override
