Small improvements

pull/530/head
Mattia Iavarone 6 years ago
parent 3d8838409f
commit 8c4c909bdc
  1. cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/AudioMediaEncoder.java (61 changed lines)
  2. cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/MediaEncoder.java (18 changed lines)
  3. cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/MediaEncoderEngine.java (26 changed lines)
  4. cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/TextureMediaEncoder.java (17 changed lines)
  5. demo/src/main/java/com/otaliastudios/cameraview/demo/CameraActivity.java (7 changed lines)

cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/AudioMediaEncoder.java
@@ -47,11 +47,11 @@ public class AudioMediaEncoder extends MediaEncoder {
     private final LinkedBlockingQueue<InputBuffer> mInputBufferQueue = new LinkedBlockingQueue<>();

     // Just to debug performance.
-    private int mSendCount = 0;
-    private int mExecuteCount = 0;
-    private long mAvgSendDelay = 0;
-    private long mAvgExecuteDelay = 0;
-    private Map<Long, Long> mSendStartMap = new HashMap<>();
+    private int mDebugSendCount = 0;
+    private int mDebugExecuteCount = 0;
+    private long mDebugSendAvgDelay = 0;
+    private long mDebugExecuteAvgDelay = 0;
+    private Map<Long, Long> mDebugSendStartMap = new HashMap<>();

     public AudioMediaEncoder(@NonNull AudioConfig config) {
         super("AudioEncoder");
@@ -134,11 +134,14 @@ public class AudioMediaEncoder extends MediaEncoder {
         private AudioRecord mAudioRecord;
         private ByteBuffer mCurrentBuffer;
-        private int mReadBytes;
+        private int mCurrentReadBytes;
         private long mLastTimeUs;
         private long mFirstTimeUs = Long.MIN_VALUE;
+        private boolean mReachedMaxLength = false;

         private AudioRecordingThread() {
+            setPriority(Thread.MAX_PRIORITY);
             final int minBufferSize = AudioRecord.getMinBufferSize(
                     mConfig.samplingFrequency,
                     mConfig.audioFormatChannels(),
@@ -156,7 +159,6 @@ public class AudioMediaEncoder extends MediaEncoder {
                     mConfig.audioFormatChannels(),
                     mConfig.encoding,
                     bufferSize);
-            setPriority(Thread.MAX_PRIORITY);
         }

         @Override
@@ -196,25 +198,25 @@ public class AudioMediaEncoder extends MediaEncoder {
             // with left and right bytes. https://stackoverflow.com/q/20594750/4288782
             if (PERFORMANCE_DEBUG) {
                 long before = System.nanoTime();
-                mReadBytes = mAudioRecord.read(mCurrentBuffer, mConfig.frameSize());
+                mCurrentReadBytes = mAudioRecord.read(mCurrentBuffer, mConfig.frameSize());
                 long after = System.nanoTime();
                 float delayMillis = (after - before) / 1000000F;
-                float durationMillis = AudioTimestamp.bytesToMillis(mReadBytes, mConfig.byteRate());
+                float durationMillis = AudioTimestamp.bytesToMillis(mCurrentReadBytes, mConfig.byteRate());
                 LOG.v("read thread - reading took:", delayMillis,
                         "should be:", durationMillis,
                         "delay:", delayMillis - durationMillis);
             } else {
-                mReadBytes = mAudioRecord.read(mCurrentBuffer, mConfig.frameSize());
+                mCurrentReadBytes = mAudioRecord.read(mCurrentBuffer, mConfig.frameSize());
             }
-            LOG.i("read thread - eos:", endOfStream, "- Read new audio frame. Bytes:", mReadBytes);
-            if (mReadBytes > 0) { // Good read: increase PTS.
-                increaseTime(mReadBytes, endOfStream);
+            LOG.i("read thread - eos:", endOfStream, "- Read new audio frame. Bytes:", mCurrentReadBytes);
+            if (mCurrentReadBytes > 0) { // Good read: increase PTS.
+                increaseTime(mCurrentReadBytes, endOfStream);
                 LOG.i("read thread - eos:", endOfStream, "- mLastTimeUs:", mLastTimeUs);
-                mCurrentBuffer.limit(mReadBytes);
+                mCurrentBuffer.limit(mCurrentReadBytes);
                 enqueue(mCurrentBuffer, mLastTimeUs, endOfStream);
-            } else if (mReadBytes == AudioRecord.ERROR_INVALID_OPERATION) {
+            } else if (mCurrentReadBytes == AudioRecord.ERROR_INVALID_OPERATION) {
                 LOG.e("read thread - eos:", endOfStream, "- Got AudioRecord.ERROR_INVALID_OPERATION");
-            } else if (mReadBytes == AudioRecord.ERROR_BAD_VALUE) {
+            } else if (mCurrentReadBytes == AudioRecord.ERROR_BAD_VALUE) {
                 LOG.e("read thread - eos:", endOfStream, "- Got AudioRecord.ERROR_BAD_VALUE");
             }
         }
@@ -239,10 +241,13 @@ public class AudioMediaEncoder extends MediaEncoder {
             }
             // See if we reached the max length value.
-            boolean didReachMaxLength = (mLastTimeUs - mFirstTimeUs) > getMaxLengthMillis() * 1000L;
-            if (didReachMaxLength && !endOfStream) {
-                LOG.w("read thread - this frame reached the maxLength! deltaUs:", mLastTimeUs - mFirstTimeUs);
-                notifyMaxLengthReached();
+            if (!mReachedMaxLength) {
+                boolean didReachMaxLength = (mLastTimeUs - mFirstTimeUs) > getMaxLengthMillis() * 1000L;
+                if (didReachMaxLength && !endOfStream) {
+                    LOG.w("read thread - this frame reached the maxLength! deltaUs:", mLastTimeUs - mFirstTimeUs);
+                    mReachedMaxLength = true;
+                    notifyMaxLengthReached();
+                }
             }

             // Add zeroes if we have huge gaps. Even if timestamps are correct, if we have gaps between
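The mReachedMaxLength flag added in this hunk turns the max-length notification into a one-shot: without it, every audio frame past the threshold would call notifyMaxLengthReached() again. A minimal standalone sketch of the pattern (class and method names are illustrative, not from the library):

    // One-shot latch: without the boolean, the callback would fire on
    // every frame after the threshold is crossed.
    public class OneShotLatchExample {
        private boolean notified = false;

        void onFrame(long deltaUs, long maxLengthUs) {
            if (!notified && deltaUs > maxLengthUs) {
                notified = true; // latch: notify exactly once
                System.out.println("max length reached at deltaUs=" + deltaUs);
            }
        }

        public static void main(String[] args) {
            OneShotLatchExample latch = new OneShotLatchExample();
            for (long t = 0; t <= 5_000_000; t += 1_000_000) {
                latch.onFrame(t, 2_500_000); // prints once, at t = 3_000_000
            }
        }
    }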
@@ -280,7 +285,7 @@ public class AudioMediaEncoder extends MediaEncoder {
         private void enqueue(@NonNull ByteBuffer byteBuffer, long timestamp, boolean isEndOfStream) {
             if (PERFORMANCE_DEBUG) {
-                mSendStartMap.put(timestamp, System.nanoTime() / 1000000);
+                mDebugSendStartMap.put(timestamp, System.nanoTime() / 1000000);
             }
             int readBytes = byteBuffer.remaining();
             InputBuffer inputBuffer = mInputBufferPool.get();
@@ -320,11 +325,11 @@ public class AudioMediaEncoder extends MediaEncoder {
                 // Performance logging
                 if (PERFORMANCE_DEBUG) {
                     long sendEnd = System.nanoTime() / 1000000;
-                    Long sendStart = mSendStartMap.remove(inputBuffer.timestamp);
+                    Long sendStart = mDebugSendStartMap.remove(inputBuffer.timestamp);
                     //noinspection StatementWithEmptyBody
                     if (sendStart != null) {
-                        mAvgSendDelay = ((mAvgSendDelay * mSendCount) + (sendEnd - sendStart)) / (++mSendCount);
-                        LOG.v("send delay millis:", sendEnd - sendStart, "average:", mAvgSendDelay);
+                        mDebugSendAvgDelay = ((mDebugSendAvgDelay * mDebugSendCount) + (sendEnd - sendStart)) / (++mDebugSendCount);
+                        LOG.v("send delay millis:", sendEnd - sendStart, "average:", mDebugSendAvgDelay);
                     } else {
                         // This input buffer was already processed (but tryAcquire failed for now).
                     }
@@ -348,8 +353,8 @@ public class AudioMediaEncoder extends MediaEncoder {
             if (PERFORMANCE_DEBUG) {
                 // After latest changes, the count here is not so different between MONO and STEREO.
                 // We get about 400 frames in both cases (430 for MONO, but doesn't seem like a big issue).
-                LOG.e("EXECUTE DELAY MILLIS:", mAvgExecuteDelay, "COUNT:", mExecuteCount);
-                LOG.e("SEND DELAY MILLIS:", mAvgSendDelay, "COUNT:", mSendCount);
+                LOG.e("EXECUTE DELAY MILLIS:", mDebugExecuteAvgDelay, "COUNT:", mDebugExecuteCount);
+                LOG.e("SEND DELAY MILLIS:", mDebugSendAvgDelay, "COUNT:", mDebugSendCount);
             }
         }
@@ -371,8 +376,8 @@ public class AudioMediaEncoder extends MediaEncoder {
             if (PERFORMANCE_DEBUG) {
                 long executeEnd = System.nanoTime() / 1000000;
-                mAvgExecuteDelay = ((mAvgExecuteDelay * mExecuteCount) + (executeEnd - executeStart)) / (++mExecuteCount);
-                LOG.v("execute delay millis:", executeEnd - executeStart, "average:", mAvgExecuteDelay);
+                mDebugExecuteAvgDelay = ((mDebugExecuteAvgDelay * mDebugExecuteCount) + (executeEnd - executeStart)) / (++mDebugExecuteCount);
+                LOG.v("execute delay millis:", executeEnd - executeStart, "average:", mDebugExecuteAvgDelay);
             }
         }
     }
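The renamed debug fields above maintain an incremental running average, avg' = (avg * n + sample) / (n + 1), so no list of samples is kept. A minimal standalone sketch of that update with the same integer arithmetic (class name is illustrative):

    // Incremental mean, as used by mDebugSendAvgDelay / mDebugExecuteAvgDelay.
    public class RunningAverageExample {
        private long average = 0;
        private int count = 0;

        long add(long sample) {
            average = ((average * count) + sample) / (++count);
            return average;
        }

        public static void main(String[] args) {
            RunningAverageExample avg = new RunningAverageExample();
            System.out.println(avg.add(10)); // 10
            System.out.println(avg.add(20)); // 15
            System.out.println(avg.add(30)); // 20
        }
    }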

demo/src/main/java/com/otaliastudios/cameraview/video/encoding/MediaEncoder.java
@@ -122,7 +122,7 @@ public abstract class MediaEncoder {
     private boolean mMaxLengthReached;

     private long mStartTimeMillis = 0; // In System.currentTimeMillis()
-    private long mStartTimeUs = Long.MIN_VALUE; // In unknown reference
+    private long mFirstTimeUs = Long.MIN_VALUE; // In unknown reference
     private long mLastTimeUs = 0;

     private long mDebugSetStateTimestamp = Long.MIN_VALUE;
@@ -422,9 +422,9 @@ public abstract class MediaEncoder {
             // Store mStartTimeUs and mLastTimeUs, useful to detect the max length
             // reached and stop recording when needed.
-            if (mStartTimeUs == Long.MIN_VALUE) {
-                mStartTimeUs = mBufferInfo.presentationTimeUs;
-                LOG.w(mName, "DRAINING - Got the first presentation time:", mStartTimeUs);
+            if (mFirstTimeUs == Long.MIN_VALUE) {
+                mFirstTimeUs = mBufferInfo.presentationTimeUs;
+                LOG.w(mName, "DRAINING - Got the first presentation time:", mFirstTimeUs);
             }

             mLastTimeUs = mBufferInfo.presentationTimeUs;
@@ -434,7 +434,7 @@ public abstract class MediaEncoder {
             // To address this, encoders are required to call notifyFirstFrameMillis
             // so we can adjust here - moving to 1970 reference.
             // Extra benefit: we never pass a pts equal to 0, which some encoders refuse.
-            mBufferInfo.presentationTimeUs = (mStartTimeMillis * 1000) + mLastTimeUs - mStartTimeUs;
+            mBufferInfo.presentationTimeUs = (mStartTimeMillis * 1000) + mLastTimeUs - mFirstTimeUs;

             // Write.
             LOG.v(mName, "DRAINING - About to write(). Adjusted presentation:", mBufferInfo.presentationTimeUs);
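The adjustment above shifts the encoder's private clock onto the 1970-based milliseconds reported via notifyFirstFrameMillis, keeping pts deltas intact. A worked example with made-up values:

    // Worked example of the pts adjustment; all values are illustrative.
    public class PtsAdjustExample {
        public static void main(String[] args) {
            long startTimeMillis = 1_556_000_000_000L; // epoch millis of the first frame
            long firstTimeUs = 81_000_000L;            // first pts seen, encoder's own clock
            long lastTimeUs = 81_033_333L;             // current pts, same clock

            // Shift into the 1970 reference; the result is never 0,
            // which some encoders refuse.
            long adjustedUs = (startTimeMillis * 1000) + lastTimeUs - firstTimeUs;
            System.out.println(adjustedUs); // 1556000000033333
        }
    }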
@@ -451,10 +451,10 @@ public abstract class MediaEncoder {
             // Not needed if drainAll because we already were asked to stop
             if (!drainAll
                     && !mMaxLengthReached
-                    && mStartTimeUs != Long.MIN_VALUE
-                    && mLastTimeUs - mStartTimeUs > mMaxLengthMillis * 1000) {
+                    && mFirstTimeUs != Long.MIN_VALUE
+                    && mLastTimeUs - mFirstTimeUs > mMaxLengthMillis * 1000) {
                 LOG.w(mName, "DRAINING - Reached maxLength! mLastTimeUs:", mLastTimeUs,
-                        "mStartTimeUs:", mStartTimeUs,
+                        "mStartTimeUs:", mFirstTimeUs,
                         "mMaxLengthUs:", mMaxLengthMillis * 1000);
                 onMaxLengthReached();
                 break;
@@ -520,7 +520,7 @@ public abstract class MediaEncoder {
      * @param firstFrameMillis the milliseconds of the first frame presentation
      */
     @SuppressWarnings("WeakerAccess")
-    protected void notifyFirstFrameMillis(long firstFrameMillis) {
+    protected final void notifyFirstFrameMillis(long firstFrameMillis) {
         mStartTimeMillis = firstFrameMillis;
     }
 }

cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/MediaEncoderEngine.java
@@ -68,8 +68,9 @@ public class MediaEncoderEngine {
         void onEncodingStart();

         /**
-         * Called when encoding stopped. At this point the mxuer might still be processing,
-         * but we have stopped receiving input (recording video and audio frames).
+         * Called when encoding stopped. At this point the muxer or the encoders might still be
+         * processing data, but we have stopped receiving input (recording video and audio frames).
+         * Actually, we will stop very soon.
          *
          * The {@link #onEncodingEnd(int, Exception)} callback will soon be called
          * with the results.
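The javadoc above pins down the callback ordering: onEncodingStop() fires when input stops, onEncodingEnd() once everything is released. A self-contained sketch of that ordering; the interface only mirrors the Listener methods visible in this diff, the implementations are illustrative:

    // Callback ordering sketch; prints the three phases in sequence.
    public class ListenerOrderExample {
        interface Listener {
            void onEncodingStart();
            void onEncodingStop();
            void onEncodingEnd(int reason, Exception e);
        }

        public static void main(String[] args) {
            Listener listener = new Listener() {
                @Override public void onEncodingStart() { System.out.println("muxer started"); }
                @Override public void onEncodingStop() { System.out.println("no more input, still processing"); }
                @Override public void onEncodingEnd(int reason, Exception e) { System.out.println("all released, reason=" + reason); }
            };
            listener.onEncodingStart(); // recording begins
            listener.onEncodingStop();  // after this commit, fired from stop() rather than notifyStopped()
            listener.onEncodingEnd(0, null); // muxer released, file is ready
        }
    }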
@@ -98,9 +99,9 @@ public class MediaEncoderEngine {
     private List<MediaEncoder> mEncoders;
     private MediaMuxer mMediaMuxer;
-    private int mStartedEncodersCount;
-    private int mReleasedEncodersCount;
-    private boolean mMediaMuxerStarted;
+    private int mStartedEncodersCount = 0;
+    private int mStoppedEncodersCount = 0;
+    private boolean mMediaMuxerStarted = false;
     @SuppressWarnings("FieldCanBeLocal")
     private Controller mController;
     private Listener mListener;
@@ -137,9 +138,6 @@ public class MediaEncoderEngine {
         } catch (IOException e) {
             throw new RuntimeException(e);
         }
-        mStartedEncodersCount = 0;
-        mMediaMuxerStarted = false;
-        mReleasedEncodersCount = 0;

         // Trying to convert the size constraints to duration constraints,
         // because they are super easy to check.
@@ -203,6 +201,9 @@ public class MediaEncoderEngine {
         for (MediaEncoder encoder : mEncoders) {
             encoder.stop();
         }
+        if (mListener != null) {
+            mListener.onEncodingStop();
+        }
     }

     /**
@@ -231,7 +232,7 @@ public class MediaEncoderEngine {
         }
         mEndReason = END_BY_USER;
         mStartedEncodersCount = 0;
-        mReleasedEncodersCount = 0;
+        mStoppedEncodersCount = 0;
         mMediaMuxerStarted = false;
         LOG.i("end:", "Completed.");
     }
@@ -372,11 +373,8 @@ public class MediaEncoderEngine {
         public void notifyStopped(int track) {
             synchronized (mControllerLock) {
                 LOG.w("notifyStopped:", "Called for track", track);
-                if (++mReleasedEncodersCount == mEncoders.size()) {
-                    LOG.w("requestStop:", "All encoders have been released. Stopping the muxer.");
-                    if (mListener != null) {
-                        mListener.onEncodingStop();
-                    }
+                if (++mStoppedEncodersCount == mEncoders.size()) {
+                    LOG.w("requestStop:", "All encoders have been stopped. Stopping the muxer.");
                     end();
                 }
             }
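Counting stopped encoders before ending the muxer is a join/barrier pattern: the shared resource is released only after every worker reports in. A standalone sketch of the same idea using CountDownLatch; the library itself uses a plain counter under a lock, as above:

    // Stop barrier sketch: the muxer line prints only after both workers stop.
    import java.util.concurrent.CountDownLatch;

    public class StopBarrierExample {
        public static void main(String[] args) throws InterruptedException {
            int encoderCount = 2; // e.g. one video and one audio encoder
            CountDownLatch allStopped = new CountDownLatch(encoderCount);

            for (int track = 0; track < encoderCount; track++) {
                final int t = track;
                new Thread(() -> {
                    // ... drain and stop this encoder ...
                    System.out.println("track " + t + " stopped");
                    allStopped.countDown(); // plays the role of ++mStoppedEncodersCount
                }).start();
            }

            allStopped.await(); // like the == mEncoders.size() check above
            System.out.println("all encoders stopped, ending the muxer");
        }
    }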

cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/TextureMediaEncoder.java
@@ -31,7 +31,7 @@ public class TextureMediaEncoder extends VideoMediaEncoder<TextureConfig> {
     private EglCore mEglCore;
     private EglWindowSurface mWindow;
     private EglViewport mViewport;
-    private Pool<Frame> mFramePool = new Pool<>(100, new Pool.Factory<Frame>() {
+    private Pool<Frame> mFramePool = new Pool<>(Integer.MAX_VALUE, new Pool.Factory<Frame>() {
         @Override
         public Frame create() {
             return new Frame();
@@ -103,15 +103,11 @@ public class TextureMediaEncoder extends VideoMediaEncoder<TextureConfig> {
         if (frame == null) {
             throw new IllegalArgumentException("Got null frame for FRAME_EVENT.");
         }
-        if (frame.timestamp == 0) { // grafika
+        if (!shouldRenderFrame(frame.timestamp)) {
             mFramePool.recycle(frame);
             return;
         }
-        if (mFrameNumber < 0) { // We were asked to stop.
-            mFramePool.recycle(frame);
-            return;
-        }
-        mFrameNumber++;
+
         if (mFrameNumber == 1) {
             notifyFirstFrameMillis(frame.timestampMillis);
         }
@@ -174,4 +170,11 @@ public class TextureMediaEncoder extends VideoMediaEncoder<TextureConfig> {
             mEglCore = null;
         }
     }
+
+    private boolean shouldRenderFrame(long timestamp) {
+        if (timestamp == 0) return false; // grafika said so
+        if (mFrameNumber < 0) return false; // We were asked to stop.
+        mFrameNumber++;
+        return true;
+    }
 }
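The mFramePool capacity change above effectively makes the frame pool unbounded, presumably so a burst of in-flight frames can no longer exhaust the pool and drop frames, trading memory for completeness. The library's Pool class is not shown in this diff; a minimal sketch of the recycle-pool idea it appears to follow (all names here are illustrative, not the library's API):

    // Illustrative recycle pool, not the library's Pool implementation.
    import java.util.ArrayDeque;
    import java.util.Queue;
    import java.util.function.Supplier;

    public class SimplePool<T> {
        private final int maxSize; // Integer.MAX_VALUE effectively removes the cap
        private final Supplier<T> factory;
        private final Queue<T> recycled = new ArrayDeque<>();
        private int created = 0;

        public SimplePool(int maxSize, Supplier<T> factory) {
            this.maxSize = maxSize;
            this.factory = factory;
        }

        // Reuse a recycled instance when available; otherwise create one,
        // unless maxSize instances already exist.
        public synchronized T get() {
            T item = recycled.poll();
            if (item != null) return item;
            if (created < maxSize) {
                created++;
                return factory.get();
            }
            return null; // pool exhausted: with a small cap, frames would be dropped
        }

        public synchronized void recycle(T item) {
            recycled.offer(item);
        }
    }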

demo/src/main/java/com/otaliastudios/cameraview/demo/CameraActivity.java
@@ -225,6 +225,13 @@ public class CameraActivity extends AppCompatActivity implements View.OnClickLis
             super.onVideoRecordingStart();
             LOG.w("onVideoRecordingStart!");
         }
+
+        @Override
+        public void onVideoRecordingEnd() {
+            super.onVideoRecordingEnd();
+            message("Video taken. Processing...", false);
+            LOG.w("onVideoRecordingEnd!");
+        }
     }

     @Override
