Drop frames when we have too many pending events

pull/530/head
Mattia Iavarone 6 years ago
parent 8c4c909bdc
commit 43148ca49b
  1. cameraview/src/main/java/com/otaliastudios/cameraview/video/SnapshotVideoRecorder.java (8 changes)
  2. cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/AudioConfig.java (2 changes)
  3. cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/AudioMediaEncoder.java (15 changes)
  4. cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/MediaEncoder.java (40 changes)
  5. cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/TextureMediaEncoder.java (72 changes)
  6. cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/VideoMediaEncoder.java (9 changes)
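In outline: MediaEncoder.notify() now counts, per event type, how many events have been posted to the encoder's worker thread but not yet handled, and TextureMediaEncoder consults that count to skip frames when it falls behind (for instance when overlay drawing slows the frame handler down). A condensed sketch of the new policy, reusing names from the hunks below:

    // Sketch of the decision this commit introduces (not a literal excerpt).
    if (getPendingEvents(FRAME_EVENT) > 2) {
        // By the time a queued event runs, the camera texture has already been
        // updated, so we are too late to draw this frame anyway: drop it.
        return false; // the Frame goes back to the pool instead of being rendered
    }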

@@ -1,20 +1,14 @@
package com.otaliastudios.cameraview.video;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.PorterDuff;
import android.graphics.SurfaceTexture;
import android.opengl.EGL14;
import android.os.Build;
import android.view.Surface;
import com.otaliastudios.cameraview.CameraLogger;
import com.otaliastudios.cameraview.internal.Issue514Workaround;
import com.otaliastudios.cameraview.overlay.Overlay;
import com.otaliastudios.cameraview.VideoResult;
import com.otaliastudios.cameraview.controls.Audio;
import com.otaliastudios.cameraview.engine.CameraEngine;
import com.otaliastudios.cameraview.internal.egl.EglViewport;
import com.otaliastudios.cameraview.overlay.OverlayDrawer;
import com.otaliastudios.cameraview.preview.GlCameraPreview;
import com.otaliastudios.cameraview.preview.RendererFrameCallback;
@@ -164,7 +158,7 @@ public class SnapshotVideoRecorder extends VideoRecorder implements RendererFram
LOG.v("dispatching frame.");
TextureMediaEncoder textureEncoder = (TextureMediaEncoder) mEncoderEngine.getVideoEncoder();
TextureMediaEncoder.Frame frame = textureEncoder.acquireFrame();
- frame.timestamp = surfaceTexture.getTimestamp();
+ frame.timestampNanos = surfaceTexture.getTimestamp();
frame.timestampMillis = System.currentTimeMillis(); // NOTE: this is an approximation but it seems to work.
surfaceTexture.getTransformMatrix(frame.transform);
if (mEncoderEngine != null) { // Can happen on teardown. At least it used to.

@@ -91,6 +91,6 @@ public class AudioConfig {
* @return the buffer pool max size
*/
int bufferPoolMaxSize() {
- return 80;
+ return 200;
}
}
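For scale: bufferPoolMaxSize() caps how many audio buffers can be in flight between the recording thread and the encoder, so 80 -> 200 lets recording absorb much longer encoder stalls before the pool runs dry. A rough estimate, where the buffer size and sample rate below are picked purely for illustration (they are not part of this diff):

    // Illustration only: assumed figures, not taken from AudioConfig.
    int frameBytes = 2048;                 // assumed size of one pooled buffer
    int byteRate = 44100 * 2;              // assumed 44.1 kHz mono, 16-bit samples
    int poolMax = 200;                     // bufferPoolMaxSize() after this change
    double seconds = (frameBytes * poolMax) / (double) byteRate; // ~4.6 s buffered
    // versus roughly 1.9 s with the old cap of 80.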

@@ -34,7 +34,7 @@ public class AudioMediaEncoder extends MediaEncoder {
private final static Random NOISE = new Random();
private static short noise() {
- return (short) NOISE.nextInt(300);
+ return (short) NOISE.nextInt(100);
}
private boolean mRequestStop = false;
@@ -138,7 +138,6 @@ public class AudioMediaEncoder extends MediaEncoder {
private long mLastTimeUs;
private long mFirstTimeUs = Long.MIN_VALUE;
- private boolean mReachedMaxLength = false;
private AudioRecordingThread() {
setPriority(Thread.MAX_PRIORITY);
@@ -165,7 +164,16 @@ public class AudioMediaEncoder extends MediaEncoder {
public void run() {
mAudioRecord.startRecording();
while (!mRequestStop) {
+ if (!hasReachedMaxLength()) {
read(false);
+ } else {
+ // We have reached the max length, so stop reading.
+ // However, do not get out of the loop - the controller
+ // will call stop() on us soon. It's not our responsibility
+ // to stop ourselves.
+ //noinspection UnnecessaryContinue
+ continue;
+ }
}
LOG.w("Stop was requested. We're out of the loop. Will post an endOfStream.");
// Last input with 0 length. This will signal the endOfStream.
@@ -241,11 +249,10 @@ public class AudioMediaEncoder extends MediaEncoder {
}
// See if we reached the max length value.
- if (!mReachedMaxLength) {
+ if (!hasReachedMaxLength()) {
boolean didReachMaxLength = (mLastTimeUs - mFirstTimeUs) > getMaxLengthMillis() * 1000L;
if (didReachMaxLength && !endOfStream) {
LOG.w("read thread - this frame reached the maxLength! deltaUs:", mLastTimeUs - mFirstTimeUs);
- mReachedMaxLength = true;
notifyMaxLengthReached();
}
}
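The two hunks above also retire the encoder-local mReachedMaxLength flag in favor of the base class's hasReachedMaxLength() / notifyMaxLengthReached() (added to MediaEncoder below), so the audio and video encoders now share a single source of truth for the max-length state. The unit conversion in the check is worth spelling out:

    // The max-length test in isolation: recorded timestamps are in microseconds,
    // while the configured limit is in milliseconds, hence the * 1000L.
    boolean didReachMaxLength = (lastTimeUs - firstTimeUs) > maxLengthMillis * 1000L;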

@@ -14,6 +14,9 @@ import com.otaliastudios.cameraview.CameraLogger;
import com.otaliastudios.cameraview.internal.utils.WorkerHandler;
import java.nio.ByteBuffer;
+ import java.util.HashMap;
+ import java.util.Map;
+ import java.util.concurrent.atomic.AtomicInteger;
/**
* Base class for single-track encoders, coordinated by a {@link MediaEncoderEngine}.
@@ -117,6 +120,7 @@ public abstract class MediaEncoder {
private OutputBufferPool mOutputBufferPool;
private MediaCodec.BufferInfo mBufferInfo;
private MediaCodecBuffers mBuffers;
+ private final Map<String, AtomicInteger> mPendingEvents = new HashMap<>();
private long mMaxLengthMillis;
private boolean mMaxLengthReached;
@@ -223,13 +227,18 @@ public abstract class MediaEncoder {
* @param event what happened
* @param data object
*/
@SuppressWarnings("ConstantConditions")
final void notify(final @NonNull String event, final @Nullable Object data) {
LOG.v(mName, "Notify was called. Posting.");
if (!mPendingEvents.containsKey(event)) mPendingEvents.put(event, new AtomicInteger(0));
final AtomicInteger pendingEvents = mPendingEvents.get(event);
pendingEvents.incrementAndGet();
LOG.v(mName, "Notify was called. Posting. pendingEvents:", pendingEvents.intValue());
mWorker.post(new Runnable() {
@Override
public void run() {
LOG.v(mName, "Notify was called. Executing.");
LOG.v(mName, "Notify was called. Executing. pendingEvents:", pendingEvents.intValue());
onEvent(event, data);
pendingEvents.decrementAndGet();
}
});
}
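The bookkeeping works because the counter is incremented on the posting thread but decremented on the worker thread only after onEvent() returns, so its value at any instant is the number of events still waiting, including the one currently being handled. The same pattern in isolation, as a minimal sketch (the class and names below are invented for illustration):

    import java.util.HashMap;
    import java.util.Map;
    import java.util.concurrent.atomic.AtomicInteger;

    // Hypothetical standalone version of the counting scheme used above.
    class EventBacklog {
        private final Map<String, AtomicInteger> pending = new HashMap<>();

        // Wraps a task so the backlog for its event type stays accurate.
        // Like MediaEncoder.notify(), assumes a single posting thread.
        Runnable track(String event, final Runnable task) {
            if (!pending.containsKey(event)) pending.put(event, new AtomicInteger(0));
            final AtomicInteger counter = pending.get(event);
            counter.incrementAndGet();            // scheduled
            return new Runnable() {
                @Override
                public void run() {
                    task.run();
                    counter.decrementAndGet();    // handled
                }
            };
        }

        // Tasks of this event type still queued or running.
        int pendingCount(String event) {
            AtomicInteger counter = pending.get(event);
            return counter == null ? 0 : counter.intValue();
        }
    }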
@@ -357,7 +366,9 @@
*/
@SuppressWarnings("WeakerAccess")
protected void encodeInputBuffer(InputBuffer buffer) {
LOG.v(mName, "ENCODING - Buffer:", buffer.index, "Bytes:", buffer.length, "Presentation:", buffer.timestamp);
LOG.v(mName, "ENCODING - Buffer:", buffer.index,
"Bytes:", buffer.length,
"Presentation:", buffer.timestamp);
if (buffer.isEndOfStream) { // send EOS
mMediaCodec.queueInputBuffer(buffer.index, 0, 0,
buffer.timestamp, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
@@ -380,7 +391,7 @@
@SuppressLint("LogNotTimber")
@SuppressWarnings("WeakerAccess")
protected void drainOutput(boolean drainAll) {
LOG.v(mName, "DRAINING - EOS:", drainAll);
LOG.i(mName, "DRAINING - EOS:", drainAll);
if (mMediaCodec == null) {
LOG.e("drain() was called before prepare() or after releasing.");
return;
@@ -437,7 +448,7 @@
mBufferInfo.presentationTimeUs = (mStartTimeMillis * 1000) + mLastTimeUs - mFirstTimeUs;
// Write.
LOG.v(mName, "DRAINING - About to write(). Adjusted presentation:", mBufferInfo.presentationTimeUs);
LOG.i(mName, "DRAINING - About to write(). Adjusted presentation:", mBufferInfo.presentationTimeUs);
OutputBuffer buffer = mOutputBufferPool.get();
//noinspection ConstantConditions
buffer.info = mBufferInfo;
@@ -455,6 +466,7 @@
&& mLastTimeUs - mFirstTimeUs > mMaxLengthMillis * 1000) {
LOG.w(mName, "DRAINING - Reached maxLength! mLastTimeUs:", mLastTimeUs,
"mStartTimeUs:", mFirstTimeUs,
"mDeltaUs:", mLastTimeUs - mFirstTimeUs,
"mMaxLengthUs:", mMaxLengthMillis * 1000);
onMaxLengthReached();
break;
@@ -494,6 +506,11 @@
onMaxLengthReached();
}
@SuppressWarnings("WeakerAccess")
protected boolean hasReachedMaxLength() {
return mMaxLengthReached;
}
/**
* Called by us (during {@link #drainOutput(boolean)}) or by subclasses
* (through {@link #notifyMaxLengthReached()}) to notify that we reached the
@@ -523,4 +540,17 @@
protected final void notifyFirstFrameMillis(long firstFrameMillis) {
mStartTimeMillis = firstFrameMillis;
}
+ /**
+ * Returns the number of events (see {@link #onEvent(String, Object)}) that were scheduled
+ * but still not passed to that function. Could be used to drop some of them if this
+ * number is too high.
+ *
+ * @param event the event type
+ * @return the pending events number
+ */
+ @SuppressWarnings({"SameParameterValue", "ConstantConditions", "WeakerAccess"})
+ protected final int getPendingEvents(@NonNull String event) {
+ return mPendingEvents.get(event).intValue();
+ }
}
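One subtlety: getPendingEvents() dereferences mPendingEvents.get(event) directly, which is only safe after notify() has run at least once for that event type; that is what the ConstantConditions suppressions above acknowledge. A null-safe variant, if that ordering ever cannot be guaranteed (a sketch, not part of this commit):

    // Hypothetical defensive version: returns 0 for event types never posted.
    protected final int getPendingEvents(@NonNull String event) {
        AtomicInteger counter = mPendingEvents.get(event);
        return counter == null ? 0 : counter.intValue();
    }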

@@ -1,12 +1,10 @@
package com.otaliastudios.cameraview.video.encoding;
import android.graphics.SurfaceTexture;
import android.media.ImageReader;
import android.opengl.Matrix;
import android.os.Build;
import com.otaliastudios.cameraview.CameraLogger;
import com.otaliastudios.cameraview.internal.Issue514Workaround;
import com.otaliastudios.cameraview.internal.egl.EglCore;
import com.otaliastudios.cameraview.internal.egl.EglViewport;
import com.otaliastudios.cameraview.internal.egl.EglWindowSurface;
@@ -38,6 +36,8 @@ public class TextureMediaEncoder extends VideoMediaEncoder<TextureConfig> {
}
});
+ private long mFirstTimeUs = Long.MIN_VALUE;
public TextureMediaEncoder(@NonNull TextureConfig config) {
super(config.copy());
}
@@ -53,7 +53,7 @@ public class TextureMediaEncoder extends VideoMediaEncoder<TextureConfig> {
* Nanoseconds, in no meaningful time-base. Will be used for offsets only.
* Typically this comes from {@link SurfaceTexture#getTimestamp()}.
*/
- public long timestamp;
+ public long timestampNanos;
/**
* Milliseconds in the {@link System#currentTimeMillis()} reference.
@@ -65,6 +65,10 @@ public class TextureMediaEncoder extends VideoMediaEncoder<TextureConfig> {
* The transformation matrix for the base texture.
*/
public float[] transform = new float[16];
+ private long timestampUs() {
+ return timestampNanos / 1000L;
+ }
}
/**
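The rename from timestamp to timestampNanos, together with the private timestampUs() helper, pins down units that are easy to mix up because three time representations meet in this pipeline (surfaceTexture below is illustrative, as in SnapshotVideoRecorder):

    long nanos  = surfaceTexture.getTimestamp(); // ns, arbitrary time-base, offsets only
    long micros = nanos / 1000L;                 // MediaCodec presentation times are in us
    long millis = System.currentTimeMillis();    // wall clock, for notifyFirstFrameMillis()

Note that the EGL presentation time set later in this file stays in nanoseconds (mWindow.setPresentationTime(frame.timestampNanos)), since eglPresentationTimeANDROID expects nanoseconds.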
@@ -95,6 +99,32 @@ public class TextureMediaEncoder extends VideoMediaEncoder<TextureConfig> {
mViewport = new EglViewport();
}
+ /**
+ * Any number of pending events > 1 means that we should skip this frame.
+ * To avoid skipping too many frames, we'll use 2 for now, but this just means
+ * that we'll be drawing the same frame twice.
+ *
+ * When an event is posted, the textureId data has already been updated so we're
+ * too late to draw the old one and it should be skipped.
+ *
+ * This is especially important if we perform overlay drawing here, since that
+ * makes this class thread busy and slows down the event dispatching.
+ *
+ * @param timestampUs frame timestamp
+ * @return true to render
+ */
+ @Override
+ protected boolean shouldRenderFrame(long timestampUs) {
+ if (!super.shouldRenderFrame(timestampUs)) return false;
+ if (mFrameNumber <= 5) return true; // Always render the first few frames, or muxer fails.
+ int events = getPendingEvents(FRAME_EVENT);
+ if (events > 2) {
+ LOG.w("Dropping frame because we already have too many pending events.", events);
+ return false;
+ }
+ return true;
+ }
@EncoderThread
@Override
protected void onEvent(@NonNull String event, @Nullable Object data) {
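With the threshold at 2 in the shouldRenderFrame() override above, a small backlog still renders (possibly drawing the same texture twice, as the javadoc notes) and only a deeper one drops:

    // Worked example of the policy (counts include the event being handled):
    // pendingEvents == 1 -> render: nothing else is queued behind us
    // pendingEvents == 2 -> render: tolerated, may draw the same frame twice
    // pendingEvents >= 3 -> drop:   too far behind, recycle the frame
    // ...except frames 1-5, which always render so the muxer gets data.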
@@ -103,21 +133,42 @@ public class TextureMediaEncoder extends VideoMediaEncoder<TextureConfig> {
if (frame == null) {
throw new IllegalArgumentException("Got null frame for FRAME_EVENT.");
}
- if (!shouldRenderFrame(frame.timestamp)) {
+ if (!shouldRenderFrame(frame.timestampUs())) {
mFramePool.recycle(frame);
return;
}
// Notify we got the first frame and its absolute time.
if (mFrameNumber == 1) {
notifyFirstFrameMillis(frame.timestampMillis);
}
+ // Notify we have reached the max length value.
+ if (mFirstTimeUs == Long.MIN_VALUE) mFirstTimeUs = frame.timestampUs();
+ if (!hasReachedMaxLength()) {
+ boolean didReachMaxLength = (frame.timestampUs() - mFirstTimeUs) > getMaxLengthMillis() * 1000L;
+ if (didReachMaxLength) {
+ LOG.w("onEvent -",
+ "frameNumber:", mFrameNumber,
+ "timestampUs:", frame.timestampUs(),
+ "firstTimeUs:", mFirstTimeUs,
+ "- reached max length! deltaUs:", frame.timestampUs() - mFirstTimeUs);
+ notifyMaxLengthReached();
+ }
+ }
// First, drain any previous data.
LOG.i("onEvent", "frameNumber:", mFrameNumber, "timestamp:", frame.timestamp, "- draining.");
LOG.i("onEvent -",
"frameNumber:", mFrameNumber,
"timestampUs:", frame.timestampUs(),
"- draining.");
drainOutput(false);
// Then draw on the surface.
LOG.i("onEvent", "frameNumber:", mFrameNumber, "timestamp:", frame.timestamp, "- drawing.");
LOG.i("onEvent -",
"frameNumber:", mFrameNumber,
"timestampUs:", frame.timestampUs(),
"- rendering.");
// 1. We must scale this matrix like GlCameraPreview does, because it might have some cropping.
// Scaling takes place with respect to the (0, 0, 0) point, so we must apply a Translation to compensate.
@@ -148,7 +199,7 @@ public class TextureMediaEncoder extends VideoMediaEncoder<TextureConfig> {
if (mConfig.hasOverlay()) {
mConfig.overlayDrawer.render();
}
- mWindow.setPresentationTime(frame.timestamp);
+ mWindow.setPresentationTime(frame.timestampNanos);
mWindow.swapBuffers();
mFramePool.recycle(frame);
}
@@ -170,11 +221,4 @@ public class TextureMediaEncoder extends VideoMediaEncoder<TextureConfig> {
mEglCore = null;
}
}
- private boolean shouldRenderFrame(long timestamp) {
- if (timestamp == 0) return false; // grafika said so
- if (mFrameNumber < 0) return false; // We were asked to stop.
- mFrameNumber++;
- return true;
- }
}

@@ -96,4 +96,13 @@ abstract class VideoMediaEncoder<C extends VideoConfig> extends MediaEncoder {
protected int getEncodedBitRate() {
return mConfig.bitRate;
}
@SuppressWarnings("WeakerAccess")
protected boolean shouldRenderFrame(long timestampUs) {
if (timestampUs == 0) return false; // grafika said so
if (mFrameNumber < 0) return false; // We were asked to stop.
if (hasReachedMaxLength()) return false; // We were not asked yet, but we'll be soon.
mFrameNumber++;
return true;
}
}
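After this commit the render checks compose across the hierarchy: this base method rejects obviously unusable frames, and TextureMediaEncoder.shouldRenderFrame() calls super first and then layers the backlog test on top. The effective decision order, condensed:

    // Effective order of checks for TextureMediaEncoder after this commit:
    // 1. timestampUs == 0          -> skip (grafika said so)
    // 2. mFrameNumber < 0          -> skip (stop was requested)
    // 3. hasReachedMaxLength()     -> skip (the controller will stop us soon)
    // 4. mFrameNumber <= 5         -> render (early frames, or the muxer fails)
    // 5. pending FRAME_EVENTs > 2  -> skip (too many queued: drop the frame)
    // 6. otherwise                 -> render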
