Fix long-standing sync bug

pull/506/head
Mattia Iavarone 6 years ago
parent 10b7c3f2a3
commit 17d7e03d14
  1. cameraview/src/main/java/com/otaliastudios/cameraview/video/SnapshotVideoRecorder.java (11 changed lines)
  2. cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/AudioMediaEncoder.java (6 changed lines)
  3. cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/MediaEncoder.java (96 changed lines)
  4. cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/MediaEncoderEngine.java (5 changed lines)
  5. cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/TextureMediaEncoder.java (48 changed lines)

@@ -160,9 +160,10 @@ public class SnapshotVideoRecorder extends VideoRecorder implements RendererFram
         if (mCurrentState == STATE_RECORDING) {
             LOG.v("dispatching frame.");
             TextureMediaEncoder textureEncoder = (TextureMediaEncoder) mEncoderEngine.getVideoEncoder();
-            TextureMediaEncoder.TextureFrame textureFrame = textureEncoder.acquireFrame();
-            textureFrame.timestamp = surfaceTexture.getTimestamp();
-            surfaceTexture.getTransformMatrix(textureFrame.transform);
+            TextureMediaEncoder.Frame frame = textureEncoder.acquireFrame();
+            frame.timestamp = surfaceTexture.getTimestamp();
+            frame.timestampMillis = System.currentTimeMillis(); // NOTE: this is an approximation but it seems to work.
+            surfaceTexture.getTransformMatrix(frame.transform);
             // get overlay
             if (mHasOverlay) {

@@ -175,12 +176,12 @@ public class SnapshotVideoRecorder extends VideoRecorder implements RendererFram
                     LOG.w("Got Surface.OutOfResourcesException while drawing video overlays", e);
                 }
                 mOverlaySurfaceTexture.updateTexImage();
-                mOverlaySurfaceTexture.getTransformMatrix(textureFrame.overlayTransform);
+                mOverlaySurfaceTexture.getTransformMatrix(frame.overlayTransform);
             }
             if (mEncoderEngine != null) {
                 // can happen on teardown
-                mEncoderEngine.notify(TextureMediaEncoder.FRAME_EVENT, textureFrame);
+                mEncoderEngine.notify(TextureMediaEncoder.FRAME_EVENT, frame);
             }
         }

@@ -15,7 +15,6 @@ import com.otaliastudios.cameraview.CameraLogger;
 import com.otaliastudios.cameraview.internal.utils.WorkerHandler;
 import androidx.annotation.NonNull;
-import androidx.annotation.Nullable;
 import androidx.annotation.RequiresApi;
 import java.io.IOException;

@@ -27,7 +26,7 @@ import java.util.concurrent.LinkedBlockingQueue;
 /**
  * Default implementation for audio encoding.
  */
-// TODO create onVideoRecordingStart/onVideoRecordingEnd callbacks
+// TODO create onVideoRecordingEnd callbacks
 @RequiresApi(api = Build.VERSION_CODES.JELLY_BEAN_MR2)
 public class AudioMediaEncoder extends MediaEncoder {

@@ -225,6 +224,9 @@ public class AudioMediaEncoder extends MediaEncoder {
             mLastTimeUs = mTimestamp.increaseUs(readBytes, BYTE_RATE);
             if (mFirstTimeUs == Long.MIN_VALUE) {
                 mFirstTimeUs = mLastTimeUs;
+                // Compute the first frame milliseconds as well.
+                notifyFirstFrameMillis(System.currentTimeMillis()
+                        - AudioTimestamp.bytesToUs(readBytes, BYTE_RATE) / 1000L);
             }
             boolean didReachMaxLength = (mLastTimeUs - mFirstTimeUs) > getMaxLengthMillis() * 1000L;
             if (didReachMaxLength && !endOfStream) {
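To make the added back-computation concrete, here is a hedged, self-contained sketch of the arithmetic. The 44100 Hz mono 16-bit format and the 4096-byte read are assumptions for illustration, not values taken from this diff: the chunk that was just read covers a known duration, so its first sample was captured roughly that long before "now".

    // Hedged sketch, not library code: mirrors the expression
    // System.currentTimeMillis() - AudioTimestamp.bytesToUs(readBytes, BYTE_RATE) / 1000L.
    public class AudioFirstFrameSketch {
        public static void main(String[] args) {
            long byteRate = 44100 * 2;   // assumed: 44100 Hz, mono, 16-bit PCM = 88200 bytes/s
            long readBytes = 4096;       // assumed size of the chunk that was just read
            long chunkUs = (readBytes * 1_000_000L) / byteRate; // duration of the chunk, ~46439 us
            // The first sample of the chunk was captured roughly chunkUs before "now":
            long firstFrameMillis = System.currentTimeMillis() - chunkUs / 1000L;
            System.out.println("chunkUs=" + chunkUs + ", firstFrameMillis=" + firstFrameMillis);
        }
    }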

@@ -56,10 +56,22 @@ import java.nio.ByteBuffer;
  * For VIDEO encoders, things are much easier because we skip the whole input part.
  * See description in {@link VideoMediaEncoder}.
  *
+ * MAX LENGTH CONSTRAINT
+ *
  * For max length constraint, it will be checked automatically during {@link #drainOutput(boolean)},
  * OR subclasses can provide an hint to this encoder using {@link #notifyMaxLengthReached()}.
  * In this second case, we can request a stop at reading time, so we avoid useless readings
  * in certain setups (where drain is called a lot after reading).
+ *
+ * TIMING
+ *
+ * Subclasses can use timestamps (in microseconds) in any reference system they prefer. For
+ * instance, it might be the {@link System#nanoTime()} reference, or some reference provided
+ * by SurfaceTextures.
+ *
+ * However, they are required to call {@link #notifyFirstFrameMillis(long)} and pass the
+ * milliseconds of the first frame in the {@link System#currentTimeMillis()} reference, so
+ * something that we can coordinate on.
  */
 // https://github.com/saki4510t/AudioVideoRecordingSample/blob/master/app/src/main/java/com/serenegiant/encoder/MediaEncoder.java
 @RequiresApi(api = Build.VERSION_CODES.JELLY_BEAN_MR2)
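The TIMING contract added to this javadoc can be illustrated with a small, self-contained sketch. The class and method names below are hypothetical, not library API: per-frame timestamps stay in whatever local reference the subclass prefers, and only the first frame is additionally reported on the shared System.currentTimeMillis() axis.

    // Hedged sketch of the TIMING contract described above; not part of the library.
    final class TimingContractSketch {
        private long firstFrameUs = Long.MIN_VALUE;     // first pts, local reference
        private long firstFrameMillis = Long.MIN_VALUE; // wall clock of the first frame

        // timestampNanos may come from SurfaceTexture or System.nanoTime(); only offsets matter.
        void onFrame(long timestampNanos) {
            long presentationUs = timestampNanos / 1000L;
            if (firstFrameUs == Long.MIN_VALUE) {
                firstFrameUs = presentationUs;
                // The one value that must use a shared reference system - what a real
                // subclass would pass to notifyFirstFrameMillis(long):
                firstFrameMillis = System.currentTimeMillis();
            }
            // ... hand presentationUs to the codec; it is re-based while draining ...
        }
    }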
@@ -109,8 +121,10 @@ abstract class MediaEncoder {
     private long mMaxLengthMillis;
     private boolean mMaxLengthReached;
-    private long mStartPresentationTimeUs = Long.MIN_VALUE;
-    private long mLastPresentationTimeUs = 0;
+    private long mStartTimeMillis = 0; // In System.currentTimeMillis()
+    private long mStartTimeUs = Long.MIN_VALUE; // In unknown reference
+    private long mLastTimeUs = 0;

     /**
      * Needs a readable name for the thread and for logging.
      * @param name a name
@@ -264,7 +278,7 @@ abstract class MediaEncoder {
      * @param data object
      */
     @EncoderThread
-    void onEvent(@NonNull String event, @Nullable Object data) {};
+    void onEvent(@NonNull String event, @Nullable Object data) {}

     /**
      * Stop recording. This involves signaling the end of stream and draining
@@ -392,26 +406,29 @@ abstract class MediaEncoder {
             // the INFO_OUTPUT_FORMAT_CHANGED status. Ignore it.
             boolean isCodecConfig = (mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0;
             if (!isCodecConfig && mController.isStarted() && mBufferInfo.size != 0) {
                 // adjust the ByteBuffer values to match BufferInfo (not needed?)
                 encodedData.position(mBufferInfo.offset);
                 encodedData.limit(mBufferInfo.offset + mBufferInfo.size);
-                // Store startPresentationTime and lastPresentationTime, useful for example to
-                // detect the mMaxLengthReached and stop recording.
-                if (mStartPresentationTimeUs == Long.MIN_VALUE) {
-                    mStartPresentationTimeUs = mBufferInfo.presentationTimeUs;
-                    LOG.w(mName, "DRAINING - Got the first presentation time:", mStartPresentationTimeUs);
-                }
-                mLastPresentationTimeUs = mBufferInfo.presentationTimeUs;
-                // Pass presentation times as offets with respect to the mStartPresentationTimeUs.
-                // This ensures consistency between audio pts (coming from System.nanoTime()) and
-                // video pts (coming from SurfaceTexture) both of which have no meaningful time-base
-                // and should be used for offsets only.
-                // TODO find a better way, this causes sync issues. (+ note: this sends pts=0 at first)
-                // mBufferInfo.presentationTimeUs = mLastPresentationTimeUs - mStartPresentationTimeUs;
-                LOG.v(mName, "DRAINING - About to write(). Presentation:", mBufferInfo.presentationTimeUs);
-                // TODO fix the mBufferInfo being the same, then implement delayed writing in Controller
-                // and remove the isStarted() check here.
+                // Store mStartTimeUs and mLastTimeUs, useful to detect the max length
+                // reached and stop recording when needed.
+                if (mStartTimeUs == Long.MIN_VALUE) {
+                    mStartTimeUs = mBufferInfo.presentationTimeUs;
+                    LOG.w(mName, "DRAINING - Got the first presentation time:", mStartTimeUs);
+                }
+                mLastTimeUs = mBufferInfo.presentationTimeUs;
+
+                // Adjust the presentation times. Subclasses can pass a presentation time in any
+                // reference system - possibly some that has no real meaning, and frequently,
+                // presentation times from different encoders have a different time-base.
+                // To address this, encoders are required to call notifyFirstFrameMillis
+                // so we can adjust here - moving to 1970 reference.
+                // Extra benefit: we never pass a pts equal to 0, which some encoders refuse.
+                mBufferInfo.presentationTimeUs = (mStartTimeMillis * 1000) + mLastTimeUs - mStartTimeUs;
+
+                // Write.
+                LOG.v(mName, "DRAINING - About to write(). Adjusted presentation:", mBufferInfo.presentationTimeUs);
                 OutputBuffer buffer = mOutputBufferPool.get();
                 //noinspection ConstantConditions
                 buffer.info = mBufferInfo;
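The re-basing applied above is easiest to follow with numbers. Below is a hedged sketch with made-up values (not taken from any real recording) that applies the same formula:

    // Hedged sketch, illustrative values only: the adjustment applied in drainOutput().
    public class PtsAdjustmentSketch {
        public static void main(String[] args) {
            long startTimeMillis = 1_560_000_000_000L; // wall clock of the first frame (notifyFirstFrameMillis)
            long startTimeUs = 8_000_000L;             // first pts seen while draining, local reference
            long lastTimeUs = 8_500_000L;              // pts of the current buffer, same local reference
            // Same formula as above: shift the local offset onto the currentTimeMillis (1970) axis.
            long adjustedUs = (startTimeMillis * 1000L) + lastTimeUs - startTimeUs;
            // adjustedUs == 1_560_000_000_500_000, i.e. 500 ms after the first frame, on a
            // time-base that audio and video buffers now share, so the muxer sees consistent pts.
            System.out.println(adjustedUs);
        }
    }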
@@ -425,10 +442,10 @@ abstract class MediaEncoder {
             // Not needed if drainAll because we already were asked to stop
             if (!drainAll
                     && !mMaxLengthReached
-                    && mStartPresentationTimeUs != Long.MIN_VALUE
-                    && mLastPresentationTimeUs - mStartPresentationTimeUs > mMaxLengthMillis * 1000) {
-                LOG.w(mName, "DRAINING - Reached maxLength! mLastPresentationTimeUs:", mLastPresentationTimeUs,
-                        "mStartPresentationTimeUs:", mStartPresentationTimeUs,
+                    && mStartTimeUs != Long.MIN_VALUE
+                    && mLastTimeUs - mStartTimeUs > mMaxLengthMillis * 1000) {
+                LOG.w(mName, "DRAINING - Reached maxLength! mLastTimeUs:", mLastTimeUs,
+                        "mStartTimeUs:", mStartTimeUs,
                         "mMaxLengthUs:", mMaxLengthMillis * 1000);
                 onMaxLengthReached();
                 break;
@@ -446,15 +463,33 @@ abstract class MediaEncoder {
     abstract int getEncodedBitRate();

+    /**
+     * Returns the max length setting, in milliseconds, which can be used
+     * to compute the current state and eventually call {@link #notifyMaxLengthReached()}.
+     * This is not a requirement for subclasses - we do this check anyway when draining,
+     * but doing so might be better.
+     *
+     * @return the max length setting
+     */
     @SuppressWarnings("WeakerAccess")
     protected long getMaxLengthMillis() {
         return mMaxLengthMillis;
     }

+    /**
+     * Called by subclasses to notify that the max length was reached.
+     * We will move to {@link #STATE_LIMIT_REACHED} and request a stop.
+     */
+    @SuppressWarnings("WeakerAccess")
     protected void notifyMaxLengthReached() {
         onMaxLengthReached();
     }

+    /**
+     * Called by us (during {@link #drainOutput(boolean)}) or by subclasses
+     * (through {@link #notifyMaxLengthReached()}) to notify that we reached the
+     * max length allowed. We will move to {@link #STATE_LIMIT_REACHED} and request a stop.
+     */
     private void onMaxLengthReached() {
         if (mMaxLengthReached) return;
         mMaxLengthReached = true;

@@ -466,4 +501,17 @@ abstract class MediaEncoder {
             mController.requestStop(mTrackIndex);
         }
     }
+
+    /**
+     * Should be called by subclasses to pass the milliseconds of the first frame - as soon
+     * as this information is available. The milliseconds should be in the
+     * {@link System#currentTimeMillis()} reference system, so we can coordinate between different
+     * encoders.
+     *
+     * @param firstFrameMillis the milliseconds of the first frame presentation
+     */
+    @SuppressWarnings("WeakerAccess")
+    protected void notifyFirstFrameMillis(long firstFrameMillis) {
+        mStartTimeMillis = firstFrameMillis;
+    }
 }

@@ -286,16 +286,11 @@ public class MediaEncoderEngine {
     /**
      * Writes the given data to the muxer. Should be called after {@link #isStarted()}
      * returns true. Note: this seems to be thread safe, no lock.
-     * TODO cache values if not started yet, then apply later. Read comments in drain().
-     * Currently they are recycled instantly.
      */
     void write(@NonNull OutputBufferPool pool, @NonNull OutputBuffer buffer) {
         if (!mMediaMuxerStarted) {
             throw new IllegalStateException("Trying to write before muxer started");
         }
-        // This is a bad idea and causes crashes.
-        // if (info.presentationTimeUs < mLastTimestampUs) info.presentationTimeUs = mLastTimestampUs;
-        // mLastTimestampUs = info.presentationTimeUs;
         LOG.v("write:", "Writing OutputBuffer - track:", buffer.trackIndex, "presentation:", buffer.info.presentationTimeUs);
         mMediaMuxer.writeSampleData(buffer.trackIndex, buffer.data, buffer.info);
         pool.recycle(buffer);

@@ -1,5 +1,6 @@
 package com.otaliastudios.cameraview.video.encoding;

+import android.graphics.SurfaceTexture;
 import android.opengl.EGLContext;
 import android.opengl.Matrix;
 import android.os.Build;

@@ -52,10 +53,10 @@ public class TextureMediaEncoder extends VideoMediaEncoder<TextureMediaEncoder.C
     private EglCore mEglCore;
     private EglWindowSurface mWindow;
     private EglViewport mViewport;
-    private Pool<TextureFrame> mFramePool = new Pool<>(100, new Pool.Factory<TextureFrame>() {
+    private Pool<Frame> mFramePool = new Pool<>(100, new Pool.Factory<Frame>() {
         @Override
-        public TextureFrame create() {
-            return new TextureFrame();
+        public Frame create() {
+            return new Frame();
         }
     });

@@ -63,17 +64,42 @@ public class TextureMediaEncoder extends VideoMediaEncoder<TextureMediaEncoder.C
         super(config.copy());
     }

-    public static class TextureFrame {
-        private TextureFrame() {}
-        // Nanoseconds, in no meaningful time-base. Should be for offsets only.
-        // Typically coming from SurfaceTexture.getTimestamp().
+    /**
+     * Should be acquired with {@link #acquireFrame()}, filled and then passed
+     * to {@link MediaEncoderEngine#notify(String, Object)} with {@link #FRAME_EVENT}.
+     */
+    public static class Frame {
+        private Frame() {}
+
+        /**
+         * Nanoseconds, in no meaningful time-base. Will be used for offsets only.
+         * Typically this comes from {@link SurfaceTexture#getTimestamp()}.
+         */
         public long timestamp;
+
+        /**
+         * Milliseconds in the {@link System#currentTimeMillis()} reference.
+         * This is actually needed/read only for the first frame.
+         */
+        public long timestampMillis;
+
+        /**
+         * The transformation matrix for the base texture.
+         */
         public float[] transform = new float[16];
+
+        /**
+         * The transformation matrix for the overlay texture, if any.
+         */
         public float[] overlayTransform = new float[16];
     }

+    /**
+     * Returns a new frame to be filled. See {@link Frame} for details.
+     * @return a new frame
+     */
     @NonNull
-    public TextureFrame acquireFrame() {
+    public Frame acquireFrame() {
         if (mFramePool.isEmpty()) {
             throw new RuntimeException("Need more frames than this! Please increase the pool size.");
         } else {
@@ -82,7 +108,6 @@ public class TextureMediaEncoder extends VideoMediaEncoder<TextureMediaEncoder.C
         }
     }

-    @EncoderThread
     @Override
     void onPrepare(@NonNull MediaEncoderEngine.Controller controller, long maxLengthMillis) {

@@ -102,7 +127,7 @@ public class TextureMediaEncoder extends VideoMediaEncoder<TextureMediaEncoder.C
     @Override
     void onEvent(@NonNull String event, @Nullable Object data) {
         if (!event.equals(FRAME_EVENT)) return;
-        TextureFrame frame = (TextureFrame) data;
+        Frame frame = (Frame) data;
         if (frame == null) {
             throw new IllegalArgumentException("Got null frame for FRAME_EVENT.");
         }

@@ -115,6 +140,9 @@ public class TextureMediaEncoder extends VideoMediaEncoder<TextureMediaEncoder.C
             return;
         }
         mFrameNumber++;
+        if (mFrameNumber == 1) {
+            notifyFirstFrameMillis(frame.timestampMillis);
+        }

         // First, drain any previous data.
         LOG.i("onEvent", "frameNumber:", mFrameNumber, "timestamp:", frame.timestamp, "- draining.");
