Fix bugs, better logs and comments

pull/506/head
Mattia Iavarone 6 years ago
parent da6a0a6299
commit 10b7c3f2a3
  1. 11 cameraview/src/main/java/com/otaliastudios/cameraview/preview/GlCameraPreview.java
  2. 3 cameraview/src/main/java/com/otaliastudios/cameraview/video/FullVideoRecorder.java
  3. 42 cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/AudioMediaEncoder.java
  4. 1 cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/MediaEncoder.java
  5. 33 cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/TextureMediaEncoder.java
  6. 8 cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/VideoMediaEncoder.java

@@ -63,7 +63,7 @@ public class GlCameraPreview extends CameraPreview<GLSurfaceView, SurfaceTexture
private int mOutputTextureId = 0;
private SurfaceTexture mInputSurfaceTexture;
private EglViewport mOutputViewport;
private Set<RendererFrameCallback> mRendererFrameCallbacks = Collections.synchronizedSet(new HashSet<RendererFrameCallback>());
private final Set<RendererFrameCallback> mRendererFrameCallbacks = Collections.synchronizedSet(new HashSet<RendererFrameCallback>());
@VisibleForTesting float mCropScaleX = 1F;
@VisibleForTesting float mCropScaleY = 1F;
private View mRootView;
@@ -144,10 +144,13 @@ public class GlCameraPreview extends CameraPreview<GLSurfaceView, SurfaceTexture
getView().queueEvent(new Runnable() {
@Override
public void run() {
// Need to synchronize when iterating the Collections.synchronizedSet
synchronized (mRendererFrameCallbacks) {
for (RendererFrameCallback callback : mRendererFrameCallbacks) {
callback.onRendererTextureCreated(mOutputTextureId);
}
}
}
});
// Since we are using GLSurfaceView.RENDERMODE_WHEN_DIRTY, we must notify the SurfaceView
@@ -202,14 +205,15 @@ public class GlCameraPreview extends CameraPreview<GLSurfaceView, SurfaceTexture
Matrix.translateM(mTransformMatrix, 0, translX, translY, 0);
Matrix.scaleM(mTransformMatrix, 0, mCropScaleX, mCropScaleY, 1);
}
// Future note: passing scale to the viewport?
// They are scaleX and scaleY, but flipped based on mInputFlipped.
mOutputViewport.drawFrame(mOutputTextureId, mTransformMatrix);
synchronized (mRendererFrameCallbacks) {
// Need to synchronize when iterating the Collections.synchronizedSet
for (RendererFrameCallback callback : mRendererFrameCallbacks) {
callback.onRendererFrame(mInputSurfaceTexture, mCropScaleX, mCropScaleY);
}
}
}
}
@NonNull
@Override
@@ -299,6 +303,7 @@ public class GlCameraPreview extends CameraPreview<GLSurfaceView, SurfaceTexture
* Creates the renderer for this GL surface.
* @return the renderer for this GL surface
*/
@SuppressWarnings("WeakerAccess")
@NonNull
protected Renderer instantiateRenderer() {
return new Renderer();
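
Aside, for context on the synchronization added in this file (not part of the commit): Collections.synchronizedSet() only makes individual calls like add() and remove() thread-safe; iteration still has to hold the set's own monitor, otherwise a concurrent modification during the loop can fail non-deterministically. A minimal sketch with an illustrative callback type:

    import java.util.Collections;
    import java.util.HashSet;
    import java.util.Set;

    class CallbackRegistry {
        // add()/remove() are individually thread-safe thanks to the synchronized wrapper.
        private final Set<Runnable> callbacks =
                Collections.synchronizedSet(new HashSet<Runnable>());

        void register(Runnable callback) {
            callbacks.add(callback); // no extra locking needed here
        }

        void dispatch() {
            // Iteration is not atomic: hold the set's monitor while looping,
            // just like the renderer thread does above.
            synchronized (callbacks) {
                for (Runnable callback : callbacks) {
                    callback.run();
                }
            }
        }
    }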

@@ -37,13 +37,12 @@ public abstract class FullVideoRecorder extends VideoRecorder {
super(listener);
}
@SuppressWarnings({"WeakerAccess", "UnusedReturnValue", "BooleanMethodIsAlwaysInverted"})
@SuppressWarnings({"WeakerAccess", "UnusedReturnValue"})
protected boolean prepareMediaRecorder(@NonNull VideoResult.Stub stub) {
if (mMediaRecorderPrepared) return true;
return onPrepareMediaRecorder(stub, new MediaRecorder());
}
@SuppressWarnings("WeakerAccess")
protected boolean onPrepareMediaRecorder(@NonNull VideoResult.Stub stub, @NonNull MediaRecorder mediaRecorder) {
mMediaRecorder = mediaRecorder;
Size size = stub.rotation % 180 != 0 ? stub.size.flip() : stub.size;
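
A side note on the size handling kept by onPrepareMediaRecorder() (illustrative sketch only, not the library's code; the width/height/rotation parameters stand in for the VideoResult.Stub fields): when the rotation is 90 or 270 degrees, the dimensions are swapped before being handed to MediaRecorder.

    import android.media.MediaRecorder;

    class RecorderSizeExample {
        // Call after setVideoSource()/setOutputFormat() in a real setup.
        static void applyVideoSize(MediaRecorder recorder, int width, int height, int rotation) {
            // A 90 or 270 degree rotation swaps the encoded frame's orientation,
            // so flip the dimensions, mirroring the stub.size.flip() call above.
            boolean flip = rotation % 180 != 0;
            recorder.setVideoSize(flip ? height : width, flip ? width : height);
        }
    }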

@@ -20,6 +20,8 @@ import androidx.annotation.RequiresApi;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.LinkedBlockingQueue;
/**
@@ -79,6 +81,10 @@ public class AudioMediaEncoder extends MediaEncoder {
super("AudioEncoder");
mConfig = config.copy();
mTimestamp = new AudioTimestamp();
// These two were in onPrepare() but it's better to do warm-up here
// since thread and looper creation is expensive.
mEncoder = new AudioEncodingHandler();
mRecorder = new AudioRecordingThread();
}
@EncoderThread
@@ -97,8 +103,6 @@ public class AudioMediaEncoder extends MediaEncoder {
mMediaCodec.configure(audioFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
mMediaCodec.start();
mByteBufferPool = new ByteBufferPool(FRAME_SIZE, BUFFER_POOL_MAX_SIZE);
mEncoder = new AudioEncodingHandler();
mRecorder = new AudioRecordingThread();
}
@EncoderThread
@@ -200,12 +204,12 @@ public class AudioMediaEncoder extends MediaEncoder {
}
/**
* Sleeps for a frame duration, to skip it. This can be used to slow down
* Sleeps for the duration of a few frames, to skip them. This can be used to slow down
* the recording operation to balance it with encoding.
*/
private void sleep() {
try {
Thread.sleep(AudioTimestamp.bytesToUs(FRAME_SIZE, BYTE_RATE) / 1000);
Thread.sleep(AudioTimestamp.bytesToUs(FRAME_SIZE * 6, BYTE_RATE) / 1000);
} catch (InterruptedException ignore) {}
}
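
The sleep above throttles the recording thread by the duration of several audio frames. Assuming AudioTimestamp.bytesToUs() converts a byte count into microseconds of audio at the given byte rate (an assumption about that helper, roughly bytes * 1,000,000 / byteRate), the new call sleeps about six times longer than before. A sketch with illustrative constants:

    class AudioSleepExample {
        // Illustrative values; the real FRAME_SIZE and BYTE_RATE are defined in AudioMediaEncoder.
        private static final int FRAME_SIZE = 1024;      // bytes per frame (assumed)
        private static final int BYTE_RATE = 44100 * 2;  // 16-bit mono PCM at 44.1 kHz (assumed)

        // Assumed equivalent of AudioTimestamp.bytesToUs().
        static long bytesToUs(long bytes, int byteRate) {
            return bytes * 1_000_000L / byteRate;
        }

        // Sleep for roughly six frames worth of audio, as the new code does.
        static void sleepSixFrames() throws InterruptedException {
            Thread.sleep(bytesToUs(FRAME_SIZE * 6L, BYTE_RATE) / 1000);
        }
    }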
@@ -247,7 +251,15 @@ public class AudioMediaEncoder extends MediaEncoder {
super(WorkerHandler.get("AudioEncodingHandler").getLooper());
}
// Just to debug performance.
private int mSendCount = 0;
private int mExecuteCount = 0;
private long mAvgSendDelay = 0;
private long mAvgExecuteDelay = 0;
private Map<Long, Long> mSendStartMap = new HashMap<>();
private void sendInputBuffer(ByteBuffer buffer, long presentationTimeUs, boolean endOfStream) {
mSendStartMap.put(presentationTimeUs, System.nanoTime() / 1000000);
sendMessage(obtainMessage(
endOfStream ? 1 : 0,
(int) (presentationTimeUs >> 32),
@@ -258,9 +270,19 @@ public class AudioMediaEncoder extends MediaEncoder {
@Override
public void handleMessage(Message msg) {
super.handleMessage(msg);
boolean endOfStream = msg.what == 1;
long timestamp = (((long) msg.arg1) << 32) | (((long) msg.arg2) & 0xffffffffL);
boolean endOfStream = msg.what == 1;
LOG.i("encoding thread - got buffer. timestamp:", timestamp, "eos:", endOfStream);
// Performance logging
long sendEnd = System.nanoTime() / 1000000;
//noinspection ConstantConditions
long sendStart = mSendStartMap.remove(timestamp);
mAvgSendDelay = ((mAvgSendDelay * mSendCount) + (sendEnd - sendStart)) / (++mSendCount);
LOG.v("send delay millis:", sendEnd - sendStart, "average:", mAvgSendDelay);
long executeStart = System.nanoTime() / 1000000;
// Actual work
ByteBuffer buffer = (ByteBuffer) msg.obj;
int readBytes = buffer.remaining();
InputBuffer inputBuffer = mInputBufferPool.get();
@@ -282,6 +304,10 @@ public class AudioMediaEncoder extends MediaEncoder {
break; // Will try later.
}
}
long executeEnd = System.nanoTime() / 1000000;
mAvgExecuteDelay = ((mAvgExecuteDelay * mExecuteCount) + (executeEnd - executeStart)) / (++mExecuteCount);
LOG.v("execute delay millis:", executeEnd - executeStart, "average:", mAvgExecuteDelay);
}
private void performPendingOp(InputBuffer buffer) {
@@ -298,7 +324,11 @@ public class AudioMediaEncoder extends MediaEncoder {
// use an even smaller BUFFER_POOL_MAX_SIZE without losing audio frames. But this way
// we can accumulate delay on this new thread without noticing (no pool getting empty).
drainOutput(eos);
if (eos) WorkerHandler.get("AudioEncodingHandler").getThread().interrupt();
if (eos) {
// Not sure we want this: WorkerHandler.get("AudioEncodingHandler").getThread().interrupt();
LOG.e("EXECUTE DELAY MILLIS:", mAvgExecuteDelay);
LOG.e("SEND DELAY MILLIS:", mAvgSendDelay);
}
}
}
}
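
Two small techniques used by the new performance logging above, isolated in a sketch (illustrative names, not the library's code): packing the 64-bit presentation time into the two 32-bit int arguments of a Message, and keeping an incremental average of the measured delays.

    final class PerfLoggingExample {
        private int count = 0;
        private long averageMillis = 0;

        // Split the long timestamp into two ints, as done when posting the buffer message...
        static int highBits(long timestampUs) {
            return (int) (timestampUs >> 32);
        }

        static int lowBits(long timestampUs) {
            return (int) timestampUs; // keeps only the low 32 bits
        }

        // ...and recombine them on the handler side. Masking the low word prevents
        // sign extension from corrupting the high bits.
        static long recombine(int high, int low) {
            return (((long) high) << 32) | (((long) low) & 0xffffffffL);
        }

        // Incremental average, same shape as the mAvgSendDelay / mAvgExecuteDelay updates.
        void addSample(long delayMillis) {
            averageMillis = (averageMillis * count + delayMillis) / (++count);
        }
    }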

@@ -399,6 +399,7 @@ abstract class MediaEncoder {
// detect the mMaxLengthReached and stop recording.
if (mStartPresentationTimeUs == Long.MIN_VALUE) {
mStartPresentationTimeUs = mBufferInfo.presentationTimeUs;
LOG.w(mName, "DRAINING - Got the first presentation time:", mStartPresentationTimeUs);
}
mLastPresentationTimeUs = mBufferInfo.presentationTimeUs;
// Pass presentation times as offsets with respect to mStartPresentationTimeUs.
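
For reference, the offsetting mentioned in the comment above can be sketched like this (field names mirror the diff, the rest is illustrative): the first valid presentation time becomes the origin, and every later timestamp is passed on as an offset from it.

    class PresentationTimeExample {
        private long mStartPresentationTimeUs = Long.MIN_VALUE;
        private long mLastPresentationTimeUs = 0;

        // Returns the timestamp to forward, rebased on the first one ever seen.
        long rebase(long presentationTimeUs) {
            if (mStartPresentationTimeUs == Long.MIN_VALUE) {
                mStartPresentationTimeUs = presentationTimeUs; // first valid timestamp
            }
            mLastPresentationTimeUs = presentationTimeUs;
            return presentationTimeUs - mStartPresentationTimeUs;
        }
    }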

@@ -103,18 +103,27 @@ public class TextureMediaEncoder extends VideoMediaEncoder<TextureMediaEncoder.C
void onEvent(@NonNull String event, @Nullable Object data) {
if (!event.equals(FRAME_EVENT)) return;
TextureFrame frame = (TextureFrame) data;
if (frame == null) return; // Should not happen
if (frame.timestamp == 0 || mFrameNum < 0) {
// The first condition comes from grafika.
// The second condition means we were asked to stop.
if (frame == null) {
throw new IllegalArgumentException("Got null frame for FRAME_EVENT.");
}
if (frame.timestamp == 0) { // grafika
mFramePool.recycle(frame);
return;
}
if (mFrameNumber < 0) { // We were asked to stop.
mFramePool.recycle(frame);
return;
}
mFrameNumber++;
// First, drain any previous data.
LOG.i("onEvent", "frameNumber:", mFrameNumber, "timestamp:", frame.timestamp, "- draining.");
drainOutput(false);
// Then draw on the surface.
LOG.i("onEvent", "frameNumber:", mFrameNumber, "timestamp:", frame.timestamp, "- drawing.");
mFrameNum++;
int thisFrameNum = mFrameNum;
LOG.v("onEvent", "frameNum:", thisFrameNum, "realFrameNum:", mFrameNum, "timestamp:", frame.timestamp);
// We must scale this matrix like GlCameraPreview does, because it might have some cropping.
// 1. We must scale this matrix like GlCameraPreview does, because it might have some cropping.
// Scaling takes place with respect to the (0, 0, 0) point, so we must apply a Translation to compensate.
float[] transform = frame.transform;
float[] overlayTransform = frame.overlayTransform;
@@ -125,7 +134,7 @@ public class TextureMediaEncoder extends VideoMediaEncoder<TextureMediaEncoder.C
Matrix.translateM(transform, 0, scaleTranslX, scaleTranslY, 0);
Matrix.scaleM(transform, 0, scaleX, scaleY, 1);
// We also must rotate this matrix. In GlCameraPreview it is not needed because it is a live
// 2. We also must rotate this matrix. In GlCameraPreview it is not needed because it is a live
// stream, but the output video must be correctly rotated based on the device rotation at the moment.
// Rotation also takes place with respect to the origin (the Z axis), so we must
// translate to origin, rotate, then back to where we were.
@@ -133,21 +142,17 @@ public class TextureMediaEncoder extends VideoMediaEncoder<TextureMediaEncoder.C
Matrix.rotateM(transform, 0, mTransformRotation, 0, 0, 1);
Matrix.translateM(transform, 0, -0.5F, -0.5F, 0);
// 3. Do the same for overlays with their own rotation.
boolean hasOverlay = mConfig.overlayTextureId != NO_TEXTURE;
if (hasOverlay) {
Matrix.translateM(overlayTransform, 0, 0.5F, 0.5F, 0);
Matrix.rotateM(overlayTransform, 0, mConfig.overlayRotation, 0, 0, 1);
Matrix.translateM(overlayTransform, 0, -0.5F, -0.5F, 0);
}
LOG.v("onEvent", "frameNum:", thisFrameNum, "realFrameNum:", mFrameNum, "calling drainOutput.");
drainOutput(false);
LOG.v("onEvent", "frameNum:", thisFrameNum, "realFrameNum:", mFrameNum, "calling drawFrame.");
mViewport.drawFrame(mConfig.textureId, transform);
if (hasOverlay) {
mViewport.drawFrame(mConfig.overlayTextureId, overlayTransform);
}
mWindow.setPresentationTime(frame.timestamp);
mWindow.swapBuffers();
mFramePool.recycle(frame);
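
The translate/rotate/translate sequences above exist because android.opengl.Matrix rotations pivot on the origin; to rotate the texture transform around the center of the texture you first move the center to the origin and then move it back. In isolation (illustrative only):

    import android.opengl.Matrix;

    class TransformExample {
        // Rotate a 4x4 texture transform around (0.5, 0.5), the texture center.
        // Without the two translations the rotation would pivot on the (0, 0) corner.
        static void rotateAroundCenter(float[] transform, float degrees) {
            Matrix.translateM(transform, 0, 0.5F, 0.5F, 0);
            Matrix.rotateM(transform, 0, degrees, 0, 0, 1);
            Matrix.translateM(transform, 0, -0.5F, -0.5F, 0);
        }
    }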

@@ -41,7 +41,7 @@ abstract class VideoMediaEncoder<C extends VideoMediaEncoder.Config> extends Med
protected Surface mSurface;
@SuppressWarnings("WeakerAccess")
protected int mFrameNum = -1;
protected int mFrameNumber = -1;
protected static class Config {
public int width;
@@ -95,14 +95,14 @@ abstract class VideoMediaEncoder<C extends VideoMediaEncoder.Config> extends Med
@Override
void onStart() {
// Nothing to do here. Waiting for the first frame.
mFrameNum = 0;
mFrameNumber = 0;
}
@EncoderThread
@Override
void onStop() {
LOG.i("onStop", "setting mFrameNum to 1 and signaling the end of input stream.");
mFrameNum = -1;
LOG.i("onStop", "setting mFrameNumber to 1 and signaling the end of input stream.");
mFrameNumber = -1;
// Signals the end of input stream. This is a video-only API: in the normal case
// we use input buffers to signal the end, but in the video case we don't have
// input buffers because we use an input surface instead.
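
As the comment above notes, a Surface-fed encoder has no input buffers to flag with BUFFER_FLAG_END_OF_STREAM, so the end of input is signaled on the codec itself. A hedged sketch (not the library's code):

    import android.media.MediaCodec;

    class VideoEosExample {
        // Valid only for encoders configured with CONFIGURE_FLAG_ENCODE and fed through
        // createInputSurface(); buffer-fed codecs use BUFFER_FLAG_END_OF_STREAM instead.
        static void signalEndOfVideoInput(MediaCodec encoder) {
            encoder.signalEndOfInputStream();
        }
    }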
