diff --git a/cameraview/src/main/java/com/otaliastudios/cameraview/preview/GlCameraPreview.java b/cameraview/src/main/java/com/otaliastudios/cameraview/preview/GlCameraPreview.java
index 54f62371..525c579d 100644
--- a/cameraview/src/main/java/com/otaliastudios/cameraview/preview/GlCameraPreview.java
+++ b/cameraview/src/main/java/com/otaliastudios/cameraview/preview/GlCameraPreview.java
@@ -63,7 +63,7 @@ public class GlCameraPreview extends CameraPreview
-    private Set<RendererFrameCallback> mRendererFrameCallbacks = Collections.synchronizedSet(new HashSet<RendererFrameCallback>());
+    private final Set<RendererFrameCallback> mRendererFrameCallbacks = Collections.synchronizedSet(new HashSet<RendererFrameCallback>());
     @VisibleForTesting float mCropScaleX = 1F;
     @VisibleForTesting float mCropScaleY = 1F;
     private View mRootView;
@@ -144,8 +144,11 @@ public class GlCameraPreview extends CameraPreview
diff --git a/cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/AudioMediaEncoder.java b/cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/AudioMediaEncoder.java
--- a/cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/AudioMediaEncoder.java
+++ b/cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/AudioMediaEncoder.java
@@ ... @@
+        private Map<Long, Long> mSendStartMap = new HashMap<>();
+
         private void sendInputBuffer(ByteBuffer buffer, long presentationTimeUs, boolean endOfStream) {
+            mSendStartMap.put(presentationTimeUs, System.nanoTime() / 1000000);
             sendMessage(obtainMessage(
                     endOfStream ? 1 : 0,
                     (int) (presentationTimeUs >> 32),
@@ -258,9 +270,19 @@ public class AudioMediaEncoder extends MediaEncoder {
         @Override
         public void handleMessage(Message msg) {
             super.handleMessage(msg);
-            boolean endOfStream = msg.what == 1;
             long timestamp = (((long) msg.arg1) << 32) | (((long) msg.arg2) & 0xffffffffL);
+            boolean endOfStream = msg.what == 1;
             LOG.i("encoding thread - got buffer. timestamp:", timestamp, "eos:", endOfStream);
+
+            // Performance logging
+            long sendEnd = System.nanoTime() / 1000000;
+            //noinspection ConstantConditions
+            long sendStart = mSendStartMap.remove(timestamp);
+            mAvgSendDelay = ((mAvgSendDelay * mSendCount) + (sendEnd - sendStart)) / (++mSendCount);
+            LOG.v("send delay millis:", sendEnd - sendStart, "average:", mAvgSendDelay);
+            long executeStart = System.nanoTime() / 1000000;
+
+            // Actual work
             ByteBuffer buffer = (ByteBuffer) msg.obj;
             int readBytes = buffer.remaining();
             InputBuffer inputBuffer = mInputBufferPool.get();
@@ -282,6 +304,10 @@ public class AudioMediaEncoder extends MediaEncoder {
                     break; // Will try later.
                 }
             }
+
+            long executeEnd = System.nanoTime() / 1000000;
+            mAvgExecuteDelay = ((mAvgExecuteDelay * mExecuteCount) + (executeEnd - executeStart)) / (++mExecuteCount);
+            LOG.v("execute delay millis:", executeEnd - executeStart, "average:", mAvgExecuteDelay);
         }

         private void performPendingOp(InputBuffer buffer) {
@@ -298,7 +324,11 @@ public class AudioMediaEncoder extends MediaEncoder {
             // use an even smaller BUFFER_POOL_MAX_SIZE without losing audio frames. But this way
             // we can accumulate delay on this new thread without noticing (no pool getting empty).
             drainOutput(eos);
-            if (eos) WorkerHandler.get("AudioEncodingHandler").getThread().interrupt();
+            if (eos) {
+                // Not sure we want this: WorkerHandler.get("AudioEncodingHandler").getThread().interrupt();
+                LOG.e("EXECUTE DELAY MILLIS:", mAvgExecuteDelay);
+                LOG.e("SEND DELAY MILLIS:", mAvgSendDelay);
+            }
         }
     }
 }
diff --git a/cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/MediaEncoder.java b/cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/MediaEncoder.java
index 40b2b257..ba801e49 100644
--- a/cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/MediaEncoder.java
+++ b/cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/MediaEncoder.java
@@ -399,6 +399,7 @@ abstract class MediaEncoder {
                 // detect the mMaxLengthReached and stop recording.
                 if (mStartPresentationTimeUs == Long.MIN_VALUE) {
                     mStartPresentationTimeUs = mBufferInfo.presentationTimeUs;
+                    LOG.w(mName, "DRAINING - Got the first presentation time:", mStartPresentationTimeUs);
                 }
                 mLastPresentationTimeUs = mBufferInfo.presentationTimeUs;
                 // Pass presentation times as offets with respect to the mStartPresentationTimeUs.
diff --git a/cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/TextureMediaEncoder.java b/cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/TextureMediaEncoder.java
index 3744fe73..629143ab 100644
--- a/cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/TextureMediaEncoder.java
+++ b/cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/TextureMediaEncoder.java
@@ -103,18 +103,27 @@ public class TextureMediaEncoder extends VideoMediaEncoder
diff --git a/cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/VideoMediaEncoder.java b/cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/VideoMediaEncoder.java
--- a/cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/VideoMediaEncoder.java
+++ b/cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/VideoMediaEncoder.java
@@ ... @@ abstract class VideoMediaEncoder extends Med
     protected Surface mSurface;

     @SuppressWarnings("WeakerAccess")
-    protected int mFrameNum = -1;
+    protected int mFrameNumber = -1;

     protected static class Config {
         public int width;
@@ -95,14 +95,14 @@ abstract class VideoMediaEncoder extends Med
     @Override
     void onStart() {
         // Nothing to do here. Waiting for the first frame.
-        mFrameNum = 0;
+        mFrameNumber = 0;
     }

     @EncoderThread
     @Override
     void onStop() {
-        LOG.i("onStop", "setting mFrameNum to 1 and signaling the end of input stream.");
-        mFrameNum = -1;
+        LOG.i("onStop", "setting mFrameNumber to 1 and signaling the end of input stream.");
+        mFrameNumber = -1;
         // Signals the end of input stream. This is a Video only API, as in the normal case,
         // we use input buffers to signal the end. In the video case, we don't have input buffers
         // because we use an input surface instead.
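
For readers of the AudioEncodingHandler hunks above: the 64-bit presentation timestamp is split across the two int arguments of the Message when the buffer is posted and rebuilt in handleMessage. Below is a standalone illustration of that round trip; the class name is invented, and the sender-side expression for the low half is an assumption, since only the high-half argument is visible in the hunk.

    public class TimestampPacking {
        public static void main(String[] args) {
            long presentationTimeUs = 0x1234ABCD5678L; // any 64-bit microsecond timestamp
            // Sender side: split into two 32-bit halves that fit Message.arg1 / Message.arg2.
            int arg1 = (int) (presentationTimeUs >> 32); // high 32 bits
            int arg2 = (int) presentationTimeUs;         // low 32 bits (negative as an int when bit 31 is set)
            // Receiver side, as in handleMessage: widen both halves, mask the low one, recombine.
            // Without "& 0xffffffffL" the sign-extended low half would clobber the high bits.
            long timestamp = (((long) arg1) << 32) | (((long) arg2) & 0xffffffffL);
            System.out.println(timestamp == presentationTimeUs); // prints: true
        }
    }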
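
The performance logging added to AudioEncodingHandler records when each buffer is posted, keyed by its presentation timestamp, and folds the measured send delay into a cumulative moving average on the handler thread; the execute delay is averaged the same way around the encoding work. Below is a minimal sketch of that bookkeeping, outside the patch, with invented names (DelayTracker, onSend, onReceive).

    import java.util.HashMap;
    import java.util.Map;

    // Illustrative only: measures how long buffers wait between being posted and being handled,
    // using the same timestamp-keyed map and cumulative moving average as the patch.
    public class DelayTracker {

        private final Map<Long, Long> sendStartMap = new HashMap<>();
        private long sendCount = 0;
        private long avgSendDelayMillis = 0;

        // Producer side, e.g. right before sendMessage(...).
        public void onSend(long presentationTimeUs) {
            sendStartMap.put(presentationTimeUs, System.nanoTime() / 1000000);
        }

        // Consumer side, e.g. at the top of handleMessage(...).
        public void onReceive(long presentationTimeUs) {
            Long sendStart = sendStartMap.remove(presentationTimeUs);
            if (sendStart == null) return; // unknown timestamp, nothing to measure
            long delay = System.nanoTime() / 1000000 - sendStart;
            // Cumulative moving average: newAvg = (oldAvg * n + sample) / (n + 1).
            avgSendDelayMillis = (avgSendDelayMillis * sendCount + delay) / (++sendCount);
            System.out.println("send delay millis: " + delay + " average: " + avgSendDelayMillis);
        }

        public static void main(String[] args) throws InterruptedException {
            DelayTracker tracker = new DelayTracker();
            tracker.onSend(42L);
            Thread.sleep(5);
            tracker.onReceive(42L);
        }
    }

Unlike this sketch, the patch can skip the null check (hence the //noinspection ConstantConditions), since every timestamp that reaches handleMessage was first put into the map by sendInputBuffer.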