diff --git a/cameraview/src/androidTest/java/com/otaliastudios/cameraview/engine/CameraIntegrationTest.java b/cameraview/src/androidTest/java/com/otaliastudios/cameraview/engine/CameraIntegrationTest.java
index d8bf0e7d..14787e17 100644
--- a/cameraview/src/androidTest/java/com/otaliastudios/cameraview/engine/CameraIntegrationTest.java
+++ b/cameraview/src/androidTest/java/com/otaliastudios/cameraview/engine/CameraIntegrationTest.java
@@ -140,6 +140,8 @@ public abstract class CameraIntegrationTest extends BaseTest {
         if (expectSuccess) {
             assertNotNull("Can open", result);
             // Extra wait for the bind state.
+            // TODO fix this and other while {} in this class in a more elegant way.
+            //noinspection StatementWithEmptyBody
             while (controller.getBindState() != CameraEngine.STATE_STARTED) {}
         } else {
             assertNull("Should not open", result);
@@ -226,6 +228,7 @@ public abstract class CameraIntegrationTest extends BaseTest {
         }
     }
 
+    @SuppressWarnings("unused")
     private void takeVideoSnapshotSync(boolean expectSuccess) {
         takeVideoSnapshotSync(expectSuccess,0);
     }
@@ -438,7 +441,6 @@ public abstract class CameraIntegrationTest extends BaseTest {
 
     @Test
     public void testSetAudio() {
-        // TODO: when permissions are managed, check that Audio.ON triggers the audio permission
         openSync(true);
         Audio[] values = Audio.values();
         for (Audio value : values) {
@@ -479,7 +481,7 @@ public abstract class CameraIntegrationTest extends BaseTest {
                 assertEquals(oldValue, camera.getPlaySounds());
             }
         } else {
-            // TODO do when Camera2 is completed
+            assertEquals(newValue, camera.getPlaySounds());
         }
     }
 
@@ -511,10 +513,10 @@ public abstract class CameraIntegrationTest extends BaseTest {
 
     @Test
     public void testStartEndVideoSnapshot() {
-        // TODO should check api
-        waitForOpen(true);
-        camera.takeVideoSnapshot(new File(context().getFilesDir(), "video.mp4"), 4000);
-        waitForVideoEnd(true);
+        // TODO should check api level for snapshot?
+        openSync(true);
+        takeVideoSnapshotSync(true, 4000);
+        waitForVideoResult(true);
     }
 
     @Test
@@ -525,13 +527,6 @@ public abstract class CameraIntegrationTest extends BaseTest {
         waitForVideoResult(false);
     }
 
-    @Test
-    public void testEndVideoSnapshot_withoutStarting() {
-        waitForOpen(true);
-        camera.stopVideo();
-        waitForVideoEnd(false);
-    }
-
     @Test
     public void testEndVideo_withMaxSize() {
         camera.setMode(Mode.VIDEO);
@@ -632,14 +627,17 @@ public abstract class CameraIntegrationTest extends BaseTest {
         assertEquals(latch.getCount(), 1);
     }
 
+    @SuppressWarnings("StatementWithEmptyBody")
     @Test
     public void testCapturePicture_size() throws Exception {
         openSync(true);
         // PictureSize can still be null after opened.
+        // TODO be more elegant
         while (camera.getPictureSize() == null) {}
         Size size = camera.getPictureSize();
         camera.takePicture();
         PictureResult result = waitForPictureResult(true);
+        assertNotNull(result);
         Bitmap bitmap = CameraUtils.decodeBitmap(result.getData(), Integer.MAX_VALUE, Integer.MAX_VALUE);
         assertNotNull(bitmap);
         assertEquals(result.getSize(), size);
@@ -679,16 +677,20 @@ public abstract class CameraIntegrationTest extends BaseTest {
         assertEquals(1, latch.getCount());
     }
 
+    @SuppressWarnings("StatementWithEmptyBody")
     @Test
     public void testCaptureSnapshot_size() throws Exception {
         openSync(true);
         // SnapshotSize can still be null after opened.
+        // TODO be more elegant
         while (camera.getSnapshotSize() == null) {}
         Size size = camera.getSnapshotSize();
         camera.takePictureSnapshot();
         PictureResult result = waitForPictureResult(true);
+        assertNotNull(result);
         Bitmap bitmap = CameraUtils.decodeBitmap(result.getData(), Integer.MAX_VALUE, Integer.MAX_VALUE);
+        assertNotNull(bitmap);
         assertEquals(result.getSize(), size);
         assertEquals(bitmap.getWidth(), size.getWidth());
         assertEquals(bitmap.getHeight(), size.getHeight());
@@ -784,9 +786,9 @@ public abstract class CameraIntegrationTest extends BaseTest {
         Overlay overlay = mock(Overlay.class);
         when(overlay.drawsOn(any(Overlay.Target.class))).thenReturn(true);
         controller.setOverlay(overlay);
-        waitForOpen(true);
+        openSync(true);
         camera.takePictureSnapshot();
-        waitForPicture(true);
+        waitForPictureResult(true);
         verify(overlay, atLeastOnce()).drawsOn(Overlay.Target.PICTURE_SNAPSHOT);
         verify(overlay, times(1)).drawOn(eq(Overlay.Target.PICTURE_SNAPSHOT), any(Canvas.class));
     }
@@ -796,10 +798,9 @@ public abstract class CameraIntegrationTest extends BaseTest {
         Overlay overlay = mock(Overlay.class);
         when(overlay.drawsOn(any(Overlay.Target.class))).thenReturn(true);
         controller.setOverlay(overlay);
-        waitForOpen(true);
-        camera.takeVideoSnapshot(new File(context().getFilesDir(), "video.mp4"), 4000);
-        waitForVideoStart();
-        waitForVideoEnd(true);
+        openSync(true);
+        takeVideoSnapshotSync(true, 4000);
+        waitForVideoResult(true);
         verify(overlay, atLeastOnce()).drawsOn(Overlay.Target.VIDEO_SNAPSHOT);
         verify(overlay, atLeastOnce()).drawOn(eq(Overlay.Target.VIDEO_SNAPSHOT), any(Canvas.class));
     }
diff --git a/cameraview/src/main/java/com/otaliastudios/cameraview/CameraView.java b/cameraview/src/main/java/com/otaliastudios/cameraview/CameraView.java
index d2ae9e03..f8e0d6ff 100644
--- a/cameraview/src/main/java/com/otaliastudios/cameraview/CameraView.java
+++ b/cameraview/src/main/java/com/otaliastudios/cameraview/CameraView.java
@@ -244,7 +244,9 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
      * {@link #setEngine(Engine)} is called.
      */
     private void doInstantiateEngine() {
+        LOG.w("doInstantiateEngine:", "instantiating. engine:", mEngine);
         mCameraEngine = instantiateCameraEngine(mEngine, mCameraCallbacks);
+        LOG.w("doInstantiateEngine:", "instantiated. engine:", mCameraEngine.getClass().getSimpleName());
         mCameraEngine.setOverlay(mOverlayLayout);
     }
 
@@ -255,7 +257,9 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
      */
     @VisibleForTesting
     void doInstantiatePreview() {
+        LOG.w("doInstantiateEngine:", "instantiating. preview:", mPreview);
         mCameraPreview = instantiatePreview(mPreview, getContext(), this);
+        LOG.w("doInstantiateEngine:", "instantiated. preview:", mCameraPreview.getClass().getSimpleName());
         mCameraEngine.setPreview(mCameraPreview);
     }
 
@@ -287,7 +291,6 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
      */
     @NonNull
     protected CameraPreview instantiatePreview(@NonNull Preview preview, @NonNull Context context, @NonNull ViewGroup container) {
-        LOG.w("preview:", "isHardwareAccelerated:", isHardwareAccelerated());
         switch (preview) {
             case SURFACE:
                 return new SurfaceCameraPreview(context, container);
@@ -393,7 +396,7 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
         // other than respect it. The preview will eventually be cropped at the sides (by PreviewImpl scaling)
         // except the case in which these fixed dimensions manage to fit exactly the preview aspect ratio.
         if (widthMode == EXACTLY && heightMode == EXACTLY) {
-            LOG.w("onMeasure:", "both are MATCH_PARENT or fixed value. We adapt.",
+            LOG.i("onMeasure:", "both are MATCH_PARENT or fixed value. We adapt.",
                     "This means CROP_CENTER.", "(" + widthValue + "x" + heightValue + ")");
             super.onMeasure(widthMeasureSpec, heightMeasureSpec);
             return;
diff --git a/cameraview/src/main/java/com/otaliastudios/cameraview/engine/Camera2Engine.java b/cameraview/src/main/java/com/otaliastudios/cameraview/engine/Camera2Engine.java
index 40748e03..392529e1 100644
--- a/cameraview/src/main/java/com/otaliastudios/cameraview/engine/Camera2Engine.java
+++ b/cameraview/src/main/java/com/otaliastudios/cameraview/engine/Camera2Engine.java
@@ -670,7 +670,7 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
 
     private void doTakeVideo(@NonNull final VideoResult.Stub stub) {
         if (!(mVideoRecorder instanceof Full2VideoRecorder)) {
-            mVideoRecorder = new Full2VideoRecorder(this, mCameraId);
+            throw new IllegalStateException("doTakeVideo called, but video recorder is not a Full2VideoRecorder! " + mVideoRecorder);
         }
         Full2VideoRecorder recorder = (Full2VideoRecorder) mVideoRecorder;
         try {
diff --git a/cameraview/src/main/java/com/otaliastudios/cameraview/internal/utils/Pool.java b/cameraview/src/main/java/com/otaliastudios/cameraview/internal/utils/Pool.java
index b59a0819..4a0408b9 100644
--- a/cameraview/src/main/java/com/otaliastudios/cameraview/internal/utils/Pool.java
+++ b/cameraview/src/main/java/com/otaliastudios/cameraview/internal/utils/Pool.java
@@ -63,17 +63,17 @@ public class Pool {
         T item = mQueue.poll();
         if (item != null) {
             activeCount++; // poll decreases, this fixes
-            LOG.v("GET: Reusing recycled item.", this);
+            LOG.v("GET - Reusing recycled item.", this);
             return item;
         }
 
         if (isEmpty()) {
-            LOG.v("GET: Returning null. Too much items requested.", this);
+            LOG.v("GET - Returning null. Too much items requested.", this);
             return null;
         }
 
         activeCount++;
-        LOG.v("GET: Creating a new item.", this);
+        LOG.v("GET - Creating a new item.", this);
         return factory.create();
     }
 
@@ -84,7 +84,7 @@ public class Pool {
      * @param item used item
      */
     public void recycle(@NonNull T item) {
-        LOG.v("RECYCLE: Recycling item.", this);
+        LOG.v("RECYCLE - Recycling item.", this);
         if (--activeCount < 0) {
             throw new IllegalStateException("Trying to recycle an item which makes activeCount < 0."
                     + "This means that this or some previous items being recycled were not coming from " +
@@ -112,6 +112,7 @@ public class Pool {
      *
      * @return count
      */
+    @SuppressWarnings("WeakerAccess")
     public final int count() {
         return activeCount() + recycledCount();
     }
@@ -122,6 +123,7 @@ public class Pool {
      *
      * @return active count
      */
+    @SuppressWarnings("WeakerAccess")
     public final int activeCount() {
         return activeCount;
     }
@@ -133,6 +135,7 @@ public class Pool {
      *
      * @return recycled count
      */
+    @SuppressWarnings("WeakerAccess")
     public final int recycledCount() {
         return mQueue.size();
     }
@@ -140,6 +143,6 @@ public class Pool {
     @NonNull
     @Override
     public String toString() {
-        return getClass().getSimpleName() + " -- count:" + count() + ", active:" + activeCount() + ", recycled:" + recycledCount();
+        return getClass().getSimpleName() + " - count:" + count() + ", active:" + activeCount() + ", recycled:" + recycledCount();
     }
 }
diff --git a/cameraview/src/main/java/com/otaliastudios/cameraview/video/SnapshotVideoRecorder.java b/cameraview/src/main/java/com/otaliastudios/cameraview/video/SnapshotVideoRecorder.java
index f398139e..92b43341 100644
--- a/cameraview/src/main/java/com/otaliastudios/cameraview/video/SnapshotVideoRecorder.java
+++ b/cameraview/src/main/java/com/otaliastudios/cameraview/video/SnapshotVideoRecorder.java
@@ -190,24 +190,29 @@ public class SnapshotVideoRecorder extends VideoRecorder implements RendererFram
 
     }
 
+    @Override
+    public void onEncodingStart() {
+        // Do nothing.
+    }
+
     @EncoderThread
     @Override
-    public void onEncoderStop(int stopReason, @Nullable Exception e) {
+    public void onEncodingEnd(int stopReason, @Nullable Exception e) {
         // If something failed, undo the result, since this is the mechanism
         // to notify Camera1Engine about this.
         if (e != null) {
-            LOG.e("Error onEncoderStop", e);
+            LOG.e("Error onEncodingEnd", e);
             mResult = null;
             mError = e;
         } else {
-            if (stopReason == MediaEncoderEngine.STOP_BY_MAX_DURATION) {
-                LOG.i("onEncoderStop because of max duration.");
+            if (stopReason == MediaEncoderEngine.END_BY_MAX_DURATION) {
+                LOG.i("onEncodingEnd because of max duration.");
                 mResult.endReason = VideoResult.REASON_MAX_DURATION_REACHED;
-            } else if (stopReason == MediaEncoderEngine.STOP_BY_MAX_SIZE) {
-                LOG.i("onEncoderStop because of max size.");
+            } else if (stopReason == MediaEncoderEngine.END_BY_MAX_SIZE) {
+                LOG.i("onEncodingEnd because of max size.");
                 mResult.endReason = VideoResult.REASON_MAX_SIZE_REACHED;
             } else {
-                LOG.i("onEncoderStop because of user.");
+                LOG.i("onEncodingEnd because of user.");
             }
         }
         // Cleanup
diff --git a/cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/AudioMediaEncoder.java b/cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/AudioMediaEncoder.java
index c2e15d2e..64b3aff8 100644
--- a/cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/AudioMediaEncoder.java
+++ b/cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/AudioMediaEncoder.java
@@ -157,7 +157,7 @@ public class AudioMediaEncoder extends MediaEncoder {
             while (!mRequestStop) {
                 read(false);
             }
-            LOG.w("RECORDER: Stop was requested. We're out of the loop. Will post an endOfStream.");
+            LOG.w("Stop was requested. We're out of the loop. Will post an endOfStream.");
             // Last input with 0 length. This will signal the endOfStream.
             // Can't use drain(true); it is only available when writing to the codec InputSurface.
             read(true);
@@ -169,20 +169,21 @@
         private void read(boolean endOfStream) {
             mCurrentBuffer = mByteBufferPool.get();
             if (mCurrentBuffer == null) {
-                LOG.e("Skipping audio frame, encoding is too slow.");
-                // TODO should fix the next presentation time here. However this is
-                // extremely unlikely based on my tests. The mByteBufferPool should be big enough.
+                LOG.e("read thread - Skipping audio frame, encoding is too slow.");
+                // TODO should fix the next presentation time here.
             } else {
                 mCurrentBuffer.clear();
                 mReadBytes = mAudioRecord.read(mCurrentBuffer, FRAME_SIZE);
+                LOG.v("read thread - Read new audio frame. Bytes:", mReadBytes);
                 if (mReadBytes > 0) { // Good read: increase PTS.
-                    increaseTime(mReadBytes);
+                    mLastTimeUs = increaseTime(mReadBytes);
+                    LOG.v("read thread - Increasing PTS to", mLastTimeUs);
                     mCurrentBuffer.limit(mReadBytes);
                     onBuffer(endOfStream);
                 } else if (mReadBytes == AudioRecord.ERROR_INVALID_OPERATION) {
-                    LOG.e("Got AudioRecord.ERROR_INVALID_OPERATION");
+                    LOG.e("read thread - Got AudioRecord.ERROR_INVALID_OPERATION");
                 } else if (mReadBytes == AudioRecord.ERROR_BAD_VALUE) {
-                    LOG.e("Got AudioRecord.ERROR_BAD_VALUE");
+                    LOG.e("read thread - Got AudioRecord.ERROR_BAD_VALUE");
                 }
             }
         }
@@ -193,12 +194,20 @@
          * to the consumer.
          */
         private void onBuffer(boolean endOfStream) {
+            LOG.v("read thread - Sending buffer to encoder thread.");
             mEncoder.sendInputBuffer(mCurrentBuffer, mLastTimeUs, endOfStream);
         }
 
-        private void increaseTime(int readBytes) {
-            increaseTime3(readBytes);
-            LOG.v("Read", readBytes, "bytes, increasing PTS to", mLastTimeUs);
+        private long bytesToUs(int bytes) {
+            return (1000000L * bytes) / BYTE_RATE;
+        }
+
+        private long bytesToUs(long bytes) {
+            return (1000000L * bytes) / BYTE_RATE;
+        }
+
+        private long increaseTime(int readBytes) {
+            return increaseTime3(readBytes);
         }
 
         /**
@@ -206,21 +215,23 @@
          * It will use System.nanoTime() just once, as the starting point.
         * Of course we don't as there are things going on in this thread.
          */
-        private void increaseTime1(int readBytes) {
-            mLastTimeUs += (1000000L * readBytes) / BYTE_RATE;
+        @SuppressWarnings("unused")
+        private long increaseTime1(int readBytes) {
+            return mLastTimeUs + bytesToUs(readBytes);
         }
 
         /**
          * Just for testing, this method will use Api 24 method to retrieve the timestamp.
          * This way we let the platform choose instead of making assumptions.
          */
+        @SuppressWarnings("unused")
         @RequiresApi(24)
-        private void increaseTime2(int readBytes) {
+        private long increaseTime2(int readBytes) {
            if (mApi24Timestamp == null) {
                 mApi24Timestamp = new AudioTimestamp();
             }
             mAudioRecord.getTimestamp(mApi24Timestamp, AudioTimestamp.TIMEBASE_MONOTONIC);
-            mLastTimeUs = mApi24Timestamp.nanoTime / 1000;
+            return mApi24Timestamp.nanoTime / 1000;
         }
 
         private AudioTimestamp mApi24Timestamp;
@@ -228,27 +239,33 @@
          * This method looks like an improvement over {@link #increaseTime1(int)} as it
          * accounts for the current time as well. Adapted & improved. from Kickflip.
          */
-        private void increaseTime3(int readBytes) {
-            long currentTime = System.nanoTime() / 1000;
-            long correctedTime;
-            long bufferDuration = (1000000 * readBytes) / BYTE_RATE;
-            long bufferTime = currentTime - bufferDuration; // delay of acquiring the audio buffer
-            if (mTotalReadBytes == 0) {
-                mStartTimeUs = bufferTime;
-            }
+        private long increaseTime3(int readBytes) {
+            long bufferDurationUs = bytesToUs(readBytes);
+            long bufferEndTimeUs = System.nanoTime() / 1000; // now
+            long bufferStartTimeUs = bufferEndTimeUs - bufferDurationUs;
+
+            // If this is the first time, the base time is the buffer start time.
+            if (mBytesSinceBaseTime == 0) mBaseTimeUs = bufferStartTimeUs;
+
             // Recompute time assuming that we are respecting the sampling frequency.
-            // However, if the correction is too big (> 2*bufferDuration), reset to this point.
-            correctedTime = mStartTimeUs + (1000000 * mTotalReadBytes) / BYTE_RATE;
-            if(bufferTime - correctedTime >= 2 * bufferDuration) {
-                mStartTimeUs = bufferTime;
-                mTotalReadBytes = 0;
-                correctedTime = mStartTimeUs;
+            // This puts the time at the end of last read buffer, which means, where we
+            // should be if we had no delay / missed buffers.
+            long correctedTimeUs = mBaseTimeUs + bytesToUs(mBytesSinceBaseTime);
+            long correctionUs = bufferStartTimeUs - correctedTimeUs;
+
+            // However, if the correction is too big (> 2*bufferDurationUs), reset to this point.
+            // This is triggered if we lose buffers and are recording/encoding at a slower rate.
+            if (correctionUs >= 2L * bufferDurationUs) {
+                mBaseTimeUs = bufferStartTimeUs;
+                mBytesSinceBaseTime = readBytes;
+                return mBaseTimeUs;
+            } else {
+                mBytesSinceBaseTime += readBytes;
+                return correctedTimeUs;
             }
-            mTotalReadBytes += readBytes;
-            mLastTimeUs = correctedTime;
         }
 
-        private long mStartTimeUs;
-        private long mTotalReadBytes;
+        private long mBaseTimeUs;
+        private long mBytesSinceBaseTime;
     }
 
     /**
@@ -278,6 +295,7 @@ public class AudioMediaEncoder extends MediaEncoder {
             super.handleMessage(msg);
             boolean endOfStream = msg.what == 1;
             long timestamp = (((long) msg.arg1) << 32) | (((long) msg.arg2) & 0xffffffffL);
+            LOG.v("encoding thread - got buffer. timestamp:", timestamp, "eos:", endOfStream);
             ByteBuffer buffer = (ByteBuffer) msg.obj;
             int readBytes = buffer.remaining();
             InputBuffer inputBuffer = mInputBufferPool.get();
@@ -290,7 +308,7 @@
         }
 
         private void performPendingOps(boolean force) {
-            LOG.v("Performing", mPendingOps.size(), "Pending operations.");
+            LOG.v("encoding thread - performing", mPendingOps.size(), "pending operations.");
             InputBuffer buffer;
             while ((buffer = mPendingOps.peek()) != null) {
                 if (force) {
@@ -305,12 +323,15 @@
         }
 
         private void performPendingOp(InputBuffer buffer) {
+            LOG.v("encoding thread - performing pending operation for timestamp:", buffer.timestamp);
             buffer.data.put(buffer.source);
             mByteBufferPool.recycle(buffer.source);
             mPendingOps.remove(buffer);
+            LOG.v("encoding thread - performing pending operation for timestamp:", buffer.timestamp, "- encoding.");
             encodeInputBuffer(buffer);
             boolean eos = buffer.isEndOfStream;
             mInputBufferPool.recycle(buffer);
+            LOG.v("encoding thread - performing pending operation for timestamp:", buffer.timestamp, "- draining.");
             drainOutput(eos);
             if (eos) {
                 mInputBufferPool.clear();
diff --git a/cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/MediaEncoder.java b/cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/MediaEncoder.java
index 2fa7b0f0..f54277cb 100644
--- a/cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/MediaEncoder.java
+++ b/cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/MediaEncoder.java
@@ -88,11 +88,11 @@ abstract class MediaEncoder {
      * NOTE: it's important to call {@link WorkerHandler#post(Runnable)} instead of run()!
      */
    final void start() {
-        LOG.i(getName(), "Start was called. Posting.");
+        LOG.w(getName(), "Start was called. Posting.");
         mWorker.post(new Runnable() {
             @Override
             public void run() {
-                LOG.i(getName(), "Start was called. Executing.");
+                LOG.w(getName(), "Start was called. Executing.");
                 onStart();
             }
         });
     }
@@ -124,11 +124,11 @@
      * NOTE: it's important to call {@link WorkerHandler#post(Runnable)} instead of run()!
      */
     final void stop() {
-        LOG.i(getName(), "Stop was called. Posting.");
+        LOG.w(getName(), "Stop was called. Posting.");
         mWorker.post(new Runnable() {
             @Override
             public void run() {
-                LOG.i(getName(), "Stop was called. Executing.");
+                LOG.w(getName(), "Stop was called. Executing.");
                 onStop();
             }
         });
     }
@@ -175,8 +175,9 @@
      * parameters, might also be through an input buffer flag).
      */
     private void release() {
-        LOG.w("Subclass", getName(), "Notified that it is released.");
-        mController.requestRelease(mTrackIndex);
+        LOG.w(getName(), "is being released. Notifying controller and releasing codecs.");
+        // TODO should we notify after this method?
+        mController.notifyReleased(mTrackIndex);
         mMediaCodec.stop();
         mMediaCodec.release();
         mMediaCodec = null;
@@ -217,7 +218,7 @@
 
     /**
      * Returns a new input buffer and index, waiting indefinitely if none is available.
-     * The buffer should be written into, then the index should be passed to {@link #encodeInputBuffer(InputBuffer)}.
+     * The buffer should be written into, then be passed to {@link #encodeInputBuffer(InputBuffer)}.
      *
      * @param holder the input buffer holder
      */
@@ -233,7 +234,7 @@
      */
     @SuppressWarnings("WeakerAccess")
     protected void encodeInputBuffer(InputBuffer buffer) {
-        LOG.w("ENCODING:", getName(), "Buffer:", buffer.index, "Bytes:", buffer.length, "Presentation:", buffer.timestamp);
+        LOG.v(getName(), "ENCODING - Buffer:", buffer.index, "Bytes:", buffer.length, "Presentation:", buffer.timestamp);
         if (buffer.isEndOfStream) { // send EOS
             mMediaCodec.queueInputBuffer(buffer.index, 0, 0, buffer.timestamp, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
@@ -266,7 +267,7 @@
     @SuppressLint("LogNotTimber")
     @SuppressWarnings("WeakerAccess")
     protected void drainOutput(boolean drainAll) {
-        LOG.w("DRAINING:", getName(), "EOS:", drainAll);
+        LOG.v(getName(), "DRAINING - EOS:", drainAll);
         if (mMediaCodec == null) {
             LOG.e("drain() was called before prepare() or after releasing.");
             return;
@@ -315,7 +316,7 @@
                 // and should be used for offsets only.
                 // TODO find a better way, this causes sync issues. (+ note: this sends pts=0 at first)
                 // mBufferInfo.presentationTimeUs = mLastPresentationTimeUs - mStartPresentationTimeUs;
-                LOG.i("DRAINING:", getName(), "Dispatching write(). Presentation:", mBufferInfo.presentationTimeUs);
+                LOG.v(getName(), "DRAINING - About to write(). Presentation:", mBufferInfo.presentationTimeUs);
 
                 // TODO fix the mBufferInfo being the same, then implement delayed writing in Controller
                 // and remove the isStarted() check here.
@@ -336,17 +337,18 @@
                     && !mMaxLengthReached
                     && mStartPresentationTimeUs != Long.MIN_VALUE
                     && mLastPresentationTimeUs - mStartPresentationTimeUs > mMaxLengthMillis * 1000) {
-                LOG.w("DRAINING: Reached maxLength! mLastPresentationTimeUs:", mLastPresentationTimeUs,
+                LOG.w(getName(), "DRAINING - Reached maxLength! mLastPresentationTimeUs:", mLastPresentationTimeUs,
                         "mStartPresentationTimeUs:", mStartPresentationTimeUs,
                         "mMaxLengthUs:", mMaxLengthMillis * 1000);
                 mMaxLengthReached = true;
+                LOG.w(getName(), "DRAINING - Requesting a stop.");
                 mController.requestStop(mTrackIndex);
                 break;
             }
 
             // Check for the EOS flag so we can release the encoder.
             if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
-                LOG.w("DRAINING:", getName(), "Dispatching release().");
+                LOG.w(getName(), "DRAINING - Got EOS. Releasing the codec.");
                 release();
                 break;
             }
diff --git a/cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/MediaEncoderEngine.java b/cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/MediaEncoderEngine.java
index 8ba29f56..3666db54 100644
--- a/cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/MediaEncoderEngine.java
+++ b/cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/MediaEncoderEngine.java
@@ -26,34 +26,40 @@ public class MediaEncoderEngine {
      */
     public interface Listener {
 
+        /**
+         * Called when encoding started.
+         */
+        @EncoderThread
+        void onEncodingStart();
+
         /**
          * Called when encoding stopped for some reason.
          * If there's an exception, it failed.
-         * @param stopReason the reason
         * @param e the error, if present
          */
         @EncoderThread
-        void onEncoderStop(int stopReason, @Nullable Exception e);
+        void onEncodingEnd(int reason, @Nullable Exception e);
     }
 
     private final static String TAG = MediaEncoderEngine.class.getSimpleName();
     private final static CameraLogger LOG = CameraLogger.create(TAG);
 
     @SuppressWarnings("WeakerAccess")
-    public final static int STOP_BY_USER = 0;
-    public final static int STOP_BY_MAX_DURATION = 1;
-    public final static int STOP_BY_MAX_SIZE = 2;
+    public final static int END_BY_USER = 0;
+    public final static int END_BY_MAX_DURATION = 1;
+    public final static int END_BY_MAX_SIZE = 2;
 
     private ArrayList mEncoders;
     private MediaMuxer mMediaMuxer;
     private int mStartedEncodersCount;
-    private int mStoppedEncodersCount;
+    private int mReleasedEncodersCount;
     private boolean mMediaMuxerStarted;
     @SuppressWarnings("FieldCanBeLocal")
     private Controller mController;
     private Listener mListener;
-    private int mStopReason = STOP_BY_USER;
-    private int mPossibleStopReason;
+    private int mEndReason = END_BY_USER;
+    private int mPossibleEndReason;
     private final Object mControllerLock = new Object();
 
     /**
@@ -87,7 +93,7 @@
         }
         mStartedEncodersCount = 0;
         mMediaMuxerStarted = false;
-        mStoppedEncodersCount = 0;
+        mReleasedEncodersCount = 0;
 
         // Trying to convert the size constraints to duration constraints,
         // because they are super easy to check.
@@ -101,13 +107,13 @@
         long finalMaxDuration = Long.MAX_VALUE;
 
         if (maxSize > 0 && maxDuration > 0) {
-            mPossibleStopReason = sizeMaxDuration < maxDuration ? STOP_BY_MAX_SIZE : STOP_BY_MAX_DURATION;
+            mPossibleEndReason = sizeMaxDuration < maxDuration ? END_BY_MAX_SIZE : END_BY_MAX_DURATION;
             finalMaxDuration = Math.min(sizeMaxDuration, maxDuration);
         } else if (maxSize > 0) {
-            mPossibleStopReason = STOP_BY_MAX_SIZE;
+            mPossibleEndReason = END_BY_MAX_SIZE;
             finalMaxDuration = sizeMaxDuration;
         } else if (maxDuration > 0) {
-            mPossibleStopReason = STOP_BY_MAX_DURATION;
+            mPossibleEndReason = END_BY_MAX_DURATION;
             finalMaxDuration = maxDuration;
         }
         LOG.w("Computed a max duration of", (finalMaxDuration / 1000F));
@@ -120,6 +126,7 @@
      * Asks encoders to start (each one on its own track).
      */
     public final void start() {
+        LOG.i("Passing event to encoders:", "START");
         for (MediaEncoder encoder : mEncoders) {
             encoder.start();
         }
@@ -133,6 +140,7 @@
      */
     @SuppressWarnings("SameParameterValue")
     public final void notify(final String event, final Object data) {
+        LOG.i("Passing event to encoders:", event);
         for (MediaEncoder encoder : mEncoders) {
             encoder.notify(event, data);
         }
@@ -140,21 +148,23 @@
 
     /**
      * Asks encoders to stop. This is not sync, of course we will ask for encoders
-     * to call {@link Controller#requestRelease(int)} before actually stop the muxer.
+     * to call {@link Controller#notifyReleased(int)} before actually stop the muxer.
      * When all encoders request a release, {@link #release()} is called to do cleanup
      * and notify the listener.
      */
     public final void stop() {
+        LOG.i("Passing event to encoders:", "STOP");
         for (MediaEncoder encoder : mEncoders) {
             encoder.stop();
         }
     }
 
     /**
-     * Called after all encoders have requested a release using {@link Controller#requestRelease(int)}.
+     * Called after all encoders have requested a release using {@link Controller#notifyReleased(int)}.
      * At this point we will do cleanup and notify the listener.
      */
     private void release() {
+        LOG.i("release:", "Releasing muxer after all encoders have been released.");
         Exception error = null;
         if (mMediaMuxer != null) {
             // stop() throws an exception if you haven't fed it any data.
@@ -168,14 +178,16 @@
             }
             mMediaMuxer = null;
         }
+        LOG.w("release:", "Dispatching end to listener - reason:", mEndReason, "error:", error);
         if (mListener != null) {
-            mListener.onEncoderStop(mStopReason, error);
+            mListener.onEncodingEnd(mEndReason, error);
             mListener = null;
         }
-        mStopReason = STOP_BY_USER;
+        mEndReason = END_BY_USER;
         mStartedEncodersCount = 0;
-        mStoppedEncodersCount = 0;
+        mReleasedEncodersCount = 0;
         mMediaMuxerStarted = false;
+        LOG.i("release:", "Completed.");
     }
 
     /**
@@ -219,10 +231,14 @@
                 throw new IllegalStateException("Trying to start but muxer started already");
             }
             int track = mMediaMuxer.addTrack(format);
-            LOG.w("Controller:", "Assigned track", track, "to format", format.getString(MediaFormat.KEY_MIME));
+            LOG.w("requestStart:", "Assigned track", track, "to format", format.getString(MediaFormat.KEY_MIME));
             if (++mStartedEncodersCount == mEncoders.size()) {
+                LOG.w("requestStart:", "All encoders have started. Starting muxer and dispatching onEncodingStart().");
                 mMediaMuxer.start();
                 mMediaMuxerStarted = true;
+                if (mListener != null) {
+                    mListener.onEncodingStart();
+                }
             }
             return track;
         }
@@ -251,7 +267,7 @@
             // This is a bad idea and causes crashes.
            // if (info.presentationTimeUs < mLastTimestampUs) info.presentationTimeUs = mLastTimestampUs;
             // mLastTimestampUs = info.presentationTimeUs;
-            LOG.v("Writing for track", buffer.trackIndex, ". Presentation:", buffer.info.presentationTimeUs);
+            LOG.v("write:", "Writing OutputBuffer - track:", buffer.trackIndex, "presentation:", buffer.info.presentationTimeUs);
             mMediaMuxer.writeSampleData(buffer.trackIndex, buffer.data, buffer.info);
             pool.recycle(buffer);
         }
@@ -264,10 +280,11 @@
          * When this succeeds, {@link MediaEncoder#stop()} is called.
          */
         void requestStop(int track) {
-            LOG.i("RequestStop was called for track", track);
             synchronized (mControllerLock) {
+                LOG.w("requestStop:", "Called for track", track);
                 if (--mStartedEncodersCount == 0) {
-                    mStopReason = mPossibleStopReason;
+                    LOG.w("requestStop:", "All encoders have requested a stop. Stopping them.");
+                    mEndReason = mPossibleEndReason;
                     stop();
                 }
             }
@@ -277,10 +294,11 @@
         /**
         * Notifies that the encoder was stopped. After this is called by all encoders,
         * we will actually stop the muxer.
         */
-        void requestRelease(int track) {
-            LOG.i("requestRelease was called for track", track);
+        void notifyReleased(int track) {
            synchronized (mControllerLock) {
-                if (++mStoppedEncodersCount == mEncoders.size()) {
+                LOG.w("notifyReleased:", "Called for track", track);
+                if (++mReleasedEncodersCount == mEncoders.size()) {
+                    LOG.w("requestStop:", "All encoders have been released. Stopping the muxer.");
                     release();
                 }
             }