Fix tests and logs

pull/502/head
Mattia Iavarone 6 years ago
parent ddea8cbe1f
commit 04e23b861f
  1. cameraview/src/androidTest/java/com/otaliastudios/cameraview/engine/CameraIntegrationTest.java (39 changed lines)
  2. cameraview/src/main/java/com/otaliastudios/cameraview/CameraView.java (7 changed lines)
  3. cameraview/src/main/java/com/otaliastudios/cameraview/engine/Camera2Engine.java (2 changed lines)
  4. cameraview/src/main/java/com/otaliastudios/cameraview/internal/utils/Pool.java (13 changed lines)
  5. cameraview/src/main/java/com/otaliastudios/cameraview/video/SnapshotVideoRecorder.java (19 changed lines)
  6. cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/AudioMediaEncoder.java (87 changed lines)
  7. cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/MediaEncoder.java (26 changed lines)
  8. cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/MediaEncoderEngine.java (66 changed lines)

cameraview/src/androidTest/java/com/otaliastudios/cameraview/engine/CameraIntegrationTest.java

@@ -140,6 +140,8 @@ public abstract class CameraIntegrationTest extends BaseTest {
         if (expectSuccess) {
             assertNotNull("Can open", result);
             // Extra wait for the bind state.
+            // TODO fix this and other while {} in this class in a more elegant way.
+            //noinspection StatementWithEmptyBody
             while (controller.getBindState() != CameraEngine.STATE_STARTED) {}
         } else {
             assertNull("Should not open", result);

@@ -226,6 +228,7 @@ public abstract class CameraIntegrationTest extends BaseTest {
         }
     }
+    @SuppressWarnings("unused")
     private void takeVideoSnapshotSync(boolean expectSuccess) {
         takeVideoSnapshotSync(expectSuccess,0);
     }

@@ -438,7 +441,6 @@ public abstract class CameraIntegrationTest extends BaseTest {
     @Test
     public void testSetAudio() {
-        // TODO: when permissions are managed, check that Audio.ON triggers the audio permission
        openSync(true);
        Audio[] values = Audio.values();
        for (Audio value : values) {

@@ -479,7 +481,7 @@ public abstract class CameraIntegrationTest extends BaseTest {
                 assertEquals(oldValue, camera.getPlaySounds());
             }
         } else {
-            // TODO do when Camera2 is completed
+            assertEquals(newValue, camera.getPlaySounds());
         }
     }

@@ -511,10 +513,10 @@ public abstract class CameraIntegrationTest extends BaseTest {
     @Test
     public void testStartEndVideoSnapshot() {
-        // TODO should check api
-        waitForOpen(true);
-        camera.takeVideoSnapshot(new File(context().getFilesDir(), "video.mp4"), 4000);
-        waitForVideoEnd(true);
+        // TODO should check api level for snapshot?
+        openSync(true);
+        takeVideoSnapshotSync(true, 4000);
+        waitForVideoResult(true);
     }
     @Test

@@ -525,13 +527,6 @@ public abstract class CameraIntegrationTest extends BaseTest {
         waitForVideoResult(false);
     }
-    @Test
-    public void testEndVideoSnapshot_withoutStarting() {
-        waitForOpen(true);
-        camera.stopVideo();
-        waitForVideoEnd(false);
-    }
     @Test
     public void testEndVideo_withMaxSize() {
         camera.setMode(Mode.VIDEO);

@@ -632,14 +627,17 @@ public abstract class CameraIntegrationTest extends BaseTest {
         assertEquals(latch.getCount(), 1);
     }
+    @SuppressWarnings("StatementWithEmptyBody")
     @Test
     public void testCapturePicture_size() throws Exception {
         openSync(true);
         // PictureSize can still be null after opened.
+        // TODO be more elegant
         while (camera.getPictureSize() == null) {}
         Size size = camera.getPictureSize();
         camera.takePicture();
         PictureResult result = waitForPictureResult(true);
+        assertNotNull(result);
         Bitmap bitmap = CameraUtils.decodeBitmap(result.getData(), Integer.MAX_VALUE, Integer.MAX_VALUE);
         assertNotNull(bitmap);
         assertEquals(result.getSize(), size);

@@ -679,16 +677,20 @@ public abstract class CameraIntegrationTest extends BaseTest {
         assertEquals(1, latch.getCount());
     }
+    @SuppressWarnings("StatementWithEmptyBody")
     @Test
     public void testCaptureSnapshot_size() throws Exception {
         openSync(true);
         // SnapshotSize can still be null after opened.
+        // TODO be more elegant
         while (camera.getSnapshotSize() == null) {}
         Size size = camera.getSnapshotSize();
         camera.takePictureSnapshot();
         PictureResult result = waitForPictureResult(true);
+        assertNotNull(result);
         Bitmap bitmap = CameraUtils.decodeBitmap(result.getData(), Integer.MAX_VALUE, Integer.MAX_VALUE);
+        assertNotNull(bitmap);
         assertEquals(result.getSize(), size);
         assertEquals(bitmap.getWidth(), size.getWidth());
         assertEquals(bitmap.getHeight(), size.getHeight());

@@ -784,9 +786,9 @@ public abstract class CameraIntegrationTest extends BaseTest {
         Overlay overlay = mock(Overlay.class);
         when(overlay.drawsOn(any(Overlay.Target.class))).thenReturn(true);
         controller.setOverlay(overlay);
-        waitForOpen(true);
+        openSync(true);
         camera.takePictureSnapshot();
-        waitForPicture(true);
+        waitForPictureResult(true);
         verify(overlay, atLeastOnce()).drawsOn(Overlay.Target.PICTURE_SNAPSHOT);
         verify(overlay, times(1)).drawOn(eq(Overlay.Target.PICTURE_SNAPSHOT), any(Canvas.class));
     }

@@ -796,10 +798,9 @@ public abstract class CameraIntegrationTest extends BaseTest {
         Overlay overlay = mock(Overlay.class);
         when(overlay.drawsOn(any(Overlay.Target.class))).thenReturn(true);
         controller.setOverlay(overlay);
-        waitForOpen(true);
-        camera.takeVideoSnapshot(new File(context().getFilesDir(), "video.mp4"), 4000);
-        waitForVideoStart();
-        waitForVideoEnd(true);
+        openSync(true);
+        takeVideoSnapshotSync(true, 4000);
+        waitForVideoResult(true);
         verify(overlay, atLeastOnce()).drawsOn(Overlay.Target.VIDEO_SNAPSHOT);
         verify(overlay, atLeastOnce()).drawOn(eq(Overlay.Target.VIDEO_SNAPSHOT), any(Canvas.class));
     }
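
Several tests in this class still spin on an empty while loop until the engine reaches a given state, and the new TODO above acknowledges that. One hedged way to replace those busy-waits is a small polling helper with a timeout, sketched below; waitUntil is a hypothetical helper, not part of this commit.

    // Hypothetical test helper, not in this commit: poll a condition with a timeout
    // instead of spinning in an empty while () {} loop.
    private static void waitUntil(long timeoutMillis, java.util.concurrent.Callable<Boolean> condition) throws Exception {
        long deadline = System.currentTimeMillis() + timeoutMillis;
        while (!condition.call()) {
            if (System.currentTimeMillis() > deadline) {
                throw new AssertionError("Condition not met within " + timeoutMillis + " ms");
            }
            Thread.sleep(50); // give the camera thread some room instead of burning CPU
        }
    }

    // Example usage, replacing the empty-body loop above:
    // waitUntil(5000, () -> controller.getBindState() == CameraEngine.STATE_STARTED);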

cameraview/src/main/java/com/otaliastudios/cameraview/CameraView.java

@@ -244,7 +244,9 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
      * {@link #setEngine(Engine)} is called.
      */
     private void doInstantiateEngine() {
+        LOG.w("doInstantiateEngine:", "instantiating. engine:", mEngine);
         mCameraEngine = instantiateCameraEngine(mEngine, mCameraCallbacks);
+        LOG.w("doInstantiateEngine:", "instantiated. engine:", mCameraEngine.getClass().getSimpleName());
         mCameraEngine.setOverlay(mOverlayLayout);
     }

@@ -255,7 +257,9 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
      */
     @VisibleForTesting
     void doInstantiatePreview() {
+        LOG.w("doInstantiateEngine:", "instantiating. preview:", mPreview);
         mCameraPreview = instantiatePreview(mPreview, getContext(), this);
+        LOG.w("doInstantiateEngine:", "instantiated. preview:", mCameraPreview.getClass().getSimpleName());
         mCameraEngine.setPreview(mCameraPreview);
     }

@@ -287,7 +291,6 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
      */
     @NonNull
     protected CameraPreview instantiatePreview(@NonNull Preview preview, @NonNull Context context, @NonNull ViewGroup container) {
-        LOG.w("preview:", "isHardwareAccelerated:", isHardwareAccelerated());
         switch (preview) {
             case SURFACE:
                 return new SurfaceCameraPreview(context, container);

@@ -393,7 +396,7 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
         // other than respect it. The preview will eventually be cropped at the sides (by PreviewImpl scaling)
         // except the case in which these fixed dimensions manage to fit exactly the preview aspect ratio.
         if (widthMode == EXACTLY && heightMode == EXACTLY) {
-            LOG.w("onMeasure:", "both are MATCH_PARENT or fixed value. We adapt.",
+            LOG.i("onMeasure:", "both are MATCH_PARENT or fixed value. We adapt.",
                     "This means CROP_CENTER.", "(" + widthValue + "x" + heightValue + ")");
             super.onMeasure(widthMeasureSpec, heightMeasureSpec);
             return;

cameraview/src/main/java/com/otaliastudios/cameraview/engine/Camera2Engine.java

@@ -670,7 +670,7 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
     private void doTakeVideo(@NonNull final VideoResult.Stub stub) {
         if (!(mVideoRecorder instanceof Full2VideoRecorder)) {
-            mVideoRecorder = new Full2VideoRecorder(this, mCameraId);
+            throw new IllegalStateException("doTakeVideo called, but video recorder is not a Full2VideoRecorder! " + mVideoRecorder);
         }
         Full2VideoRecorder recorder = (Full2VideoRecorder) mVideoRecorder;
         try {

cameraview/src/main/java/com/otaliastudios/cameraview/internal/utils/Pool.java

@@ -63,17 +63,17 @@ public class Pool<T> {
         T item = mQueue.poll();
         if (item != null) {
             activeCount++; // poll decreases, this fixes
-            LOG.v("GET: Reusing recycled item.", this);
+            LOG.v("GET - Reusing recycled item.", this);
             return item;
         }
         if (isEmpty()) {
-            LOG.v("GET: Returning null. Too much items requested.", this);
+            LOG.v("GET - Returning null. Too much items requested.", this);
             return null;
         }
         activeCount++;
-        LOG.v("GET: Creating a new item.", this);
+        LOG.v("GET - Creating a new item.", this);
         return factory.create();
     }

@@ -84,7 +84,7 @@ public class Pool<T> {
      * @param item used item
      */
     public void recycle(@NonNull T item) {
-        LOG.v("RECYCLE: Recycling item.", this);
+        LOG.v("RECYCLE - Recycling item.", this);
         if (--activeCount < 0) {
             throw new IllegalStateException("Trying to recycle an item which makes activeCount < 0." +
                     "This means that this or some previous items being recycled were not coming from " +

@@ -112,6 +112,7 @@ public class Pool<T> {
      *
      * @return count
      */
+    @SuppressWarnings("WeakerAccess")
     public final int count() {
         return activeCount() + recycledCount();
     }

@@ -122,6 +123,7 @@ public class Pool<T> {
      *
      * @return active count
      */
+    @SuppressWarnings("WeakerAccess")
     public final int activeCount() {
         return activeCount;
     }

@@ -133,6 +135,7 @@ public class Pool<T> {
      *
      * @return recycled count
      */
+    @SuppressWarnings("WeakerAccess")
     public final int recycledCount() {
         return mQueue.size();
     }

@@ -140,6 +143,6 @@ public class Pool<T> {
     @NonNull
     @Override
     public String toString() {
-        return getClass().getSimpleName() + " -- count:" + count() + ", active:" + activeCount() + ", recycled:" + recycledCount();
+        return getClass().getSimpleName() + " - count:" + count() + ", active:" + activeCount() + ", recycled:" + recycledCount();
     }
 }
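
For context, the Pool touched here tracks two numbers: items currently handed out (activeCount) and items sitting in the recycled queue, and count() is their sum. Below is a minimal sketch of the same accounting pattern, assuming a LinkedBlockingQueue and a simple factory interface; names and structure are illustrative, not the library's exact API.

    import java.util.concurrent.LinkedBlockingQueue;

    /** Illustrative pool with the same active/recycled accounting; not the library class. */
    class SimplePool<T> {
        interface Factory<T> { T create(); }

        private final int maxSize;
        private final Factory<T> factory;
        private final LinkedBlockingQueue<T> queue;
        private int activeCount;

        SimplePool(int maxSize, Factory<T> factory) {
            this.maxSize = maxSize;
            this.factory = factory;
            this.queue = new LinkedBlockingQueue<>(maxSize);
        }

        synchronized T get() {
            T item = queue.poll();               // reuse a recycled item if available
            if (item == null) {
                if (activeCount >= maxSize) return null; // too many items requested
                item = factory.create();         // otherwise create a new one
            }
            activeCount++;                       // either way, one more item is in use
            return item;
        }

        synchronized void recycle(T item) {
            if (--activeCount < 0) throw new IllegalStateException("recycle() without a matching get()");
            queue.offer(item);                   // keep it around for the next get()
        }

        synchronized int count() { return activeCount + queue.size(); }
    }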

cameraview/src/main/java/com/otaliastudios/cameraview/video/SnapshotVideoRecorder.java

@@ -190,24 +190,29 @@ public class SnapshotVideoRecorder extends VideoRecorder implements RendererFram
     }

+    @Override
+    public void onEncodingStart() {
+        // Do nothing.
+    }

     @EncoderThread
     @Override
-    public void onEncoderStop(int stopReason, @Nullable Exception e) {
+    public void onEncodingEnd(int stopReason, @Nullable Exception e) {
         // If something failed, undo the result, since this is the mechanism
         // to notify Camera1Engine about this.
         if (e != null) {
-            LOG.e("Error onEncoderStop", e);
+            LOG.e("Error onEncodingEnd", e);
             mResult = null;
             mError = e;
         } else {
-            if (stopReason == MediaEncoderEngine.STOP_BY_MAX_DURATION) {
-                LOG.i("onEncoderStop because of max duration.");
+            if (stopReason == MediaEncoderEngine.END_BY_MAX_DURATION) {
+                LOG.i("onEncodingEnd because of max duration.");
                 mResult.endReason = VideoResult.REASON_MAX_DURATION_REACHED;
-            } else if (stopReason == MediaEncoderEngine.STOP_BY_MAX_SIZE) {
-                LOG.i("onEncoderStop because of max size.");
+            } else if (stopReason == MediaEncoderEngine.END_BY_MAX_SIZE) {
+                LOG.i("onEncodingEnd because of max size.");
                 mResult.endReason = VideoResult.REASON_MAX_SIZE_REACHED;
             } else {
-                LOG.i("onEncoderStop because of user.");
+                LOG.i("onEncodingEnd because of user.");
             }
         }
         // Cleanup

cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/AudioMediaEncoder.java

@@ -157,7 +157,7 @@ public class AudioMediaEncoder extends MediaEncoder {
             while (!mRequestStop) {
                 read(false);
             }
-            LOG.w("RECORDER: Stop was requested. We're out of the loop. Will post an endOfStream.");
+            LOG.w("Stop was requested. We're out of the loop. Will post an endOfStream.");
             // Last input with 0 length. This will signal the endOfStream.
             // Can't use drain(true); it is only available when writing to the codec InputSurface.
             read(true);

@@ -169,20 +169,21 @@ public class AudioMediaEncoder extends MediaEncoder {
         private void read(boolean endOfStream) {
             mCurrentBuffer = mByteBufferPool.get();
             if (mCurrentBuffer == null) {
-                LOG.e("Skipping audio frame, encoding is too slow.");
-                // TODO should fix the next presentation time here. However this is
-                // extremely unlikely based on my tests. The mByteBufferPool should be big enough.
+                LOG.e("read thread - Skipping audio frame, encoding is too slow.");
+                // TODO should fix the next presentation time here.
             } else {
                 mCurrentBuffer.clear();
                 mReadBytes = mAudioRecord.read(mCurrentBuffer, FRAME_SIZE);
+                LOG.v("read thread - Read new audio frame. Bytes:", mReadBytes);
                 if (mReadBytes > 0) { // Good read: increase PTS.
-                    increaseTime(mReadBytes);
+                    mLastTimeUs = increaseTime(mReadBytes);
+                    LOG.v("read thread - Increasing PTS to", mLastTimeUs);
                     mCurrentBuffer.limit(mReadBytes);
                     onBuffer(endOfStream);
                 } else if (mReadBytes == AudioRecord.ERROR_INVALID_OPERATION) {
-                    LOG.e("Got AudioRecord.ERROR_INVALID_OPERATION");
+                    LOG.e("read thread - Got AudioRecord.ERROR_INVALID_OPERATION");
                 } else if (mReadBytes == AudioRecord.ERROR_BAD_VALUE) {
-                    LOG.e("Got AudioRecord.ERROR_BAD_VALUE");
+                    LOG.e("read thread - Got AudioRecord.ERROR_BAD_VALUE");
                 }
             }
         }

@@ -193,12 +194,20 @@ public class AudioMediaEncoder extends MediaEncoder {
          * to the consumer.
          */
         private void onBuffer(boolean endOfStream) {
+            LOG.v("read thread - Sending buffer to encoder thread.");
             mEncoder.sendInputBuffer(mCurrentBuffer, mLastTimeUs, endOfStream);
         }

-        private void increaseTime(int readBytes) {
-            increaseTime3(readBytes);
-            LOG.v("Read", readBytes, "bytes, increasing PTS to", mLastTimeUs);
+        private long bytesToUs(int bytes) {
+            return (1000000L * bytes) / BYTE_RATE;
+        }
+
+        private long bytesToUs(long bytes) {
+            return (1000000L * bytes) / BYTE_RATE;
+        }
+
+        private long increaseTime(int readBytes) {
+            return increaseTime3(readBytes);
         }

         /**

@@ -206,21 +215,23 @@ public class AudioMediaEncoder extends MediaEncoder {
          * It will use System.nanoTime() just once, as the starting point.
          * Of course we don't as there are things going on in this thread.
          */
-        private void increaseTime1(int readBytes) {
-            mLastTimeUs += (1000000L * readBytes) / BYTE_RATE;
+        @SuppressWarnings("unused")
+        private long increaseTime1(int readBytes) {
+            return mLastTimeUs + bytesToUs(readBytes);
         }

         /**
          * Just for testing, this method will use Api 24 method to retrieve the timestamp.
          * This way we let the platform choose instead of making assumptions.
          */
+        @SuppressWarnings("unused")
         @RequiresApi(24)
-        private void increaseTime2(int readBytes) {
+        private long increaseTime2(int readBytes) {
             if (mApi24Timestamp == null) {
                 mApi24Timestamp = new AudioTimestamp();
             }
             mAudioRecord.getTimestamp(mApi24Timestamp, AudioTimestamp.TIMEBASE_MONOTONIC);
-            mLastTimeUs = mApi24Timestamp.nanoTime / 1000;
+            return mApi24Timestamp.nanoTime / 1000;
         }
         private AudioTimestamp mApi24Timestamp;

@@ -228,27 +239,33 @@ public class AudioMediaEncoder extends MediaEncoder {
          * This method looks like an improvement over {@link #increaseTime1(int)} as it
          * accounts for the current time as well. Adapted & improved. from Kickflip.
         */
-        private void increaseTime3(int readBytes) {
-            long currentTime = System.nanoTime() / 1000;
-            long correctedTime;
-            long bufferDuration = (1000000 * readBytes) / BYTE_RATE;
-            long bufferTime = currentTime - bufferDuration; // delay of acquiring the audio buffer
-            if (mTotalReadBytes == 0) {
-                mStartTimeUs = bufferTime;
-            }
-            // Recompute time assuming that we are respecting the sampling frequency.
-            // However, if the correction is too big (> 2*bufferDuration), reset to this point.
-            correctedTime = mStartTimeUs + (1000000 * mTotalReadBytes) / BYTE_RATE;
-            if(bufferTime - correctedTime >= 2 * bufferDuration) {
-                mStartTimeUs = bufferTime;
-                mTotalReadBytes = 0;
-                correctedTime = mStartTimeUs;
-            }
-            mTotalReadBytes += readBytes;
-            mLastTimeUs = correctedTime;
+        private long increaseTime3(int readBytes) {
+            long bufferDurationUs = bytesToUs(readBytes);
+            long bufferEndTimeUs = System.nanoTime() / 1000; // now
+            long bufferStartTimeUs = bufferEndTimeUs - bufferDurationUs;
+
+            // If this is the first time, the base time is the buffer start time.
+            if (mBytesSinceBaseTime == 0) mBaseTimeUs = bufferStartTimeUs;
+
+            // Recompute time assuming that we are respecting the sampling frequency.
+            // This puts the time at the end of last read buffer, which means, where we
+            // should be if we had no delay / missed buffers.
+            long correctedTimeUs = mBaseTimeUs + bytesToUs(mBytesSinceBaseTime);
+            long correctionUs = bufferStartTimeUs - correctedTimeUs;
+
+            // However, if the correction is too big (> 2*bufferDurationUs), reset to this point.
+            // This is triggered if we lose buffers and are recording/encoding at a slower rate.
+            if (correctionUs >= 2L * bufferDurationUs) {
+                mBaseTimeUs = bufferStartTimeUs;
+                mBytesSinceBaseTime = readBytes;
+                return mBaseTimeUs;
+            } else {
+                mBytesSinceBaseTime += readBytes;
+                return correctedTimeUs;
+            }
         }
-        private long mStartTimeUs;
-        private long mTotalReadBytes;
+        private long mBaseTimeUs;
+        private long mBytesSinceBaseTime;
     }

     /**
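
The new increaseTime3() logic is easier to follow with concrete numbers. Below is a minimal standalone sketch of the same bytes-to-microseconds bookkeeping, assuming 44100 Hz mono 16-bit PCM (a byte rate of 88200 B/s); the actual BYTE_RATE used by the encoder may differ, so treat the constants as placeholders.

    // Illustrative only: same drift-correction idea as increaseTime3(), with assumed constants.
    public class AudioPtsTracker {
        private static final long BYTE_RATE = 44100L * 2; // assumed: 44100 Hz, mono, 16-bit PCM

        private long baseTimeUs;
        private long bytesSinceBaseTime;

        private static long bytesToUs(long bytes) {
            return (1000000L * bytes) / BYTE_RATE; // e.g. 2048 bytes -> ~23219 us
        }

        /** Returns the presentation time (us) for a buffer of readBytes finished reading at nowUs. */
        public long nextPts(int readBytes, long nowUs) {
            long bufferDurationUs = bytesToUs(readBytes);
            long bufferStartTimeUs = nowUs - bufferDurationUs;
            if (bytesSinceBaseTime == 0) baseTimeUs = bufferStartTimeUs;

            // Where we should be if every buffer arrived exactly at the sampling rate.
            long correctedTimeUs = baseTimeUs + bytesToUs(bytesSinceBaseTime);
            long correctionUs = bufferStartTimeUs - correctedTimeUs;

            // If we drifted by more than two buffers (e.g. frames were dropped), re-anchor.
            if (correctionUs >= 2L * bufferDurationUs) {
                baseTimeUs = bufferStartTimeUs;
                bytesSinceBaseTime = readBytes;
                return baseTimeUs;
            }
            bytesSinceBaseTime += readBytes;
            return correctedTimeUs;
        }
    }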
@@ -278,6 +295,7 @@ public class AudioMediaEncoder extends MediaEncoder {
             super.handleMessage(msg);
             boolean endOfStream = msg.what == 1;
             long timestamp = (((long) msg.arg1) << 32) | (((long) msg.arg2) & 0xffffffffL);
+            LOG.v("encoding thread - got buffer. timestamp:", timestamp, "eos:", endOfStream);
             ByteBuffer buffer = (ByteBuffer) msg.obj;
             int readBytes = buffer.remaining();
             InputBuffer inputBuffer = mInputBufferPool.get();

@@ -290,7 +308,7 @@ public class AudioMediaEncoder extends MediaEncoder {
         }
         private void performPendingOps(boolean force) {
-            LOG.v("Performing", mPendingOps.size(), "Pending operations.");
+            LOG.v("encoding thread - performing", mPendingOps.size(), "pending operations.");
             InputBuffer buffer;
             while ((buffer = mPendingOps.peek()) != null) {
                 if (force) {

@@ -305,12 +323,15 @@ public class AudioMediaEncoder extends MediaEncoder {
         }
         private void performPendingOp(InputBuffer buffer) {
+            LOG.v("encoding thread - performing pending operation for timestamp:", buffer.timestamp);
             buffer.data.put(buffer.source);
             mByteBufferPool.recycle(buffer.source);
             mPendingOps.remove(buffer);
+            LOG.v("encoding thread - performing pending operation for timestamp:", buffer.timestamp, "- encoding.");
             encodeInputBuffer(buffer);
             boolean eos = buffer.isEndOfStream;
             mInputBufferPool.recycle(buffer);
+            LOG.v("encoding thread - performing pending operation for timestamp:", buffer.timestamp, "- draining.");
             drainOutput(eos);
             if (eos) {
                 mInputBufferPool.clear();
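
The handler above reassembles a 64-bit presentation timestamp from the two 32-bit arg fields of a Message. For reference, here is a minimal sketch of both directions of that packing; the sender side is an assumption inferred from the decoding line, not copied from the library's sendInputBuffer().

    import android.os.Handler;
    import android.os.Message;
    import java.nio.ByteBuffer;

    class TimestampPacking {
        // Sender side (assumed): split the long into two ints so it fits Message.arg1/arg2.
        static void send(Handler handler, ByteBuffer buffer, long timestampUs, boolean endOfStream) {
            int what = endOfStream ? 1 : 0;
            int arg1 = (int) (timestampUs >>> 32);        // high 32 bits
            int arg2 = (int) (timestampUs & 0xffffffffL); // low 32 bits
            handler.sendMessage(handler.obtainMessage(what, arg1, arg2, buffer));
        }

        // Receiver side: the same expression used in handleMessage() above.
        static long unpack(Message msg) {
            return (((long) msg.arg1) << 32) | (((long) msg.arg2) & 0xffffffffL);
        }
    }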

cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/MediaEncoder.java

@@ -88,11 +88,11 @@ abstract class MediaEncoder {
      * NOTE: it's important to call {@link WorkerHandler#post(Runnable)} instead of run()!
      */
     final void start() {
-        LOG.i(getName(), "Start was called. Posting.");
+        LOG.w(getName(), "Start was called. Posting.");
         mWorker.post(new Runnable() {
             @Override
             public void run() {
-                LOG.i(getName(), "Start was called. Executing.");
+                LOG.w(getName(), "Start was called. Executing.");
                 onStart();
             }
         });

@@ -124,11 +124,11 @@ abstract class MediaEncoder {
      * NOTE: it's important to call {@link WorkerHandler#post(Runnable)} instead of run()!
      */
     final void stop() {
-        LOG.i(getName(), "Stop was called. Posting.");
+        LOG.w(getName(), "Stop was called. Posting.");
         mWorker.post(new Runnable() {
             @Override
             public void run() {
-                LOG.i(getName(), "Stop was called. Executing.");
+                LOG.w(getName(), "Stop was called. Executing.");
                 onStop();
             }
         });

@@ -175,8 +175,9 @@ abstract class MediaEncoder {
      * parameters, might also be through an input buffer flag).
      */
     private void release() {
-        LOG.w("Subclass", getName(), "Notified that it is released.");
-        mController.requestRelease(mTrackIndex);
+        LOG.w(getName(), "is being released. Notifying controller and releasing codecs.");
+        // TODO should we notify after this method?
+        mController.notifyReleased(mTrackIndex);
         mMediaCodec.stop();
         mMediaCodec.release();
         mMediaCodec = null;

@@ -217,7 +218,7 @@ abstract class MediaEncoder {
     /**
      * Returns a new input buffer and index, waiting indefinitely if none is available.
-     * The buffer should be written into, then the index should be passed to {@link #encodeInputBuffer(InputBuffer)}.
+     * The buffer should be written into, then be passed to {@link #encodeInputBuffer(InputBuffer)}.
      *
      * @param holder the input buffer holder
      */

@@ -233,7 +234,7 @@ abstract class MediaEncoder {
      */
     @SuppressWarnings("WeakerAccess")
     protected void encodeInputBuffer(InputBuffer buffer) {
-        LOG.w("ENCODING:", getName(), "Buffer:", buffer.index, "Bytes:", buffer.length, "Presentation:", buffer.timestamp);
+        LOG.v(getName(), "ENCODING - Buffer:", buffer.index, "Bytes:", buffer.length, "Presentation:", buffer.timestamp);
         if (buffer.isEndOfStream) { // send EOS
             mMediaCodec.queueInputBuffer(buffer.index, 0, 0,
                     buffer.timestamp, MediaCodec.BUFFER_FLAG_END_OF_STREAM);

@@ -266,7 +267,7 @@ abstract class MediaEncoder {
     @SuppressLint("LogNotTimber")
     @SuppressWarnings("WeakerAccess")
     protected void drainOutput(boolean drainAll) {
-        LOG.w("DRAINING:", getName(), "EOS:", drainAll);
+        LOG.v(getName(), "DRAINING - EOS:", drainAll);
         if (mMediaCodec == null) {
             LOG.e("drain() was called before prepare() or after releasing.");
             return;

@@ -315,7 +316,7 @@ abstract class MediaEncoder {
                 // and should be used for offsets only.
                 // TODO find a better way, this causes sync issues. (+ note: this sends pts=0 at first)
                 // mBufferInfo.presentationTimeUs = mLastPresentationTimeUs - mStartPresentationTimeUs;
-                LOG.i("DRAINING:", getName(), "Dispatching write(). Presentation:", mBufferInfo.presentationTimeUs);
+                LOG.v(getName(), "DRAINING - About to write(). Presentation:", mBufferInfo.presentationTimeUs);
                 // TODO fix the mBufferInfo being the same, then implement delayed writing in Controller
                 // and remove the isStarted() check here.

@@ -336,17 +337,18 @@ abstract class MediaEncoder {
                     && !mMaxLengthReached
                     && mStartPresentationTimeUs != Long.MIN_VALUE
                     && mLastPresentationTimeUs - mStartPresentationTimeUs > mMaxLengthMillis * 1000) {
-                LOG.w("DRAINING: Reached maxLength! mLastPresentationTimeUs:", mLastPresentationTimeUs,
+                LOG.w(getName(), "DRAINING - Reached maxLength! mLastPresentationTimeUs:", mLastPresentationTimeUs,
                         "mStartPresentationTimeUs:", mStartPresentationTimeUs,
                         "mMaxLengthUs:", mMaxLengthMillis * 1000);
                 mMaxLengthReached = true;
+                LOG.w(getName(), "DRAINING - Requesting a stop.");
                 mController.requestStop(mTrackIndex);
                 break;
             }
             // Check for the EOS flag so we can release the encoder.
             if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
-                LOG.w("DRAINING:", getName(), "Dispatching release().");
+                LOG.w(getName(), "DRAINING - Got EOS. Releasing the codec.");
                 release();
                 break;
             }
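
drainOutput() follows the standard MediaCodec pull loop: dequeue output buffers until none is ready, hand them to the muxer, and release the codec once the end-of-stream flag shows up. A bare-bones sketch of that loop is shown below; it is generic MediaCodec usage, not the library's exact implementation, and the OutputSink interface is a placeholder for the muxer plumbing.

    import android.media.MediaCodec;
    import java.nio.ByteBuffer;

    class DrainSketch {
        /** Pulls every ready output buffer out of the codec; returns true once EOS was seen. */
        static boolean drain(MediaCodec codec, MediaCodec.BufferInfo info, OutputSink sink) {
            while (true) {
                int index = codec.dequeueOutputBuffer(info, 0);
                if (index == MediaCodec.INFO_TRY_AGAIN_LATER) {
                    return false; // nothing ready right now
                } else if (index == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                    sink.onFormat(codec.getOutputFormat()); // e.g. add the muxer track here
                } else if (index >= 0) {
                    ByteBuffer data = codec.getOutputBuffer(index);
                    if (info.size > 0 && (info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) == 0) {
                        sink.onData(data, info); // e.g. MediaMuxer.writeSampleData(...)
                    }
                    codec.releaseOutputBuffer(index, false);
                    if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                        return true; // the encoder is done, safe to stop and release it
                    }
                }
            }
        }

        interface OutputSink {
            void onFormat(android.media.MediaFormat format);
            void onData(ByteBuffer data, MediaCodec.BufferInfo info);
        }
    }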

cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/MediaEncoderEngine.java

@@ -26,34 +26,40 @@ public class MediaEncoderEngine {
      */
     public interface Listener {

+        /**
+         * Called when encoding started.
+         */
+        @EncoderThread
+        void onEncodingStart();

         /**
          * Called when encoding stopped for some reason.
          * If there's an exception, it failed.
-         * @param stopReason the reason
+         * @param reason the reason
          * @param e the error, if present
         */
         @EncoderThread
-        void onEncoderStop(int stopReason, @Nullable Exception e);
+        void onEncodingEnd(int reason, @Nullable Exception e);
     }

     private final static String TAG = MediaEncoderEngine.class.getSimpleName();
     private final static CameraLogger LOG = CameraLogger.create(TAG);

     @SuppressWarnings("WeakerAccess")
-    public final static int STOP_BY_USER = 0;
-    public final static int STOP_BY_MAX_DURATION = 1;
-    public final static int STOP_BY_MAX_SIZE = 2;
+    public final static int END_BY_USER = 0;
+    public final static int END_BY_MAX_DURATION = 1;
+    public final static int END_BY_MAX_SIZE = 2;

     private ArrayList<MediaEncoder> mEncoders;
     private MediaMuxer mMediaMuxer;
     private int mStartedEncodersCount;
-    private int mStoppedEncodersCount;
+    private int mReleasedEncodersCount;
     private boolean mMediaMuxerStarted;
     @SuppressWarnings("FieldCanBeLocal")
     private Controller mController;
     private Listener mListener;
-    private int mStopReason = STOP_BY_USER;
-    private int mPossibleStopReason;
+    private int mEndReason = END_BY_USER;
+    private int mPossibleEndReason;
     private final Object mControllerLock = new Object();

     /**

@@ -87,7 +93,7 @@ public class MediaEncoderEngine {
         }
         mStartedEncodersCount = 0;
         mMediaMuxerStarted = false;
-        mStoppedEncodersCount = 0;
+        mReleasedEncodersCount = 0;

         // Trying to convert the size constraints to duration constraints,
         // because they are super easy to check.

@@ -101,13 +107,13 @@ public class MediaEncoderEngine {
         long finalMaxDuration = Long.MAX_VALUE;
         if (maxSize > 0 && maxDuration > 0) {
-            mPossibleStopReason = sizeMaxDuration < maxDuration ? STOP_BY_MAX_SIZE : STOP_BY_MAX_DURATION;
+            mPossibleEndReason = sizeMaxDuration < maxDuration ? END_BY_MAX_SIZE : END_BY_MAX_DURATION;
             finalMaxDuration = Math.min(sizeMaxDuration, maxDuration);
         } else if (maxSize > 0) {
-            mPossibleStopReason = STOP_BY_MAX_SIZE;
+            mPossibleEndReason = END_BY_MAX_SIZE;
             finalMaxDuration = sizeMaxDuration;
         } else if (maxDuration > 0) {
-            mPossibleStopReason = STOP_BY_MAX_DURATION;
+            mPossibleEndReason = END_BY_MAX_DURATION;
             finalMaxDuration = maxDuration;
         }
         LOG.w("Computed a max duration of", (finalMaxDuration / 1000F));

@@ -120,6 +126,7 @@ public class MediaEncoderEngine {
      * Asks encoders to start (each one on its own track).
      */
     public final void start() {
+        LOG.i("Passing event to encoders:", "START");
         for (MediaEncoder encoder : mEncoders) {
             encoder.start();
         }

@@ -133,6 +140,7 @@ public class MediaEncoderEngine {
      */
     @SuppressWarnings("SameParameterValue")
     public final void notify(final String event, final Object data) {
+        LOG.i("Passing event to encoders:", event);
         for (MediaEncoder encoder : mEncoders) {
             encoder.notify(event, data);
         }

@@ -140,21 +148,23 @@ public class MediaEncoderEngine {
     /**
      * Asks encoders to stop. This is not sync, of course we will ask for encoders
-     * to call {@link Controller#requestRelease(int)} before actually stop the muxer.
+     * to call {@link Controller#notifyReleased(int)} before actually stop the muxer.
      * When all encoders request a release, {@link #release()} is called to do cleanup
      * and notify the listener.
      */
     public final void stop() {
+        LOG.i("Passing event to encoders:", "STOP");
         for (MediaEncoder encoder : mEncoders) {
             encoder.stop();
         }
     }

     /**
-     * Called after all encoders have requested a release using {@link Controller#requestRelease(int)}.
+     * Called after all encoders have requested a release using {@link Controller#notifyReleased(int)}.
      * At this point we will do cleanup and notify the listener.
      */
     private void release() {
+        LOG.i("release:", "Releasing muxer after all encoders have been released.");
         Exception error = null;
         if (mMediaMuxer != null) {
             // stop() throws an exception if you haven't fed it any data.

@@ -168,14 +178,16 @@ public class MediaEncoderEngine {
             }
             mMediaMuxer = null;
         }
+        LOG.w("release:", "Dispatching end to listener - reason:", mEndReason, "error:", error);
         if (mListener != null) {
-            mListener.onEncoderStop(mStopReason, error);
+            mListener.onEncodingEnd(mEndReason, error);
             mListener = null;
         }
-        mStopReason = STOP_BY_USER;
+        mEndReason = END_BY_USER;
         mStartedEncodersCount = 0;
-        mStoppedEncodersCount = 0;
+        mReleasedEncodersCount = 0;
         mMediaMuxerStarted = false;
+        LOG.i("release:", "Completed.");
     }

     /**

@@ -219,10 +231,14 @@ public class MediaEncoderEngine {
             throw new IllegalStateException("Trying to start but muxer started already");
         }
         int track = mMediaMuxer.addTrack(format);
-        LOG.w("Controller:", "Assigned track", track, "to format", format.getString(MediaFormat.KEY_MIME));
+        LOG.w("requestStart:", "Assigned track", track, "to format", format.getString(MediaFormat.KEY_MIME));
         if (++mStartedEncodersCount == mEncoders.size()) {
+            LOG.w("requestStart:", "All encoders have started. Starting muxer and dispatching onEncodingStart().");
             mMediaMuxer.start();
             mMediaMuxerStarted = true;
+            if (mListener != null) {
+                mListener.onEncodingStart();
+            }
         }
         return track;
     }

@@ -251,7 +267,7 @@ public class MediaEncoderEngine {
         // This is a bad idea and causes crashes.
         // if (info.presentationTimeUs < mLastTimestampUs) info.presentationTimeUs = mLastTimestampUs;
         // mLastTimestampUs = info.presentationTimeUs;
-        LOG.v("Writing for track", buffer.trackIndex, ". Presentation:", buffer.info.presentationTimeUs);
+        LOG.v("write:", "Writing OutputBuffer - track:", buffer.trackIndex, "presentation:", buffer.info.presentationTimeUs);
         mMediaMuxer.writeSampleData(buffer.trackIndex, buffer.data, buffer.info);
         pool.recycle(buffer);
     }

@@ -264,10 +280,11 @@ public class MediaEncoderEngine {
      * When this succeeds, {@link MediaEncoder#stop()} is called.
      */
     void requestStop(int track) {
-        LOG.i("RequestStop was called for track", track);
         synchronized (mControllerLock) {
+            LOG.w("requestStop:", "Called for track", track);
             if (--mStartedEncodersCount == 0) {
-                mStopReason = mPossibleStopReason;
+                LOG.w("requestStop:", "All encoders have requested a stop. Stopping them.");
+                mEndReason = mPossibleEndReason;
                 stop();
             }
         }

@@ -277,10 +294,11 @@ public class MediaEncoderEngine {
      * Notifies that the encoder was stopped. After this is called by all encoders,
      * we will actually stop the muxer.
      */
-    void requestRelease(int track) {
-        LOG.i("requestRelease was called for track", track);
+    void notifyReleased(int track) {
         synchronized (mControllerLock) {
-            if (++mStoppedEncodersCount == mEncoders.size()) {
+            LOG.w("notifyReleased:", "Called for track", track);
+            if (++mReleasedEncodersCount == mEncoders.size()) {
+                LOG.w("requestStop:", "All encoders have been released. Stopping the muxer.");
                 release();
             }
         }
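
The Controller's bookkeeping is essentially two synchronized counters: the muxer starts only after every encoder has added its track, and it is torn down only after every encoder has been released. A stripped-down sketch of that coordination pattern follows; it is illustrative only, not the library's Controller class.

    // Illustrative coordination pattern: start when all have started, tear down when all have released.
    class EncoderCoordinator {
        private final Object lock = new Object();
        private final int encoderCount;
        private int started;
        private int released;
        private final Runnable startMuxer;
        private final Runnable releaseMuxer;

        EncoderCoordinator(int encoderCount, Runnable startMuxer, Runnable releaseMuxer) {
            this.encoderCount = encoderCount;
            this.startMuxer = startMuxer;
            this.releaseMuxer = releaseMuxer;
        }

        /** Called by each encoder once its track was added (like Controller.requestStart). */
        void onEncoderStarted() {
            synchronized (lock) {
                if (++started == encoderCount) startMuxer.run(); // last one in starts the muxer
            }
        }

        /** Called by each encoder after it released its codec (like Controller.notifyReleased). */
        void onEncoderReleased() {
            synchronized (lock) {
                if (++released == encoderCount) releaseMuxer.run(); // last one out stops the muxer
            }
        }
    }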
