Improve snapshot recording (#374)

* Timestamp changes

* Revisit Audio encoding, create object pools

* Use a Pool for float[] arrays

* Remove unused audioBitRate from audio encoder

* Fix demo app video duration

* Correctly release pools

* Restore output bitrate

* Release textureFrame pool
Mattia Iavarone committed 6 years ago (parent aec17d3e49, commit 7411614433)
Changed files (changed line counts in parentheses):

  1. cameraview/src/main/gles/com/otaliastudios/cameraview/AudioMediaEncoder.java (279)
  2. cameraview/src/main/gles/com/otaliastudios/cameraview/ByteBufferPool.java (15)
  3. cameraview/src/main/gles/com/otaliastudios/cameraview/EglBaseSurface.java (1)
  4. cameraview/src/main/gles/com/otaliastudios/cameraview/EglCore.java (1)
  5. cameraview/src/main/gles/com/otaliastudios/cameraview/InputBuffer.java (12)
  6. cameraview/src/main/gles/com/otaliastudios/cameraview/InputBufferPool.java (15)
  7. cameraview/src/main/gles/com/otaliastudios/cameraview/MediaCodecBuffers.java (50)
  8. cameraview/src/main/gles/com/otaliastudios/cameraview/MediaEncoder.java (297)
  9. cameraview/src/main/gles/com/otaliastudios/cameraview/MediaEncoderEngine.java (143)
 10. cameraview/src/main/gles/com/otaliastudios/cameraview/OutputBuffer.java (11)
 11. cameraview/src/main/gles/com/otaliastudios/cameraview/OutputBufferPool.java (18)
 12. cameraview/src/main/gles/com/otaliastudios/cameraview/Pool.java (89)
 13. cameraview/src/main/gles/com/otaliastudios/cameraview/TextureMediaEncoder.java (106)
 14. cameraview/src/main/gles/com/otaliastudios/cameraview/VideoMediaEncoder.java (19)
 15. cameraview/src/main/java/com/otaliastudios/cameraview/SnapshotVideoRecorder.java (25)
 16. cameraview/src/main/java/com/otaliastudios/cameraview/VideoRecorder.java (1)
 17. cameraview/src/main/utils/com/otaliastudios/cameraview/WorkerHandler.java (10)
 18. demo/src/main/java/com/otaliastudios/cameraview/demo/CameraActivity.java (1)

@@ -1,34 +1,64 @@
 package com.otaliastudios.cameraview;
 
+import android.annotation.SuppressLint;
 import android.media.AudioFormat;
 import android.media.AudioRecord;
+import android.media.AudioTimestamp;
 import android.media.MediaCodec;
 import android.media.MediaCodecInfo;
 import android.media.MediaFormat;
 import android.media.MediaRecorder;
 import android.os.Build;
+import android.os.Handler;
+import android.os.Message;
+import android.util.Log;
 
 import androidx.annotation.NonNull;
 import androidx.annotation.Nullable;
 import androidx.annotation.RequiresApi;
 
 import java.io.IOException;
 import java.nio.ByteBuffer;
+import java.util.concurrent.LinkedBlockingQueue;
 
+// TODO create onVideoRecordingStart/onVideoRecordingEnd callbacks
 @RequiresApi(api = Build.VERSION_CODES.JELLY_BEAN_MR2)
 class AudioMediaEncoder extends MediaEncoder {
 
+    private static final String TAG = AudioMediaEncoder.class.getSimpleName();
+    private static final CameraLogger LOG = CameraLogger.create(TAG);
+
     private static final String MIME_TYPE = "audio/mp4a-latm";
-    private static final int SAMPLE_RATE = 44100; // 44.1[KHz] is only setting guaranteed to be available on all devices.
-    public static final int SAMPLES_PER_FRAME = 1024; // AAC, bytes/frame/channel
-    public static final int FRAMES_PER_BUFFER = 25; // AAC, frame/buffer/sec
+    private static final int ENCODING = AudioFormat.ENCODING_PCM_16BIT; // Determines the SAMPLE_SIZE
+    private static final int CHANNELS = AudioFormat.CHANNEL_IN_MONO; // AudioFormat.CHANNEL_IN_STEREO;
+
+    // The 44.1KHz frequency is the only setting guaranteed to be available on all devices.
+    private static final int SAMPLING_FREQUENCY = 44100; // samples/sec
+    private static final int CHANNELS_COUNT = 1; // 2;
+
+    private static final int SAMPLE_SIZE = 2; // byte/sample/channel
+    private static final int BYTE_RATE_PER_CHANNEL = SAMPLING_FREQUENCY * SAMPLE_SIZE; // byte/sec/channel
+    private static final int BYTE_RATE = BYTE_RATE_PER_CHANNEL * CHANNELS_COUNT; // byte/sec
+    static final int BIT_RATE = BYTE_RATE * 8; // bit/sec
+
+    // We call FRAME here the chunk of data that we want to read at each loop cycle
+    private static final int FRAME_SIZE_PER_CHANNEL = 1024; // bytes/frame/channel [AAC constant]
+    private static final int FRAME_SIZE = FRAME_SIZE_PER_CHANNEL * CHANNELS_COUNT; // bytes/frame
+
+    // We allocate buffers of 1KB each, which is not so much. I would say that allocating
+    // at most 200 of them is a reasonable value. With the current setup, in device tests,
+    // we manage to use 50 at most.
+    private static final int BUFFER_POOL_MAX_SIZE = 200;
 
-    private final Object mLock = new Object();
     private boolean mRequestStop = false;
+    private AudioEncodingHandler mEncoder;
+    private AudioRecordingThread mRecorder;
+    private ByteBufferPool mByteBufferPool;
 
     private Config mConfig;
     static class Config {
         int bitRate;
         Config(int bitRate) {
             this.bitRate = bitRate;
         }
@@ -38,15 +68,20 @@ class AudioMediaEncoder extends MediaEncoder {
         mConfig = config;
     }
 
+    @NonNull
+    @Override
+    String getName() {
+        return "AudioEncoder";
+    }
+
     @EncoderThread
     @Override
-    void prepare(@NonNull MediaEncoderEngine.Controller controller, long maxLengthMillis) {
-        super.prepare(controller, maxLengthMillis);
-        final MediaFormat audioFormat = MediaFormat.createAudioFormat(MIME_TYPE, SAMPLE_RATE, 1);
+    void onPrepare(@NonNull MediaEncoderEngine.Controller controller, long maxLengthMillis) {
+        final MediaFormat audioFormat = MediaFormat.createAudioFormat(MIME_TYPE, SAMPLING_FREQUENCY, CHANNELS_COUNT);
         audioFormat.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC);
-        audioFormat.setInteger(MediaFormat.KEY_CHANNEL_MASK, AudioFormat.CHANNEL_IN_MONO);
+        audioFormat.setInteger(MediaFormat.KEY_CHANNEL_MASK, CHANNELS);
         audioFormat.setInteger(MediaFormat.KEY_BIT_RATE, mConfig.bitRate);
-        audioFormat.setInteger(MediaFormat.KEY_CHANNEL_COUNT, 1);
+        audioFormat.setInteger(MediaFormat.KEY_CHANNEL_COUNT, CHANNELS_COUNT);
         try {
             mMediaCodec = MediaCodec.createEncoderByType(MIME_TYPE);
         } catch (IOException e) {
@@ -54,86 +89,228 @@ class AudioMediaEncoder extends MediaEncoder {
         }
         mMediaCodec.configure(audioFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
         mMediaCodec.start();
+        mByteBufferPool = new ByteBufferPool(FRAME_SIZE, BUFFER_POOL_MAX_SIZE);
+        mEncoder = new AudioEncodingHandler();
+        mRecorder = new AudioRecordingThread();
     }
 
     @EncoderThread
     @Override
-    void start() {
+    void onStart() {
         mRequestStop = false;
-        new AudioThread().start();
+        mRecorder.start();
     }
 
     @EncoderThread
     @Override
-    void notify(@NonNull String event, @Nullable Object data) { }
+    void onEvent(@NonNull String event, @Nullable Object data) { }
 
     @EncoderThread
     @Override
-    void stop() {
+    void onStop() {
         mRequestStop = true;
-        synchronized (mLock) {
-            try {
-                mLock.wait();
-            } catch (InterruptedException e) {
-                // do nothing
-            }
-        }
     }
 
     @Override
-    void release() {
-        super.release();
+    void onRelease() {
         mRequestStop = false;
+        mEncoder = null;
+        mRecorder = null;
+        if (mByteBufferPool != null) {
+            mByteBufferPool.clear();
+            mByteBufferPool = null;
+        }
+    }
+
+    @Override
+    int getEncodedBitRate() {
+        return mConfig.bitRate;
     }
 
-    class AudioThread extends Thread {
+    class AudioRecordingThread extends Thread {
 
         private AudioRecord mAudioRecord;
+        private ByteBuffer mCurrentBuffer;
+        private int mReadBytes;
+        private long mLastTimeUs;
 
-        AudioThread() {
-            final int minBufferSize = AudioRecord.getMinBufferSize(
-                    SAMPLE_RATE, AudioFormat.CHANNEL_IN_MONO,
-                    AudioFormat.ENCODING_PCM_16BIT);
-            int bufferSize = SAMPLES_PER_FRAME * FRAMES_PER_BUFFER;
-            if (bufferSize < minBufferSize) {
-                bufferSize = ((minBufferSize / SAMPLES_PER_FRAME) + 1) * SAMPLES_PER_FRAME * 2;
+        AudioRecordingThread() {
+            final int minBufferSize = AudioRecord.getMinBufferSize(SAMPLING_FREQUENCY, CHANNELS, ENCODING);
+            int bufferSize = FRAME_SIZE * 25; // Make this bigger so we don't skip frames.
+            while (bufferSize < minBufferSize) {
+                bufferSize += FRAME_SIZE; // Unlikely I think.
             }
-            mAudioRecord = new AudioRecord(MediaRecorder.AudioSource.CAMCORDER, SAMPLE_RATE,
-                    AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, bufferSize);
+            mAudioRecord = new AudioRecord(MediaRecorder.AudioSource.CAMCORDER,
+                    SAMPLING_FREQUENCY, CHANNELS, ENCODING, bufferSize);
+            setPriority(Thread.MAX_PRIORITY);
         }
 
         @Override
         public void run() {
-            super.run();
+            mLastTimeUs = System.nanoTime() / 1000L;
             mAudioRecord.startRecording();
-            final ByteBuffer buffer = ByteBuffer.allocateDirect(SAMPLES_PER_FRAME);
-            int readBytes;
             while (!mRequestStop) {
-                buffer.clear();
-                readBytes = mAudioRecord.read(buffer, SAMPLES_PER_FRAME);
-                if (readBytes > 0) {
-                    // set audio data to encoder
-                    buffer.position(readBytes);
-                    buffer.flip();
-                    encode(buffer, readBytes, getPresentationTime());
-                    drain(false);
-                }
+                read(false);
             }
-            // This will signal the endOfStream.
+            LOG.w("RECORDER: Stop was requested. We're out of the loop. Will post an endOfStream.");
+            // Last input with 0 length. This will signal the endOfStream.
             // Can't use drain(true); it is only available when writing to the codec InputSurface.
-            encode(null, 0, getPresentationTime());
-            drain(false);
+            read(true);
             mAudioRecord.stop();
             mAudioRecord.release();
             mAudioRecord = null;
-            synchronized (mLock) {
-                mLock.notify();
-            }
         }
+
+        private void read(boolean endOfStream) {
+            mCurrentBuffer = mByteBufferPool.get();
+            if (mCurrentBuffer == null) {
+                LOG.e("Skipping audio frame, encoding is too slow.");
+                // TODO should fix the next presentation time here. However this is
+                // extremely unlikely based on my tests. The mByteBufferPool should be big enough.
+            } else {
+                mCurrentBuffer.clear();
+                mReadBytes = mAudioRecord.read(mCurrentBuffer, FRAME_SIZE);
+                if (mReadBytes > 0) { // Good read: increase PTS.
+                    increaseTime(mReadBytes);
+                    mCurrentBuffer.limit(mReadBytes);
+                    onBuffer(endOfStream);
+                } else if (mReadBytes == AudioRecord.ERROR_INVALID_OPERATION) {
+                    LOG.e("Got AudioRecord.ERROR_INVALID_OPERATION");
+                } else if (mReadBytes == AudioRecord.ERROR_BAD_VALUE) {
+                    LOG.e("Got AudioRecord.ERROR_BAD_VALUE");
+                }
+            }
+        }
+
+        /**
+         * New data at position buffer.position() of size buffer.remaining()
+         * has been written into this buffer. This method should pass the data
+         * to the consumer.
+         */
+        private void onBuffer(boolean endOfStream) {
+            mEncoder.sendInputBuffer(mCurrentBuffer, mLastTimeUs, endOfStream);
+        }
+
+        private void increaseTime(int readBytes) {
+            increaseTime3(readBytes);
+            LOG.v("Read", readBytes, "bytes, increasing PTS to", mLastTimeUs);
+        }
+
+        /**
+         * This method simply assumes that we read everything without losing a single US.
+         * It will use System.nanoTime() just once, as the starting point.
+         * Of course we don't, as there are things going on in this thread.
+         */
+        private void increaseTime1(int readBytes) {
+            mLastTimeUs += (1000000L * readBytes) / BYTE_RATE;
+        }
+
+        /**
+         * Just for testing, this method will use the API 24 method to retrieve the timestamp.
+         * This way we let the platform choose instead of making assumptions.
+         */
+        @RequiresApi(24)
+        private void increaseTime2(int readBytes) {
+            if (mApi24Timestamp == null) {
+                mApi24Timestamp = new AudioTimestamp();
+            }
+            mAudioRecord.getTimestamp(mApi24Timestamp, AudioTimestamp.TIMEBASE_MONOTONIC);
+            mLastTimeUs = mApi24Timestamp.nanoTime / 1000;
+        }
+        private AudioTimestamp mApi24Timestamp;
+
+        /**
+         * This method looks like an improvement over {@link #increaseTime1(int)} as it
+         * accounts for the current time as well. Adapted & improved from Kickflip.
+         */
+        private void increaseTime3(int readBytes) {
+            long currentTime = System.nanoTime() / 1000;
+            long correctedTime;
+            long bufferDuration = (1000000 * readBytes) / BYTE_RATE;
+            long bufferTime = currentTime - bufferDuration; // delay of acquiring the audio buffer
+            if (mTotalReadBytes == 0) {
+                mStartTimeUs = bufferTime;
+            }
+            // Recompute time assuming that we are respecting the sampling frequency.
+            // However, if the correction is too big (> 2*bufferDuration), reset to this point.
+            correctedTime = mStartTimeUs + (1000000 * mTotalReadBytes) / BYTE_RATE;
+            if (bufferTime - correctedTime >= 2 * bufferDuration) {
+                mStartTimeUs = bufferTime;
+                mTotalReadBytes = 0;
+                correctedTime = mStartTimeUs;
+            }
+            mTotalReadBytes += readBytes;
+            mLastTimeUs = correctedTime;
+        }
+        private long mStartTimeUs;
+        private long mTotalReadBytes;
     }
 
-    @Override
-    int getBitRate() {
-        return mConfig.bitRate;
+    /**
+     * This will be a super busy thread. It's important for it to be:
+     * - different than the recording thread: or we would miss a lot of audio
+     * - different than the 'encoder' thread: we want that to be reactive.
+     *   For example, a stop() must become onStop() soon, can't wait for all this draining.
+     */
+    @SuppressLint("HandlerLeak")
+    class AudioEncodingHandler extends Handler {
+
+        InputBufferPool mInputBufferPool = new InputBufferPool();
+        LinkedBlockingQueue<InputBuffer> mPendingOps = new LinkedBlockingQueue<>();
+
+        AudioEncodingHandler() {
+            super(WorkerHandler.get("AudioEncodingHandler").getLooper());
+        }
+
+        void sendInputBuffer(ByteBuffer buffer, long presentationTimeUs, boolean endOfStream) {
+            int presentation1 = (int) (presentationTimeUs >> 32);
+            int presentation2 = (int) (presentationTimeUs);
+            sendMessage(obtainMessage(endOfStream ? 1 : 0, presentation1, presentation2, buffer));
+        }
+
+        @Override
+        public void handleMessage(Message msg) {
+            super.handleMessage(msg);
+            boolean endOfStream = msg.what == 1;
+            long timestamp = (((long) msg.arg1) << 32) | (((long) msg.arg2) & 0xffffffffL);
+            ByteBuffer buffer = (ByteBuffer) msg.obj;
+            int readBytes = buffer.remaining();
+            InputBuffer inputBuffer = mInputBufferPool.get();
+            inputBuffer.source = buffer;
+            inputBuffer.timestamp = timestamp;
+            inputBuffer.length = readBytes;
+            inputBuffer.isEndOfStream = endOfStream;
+            mPendingOps.add(inputBuffer);
+            performPendingOps(endOfStream);
+        }
+
+        private void performPendingOps(boolean force) {
+            LOG.v("Performing", mPendingOps.size(), "Pending operations.");
+            InputBuffer buffer;
+            while ((buffer = mPendingOps.peek()) != null) {
+                if (force) {
+                    acquireInputBuffer(buffer);
+                    performPendingOp(buffer);
+                } else if (tryAcquireInputBuffer(buffer)) {
+                    performPendingOp(buffer);
+                } else {
+                    break; // Will try later.
+                }
+            }
+        }
+
+        private void performPendingOp(InputBuffer buffer) {
+            buffer.data.put(buffer.source);
+            mByteBufferPool.recycle(buffer.source);
+            mPendingOps.remove(buffer);
+            encodeInputBuffer(buffer);
+            boolean eos = buffer.isEndOfStream;
+            mInputBufferPool.recycle(buffer);
+            drainOutput(eos);
+            if (eos) {
+                mInputBufferPool.clear();
+                WorkerHandler.get("AudioEncodingHandler").getThread().interrupt();
+            }
+        }
     }
 }
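
A note on the three increaseTime variants above: increaseTime3 is the one actually invoked. Instead of trusting System.nanoTime() at every read (jittery) or only once (drifts), it recomputes each PTS from the total bytes read at the nominal byte rate, and rebases only when real time runs ahead of the computed time by more than two buffer durations. A standalone sketch of the same idea, with hypothetical names (not library API), assuming the mono 16-bit setup above (BYTE_RATE = 88200 bytes/sec, so a 1024-byte frame spans roughly 11.6 ms):

    class AudioPtsCounter {
        private static final long BYTE_RATE = 44100 * 2; // mono, 16-bit PCM

        private long mStartUs;
        private long mTotalBytes;

        long nextPtsUs(int readBytes) {
            long nowUs = System.nanoTime() / 1000;
            long bufferDurationUs = (1000000L * readBytes) / BYTE_RATE; // ~11610us per 1024 bytes
            long bufferStartUs = nowUs - bufferDurationUs; // when this chunk began being captured
            if (mTotalBytes == 0) mStartUs = bufferStartUs;
            // Ideal PTS, assuming the sampling frequency was respected since mStartUs.
            long ptsUs = mStartUs + (1000000L * mTotalBytes) / BYTE_RATE;
            if (bufferStartUs - ptsUs >= 2 * bufferDurationUs) {
                // Real time drifted too far ahead (e.g. reads were skipped): rebase here.
                mStartUs = bufferStartUs;
                mTotalBytes = 0;
                ptsUs = mStartUs;
            }
            mTotalBytes += readBytes;
            return ptsUs;
        }
    }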

@@ -0,0 +1,15 @@
+package com.otaliastudios.cameraview;
+
+import java.nio.ByteBuffer;
+
+class ByteBufferPool extends Pool<ByteBuffer> {
+
+    ByteBufferPool(final int bufferSize, int maxPoolSize) {
+        super(maxPoolSize, new Factory<ByteBuffer>() {
+            @Override
+            public ByteBuffer create() {
+                return ByteBuffer.allocateDirect(bufferSize);
+            }
+        });
+    }
+}
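
A usage sketch for this pool, mirroring the recording thread above (the values are the FRAME_SIZE and BUFFER_POOL_MAX_SIZE constants from AudioMediaEncoder):

    ByteBufferPool pool = new ByteBufferPool(1024, 200);
    ByteBuffer buffer = pool.get();
    if (buffer == null) {
        // All 200 buffers are in flight: the consumer is too slow, drop this frame.
    } else {
        buffer.clear();
        // ... fill it, e.g. audioRecord.read(buffer, 1024), then hand it off;
        // the consumer calls pool.recycle(buffer) once the data has been copied out.
    }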

@@ -151,6 +151,7 @@ class EglBaseSurface extends EglElement {
     /**
      * Sends the presentation time stamp to EGL.
+     * https://www.khronos.org/registry/EGL/extensions/ANDROID/EGL_ANDROID_presentation_time.txt
      *
      * @param nsecs Timestamp, in nanoseconds.
      */

@@ -314,6 +314,7 @@ final class EglCore {
     /**
      * Sends the presentation time stamp to EGL. Time is expressed in nanoseconds.
+     * https://www.khronos.org/registry/EGL/extensions/ANDROID/EGL_ANDROID_presentation_time.txt
      */
     public void setPresentationTime(EGLSurface eglSurface, long nsecs) {
         EGLExt.eglPresentationTimeANDROID(mEGLDisplay, eglSurface, nsecs);

@@ -0,0 +1,12 @@
+package com.otaliastudios.cameraview;
+
+import java.nio.ByteBuffer;
+
+class InputBuffer {
+    ByteBuffer data;
+    ByteBuffer source;
+    int index;
+    int length;
+    long timestamp;
+    boolean isEndOfStream;
+}

@@ -0,0 +1,15 @@
+package com.otaliastudios.cameraview;
+
+import java.nio.ByteBuffer;
+
+class InputBufferPool extends Pool<InputBuffer> {
+
+    InputBufferPool() {
+        super(Integer.MAX_VALUE, new Factory<InputBuffer>() {
+            @Override
+            public InputBuffer create() {
+                return new InputBuffer();
+            }
+        });
+    }
+}

@@ -0,0 +1,50 @@
+package com.otaliastudios.cameraview;
+
+import android.media.MediaCodec;
+import android.os.Build;
+
+import java.nio.ByteBuffer;
+
+/**
+ * A wrapper around MediaCodec that facilitates the use of the API-dependent
+ * get{Input/Output}Buffer methods, in order to prevent: http://stackoverflow.com/q/30646885
+ */
+class MediaCodecBuffers {
+
+    private final MediaCodec mMediaCodec;
+    private final ByteBuffer[] mInputBuffers;
+    private ByteBuffer[] mOutputBuffers;
+
+    MediaCodecBuffers(MediaCodec mediaCodec) {
+        mMediaCodec = mediaCodec;
+        if (Build.VERSION.SDK_INT < 21) {
+            mInputBuffers = mediaCodec.getInputBuffers();
+            mOutputBuffers = mediaCodec.getOutputBuffers();
+        } else {
+            mInputBuffers = mOutputBuffers = null;
+        }
+    }
+
+    public ByteBuffer getInputBuffer(final int index) {
+        if (Build.VERSION.SDK_INT >= 21) {
+            return mMediaCodec.getInputBuffer(index);
+        }
+        ByteBuffer buffer = mInputBuffers[index];
+        buffer.clear();
+        return buffer;
+    }
+
+    public ByteBuffer getOutputBuffer(final int index) {
+        if (Build.VERSION.SDK_INT >= 21) {
+            return mMediaCodec.getOutputBuffer(index);
+        }
+        return mOutputBuffers[index];
+    }
+
+    public void onOutputBuffersChanged() {
+        if (Build.VERSION.SDK_INT < 21) {
+            mOutputBuffers = mMediaCodec.getOutputBuffers();
+        }
+    }
+}
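
A usage sketch for the wrapper (the helper method is hypothetical, not in the commit): on API 21+ it delegates to MediaCodec.getInputBuffer()/getOutputBuffer(), while on older API levels it serves the cached arrays and re-fetches them when onOutputBuffersChanged() is invoked.

    // Queue one chunk of raw data, or report that no input buffer is free right now.
    static boolean feed(MediaCodec codec, MediaCodecBuffers buffers, ByteBuffer chunk, long ptsUs) {
        int index = codec.dequeueInputBuffer(0); // 0 = don't block
        if (index < 0) return false;             // INFO_TRY_AGAIN_LATER
        ByteBuffer input = buffers.getInputBuffer(index); // cleared for us on API < 21
        input.put(chunk);
        codec.queueInputBuffer(index, 0, input.position(), ptsUs, 0);
        return true;
    }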

@@ -1,8 +1,10 @@
 package com.otaliastudios.cameraview;
 
+import android.annotation.SuppressLint;
 import android.media.MediaCodec;
 import android.media.MediaFormat;
 import android.os.Build;
 
 import androidx.annotation.NonNull;
 import androidx.annotation.Nullable;
 import androidx.annotation.RequiresApi;
@@ -14,17 +16,107 @@ import java.nio.ByteBuffer;
 @RequiresApi(api = Build.VERSION_CODES.JELLY_BEAN_MR2)
 abstract class MediaEncoder {
 
-    private final static int TIMEOUT_USEC = 10000; // 10 msec
+    private final static String TAG = MediaEncoder.class.getSimpleName();
+    private final static CameraLogger LOG = CameraLogger.create(TAG);
+
+    // Did some test to see which value would maximize our performance in the current setup (infinite audio pool).
+    // Measured the time it would take to write a 30 seconds video. Based on this, we'll go with TIMEOUT=0 for now.
+    // INPUT_TIMEOUT_US 10000: 46 seconds
+    // INPUT_TIMEOUT_US 1000: 37 seconds
+    // INPUT_TIMEOUT_US 100: 33 seconds
+    // INPUT_TIMEOUT_US 0: 32 seconds
+    private final static int INPUT_TIMEOUT_US = 0;
+
+    // 0 also seems to be the best, although it does not change so much.
+    // Can't go too high or this is a bottleneck for the audio encoder.
+    private final static int OUTPUT_TIMEOUT_US = 0;
 
     @SuppressWarnings("WeakerAccess")
     protected MediaCodec mMediaCodec;
-    private MediaCodec.BufferInfo mBufferInfo;
+    @SuppressWarnings("WeakerAccess")
+    protected WorkerHandler mWorker;
+
     private MediaEncoderEngine.Controller mController;
     private int mTrackIndex;
+    private OutputBufferPool mOutputBufferPool;
+    private MediaCodec.BufferInfo mBufferInfo;
+    private MediaCodecBuffers mBuffers;
     private long mMaxLengthMillis;
     private boolean mMaxLengthReached;
 
+    /**
+     * A readable name for the thread.
+     */
+    @NonNull
+    abstract String getName();
+
+    /**
+     * This encoder was attached to the engine. Keep the controller
+     * and run the internal thread.
+     */
+    final void prepare(@NonNull final MediaEncoderEngine.Controller controller, final long maxLengthMillis) {
+        mController = controller;
+        mBufferInfo = new MediaCodec.BufferInfo();
+        mMaxLengthMillis = maxLengthMillis;
+        mWorker = WorkerHandler.get(getName());
+        LOG.i(getName(), "Prepare was called. Posting.");
+        mWorker.post(new Runnable() {
+            @Override
+            public void run() {
+                LOG.i(getName(), "Prepare was called. Executing.");
+                onPrepare(controller, maxLengthMillis);
+            }
+        });
+    }
+
+    /**
+     * Start recording. This might be a lightweight operation
+     * in case the encoder needs to wait for a certain event
+     * like a "frame available".
+     */
+    final void start() {
+        LOG.i(getName(), "Start was called. Posting.");
+        mWorker.post(new Runnable() {
+            @Override
+            public void run() {
+                LOG.i(getName(), "Start was called. Executing.");
+                onStart();
+            }
+        });
+    }
+
+    /**
+     * The caller notifying of a certain event occurring.
+     * Should analyze the string and see if the event is important.
+     * @param event what happened
+     * @param data object
+     */
+    final void notify(final @NonNull String event, final @Nullable Object data) {
+        LOG.i(getName(), "Notify was called. Posting.");
+        mWorker.post(new Runnable() {
+            @Override
+            public void run() {
+                LOG.i(getName(), "Notify was called. Executing.");
+                onEvent(event, data);
+            }
+        });
+    }
+
+    /**
+     * Stop recording.
+     */
+    final void stop() {
+        LOG.i(getName(), "Stop was called. Posting.");
+        mWorker.post(new Runnable() {
+            @Override
+            public void run() {
+                LOG.i(getName(), "Stop was called. Executing.");
+                onStop();
+            }
+        });
+    }
+
     /**
      * Called to prepare this encoder before starting.
      * Any initialization should be done here as it does not interfere with the original
@@ -33,13 +125,10 @@ abstract class MediaEncoder {
      * At this point subclasses MUST create the {@link #mMediaCodec} object.
      *
      * @param controller the muxer controller
+     * @param maxLengthMillis the maxLength in millis
      */
     @EncoderThread
-    void prepare(@NonNull MediaEncoderEngine.Controller controller, long maxLengthMillis) {
-        mController = controller;
-        mBufferInfo = new MediaCodec.BufferInfo();
-        mMaxLengthMillis = maxLengthMillis;
-    }
+    abstract void onPrepare(@NonNull final MediaEncoderEngine.Controller controller, final long maxLengthMillis);
 
     /**
      * Start recording. This might be a lightweight operation
@@ -47,7 +136,7 @@ abstract class MediaEncoder {
      * like a "frame available".
      */
     @EncoderThread
-    abstract void start();
+    abstract void onStart();
 
     /**
      * The caller notifying of a certain event occurring.
@@ -56,97 +145,130 @@ abstract class MediaEncoder {
      * @param data object
      */
     @EncoderThread
-    abstract void notify(@NonNull String event, @Nullable Object data);
+    abstract void onEvent(@NonNull String event, @Nullable Object data);
 
     /**
      * Stop recording.
-     * This MUST happen SYNCHRONOUSLY!
      */
     @EncoderThread
-    abstract void stop();
+    abstract void onStop();
 
     /**
-     * Release resources here.
+     * Called by {@link #drainOutput(boolean)} when we get an EOS signal (not necessarily in the
+     * parameters, might also be through an input buffer flag).
      */
-    @EncoderThread
-    void release() {
-        if (mMediaCodec != null) {
+    private void release() {
+        LOG.w("Subclass", getName(), "Notified that it is released.");
+        mController.requestRelease(mTrackIndex);
         mMediaCodec.stop();
         mMediaCodec.release();
         mMediaCodec = null;
-        }
+        mOutputBufferPool.clear();
+        mOutputBufferPool = null;
+        mBuffers = null;
+        onRelease();
     }
 
     /**
+     * This is called when we are stopped.
+     * It is a good moment to release all resources, although the muxer
+     * might still be alive (we wait for the other Encoder, see Controller).
+     */
+    abstract void onRelease();
+
+    /**
+     * Returns a new input buffer and index, waiting at most {@link #INPUT_TIMEOUT_US} if none is available.
+     * Callers should check the boolean result - true if the buffer was filled.
+     */
+    @SuppressWarnings("WeakerAccess")
+    protected boolean tryAcquireInputBuffer(@NonNull InputBuffer holder) {
+        if (mBuffers == null) {
+            mBuffers = new MediaCodecBuffers(mMediaCodec);
+        }
+        int inputBufferIndex = mMediaCodec.dequeueInputBuffer(INPUT_TIMEOUT_US);
+        if (inputBufferIndex < 0) {
+            return false;
+        } else {
+            holder.index = inputBufferIndex;
+            holder.data = mBuffers.getInputBuffer(inputBufferIndex);
+            return true;
+        }
+    }
+
+    /**
+     * Returns a new input buffer and index, waiting indefinitely if none is available.
+     * The buffer should be written into, then the index should be passed to {@link #encodeInputBuffer(InputBuffer)}.
+     */
+    @SuppressWarnings({"StatementWithEmptyBody", "WeakerAccess"})
+    protected void acquireInputBuffer(@NonNull InputBuffer holder) {
+        while (!tryAcquireInputBuffer(holder)) {}
+    }
+
+    /**
      * Encode data into the {@link #mMediaCodec}.
      */
     @SuppressWarnings("WeakerAccess")
-    protected void encode(@Nullable final ByteBuffer buffer, final int length, final long presentationTimeUs) {
-        final ByteBuffer[] inputBuffers = mMediaCodec.getInputBuffers();
-        while (true) {
-            final int inputBufferIndex = mMediaCodec.dequeueInputBuffer(TIMEOUT_USEC);
-            if (inputBufferIndex >= 0) {
-                final ByteBuffer inputBuffer = inputBuffers[inputBufferIndex];
-                inputBuffer.clear();
-                if (buffer != null) {
-                    inputBuffer.put(buffer);
-                }
-                if (length <= 0) { // send EOS
-                    mMediaCodec.queueInputBuffer(inputBufferIndex, 0, 0,
-                            presentationTimeUs, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
-                } else {
-                    mMediaCodec.queueInputBuffer(inputBufferIndex, 0, length,
-                            presentationTimeUs, 0);
-                }
-                break;
-            } else if (inputBufferIndex == MediaCodec.INFO_TRY_AGAIN_LATER) {
-                // wait for MediaCodec encoder is ready to encode
-                // nothing to do here because MediaCodec#dequeueInputBuffer(TIMEOUT_USEC)
-                // will wait for maximum TIMEOUT_USEC(10msec) on each call
-            }
+    protected void encodeInputBuffer(InputBuffer buffer) {
+        LOG.w("ENCODING:", getName(), "Buffer:", buffer.index, "Bytes:", buffer.length, "Presentation:", buffer.timestamp);
+        if (buffer.isEndOfStream) { // send EOS
+            mMediaCodec.queueInputBuffer(buffer.index, 0, 0,
+                    buffer.timestamp, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
+        } else {
+            mMediaCodec.queueInputBuffer(buffer.index, 0, buffer.length,
+                    buffer.timestamp, 0);
         }
     }
 
     /**
+     * Signals the end of input stream. This is a Video only API, as in the normal case,
+     * we use input buffers to signal the end. In the video case, we don't have input buffers
+     * because we use an input surface instead.
+     */
+    @SuppressWarnings("WeakerAccess")
+    protected void signalEndOfInputStream() {
+        mMediaCodec.signalEndOfInputStream();
+    }
+
+    /**
      * Extracts all pending data that was written and encoded into {@link #mMediaCodec},
      * and forwards it to the muxer.
-     * <p>
-     * If endOfStream is not set, this returns when there is no more data to drain. If it
-     * is set, we send EOS to the encoder, and then iterate until we see EOS on the output.
-     * Calling this with endOfStream set should be done once, right before stopping the muxer.
+     *
+     * If drainAll is not set, this returns after TIMEOUT_USEC if there is no more data to drain.
+     * If drainAll is set, we wait until we see EOS on the output.
+     * Calling this with drainAll set should be done once, right before stopping the muxer.
      */
+    @SuppressLint("LogNotTimber")
     @SuppressWarnings("WeakerAccess")
-    protected void drain(boolean endOfStream) {
-        if (endOfStream) {
-            mMediaCodec.signalEndOfInputStream();
+    protected void drainOutput(boolean drainAll) {
+        LOG.w("DRAINING:", getName(), "EOS:", drainAll);
+        if (mMediaCodec == null) {
+            LOG.e("drain() was called before prepare() or after releasing.");
+            return;
+        }
+        if (mBuffers == null) {
+            mBuffers = new MediaCodecBuffers(mMediaCodec);
         }
-        ByteBuffer[] encoderOutputBuffers = mMediaCodec.getOutputBuffers();
         while (true) {
-            int encoderStatus = mMediaCodec.dequeueOutputBuffer(mBufferInfo, TIMEOUT_USEC);
+            int encoderStatus = mMediaCodec.dequeueOutputBuffer(mBufferInfo, OUTPUT_TIMEOUT_US);
             if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
                 // no output available yet
-                if (!endOfStream) break; // out of while
+                if (!drainAll) break; // out of while
             } else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                 // not expected for an encoder
-                encoderOutputBuffers = mMediaCodec.getOutputBuffers();
+                mBuffers.onOutputBuffersChanged();
             } else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                 // should happen before receiving buffers, and should only happen once
-                if (mController.isStarted()) throw new RuntimeException("format changed twice");
+                if (mController.isStarted()) throw new RuntimeException("MediaFormat changed twice.");
                 MediaFormat newFormat = mMediaCodec.getOutputFormat();
-
-                // now that we have the Magic Goodies, start the muxer
-                mTrackIndex = mController.start(newFormat);
+                mTrackIndex = mController.requestStart(newFormat);
+                mOutputBufferPool = new OutputBufferPool(mTrackIndex);
             } else if (encoderStatus < 0) {
-                Log.w("VideoMediaEncoder", "unexpected result from encoder.dequeueOutputBuffer: " + encoderStatus);
+                LOG.e("Unexpected result from dequeueOutputBuffer: " + encoderStatus);
                 // let's ignore it
             } else {
-                ByteBuffer encodedData = encoderOutputBuffers[encoderStatus];
-                if (encodedData == null) {
-                    throw new RuntimeException("encoderOutputBuffer " + encoderStatus + " was null");
-                }
+                ByteBuffer encodedData = mBuffers.getOutputBuffer(encoderStatus);
 
                 // Codec config means that config data was pulled out and fed to the muxer when we got
                 // the INFO_OUTPUT_FORMAT_CHANGED status. Ignore it.
@@ -155,41 +277,56 @@ abstract class MediaEncoder {
                 // adjust the ByteBuffer values to match BufferInfo (not needed?)
                 encodedData.position(mBufferInfo.offset);
                 encodedData.limit(mBufferInfo.offset + mBufferInfo.size);
-                mController.write(mTrackIndex, encodedData, mBufferInfo);
-                mLastPresentationTime = mBufferInfo.presentationTimeUs;
-                if (mStartPresentationTime == 0) {
-                    mStartPresentationTime = mLastPresentationTime;
+                // Store startPresentationTime and lastPresentationTime, useful for example to
+                // detect the mMaxLengthReached and stop recording.
+                if (mStartPresentationTimeUs == Long.MIN_VALUE) {
+                    mStartPresentationTimeUs = mBufferInfo.presentationTimeUs;
                 }
+                mLastPresentationTimeUs = mBufferInfo.presentationTimeUs;
+                // Pass presentation times as offsets with respect to the mStartPresentationTimeUs.
+                // This ensures consistency between audio pts (coming from System.nanoTime()) and
+                // video pts (coming from SurfaceTexture) both of which have no meaningful time-base
+                // and should be used for offsets only.
+                // TODO find a better way, this causes sync issues. (+ note: this sends pts=0 at first)
+                // mBufferInfo.presentationTimeUs = mLastPresentationTimeUs - mStartPresentationTimeUs;
+                LOG.i("DRAINING:", getName(), "Dispatching write(). Presentation:", mBufferInfo.presentationTimeUs);
+                // TODO fix the mBufferInfo being the same, then implement delayed writing in Controller
+                // and remove the isStarted() check here.
+                OutputBuffer buffer = mOutputBufferPool.get();
+                buffer.info = mBufferInfo;
+                buffer.trackIndex = mTrackIndex;
+                buffer.data = encodedData;
+                mController.write(mOutputBufferPool, buffer);
             }
             mMediaCodec.releaseOutputBuffer(encoderStatus, false);
-            if (!mMaxLengthReached) {
-                if (mLastPresentationTime / 1000 - mStartPresentationTime / 1000 > mMaxLengthMillis) {
-                    mMaxLengthReached = true;
-                    // Log.e("MediaEncoder", this.getClass().getSimpleName() + " requested stop at " + (mLastPresentationTime * 1000 * 1000));
-                    mController.requestStop();
-                    break;
-                }
+
+            // Check for the maxLength constraint (with appropriate conditions)
+            // Not needed if drainAll because we already were asked to stop
+            if (!drainAll
+                    && !mMaxLengthReached
+                    && mStartPresentationTimeUs != Long.MIN_VALUE
+                    && mLastPresentationTimeUs - mStartPresentationTimeUs > mMaxLengthMillis * 1000) {
+                LOG.w("DRAINING: Reached maxLength! mLastPresentationTimeUs:", mLastPresentationTimeUs,
+                        "mStartPresentationTimeUs:", mStartPresentationTimeUs,
+                        "mMaxLengthUs:", mMaxLengthMillis * 1000);
+                mMaxLengthReached = true;
+                mController.requestStop(mTrackIndex);
+                break;
             }
 
+            // Check for the EOS flag so we can release the encoder.
             if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
-                break; // out of while
+                LOG.w("DRAINING:", getName(), "Dispatching release().");
+                release();
+                break;
             }
         }
     }
 
-    private long mStartPresentationTime = 0;
-    private long mLastPresentationTime = 0;
+    private long mStartPresentationTimeUs = Long.MIN_VALUE;
+    private long mLastPresentationTimeUs = 0;
 
-    long getPresentationTime() {
-        long result = System.nanoTime() / 1000L;
-        // presentationTimeUs should be monotonic
-        // otherwise muxer fail to write
-        if (result < mLastPresentationTime) {
-            result = (mLastPresentationTime - result) + result;
-        }
-        return result;
-    }
-
-    abstract int getBitRate();
+    abstract int getEncodedBitRate();
 }
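
Putting the new template together: the engine calls the final prepare()/start()/notify()/stop() methods above, each of which posts to the encoder's own WorkerHandler thread; a subclass only implements the on* callbacks and pushes data through acquireInputBuffer()/encodeInputBuffer()/drainOutput(). A skeletal, hypothetical subclass (not part of the commit) to show the contract:

    class SilenceEncoder extends MediaEncoder {

        @NonNull
        @Override
        String getName() { return "SilenceEncoder"; }

        @EncoderThread
        @Override
        void onPrepare(@NonNull MediaEncoderEngine.Controller controller, long maxLengthMillis) {
            // Subclasses MUST create mMediaCodec here (createEncoderByType + configure + start).
        }

        @EncoderThread
        @Override
        void onStart() { /* start producing input */ }

        @EncoderThread
        @Override
        void onEvent(@NonNull String event, @Nullable Object data) {
            InputBuffer buffer = new InputBuffer(); // the audio encoder pools these instead
            acquireInputBuffer(buffer);   // spins until the codec frees an input buffer
            // ... write into buffer.data, set buffer.length and buffer.timestamp ...
            encodeInputBuffer(buffer);    // queues it into mMediaCodec
            drainOutput(false);           // forward any encoded output to the muxer
        }

        @EncoderThread
        @Override
        void onStop() {
            InputBuffer last = new InputBuffer();
            acquireInputBuffer(last);
            last.length = 0;
            last.isEndOfStream = true;    // queued with BUFFER_FLAG_END_OF_STREAM
            encodeInputBuffer(last);
            drainOutput(true);            // wait for EOS on the output; release() then runs
        }

        @Override
        void onRelease() { /* free subclass resources */ }

        @Override
        int getEncodedBitRate() { return 0; }
    }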

@@ -1,6 +1,5 @@
 package com.otaliastudios.cameraview;
 
-import android.media.MediaCodec;
 import android.media.MediaFormat;
 import android.media.MediaMuxer;
 import android.os.Build;
@@ -10,13 +9,12 @@ import androidx.annotation.RequiresApi;
 import java.io.File;
 import java.io.IOException;
-import java.nio.ByteBuffer;
 import java.util.ArrayList;
 
 @RequiresApi(api = Build.VERSION_CODES.JELLY_BEAN_MR2)
 class MediaEncoderEngine {
 
-    private final static String TAG = MediaEncoder.class.getSimpleName();
+    private final static String TAG = MediaEncoderEngine.class.getSimpleName();
     private final static CameraLogger LOG = CameraLogger.create(TAG);
 
     @SuppressWarnings("WeakerAccess")
@@ -24,20 +22,19 @@ class MediaEncoderEngine {
     final static int STOP_BY_MAX_DURATION = 1;
     final static int STOP_BY_MAX_SIZE = 2;
 
-    private WorkerHandler mWorker;
     private ArrayList<MediaEncoder> mEncoders;
     private MediaMuxer mMediaMuxer;
-    private int mMediaMuxerStartCount;
+    private int mStartedEncodersCount;
+    private int mStoppedEncodersCount;
     private boolean mMediaMuxerStarted;
     private Controller mController;
     private Listener mListener;
     private int mStopReason = STOP_BY_USER;
     private int mPossibleStopReason;
-    private final Object mLock = new Object();
+    private final Object mControllerLock = new Object();
 
     MediaEncoderEngine(@NonNull File file, @NonNull VideoMediaEncoder videoEncoder, @Nullable AudioMediaEncoder audioEncoder,
                        final int maxDuration, final long maxSize, @Nullable Listener listener) {
-        mWorker = WorkerHandler.get("EncoderEngine");
         mListener = listener;
         mController = new Controller();
         mEncoders = new ArrayList<>();
@@ -50,17 +47,16 @@ class MediaEncoderEngine {
         } catch (IOException e) {
             throw new RuntimeException(e);
         }
-        mMediaMuxerStartCount = 0;
+        mStartedEncodersCount = 0;
         mMediaMuxerStarted = false;
-        mWorker.post(new Runnable() {
-            @Override
-            public void run() {
+        mStoppedEncodersCount = 0;
         // Trying to convert the size constraints to duration constraints,
         // because they are super easy to check.
         // This is really naive & probably not accurate, but...
         int bitRate = 0;
         for (MediaEncoder encoder : mEncoders) {
-            bitRate += encoder.getBitRate();
+            bitRate += encoder.getEncodedBitRate();
         }
         int bytePerSecond = bitRate / 8;
         long sizeMaxDuration = (maxSize / bytePerSecond) * 1000L;
@@ -76,25 +72,29 @@ class MediaEncoderEngine {
             mPossibleStopReason = STOP_BY_MAX_DURATION;
             finalMaxDuration = maxDuration;
         }
-        LOG.i("Computed a max duration of", (finalMaxDuration / 1000F));
+        LOG.w("Computed a max duration of", (finalMaxDuration / 1000F));
         for (MediaEncoder encoder : mEncoders) {
             encoder.prepare(mController, finalMaxDuration);
         }
-            }
-        });
     }
 
     // Stuff here might be called from multiple threads.
     class Controller {
 
-        int start(MediaFormat format) {
-            synchronized (mLock) {
+        /**
+         * Request that the muxer should start. This is not guaranteed to be executed:
+         * we wait for all encoders to call this method, and only then, start the muxer.
+         * @param format the media format
+         * @return the encoder track index
+         */
+        int requestStart(MediaFormat format) {
+            synchronized (mControllerLock) {
                 if (mMediaMuxerStarted) {
                     throw new IllegalStateException("Trying to start but muxer started already");
                 }
                 int track = mMediaMuxer.addTrack(format);
-                mMediaMuxerStartCount++;
-                if (mMediaMuxerStartCount == mEncoders.size()) {
+                LOG.w("Controller:", "Assigned track", track, "to format", format.getString(MediaFormat.KEY_MIME));
+                if (++mStartedEncodersCount == mEncoders.size()) {
                     mMediaMuxer.start();
                     mMediaMuxerStarted = true;
                 }
@@ -102,63 +102,89 @@ class MediaEncoderEngine {
             }
         }
 
+        /**
+         * Whether the muxer is started.
+         * @return true if muxer was started
+         */
         boolean isStarted() {
-            synchronized (mLock) {
+            synchronized (mControllerLock) {
                 return mMediaMuxerStarted;
             }
         }
 
-        // Synchronization does not seem needed here.
-        void write(int track, ByteBuffer encodedData, MediaCodec.BufferInfo info) {
+        /**
+         * Writes the given data to the muxer. Should be called after {@link #isStarted()}
+         * returns true. Note: this seems to be thread safe, no lock.
+         * TODO cache values if not started yet, then apply later. Read comments in drain().
+         * Currently they are recycled instantly.
+         */
+        void write(OutputBufferPool pool, OutputBuffer buffer) {
             if (!mMediaMuxerStarted) {
                 throw new IllegalStateException("Trying to write before muxer started");
             }
-            mMediaMuxer.writeSampleData(track, encodedData, info);
-        }
-
-        void requestStop() {
-            synchronized (mLock) {
-                mMediaMuxerStartCount--;
-                if (mMediaMuxerStartCount == 0) {
+            // This is a bad idea and causes crashes.
+            // if (info.presentationTimeUs < mLastTimestampUs) info.presentationTimeUs = mLastTimestampUs;
+            // mLastTimestampUs = info.presentationTimeUs;
+            LOG.v("Writing for track", buffer.trackIndex, ". Presentation:", buffer.info.presentationTimeUs);
+            mMediaMuxer.writeSampleData(buffer.trackIndex, buffer.data, buffer.info);
+            pool.recycle(buffer);
+        }
+
+        /**
+         * Requests that the engine stops. This is not executed until all encoders call
+         * this method, so it is a kind of soft request, just like {@link #requestStart(MediaFormat)}.
+         * To be used when maxLength / maxSize constraints are reached, for example.
+         *
+         * When this succeeds, {@link MediaEncoder#stop()} is called.
+         */
+        void requestStop(int track) {
+            LOG.i("RequestStop was called for track", track);
+            synchronized (mControllerLock) {
+                if (--mStartedEncodersCount == 0) {
                     mStopReason = mPossibleStopReason;
                     stop();
                 }
             }
         }
+
+        /**
+         * Notifies that the encoder was stopped. After this is called by all encoders,
+         * we will actually stop the muxer.
+         */
+        void requestRelease(int track) {
+            LOG.i("requestRelease was called for track", track);
+            synchronized (mControllerLock) {
+                if (++mStoppedEncodersCount == mEncoders.size()) {
+                    release();
+                }
+            }
+        }
     }
 
-    void start() {
-        mWorker.post(new Runnable() {
-            @Override
-            public void run() {
+    final void start() {
         for (MediaEncoder encoder : mEncoders) {
             encoder.start();
         }
     }
-        });
-    }
 
-    void notify(final String event, final Object data) {
-        mWorker.post(new Runnable() {
-            @Override
-            public void run() {
+    @SuppressWarnings("SameParameterValue")
+    final void notify(final String event, final Object data) {
         for (MediaEncoder encoder : mEncoders) {
             encoder.notify(event, data);
         }
     }
-        });
-    }
 
-    void stop() {
-        mWorker.post(new Runnable() {
-            @Override
-            public void run() {
+    /**
+     * This just asks the encoder to stop. We will wait for them to call {@link Controller#requestRelease(int)}
+     * to actually stop the muxer, as there might be async stuff going on.
+     */
+    final void stop() {
         for (MediaEncoder encoder : mEncoders) {
             encoder.stop();
         }
-        for (MediaEncoder encoder : mEncoders) {
-            encoder.release();
-        }
+    }
+
+    private void release() {
         Exception error = null;
         if (mMediaMuxer != null) {
             // stop() throws an exception if you haven't fed it any data.
@@ -172,13 +198,28 @@ class MediaEncoderEngine {
             }
             mMediaMuxer = null;
         }
-        if (mListener != null) mListener.onEncoderStop(mStopReason, error);
-        mStopReason = STOP_BY_USER;
-        mListener = null;
-        mMediaMuxerStartCount = 0;
+        if (mListener != null) {
+            mListener.onEncoderStop(mStopReason, error);
+            mListener = null;
+        }
+        mStopReason = STOP_BY_USER;
+        mStartedEncodersCount = 0;
+        mStoppedEncodersCount = 0;
         mMediaMuxerStarted = false;
     }
-        });
-    }
+
+    @NonNull
+    VideoMediaEncoder getVideoEncoder() {
+        return (VideoMediaEncoder) mEncoders.get(0);
+    }
+
+    @Nullable
+    AudioMediaEncoder getAudioEncoder() {
+        if (mEncoders.size() > 1) {
+            return (AudioMediaEncoder) mEncoders.get(1);
+        } else {
+            return null;
+        }
+    }
 
     interface Listener {
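
A quick orientation on how the engine is driven end to end (condensed from the SnapshotVideoRecorder changes further below; file, videoEncoder, audioEncoder, maxDuration, maxSize, listener and textureFrame stand for the locals built there):

    MediaEncoderEngine engine = new MediaEncoderEngine(file, videoEncoder, audioEncoder,
            maxDuration, maxSize, listener);
    engine.start();  // each encoder executes onStart() on its own WorkerHandler thread
    // Once per rendered frame, while recording:
    engine.notify(TextureMediaEncoder.FRAME_EVENT, textureFrame);
    engine.stop();   // soft stop: the muxer is released only after every encoder
                     // has called Controller.requestRelease()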

@@ -0,0 +1,11 @@
+package com.otaliastudios.cameraview;
+
+import android.media.MediaCodec;
+
+import java.nio.ByteBuffer;
+
+class OutputBuffer {
+    MediaCodec.BufferInfo info;
+    int trackIndex;
+    ByteBuffer data;
+}

@@ -0,0 +1,18 @@
+package com.otaliastudios.cameraview;
+
+import android.media.MediaCodec;
+
+class OutputBufferPool extends Pool<OutputBuffer> {
+
+    OutputBufferPool(final int trackIndex) {
+        super(Integer.MAX_VALUE, new Factory<OutputBuffer>() {
+            @Override
+            public OutputBuffer create() {
+                OutputBuffer buffer = new OutputBuffer();
+                buffer.trackIndex = trackIndex;
+                buffer.info = new MediaCodec.BufferInfo();
+                return buffer;
+            }
+        });
+    }
+}
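
One detail worth flagging: each OutputBuffer created above owns its own MediaCodec.BufferInfo, yet MediaEncoder.drainOutput() currently overwrites it with a reference to the encoder's single shared mBufferInfo (see the "fix the mBufferInfo being the same" TODO there), so all pooled buffers end up aliasing one info object. A sketch of the per-buffer copy the TODO points toward, using MediaCodec.BufferInfo.set() (illustrative, not part of this commit):

    // In drainOutput(), instead of "buffer.info = mBufferInfo":
    OutputBuffer buffer = mOutputBufferPool.get();
    buffer.info.set(mBufferInfo.offset, mBufferInfo.size,
            mBufferInfo.presentationTimeUs, mBufferInfo.flags); // independent copy
    buffer.data = encodedData;
    mController.write(mOutputBufferPool, buffer);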

@@ -0,0 +1,89 @@
+package com.otaliastudios.cameraview;
+
+import java.util.concurrent.LinkedBlockingQueue;
+
+import androidx.annotation.CallSuper;
+import androidx.annotation.NonNull;
+import androidx.annotation.Nullable;
+
+class Pool<T> {
+
+    private static final String TAG = Pool.class.getSimpleName();
+    private static final CameraLogger LOG = CameraLogger.create(TAG);
+
+    private int maxPoolSize;
+    private int activeCount;
+    private LinkedBlockingQueue<T> mQueue;
+    private Factory<T> factory;
+
+    interface Factory<T> {
+        T create();
+    }
+
+    Pool(int maxPoolSize, Factory<T> factory) {
+        this.maxPoolSize = maxPoolSize;
+        this.mQueue = new LinkedBlockingQueue<>(maxPoolSize);
+        this.factory = factory;
+    }
+
+    boolean canGet() {
+        return count() < maxPoolSize;
+    }
+
+    @Nullable
+    T get() {
+        T buffer = mQueue.poll();
+        if (buffer != null) {
+            activeCount++; // poll decreases, this fixes
+            LOG.v("GET: Reusing recycled item.", this);
+            return buffer;
+        }
+
+        if (!canGet()) {
+            LOG.v("GET: Returning null. Too much items requested.", this);
+            return null;
+        }
+
+        activeCount++;
+        LOG.v("GET: Creating a new item.", this);
+        return factory.create();
+    }
+
+    void recycle(@NonNull T item) {
+        LOG.v("RECYCLE: Recycling item.", this);
+        if (--activeCount < 0) {
+            throw new IllegalStateException("Trying to recycle an item which makes activeCount < 0." +
+                    "This means that this or some previous items being recycled were not coming from " +
+                    "this pool, or some item was recycled more than once. " + this);
+        }
+        if (!mQueue.offer(item)) {
+            throw new IllegalStateException("Trying to recycle an item while the queue is full. " +
+                    "This means that this or some previous items being recycled were not coming from " +
+                    "this pool, or some item was recycled more than once. " + this);
+        }
+    }
+
+    @NonNull
+    @Override
+    public String toString() {
+        return getClass().getSimpleName() + " -- count:" + count() + ", active:" + activeCount() + ", cached:" + cachedCount();
+    }
+
+    final int count() {
+        return activeCount() + cachedCount();
+    }
+
+    final int activeCount() {
+        return activeCount;
+    }
+
+    final int cachedCount() {
+        return mQueue.size();
+    }
+
+    @CallSuper
+    void clear() {
+        mQueue.clear();
+    }
+}
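
The commit message mentions using a Pool for float[] arrays; TextureMediaEncoder (below) does this indirectly by pooling TextureFrame objects, each of which owns a float[16] transform. The same generic Pool can hold the arrays directly; a small usage sketch (illustrative values):

    Pool<float[]> matrixPool = new Pool<>(100, new Pool.Factory<float[]>() {
        @Override
        public float[] create() {
            return new float[16]; // one 4x4 transform matrix
        }
    });
    float[] transform = matrixPool.get(); // null once 100 arrays are in flight
    if (transform != null) {
        // e.g. surfaceTexture.getTransformMatrix(transform);
        matrixPool.recycle(transform);
    }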

@@ -3,6 +3,8 @@ package com.otaliastudios.cameraview;
 import android.opengl.EGLContext;
 import android.opengl.Matrix;
 import android.os.Build;
+import android.widget.TextView;
+
 import androidx.annotation.NonNull;
 import androidx.annotation.Nullable;
 import androidx.annotation.RequiresApi;
@@ -15,10 +17,6 @@ class TextureMediaEncoder extends VideoMediaEncoder<TextureMediaEncoder.Config>
 
     final static String FRAME_EVENT = "frame";
 
-    static class Frame {
-        float[] transform;
-        long timestamp;
-    }
-
     static class Config extends VideoMediaEncoder.Config {
         int textureId;
         float scaleX;
@@ -44,15 +42,40 @@ class TextureMediaEncoder extends VideoMediaEncoder<TextureMediaEncoder.Config>
     private EglCore mEglCore;
     private EglWindowSurface mWindow;
     private EglViewport mViewport;
+    private Pool<TextureFrame> mFramePool = new Pool<>(100, new Pool.Factory<TextureFrame>() {
+        @Override
+        public TextureFrame create() {
+            return new TextureFrame();
+        }
+    });
 
     TextureMediaEncoder(@NonNull Config config) {
         super(config);
     }
 
+    static class TextureFrame {
+        private TextureFrame() {}
+        // Nanoseconds, in no meaningful time-base. Should be for offsets only.
+        // Typically coming from SurfaceTexture.getTimestamp().
+        long timestamp;
+        float[] transform = new float[16];
+    }
+
+    @NonNull
+    TextureFrame acquireFrame() {
+        if (!mFramePool.canGet()) {
+            throw new RuntimeException("Need more frames than this! Please increase the pool size.");
+        } else {
+            //noinspection ConstantConditions
+            return mFramePool.get();
+        }
+    }
+
     @EncoderThread
     @Override
-    void prepare(@NonNull MediaEncoderEngine.Controller controller, long maxLengthMillis) {
-        super.prepare(controller, maxLengthMillis);
+    void onPrepare(@NonNull MediaEncoderEngine.Controller controller, long maxLengthMillis) {
+        super.onPrepare(controller, maxLengthMillis);
         mEglCore = new EglCore(mConfig.eglContext, EglCore.FLAG_RECORDABLE);
         mWindow = new EglWindowSurface(mEglCore, mSurface, true);
         mWindow.makeCurrent(); // drawing will happen on the InputWindowSurface, which
@@ -62,51 +85,29 @@ class TextureMediaEncoder extends VideoMediaEncoder<TextureMediaEncoder.Config>
 
     @EncoderThread
     @Override
-    void release() {
-        super.release();
-        if (mWindow != null) {
-            mWindow.release();
-            mWindow = null;
-        }
-        if (mViewport != null) {
-            mViewport.release(true);
-            mViewport = null;
-        }
-        if (mEglCore != null) {
-            mEglCore.release();
-            mEglCore = null;
-        }
+    void onStart() {
+        super.onStart();
+        // Nothing to do here. Waiting for the first frame.
     }
 
     @EncoderThread
     @Override
-    void start() {
-        super.start();
-        // Nothing to do here. Waiting for the first frame.
-    }
-
-    @EncoderThread
-    @Override
-    void notify(@NonNull String event, @Nullable Object data) {
-        if (event.equals(FRAME_EVENT)) {
-            Frame frame = (Frame) data;
-            // Seeing this after device is toggled off/on with power button. The
-            // first frame back has a zero timestamp.
-            // MPEG4Writer thinks this is cause to abort() in native code, so it's very
-            // important that we just ignore the frame.
-            if (frame.timestamp == 0) return;
-            if (mFrameNum < 0) return;
+    void onEvent(@NonNull String event, @Nullable Object data) {
+        if (!event.equals(FRAME_EVENT)) return;
+        TextureFrame frame = (TextureFrame) data;
+        if (frame == null) return; // Should not happen
+        if (frame.timestamp == 0 || mFrameNum < 0) {
+            // The first condition comes from grafika.
+            // The second condition means we were asked to stop.
+            mFramePool.recycle(frame);
+            return;
+        }
         mFrameNum++;
-        int arg1 = (int) (frame.timestamp >> 32);
-        int arg2 = (int) frame.timestamp;
-        long timestamp = (((long) arg1) << 32) | (((long) arg2) & 0xffffffffL);
-        float[] transform = frame.transform;
+        LOG.v("Incoming frame timestamp:", frame.timestamp);
 
         // We must scale this matrix like GlCameraPreview does, because it might have some cropping.
         // Scaling takes place with respect to the (0, 0, 0) point, so we must apply a Translation to compensate.
+        float[] transform = frame.transform;
         float scaleX = mConfig.scaleX;
         float scaleY = mConfig.scaleY;
         float scaleTranslX = (1F - scaleX) / 2F;
@@ -123,12 +124,29 @@ class TextureMediaEncoder extends VideoMediaEncoder<TextureMediaEncoder.Config>
         Matrix.rotateM(transform, 0, mConfig.transformRotation, 0, 0, 1);
         Matrix.translateM(transform, 0, -0.5F, -0.5F, 0);
 
-        drain(false);
+        drainOutput(false);
         // Future note: passing scale values to the viewport? They are scaleX and scaleY,
         // but flipped based on the mConfig.scaleFlipped boolean.
         mViewport.drawFrame(mConfig.textureId, transform);
-        mWindow.setPresentationTime(timestamp);
+        mWindow.setPresentationTime(frame.timestamp);
         mWindow.swapBuffers();
+        mFramePool.recycle(frame);
+    }
+
+    @Override
+    void onRelease() {
+        mFramePool.clear();
+        if (mWindow != null) {
+            mWindow.release();
+            mWindow = null;
+        }
+        if (mViewport != null) {
+            mViewport.release(true);
+            mViewport = null;
+        }
+        if (mEglCore != null) {
+            mEglCore.release();
+            mEglCore = null;
+        }
     }
 }

@@ -51,10 +51,15 @@ abstract class VideoMediaEncoder<C extends VideoMediaEncoder.Config> extends MediaEncoder {
         mConfig = config;
     }
 
+    @NonNull
+    @Override
+    String getName() {
+        return "VideoEncoder";
+    }
+
     @EncoderThread
     @Override
-    void prepare(@NonNull MediaEncoderEngine.Controller controller, long maxLengthMillis) {
-        super.prepare(controller, maxLengthMillis);
+    void onPrepare(@NonNull MediaEncoderEngine.Controller controller, long maxLengthMillis) {
         MediaFormat format = MediaFormat.createVideoFormat(mConfig.mimeType, mConfig.width, mConfig.height);
 
         // Set some properties. Failing to specify some of these can cause the MediaCodec
@@ -62,6 +67,7 @@ abstract class VideoMediaEncoder<C extends VideoMediaEncoder.Config> extends MediaEncoder {
         format.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
         format.setInteger(MediaFormat.KEY_BIT_RATE, mConfig.bitRate);
         format.setInteger(MediaFormat.KEY_FRAME_RATE, mConfig.frameRate);
+        format.setInteger(MediaFormat.KEY_FRAME_RATE, 6); // TODO
         format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 2);
         format.setInteger("rotation-degrees", mConfig.rotation);
@@ -79,20 +85,21 @@ abstract class VideoMediaEncoder<C extends VideoMediaEncoder.Config> extends MediaEncoder {
 
     @EncoderThread
     @Override
-    void start() {
+    void onStart() {
         // Nothing to do here. Waiting for the first frame.
         mFrameNum = 0;
     }
 
     @EncoderThread
     @Override
-    void stop() {
+    void onStop() {
         mFrameNum = -1;
-        drain(true);
+        signalEndOfInputStream();
+        drainOutput(true);
     }
 
     @Override
-    int getBitRate() {
+    int getEncodedBitRate() {
         return mConfig.bitRate;
     }
 }

@@ -58,8 +58,13 @@ class SnapshotVideoRecorder extends VideoRecorder implements GlCameraPreview.RendererFrameCallback {
     @Override
     public void onRendererFrame(@NonNull SurfaceTexture surfaceTexture, float scaleX, float scaleY) {
         if (mCurrentState == STATE_NOT_RECORDING && mDesiredState == STATE_RECORDING) {
+            // Set default options
+            if (mResult.videoBitRate <= 0) mResult.videoBitRate = DEFAULT_VIDEO_BITRATE;
+            if (mResult.videoFrameRate <= 0) mResult.videoFrameRate = DEFAULT_VIDEO_FRAMERATE;
+            if (mResult.audioBitRate <= 0) mResult.audioBitRate = DEFAULT_AUDIO_BITRATE;
+
+            // Video. Ensure width and height are divisible by 2, as I have read somewhere.
             Size size = mResult.getSize();
-            // Ensure width and height are divisible by 2, as I have read somewhere.
             int width = size.getWidth();
             int height = size.getHeight();
             width = width % 2 == 0 ? width : width + 1;
@@ -70,9 +75,6 @@ class SnapshotVideoRecorder extends VideoRecorder implements GlCameraPreview.RendererFrameCallback {
                 case H_264: type = "video/avc"; break; // MediaFormat.MIMETYPE_VIDEO_AVC:
                 case DEVICE_DEFAULT: type = "video/avc"; break;
             }
-            if (mResult.videoBitRate <= 0) mResult.videoBitRate = DEFAULT_VIDEO_BITRATE;
-            if (mResult.audioBitRate <= 0) mResult.audioBitRate = DEFAULT_AUDIO_BITRATE;
-            if (mResult.videoFrameRate <= 0) mResult.videoFrameRate = DEFAULT_VIDEO_FRAMERATE;
             LOG.w("Creating frame encoder. Rotation:", mResult.rotation);
             TextureMediaEncoder.Config config = new TextureMediaEncoder.Config(width, height,
                     mResult.videoBitRate,
@@ -84,10 +86,14 @@ class SnapshotVideoRecorder extends VideoRecorder implements GlCameraPreview.RendererFrameCallback {
                     EGL14.eglGetCurrentContext()
             );
             TextureMediaEncoder videoEncoder = new TextureMediaEncoder(config);
+
+            // Audio
             AudioMediaEncoder audioEncoder = null;
             if (mResult.audio == Audio.ON) {
                 audioEncoder = new AudioMediaEncoder(new AudioMediaEncoder.Config(mResult.audioBitRate));
             }
+
+            // Engine
             mEncoderEngine = new MediaEncoderEngine(mResult.file, videoEncoder, audioEncoder,
                     mResult.maxDuration, mResult.maxSize, SnapshotVideoRecorder.this);
             mEncoderEngine.start();
@@ -96,11 +102,11 @@ class SnapshotVideoRecorder extends VideoRecorder implements GlCameraPreview.RendererFrameCallback {
         }
 
         if (mCurrentState == STATE_RECORDING) {
-            TextureMediaEncoder.Frame frame = new TextureMediaEncoder.Frame();
-            frame.timestamp = surfaceTexture.getTimestamp();
-            frame.transform = new float[16]; // TODO would be cool to avoid this at every frame. But it's not easy.
-            surfaceTexture.getTransformMatrix(frame.transform);
-            mEncoderEngine.notify(TextureMediaEncoder.FRAME_EVENT, frame);
+            TextureMediaEncoder textureEncoder = (TextureMediaEncoder) mEncoderEngine.getVideoEncoder();
+            TextureMediaEncoder.TextureFrame textureFrame = textureEncoder.acquireFrame();
+            textureFrame.timestamp = surfaceTexture.getTimestamp();
+            surfaceTexture.getTransformMatrix(textureFrame.transform);
+            mEncoderEngine.notify(TextureMediaEncoder.FRAME_EVENT, textureFrame);
         }
 
         if (mCurrentState == STATE_RECORDING && mDesiredState == STATE_NOT_RECORDING) {
@@ -113,7 +119,6 @@ class SnapshotVideoRecorder extends VideoRecorder implements GlCameraPreview.RendererFrameCallback {
     }
 
-    @EncoderThread
     @Override
     public void onEncoderStop(int stopReason, @Nullable Exception e) {
         // If something failed, undo the result, since this is the mechanism

@@ -22,6 +22,7 @@ abstract class VideoRecorder {
 
     abstract void stop();
 
+    @SuppressWarnings("WeakerAccess")
     protected void dispatchResult() {
         if (mListener != null) {
             mListener.onVideoResult(mResult);

@@ -2,6 +2,8 @@ package com.otaliastudios.cameraview;
 
 import android.os.Handler;
 import android.os.HandlerThread;
+import android.os.Looper;
+
 import androidx.annotation.NonNull;
 
 import java.lang.ref.WeakReference;
@@ -63,16 +65,22 @@ class WorkerHandler {
     }
 
     @NonNull
-    public Thread getThread() {
+    public HandlerThread getThread() {
         return mThread;
     }
 
+    @NonNull
+    public Looper getLooper() {
+        return mThread.getLooper();
+    }
+
     static void destroy() {
         for (String key : sCache.keySet()) {
             WeakReference<WorkerHandler> ref = sCache.get(key);
             WorkerHandler handler = ref.get();
             if (handler != null && handler.getThread().isAlive()) {
                 handler.getThread().interrupt();
+                // handler.getThread().quit();
             }
             ref.clear();
         }

@@ -2,6 +2,7 @@ package com.otaliastudios.cameraview.demo;
 
 import android.content.Intent;
 import android.content.pm.PackageManager;
+import android.net.Uri;
 import android.os.Bundle;
 
 import androidx.annotation.NonNull;
 import com.google.android.material.bottomsheet.BottomSheetBehavior;
