From c7f9b9289661f5c031c5404e21d8b9a793b68e73 Mon Sep 17 00:00:00 2001
From: Mattia Iavarone
Date: Tue, 16 Jul 2019 21:03:46 -0300
Subject: [PATCH] Fix stereo bug

---
 .../video/encoding/AudioConfig.java           | 51 ++++++++++++++++---
 .../video/encoding/AudioMediaEncoder.java     | 40 +++++++--------
 demo/src/main/res/layout/activity_camera.xml  |  2 +-
 3 files changed, 66 insertions(+), 27 deletions(-)

diff --git a/cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/AudioConfig.java b/cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/AudioConfig.java
index a4b04469..3d09b51e 100644
--- a/cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/AudioConfig.java
+++ b/cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/AudioConfig.java
@@ -22,7 +22,6 @@ public class AudioConfig {
     final int samplingFrequency = 44100; // samples/sec
     final int sampleSizePerChannel = 2; // byte/sample/channel [16bit]
     final int byteRatePerChannel = samplingFrequency * sampleSizePerChannel; // byte/sec/channel
-    final int frameSizePerChannel = 1024; // bytes/frame/channel [AAC constant]
 
     @NonNull
     AudioConfig copy() {
@@ -41,11 +40,6 @@ public class AudioConfig {
         return byteRate() * 8; // bit/sec
     }
 
-    int frameSize() {
-        // We call FRAME here the chunk of data that we want to read at each loop cycle
-        return frameSizePerChannel * channels; // bytes/frame
-    }
-
     int audioFormatChannels() {
         if (channels == 1) {
             return AudioFormat.CHANNEL_IN_MONO;
@@ -54,4 +48,49 @@ public class AudioConfig {
         }
         throw new RuntimeException("Invalid number of channels: " + channels);
     }
+
+    /**
+     * We call FRAME here the chunk of data that we want to read at each loop cycle.
+     *
+     * When this number is HIGH, the AudioRecord might be unable to keep a good pace and
+     * we might end up skipping some frames.
+     *
+     * When this number is LOW, we pull a bigger number of frames and this might end up
+     * delaying our recorder/encoder balance (more frames means more encoding operations).
+     * In the end, this means that the recorder will skip some frames to restore the balance.
+     *
+     * @return the frame size
+     */
+    int frameSize() {
+        return 1024 * channels;
+    }
+
+    /**
+     * Number of frames contained in the {@link android.media.AudioRecord} buffer.
+     * In theory, the higher this value is, the safer it is to delay reading as the
+     * audioRecord will hold the recorded samples anyway and return to us next time we read.
+     *
+     * Should be coordinated with {@link #frameSize()}.
+     *
+     * @return the number of frames
+     */
+    int audioRecordBufferFrames() {
+        return 25;
+    }
+
+    /**
+     * We allocate buffers of {@link #frameSize()} each, which is not much.
+     *
+     * This value indicates the maximum number of these buffers that we can allocate at a given instant.
+     * This value is the number of runnables that the encoder thread is allowed to be 'behind'
+     * the recorder thread. It's not safe to have it very large or we can end encoding A LOT AFTER
+     * the actual recording. It's better to reduce this and skip recording at all.
+     *
+     * Should be coordinated with {@link #frameSize()}.
+     *
+     * @return the buffer pool max size
+     */
+    int bufferPoolMaxSize() {
+        return 80;
+    }
 }
diff --git a/cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/AudioMediaEncoder.java b/cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/AudioMediaEncoder.java
index 9f89c652..fc12420c 100644
--- a/cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/AudioMediaEncoder.java
+++ b/cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/AudioMediaEncoder.java
@@ -1,5 +1,6 @@
 package com.otaliastudios.cameraview.video.encoding;
 
+import android.media.AudioFormat;
 import android.media.AudioRecord;
 import android.media.MediaCodec;
 import android.media.MediaCodecInfo;
@@ -22,20 +23,13 @@ import java.util.concurrent.LinkedBlockingQueue;
  * Default implementation for audio encoding.
  */
 // TODO create onVideoRecordingEnd callback
-// TODO STEREO does not work well
 @RequiresApi(api = Build.VERSION_CODES.JELLY_BEAN_MR2)
 public class AudioMediaEncoder extends MediaEncoder {
 
     private static final String TAG = AudioMediaEncoder.class.getSimpleName();
     private static final CameraLogger LOG = CameraLogger.create(TAG);
 
-    // We allocate buffers of 1KB each, which is not so much. This value indicates the maximum
-    // number of these buffers that we can allocate at a given instant.
-    // This value is the number of runnables that the encoder thread is allowed to be 'behind'
-    // the recorder thread. It's not safe to have it very large or we can end encoding A LOT AFTER
-    // the actual recording. It's better to reduce this and skip recording at all.
-    private static final int BUFFER_POOL_MAX_SIZE = 80;
-    private static final boolean PERFORMANCE_DEBUG = true;
+    private static final boolean PERFORMANCE_DEBUG = false;
     private static final boolean PERFORMANCE_FILL_GAPS = true;
 
     private boolean mRequestStop = false;
@@ -68,11 +62,13 @@ public class AudioMediaEncoder extends MediaEncoder {
     @EncoderThread
     @Override
     protected void onPrepare(@NonNull MediaEncoderEngine.Controller controller, long maxLengthMillis) {
-        final MediaFormat audioFormat = MediaFormat.createAudioFormat(mConfig.mimeType, mConfig.samplingFrequency, mConfig.channels);
+        final MediaFormat audioFormat = MediaFormat.createAudioFormat(
+                mConfig.mimeType,
+                mConfig.samplingFrequency,
+                mConfig.channels);
         audioFormat.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC);
         audioFormat.setInteger(MediaFormat.KEY_CHANNEL_MASK, mConfig.audioFormatChannels());
         audioFormat.setInteger(MediaFormat.KEY_BIT_RATE, mConfig.bitRate); // TODO multiply by channels?
-        audioFormat.setInteger(MediaFormat.KEY_CHANNEL_COUNT, mConfig.channels);
         try {
             mMediaCodec = MediaCodec.createEncoderByType(mConfig.mimeType);
         } catch (IOException e) {
@@ -80,7 +76,7 @@
         }
         mMediaCodec.configure(audioFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
         mMediaCodec.start();
-        mByteBufferPool = new ByteBufferPool(mConfig.frameSize(), BUFFER_POOL_MAX_SIZE);
+        mByteBufferPool = new ByteBufferPool(mConfig.frameSize(), mConfig.bufferPoolMaxSize());
         mZeroBuffer = ByteBuffer.allocateDirect(mConfig.frameSize());
     }
 
@@ -140,19 +136,21 @@
         private long mFirstTimeUs = Long.MIN_VALUE;
 
         private AudioRecordingThread() {
-            final int minBufferSize = AudioRecord.getMinBufferSize(mConfig.samplingFrequency,
-                    mConfig.channels, mConfig.encoding);
+            final int minBufferSize = AudioRecord.getMinBufferSize(
+                    mConfig.samplingFrequency,
+                    mConfig.audioFormatChannels(),
+                    mConfig.encoding);
             // Make this bigger so we don't skip frames. 25: Stereo: 51200. Mono: 25600
             // 25 is quite big already. Tried to make it bigger to solve the read() delay
             // but it just makes things worse (ruins MONO as well).
             // Tried to make it smaller and things change as well.
-            int bufferSize = mConfig.frameSize() * 25;
+            int bufferSize = mConfig.frameSize() * mConfig.audioRecordBufferFrames();
             while (bufferSize < minBufferSize) {
                 bufferSize += mConfig.frameSize(); // Unlikely.
             }
             mAudioRecord = new AudioRecord(MediaRecorder.AudioSource.CAMCORDER,
                     mConfig.samplingFrequency,
-                    mConfig.channels,
+                    mConfig.audioFormatChannels(),
                     mConfig.encoding,
                     bufferSize);
             setPriority(Thread.MAX_PRIORITY);
@@ -315,13 +313,15 @@
                 // Performance logging
                 if (PERFORMANCE_DEBUG) {
                     long sendEnd = System.nanoTime() / 1000000;
-                    //noinspection ConstantConditions
-                    long sendStart = mSendStartMap.remove(inputBuffer.timestamp);
-                    mAvgSendDelay = ((mAvgSendDelay * mSendCount) + (sendEnd - sendStart)) / (++mSendCount);
-                    LOG.v("send delay millis:", sendEnd - sendStart, "average:", mAvgSendDelay);
+                    Long sendStart = mSendStartMap.remove(inputBuffer.timestamp);
+                    if (sendStart != null) {
+                        mAvgSendDelay = ((mAvgSendDelay * mSendCount) + (sendEnd - sendStart)) / (++mSendCount);
+                        LOG.v("send delay millis:", sendEnd - sendStart, "average:", mAvgSendDelay);
+                    } else {
+                        // This input buffer was already processed (but tryAcquire failed for now).
+                    }
                 }
 
-                // Actual work
                 if (inputBuffer.isEndOfStream) {
                     acquireInputBuffer(inputBuffer);
diff --git a/demo/src/main/res/layout/activity_camera.xml b/demo/src/main/res/layout/activity_camera.xml
index 3e9f269d..15c40ade 100644
--- a/demo/src/main/res/layout/activity_camera.xml
+++ b/demo/src/main/res/layout/activity_camera.xml
@@ -22,7 +22,7 @@
         app:cameraPlaySounds="true"
         app:cameraGrid="off"
        app:cameraFlash="off"
-        app:cameraAudio="stereo"
+        app:cameraAudio="on"
         app:cameraGestureTap="autoFocus"
         app:cameraGestureLongTap="none"
         app:cameraGesturePinch="zoom"
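
As a sanity check of the sizing comments above, the following standalone sketch (not part of the patch; the class and method names are illustrative only) reproduces the arithmetic behind AudioConfig.frameSize(), audioRecordBufferFrames() and the "25: Stereo: 51200. Mono: 25600" note in AudioRecordingThread, using the same hard-coded values (44100 Hz, 16-bit samples, 1024 bytes per frame per channel, 25 frames):

    // Illustrative sketch only: mirrors the constants used by AudioConfig in this patch.
    public class AudioBufferMath {

        // 1024 bytes per frame per channel, as in AudioConfig.frameSize()
        static int frameSize(int channels) {
            return 1024 * channels;
        }

        // 25 frames of AudioRecord buffer, as in AudioConfig.audioRecordBufferFrames()
        static int audioRecordBufferSize(int channels) {
            return frameSize(channels) * 25;
        }

        public static void main(String[] args) {
            System.out.println("Mono buffer bytes:   " + audioRecordBufferSize(1)); // 25600
            System.out.println("Stereo buffer bytes: " + audioRecordBufferSize(2)); // 51200

            // Raw byte rate: 44100 samples/sec * 2 bytes/sample = 88200 byte/sec/channel,
            // so the stereo buffer holds about 51200 / (88200 * 2) ≈ 0.29 seconds of audio.
            int byteRatePerChannel = 44100 * 2;
            System.out.println("Stereo seconds buffered: "
                    + (double) audioRecordBufferSize(2) / (byteRatePerChannel * 2));
        }
    }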