Add Audio.MONO and Audio.STEREO

pull/506/head
Mattia Iavarone 6 years ago
parent 0fe7c9235a
commit d6b20e7ed1
  1. README.md (2 lines changed)
  2. cameraview/src/androidTest/java/com/otaliastudios/cameraview/CameraViewTest.java (4 lines changed)
  3. cameraview/src/main/java/com/otaliastudios/cameraview/CameraView.java (6 lines changed)
  4. cameraview/src/main/java/com/otaliastudios/cameraview/controls/Audio.java (18 lines changed)
  5. cameraview/src/main/java/com/otaliastudios/cameraview/video/FullVideoRecorder.java (13 lines changed)
  6. cameraview/src/main/java/com/otaliastudios/cameraview/video/SnapshotVideoRecorder.java (4 lines changed)
  7. cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/AudioMediaEncoder.java (80 lines changed)
  8. docs/_posts/2018-12-20-controls.md (6 lines changed)

--- a/README.md
+++ b/README.md
@@ -95,7 +95,7 @@ motivation boost to push the library forward.
 app:cameraFlash="on|auto|torch|off"
 app:cameraWhiteBalance="auto|cloudy|daylight|fluorescent|incandescent"
 app:cameraMode="picture|video"
-app:cameraAudio="on|off"
+app:cameraAudio="on|off|mono|stereo"
 app:cameraGrid="draw3x3|draw4x4|drawPhi|off"
 app:cameraGridColor="@color/grid_color"
 app:cameraPlaySounds="true|false"

--- a/cameraview/src/androidTest/java/com/otaliastudios/cameraview/CameraViewTest.java
+++ b/cameraview/src/androidTest/java/com/otaliastudios/cameraview/CameraViewTest.java
@@ -628,6 +628,10 @@ public class CameraViewTest extends BaseTest {
         assertEquals(cameraView.get(Audio.class), Audio.ON);
         cameraView.set(Audio.OFF);
         assertEquals(cameraView.get(Audio.class), Audio.OFF);
+        cameraView.set(Audio.MONO);
+        assertEquals(cameraView.get(Audio.class), Audio.MONO);
+        cameraView.set(Audio.STEREO);
+        assertEquals(cameraView.get(Audio.class), Audio.STEREO);
     }

     @Test

--- a/cameraview/src/main/java/com/otaliastudios/cameraview/CameraView.java
+++ b/cameraview/src/main/java/com/otaliastudios/cameraview/CameraView.java
@@ -678,7 +678,7 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
         Context c = getContext();
         boolean needsCamera = true;
-        boolean needsAudio = audio == Audio.ON;
+        boolean needsAudio = audio == Audio.ON || audio == Audio.MONO || audio == Audio.STEREO;
         needsCamera = needsCamera && c.checkSelfPermission(Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED;
         needsAudio = needsAudio && c.checkSelfPermission(Manifest.permission.RECORD_AUDIO) != PackageManager.PERMISSION_GRANTED;
@@ -696,7 +696,7 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
      * If the developer did not add this to its manifest, throw and fire warnings.
      */
     private void checkPermissionsManifestOrThrow(@NonNull Audio audio) {
-        if (audio == Audio.ON) {
+        if (audio == Audio.ON || audio == Audio.MONO || audio == Audio.STEREO) {
             try {
                 PackageManager manager = getContext().getPackageManager();
                 PackageInfo info = manager.getPackageInfo(getContext().getPackageName(), PackageManager.GET_PERMISSIONS);
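Because `MONO` and `STEREO` now trip the same checks as `ON`, apps targeting API 23+ still need the `RECORD_AUDIO` runtime grant (plus the manifest entry this method verifies). A minimal sketch from inside an Activity, assuming AndroidX core; the request code is arbitrary:

```java
// Illustrative only: CameraView performs its own request too (see needsAudio above).
if (ContextCompat.checkSelfPermission(this, Manifest.permission.RECORD_AUDIO)
        != PackageManager.PERMISSION_GRANTED) {
    ActivityCompat.requestPermissions(this,
            new String[]{Manifest.permission.RECORD_AUDIO},
            16); // arbitrary request code
}
```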
@@ -1174,6 +1174,8 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
      *
      * @see Audio#OFF
      * @see Audio#ON
+     * @see Audio#MONO
+     * @see Audio#STEREO
      *
      * @param audio desired audio value
      */

--- a/cameraview/src/main/java/com/otaliastudios/cameraview/controls/Audio.java
+++ b/cameraview/src/main/java/com/otaliastudios/cameraview/controls/Audio.java
@@ -14,14 +14,26 @@ import androidx.annotation.Nullable;
 public enum Audio implements Control {

     /**
-     * No Audio.
+     * No audio.
      */
     OFF(0),

     /**
-     * With Audio.
+     * Audio on. The number of channels depends on the video configuration,
+     * on the device capabilities and on the video type (e.g. we default to
+     * mono for snapshots).
      */
-    ON(1);
+    ON(1),
+
+    /**
+     * Force mono channel audio.
+     */
+    MONO(2),
+
+    /**
+     * Force stereo audio.
+     */
+    STEREO(3);

     final static Audio DEFAULT = ON;
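As a quick usage sketch (not part of this diff; `setMode`, `setAudio` and `takeVideo` are existing CameraView calls, the output file is illustrative):

```java
// Record a clip with a forced two-channel audio track.
cameraView.setMode(Mode.VIDEO);
cameraView.setAudio(Audio.STEREO);
cameraView.takeVideo(new File(getFilesDir(), "stereo-clip.mp4"));
```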

--- a/cameraview/src/main/java/com/otaliastudios/cameraview/video/FullVideoRecorder.java
+++ b/cameraview/src/main/java/com/otaliastudios/cameraview/video/FullVideoRecorder.java
@@ -46,7 +46,7 @@ public abstract class FullVideoRecorder extends VideoRecorder {
     protected boolean onPrepareMediaRecorder(@NonNull VideoResult.Stub stub, @NonNull MediaRecorder mediaRecorder) {
         mMediaRecorder = mediaRecorder;
         Size size = stub.rotation % 180 != 0 ? stub.size.flip() : stub.size;
-        if (stub.audio == Audio.ON) {
+        if (stub.audio == Audio.ON || stub.audio == Audio.MONO || stub.audio == Audio.STEREO) {
             // Must be called before setOutputFormat.
             mMediaRecorder.setAudioSource(MediaRecorder.AudioSource.DEFAULT);
         }
@@ -70,8 +70,15 @@ public abstract class FullVideoRecorder extends VideoRecorder {
         } else {
             mMediaRecorder.setVideoEncodingBitRate(stub.videoBitRate);
         }
-        if (stub.audio == Audio.ON) {
-            mMediaRecorder.setAudioChannels(mProfile.audioChannels);
+        if (stub.audio == Audio.ON || stub.audio == Audio.MONO || stub.audio == Audio.STEREO) {
+            if (stub.audio == Audio.ON) {
+                mMediaRecorder.setAudioChannels(mProfile.audioChannels);
+            } else if (stub.audio == Audio.MONO) {
+                mMediaRecorder.setAudioChannels(1);
+            } else //noinspection ConstantConditions
+                if (stub.audio == Audio.STEREO) {
+                mMediaRecorder.setAudioChannels(2);
+            }
             mMediaRecorder.setAudioSamplingRate(mProfile.audioSampleRate);
             mMediaRecorder.setAudioEncoder(mProfile.audioCodec);
             if (stub.audioBitRate <= 0) {
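With plain `Audio.ON` the channel count still comes from the device profile (`mProfile.audioChannels`); only `MONO`/`STEREO` force a value. A sketch of inspecting that default directly, assuming camera id 0 and the high-quality profile:

```java
// The channel count Audio.ON would inherit on this device.
CamcorderProfile profile = CamcorderProfile.get(0, CamcorderProfile.QUALITY_HIGH);
int defaultChannels = profile.audioChannels; // typically 1 or 2
```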

--- a/cameraview/src/main/java/com/otaliastudios/cameraview/video/SnapshotVideoRecorder.java
+++ b/cameraview/src/main/java/com/otaliastudios/cameraview/video/SnapshotVideoRecorder.java
@@ -143,9 +143,11 @@ public class SnapshotVideoRecorder extends VideoRecorder implements RendererFrameCallback {
         // Audio
         AudioMediaEncoder audioEncoder = null;
-        if (mResult.audio == Audio.ON) {
+        if (mResult.audio == Audio.ON || mResult.audio == Audio.MONO || mResult.audio == Audio.STEREO) {
             AudioMediaEncoder.Config audioConfig = new AudioMediaEncoder.Config();
             audioConfig.bitRate = mResult.audioBitRate;
+            if (mResult.audio == Audio.MONO) audioConfig.channels = 1;
+            if (mResult.audio == Audio.STEREO) audioConfig.channels = 2;
             audioEncoder = new AudioMediaEncoder(audioConfig);
         }

--- a/cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/AudioMediaEncoder.java
+++ b/cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/AudioMediaEncoder.java
@@ -31,24 +31,6 @@ public class AudioMediaEncoder extends MediaEncoder {
     private static final String TAG = AudioMediaEncoder.class.getSimpleName();
     private static final CameraLogger LOG = CameraLogger.create(TAG);

-    private static final String MIME_TYPE = "audio/mp4a-latm";
-    private static final int ENCODING = AudioFormat.ENCODING_PCM_16BIT; // Determines the SAMPLE_SIZE
-    private static final int CHANNELS = AudioFormat.CHANNEL_IN_MONO; // AudioFormat.CHANNEL_IN_STEREO;
-    // The 44.1KHz frequency is the only setting guaranteed to be available on all devices.
-    private static final int SAMPLING_FREQUENCY = 44100; // samples/sec
-    private static final int CHANNELS_COUNT = 1; // 2;
-    private static final int SAMPLE_SIZE = 2; // byte/sample/channel
-    private static final int BYTE_RATE_PER_CHANNEL = SAMPLING_FREQUENCY * SAMPLE_SIZE; // byte/sec/channel
-    private static final int BYTE_RATE = BYTE_RATE_PER_CHANNEL * CHANNELS_COUNT; // byte/sec
-    @SuppressWarnings("unused")
-    private static final int BIT_RATE = BYTE_RATE * 8; // bit/sec
-    // We call FRAME here the chunk of data that we want to read at each loop cycle
-    private static final int FRAME_SIZE_PER_CHANNEL = 1024; // bytes/frame/channel [AAC constant]
-    private static final int FRAME_SIZE = FRAME_SIZE_PER_CHANNEL * CHANNELS_COUNT; // bytes/frame
     // We allocate buffers of 1KB each, which is not so much. This value indicates the maximum
     // number of these buffers that we can allocate at a given instant.
     // This value is the number of runnables that the encoder thread is allowed to be 'behind'
@@ -64,14 +46,48 @@ public class AudioMediaEncoder extends MediaEncoder {
     private Config mConfig;

     public static class Config {
         public int bitRate;
+        public int channels = 1;
+
+        // Not configurable options (for now)
+        private final String mimeType = "audio/mp4a-latm";
+        private final int encoding = AudioFormat.ENCODING_PCM_16BIT; // Determines the SAMPLE_SIZE
+        // The 44.1KHz frequency is the only setting guaranteed to be available on all devices.
+        private final int samplingFrequency = 44100; // samples/sec
+        private final int sampleSize = 2; // byte/sample/channel
+        private final int byteRatePerChannel = samplingFrequency * sampleSize; // byte/sec/channel
+        private final int frameSizePerChannel = 1024; // bytes/frame/channel [AAC constant]

         @NonNull
         private Config copy() {
             Config config = new Config();
             config.bitRate = this.bitRate;
+            config.channels = this.channels;
             return config;
         }
+
+        private int byteRate() {
+            return byteRatePerChannel * channels; // byte/sec
+        }
+
+        private int bitRate() {
+            return byteRate() * 8; // bit/sec
+        }
+
+        private int frameSize() {
+            // We call FRAME here the chunk of data that we want to read at each loop cycle
+            return frameSizePerChannel * channels; // bytes/frame
+        }
+
+        private int audioFormatChannels() {
+            if (channels == 1) {
+                return AudioFormat.CHANNEL_IN_MONO;
+            } else if (channels == 2) {
+                return AudioFormat.CHANNEL_IN_STEREO;
+            }
+            throw new RuntimeException("Invalid number of channels: " + channels);
+        }
     }

     public AudioMediaEncoder(@NonNull Config config) {
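To make the derived quantities concrete, take the stereo case (`channels = 2`): `byteRate()` is 44100 × 2 × 2 = 176400 bytes/sec and `frameSize()` is 1024 × 2 = 2048 bytes, so each frame read from the microphone covers 2048 / 176400 ≈ 11.6 ms of audio; the raw PCM `bitRate()` is 176400 × 8 = 1411200 bits/sec before AAC compression.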
@@ -87,19 +103,19 @@ public class AudioMediaEncoder extends MediaEncoder {
     @EncoderThread
     @Override
     protected void onPrepare(@NonNull MediaEncoderEngine.Controller controller, long maxLengthMillis) {
-        final MediaFormat audioFormat = MediaFormat.createAudioFormat(MIME_TYPE, SAMPLING_FREQUENCY, CHANNELS_COUNT);
+        final MediaFormat audioFormat = MediaFormat.createAudioFormat(mConfig.mimeType, mConfig.samplingFrequency, mConfig.channels);
         audioFormat.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC);
-        audioFormat.setInteger(MediaFormat.KEY_CHANNEL_MASK, CHANNELS);
+        audioFormat.setInteger(MediaFormat.KEY_CHANNEL_MASK, mConfig.audioFormatChannels());
         audioFormat.setInteger(MediaFormat.KEY_BIT_RATE, mConfig.bitRate);
-        audioFormat.setInteger(MediaFormat.KEY_CHANNEL_COUNT, CHANNELS_COUNT);
+        audioFormat.setInteger(MediaFormat.KEY_CHANNEL_COUNT, mConfig.channels);
         try {
-            mMediaCodec = MediaCodec.createEncoderByType(MIME_TYPE);
+            mMediaCodec = MediaCodec.createEncoderByType(mConfig.mimeType);
         } catch (IOException e) {
             throw new RuntimeException(e);
         }
         mMediaCodec.configure(audioFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
         mMediaCodec.start();
-        mByteBufferPool = new ByteBufferPool(FRAME_SIZE, BUFFER_POOL_MAX_SIZE);
+        mByteBufferPool = new ByteBufferPool(mConfig.frameSize(), BUFFER_POOL_MAX_SIZE);
     }

     @EncoderThread
@@ -141,13 +157,13 @@ public class AudioMediaEncoder extends MediaEncoder {
         private long mFirstTimeUs = Long.MIN_VALUE;

         private AudioRecordingThread() {
-            final int minBufferSize = AudioRecord.getMinBufferSize(SAMPLING_FREQUENCY, CHANNELS, ENCODING);
-            int bufferSize = FRAME_SIZE * 25; // Make this bigger so we don't skip frames.
+            final int minBufferSize = AudioRecord.getMinBufferSize(mConfig.samplingFrequency, mConfig.audioFormatChannels(), mConfig.encoding);
+            int bufferSize = mConfig.frameSize() * 25; // Make this bigger so we don't skip frames.
             while (bufferSize < minBufferSize) {
-                bufferSize += FRAME_SIZE; // Unlikely I think.
+                bufferSize += mConfig.frameSize(); // Unlikely I think.
             }
             mAudioRecord = new AudioRecord(MediaRecorder.AudioSource.CAMCORDER,
-                    SAMPLING_FREQUENCY, CHANNELS, ENCODING, bufferSize);
+                    mConfig.samplingFrequency, mConfig.audioFormatChannels(), mConfig.encoding, bufferSize);
             setPriority(Thread.MAX_PRIORITY);
         }
@@ -185,7 +201,7 @@ public class AudioMediaEncoder extends MediaEncoder {
                 }
             } else {
                 mCurrentBuffer.clear();
-                mReadBytes = mAudioRecord.read(mCurrentBuffer, FRAME_SIZE);
+                mReadBytes = mAudioRecord.read(mCurrentBuffer, mConfig.frameSize());
                 LOG.i("read thread - eos:", endOfStream, "- Read new audio frame. Bytes:", mReadBytes);
                 if (mReadBytes > 0) { // Good read: increase PTS.
                     increaseTime(mReadBytes, endOfStream);
@@ -206,7 +222,9 @@ public class AudioMediaEncoder extends MediaEncoder {
          */
         private void sleep() {
             try {
-                Thread.sleep(AudioTimestamp.bytesToUs(FRAME_SIZE * 6, BYTE_RATE) / 1000);
+                Thread.sleep(AudioTimestamp.bytesToUs(
+                        mConfig.frameSize() * 6,
+                        mConfig.byteRate()) / 1000);
             } catch (InterruptedException ignore) {}
         }
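Note that the sleep length is six frames' worth of audio and is independent of the channel count, since `frameSize()` and `byteRate()` both scale linearly with `channels`: (6 × 1024 × channels) / (88200 × channels) s ≈ 69.7 ms either way.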
@@ -219,12 +237,12 @@ public class AudioMediaEncoder extends MediaEncoder {
          * @param endOfStream end of stream?
          */
         private void increaseTime(int readBytes, boolean endOfStream) {
-            mLastTimeUs = mTimestamp.increaseUs(readBytes, BYTE_RATE);
+            mLastTimeUs = mTimestamp.increaseUs(readBytes, mConfig.byteRate());
             if (mFirstTimeUs == Long.MIN_VALUE) {
                 mFirstTimeUs = mLastTimeUs;
                 // Compute the first frame milliseconds as well.
                 notifyFirstFrameMillis(System.currentTimeMillis()
-                        - AudioTimestamp.bytesToUs(readBytes, BYTE_RATE) / 1000L);
+                        - AudioTimestamp.bytesToUs(readBytes, mConfig.byteRate()) / 1000L);
             }
             boolean didReachMaxLength = (mLastTimeUs - mFirstTimeUs) > getMaxLengthMillis() * 1000L;
             if (didReachMaxLength && !endOfStream) {

--- a/docs/_posts/2018-12-20-controls.md
+++ b/docs/_posts/2018-12-20-controls.md
@@ -25,7 +25,7 @@ or `CameraOptions.supports(Control)` to see if it is supported.
 app:cameraFlash="off|on|auto|torch"
 app:cameraWhiteBalance="auto|incandescent|fluorescent|daylight|cloudy"
 app:cameraHdr="off|on"
-app:cameraAudio="on|off"
+app:cameraAudio="on|off|mono|stereo"
 app:cameraAudioBitRate="0"
 app:cameraVideoCodec="deviceDefault|h263|h264"
 app:cameraVideoMaxSize="0"
@@ -96,7 +96,9 @@ Defaults to `ON`.
 ```java
 cameraView.setAudio(Audio.OFF);
-cameraView.setAudio(Audio.ON);
+cameraView.setAudio(Audio.ON); // on but depends on video config
+cameraView.setAudio(Audio.MONO); // force mono
+cameraView.setAudio(Audio.STEREO); // force stereo
 ```

 ##### cameraAudioBitRate
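The new values also work from layout XML, mirroring the attribute list above (a minimal sketch; the enclosing layout and `xmlns` declarations are omitted):

```xml
<com.otaliastudios.cameraview.CameraView
    android:layout_width="match_parent"
    android:layout_height="wrap_content"
    app:cameraMode="video"
    app:cameraAudio="stereo" />
```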
