Encoders improvements (#545)

* Create DeviceEncoders

* Prefer hardware encoders, adjust size and bitrate

* Do the same for video frame rate

* Fix docs

* Rename CameraView method

* Fix DeviceEncoders comparator

* Add tests

* Fix tests

* Fix tests again

* Scale down based on encoder requirements

* Add DeviceEncoders MODE_ values

* Use DeviceEncoders for full videos as well

* Fix small bug
Mattia Iavarone (committed via GitHub)
parent f48d1c17ba
commit f2ea77ce79
12 changed files (lines changed):

  224  cameraview/src/androidTest/java/com/otaliastudios/cameraview/internal/DeviceEncodersTest.java
   12  cameraview/src/main/java/com/otaliastudios/cameraview/CameraView.java
    6  cameraview/src/main/java/com/otaliastudios/cameraview/filter/BaseFilter.java
    2  cameraview/src/main/java/com/otaliastudios/cameraview/filter/Filter.java
  302  cameraview/src/main/java/com/otaliastudios/cameraview/internal/DeviceEncoders.java
   95  cameraview/src/main/java/com/otaliastudios/cameraview/video/FullVideoRecorder.java
   43  cameraview/src/main/java/com/otaliastudios/cameraview/video/SnapshotVideoRecorder.java
    5  cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/AudioConfig.java
    6  cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/AudioMediaEncoder.java
    2  cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/VideoConfig.java
    6  cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/VideoMediaEncoder.java
   11  docs/_posts/2019-02-24-snapshot-size.md

cameraview/src/androidTest/java/com/otaliastudios/cameraview/internal/DeviceEncodersTest.java (new file)
@@ -0,0 +1,224 @@
package com.otaliastudios.cameraview.internal;
import android.media.MediaCodecInfo;
import androidx.annotation.NonNull;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import androidx.test.filters.MediumTest;
import androidx.test.rule.ActivityTestRule;
import com.otaliastudios.cameraview.BaseTest;
import com.otaliastudios.cameraview.TestActivity;
import com.otaliastudios.cameraview.controls.Grid;
import com.otaliastudios.cameraview.size.AspectRatio;
import com.otaliastudios.cameraview.size.Size;
import org.junit.After;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.runner.RunWith;
import java.util.Arrays;
import java.util.List;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertSame;
import static org.junit.Assert.assertTrue;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
@RunWith(AndroidJUnit4.class)
@MediumTest
public class DeviceEncodersTest extends BaseTest {
// This is guaranteed to work, see
// https://developer.android.com/guide/topics/media/media-formats
private final static Size GUARANTEED_SIZE = new Size(176, 144);
private boolean enabled;
@Before
public void setUp() {
enabled = DeviceEncoders.ENABLED;
}
@After
public void tearDown() {
DeviceEncoders.ENABLED = enabled;
}
@NonNull
private DeviceEncoders create() {
return new DeviceEncoders(
"video/avc",
"audio/mp4a-latm",
DeviceEncoders.MODE_TAKE_FIRST);
}
@Test
public void testGetDeviceEncoders() {
DeviceEncoders deviceEncoders = create();
if (DeviceEncoders.ENABLED) {
List<MediaCodecInfo> infos = deviceEncoders.getDeviceEncoders();
for (MediaCodecInfo info : infos) {
assertTrue(info.isEncoder());
}
}
}
@Test
public void testIsHardwareEncoder() {
DeviceEncoders deviceEncoders = create();
if (DeviceEncoders.ENABLED) {
assertFalse(deviceEncoders.isHardwareEncoder("OMX.google.encoder"));
assertTrue(deviceEncoders.isHardwareEncoder("OMX.other.encoder"));
}
}
@Test
public void testFindDeviceEncoder() {
DeviceEncoders deviceEncoders = create();
if (DeviceEncoders.ENABLED) {
List<MediaCodecInfo> allEncoders = deviceEncoders.getDeviceEncoders();
MediaCodecInfo encoder = deviceEncoders.findDeviceEncoder(allEncoders,
"video/avc", DeviceEncoders.MODE_TAKE_FIRST);
assertNotNull(encoder);
List<String> encoderTypes = Arrays.asList(encoder.getSupportedTypes());
assertTrue(encoderTypes.contains("video/avc"));
}
}
@Test
public void testGetVideoEncoder() {
if (DeviceEncoders.ENABLED) {
DeviceEncoders deviceEncoders = create();
assertNotNull(deviceEncoders.getVideoEncoder());
}
DeviceEncoders.ENABLED = false;
DeviceEncoders deviceEncoders = create();
assertNull(deviceEncoders.getVideoEncoder());
}
@Test
public void testGetAudioEncoder() {
if (DeviceEncoders.ENABLED) {
DeviceEncoders deviceEncoders = create();
assertNotNull(deviceEncoders.getAudioEncoder());
}
DeviceEncoders.ENABLED = false;
DeviceEncoders deviceEncoders = create();
assertNull(deviceEncoders.getAudioEncoder());
}
@Test
public void testGetSupportedVideoSize_disabled() {
DeviceEncoders.ENABLED = false;
DeviceEncoders deviceEncoders = create();
Size input = new Size(GUARANTEED_SIZE.getWidth(), GUARANTEED_SIZE.getHeight());
Size output = deviceEncoders.getSupportedVideoSize(input);
assertSame(input, output);
}
@Test
public void testGetSupportedVideoSize_scalesDown() {
DeviceEncoders deviceEncoders = create();
if (DeviceEncoders.ENABLED) {
Size input = new Size(
GUARANTEED_SIZE.getWidth() * 1000,
GUARANTEED_SIZE.getHeight() * 1000);
try {
Size output = deviceEncoders.getSupportedVideoSize(input);
assertTrue(AspectRatio.of(input).matches(output, 0.01F));
} catch (RuntimeException e) {
// The scaled down size happens to be not supported.
// I see no way of testing this easily if we're not sure of supported ranges.
// This depends highly on the alignment since scaling down, while keeping AR,
// can change the alignment and require width / height changes.
}
}
}
@Test
public void testGetSupportedVideoSize_aligns() {
DeviceEncoders deviceEncoders = create();
if (DeviceEncoders.ENABLED) {
Size input = new Size(GUARANTEED_SIZE.getWidth() + 1,
GUARANTEED_SIZE.getHeight() + 1);
Size output = deviceEncoders.getSupportedVideoSize(input);
assertTrue(output.getWidth() <= input.getWidth());
assertTrue(output.getHeight() <= input.getHeight());
}
}
@Test
public void testGetSupportedVideoBitRate_disabled() {
DeviceEncoders.ENABLED = false;
DeviceEncoders deviceEncoders = create();
int input = 1000;
int output = deviceEncoders.getSupportedVideoBitRate(input);
assertEquals(input, output);
}
@Test
public void testGetSupportedVideoBitRate_enabled() {
DeviceEncoders deviceEncoders = create();
if (DeviceEncoders.ENABLED) {
// Ensure it's clamped: we can pass a negative value and check it's >= 0.
int input = -1000;
int output = deviceEncoders.getSupportedVideoBitRate(input);
assertNotEquals(input, output);
assertTrue(output >= 0);
}
}
@Test
public void testGetSupportedAudioBitRate_disabled() {
DeviceEncoders.ENABLED = false;
DeviceEncoders deviceEncoders = create();
int input = 1000;
int output = deviceEncoders.getSupportedAudioBitRate(input);
assertEquals(input, output);
}
@Test
public void testGetSupportedAudioBitRate_enabled() {
DeviceEncoders deviceEncoders = create();
if (DeviceEncoders.ENABLED) {
// Ensure it's clamped: we can pass a negative value and check it's >= 0.
int input = -1000;
int output = deviceEncoders.getSupportedAudioBitRate(input);
assertNotEquals(input, output);
assertTrue(output >= 0);
}
}
@Test
public void testGetSupportedFrameRate_disabled() {
DeviceEncoders.ENABLED = false;
DeviceEncoders deviceEncoders = create();
int input = 1000;
int output = deviceEncoders.getSupportedVideoFrameRate(GUARANTEED_SIZE, input);
assertEquals(input, output);
}
@Test
public void testGetSupportedFrameRate_enabled() {
DeviceEncoders deviceEncoders = create();
if (DeviceEncoders.ENABLED) {
// Ensure it's clamped: we can pass a negative value and check it's >= 0.
int input = -10;
Size inputSize = deviceEncoders.getSupportedVideoSize(GUARANTEED_SIZE);
int output = deviceEncoders.getSupportedVideoFrameRate(inputSize, input);
assertNotEquals(input, output);
assertTrue(output >= 0);
}
}
}

@ -148,18 +148,18 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
public CameraView(@NonNull Context context) { public CameraView(@NonNull Context context) {
super(context, null); super(context, null);
init(context, null); initialize(context, null);
} }
public CameraView(@NonNull Context context, @Nullable AttributeSet attrs) { public CameraView(@NonNull Context context, @Nullable AttributeSet attrs) {
super(context, attrs); super(context, attrs);
init(context, attrs); initialize(context, attrs);
} }
//region Init //region Init
@SuppressWarnings("WrongConstant") @SuppressWarnings("WrongConstant")
private void init(@NonNull Context context, @Nullable AttributeSet attrs) { private void initialize(@NonNull Context context, @Nullable AttributeSet attrs) {
setWillNotDraw(false); setWillNotDraw(false);
TypedArray a = context.getTheme().obtainStyledAttributes(attrs, R.styleable.CameraView, 0, 0); TypedArray a = context.getTheme().obtainStyledAttributes(attrs, R.styleable.CameraView, 0, 0);
ControlParser controls = new ControlParser(context, a); ControlParser controls = new ControlParser(context, a);
@ -1664,6 +1664,12 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
* (for example if the surface is not ready). This is the preview size, rotated to match * (for example if the surface is not ready). This is the preview size, rotated to match
* the output orientation, and cropped to the visible part. * the output orientation, and cropped to the visible part.
* *
* This also includes the {@link #setSnapshotMaxWidth(int)} and
* {@link #setSnapshotMaxHeight(int)} constraints.
*
* This does NOT include any constraints specific to video encoding, which are
* device specific and depend on the capabilities of the device codec.
*
* @return the size of snapshots * @return the size of snapshots
*/ */
@Nullable @Nullable

cameraview/src/main/java/com/otaliastudios/cameraview/filter/BaseFilter.java
@@ -19,7 +19,7 @@ import java.nio.FloatBuffer;
  * to be changed. Most effects can be rendered by simply changing the fragment shader, thus
  * by overriding {@link #getFragmentShader()}.
  *
- * All {@link BaseFilter}s should have a no-op public constructor.
+ * All {@link BaseFilter}s should have a no-arguments public constructor.
  * This class will try to automatically implement {@link #copy()} thanks to this.
  * If your filter implements public parameters, please implement {@link OneParameterFilter}
  * and {@link TwoParameterFilter} to handle them and have them passed automatically to copies.
@@ -226,9 +226,9 @@ public abstract class BaseFilter implements Filter {
         try {
             return getClass().newInstance();
         } catch (IllegalAccessException e) {
-            throw new RuntimeException("Filters should have a public no-op constructor.", e);
+            throw new RuntimeException("Filters should have a public no-arguments constructor.", e);
         } catch (InstantiationException e) {
-            throw new RuntimeException("Filters should have a public no-op constructor.", e);
+            throw new RuntimeException("Filters should have a public no-arguments constructor.", e);
         }
     }
 }

cameraview/src/main/java/com/otaliastudios/cameraview/filter/Filter.java
@@ -19,7 +19,7 @@ import java.io.File;
  * Advanced users can create custom filters using GLES.
  * It is recommended to extend {@link BaseFilter} instead of this class.
  *
- * All {@link Filter}s should have a no-op public constructor.
+ * All {@link Filter}s should have a no-arguments public constructor.
  * This ensures that you can pass the filter class to XML attribute {@code app:cameraFilter},
  * and also helps {@link BaseFilter} automatically make a copy of the filter.
  *
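For context, the constructor contract above is easy to satisfy. Below is a minimal sketch of a conforming filter; the class name is hypothetical, and it assumes BaseFilter's createDefaultFragmentShader() helper:

    // A minimal filter honoring the public no-arguments constructor contract,
    // so it can be referenced from XML (app:cameraFilter) and copied via copy().
    public class PassThroughFilter extends BaseFilter {

        public PassThroughFilter() {
            // public, no arguments: required for automatic copies
        }

        @NonNull
        @Override
        public String getFragmentShader() {
            // Reuse the default pass-through shader from BaseFilter.
            return createDefaultFragmentShader();
        }
    }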

cameraview/src/main/java/com/otaliastudios/cameraview/internal/DeviceEncoders.java (new file)
@@ -0,0 +1,302 @@
package com.otaliastudios.cameraview.internal;
import android.annotation.SuppressLint;
import android.media.MediaCodecInfo;
import android.media.MediaCodecList;
import android.os.Build;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.annotation.VisibleForTesting;
import com.otaliastudios.cameraview.CameraLogger;
import com.otaliastudios.cameraview.size.Size;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
/**
 * Checks the capabilities of device encoders and adjusts parameters to ensure
* that they'll be supported by the final encoder.
* This can choose the encoder in two ways, based on the mode flag:
*
* 1. {@link #MODE_TAKE_FIRST}
*
* Chooses the encoder as the first one that matches the given mime type.
* This is what {@link android.media.MediaCodec#createEncoderByType(String)} does,
* and what {@link android.media.MediaRecorder} also does when recording.
*
* The list is ordered based on the encoder definitions in system/etc/media_codecs.xml,
 * as explained, for example, here: https://source.android.com/devices/media .
* So taking the first means respecting the vendor priorities and should generally be
* a good idea.
*
* About {@link android.media.MediaRecorder}, we know it uses this option from here:
* https://stackoverflow.com/q/57479564/4288782 where all links to source code are shown.
* - StagefrightRecorder (https://android.googlesource.com/platform/frameworks/av/+/master/media/libmediaplayerservice/StagefrightRecorder.cpp#1782)
* - MediaCodecSource (https://android.googlesource.com/platform/frameworks/av/+/master/media/libstagefright/MediaCodecSource.cpp#515)
* - MediaCodecList (https://android.googlesource.com/platform/frameworks/av/+/master/media/libstagefright/MediaCodecList.cpp#322)
*
* To be fair, what {@link android.media.MediaRecorder} does is actually choose the first one
* that configures itself without errors. We currently do not offer this option here. TODO
*
* 2. {@link #MODE_PREFER_HARDWARE}
*
* This takes the list - as ordered by the vendor - and just sorts it such that hardware encoders
* are preferred over software ones. It's questionable whether this is good or not. Some vendors
* might forget to put hardware encoders first in the list, some others might put poor hardware
* encoders on the bottom of the list on purpose.
*/
public class DeviceEncoders {
private final static String TAG = DeviceEncoders.class.getSimpleName();
private final static CameraLogger LOG = CameraLogger.create(TAG);
@VisibleForTesting static boolean ENABLED = Build.VERSION.SDK_INT >= 21;
public final static int MODE_TAKE_FIRST = 0;
public final static int MODE_PREFER_HARDWARE = 1;
@SuppressWarnings("FieldCanBeLocal")
private final MediaCodecInfo mVideoEncoder;
@SuppressWarnings("FieldCanBeLocal")
private final MediaCodecInfo mAudioEncoder;
private final MediaCodecInfo.VideoCapabilities mVideoCapabilities;
private final MediaCodecInfo.AudioCapabilities mAudioCapabilities;
@SuppressLint("NewApi")
public DeviceEncoders(@NonNull String videoType, @NonNull String audioType, int mode) {
// We could still get a list of MediaCodecInfo for API >= 16, but it seems that the APIs
// for querying the availability of a specified MediaFormat were only added in 21 anyway.
if (ENABLED) {
List<MediaCodecInfo> encoders = getDeviceEncoders();
mVideoEncoder = findDeviceEncoder(encoders, videoType, mode);
LOG.i("Enabled. Found video encoder:", mVideoEncoder.getName());
mAudioEncoder = findDeviceEncoder(encoders, audioType, mode);
LOG.i("Enabled. Found audio encoder:", mAudioEncoder.getName());
mVideoCapabilities = mVideoEncoder.getCapabilitiesForType(videoType).getVideoCapabilities();
mAudioCapabilities = mAudioEncoder.getCapabilitiesForType(audioType).getAudioCapabilities();
} else {
mVideoEncoder = null;
mAudioEncoder = null;
mVideoCapabilities = null;
mAudioCapabilities = null;
LOG.i("Disabled.");
}
}
/**
* Collects all the device encoders, which means excluding decoders.
* @return encoders
*/
@NonNull
@SuppressLint("NewApi")
@VisibleForTesting
List<MediaCodecInfo> getDeviceEncoders() {
ArrayList<MediaCodecInfo> results = new ArrayList<>();
MediaCodecInfo[] array = new MediaCodecList(MediaCodecList.REGULAR_CODECS).getCodecInfos();
for (MediaCodecInfo info : array) {
if (info.isEncoder()) results.add(info);
}
return results;
}
/**
* Whether an encoder is a hardware encoder or not. We don't have an API to check this,
* but we can follow what libstagefright does:
* https://android.googlesource.com/platform/frameworks/av/+/master/media/libstagefright/MediaCodecList.cpp#293
*
* @param encoder encoder
* @return true if hardware
*/
@SuppressLint("NewApi")
@VisibleForTesting
boolean isHardwareEncoder(@NonNull String encoder) {
encoder = encoder.toLowerCase();
boolean isSoftwareEncoder = encoder.startsWith("omx.google.")
|| encoder.startsWith("c2.android.")
|| (!encoder.startsWith("omx.") && !encoder.startsWith("c2."));
return !isSoftwareEncoder;
}
/**
* Finds the encoder we'll be using, depending on the given mode flag:
* - {@link #MODE_TAKE_FIRST} will just take the first of the list
* - {@link #MODE_PREFER_HARDWARE} will prefer hardware encoders
* Throws if we find no encoder for this type.
*
* @param encoders encoders
* @param mimeType mime type
* @param mode mode
* @return encoder
*/
@SuppressLint("NewApi")
@NonNull
@VisibleForTesting
MediaCodecInfo findDeviceEncoder(@NonNull List<MediaCodecInfo> encoders, @NonNull String mimeType, int mode) {
ArrayList<MediaCodecInfo> results = new ArrayList<>();
for (MediaCodecInfo encoder : encoders) {
String[] types = encoder.getSupportedTypes();
for (String type : types) {
if (type.equalsIgnoreCase(mimeType)) {
results.add(encoder);
break;
}
}
}
LOG.i("findDeviceEncoder -", "type:", mimeType, "encoders:", results.size());
if (mode == MODE_PREFER_HARDWARE) {
Collections.sort(results, new Comparator<MediaCodecInfo>() {
@Override
public int compare(MediaCodecInfo o1, MediaCodecInfo o2) {
boolean hw1 = isHardwareEncoder(o1.getName());
boolean hw2 = isHardwareEncoder(o2.getName());
if (hw1 && hw2) return 0;
if (hw1) return -1;
if (hw2) return 1;
return 0;
}
});
}
if (results.isEmpty()) {
throw new RuntimeException("No encoders for type:" + mimeType);
}
return results.get(0);
}
/**
* Returns a video size supported by the device encoders.
* Throws if input width or height are out of the supported boundaries.
*
* @param size input size
* @return adjusted size
*/
@SuppressLint("NewApi")
@NonNull
public Size getSupportedVideoSize(@NonNull Size size) {
if (!ENABLED) return size;
int width = size.getWidth();
int height = size.getHeight();
double aspect = (double) width / height;
// If width is too large, scale down, but keep aspect ratio.
if (mVideoCapabilities.getSupportedWidths().getUpper() < width) {
width = mVideoCapabilities.getSupportedWidths().getUpper();
height = (int) Math.round(width / aspect);
}
// If height is too large, scale down, but keep aspect ratio.
if (mVideoCapabilities.getSupportedHeights().getUpper() < height) {
height = mVideoCapabilities.getSupportedHeights().getUpper();
width = (int) Math.round(aspect * height);
}
// Adjust the alignment.
while (width % mVideoCapabilities.getWidthAlignment() != 0) width--;
while (height % mVideoCapabilities.getHeightAlignment() != 0) height--;
// It's still possible that we're BELOW the lower bound.
if (!mVideoCapabilities.getSupportedWidths().contains(width)) {
throw new RuntimeException("Width not supported after adjustment." +
" Desired:" + width +
" Range:" + mVideoCapabilities.getSupportedWidths());
}
if (!mVideoCapabilities.getSupportedHeights().contains(height)) {
throw new RuntimeException("Height not supported after adjustment." +
" Desired:" + height +
" Range:" + mVideoCapabilities.getSupportedHeights());
}
// It's still possible that we're unsupported for other reasons.
if (!mVideoCapabilities.isSizeSupported(width, height)) {
throw new RuntimeException("Size not supported for unknown reason." +
" Might be an aspect ratio issue." +
" Desired size:" + new Size(width, height));
}
Size adjusted = new Size(width, height);
LOG.i("getSupportedVideoSize -", "inputSize:", size, "adjustedSize:", adjusted);
return adjusted;
}
/**
* Returns a video bit rate supported by the device encoders.
* This means adjusting the input bit rate if needed, to match encoder constraints.
*
* @param bitRate input rate
* @return adjusted rate
*/
@SuppressLint("NewApi")
public int getSupportedVideoBitRate(int bitRate) {
if (!ENABLED) return bitRate;
int newBitRate = mVideoCapabilities.getBitrateRange().clamp(bitRate);
LOG.i("getSupportedVideoBitRate -", "inputRate:", bitRate, "adjustedRate:", newBitRate);
return newBitRate;
}
/**
* Returns a video frame rate supported by the device encoders.
* This means adjusting the input frame rate if needed, to match encoder constraints.
*
* @param frameRate input rate
* @return adjusted rate
*/
@SuppressLint("NewApi")
public int getSupportedVideoFrameRate(@NonNull Size size, int frameRate) {
if (!ENABLED) return frameRate;
int newFrameRate = (int) (double) mVideoCapabilities
.getSupportedFrameRatesFor(size.getWidth(), size.getHeight())
.clamp((double) frameRate);
LOG.i("getSupportedVideoFrameRate -", "inputRate:", frameRate, "adjustedRate:", newFrameRate);
return newFrameRate;
}
/**
* Returns an audio bit rate supported by the device encoders.
* This means adjusting the input bit rate if needed, to match encoder constraints.
*
* @param bitRate input rate
* @return adjusted rate
*/
@SuppressLint("NewApi")
public int getSupportedAudioBitRate(int bitRate) {
if (!ENABLED) return bitRate;
int newBitRate = mAudioCapabilities.getBitrateRange().clamp(bitRate);
LOG.i("getSupportedAudioBitRate -", "inputRate:", bitRate, "adjustedRate:", newBitRate);
return newBitRate;
}
// Won't do this for audio sample rate. As far as I remember, the value we're using,
// 44.1kHz, is guaranteed to be available, and it's not configurable.
/**
* Returns the name of the video encoder if we were able to determine one.
* @return encoder name
*/
@SuppressLint("NewApi")
@Nullable
public String getVideoEncoder() {
if (mVideoEncoder != null) {
return mVideoEncoder.getName();
} else {
return null;
}
}
/**
* Returns the name of the audio encoder if we were able to determine one.
* @return encoder name
*/
@SuppressLint("NewApi")
@Nullable
public String getAudioEncoder() {
if (mAudioEncoder != null) {
return mAudioEncoder.getName();
} else {
return null;
}
}
}
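To make the new class concrete, here is a hedged usage sketch; the input values are hypothetical, and the real call sites are in FullVideoRecorder and SnapshotVideoRecorder below:

    // Clamp requested parameters to what the device encoders can handle,
    // before configuring the actual codecs.
    DeviceEncoders encoders = new DeviceEncoders(
            "video/avc", "audio/mp4a-latm", DeviceEncoders.MODE_PREFER_HARDWARE);

    // Each getter is a pass-through when ENABLED is false (API < 21).
    Size size = encoders.getSupportedVideoSize(new Size(1920, 1080)); // may scale down / align
    int videoBitRate = encoders.getSupportedVideoBitRate(8_000_000);  // clamped to codec range
    int audioBitRate = encoders.getSupportedAudioBitRate(128_000);    // clamped to codec range
    int frameRate = encoders.getSupportedVideoFrameRate(size, 30);    // clamped for this size

    // The encoder names (null when disabled) let callers use
    // MediaCodec.createByCodecName(), so the codec that was inspected
    // is the codec that actually runs.
    String videoEncoder = encoders.getVideoEncoder();
    String audioEncoder = encoders.getAudioEncoder();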

cameraview/src/main/java/com/otaliastudios/cameraview/video/FullVideoRecorder.java
@@ -6,6 +6,8 @@ import android.media.MediaRecorder;
 import com.otaliastudios.cameraview.CameraLogger;
 import com.otaliastudios.cameraview.VideoResult;
 import com.otaliastudios.cameraview.controls.Audio;
+import com.otaliastudios.cameraview.controls.VideoCodec;
+import com.otaliastudios.cameraview.internal.DeviceEncoders;
 import com.otaliastudios.cameraview.size.Size;
 
 import androidx.annotation.NonNull;
@@ -45,49 +47,85 @@ public abstract class FullVideoRecorder extends VideoRecorder {
     protected boolean onPrepareMediaRecorder(@NonNull VideoResult.Stub stub, @NonNull MediaRecorder mediaRecorder) {
         mMediaRecorder = mediaRecorder;
-        Size size = stub.rotation % 180 != 0 ? stub.size.flip() : stub.size;
-        if (stub.audio == Audio.ON || stub.audio == Audio.MONO || stub.audio == Audio.STEREO) {
-            // Must be called before setOutputFormat.
+        boolean hasAudio = stub.audio == Audio.ON
+                || stub.audio == Audio.MONO
+                || stub.audio == Audio.STEREO;
+        if (hasAudio) {
             mMediaRecorder.setAudioSource(MediaRecorder.AudioSource.DEFAULT);
         }
         mMediaRecorder.setOutputFormat(mProfile.fileFormat);
-        if (stub.videoFrameRate <= 0) {
-            mMediaRecorder.setVideoFrameRate(mProfile.videoFrameRate);
-            stub.videoFrameRate = mProfile.videoFrameRate;
-        } else {
-            mMediaRecorder.setVideoFrameRate(stub.videoFrameRate);
-        }
-        mMediaRecorder.setVideoSize(size.getWidth(), size.getHeight());
-        switch (stub.videoCodec) {
-            case H_263: mMediaRecorder.setVideoEncoder(MediaRecorder.VideoEncoder.H263); break;
-            case H_264: mMediaRecorder.setVideoEncoder(MediaRecorder.VideoEncoder.H264); break;
-            case DEVICE_DEFAULT: mMediaRecorder.setVideoEncoder(mProfile.videoCodec); break;
-        }
-        if (stub.videoBitRate <= 0) {
-            mMediaRecorder.setVideoEncodingBitRate(mProfile.videoBitRate);
-            stub.videoBitRate = mProfile.videoBitRate;
-        } else {
-            mMediaRecorder.setVideoEncodingBitRate(stub.videoBitRate);
-        }
-        if (stub.audio == Audio.ON || stub.audio == Audio.MONO || stub.audio == Audio.STEREO) {
+
+        // Get the audio mime type
+        // https://android.googlesource.com/platform/frameworks/av/+/master/media/libmediaplayerservice/StagefrightRecorder.cpp#1096
+        // https://github.com/MrAlex94/Waterfox-Old/blob/master/media/libstagefright/frameworks/av/media/libstagefright/MediaDefs.cpp
+        String audioType;
+        switch (mProfile.audioCodec) {
+            case MediaRecorder.AudioEncoder.AMR_NB: audioType = "audio/3gpp"; break;
+            case MediaRecorder.AudioEncoder.AMR_WB: audioType = "audio/amr-wb"; break;
+            case MediaRecorder.AudioEncoder.AAC:
+            case MediaRecorder.AudioEncoder.HE_AAC:
+            case MediaRecorder.AudioEncoder.AAC_ELD: audioType = "audio/mp4a-latm"; break;
+            case MediaRecorder.AudioEncoder.VORBIS: audioType = "audio/vorbis"; break;
+            case MediaRecorder.AudioEncoder.DEFAULT:
+            default: audioType = "audio/3gpp";
+        }
+
+        // Get the video mime type
+        // https://android.googlesource.com/platform/frameworks/av/+/master/media/libmediaplayerservice/StagefrightRecorder.cpp#1650
+        // https://github.com/MrAlex94/Waterfox-Old/blob/master/media/libstagefright/frameworks/av/media/libstagefright/MediaDefs.cpp
+        String videoType;
+        if (stub.videoCodec == VideoCodec.H_264) mProfile.videoCodec = MediaRecorder.VideoEncoder.H264;
+        if (stub.videoCodec == VideoCodec.H_263) mProfile.videoCodec = MediaRecorder.VideoEncoder.H263;
+        switch (mProfile.videoCodec) {
+            case MediaRecorder.VideoEncoder.H263: videoType = "video/3gpp"; break;
+            case MediaRecorder.VideoEncoder.H264: videoType = "video/avc"; break;
+            case MediaRecorder.VideoEncoder.MPEG_4_SP: videoType = "video/mp4v-es"; break;
+            case MediaRecorder.VideoEncoder.VP8: videoType = "video/x-vnd.on2.vp8"; break;
+            case MediaRecorder.VideoEncoder.HEVC: videoType = "video/hevc"; break;
+            case MediaRecorder.VideoEncoder.DEFAULT:
+            default: videoType = "video/avc";
+        }
+
+        // Merge stub and profile
+        stub.videoFrameRate = stub.videoFrameRate > 0 ? stub.videoFrameRate : mProfile.videoFrameRate;
+        stub.videoBitRate = stub.videoBitRate > 0 ? stub.videoBitRate : mProfile.videoBitRate;
+        if (hasAudio) {
+            stub.audioBitRate = stub.audioBitRate > 0 ? stub.audioBitRate : mProfile.audioBitRate;
+        }
+
+        // Check DeviceEncoders support
+        DeviceEncoders encoders = new DeviceEncoders(videoType, audioType, DeviceEncoders.MODE_TAKE_FIRST);
+        boolean flip = stub.rotation % 180 != 0;
+        if (flip) stub.size = stub.size.flip();
+        stub.size = encoders.getSupportedVideoSize(stub.size);
+        stub.videoBitRate = encoders.getSupportedVideoBitRate(stub.videoBitRate);
+        stub.audioBitRate = encoders.getSupportedAudioBitRate(stub.audioBitRate);
+        stub.videoFrameRate = encoders.getSupportedVideoFrameRate(stub.size, stub.videoFrameRate);
+        if (flip) stub.size = stub.size.flip();
+
+        // Set video params
+        mMediaRecorder.setVideoSize(
+                flip ? stub.size.getHeight() : stub.size.getWidth(),
+                flip ? stub.size.getWidth() : stub.size.getHeight());
+        mMediaRecorder.setVideoFrameRate(stub.videoFrameRate);
+        mMediaRecorder.setVideoEncoder(mProfile.videoCodec);
+        mMediaRecorder.setVideoEncodingBitRate(stub.videoBitRate);
+
+        // Set audio params
+        if (hasAudio) {
             if (stub.audio == Audio.ON) {
                 mMediaRecorder.setAudioChannels(mProfile.audioChannels);
             } else if (stub.audio == Audio.MONO) {
                 mMediaRecorder.setAudioChannels(1);
-            } else //noinspection ConstantConditions
-                if (stub.audio == Audio.STEREO) {
+            } else if (stub.audio == Audio.STEREO) {
                 mMediaRecorder.setAudioChannels(2);
             }
             mMediaRecorder.setAudioSamplingRate(mProfile.audioSampleRate);
             mMediaRecorder.setAudioEncoder(mProfile.audioCodec);
-            if (stub.audioBitRate <= 0) {
-                mMediaRecorder.setAudioEncodingBitRate(mProfile.audioBitRate);
-                stub.audioBitRate = mProfile.audioBitRate;
-            } else {
-                mMediaRecorder.setAudioEncodingBitRate(stub.audioBitRate);
-            }
+            mMediaRecorder.setAudioEncodingBitRate(stub.audioBitRate);
         }
+
+        // Set other params
         if (stub.location != null) {
             mMediaRecorder.setLocation(
                     (float) stub.location.getLatitude(),
@@ -113,6 +151,7 @@ public abstract class FullVideoRecorder extends VideoRecorder {
             }
         });
 
+        // Prepare the Recorder
         try {
             mMediaRecorder.prepare();
             mMediaRecorderPrepared = true;
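A quick hedged walk-through of the merge-then-clamp flow above, with hypothetical numbers (a CamcorderProfile suggesting 17 Mbps on a device whose first AVC encoder tops out at 10 Mbps):

    // stub.videoBitRate <= 0, so the profile value wins first...
    stub.videoBitRate = stub.videoBitRate > 0 ? stub.videoBitRate : mProfile.videoBitRate; // 17_000_000
    // ...then DeviceEncoders clamps it to the encoder's supported range.
    DeviceEncoders encoders = new DeviceEncoders(videoType, audioType, DeviceEncoders.MODE_TAKE_FIRST);
    stub.videoBitRate = encoders.getSupportedVideoBitRate(stub.videoBitRate); // -> 10_000_000
    mMediaRecorder.setVideoEncodingBitRate(stub.videoBitRate);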

cameraview/src/main/java/com/otaliastudios/cameraview/video/SnapshotVideoRecorder.java
@@ -5,6 +5,7 @@ import android.opengl.EGL14;
 import android.os.Build;
 
 import com.otaliastudios.cameraview.CameraLogger;
+import com.otaliastudios.cameraview.internal.DeviceEncoders;
 import com.otaliastudios.cameraview.overlay.Overlay;
 import com.otaliastudios.cameraview.VideoResult;
 import com.otaliastudios.cameraview.controls.Audio;
@@ -121,26 +122,31 @@ public class SnapshotVideoRecorder extends VideoRecorder implements RendererFram
             if (mResult.videoBitRate <= 0) mResult.videoBitRate = estimateVideoBitRate(mResult.size, mResult.videoFrameRate);
             if (mResult.audioBitRate <= 0) mResult.audioBitRate = DEFAULT_AUDIO_BITRATE;
 
-            // Video. Ensure width and height are divisible by 2, as I have read somewhere.
-            Size size = mResult.size;
-            int width = size.getWidth();
-            int height = size.getHeight();
-            width = width % 2 == 0 ? width : width + 1;
-            height = height % 2 == 0 ? height : height + 1;
-            String type = "";
+            // Define mime types
+            String videoType = "";
             switch (mResult.videoCodec) {
-                case H_263: type = "video/3gpp"; break; // MediaFormat.MIMETYPE_VIDEO_H263;
-                case H_264: type = "video/avc"; break; // MediaFormat.MIMETYPE_VIDEO_AVC:
-                case DEVICE_DEFAULT: type = "video/avc"; break;
+                case H_263: videoType = "video/3gpp"; break; // MediaFormat.MIMETYPE_VIDEO_H263;
+                case H_264: videoType = "video/avc"; break; // MediaFormat.MIMETYPE_VIDEO_AVC:
+                case DEVICE_DEFAULT: videoType = "video/avc"; break;
             }
-            LOG.w("Creating frame encoder. Rotation:", mResult.rotation);
+            String audioType = "audio/mp4a-latm";
+
+            // Check the availability of values
+            DeviceEncoders deviceEncoders = new DeviceEncoders(videoType, audioType, DeviceEncoders.MODE_PREFER_HARDWARE);
+            mResult.size = deviceEncoders.getSupportedVideoSize(mResult.size);
+            mResult.videoBitRate = deviceEncoders.getSupportedVideoBitRate(mResult.videoBitRate);
+            mResult.audioBitRate = deviceEncoders.getSupportedAudioBitRate(mResult.audioBitRate);
+            mResult.videoFrameRate = deviceEncoders.getSupportedVideoFrameRate(mResult.size, mResult.videoFrameRate);
+
+            // Video
             TextureConfig videoConfig = new TextureConfig();
-            videoConfig.width = width;
-            videoConfig.height = height;
+            videoConfig.width = mResult.size.getWidth();
+            videoConfig.height = mResult.size.getHeight();
             videoConfig.bitRate = mResult.videoBitRate;
             videoConfig.frameRate = mResult.videoFrameRate;
             videoConfig.rotation = mResult.rotation;
-            videoConfig.mimeType = type;
+            videoConfig.mimeType = videoType;
+            videoConfig.encoder = deviceEncoders.getVideoEncoder();
             videoConfig.textureId = mTextureId;
             videoConfig.scaleX = scaleX;
             videoConfig.scaleY = scaleY;
@@ -162,12 +168,17 @@ public class SnapshotVideoRecorder extends VideoRecorder implements RendererFram
                 audioConfig.bitRate = mResult.audioBitRate;
                 if (mResult.audio == Audio.MONO) audioConfig.channels = 1;
                 if (mResult.audio == Audio.STEREO) audioConfig.channels = 2;
+                audioConfig.encoder = deviceEncoders.getAudioEncoder();
                 audioEncoder = new AudioMediaEncoder(audioConfig);
             }
 
             // Engine
-            mEncoderEngine = new MediaEncoderEngine(mResult.file, videoEncoder, audioEncoder,
-                    mResult.maxDuration, mResult.maxSize, SnapshotVideoRecorder.this);
+            mEncoderEngine = new MediaEncoderEngine(mResult.file,
+                    videoEncoder,
+                    audioEncoder,
+                    mResult.maxDuration,
+                    mResult.maxSize,
+                    SnapshotVideoRecorder.this);
             mEncoderEngine.notify(TextureMediaEncoder.FILTER_EVENT, mCurrentFilter);
             mEncoderEngine.start();
             mResult.rotation = 0; // We will rotate the result instead.

cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/AudioConfig.java
@@ -14,9 +14,10 @@ public class AudioConfig {
     // Configurable options
     public int bitRate; // ENCODED bit rate
     public int channels = 1;
+    public String encoder;
+    public String mimeType = "audio/mp4a-latm";
 
     // Not configurable options (for now)
-    final String mimeType = "audio/mp4a-latm";
     final int encoding = AudioFormat.ENCODING_PCM_16BIT; // Determines the sampleSizePerChannel
     // The 44.1KHz frequency is the only setting guaranteed to be available on all devices.
     final int samplingFrequency = 44100; // samples/sec
@@ -28,6 +29,8 @@ public class AudioConfig {
         AudioConfig config = new AudioConfig();
         config.bitRate = this.bitRate;
         config.channels = this.channels;
+        config.encoder = this.encoder;
+        config.mimeType = mimeType;
         return config;
     }

cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/AudioMediaEncoder.java
@@ -71,7 +71,11 @@ public class AudioMediaEncoder extends MediaEncoder {
         audioFormat.setInteger(MediaFormat.KEY_CHANNEL_MASK, mConfig.audioFormatChannels());
         audioFormat.setInteger(MediaFormat.KEY_BIT_RATE, mConfig.bitRate);
         try {
-            mMediaCodec = MediaCodec.createEncoderByType(mConfig.mimeType);
+            if (mConfig.encoder != null) {
+                mMediaCodec = MediaCodec.createByCodecName(mConfig.encoder);
+            } else {
+                mMediaCodec = MediaCodec.createEncoderByType(mConfig.mimeType);
+            }
         } catch (IOException e) {
             throw new RuntimeException(e);
         }

cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/VideoConfig.java
@@ -13,6 +13,7 @@ public class VideoConfig {
     public int frameRate;
     public int rotation;
     public String mimeType;
+    public String encoder;
 
     protected <C extends VideoConfig> void copy(@NonNull C output) {
         output.width = this.width;
@@ -21,5 +22,6 @@ public class VideoConfig {
         output.frameRate = this.frameRate;
         output.rotation = this.rotation;
         output.mimeType = this.mimeType;
+        output.encoder = this.encoder;
     }
 }

cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/VideoMediaEncoder.java
@@ -67,7 +67,11 @@ abstract class VideoMediaEncoder<C extends VideoConfig> extends MediaEncoder {
         format.setInteger("rotation-degrees", mConfig.rotation);
         try {
-            mMediaCodec = MediaCodec.createEncoderByType(mConfig.mimeType);
+            if (mConfig.encoder != null) {
+                mMediaCodec = MediaCodec.createByCodecName(mConfig.encoder);
+            } else {
+                mMediaCodec = MediaCodec.createEncoderByType(mConfig.mimeType);
+            }
         } catch (IOException e) {
             throw new RuntimeException(e);
         }

docs/_posts/2019-02-24-snapshot-size.md
@@ -42,6 +42,17 @@ This is very useful as it decouples the snapshot size logic from the preview. By
 you can have a pleasant, good looking preview stream, while still capturing fast, low-res snapshots
 with no issues.
 
+### Video Codec requirements
+
+When taking video snapshots, the video codec that the device provides might impose extra constraints, like:
+
+- width / height alignment
+- maximum width or height
+
+CameraView will try to read these requirements and apply them, which can result in video snapshots
+that are smaller than you would expect, or with a **very slightly** different aspect ratio.
+
 ### XML Attributes
 
 ```xml
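As a hedged illustration of the alignment point above (the numbers are hypothetical), this mirrors the rounding loops in DeviceEncoders.getSupportedVideoSize():

    // A 1080x1921 snapshot on a codec requiring width/height alignment of 2:
    int width = 1080, height = 1921;
    while (width % 2 != 0) width--;   // stays 1080
    while (height % 2 != 0) height--; // becomes 1920
    // Result: 1080x1920, a very slightly different aspect ratio than requested.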
