Enforce 100 character limits

Branch: pull/588/head
Author: Mattia Iavarone, 6 years ago
Parent: 77f1574b1c
Commit: f668d8aae1
88 changed files (number of changed lines in parentheses):

  1. cameraview/src/main/java/com/otaliastudios/cameraview/BitmapCallback.java (2)
  2. cameraview/src/main/java/com/otaliastudios/cameraview/CameraListener.java (13)
  3. cameraview/src/main/java/com/otaliastudios/cameraview/CameraLogger.java (10)
  4. cameraview/src/main/java/com/otaliastudios/cameraview/CameraOptions.java (47)
  5. cameraview/src/main/java/com/otaliastudios/cameraview/CameraUtils.java (45)
  6. cameraview/src/main/java/com/otaliastudios/cameraview/CameraView.java (121)
  7. cameraview/src/main/java/com/otaliastudios/cameraview/PictureResult.java (3)
  8. cameraview/src/main/java/com/otaliastudios/cameraview/VideoResult.java (3)
  9. cameraview/src/main/java/com/otaliastudios/cameraview/controls/ControlParser.java (23)
  10. cameraview/src/main/java/com/otaliastudios/cameraview/engine/Camera1Engine.java (86)
  11. cameraview/src/main/java/com/otaliastudios/cameraview/engine/Camera2Engine.java (191)
  12. cameraview/src/main/java/com/otaliastudios/cameraview/engine/CameraEngine.java (176)
  13. cameraview/src/main/java/com/otaliastudios/cameraview/engine/LogAction.java (6)
  14. cameraview/src/main/java/com/otaliastudios/cameraview/engine/Step.java (20)
  15. cameraview/src/main/java/com/otaliastudios/cameraview/engine/action/Action.java (21)
  16. cameraview/src/main/java/com/otaliastudios/cameraview/engine/action/ActionHolder.java (3)
  17. cameraview/src/main/java/com/otaliastudios/cameraview/engine/action/ActionWrapper.java (8)
  18. cameraview/src/main/java/com/otaliastudios/cameraview/engine/action/BaseAction.java (8)
  19. cameraview/src/main/java/com/otaliastudios/cameraview/engine/action/SequenceAction.java (6)
  20. cameraview/src/main/java/com/otaliastudios/cameraview/engine/action/TogetherAction.java (6)
  21. cameraview/src/main/java/com/otaliastudios/cameraview/engine/lock/ExposureLock.java (13)
  22. cameraview/src/main/java/com/otaliastudios/cameraview/engine/lock/FocusLock.java (13)
  23. cameraview/src/main/java/com/otaliastudios/cameraview/engine/lock/WhiteBalanceLock.java (11)
  24. cameraview/src/main/java/com/otaliastudios/cameraview/engine/meter/BaseReset.java (3)
  25. cameraview/src/main/java/com/otaliastudios/cameraview/engine/meter/ExposureMeter.java (13)
  26. cameraview/src/main/java/com/otaliastudios/cameraview/engine/meter/ExposureReset.java (15)
  27. cameraview/src/main/java/com/otaliastudios/cameraview/engine/meter/FocusMeter.java (3)
  28. cameraview/src/main/java/com/otaliastudios/cameraview/engine/meter/FocusReset.java (3)
  29. cameraview/src/main/java/com/otaliastudios/cameraview/engine/meter/MeterAction.java (29)
  30. cameraview/src/main/java/com/otaliastudios/cameraview/engine/meter/WhiteBalanceMeter.java (13)
  31. cameraview/src/main/java/com/otaliastudios/cameraview/engine/meter/WhiteBalanceReset.java (6)
  32. cameraview/src/main/java/com/otaliastudios/cameraview/engine/offset/Axis.java (2)
  33. cameraview/src/main/java/com/otaliastudios/cameraview/filter/BaseFilter.java (41)
  34. cameraview/src/main/java/com/otaliastudios/cameraview/filter/MultiFilter.java (21)
  35. cameraview/src/main/java/com/otaliastudios/cameraview/filters/AutoFixFilter.java (3)
  36. cameraview/src/main/java/com/otaliastudios/cameraview/filters/GammaFilter.java (3)
  37. cameraview/src/main/java/com/otaliastudios/cameraview/filters/GrainFilter.java (20)
  38. cameraview/src/main/java/com/otaliastudios/cameraview/filters/SaturationFilter.java (9)
  39. cameraview/src/main/java/com/otaliastudios/cameraview/filters/VignetteFilter.java (3)
  40. cameraview/src/main/java/com/otaliastudios/cameraview/frame/Frame.java (4)
  41. cameraview/src/main/java/com/otaliastudios/cameraview/frame/FrameManager.java (23)
  42. cameraview/src/main/java/com/otaliastudios/cameraview/gesture/Gesture.java (3)
  43. cameraview/src/main/java/com/otaliastudios/cameraview/gesture/GestureAction.java (5)
  44. cameraview/src/main/java/com/otaliastudios/cameraview/gesture/GestureParser.java (16)
  45. cameraview/src/main/java/com/otaliastudios/cameraview/gesture/PinchGestureFinder.java (3)
  46. cameraview/src/main/java/com/otaliastudios/cameraview/gesture/ScrollGestureFinder.java (11)
  47. cameraview/src/main/java/com/otaliastudios/cameraview/gesture/TapGestureFinder.java (3)
  48. cameraview/src/main/java/com/otaliastudios/cameraview/internal/DeviceEncoders.java (22)
  49. cameraview/src/main/java/com/otaliastudios/cameraview/internal/GlUtils.java (6)
  50. cameraview/src/main/java/com/otaliastudios/cameraview/internal/GridLinesLayout.java (3)
  51. cameraview/src/main/java/com/otaliastudios/cameraview/internal/Issue514Workaround.java (28)
  52. cameraview/src/main/java/com/otaliastudios/cameraview/internal/egl/EglBaseSurface.java (3)
  53. cameraview/src/main/java/com/otaliastudios/cameraview/internal/egl/EglCore.java (7)
  54. cameraview/src/main/java/com/otaliastudios/cameraview/internal/egl/EglViewport.java (15)
  55. cameraview/src/main/java/com/otaliastudios/cameraview/internal/egl/EglWindowSurface.java (21)
  56. cameraview/src/main/java/com/otaliastudios/cameraview/internal/utils/CamcorderProfiles.java (9)
  57. cameraview/src/main/java/com/otaliastudios/cameraview/internal/utils/ImageHelper.java (3)
  58. cameraview/src/main/java/com/otaliastudios/cameraview/internal/utils/OrientationHelper.java (10)
  59. cameraview/src/main/java/com/otaliastudios/cameraview/internal/utils/Pool.java (15)
  60. cameraview/src/main/java/com/otaliastudios/cameraview/internal/utils/RotationHelper.java (4)
  61. cameraview/src/main/java/com/otaliastudios/cameraview/internal/utils/WorkerHandler.java (9)
  62. cameraview/src/main/java/com/otaliastudios/cameraview/markers/AutoFocusMarker.java (4)
  63. cameraview/src/main/java/com/otaliastudios/cameraview/markers/DefaultAutoFocusMarker.java (13)
  64. cameraview/src/main/java/com/otaliastudios/cameraview/overlay/OverlayLayout.java (9)
  65. cameraview/src/main/java/com/otaliastudios/cameraview/picture/Full1PictureRecorder.java (4)
  66. cameraview/src/main/java/com/otaliastudios/cameraview/picture/Full2PictureRecorder.java (6)
  67. cameraview/src/main/java/com/otaliastudios/cameraview/picture/PictureRecorder.java (3)
  68. cameraview/src/main/java/com/otaliastudios/cameraview/picture/Snapshot1PictureRecorder.java (6)
  69. cameraview/src/main/java/com/otaliastudios/cameraview/picture/Snapshot2PictureRecorder.java (31)
  70. cameraview/src/main/java/com/otaliastudios/cameraview/picture/SnapshotGlPictureRecorder.java (19)
  71. cameraview/src/main/java/com/otaliastudios/cameraview/preview/CameraPreview.java (11)
  72. cameraview/src/main/java/com/otaliastudios/cameraview/preview/GlCameraPreview.java (59)
  73. cameraview/src/main/java/com/otaliastudios/cameraview/preview/SurfaceCameraPreview.java (15)
  74. cameraview/src/main/java/com/otaliastudios/cameraview/preview/TextureCameraPreview.java (7)
  75. cameraview/src/main/java/com/otaliastudios/cameraview/size/AspectRatio.java (3)
  76. cameraview/src/main/java/com/otaliastudios/cameraview/size/SizeSelectorParser.java (72)
  77. cameraview/src/main/java/com/otaliastudios/cameraview/video/Full1VideoRecorder.java (3)
  78. cameraview/src/main/java/com/otaliastudios/cameraview/video/Full2VideoRecorder.java (10)
  79. cameraview/src/main/java/com/otaliastudios/cameraview/video/FullVideoRecorder.java (25)
  80. cameraview/src/main/java/com/otaliastudios/cameraview/video/SnapshotVideoRecorder.java (22)
  81. cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/AudioConfig.java (11)
  82. cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/AudioMediaEncoder.java (73)
  83. cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/AudioTimestamp.java (4)
  84. cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/MediaCodecBuffers.java (4)
  85. cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/MediaEncoder.java (42)
  86. cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/MediaEncoderEngine.java (32)
  87. cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/TextureMediaEncoder.java (27)
  88. cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/VideoMediaEncoder.java (18)
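All of the diffs below apply the same convention: any statement longer than 100 characters is split, with parameter lists and call arguments wrapped onto continuation lines. A schematic before/after sketch (the method name is hypothetical, not taken from the commit):

import android.graphics.PointF;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;

abstract class WrapExample {
    // Before: a single declaration longer than 100 characters.
    // public void onExample(float newValue, @NonNull float[] bounds, @Nullable PointF[] fingers) { }

    // After: one parameter per continuation line, aligned with the first parameter,
    // so every line stays within the 100 character limit.
    public void onExample(float newValue,
                          @NonNull float[] bounds,
                          @Nullable PointF[] fingers) { }
}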

@ -10,7 +10,7 @@ import androidx.annotation.UiThread;
public interface BitmapCallback {
/**
* Notifies that the bitmap was succesfully decoded.
* Notifies that the bitmap was successfully decoded.
* This is run on the UI thread.
* Returns a null object if a {@link OutOfMemoryError} was encountered.
*

@ -48,8 +48,9 @@ public abstract class CameraListener {
* Notifies that a picture previously captured with {@link CameraView#takePicture()}
* or {@link CameraView#takePictureSnapshot()} is ready to be shown or saved to file.
*
* If planning to show a bitmap, you can use {@link PictureResult#toBitmap(int, int, BitmapCallback)}
* to decode the byte array taking care about orientation and threading.
* If planning to show a bitmap, you can use
* {@link PictureResult#toBitmap(int, int, BitmapCallback)} to decode the byte array
* taking care about orientation and threading.
*
* @param result captured picture
*/
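For context, a minimal usage sketch of the listener API documented above. The names addCameraListener, onBitmapReady, camera and imageView are assumed (from the library's public API and a hypothetical host Activity); they are not part of this diff.

camera.addCameraListener(new CameraListener() {
    @Override
    public void onPictureTaken(@NonNull PictureResult result) {
        // Decoding runs on a worker thread; the callback is delivered on the UI thread
        // and may receive null if an OutOfMemoryError was encountered.
        result.toBitmap(1000, 1000, new BitmapCallback() {
            @Override
            public void onBitmapReady(@Nullable Bitmap bitmap) {
                if (bitmap != null) imageView.setImageBitmap(bitmap);
            }
        });
    }
});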
@ -114,7 +115,9 @@ public abstract class CameraListener {
* @param fingers finger positions that caused the event, null if not caused by touch
*/
@UiThread
public void onZoomChanged(float newValue, @NonNull float[] bounds, @Nullable PointF[] fingers) { }
public void onZoomChanged(float newValue,
@NonNull float[] bounds,
@Nullable PointF[] fingers) { }
/**
@ -126,7 +129,9 @@ public abstract class CameraListener {
* @param fingers finger positions that caused the event, null if not caused by touch
*/
@UiThread
public void onExposureCorrectionChanged(float newValue, @NonNull float[] bounds, @Nullable PointF[] fingers) { }
public void onExposureCorrectionChanged(float newValue,
@NonNull float[] bounds,
@Nullable PointF[] fingers) { }
/**

@ -49,7 +49,10 @@ public final class CameraLogger {
* @param message the log message
* @param throwable an optional throwable
*/
void log(@LogLevel int level, @NonNull String tag, @NonNull String message, @Nullable Throwable throwable);
void log(@LogLevel int level,
@NonNull String tag,
@NonNull String message,
@Nullable Throwable throwable);
}
@VisibleForTesting static String lastMessage;
@ -60,7 +63,10 @@ public final class CameraLogger {
@VisibleForTesting static Logger sAndroidLogger = new Logger() {
@Override
public void log(int level, @NonNull String tag, @NonNull String message, @Nullable Throwable throwable) {
public void log(int level,
@NonNull String tag,
@NonNull String message,
@Nullable Throwable throwable) {
switch (level) {
case LEVEL_VERBOSE: Log.v(tag, message, throwable); break;
case LEVEL_INFO: Log.i(tag, message, throwable); break;
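As a usage note, a minimal sketch of a custom logger implementing the wrapped Logger signature above. CameraLogger.registerLogger(...) is assumed from the library's public API and is not part of this diff.

CameraLogger.registerLogger(new CameraLogger.Logger() {
    @Override
    public void log(int level,
                    @NonNull String tag,
                    @NonNull String message,
                    @Nullable Throwable throwable) {
        // Forward to any custom sink; here, plain android.util.Log is used.
        Log.println(Log.INFO, tag, message + (throwable != null ? ": " + throwable : ""));
    }
});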

@ -107,7 +107,8 @@ public class CameraOptions {
zoomSupported = params.isZoomSupported();
// autofocus
autoFocusSupported = params.getSupportedFocusModes().contains(Camera.Parameters.FOCUS_MODE_AUTO);
autoFocusSupported = params.getSupportedFocusModes()
.contains(Camera.Parameters.FOCUS_MODE_AUTO);
// Exposure correction
float step = params.getExposureCompensationStep();
@ -135,7 +136,8 @@ public class CameraOptions {
supportedVideoAspectRatio.add(AspectRatio.of(width, height));
}
} else {
// StackOverflow threads seems to agree that if getSupportedVideoSizes is null, previews can be used.
// StackOverflow threads seems to agree that if getSupportedVideoSizes is null,
// previews can be used.
List<Camera.Size> fallback = params.getSupportedPreviewSizes();
for (Camera.Size size : fallback) {
int width = flipSizes ? size.height : size.width;
@ -148,13 +150,16 @@ public class CameraOptions {
// Camera2Engine constructor.
@RequiresApi(Build.VERSION_CODES.LOLLIPOP)
public CameraOptions(@NonNull CameraManager manager, @NonNull String cameraId, boolean flipSizes) throws CameraAccessException {
public CameraOptions(@NonNull CameraManager manager,
@NonNull String cameraId,
boolean flipSizes) throws CameraAccessException {
Camera2Mapper mapper = Camera2Mapper.get();
CameraCharacteristics cameraCharacteristics = manager.getCameraCharacteristics(cameraId);
// Facing
for (String cameraId1 : manager.getCameraIdList()) {
CameraCharacteristics cameraCharacteristics1 = manager.getCameraCharacteristics(cameraId1);
CameraCharacteristics cameraCharacteristics1 = manager
.getCameraCharacteristics(cameraId1);
Integer cameraFacing = cameraCharacteristics1.get(CameraCharacteristics.LENS_FACING);
if (cameraFacing != null) {
Facing value = mapper.unmapFacing(cameraFacing);
@ -163,7 +168,8 @@ public class CameraOptions {
}
// WB
int[] awbModes = cameraCharacteristics.get(CameraCharacteristics.CONTROL_AWB_AVAILABLE_MODES);
int[] awbModes = cameraCharacteristics.get(
CameraCharacteristics.CONTROL_AWB_AVAILABLE_MODES);
//noinspection ConstantConditions
for (int awbMode : awbModes) {
WhiteBalance value = mapper.unmapWhiteBalance(awbMode);
@ -174,7 +180,8 @@ public class CameraOptions {
supportedFlash.add(Flash.OFF);
Boolean hasFlash = cameraCharacteristics.get(CameraCharacteristics.FLASH_INFO_AVAILABLE);
if (hasFlash != null && hasFlash) {
int[] aeModes = cameraCharacteristics.get(CameraCharacteristics.CONTROL_AE_AVAILABLE_MODES);
int[] aeModes = cameraCharacteristics.get(
CameraCharacteristics.CONTROL_AE_AVAILABLE_MODES);
//noinspection ConstantConditions
for (int aeMode : aeModes) {
Set<Flash> flashes = mapper.unmapFlash(aeMode);
@ -184,7 +191,8 @@ public class CameraOptions {
// HDR
supportedHdr.add(Hdr.OFF);
int[] sceneModes = cameraCharacteristics.get(CameraCharacteristics.CONTROL_AVAILABLE_SCENE_MODES);
int[] sceneModes = cameraCharacteristics.get(
CameraCharacteristics.CONTROL_AVAILABLE_SCENE_MODES);
//noinspection ConstantConditions
for (int sceneMode : sceneModes) {
Hdr value = mapper.unmapHdr(sceneMode);
@ -192,7 +200,8 @@ public class CameraOptions {
}
// Zoom
Float maxZoom = cameraCharacteristics.get(CameraCharacteristics.SCALER_AVAILABLE_MAX_DIGITAL_ZOOM);
Float maxZoom = cameraCharacteristics.get(
CameraCharacteristics.SCALER_AVAILABLE_MAX_DIGITAL_ZOOM);
if(maxZoom != null) {
zoomSupported = maxZoom > 1;
}
@ -205,24 +214,31 @@ public class CameraOptions {
// What really matters in my opinion is the availability of regions.
Integer afRegions = cameraCharacteristics.get(CameraCharacteristics.CONTROL_MAX_REGIONS_AF);
Integer aeRegions = cameraCharacteristics.get(CameraCharacteristics.CONTROL_MAX_REGIONS_AE);
Integer awbRegions = cameraCharacteristics.get(CameraCharacteristics.CONTROL_MAX_REGIONS_AWB);
Integer awbRegions = cameraCharacteristics.get(
CameraCharacteristics.CONTROL_MAX_REGIONS_AWB);
autoFocusSupported = (afRegions != null && afRegions > 0)
|| (aeRegions != null && aeRegions > 0)
|| (awbRegions != null && awbRegions > 0);
// Exposure correction
Range<Integer> exposureRange = cameraCharacteristics.get(CameraCharacteristics.CONTROL_AE_COMPENSATION_RANGE);
Rational exposureStep = cameraCharacteristics.get(CameraCharacteristics.CONTROL_AE_COMPENSATION_STEP);
Range<Integer> exposureRange = cameraCharacteristics.get(
CameraCharacteristics.CONTROL_AE_COMPENSATION_RANGE);
Rational exposureStep = cameraCharacteristics.get(
CameraCharacteristics.CONTROL_AE_COMPENSATION_STEP);
if (exposureRange != null && exposureStep != null && exposureStep.floatValue() != 0) {
exposureCorrectionMinValue = exposureRange.getLower() / exposureStep.floatValue();
exposureCorrectionMaxValue = exposureRange.getUpper() / exposureStep.floatValue();
}
exposureCorrectionSupported = exposureCorrectionMinValue != 0 && exposureCorrectionMaxValue != 0;
exposureCorrectionSupported = exposureCorrectionMinValue != 0
&& exposureCorrectionMaxValue != 0;
// Picture Sizes
StreamConfigurationMap streamMap = cameraCharacteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
if (streamMap == null) throw new RuntimeException("StreamConfigurationMap is null. Should not happen.");
StreamConfigurationMap streamMap = cameraCharacteristics.get(
CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
if (streamMap == null) {
throw new RuntimeException("StreamConfigurationMap is null. Should not happen.");
}
android.util.Size[] psizes = streamMap.getOutputSizes(ImageFormat.JPEG);
for (android.util.Size size : psizes) {
int width = flipSizes ? size.getHeight() : size.getWidth();
@ -238,7 +254,8 @@ public class CameraOptions {
Size videoMaxSize = new Size(profile.videoFrameWidth, profile.videoFrameHeight);
android.util.Size[] vsizes = streamMap.getOutputSizes(MediaRecorder.class);
for (android.util.Size size : vsizes) {
if (size.getWidth() <= videoMaxSize.getWidth() && size.getHeight() <= videoMaxSize.getHeight()) {
if (size.getWidth() <= videoMaxSize.getWidth()
&& size.getHeight() <= videoMaxSize.getHeight()) {
int width = flipSizes ? size.getHeight() : size.getWidth();
int height = flipSizes ? size.getWidth() : size.getHeight();
supportedVideoSizes.add(new Size(width, height));

@ -108,7 +108,9 @@ public class CameraUtils {
* @param callback a callback
*/
@SuppressWarnings("WeakerAccess")
public static void writeToFile(@NonNull final byte[] data, @NonNull final File file, @NonNull final FileCallback callback) {
public static void writeToFile(@NonNull final byte[] data,
@NonNull final File file,
@NonNull final FileCallback callback) {
final Handler ui = new Handler();
WorkerHandler.execute(new Runnable() {
@Override
@ -149,7 +151,8 @@ public class CameraUtils {
* @param callback a callback to be notified
*/
@SuppressWarnings("WeakerAccess")
public static void decodeBitmap(@NonNull final byte[] source, @NonNull final BitmapCallback callback) {
public static void decodeBitmap(@NonNull final byte[] source,
@NonNull final BitmapCallback callback) {
decodeBitmap(source, Integer.MAX_VALUE, Integer.MAX_VALUE, callback);
}
@ -167,7 +170,10 @@ public class CameraUtils {
* @param callback a callback to be notified
*/
@SuppressWarnings("WeakerAccess")
public static void decodeBitmap(@NonNull final byte[] source, final int maxWidth, final int maxHeight, @NonNull final BitmapCallback callback) {
public static void decodeBitmap(@NonNull final byte[] source,
final int maxWidth,
final int maxHeight,
@NonNull final BitmapCallback callback) {
decodeBitmap(source, maxWidth, maxHeight, new BitmapFactory.Options(), callback);
}
@ -186,12 +192,20 @@ public class CameraUtils {
* @param callback a callback to be notified
*/
@SuppressWarnings("WeakerAccess")
public static void decodeBitmap(@NonNull final byte[] source, final int maxWidth, final int maxHeight, @NonNull final BitmapFactory.Options options, @NonNull final BitmapCallback callback) {
public static void decodeBitmap(@NonNull final byte[] source,
final int maxWidth,
final int maxHeight,
@NonNull final BitmapFactory.Options options,
@NonNull final BitmapCallback callback) {
decodeBitmap(source, maxWidth, maxHeight, options, -1, callback);
}
@SuppressWarnings("WeakerAccess")
static void decodeBitmap(@NonNull final byte[] source, final int maxWidth, final int maxHeight, @NonNull final BitmapFactory.Options options, final int rotation, @NonNull final BitmapCallback callback) {
static void decodeBitmap(@NonNull final byte[] source,
final int maxWidth,
final int maxHeight,
@NonNull final BitmapFactory.Options options,
final int rotation,
@NonNull final BitmapCallback callback) {
final Handler ui = new Handler();
WorkerHandler.execute(new Runnable() {
@Override
@ -239,10 +253,13 @@ public class CameraUtils {
* @param options the options to be passed to decodeByteArray
* @return decoded bitmap or null if error is encountered
*/
@SuppressWarnings({"SuspiciousNameCombination", "WeakerAccess"})
@SuppressWarnings("WeakerAccess")
@Nullable
@WorkerThread
public static Bitmap decodeBitmap(@NonNull byte[] source, int maxWidth, int maxHeight, @NonNull BitmapFactory.Options options) {
public static Bitmap decodeBitmap(@NonNull byte[] source,
int maxWidth,
int maxHeight,
@NonNull BitmapFactory.Options options) {
return decodeBitmap(source, maxWidth, maxHeight, options, -1);
}
@ -250,7 +267,11 @@ public class CameraUtils {
// Ignores flipping, but it should be super rare.
@SuppressWarnings("TryFinallyCanBeTryWithResources")
@Nullable
private static Bitmap decodeBitmap(@NonNull byte[] source, int maxWidth, int maxHeight, @NonNull BitmapFactory.Options options, int rotation) {
private static Bitmap decodeBitmap(@NonNull byte[] source,
int maxWidth,
int maxHeight,
@NonNull BitmapFactory.Options options,
int rotation) {
if (maxWidth <= 0) maxWidth = Integer.MAX_VALUE;
if (maxHeight <= 0) maxHeight = Integer.MAX_VALUE;
int orientation;
@ -261,7 +282,8 @@ public class CameraUtils {
// http://sylvana.net/jpegcrop/exif_orientation.html
stream = new ByteArrayInputStream(source);
ExifInterface exif = new ExifInterface(stream);
int exifOrientation = exif.getAttributeInt(ExifInterface.TAG_ORIENTATION, ExifInterface.ORIENTATION_NORMAL);
int exifOrientation = exif.getAttributeInt(ExifInterface.TAG_ORIENTATION,
ExifInterface.ORIENTATION_NORMAL);
orientation = ExifHelper.readExifOrientation(exifOrientation);
flip = exifOrientation == ExifInterface.ORIENTATION_FLIP_HORIZONTAL ||
exifOrientation == ExifInterface.ORIENTATION_FLIP_VERTICAL ||
@ -310,7 +332,8 @@ public class CameraUtils {
Matrix matrix = new Matrix();
matrix.setRotate(orientation);
Bitmap temp = bitmap;
bitmap = Bitmap.createBitmap(bitmap, 0, 0, bitmap.getWidth(), bitmap.getHeight(), matrix, true);
bitmap = Bitmap.createBitmap(bitmap, 0, 0, bitmap.getWidth(),
bitmap.getHeight(), matrix, true);
temp.recycle();
}
} catch (OutOfMemoryError e) {

@ -167,25 +167,36 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
if (mInEditor) return;
setWillNotDraw(false);
TypedArray a = context.getTheme().obtainStyledAttributes(attrs, R.styleable.CameraView, 0, 0);
TypedArray a = context.getTheme().obtainStyledAttributes(attrs, R.styleable.CameraView,
0, 0);
ControlParser controls = new ControlParser(context, a);
// Self managed
boolean playSounds = a.getBoolean(R.styleable.CameraView_cameraPlaySounds, DEFAULT_PLAY_SOUNDS);
boolean useDeviceOrientation = a.getBoolean(R.styleable.CameraView_cameraUseDeviceOrientation, DEFAULT_USE_DEVICE_ORIENTATION);
boolean playSounds = a.getBoolean(R.styleable.CameraView_cameraPlaySounds,
DEFAULT_PLAY_SOUNDS);
boolean useDeviceOrientation = a.getBoolean(
R.styleable.CameraView_cameraUseDeviceOrientation, DEFAULT_USE_DEVICE_ORIENTATION);
mExperimental = a.getBoolean(R.styleable.CameraView_cameraExperimental, false);
mPreview = controls.getPreview();
mEngine = controls.getEngine();
// Camera engine params
int gridColor = a.getColor(R.styleable.CameraView_cameraGridColor, GridLinesLayout.DEFAULT_COLOR);
long videoMaxSize = (long) a.getFloat(R.styleable.CameraView_cameraVideoMaxSize, 0);
int videoMaxDuration = a.getInteger(R.styleable.CameraView_cameraVideoMaxDuration, 0);
int gridColor = a.getColor(R.styleable.CameraView_cameraGridColor,
GridLinesLayout.DEFAULT_COLOR);
long videoMaxSize = (long) a.getFloat(R.styleable.CameraView_cameraVideoMaxSize,
0);
int videoMaxDuration = a.getInteger(R.styleable.CameraView_cameraVideoMaxDuration,
0);
int videoBitRate = a.getInteger(R.styleable.CameraView_cameraVideoBitRate, 0);
int audioBitRate = a.getInteger(R.styleable.CameraView_cameraAudioBitRate, 0);
long autoFocusResetDelay = (long) a.getInteger(R.styleable.CameraView_cameraAutoFocusResetDelay, (int) DEFAULT_AUTOFOCUS_RESET_DELAY_MILLIS);
boolean pictureMetering = a.getBoolean(R.styleable.CameraView_cameraPictureMetering, DEFAULT_PICTURE_METERING);
boolean pictureSnapshotMetering = a.getBoolean(R.styleable.CameraView_cameraPictureSnapshotMetering, DEFAULT_PICTURE_SNAPSHOT_METERING);
long autoFocusResetDelay = (long) a.getInteger(
R.styleable.CameraView_cameraAutoFocusResetDelay,
(int) DEFAULT_AUTOFOCUS_RESET_DELAY_MILLIS);
boolean pictureMetering = a.getBoolean(R.styleable.CameraView_cameraPictureMetering,
DEFAULT_PICTURE_METERING);
boolean pictureSnapshotMetering = a.getBoolean(
R.styleable.CameraView_cameraPictureSnapshotMetering,
DEFAULT_PICTURE_SNAPSHOT_METERING);
// Size selectors and gestures
SizeSelectorParser sizeSelectors = new SizeSelectorParser(a);
@ -265,7 +276,8 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
private void doInstantiateEngine() {
LOG.w("doInstantiateEngine:", "instantiating. engine:", mEngine);
mCameraEngine = instantiateCameraEngine(mEngine, mCameraCallbacks);
LOG.w("doInstantiateEngine:", "instantiated. engine:", mCameraEngine.getClass().getSimpleName());
LOG.w("doInstantiateEngine:", "instantiated. engine:",
mCameraEngine.getClass().getSimpleName());
mCameraEngine.setOverlay(mOverlayLayout);
}
@ -278,7 +290,8 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
void doInstantiatePreview() {
LOG.w("doInstantiateEngine:", "instantiating. preview:", mPreview);
mCameraPreview = instantiatePreview(mPreview, getContext(), this);
LOG.w("doInstantiateEngine:", "instantiated. preview:", mCameraPreview.getClass().getSimpleName());
LOG.w("doInstantiateEngine:", "instantiated. preview:",
mCameraPreview.getClass().getSimpleName());
mCameraEngine.setPreview(mCameraPreview);
if (mPendingFilter != null) {
setFilter(mPendingFilter);
@ -294,8 +307,11 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
* @return the engine
*/
@NonNull
protected CameraEngine instantiateCameraEngine(@NonNull Engine engine, @NonNull CameraEngine.Callback callback) {
if (mExperimental && engine == Engine.CAMERA2 && Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
protected CameraEngine instantiateCameraEngine(@NonNull Engine engine,
@NonNull CameraEngine.Callback callback) {
if (mExperimental
&& engine == Engine.CAMERA2
&& Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
return new Camera2Engine(callback);
} else {
mEngine = Engine.CAMERA1;
@ -312,7 +328,9 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
* @return the preview
*/
@NonNull
protected CameraPreview instantiatePreview(@NonNull Preview preview, @NonNull Context context, @NonNull ViewGroup container) {
protected CameraPreview instantiatePreview(@NonNull Preview preview,
@NonNull Context context,
@NonNull ViewGroup container) {
switch (preview) {
case SURFACE:
return new SurfaceCameraPreview(context, container);
@ -414,13 +432,15 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
if (heightMode == AT_MOST && lp.height == MATCH_PARENT) heightMode = EXACTLY;
}
LOG.i("onMeasure:", "requested dimensions are", "(" + widthValue + "[" + ms(widthMode) + "]x" +
heightValue + "[" + ms(heightMode) + "])");
LOG.i("onMeasure:", "previewSize is", "(" + previewWidth + "x" + previewHeight + ")");
LOG.i("onMeasure:", "requested dimensions are (" + widthValue + "[" + ms(widthMode)
+ "]x" + heightValue + "[" + ms(heightMode) + "])");
LOG.i("onMeasure:", "previewSize is", "(" + previewWidth + "x"
+ previewHeight + ")");
// (1) If we have fixed dimensions (either 300dp or MATCH_PARENT), there's nothing we should do,
// other than respect it. The preview will eventually be cropped at the sides (by PreviewImpl scaling)
// except the case in which these fixed dimensions manage to fit exactly the preview aspect ratio.
// (1) If we have fixed dimensions (either 300dp or MATCH_PARENT), there's nothing we
// should do, other than respect it. The preview will eventually be cropped at the sides
// (by Preview scaling) except the case in which these fixed dimensions manage to fit
// exactly the preview aspect ratio.
if (widthMode == EXACTLY && heightMode == EXACTLY) {
LOG.i("onMeasure:", "both are MATCH_PARENT or fixed value. We adapt.",
"This means CROP_CENTER.", "(" + widthValue + "x" + heightValue + ")");
@ -440,8 +460,8 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
return;
}
// It's sure now that at least one dimension can be determined (either because EXACTLY or AT_MOST).
// This starts to seem a pleasant situation.
// It's sure now that at least one dimension can be determined (either because EXACTLY
// or AT_MOST). This starts to seem a pleasant situation.
// (3) If one of the dimension is completely free (e.g. in a scrollable container),
// take the other and fit the ratio.
@ -457,16 +477,17 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
width = widthValue;
height = Math.round(width * ratio);
}
LOG.i("onMeasure:", "one dimension was free, we adapted it to fit the aspect ratio.",
LOG.i("onMeasure:", "one dimension was free, we adapted it to fit the ratio.",
"(" + width + "x" + height + ")");
super.onMeasure(MeasureSpec.makeMeasureSpec(width, EXACTLY),
MeasureSpec.makeMeasureSpec(height, EXACTLY));
return;
}
// (4) At this point both dimensions are either AT_MOST-AT_MOST, EXACTLY-AT_MOST or AT_MOST-EXACTLY.
// Let's manage this sanely. If only one is EXACTLY, we can TRY to fit the aspect ratio,
// but it is not guaranteed to succeed. It depends on the AT_MOST value of the other dimensions.
// (4) At this point both dimensions are either AT_MOST-AT_MOST, EXACTLY-AT_MOST or
// AT_MOST-EXACTLY. Let's manage this sanely. If only one is EXACTLY, we can TRY to fit
// the aspect ratio, but it is not guaranteed to succeed. It depends on the AT_MOST
// value of the other dimensions.
if (widthMode == EXACTLY || heightMode == EXACTLY) {
boolean freeWidth = widthMode == AT_MOST;
int height, width;
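As a worked example of case (3) above, with illustrative numbers that are not part of the commit: if the width is EXACTLY 1080 and the height is completely free, the free dimension is derived from the preview aspect ratio.

public class MeasureCase3Sketch {
    public static void main(String[] args) {
        float ratio = 4f / 3f;                     // assumed preview ratio (height / width)
        int width = 1080;                          // the EXACTLY-constrained dimension
        int height = Math.round(width * ratio);    // 1440
        // Both dimensions are then re-measured with EXACTLY specs, as in the code above.
        System.out.println(width + "x" + height);  // 1080x1440
    }
}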
@ -724,8 +745,10 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
boolean needsCamera = true;
boolean needsAudio = audio == Audio.ON || audio == Audio.MONO || audio == Audio.STEREO;
needsCamera = needsCamera && c.checkSelfPermission(Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED;
needsAudio = needsAudio && c.checkSelfPermission(Manifest.permission.RECORD_AUDIO) != PackageManager.PERMISSION_GRANTED;
needsCamera = needsCamera && c.checkSelfPermission(Manifest.permission.CAMERA)
!= PackageManager.PERMISSION_GRANTED;
needsAudio = needsAudio && c.checkSelfPermission(Manifest.permission.RECORD_AUDIO)
!= PackageManager.PERMISSION_GRANTED;
if (needsCamera || needsAudio) {
requestPermissions(needsCamera, needsAudio);
@ -742,13 +765,14 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
if (audio == Audio.ON || audio == Audio.MONO || audio == Audio.STEREO) {
try {
PackageManager manager = getContext().getPackageManager();
PackageInfo info = manager.getPackageInfo(getContext().getPackageName(), PackageManager.GET_PERMISSIONS);
PackageInfo info = manager.getPackageInfo(getContext().getPackageName(),
PackageManager.GET_PERMISSIONS);
for (String requestedPermission : info.requestedPermissions) {
if (requestedPermission.equals(Manifest.permission.RECORD_AUDIO)) {
return;
}
}
String message = LOG.e("Permission error:", "When audio is enabled (Audio.ON),",
String message = LOG.e("Permission error: when audio is enabled (Audio.ON)" +
" the RECORD_AUDIO permission should be added to the app manifest file.");
throw new IllegalStateException(message);
} catch (PackageManager.NameNotFoundException e) {
@ -1272,8 +1296,12 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
* @param y should be between 0 and getHeight()
*/
public void startAutoFocus(float x, float y) {
if (x < 0 || x > getWidth()) throw new IllegalArgumentException("x should be >= 0 and <= getWidth()");
if (y < 0 || y > getHeight()) throw new IllegalArgumentException("y should be >= 0 and <= getHeight()");
if (x < 0 || x > getWidth()) {
throw new IllegalArgumentException("x should be >= 0 and <= getWidth()");
}
if (y < 0 || y > getHeight()) {
throw new IllegalArgumentException("y should be >= 0 and <= getHeight()");
}
mCameraEngine.startAutoFocus(null, new PointF(x, y));
}
@ -1510,9 +1538,6 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
* This will trigger {@link CameraListener#onPictureTaken(PictureResult)} if a listener
* was registered.
*
* Note that if sessionType is {@link Mode#VIDEO}, this
* might fall back to {@link #takePictureSnapshot()} (that is, we might capture a preview frame).
*
* @see #takePictureSnapshot()
*/
public void takePicture() {
@ -2000,7 +2025,8 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
}
@Override
public void dispatchOnFocusStart(@Nullable final Gesture gesture, @NonNull final PointF point) {
public void dispatchOnFocusStart(@Nullable final Gesture gesture,
@NonNull final PointF point) {
mLogger.i("dispatchOnFocusStart", gesture, point);
mUiHandler.post(new Runnable() {
@Override
@ -2020,7 +2046,8 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
}
@Override
public void dispatchOnFocusEnd(@Nullable final Gesture gesture, final boolean success,
public void dispatchOnFocusEnd(@Nullable final Gesture gesture,
final boolean success,
@NonNull final PointF point) {
mLogger.i("dispatchOnFocusEnd", gesture, success, point);
mUiHandler.post(new Runnable() {
@ -2096,7 +2123,8 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
@Override
public void dispatchFrame(@NonNull final Frame frame) {
// The getTime() below might crash if developers incorrectly release frames asynchronously.
// The getTime() below might crash if developers incorrectly release
// frames asynchronously.
mLogger.v("dispatchFrame:", frame.getTime(), "processors:", mFrameProcessors.size());
if (mFrameProcessors.isEmpty()) {
// Mark as released. This instance will be reused.
@ -2106,7 +2134,8 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
mFrameProcessorsHandler.run(new Runnable() {
@Override
public void run() {
mLogger.v("dispatchFrame: dispatching", frame.getTime(), "to processors.");
mLogger.v("dispatchFrame: dispatching", frame.getTime(),
"to processors.");
for (FrameProcessor processor : mFrameProcessors) {
try {
processor.process(frame);
@ -2198,8 +2227,8 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
* Use {@link NoFilter} to clear the existing filter,
* and take a look at the {@link Filters} class for commonly used filters.
*
* This method will throw an exception if the current preview does not support real-time filters.
* Make sure you use {@link Preview#GL_SURFACE} (the default).
* This method will throw an exception if the current preview does not support real-time
* filters. Make sure you use {@link Preview#GL_SURFACE} (the default).
*
* @see Filters
* @param filter a new filter
@ -2231,8 +2260,8 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
/**
* Returns the current real-time filter applied to the camera preview.
*
* This method will throw an exception if the current preview does not support real-time filters.
* Make sure you use {@link Preview#GL_SURFACE} (the default).
* This method will throw an exception if the current preview does not support real-time
* filters. Make sure you use {@link Preview#GL_SURFACE} (the default).
*
* @see #setFilter(Filter)
* @return the current filter
@ -2240,13 +2269,15 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
@NonNull
public Filter getFilter() {
if (!mExperimental) {
throw new RuntimeException("Filters are an experimental features and need the experimental flag set.");
throw new RuntimeException("Filters are an experimental features and need " +
"the experimental flag set.");
} else if (mCameraPreview == null) {
return mPendingFilter;
} else if (mCameraPreview instanceof FilterCameraPreview) {
return ((FilterCameraPreview) mCameraPreview).getCurrentFilter();
} else {
throw new RuntimeException("Filters are only supported by the GL_SURFACE preview. Current:" + mPreview);
throw new RuntimeException("Filters are only supported by the GL_SURFACE preview. " +
"Current:" + mPreview);
}
}

@ -137,7 +137,8 @@ public class PictureResult {
* @param callback a callback to be notified of image decoding
*/
public void toBitmap(int maxWidth, int maxHeight, @NonNull BitmapCallback callback) {
CameraUtils.decodeBitmap(getData(), maxWidth, maxHeight, new BitmapFactory.Options(), rotation, callback);
CameraUtils.decodeBitmap(getData(), maxWidth, maxHeight, new BitmapFactory.Options(),
rotation, callback);
}
/**

@ -185,7 +185,8 @@ public class VideoResult {
/**
* Returns the reason why the recording was stopped.
* @return one of {@link #REASON_USER}, {@link #REASON_MAX_DURATION_REACHED} or {@link #REASON_MAX_SIZE_REACHED}.
* @return one of {@link #REASON_USER}, {@link #REASON_MAX_DURATION_REACHED}
* or {@link #REASON_MAX_SIZE_REACHED}.
*/
public int getTerminationReason() {
return endReason;

@ -25,16 +25,19 @@ public class ControlParser {
private int engine;
public ControlParser(@NonNull Context context, @NonNull TypedArray array) {
this.preview = array.getInteger(R.styleable.CameraView_cameraPreview, Preview.DEFAULT.value());
this.facing = array.getInteger(R.styleable.CameraView_cameraFacing, Facing.DEFAULT(context).value());
this.flash = array.getInteger(R.styleable.CameraView_cameraFlash, Flash.DEFAULT.value());
this.grid = array.getInteger(R.styleable.CameraView_cameraGrid, Grid.DEFAULT.value());
this.whiteBalance = array.getInteger(R.styleable.CameraView_cameraWhiteBalance, WhiteBalance.DEFAULT.value());
this.mode = array.getInteger(R.styleable.CameraView_cameraMode, Mode.DEFAULT.value());
this.hdr = array.getInteger(R.styleable.CameraView_cameraHdr, Hdr.DEFAULT.value());
this.audio = array.getInteger(R.styleable.CameraView_cameraAudio, Audio.DEFAULT.value());
this.videoCodec = array.getInteger(R.styleable.CameraView_cameraVideoCodec, VideoCodec.DEFAULT.value());
this.engine = array.getInteger(R.styleable.CameraView_cameraEngine, Engine.DEFAULT.value());
preview = array.getInteger(R.styleable.CameraView_cameraPreview, Preview.DEFAULT.value());
facing = array.getInteger(R.styleable.CameraView_cameraFacing,
Facing.DEFAULT(context).value());
flash = array.getInteger(R.styleable.CameraView_cameraFlash, Flash.DEFAULT.value());
grid = array.getInteger(R.styleable.CameraView_cameraGrid, Grid.DEFAULT.value());
whiteBalance = array.getInteger(R.styleable.CameraView_cameraWhiteBalance,
WhiteBalance.DEFAULT.value());
mode = array.getInteger(R.styleable.CameraView_cameraMode, Mode.DEFAULT.value());
hdr = array.getInteger(R.styleable.CameraView_cameraHdr, Hdr.DEFAULT.value());
audio = array.getInteger(R.styleable.CameraView_cameraAudio, Audio.DEFAULT.value());
videoCodec = array.getInteger(R.styleable.CameraView_cameraVideoCodec,
VideoCodec.DEFAULT.value());
engine = array.getInteger(R.styleable.CameraView_cameraEngine, Engine.DEFAULT.value());
}
@NonNull

@ -76,7 +76,8 @@ public class Camera1Engine extends CameraEngine implements
public void onError(int error, Camera camera) {
if (error == Camera.CAMERA_ERROR_SERVER_DIED) {
// Looks like this is recoverable.
LOG.w("Recoverable error inside the onError callback.", "CAMERA_ERROR_SERVER_DIED");
LOG.w("Recoverable error inside the onError callback.",
"CAMERA_ERROR_SERVER_DIED");
restart();
return;
}
@ -118,7 +119,10 @@ public class Camera1Engine extends CameraEngine implements
@Override
protected boolean collectCameraInfo(@NonNull Facing facing) {
int internalFacing = mMapper.mapFacing(facing);
LOG.i("collectCameraInfo", "Facing:", facing, "Internal:", internalFacing, "Cameras:", Camera.getNumberOfCameras());
LOG.i("collectCameraInfo",
"Facing:", facing,
"Internal:", internalFacing,
"Cameras:", Camera.getNumberOfCameras());
Camera.CameraInfo cameraInfo = new Camera.CameraInfo();
for (int i = 0, count = Camera.getNumberOfCameras(); i < count; i++) {
Camera.getCameraInfo(i, cameraInfo);
@ -150,10 +154,12 @@ public class Camera1Engine extends CameraEngine implements
// Set parameters that might have been set before the camera was opened.
LOG.i("onStartEngine:", "Applying default parameters.");
Camera.Parameters params = mCamera.getParameters();
mCameraOptions = new CameraOptions(params, getAngles().flip(Reference.SENSOR, Reference.VIEW));
mCameraOptions = new CameraOptions(params, getAngles()
.flip(Reference.SENSOR, Reference.VIEW));
applyAllParameters(params);
mCamera.setParameters(params);
mCamera.setDisplayOrientation(getAngles().offset(Reference.SENSOR, Reference.VIEW, Axis.ABSOLUTE)); // <- not allowed during preview
mCamera.setDisplayOrientation(getAngles().offset(Reference.SENSOR, Reference.VIEW,
Axis.ABSOLUTE)); // <- not allowed during preview
LOG.i("onStartEngine:", "Ended");
return Tasks.forResult(null);
}
@ -194,14 +200,19 @@ public class Camera1Engine extends CameraEngine implements
mPreview.setStreamSize(previewSize.getWidth(), previewSize.getHeight());
Camera.Parameters params = mCamera.getParameters();
params.setPreviewFormat(ImageFormat.NV21); // should be the default, but let's make sure, since YuvImage will only support this & a few others
params.setPreviewSize(mPreviewStreamSize.getWidth(), mPreviewStreamSize.getHeight()); // not allowed during preview
// NV21 should be the default, but let's make sure, since YuvImage will only support this
// and a few others
params.setPreviewFormat(ImageFormat.NV21);
// setPreviewSize is not allowed during preview
params.setPreviewSize(mPreviewStreamSize.getWidth(), mPreviewStreamSize.getHeight());
if (getMode() == Mode.PICTURE) {
params.setPictureSize(mCaptureSize.getWidth(), mCaptureSize.getHeight()); // allowed during preview
// setPictureSize is allowed during preview
params.setPictureSize(mCaptureSize.getWidth(), mCaptureSize.getHeight());
} else {
// mCaptureSize in this case is a video size. The available video sizes are not necessarily
// a subset of the picture sizes, so we can't use the mCaptureSize value: it might crash.
// However, the setPictureSize() passed here is useless : we don't allow HQ pictures in video mode.
// mCaptureSize in this case is a video size. The available video sizes are not
// necessarily a subset of the picture sizes, so we can't use the mCaptureSize value:
// it might crash. However, the setPictureSize() passed here is useless : we don't allow
// HQ pictures in video mode.
// While this might be lifted in the future, for now, just use a picture capture size.
Size pictureSize = computeCaptureSize(Mode.PICTURE);
params.setPictureSize(pictureSize.getWidth(), pictureSize.getHeight());
@ -298,7 +309,8 @@ public class Camera1Engine extends CameraEngine implements
@WorkerThread
@Override
protected void onTakePicture(@NonNull PictureResult.Stub stub, boolean doMetering) {
stub.rotation = getAngles().offset(Reference.SENSOR, Reference.OUTPUT, Axis.RELATIVE_TO_SENSOR);
stub.rotation = getAngles().offset(Reference.SENSOR, Reference.OUTPUT,
Axis.RELATIVE_TO_SENSOR);
stub.size = getPictureSize(Reference.OUTPUT);
mPictureRecorder = new Full1PictureRecorder(stub, Camera1Engine.this, mCamera);
mPictureRecorder.take();
@ -306,10 +318,13 @@ public class Camera1Engine extends CameraEngine implements
@WorkerThread
@Override
protected void onTakePictureSnapshot(@NonNull PictureResult.Stub stub, @NonNull AspectRatio outputRatio, boolean doMetering) {
stub.size = getUncroppedSnapshotSize(Reference.OUTPUT); // Not the real size: it will be cropped to match the view ratio
stub.rotation = getAngles().offset(Reference.SENSOR, Reference.OUTPUT, Axis.RELATIVE_TO_SENSOR); // Actually it will be rotated and set to 0.
protected void onTakePictureSnapshot(@NonNull PictureResult.Stub stub,
@NonNull AspectRatio outputRatio,
boolean doMetering) {
// Not the real size: it will be cropped to match the view ratio
stub.size = getUncroppedSnapshotSize(Reference.OUTPUT);
// Actually it will be rotated and set to 0.
stub.rotation = getAngles().offset(Reference.SENSOR, Reference.OUTPUT, Axis.RELATIVE_TO_SENSOR);
if (mPreview instanceof GlCameraPreview && Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT) {
mPictureRecorder = new SnapshotGlPictureRecorder(stub, this, (GlCameraPreview) mPreview, outputRatio);
} else {
@ -324,8 +339,10 @@ public class Camera1Engine extends CameraEngine implements
@Override
protected void onTakeVideo(@NonNull VideoResult.Stub stub) {
stub.rotation = getAngles().offset(Reference.SENSOR, Reference.OUTPUT, Axis.RELATIVE_TO_SENSOR);
stub.size = getAngles().flip(Reference.SENSOR, Reference.OUTPUT) ? mCaptureSize.flip() : mCaptureSize;
stub.rotation = getAngles().offset(Reference.SENSOR, Reference.OUTPUT,
Axis.RELATIVE_TO_SENSOR);
stub.size = getAngles().flip(Reference.SENSOR, Reference.OUTPUT) ? mCaptureSize.flip()
: mCaptureSize;
// Unlock the camera and start recording.
try {
mCamera.unlock();
@ -342,12 +359,13 @@ public class Camera1Engine extends CameraEngine implements
@SuppressLint("NewApi")
@WorkerThread
@Override
protected void onTakeVideoSnapshot(@NonNull VideoResult.Stub stub, @NonNull AspectRatio outputRatio) {
protected void onTakeVideoSnapshot(@NonNull VideoResult.Stub stub,
@NonNull AspectRatio outputRatio) {
if (!(mPreview instanceof GlCameraPreview)) {
throw new IllegalStateException("Video snapshots are only supported with GlCameraPreview.");
throw new IllegalStateException("Video snapshots are only supported with GL_SURFACE.");
}
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.JELLY_BEAN_MR2) {
throw new IllegalStateException("Video snapshots are only supported starting from API 18.");
throw new IllegalStateException("Video snapshots are only supported on API 18+.");
}
GlCameraPreview glPreview = (GlCameraPreview) mPreview;
Size outputSize = getUncroppedSnapshotSize(Reference.OUTPUT);
@ -370,7 +388,8 @@ public class Camera1Engine extends CameraEngine implements
LOG.i("onTakeVideoSnapshot", "rotation:", stub.rotation, "size:", stub.size);
// Start.
mVideoRecorder = new SnapshotVideoRecorder(Camera1Engine.this, glPreview, getOverlay(), stub.rotation);
mVideoRecorder = new SnapshotVideoRecorder(Camera1Engine.this, glPreview,
getOverlay(), stub.rotation);
mVideoRecorder.start(stub);
}
@ -494,7 +513,8 @@ public class Camera1Engine extends CameraEngine implements
});
}
private boolean applyWhiteBalance(@NonNull Camera.Parameters params, @NonNull WhiteBalance oldWhiteBalance) {
private boolean applyWhiteBalance(@NonNull Camera.Parameters params,
@NonNull WhiteBalance oldWhiteBalance) {
if (mCameraOptions.supports(mWhiteBalance)) {
params.setWhiteBalance(mMapper.mapWhiteBalance(mWhiteBalance));
return true;
@ -573,7 +593,8 @@ public class Camera1Engine extends CameraEngine implements
if (applyExposureCorrection(params, old)) {
mCamera.setParameters(params);
if (notify) {
mCallback.dispatchOnExposureCorrectionChanged(mExposureCorrectionValue, bounds, points);
mCallback.dispatchOnExposureCorrectionChanged(mExposureCorrectionValue,
bounds, points);
}
}
}
@ -582,7 +603,8 @@ public class Camera1Engine extends CameraEngine implements
});
}
private boolean applyExposureCorrection(@NonNull Camera.Parameters params, float oldExposureCorrection) {
private boolean applyExposureCorrection(@NonNull Camera.Parameters params,
float oldExposureCorrection) {
if (mCameraOptions.isExposureCorrectionSupported()) {
// Just make sure we're inside boundaries.
float max = mCameraOptions.getExposureCorrectionMaxValue();
@ -591,7 +613,8 @@ public class Camera1Engine extends CameraEngine implements
val = val < min ? min : val > max ? max : val; // cap
mExposureCorrectionValue = val;
// Apply.
int indexValue = (int) (mExposureCorrectionValue / params.getExposureCompensationStep());
int indexValue = (int) (mExposureCorrectionValue
/ params.getExposureCompensationStep());
params.setExposureCompensation(indexValue);
return true;
}
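A quick worked example of the index conversion above, with illustrative values: the requested correction in EV is divided by the device's compensation step to obtain the integer index passed to setExposureCompensation().

public class ExposureIndexSketch {
    public static void main(String[] args) {
        float step = 0.5f;          // assumed EV per index, as reported by Camera.Parameters
        float requestedEv = 1.5f;   // already clamped to the supported bounds
        int index = (int) (requestedEv / step);
        System.out.println(index);  // 3, i.e. +1.5 EV at a 0.5 EV step
    }
}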
@ -692,7 +715,8 @@ public class Camera1Engine extends CameraEngine implements
viewWidthF, viewHeightF, offset);
List<Camera.Area> meteringAreas1 = meteringAreas2.subList(0, 1);
// At this point we are sure that camera supports auto focus... right? Look at CameraView.onTouchEvent().
// At this point we are sure that camera supports auto focus... right?
// Look at CameraView.onTouchEvent().
Camera.Parameters params = mCamera.getParameters();
int maxAF = params.getMaxNumFocusAreas();
int maxAE = params.getMaxNumMeteringAreas();
@ -702,8 +726,8 @@ public class Camera1Engine extends CameraEngine implements
mCamera.setParameters(params);
mCallback.dispatchOnFocusStart(gesture, p);
// The auto focus callback is not guaranteed to be called, but we really want it to be.
// So we remove the old runnable if still present and post a new one.
// The auto focus callback is not guaranteed to be called, but we really want it
// to be. So we remove the old runnable if still present and post a new one.
if (mFocusEndRunnable != null) mHandler.remove(mFocusEndRunnable);
mFocusEndRunnable = new Runnable() {
@Override
@ -778,7 +802,11 @@ public class Camera1Engine extends CameraEngine implements
int bottom = (int) Math.min(centerY + delta, 1000);
int left = (int) Math.max(centerX - delta, -1000);
int right = (int) Math.min(centerX + delta, 1000);
LOG.i("focus:", "computeMeteringArea:", "top:", top, "left:", left, "bottom:", bottom, "right:", right);
LOG.i("focus:", "computeMeteringArea:",
"top:", top,
"left:", left,
"bottom:", bottom,
"right:", right);
return new Rect(left, top, right, bottom);
}
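A worked example of the clamping above, with illustrative values; the top computation is assumed symmetric to the bottom one, since it falls just outside the hunk. Coordinates are in the Camera1 driver space, which spans -1000..1000 on both axes.

public class MeteringAreaSketch {
    public static void main(String[] args) {
        double centerX = 200, centerY = -300, delta = 150;
        int top = (int) Math.max(centerY - delta, -1000);    // -450
        int bottom = (int) Math.min(centerY + delta, 1000);  // -150
        int left = (int) Math.max(centerX - delta, -1000);   // 50
        int right = (int) Math.min(centerX + delta, 1000);   // 350
        // Rect(left, top, right, bottom) -> Rect(50, -450, 350, -150)
        System.out.println(left + ", " + top + ", " + right + ", " + bottom);
    }
}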

@ -104,11 +104,12 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
private Surface mPreviewStreamSurface;
// Video recording
private VideoResult.Stub mFullVideoPendingStub; // When takeVideo is called, we restart the session.
// When takeVideo is called, we restart the session.
private VideoResult.Stub mFullVideoPendingStub;
// Picture capturing
private ImageReader mPictureReader;
private final boolean mPictureCaptureStopsPreview = false; // can make configurable at some point
private final boolean mPictureCaptureStopsPreview = false; // can be configurable at some point
// Actions
private final List<Action> mActions = new ArrayList<>();
@ -137,32 +138,46 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
return value == null ? fallback : value;
}
@SuppressWarnings("DuplicateBranchesInSwitch")
@NonNull
private CameraException createCameraException(@NonNull CameraAccessException exception) {
int reason;
switch (exception.getReason()) {
case CameraAccessException.CAMERA_DISABLED: reason = CameraException.REASON_FAILED_TO_CONNECT; break;
case CameraAccessException.CAMERA_ERROR: reason = CameraException.REASON_DISCONNECTED; break;
case CameraAccessException.CAMERA_DISCONNECTED: reason = CameraException.REASON_DISCONNECTED; break;
case CameraAccessException.CAMERA_IN_USE: reason = CameraException.REASON_FAILED_TO_CONNECT; break;
case CameraAccessException.MAX_CAMERAS_IN_USE: reason = CameraException.REASON_FAILED_TO_CONNECT; break;
default: reason = CameraException.REASON_UNKNOWN; break;
case CameraAccessException.CAMERA_DISABLED:
case CameraAccessException.CAMERA_IN_USE:
case CameraAccessException.MAX_CAMERAS_IN_USE: {
reason = CameraException.REASON_FAILED_TO_CONNECT;
break;
}
case CameraAccessException.CAMERA_ERROR:
case CameraAccessException.CAMERA_DISCONNECTED: {
reason = CameraException.REASON_DISCONNECTED;
break;
}
default: {
reason = CameraException.REASON_UNKNOWN;
break;
}
}
return new CameraException(exception, reason);
}
@SuppressWarnings("DuplicateBranchesInSwitch")
@NonNull
private CameraException createCameraException(int stateCallbackError) {
int reason;
switch (stateCallbackError) {
case CameraDevice.StateCallback.ERROR_CAMERA_DISABLED: reason = CameraException.REASON_FAILED_TO_CONNECT; break; // Device policy
case CameraDevice.StateCallback.ERROR_CAMERA_DEVICE: reason = CameraException.REASON_FAILED_TO_CONNECT; break; // Fatal error
case CameraDevice.StateCallback.ERROR_CAMERA_SERVICE: reason = CameraException.REASON_FAILED_TO_CONNECT; break; // Fatal error, device might have to be restarted
case CameraDevice.StateCallback.ERROR_CAMERA_IN_USE: reason = CameraException.REASON_FAILED_TO_CONNECT; break;
case CameraDevice.StateCallback.ERROR_MAX_CAMERAS_IN_USE: reason = CameraException.REASON_FAILED_TO_CONNECT; break;
default: reason = CameraException.REASON_UNKNOWN; break;
case CameraDevice.StateCallback.ERROR_CAMERA_DISABLED: // Device policy
case CameraDevice.StateCallback.ERROR_CAMERA_DEVICE: // Fatal error
case CameraDevice.StateCallback.ERROR_CAMERA_SERVICE: // Fatal error, might have to
// restart the device
case CameraDevice.StateCallback.ERROR_CAMERA_IN_USE:
case CameraDevice.StateCallback.ERROR_MAX_CAMERAS_IN_USE: {
reason = CameraException.REASON_FAILED_TO_CONNECT;
break;
}
default: {
reason = CameraException.REASON_UNKNOWN;
break;
}
}
return new CameraException(reason);
}
@ -366,12 +381,13 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
@Override
public void onDisconnected(@NonNull CameraDevice camera) {
// Not sure if this is called INSTEAD of onOpened() or can be called after as well.
// However, using trySetException should address this problem - it will only trigger
// if the task has no result.
// Not sure if this is called INSTEAD of onOpened() or can be called after
// as well. However, using trySetException should address this problem -
// it will only trigger if the task has no result.
//
// Docs say to release this camera instance, however, since we throw an unrecoverable
// CameraException, this will trigger a stop() through the exception handler.
// Docs say to release this camera instance, however, since we throw an
// unrecoverable CameraException, this will trigger a stop() through the
// exception handler.
task.trySetException(new CameraException(CameraException.REASON_DISCONNECTED));
}
@ -498,7 +514,7 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
@Override
public void onConfigureFailed(@NonNull CameraCaptureSession session) {
// I would say this should be a library error and as such we throw a Runtime Exception.
// This SHOULD be a library error so we throw a RuntimeException.
String message = LOG.e("onConfigureFailed! Session", session);
throw new RuntimeException(message);
}
@ -527,13 +543,14 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
LOG.i("onStartPreview", "Starting preview.");
addRepeatingRequestBuilderSurfaces();
applyRepeatingRequestBuilder(false, CameraException.REASON_FAILED_TO_START_PREVIEW);
applyRepeatingRequestBuilder(false,
CameraException.REASON_FAILED_TO_START_PREVIEW);
LOG.i("onStartPreview", "Started preview.");
// Start delayed video if needed.
if (mFullVideoPendingStub != null) {
// Do not call takeVideo/onTakeVideo. It will reset some stub parameters that the recorder sets.
// Also we are posting this so that doTakeVideo sees a started preview.
// Do not call takeVideo/onTakeVideo. It will reset some stub parameters that
// the recorder sets. Also we are posting so that doTakeVideo sees a started preview.
LOG.i("onStartPreview", "Posting doTakeVideo call.");
final VideoResult.Stub stub = mFullVideoPendingStub;
mFullVideoPendingStub = null;
@ -694,18 +711,19 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
stub.size = getPictureSize(Reference.OUTPUT);
try {
if (mPictureCaptureStopsPreview) {
// These two are present in official samples and are probably meant to speed things up?
// But from my tests, they actually make everything slower. So this is disabled by default
// with a boolean flag. Maybe in the future we can make this configurable as some
// people might want to stop the preview while picture is being taken even if it
// increases the latency.
// These two are present in official samples and are probably meant to
// speed things up? But from my tests, they actually make everything slower.
// So this is disabled by default with a boolean flag. Maybe in the future
// we can make this configurable as some people might want to stop the preview
// while picture is being taken even if it increases the latency.
mSession.stopRepeating();
mSession.abortCaptures();
}
CaptureRequest.Builder builder
= mCamera.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
applyAllParameters(builder, mRepeatingRequestBuilder);
mPictureRecorder = new Full2PictureRecorder(stub, this, builder, mPictureReader);
mPictureRecorder = new Full2PictureRecorder(stub, this, builder,
mPictureReader);
mPictureRecorder.take();
} catch (CameraAccessException e) {
throw createCameraException(e);
@ -738,7 +756,8 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
@Override
protected void onTakeVideo(@NonNull VideoResult.Stub stub) {
LOG.i("onTakeVideo", "called.");
stub.rotation = getAngles().offset(Reference.SENSOR, Reference.OUTPUT, Axis.RELATIVE_TO_SENSOR);
stub.rotation = getAngles().offset(Reference.SENSOR, Reference.OUTPUT,
Axis.RELATIVE_TO_SENSOR);
stub.size = getAngles().flip(Reference.SENSOR, Reference.OUTPUT) ?
mCaptureSize.flip() : mCaptureSize;
// We must restart the session each time.
@ -773,7 +792,7 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
protected void onTakeVideoSnapshot(@NonNull VideoResult.Stub stub,
@NonNull AspectRatio outputRatio) {
if (!(mPreview instanceof GlCameraPreview)) {
throw new IllegalStateException("Video snapshots are only supported with GlCameraPreview.");
throw new IllegalStateException("Video snapshots are only supported with GL_SURFACE.");
}
GlCameraPreview glPreview = (GlCameraPreview) mPreview;
Size outputSize = getUncroppedSnapshotSize(Reference.OUTPUT);
@ -798,16 +817,17 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
// Start.
// The overlay rotation should always be VIEW-OUTPUT, just like Camera1Engine.
int overlayRotation = getAngles().offset(Reference.VIEW, Reference.OUTPUT, Axis.ABSOLUTE);
mVideoRecorder = new SnapshotVideoRecorder(this, glPreview, getOverlay(), overlayRotation);
mVideoRecorder = new SnapshotVideoRecorder(this, glPreview, getOverlay(),
overlayRotation);
mVideoRecorder.start(stub);
}
/**
* When video ends we must stop the recorder and remove the recorder surface from camera outputs.
* This is done in onVideoResult. However, on some devices, order matters. If we stop the recorder
* and AFTER send camera frames to it, the camera will try to fill the recorder "abandoned"
* Surface and on some devices with a poor internal implementation (HW_LEVEL_LEGACY) this crashes.
* So if the conditions are met, we restore here. Issue #549.
* When video ends we must stop the recorder and remove the recorder surface from
* camera outputs. This is done in onVideoResult. However, on some devices, order matters.
* If we stop the recorder first and camera frames are sent to it AFTER that, the camera
* will try to fill the now-abandoned recorder Surface, and on some devices with a poor
* internal implementation (HW_LEVEL_LEGACY) this crashes. So if the conditions are met,
* we restore here. Issue #549.
*/
@Override
public void onVideoRecordingEnd() {
@ -827,7 +847,7 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
}
/**
* Some video recorders might change the camera template to {@link CameraDevice#TEMPLATE_RECORD}.
* Video recorders might change the camera template to {@link CameraDevice#TEMPLATE_RECORD}.
* After the video is taken, we should restore the template preview, which also means that
* we'll remove any extra surface target that was added by the video recorder.
*
@ -867,17 +887,22 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
// We might be in a metering operation, or the old builder might have some special
// metering parameters. Copy these special keys over to the new builder.
// These are the keys changed by metering.Parameters, or by us in applyFocusForMetering.
builder.set(CaptureRequest.CONTROL_AF_REGIONS, oldBuilder.get(CaptureRequest.CONTROL_AF_REGIONS));
builder.set(CaptureRequest.CONTROL_AE_REGIONS, oldBuilder.get(CaptureRequest.CONTROL_AE_REGIONS));
builder.set(CaptureRequest.CONTROL_AWB_REGIONS, oldBuilder.get(CaptureRequest.CONTROL_AWB_REGIONS));
builder.set(CaptureRequest.CONTROL_AF_MODE, oldBuilder.get(CaptureRequest.CONTROL_AF_MODE));
builder.set(CaptureRequest.CONTROL_AF_REGIONS,
oldBuilder.get(CaptureRequest.CONTROL_AF_REGIONS));
builder.set(CaptureRequest.CONTROL_AE_REGIONS,
oldBuilder.get(CaptureRequest.CONTROL_AE_REGIONS));
builder.set(CaptureRequest.CONTROL_AWB_REGIONS,
oldBuilder.get(CaptureRequest.CONTROL_AWB_REGIONS));
builder.set(CaptureRequest.CONTROL_AF_MODE,
oldBuilder.get(CaptureRequest.CONTROL_AF_MODE));
// Do NOT copy exposure or focus triggers!
}
}
@SuppressWarnings("WeakerAccess")
protected void applyDefaultFocus(@NonNull CaptureRequest.Builder builder) {
int[] modesArray = readCharacteristic(CameraCharacteristics.CONTROL_AF_AVAILABLE_MODES, new int[]{});
int[] modesArray = readCharacteristic(CameraCharacteristics.CONTROL_AF_AVAILABLE_MODES,
new int[]{});
List<Integer> modes = new ArrayList<>();
for (int mode : modesArray) { modes.add(mode); }
if (getMode() == Mode.VIDEO &&
@ -916,7 +941,8 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
*/
@SuppressWarnings("WeakerAccess")
protected void applyFocusForMetering(@NonNull CaptureRequest.Builder builder) {
int[] modesArray = readCharacteristic(CameraCharacteristics.CONTROL_AF_AVAILABLE_MODES, new int[]{});
int[] modesArray = readCharacteristic(CameraCharacteristics.CONTROL_AF_AVAILABLE_MODES,
new int[]{});
List<Integer> modes = new ArrayList<>();
for (int mode : modesArray) { modes.add(mode); }
if (modes.contains(CaptureRequest.CONTROL_AF_MODE_AUTO)) {
@ -949,13 +975,14 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
boolean shouldApply = applyFlash(mRepeatingRequestBuilder, old);
boolean needsWorkaround = getPreviewState() == STATE_STARTED;
if (needsWorkaround) {
// Runtime changes to the flash value are not correctly handled by the driver.
// See https://stackoverflow.com/q/53003383/4288782 for example.
// Runtime changes to the flash value are not correctly handled by the
// driver. See https://stackoverflow.com/q/53003383/4288782 for example.
// For this reason, we go back to OFF, capture once, then go to the new one.
mFlash = Flash.OFF;
applyFlash(mRepeatingRequestBuilder, old);
try {
mSession.capture(mRepeatingRequestBuilder.build(), null, null);
mSession.capture(mRepeatingRequestBuilder.build(), null,
null);
} catch (CameraAccessException e) {
throw createCameraException(e);
}
@ -979,15 +1006,15 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
* - {@link CaptureRequest#CONTROL_AE_MODE_ON_ALWAYS_FLASH}
*
* The API offers a high level control through {@link CaptureRequest#CONTROL_AE_MODE},
* which is what the mapper looks at. It will trigger (if specified) flash only for still captures
* which is exactly what we want.
* which is what the mapper looks at. It will trigger (if specified) flash only for
* still captures, which is exactly what we want.
*
* However, we set CONTROL_AE_MODE to ON/OFF (depending
* on which is available) with both {@link Flash#OFF} and {@link Flash#TORCH}.
*
* When CONTROL_AE_MODE is ON or OFF, the low level control, called {@link CaptureRequest#FLASH_MODE},
* becomes effective, and that's where we can actually distinguish between a turned off flash
* and a torch flash.
* When CONTROL_AE_MODE is ON or OFF, the low level control, called
* {@link CaptureRequest#FLASH_MODE}, becomes effective, and that's where we can actually
* distinguish between a turned off flash and a torch flash.
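*
* As an illustrative sketch (assumed mapping, not necessarily the exact mapper output),
* {@link Flash#TORCH} could translate to:
*
*     builder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON);
*     builder.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_TORCH);
*
* while {@link Flash#OFF} would use FLASH_MODE_OFF with the same AE mode, and
* {@link Flash#ON} / {@link Flash#AUTO} would rely on CONTROL_AE_MODE_ON_ALWAYS_FLASH /
* CONTROL_AE_MODE_ON_AUTO_FLASH without touching FLASH_MODE.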
*/
@SuppressWarnings("WeakerAccess")
protected boolean applyFlash(@NonNull CaptureRequest.Builder builder,
@ -1119,7 +1146,8 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
@SuppressWarnings("WeakerAccess")
protected boolean applyZoom(@NonNull CaptureRequest.Builder builder, float oldZoom) {
if (mCameraOptions.isZoomSupported()) {
float maxZoom = readCharacteristic(CameraCharacteristics.SCALER_AVAILABLE_MAX_DIGITAL_ZOOM, 1F);
float maxZoom = readCharacteristic(
CameraCharacteristics.SCALER_AVAILABLE_MAX_DIGITAL_ZOOM, 1F);
// converting 0.0f-1.0f zoom scale to the actual camera digital zoom scale
// (which will be, for example, 1.0-10.0)
float calculatedZoom = (mZoomValue * (maxZoom - 1.0f)) + 1.0f;
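// Illustrative numbers (assuming maxZoom = 10.0): mZoomValue 0.0 -> 1.0 (no zoom),
// 0.5 -> 5.5, 1.0 -> 10.0 (maximum digital zoom).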
@ -1133,8 +1161,8 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
@NonNull
private Rect getZoomRect(float zoomLevel, float maxDigitalZoom) {
Rect activeRect = readCharacteristic(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE, new Rect());
Rect activeRect = readCharacteristic(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE,
new Rect());
int minW = (int) (activeRect.width() / maxDigitalZoom);
int minH = (int) (activeRect.height() / maxDigitalZoom);
int difW = activeRect.width() - minW;
@ -1144,11 +1172,15 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
// When zoom is maxZoom, we want to return a centered rect with minW and minH
int cropW = (int) (difW * (zoomLevel - 1) / (maxDigitalZoom - 1) / 2F);
int cropH = (int) (difH * (zoomLevel - 1) / (maxDigitalZoom - 1) / 2F);
return new Rect(cropW, cropH, activeRect.width() - cropW, activeRect.height() - cropH);
return new Rect(cropW, cropH, activeRect.width() - cropW,
activeRect.height() - cropH);
}
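// Worked example with illustrative numbers: for an active array of 4000x3000 and
// maxDigitalZoom = 4, zoomLevel 1 returns Rect(0, 0, 4000, 3000); zoomLevel 4 returns the
// centered Rect(1500, 1125, 2500, 1875), i.e. minW x minH = 1000x750; zoomLevel 2 returns
// Rect(500, 375, 3500, 2625). The crop amount grows linearly with the zoom level.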
@Override
public void setExposureCorrection(final float EVvalue, @NonNull final float[] bounds, @Nullable final PointF[] points, final boolean notify) {
public void setExposureCorrection(final float EVvalue,
@NonNull final float[] bounds,
@Nullable final PointF[] points,
final boolean notify) {
final float old = mExposureCorrectionValue;
mExposureCorrectionValue = EVvalue;
mHandler.run(new Runnable() {
@ -1168,11 +1200,14 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
}
@SuppressWarnings("WeakerAccess")
protected boolean applyExposureCorrection(@NonNull CaptureRequest.Builder builder, float oldEVvalue) {
protected boolean applyExposureCorrection(@NonNull CaptureRequest.Builder builder,
float oldEVvalue) {
if (mCameraOptions.isExposureCorrectionSupported()) {
Rational exposureCorrectionStep = readCharacteristic(CameraCharacteristics.CONTROL_AE_COMPENSATION_STEP,
Rational exposureCorrectionStep = readCharacteristic(
CameraCharacteristics.CONTROL_AE_COMPENSATION_STEP,
new Rational(1, 1));
int exposureCorrectionSteps = Math.round(mExposureCorrectionValue * exposureCorrectionStep.floatValue());
int exposureCorrectionSteps = Math.round(mExposureCorrectionValue
* exposureCorrectionStep.floatValue());
builder.set(CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION, exposureCorrectionSteps);
return true;
}
@ -1229,7 +1264,8 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
// After preview, the frame manager is correctly set up
Frame frame = getFrameManager().getFrame(data,
System.currentTimeMillis(),
getAngles().offset(Reference.SENSOR, Reference.OUTPUT, Axis.RELATIVE_TO_SENSOR));
getAngles().offset(Reference.SENSOR, Reference.OUTPUT,
Axis.RELATIVE_TO_SENSOR));
mCallback.dispatchFrame(frame);
} else {
getFrameManager().onBufferUnused(data);
@ -1243,8 +1279,8 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
mHandler.run(new Runnable() {
@Override
public void run() {
LOG.i("setHasFrameProcessors", "changing to", hasFrameProcessors, "executing.",
"BindState:", getBindState(),
LOG.i("setHasFrameProcessors", "changing to", hasFrameProcessors,
"executing. BindState:", getBindState(),
"PreviewState:", getPreviewState());
// Frame processing is set up partially when binding and partially when starting
@ -1253,7 +1289,8 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
LOG.i("setHasFrameProcessors", "not bound so won't restart.");
} else if (getPreviewState() == STATE_STARTED) {
// This needs a restartBind(). NOTE: if taking video, this stops it.
LOG.i("setHasFrameProcessors", "bound with preview. Calling restartBind().");
LOG.i("setHasFrameProcessors", "bound with preview.",
"Calling restartBind().");
restartBind();
} else {
// Bind+Preview is neither completely started nor completely stopped.
@ -1262,7 +1299,7 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
// this should be discouraged anyway since changing the frame processor number
// at this time requires restarting the camera when it was just opened.
// For these reasons, let's throw.
throw new IllegalStateException("Added or removed a FrameProcessor at an illegal " +
throw new IllegalStateException("Added/removed a FrameProcessor at illegal " +
"time. These operations should be done before opening the camera, or " +
"before closing it - NOT when it just opened, for example during the " +
"onCameraOpened() callback.");
@ -1282,8 +1319,9 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
@Override
public void run() {
LOG.i("startAutoFocus", "executing. Preview state:", getPreviewState());
// This will only work when we have a preview, since it launches the preview in the end.
// Even without this it would need the bind state at least, since we need the preview size.
// This will only work when we have a preview, since it launches the preview
// in the end. Even without this it would need the bind state at least,
// since we need the preview size.
if (getPreviewState() < STATE_STARTED) return;
// The camera options API still has the auto focus API but it really
@ -1301,7 +1339,8 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
mCallback.dispatchOnFocusEnd(gesture, action.isSuccessful(), point);
mHandler.remove(mUnlockAndResetMeteringRunnable);
if (shouldResetAutoFocus()) {
mHandler.post(getAutoFocusResetDelay(), mUnlockAndResetMeteringRunnable);
mHandler.post(getAutoFocusResetDelay(),
mUnlockAndResetMeteringRunnable);
}
}
});
@ -1320,7 +1359,8 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
// The last one is under our control because the library has no focus API.
// So let's set a good af mode here. This operation is reverted during onMeteringReset().
applyFocusForMetering(mRepeatingRequestBuilder);
mMeterAction = new MeterAction(Camera2Engine.this, point, point == null);
mMeterAction = new MeterAction(Camera2Engine.this, point,
point == null);
return mMeterAction;
}
@ -1339,8 +1379,10 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
protected void onStart(@NonNull ActionHolder holder) {
super.onStart(holder);
applyDefaultFocus(holder.getBuilder(this));
holder.getBuilder(this).set(CaptureRequest.CONTROL_AE_LOCK, false);
holder.getBuilder(this).set(CaptureRequest.CONTROL_AWB_LOCK, false);
holder.getBuilder(this)
.set(CaptureRequest.CONTROL_AE_LOCK, false);
holder.getBuilder(this)
.set(CaptureRequest.CONTROL_AWB_LOCK, false);
holder.applyBuilder(this);
setState(STATE_COMPLETED);
// TODO should wait results?
@ -1405,7 +1447,8 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
}
@Override
public void applyBuilder(@NonNull Action source, @NonNull CaptureRequest.Builder builder) throws CameraAccessException {
public void applyBuilder(@NonNull Action source, @NonNull CaptureRequest.Builder builder)
throws CameraAccessException {
mSession.capture(builder.build(), mRepeatingRequestCallback, null);
}

@ -64,8 +64,10 @@ import java.util.concurrent.TimeUnit;
* Setting up the Camera is usually a 4-step process:
* 1. Setting up the Surface. Done by {@link CameraPreview}.
* 2. Starting the camera. Done by us. See {@link #startEngine()}, {@link #onStartEngine()}.
* 3. Binding the camera to the surface. Done by us. See {@link #startBind()}, {@link #onStartBind()}
* 4. Streaming the camera preview. Done by us. See {@link #startPreview()}, {@link #onStartPreview()}
* 3. Binding the camera to the surface. Done by us. See {@link #startBind()},
* {@link #onStartBind()}
* 4. Streaming the camera preview. Done by us. See {@link #startPreview()},
* {@link #onStartPreview()}
*
* The first two steps can actually happen at the same time; the order is not guaranteed,
* and we just get a callback from the Preview when step 1 happens.
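*
* As an illustrative sketch (not the exact implementation), the happy path chains the steps
* through tasks on the engine thread:
*
*     startEngine()                                  // S2: open the camera
*         .onSuccessTask(result -> startBind())      // S3: bind the camera to the surface
*         .onSuccessTask(result -> startPreview());  // S4: start streaming the preview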
@ -80,14 +82,16 @@ import java.util.concurrent.TimeUnit;
* STATE
* We only expose generic {@link #start()} and {@link #stop()} calls to the outside.
* The external users of this class are most likely interested in whether we have completed step 2
* or not, since that tells us if we can act on the camera or not, rather than knowing about steps 3 and 4.
* or not, since that tells us if we can act on the camera or not, rather than knowing about
* steps 3 and 4.
*
* So in the {@link CameraEngine} notation,
* - {@link #start()}: ASYNC - starts the engine (S2). When possible, at a later time, S3 and S4 are also performed.
* - {@link #start()}: ASYNC - starts the engine (S2). When possible, at a later time,
* S3 and S4 are also performed.
* - {@link #stop()}: ASYNC - stops everything: undoes S4, then S3, then S2.
* - {@link #restart()}: ASYNC - completes a stop then a start.
* - {@link #destroy()}: SYNC - performs a {@link #stop()} that will go on no matter the exceptions, without throwing.
* Makes the engine unusable and clears resources.
* - {@link #destroy()}: SYNC - performs a {@link #stop()} that will go on no matter the exceptions,
* without throwing. Makes the engine unusable and clears resources.
*
* For example, we expose the engine (S2) state through {@link #getEngineState()}. It will be:
* - {@link #STATE_STARTING} if we're into step 2
@ -109,13 +113,13 @@ import java.util.concurrent.TimeUnit;
*
*
* ERROR HANDLING
* THe {@link #mHandler} thread has a special {@link Thread.UncaughtExceptionHandler} that handles exceptions
* and dispatches error to the callback (instead of crashing the app). This lets subclasses run code
* safely and directly throw {@link CameraException}s when needed.
* The {@link #mHandler} thread has a special {@link Thread.UncaughtExceptionHandler} that handles
* exceptions and dispatches errors to the callback (instead of crashing the app).
* This lets subclasses run code safely and directly throw {@link CameraException}s when needed.
*
* For convenience, the two main method {@link #onStartEngine()} and {@link #onStopEngine()} are already
* called on the engine thread, but they can still be asynchronous by returning a Google's
* {@link com.google.android.gms.tasks.Task}.
* For convenience, the two main methods {@link #onStartEngine()} and {@link #onStopEngine()}
* are already called on the engine thread, but they can still be asynchronous by returning
* a Google {@link com.google.android.gms.tasks.Task}.
*/
public abstract class CameraEngine implements
CameraPreview.SurfaceCallback,
@ -133,7 +137,9 @@ public abstract class CameraEngine implements
void dispatchOnFocusStart(@Nullable Gesture trigger, @NonNull PointF where);
void dispatchOnFocusEnd(@Nullable Gesture trigger, boolean success, @NonNull PointF where);
void dispatchOnZoomChanged(final float newValue, @Nullable final PointF[] fingers);
void dispatchOnExposureCorrectionChanged(float newValue, @NonNull float[] bounds, @Nullable PointF[] fingers);
void dispatchOnExposureCorrectionChanged(float newValue,
@NonNull float[] bounds,
@Nullable PointF[] fingers);
void dispatchFrame(@NonNull Frame frame);
void dispatchError(CameraException exception);
void dispatchOnVideoRecordingStart();
@ -186,8 +192,10 @@ public abstract class CameraEngine implements
private int mAudioBitRate;
private boolean mHasFrameProcessors;
private long mAutoFocusResetDelayMillis;
private int mSnapshotMaxWidth = Integer.MAX_VALUE; // in REF_VIEW for consistency with SizeSelectors
private int mSnapshotMaxHeight = Integer.MAX_VALUE; // in REF_VIEW for consistency with SizeSelectors
// in REF_VIEW, for consistency with SizeSelectors
private int mSnapshotMaxWidth = Integer.MAX_VALUE;
// in REF_VIEW, for consistency with SizeSelectors
private int mSnapshotMaxHeight = Integer.MAX_VALUE;
private Overlay overlay;
// Steps
@ -205,9 +213,11 @@ public abstract class CameraEngine implements
// Ops used for testing.
@VisibleForTesting(otherwise = VisibleForTesting.PROTECTED) Op<Void> mZoomOp = new Op<>();
@VisibleForTesting(otherwise = VisibleForTesting.PROTECTED) Op<Void> mExposureCorrectionOp = new Op<>();
@VisibleForTesting(otherwise = VisibleForTesting.PROTECTED) Op<Void> mExposureCorrectionOp
= new Op<>();
@VisibleForTesting(otherwise = VisibleForTesting.PROTECTED) Op<Void> mFlashOp = new Op<>();
@VisibleForTesting(otherwise = VisibleForTesting.PROTECTED) Op<Void> mWhiteBalanceOp = new Op<>();
@VisibleForTesting(otherwise = VisibleForTesting.PROTECTED) Op<Void> mWhiteBalanceOp
= new Op<>();
@VisibleForTesting(otherwise = VisibleForTesting.PROTECTED) Op<Void> mHdrOp = new Op<>();
@VisibleForTesting(otherwise = VisibleForTesting.PROTECTED) Op<Void> mLocationOp = new Op<>();
@VisibleForTesting(otherwise = VisibleForTesting.PROTECTED) Op<Void> mPlaySoundsOp = new Op<>();
@ -258,9 +268,9 @@ public abstract class CameraEngine implements
/**
* Handles exceptions coming from either runtime errors on the {@link #mHandler} code that is
* not caught (using the {@link CrashExceptionHandler}), as might happen during standard mHandler.post()
* operations that subclasses might do, OR for errors caught by tasks and continuations that
* we launch here.
* not caught (using the {@link CrashExceptionHandler}), as might happen during standard
* mHandler.post() operations that subclasses might do, OR from errors caught by tasks and
* continuations that we launch here.
*
* In the first case, the thread is about to be terminated. In the second case,
* we can actually keep using it.
@ -269,7 +279,9 @@ public abstract class CameraEngine implements
* @param throwable the throwable
* @param fromExceptionHandler true if coming from exception handler
*/
private void handleException(@NonNull Thread thread, final @NonNull Throwable throwable, final boolean fromExceptionHandler) {
private void handleException(@NonNull Thread thread,
final @NonNull Throwable throwable,
final boolean fromExceptionHandler) {
if (!(throwable instanceof CameraException)) {
// This is unexpected, either a bug or something the developer should know.
// Release and crash the UI thread so we get bug reports.
@ -290,7 +302,8 @@ public abstract class CameraEngine implements
}
final CameraException cameraException = (CameraException) throwable;
LOG.e("uncaughtException:", "Got CameraException:", cameraException, "on engine state:", getEngineStateName());
LOG.e("uncaughtException:", "Got CameraException:", cameraException,
"on engine state:", getEngineStateName());
if (fromExceptionHandler) {
// Got to restart the handler.
thread.interrupt();
@ -478,7 +491,8 @@ public abstract class CameraEngine implements
@Override
public void run() {
LOG.w("restartBind", "executing stopPreview.");
stopPreview(false).continueWithTask(mHandler.getExecutor(), new Continuation<Void, Task<Void>>() {
stopPreview(false).continueWithTask(mHandler.getExecutor(),
new Continuation<Void, Task<Void>>() {
@Override
public Task<Void> then(@NonNull Task<Void> task) {
LOG.w("restartBind", "executing stopBind.");
@ -525,7 +539,9 @@ public abstract class CameraEngine implements
@NonNull
@WorkerThread
private Task<Void> stopPreview(boolean swallowExceptions) {
LOG.i("stopPreview", "needsStopPreview:", needsStopPreview(), "swallowExceptions:", swallowExceptions);
LOG.i("stopPreview",
"needsStopPreview:", needsStopPreview(),
"swallowExceptions:", swallowExceptions);
if (needsStopPreview()) {
mPreviewStep.doStop(swallowExceptions, new Callable<Task<Void>>() {
@Override
@ -595,7 +611,8 @@ public abstract class CameraEngine implements
@Override
public final void onSurfaceChanged() {
LOG.i("onSurfaceChanged:", "Size is", getPreviewSurfaceSize(Reference.VIEW), "Posting.");
LOG.i("onSurfaceChanged:", "Size is", getPreviewSurfaceSize(Reference.VIEW),
"Posting.");
mHandler.run(new Runnable() {
@Override
public void run() {
@ -608,9 +625,11 @@ public abstract class CameraEngine implements
// Compute a new camera preview size and apply.
Size newSize = computePreviewStreamSize();
if (newSize.equals(mPreviewStreamSize)) {
LOG.i("onSurfaceChanged:", "The computed preview size is identical. No op.");
LOG.i("onSurfaceChanged:",
"The computed preview size is identical. No op.");
} else {
LOG.i("onSurfaceChanged:", "Computed a new preview size. Calling onPreviewStreamSizeChanged().");
LOG.i("onSurfaceChanged:",
"Computed a new preview size. Calling onPreviewStreamSizeChanged().");
mPreviewStreamSize = newSize;
onPreviewStreamSizeChanged();
}
@ -633,7 +652,8 @@ public abstract class CameraEngine implements
mHandler.run(new Runnable() {
@Override
public void run() {
stopPreview(false).onSuccessTask(mHandler.getExecutor(), new SuccessContinuation<Void, Void>() {
stopPreview(false).onSuccessTask(mHandler.getExecutor(),
new SuccessContinuation<Void, Void>() {
@NonNull
@Override
public Task<Void> then(@Nullable Void aVoid) {
@ -663,7 +683,8 @@ public abstract class CameraEngine implements
// Stop if needed, synchronously and silently.
// Cannot use Tasks.await() because we might be on the UI thread.
final CountDownLatch latch = new CountDownLatch(1);
stop(true).addOnCompleteListener(mHandler.getExecutor(), new OnCompleteListener<Void>() {
stop(true).addOnCompleteListener(mHandler.getExecutor(),
new OnCompleteListener<Void>() {
@Override
public void onComplete(@NonNull Task<Void> task) {
latch.countDown();
@ -695,14 +716,17 @@ public abstract class CameraEngine implements
@Override
public void run() {
LOG.w("Start:", "executing runnable. AllState is", mAllStep.getState());
// It's better to schedule anyway. allStep might be STARTING and we might be tempted to early return here,
// But the truth is that there might be a stop already scheduled when the STARTING op ends.
// It's better to schedule anyway. allStep might be STARTING and we might be
// tempted to early return here, but the truth is that there might be a stop
// already scheduled when the STARTING op ends.
// if (mAllStep.isStoppingOrStopped()) {
// LOG.i("Start:", "executing runnable. AllState is STOPPING or STOPPED, so we schedule a start.");
// LOG.i("Start:", "executing runnable. AllState is STOPPING or STOPPED,
// so we schedule a start.");
mAllStep.doStart(false, new Callable<Task<Void>>() {
@Override
public Task<Void> call() {
return startEngine().addOnFailureListener(mHandler.getExecutor(), new OnFailureListener() {
return startEngine().addOnFailureListener(mHandler.getExecutor(),
new OnFailureListener() {
@Override
public void onFailure(@NonNull Exception e) {
outTask.trySetException(e);
@ -725,7 +749,8 @@ public abstract class CameraEngine implements
});
// } else {
// // NOTE: this returns early if we were STARTING.
// LOG.i("Start:", "executing runnable. AllState is STARTING or STARTED, so we return early.");
// LOG.i("Start:",
// "executing runnable. AllState is STARTING or STARTED, so we return early.");
// outTask.trySetResult(null);
// }
}
@ -746,14 +771,17 @@ public abstract class CameraEngine implements
@Override
public void run() {
LOG.w("Stop:", "executing runnable. AllState is", mAllStep.getState());
// It's better to schedule anyway. allStep might be STOPPING and we might be tempted to early return here,
// But the truth is that there might be a start already scheduled when the STOPPING op ends.
// It's better to schedule anyway. allStep might be STOPPING and we might be
// tempted to early return here, but the truth is that there might be a start
// already scheduled when the STOPPING op ends.
// if (mAllStep.isStartedOrStarting()) {
// LOG.i("Stop:", "executing runnable. AllState is STARTING or STARTED, so we schedule a stop.");
// LOG.i("Stop:", "executing runnable. AllState is STARTING or STARTED,
// so we schedule a stop.");
mAllStep.doStop(swallowExceptions, new Callable<Task<Void>>() {
@Override
public Task<Void> call() {
return stopPreview(swallowExceptions).continueWithTask(mHandler.getExecutor(), new Continuation<Void, Task<Void>>() {
return stopPreview(swallowExceptions).continueWithTask(
mHandler.getExecutor(), new Continuation<Void, Task<Void>>() {
@Override
public Task<Void> then(@NonNull Task<Void> task) {
return stopBind(swallowExceptions);
@ -779,7 +807,8 @@ public abstract class CameraEngine implements
});
// } else {
// // NOTE: this returns early if we were STOPPING.
// LOG.i("Stop:", "executing runnable. AllState is STOPPING or STOPPED, so we return early.");
// LOG.i("Stop:", "executing runnable.
// AllState is STOPPING or STOPPED, so we return early.");
// outTask.trySetResult(null);
// }
}
@ -800,7 +829,6 @@ public abstract class CameraEngine implements
return overlay;
}
@SuppressWarnings("WeakerAccess")
public final Angles getAngles() {
return mAngles;
}
@ -888,9 +916,13 @@ public abstract class CameraEngine implements
return mSnapshotMaxHeight;
}
public final void setAutoFocusResetDelay(long delayMillis) { mAutoFocusResetDelayMillis = delayMillis; }
public final void setAutoFocusResetDelay(long delayMillis) {
mAutoFocusResetDelayMillis = delayMillis;
}
public final long getAutoFocusResetDelay() { return mAutoFocusResetDelayMillis; }
public final long getAutoFocusResetDelay() {
return mAutoFocusResetDelayMillis;
}
/**
* Sets a new facing value. This will restart the session (if there's any)
@ -1039,8 +1071,8 @@ public abstract class CameraEngine implements
* Camera is about to be opened. Implementors should look into available cameras
* and see if any of them matches the given {@link Facing} value.
*
* If so, implementors should set {@link Angles#setSensorOffset(Facing, int)} and any other information
* (like camera ID) needed to start the engine.
* If so, implementors should set {@link Angles#setSensorOffset(Facing, int)}
* and any other information (like camera ID) needed to start the engine.
*
* @param facing the facing value
* @return true if we have one
@ -1059,7 +1091,10 @@ public abstract class CameraEngine implements
public abstract void setZoom(float zoom, @Nullable PointF[] points, boolean notify);
// If closed, no-op. If opened, check supported and apply.
public abstract void setExposureCorrection(float EVvalue, @NonNull float[] bounds, @Nullable PointF[] points, boolean notify);
public abstract void setExposureCorrection(float EVvalue,
@NonNull float[] bounds,
@Nullable PointF[] points,
boolean notify);
// If closed, keep. If opened, check supported and apply.
public abstract void setFlash(@NonNull Flash flash);
@ -1091,7 +1126,8 @@ public abstract class CameraEngine implements
mHandler.run(new Runnable() {
@Override
public void run() {
LOG.v("takePicture", "performing. BindState:", getBindState(), "isTakingPicture:", isTakingPicture());
LOG.v("takePicture", "performing. BindState:", getBindState(),
"isTakingPicture:", isTakingPicture());
if (mMode == Mode.VIDEO) {
throw new IllegalStateException("Can't take hq pictures while in VIDEO mode");
}
@ -1115,7 +1151,8 @@ public abstract class CameraEngine implements
mHandler.run(new Runnable() {
@Override
public void run() {
LOG.v("takePictureSnapshot", "performing. BindState:", getBindState(), "isTakingPicture:", isTakingPicture());
LOG.v("takePictureSnapshot", "performing. BindState:",
getBindState(), "isTakingPicture:", isTakingPicture());
if (getBindState() < STATE_STARTED) return;
if (isTakingPicture()) return;
stub.location = mLocation;
@ -1141,7 +1178,8 @@ public abstract class CameraEngine implements
mCallback.dispatchOnPictureTaken(result);
} else {
LOG.e("onPictureResult", "result is null: something went wrong.", error);
mCallback.dispatchError(new CameraException(error, CameraException.REASON_PICTURE_FAILED));
mCallback.dispatchError(new CameraException(error,
CameraException.REASON_PICTURE_FAILED));
}
}
@ -1154,7 +1192,8 @@ public abstract class CameraEngine implements
mHandler.run(new Runnable() {
@Override
public void run() {
LOG.v("takeVideo", "performing. BindState:", getBindState(), "isTakingVideo:", isTakingVideo());
LOG.v("takeVideo", "performing. BindState:", getBindState(),
"isTakingVideo:", isTakingVideo());
if (getBindState() < STATE_STARTED) return;
if (isTakingVideo()) return;
if (mMode == Mode.PICTURE) {
@ -1179,12 +1218,14 @@ public abstract class CameraEngine implements
* @param stub a video stub
* @param file the output file
*/
public final void takeVideoSnapshot(final @NonNull VideoResult.Stub stub, @NonNull final File file) {
public final void takeVideoSnapshot(@NonNull final VideoResult.Stub stub,
@NonNull final File file) {
LOG.v("takeVideoSnapshot", "scheduling");
mHandler.run(new Runnable() {
@Override
public void run() {
LOG.v("takeVideoSnapshot", "performing. BindState:", getBindState(), "isTakingVideo:", isTakingVideo());
LOG.v("takeVideoSnapshot", "performing. BindState:", getBindState(),
"isTakingVideo:", isTakingVideo());
if (getBindState() < STATE_STARTED) return;
if (isTakingVideo()) return;
stub.file = file;
@ -1233,7 +1274,8 @@ public abstract class CameraEngine implements
mCallback.dispatchOnVideoTaken(result);
} else {
LOG.e("onVideoResult", "result is null: something went wrong.", exception);
mCallback.dispatchError(new CameraException(exception, CameraException.REASON_VIDEO_FAILED));
mCallback.dispatchError(new CameraException(exception,
CameraException.REASON_VIDEO_FAILED));
}
}
@ -1251,10 +1293,13 @@ public abstract class CameraEngine implements
protected abstract void onTakePicture(@NonNull PictureResult.Stub stub, boolean doMetering);
@WorkerThread
protected abstract void onTakePictureSnapshot(@NonNull PictureResult.Stub stub, @NonNull AspectRatio outputRatio, boolean doMetering);
protected abstract void onTakePictureSnapshot(@NonNull PictureResult.Stub stub,
@NonNull AspectRatio outputRatio,
boolean doMetering);
@WorkerThread
protected abstract void onTakeVideoSnapshot(@NonNull VideoResult.Stub stub, @NonNull AspectRatio outputRatio);
protected abstract void onTakeVideoSnapshot(@NonNull VideoResult.Stub stub,
@NonNull AspectRatio outputRatio);
@WorkerThread
protected abstract void onTakeVideo(@NonNull VideoResult.Stub stub);
@ -1289,7 +1334,8 @@ public abstract class CameraEngine implements
private Size getPreviewSurfaceSize(@NonNull Reference reference) {
CameraPreview preview = mPreview;
if (preview == null) return null;
return getAngles().flip(Reference.VIEW, reference) ? preview.getSurfaceSize().flip() : preview.getSurfaceSize();
return getAngles().flip(Reference.VIEW, reference) ? preview.getSurfaceSize().flip()
: preview.getSurfaceSize();
}
/**
@ -1298,7 +1344,7 @@ public abstract class CameraEngine implements
* levels so we don't want to perform the op here.
*
* The base snapshot size is based on PreviewStreamSize (later cropped with view ratio). Why?
* One might be tempted to say that it is the SurfaceSize (which already matches the view ratio).
* One might be tempted to say that it's the SurfaceSize (which already matches the view ratio).
*
* The camera sensor will capture preview frames with PreviewStreamSize and that's it. Then they
* are hardware-scaled by the preview surface, but this does not affect the snapshot, as the
@ -1370,7 +1416,8 @@ public abstract class CameraEngine implements
List<Size> list = new ArrayList<>(sizes);
Size result = selector.select(list).get(0);
if (!list.contains(result)) {
throw new RuntimeException("SizeSelectors must not return Sizes other than those in the input list.");
throw new RuntimeException("SizeSelectors must not return Sizes other than " +
"those in the input list.");
}
LOG.i("computeCaptureSize:", "result:", result, "flip:", flip, "mode:", mode);
if (flip) result = result.flip(); // Go back to REF_SENSOR
@ -1398,13 +1445,17 @@ public abstract class CameraEngine implements
sizes.add(flip ? size.flip() : size);
}
// Create our own default selector, which will be used if the external mPreviewStreamSizeSelector
// is null, or if it fails in finding a size.
// Create our own default selector, which will be used if the external
// mPreviewStreamSizeSelector is null, or if it fails in finding a size.
Size targetMinSize = getPreviewSurfaceSize(Reference.VIEW);
if (targetMinSize == null) throw new IllegalStateException("targetMinSize should not be null here.");
if (targetMinSize == null) {
throw new IllegalStateException("targetMinSize should not be null here.");
}
AspectRatio targetRatio = AspectRatio.of(mCaptureSize.getWidth(), mCaptureSize.getHeight());
if (flip) targetRatio = targetRatio.flip();
LOG.i("computePreviewStreamSize:", "targetRatio:", targetRatio, "targetMinSize:", targetMinSize);
LOG.i("computePreviewStreamSize:",
"targetRatio:", targetRatio,
"targetMinSize:", targetMinSize);
SizeSelector matchRatio = SizeSelectors.and( // Match this aspect ratio and sort by biggest
SizeSelectors.aspectRatio(targetRatio, 0),
SizeSelectors.biggest());
@ -1429,7 +1480,8 @@ public abstract class CameraEngine implements
}
Size result = selector.select(sizes).get(0);
if (!sizes.contains(result)) {
throw new RuntimeException("SizeSelectors must not return Sizes other than those in the input list.");
throw new RuntimeException("SizeSelectors must not return Sizes other than " +
"those in the input list.");
}
if (flip) result = result.flip();
LOG.i("computePreviewStreamSize:", "result:", result, "flip:", flip);

@ -15,12 +15,14 @@ import com.otaliastudios.cameraview.engine.action.BaseAction;
@RequiresApi(Build.VERSION_CODES.LOLLIPOP)
class LogAction extends BaseAction {
private final static CameraLogger LOG = CameraLogger.create(Camera2Engine.class.getSimpleName());
private final static CameraLogger LOG
= CameraLogger.create(Camera2Engine.class.getSimpleName());
private String lastLog;
@Override
public void onCaptureCompleted(@NonNull ActionHolder holder, @NonNull CaptureRequest request,
public void onCaptureCompleted(@NonNull ActionHolder holder,
@NonNull CaptureRequest request,
@NonNull TotalCaptureResult result) {
super.onCaptureCompleted(holder, request, result);
Integer aeMode = result.get(CaptureResult.CONTROL_AE_MODE);

@ -105,17 +105,21 @@ class Step {
return doStart(swallowExceptions, op, null);
}
Task<Void> doStart(final boolean swallowExceptions, final @NonNull Callable<Task<Void>> op, final @Nullable Runnable onStarted) {
Task<Void> doStart(final boolean swallowExceptions,
final @NonNull Callable<Task<Void>> op,
final @Nullable Runnable onStarted) {
LOG.i(name, "doStart", "Called. Enqueuing.");
task = task.continueWithTask(callback.getExecutor(), new Continuation<Void, Task<Void>>() {
@Override
public Task<Void> then(@NonNull Task<Void> task) throws Exception {
LOG.i(name, "doStart", "About to start. Setting state to STARTING");
setState(STATE_STARTING);
return op.call().addOnFailureListener(callback.getExecutor(), new OnFailureListener() {
return op.call().addOnFailureListener(callback.getExecutor(),
new OnFailureListener() {
@Override
public void onFailure(@NonNull Exception e) {
LOG.w(name, "doStart", "Failed with error", e, "Setting state to STOPPED");
LOG.w(name, "doStart", "Failed with error", e,
"Setting state to STOPPED");
setState(STATE_STOPPED);
if (!swallowExceptions) callback.handleException(e);
}
@ -139,17 +143,21 @@ class Step {
return doStop(swallowExceptions, op, null);
}
Task<Void> doStop(final boolean swallowExceptions, final @NonNull Callable<Task<Void>> op, final @Nullable Runnable onStopped) {
Task<Void> doStop(final boolean swallowExceptions,
final @NonNull Callable<Task<Void>> op,
final @Nullable Runnable onStopped) {
LOG.i(name, "doStop", "Called. Enqueuing.");
task = task.continueWithTask(callback.getExecutor(), new Continuation<Void, Task<Void>>() {
@Override
public Task<Void> then(@NonNull Task<Void> task) throws Exception {
LOG.i(name, "doStop", "About to stop. Setting state to STOPPING");
state = STATE_STOPPING;
return op.call().addOnFailureListener(callback.getExecutor(), new OnFailureListener() {
return op.call().addOnFailureListener(callback.getExecutor(),
new OnFailureListener() {
@Override
public void onFailure(@NonNull Exception e) {
LOG.w(name, "doStop", "Failed with error", e, "Setting state to STOPPED");
LOG.w(name, "doStop", "Failed with error", e,
"Setting state to STOPPED");
state = STATE_STOPPED;
if (!swallowExceptions) callback.handleException(e);
}

@ -13,8 +13,8 @@ import androidx.annotation.RequiresApi;
/**
* The Action class encapsulates logic for completing an action in a Camera2 environment.
* In this case, we are often interested in constantly receiving the {@link CaptureResult}
* and {@link CaptureRequest} callbacks, as well as applying changes to a {@link CaptureRequest.Builder}
* and having them applied to the sensor.
* and {@link CaptureRequest} callbacks, as well as applying changes to a
* {@link CaptureRequest.Builder} and having them applied to the sensor.
*
* The Action class receives the given callbacks and can operate over the engine
* through the {@link ActionHolder} object.
@ -61,25 +61,32 @@ public interface Action {
void removeCallback(@NonNull ActionCallback callback);
/**
* Called from {@link CaptureCallback#onCaptureStarted(CameraCaptureSession, CaptureRequest, long, long)}.
* Called from {@link CaptureCallback#onCaptureStarted(CameraCaptureSession, CaptureRequest,
* long, long)}.
* @param holder the holder
* @param request the request
*/
void onCaptureStarted(@NonNull ActionHolder holder, @NonNull CaptureRequest request);
/**
* Called from {@link CaptureCallback#onCaptureProgressed(CameraCaptureSession, CaptureRequest, CaptureResult)}.
* Called from {@link CaptureCallback#onCaptureProgressed(CameraCaptureSession, CaptureRequest,
* CaptureResult)}.
* @param holder the holder
* @param request the request
* @param result the result
*/
void onCaptureProgressed(@NonNull ActionHolder holder, @NonNull CaptureRequest request, @NonNull CaptureResult result);
void onCaptureProgressed(@NonNull ActionHolder holder,
@NonNull CaptureRequest request,
@NonNull CaptureResult result);
/**
* Called from {@link CaptureCallback#onCaptureCompleted(CameraCaptureSession, CaptureRequest, TotalCaptureResult)}.
* Called from {@link CaptureCallback#onCaptureCompleted(CameraCaptureSession, CaptureRequest,
* TotalCaptureResult)}.
* @param holder the holder
* @param request the request
* @param result the result
*/
void onCaptureCompleted(@NonNull ActionHolder holder, @NonNull CaptureRequest request, @NonNull TotalCaptureResult result);
void onCaptureCompleted(@NonNull ActionHolder holder,
@NonNull CaptureRequest request,
@NonNull TotalCaptureResult result);
}
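// A minimal sketch of a concrete action (illustrative only, not part of the library):
// it completes as soon as the first total result arrives, following the same pattern
// used by the built-in meter and lock actions.
//
// class FirstResultAction extends BaseAction {
//     @Override
//     public void onCaptureCompleted(@NonNull ActionHolder holder,
//                                    @NonNull CaptureRequest request,
//                                    @NonNull TotalCaptureResult result) {
//         super.onCaptureCompleted(holder, request, result);
//         setState(STATE_COMPLETED);
//     }
// }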

@ -77,5 +77,6 @@ public interface ActionHolder {
* @param builder builder
* @throws CameraAccessException camera exception
*/
void applyBuilder(@NonNull Action source, @NonNull CaptureRequest.Builder builder) throws CameraAccessException;
void applyBuilder(@NonNull Action source, @NonNull CaptureRequest.Builder builder)
throws CameraAccessException;
}

@ -50,13 +50,17 @@ public abstract class ActionWrapper extends BaseAction {
}
@Override
public void onCaptureProgressed(@NonNull ActionHolder holder, @NonNull CaptureRequest request, @NonNull CaptureResult result) {
public void onCaptureProgressed(@NonNull ActionHolder holder,
@NonNull CaptureRequest request,
@NonNull CaptureResult result) {
super.onCaptureProgressed(holder, request, result);
getAction().onCaptureProgressed(holder, request, result);
}
@Override
public void onCaptureCompleted(@NonNull ActionHolder holder, @NonNull CaptureRequest request, @NonNull TotalCaptureResult result) {
public void onCaptureCompleted(@NonNull ActionHolder holder,
@NonNull CaptureRequest request,
@NonNull TotalCaptureResult result) {
super.onCaptureCompleted(holder, request, result);
getAction().onCaptureCompleted(holder, request, result);
}

@ -78,12 +78,16 @@ public abstract class BaseAction implements Action {
}
@Override
public void onCaptureProgressed(@NonNull ActionHolder holder, @NonNull CaptureRequest request, @NonNull CaptureResult result) {
public void onCaptureProgressed(@NonNull ActionHolder holder,
@NonNull CaptureRequest request,
@NonNull CaptureResult result) {
// Overrideable
}
@Override
public void onCaptureCompleted(@NonNull ActionHolder holder, @NonNull CaptureRequest request, @NonNull TotalCaptureResult result) {
public void onCaptureCompleted(@NonNull ActionHolder holder,
@NonNull CaptureRequest request,
@NonNull TotalCaptureResult result) {
// Overrideable
}

@ -75,7 +75,8 @@ class SequenceAction extends BaseAction {
}
@Override
public void onCaptureProgressed(@NonNull ActionHolder holder, @NonNull CaptureRequest request,
public void onCaptureProgressed(@NonNull ActionHolder holder,
@NonNull CaptureRequest request,
@NonNull CaptureResult result) {
super.onCaptureProgressed(holder, request, result);
if (runningAction >= 0) {
@ -84,7 +85,8 @@ class SequenceAction extends BaseAction {
}
@Override
public void onCaptureCompleted(@NonNull ActionHolder holder, @NonNull CaptureRequest request,
public void onCaptureCompleted(@NonNull ActionHolder holder,
@NonNull CaptureRequest request,
@NonNull TotalCaptureResult result) {
super.onCaptureCompleted(holder, request, result);
if (runningAction >= 0) {

@ -65,7 +65,8 @@ class TogetherAction extends BaseAction {
}
@Override
public void onCaptureProgressed(@NonNull ActionHolder holder, @NonNull CaptureRequest request,
public void onCaptureProgressed(@NonNull ActionHolder holder,
@NonNull CaptureRequest request,
@NonNull CaptureResult result) {
super.onCaptureProgressed(holder, request, result);
for (BaseAction action : actions) {
@ -74,7 +75,8 @@ class TogetherAction extends BaseAction {
}
@Override
public void onCaptureCompleted(@NonNull ActionHolder holder, @NonNull CaptureRequest request,
public void onCaptureCompleted(@NonNull ActionHolder holder,
@NonNull CaptureRequest request,
@NonNull TotalCaptureResult result) {
super.onCaptureCompleted(holder, request, result);
for (BaseAction action : actions) {

@ -20,7 +20,8 @@ public class ExposureLock extends BaseLock {
@Override
protected boolean checkIsSupported(@NonNull ActionHolder holder) {
boolean isNotLegacy = readCharacteristic(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL, -1)
boolean isNotLegacy = readCharacteristic(
CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL, -1)
!= CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY;
// Not sure we should check aeMode as well, probably all aeModes support locking,
// but this should not be a big issue since we're not even using different AE modes.
@ -30,7 +31,8 @@ public class ExposureLock extends BaseLock {
|| aeMode == CameraCharacteristics.CONTROL_AE_MODE_ON_ALWAYS_FLASH
|| aeMode == CameraCharacteristics.CONTROL_AE_MODE_ON_AUTO_FLASH
|| aeMode == CameraCharacteristics.CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE
|| aeMode == 5 /* CameraCharacteristics.CONTROL_AE_MODE_ON_EXTERNAL_FLASH, API 28 */);
|| aeMode == 5
/* CameraCharacteristics.CONTROL_AE_MODE_ON_EXTERNAL_FLASH, API 28 */);
boolean result = isNotLegacy && isAEOn;
LOG.i("checkIsSupported:", result);
return result;
@ -49,13 +51,16 @@ public class ExposureLock extends BaseLock {
int cancelTrigger = Build.VERSION.SDK_INT >= 23
? CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER_CANCEL
: CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
holder.getBuilder(this).set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER, cancelTrigger);
holder.getBuilder(this).set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER,
cancelTrigger);
holder.getBuilder(this).set(CaptureRequest.CONTROL_AE_LOCK, true);
holder.applyBuilder(this);
}
@Override
public void onCaptureCompleted(@NonNull ActionHolder holder, @NonNull CaptureRequest request, @NonNull TotalCaptureResult result) {
public void onCaptureCompleted(@NonNull ActionHolder holder,
@NonNull CaptureRequest request,
@NonNull TotalCaptureResult result) {
super.onCaptureCompleted(holder, request, result);
Integer aeState = result.get(CaptureResult.CONTROL_AE_STATE);
LOG.i("processCapture:", "aeState:", aeState);

@ -22,7 +22,8 @@ public class FocusLock extends BaseLock {
protected boolean checkIsSupported(@NonNull ActionHolder holder) {
// We'll lock by changing the AF mode to AUTO.
// In that mode, AF won't change unless someone starts a trigger operation.
int[] modes = readCharacteristic(CameraCharacteristics.CONTROL_AF_AVAILABLE_MODES, new int[]{});
int[] modes = readCharacteristic(CameraCharacteristics.CONTROL_AF_AVAILABLE_MODES,
new int[]{});
for (int mode : modes) {
if (mode == CameraCharacteristics.CONTROL_AF_MODE_AUTO) {
return true;
@ -50,13 +51,17 @@ public class FocusLock extends BaseLock {
@Override
protected void onStarted(@NonNull ActionHolder holder) {
holder.getBuilder(this).set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_AUTO);
holder.getBuilder(this).set(CaptureRequest.CONTROL_AF_TRIGGER, CaptureRequest.CONTROL_AF_TRIGGER_CANCEL);
holder.getBuilder(this).set(CaptureRequest.CONTROL_AF_MODE,
CaptureRequest.CONTROL_AF_MODE_AUTO);
holder.getBuilder(this).set(CaptureRequest.CONTROL_AF_TRIGGER,
CaptureRequest.CONTROL_AF_TRIGGER_CANCEL);
holder.applyBuilder(this);
}
@Override
public void onCaptureCompleted(@NonNull ActionHolder holder, @NonNull CaptureRequest request, @NonNull TotalCaptureResult result) {
public void onCaptureCompleted(@NonNull ActionHolder holder,
@NonNull CaptureRequest request,
@NonNull TotalCaptureResult result) {
super.onCaptureCompleted(holder, request, result);
Integer afState = result.get(CaptureResult.CONTROL_AF_STATE);
Integer afMode = result.get(CaptureResult.CONTROL_AF_MODE);

@ -20,10 +20,13 @@ public class WhiteBalanceLock extends BaseLock {
@Override
protected boolean checkIsSupported(@NonNull ActionHolder holder) {
boolean isNotLegacy = readCharacteristic(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL, -1)
boolean isNotLegacy = readCharacteristic(
CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL, -1)
!= CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY;
Integer awbMode = holder.getBuilder(this).get(CaptureRequest.CONTROL_AWB_MODE);
boolean result = isNotLegacy && awbMode != null && awbMode == CaptureRequest.CONTROL_AWB_MODE_AUTO;
boolean result = isNotLegacy
&& awbMode != null
&& awbMode == CaptureRequest.CONTROL_AWB_MODE_AUTO;
LOG.i("checkIsSupported:", result);
return result;
}
@ -43,7 +46,9 @@ public class WhiteBalanceLock extends BaseLock {
}
@Override
public void onCaptureCompleted(@NonNull ActionHolder holder, @NonNull CaptureRequest request, @NonNull TotalCaptureResult result) {
public void onCaptureCompleted(@NonNull ActionHolder holder,
@NonNull CaptureRequest request,
@NonNull TotalCaptureResult result) {
super.onCaptureCompleted(holder, request, result);
Integer awbState = result.get(CaptureResult.CONTROL_AWB_STATE);
LOG.i("processCapture:", "awbState:", awbState);

@ -29,7 +29,8 @@ public abstract class BaseReset extends BaseAction {
super.onStart(holder);
MeteringRectangle area = null;
if (resetArea) {
Rect rect = readCharacteristic(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE, new Rect());
Rect rect = readCharacteristic(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE,
new Rect());
area = new MeteringRectangle(rect, MeteringRectangle.METERING_WEIGHT_DONT_CARE);
}
onStarted(holder, area);

@ -32,7 +32,8 @@ public class ExposureMeter extends BaseMeter {
@Override
protected boolean checkIsSupported(@NonNull ActionHolder holder) {
// In our case, this means checking if we support the AE precapture trigger.
boolean isNotLegacy = readCharacteristic(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL, -1)
boolean isNotLegacy = readCharacteristic(
CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL, -1)
!= CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY;
Integer aeMode = holder.getBuilder(this).get(CaptureRequest.CONTROL_AE_MODE);
boolean isAEOn = aeMode != null &&
@ -40,7 +41,8 @@ public class ExposureMeter extends BaseMeter {
|| aeMode == CameraCharacteristics.CONTROL_AE_MODE_ON_ALWAYS_FLASH
|| aeMode == CameraCharacteristics.CONTROL_AE_MODE_ON_AUTO_FLASH
|| aeMode == CameraCharacteristics.CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE
|| aeMode == 5 /* CameraCharacteristics.CONTROL_AE_MODE_ON_EXTERNAL_FLASH, API 28 */);
|| aeMode == 5
/* CameraCharacteristics.CONTROL_AE_MODE_ON_EXTERNAL_FLASH, API 28 */);
boolean result = isNotLegacy && isAEOn;
LOG.i("checkIsSupported:", result);
return result;
@ -63,7 +65,8 @@ public class ExposureMeter extends BaseMeter {
CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER_START);
// Check the regions.
int maxRegions = readCharacteristic(CameraCharacteristics.CONTROL_MAX_REGIONS_AE, 0);
int maxRegions = readCharacteristic(CameraCharacteristics.CONTROL_MAX_REGIONS_AE,
0);
if (!areas.isEmpty() && maxRegions > 0) {
int max = Math.min(maxRegions, areas.size());
holder.getBuilder(this).set(CaptureRequest.CONTROL_AE_REGIONS,
@ -103,8 +106,8 @@ public class ExposureMeter extends BaseMeter {
// PRECAPTURE is a transient state. Being here might mean that precapture ran
// and was successful, OR that the trigger was not even received yet. To
// distinguish, check the trigger state.
if (aeTriggerState != null
&& aeTriggerState == CaptureResult.CONTROL_AE_PRECAPTURE_TRIGGER_START) {
if (aeTriggerState != null && aeTriggerState
== CaptureResult.CONTROL_AE_PRECAPTURE_TRIGGER_START) {
setSuccessful(true);
setState(STATE_COMPLETED);
}

@ -29,29 +29,32 @@ public class ExposureReset extends BaseReset {
@Override
protected void onStarted(@NonNull ActionHolder holder, @Nullable MeteringRectangle area) {
int maxRegions = readCharacteristic(CameraCharacteristics.CONTROL_MAX_REGIONS_AE, 0);
int maxRegions = readCharacteristic(CameraCharacteristics.CONTROL_MAX_REGIONS_AE,
0);
if (area != null && maxRegions > 0) {
holder.getBuilder(this).set(CaptureRequest.CONTROL_AE_REGIONS,
new MeteringRectangle[]{area});
}
// NOTE: precapture might not be supported, in which case I think it will be ignored.
Integer trigger = holder.getLastResult(this).get(CaptureResult.CONTROL_AE_PRECAPTURE_TRIGGER);
Integer trigger = holder.getLastResult(this)
.get(CaptureResult.CONTROL_AE_PRECAPTURE_TRIGGER);
LOG.i("onStarted:", "last precapture trigger is", trigger);
if (trigger != null && trigger == CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER_START) {
LOG.i("onStarted:", "canceling precapture.");
int newTrigger = Build.VERSION.SDK_INT >= 23
? CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER_CANCEL
: CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
holder.getBuilder(this).set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER, newTrigger);
holder.getBuilder(this).set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER,
newTrigger);
}
// Documentation about CONTROL_AE_PRECAPTURE_TRIGGER says that, if it was started but not
// followed by a CAPTURE_INTENT_STILL_PICTURE request, the internal AE routine might remain
// locked unless we unlock manually.
// This is often the case for us, since the snapshot picture recorder does not use the intent
// and anyway we use the precapture sequence for touch metering as well.
// To reset, docs suggest the use of CONTROL_AE_PRECAPTURE_TRIGGER_CANCEL, which we do above,
// This is often the case for us, since the snapshot picture recorder does not use the
// intent and anyway we use the precapture sequence for touch metering as well.
// To reset, docs suggest the use of CONTROL_AE_PRECAPTURE_TRIGGER_CANCEL, which we do above,
// or the technique used below: locking then unlocking. This proved to be the ONLY method
// to unlock reliably, unlike the cancel trigger (which we'll run anyway).
holder.getBuilder(this).set(CaptureRequest.CONTROL_AE_LOCK, true);

@ -53,7 +53,8 @@ public class FocusMeter extends BaseMeter {
LOG.i("onStarted:", "with areas:", areas);
holder.getBuilder(this).set(CaptureRequest.CONTROL_AF_TRIGGER,
CaptureRequest.CONTROL_AF_TRIGGER_START);
int maxRegions = readCharacteristic(CameraCharacteristics.CONTROL_MAX_REGIONS_AF, 0);
int maxRegions = readCharacteristic(CameraCharacteristics.CONTROL_MAX_REGIONS_AF,
0);
if (!areas.isEmpty() && maxRegions > 0) {
int max = Math.min(maxRegions, areas.size());
holder.getBuilder(this).set(CaptureRequest.CONTROL_AF_REGIONS,

@ -27,7 +27,8 @@ public class FocusReset extends BaseReset {
@Override
protected void onStarted(@NonNull ActionHolder holder, @Nullable MeteringRectangle area) {
boolean changed = false;
int maxRegions = readCharacteristic(CameraCharacteristics.CONTROL_MAX_REGIONS_AF, 0);
int maxRegions = readCharacteristic(CameraCharacteristics.CONTROL_MAX_REGIONS_AF,
0);
if (area != null && maxRegions > 0) {
holder.getBuilder(this).set(CaptureRequest.CONTROL_AF_REGIONS,
new MeteringRectangle[]{area});

@ -82,8 +82,8 @@ public class MeterAction extends ActionWrapper {
List<MeteringRectangle> areas = new ArrayList<>();
if (point != null) {
// This is a good Q/A. https://stackoverflow.com/a/33181620/4288782
// At first, the point is relative to the View system and does not account our own cropping.
// Will keep updating these two below.
// At first, the point is relative to the View system and does not account for
// our own cropping. Will keep updating these two below.
final PointF referencePoint = new PointF(point.x, point.y);
Size referenceSize = engine.getPreview().getSurfaceSize();
@ -132,7 +132,7 @@ public class MeterAction extends ActionWrapper {
Size previewStreamSize = engine.getPreviewStreamSize(Reference.VIEW);
Size previewSurfaceSize = referenceSize;
if (previewStreamSize == null) {
throw new IllegalStateException("getPreviewStreamSize should not be null at this point.");
throw new IllegalStateException("getPreviewStreamSize should not be null here.");
}
int referenceWidth = previewSurfaceSize.getWidth();
int referenceHeight = previewSurfaceSize.getHeight();
@ -142,13 +142,15 @@ public class MeterAction extends ActionWrapper {
if (previewStreamAspectRatio.toFloat() > previewSurfaceAspectRatio.toFloat()) {
// Stream is larger. The x coordinate must be increased: a touch on the left side
// of the surface is not on the left side of the stream (it's more to the right).
float scale = previewStreamAspectRatio.toFloat() / previewSurfaceAspectRatio.toFloat();
float scale = previewStreamAspectRatio.toFloat()
/ previewSurfaceAspectRatio.toFloat();
referencePoint.x += previewSurfaceSize.getWidth() * (scale - 1F) / 2F;
referenceWidth = Math.round(previewSurfaceSize.getWidth() * scale);
} else {
// Stream is taller. The y coordinate must be increased: a touch on the top side
// of the surface is not on the top side of the stream (it's a bit lower).
float scale = previewSurfaceAspectRatio.toFloat() / previewStreamAspectRatio.toFloat();
float scale = previewSurfaceAspectRatio.toFloat()
/ previewStreamAspectRatio.toFloat();
referencePoint.y += previewSurfaceSize.getHeight() * (scale - 1F) / 2F;
referenceHeight = Math.round(previewSurfaceSize.getHeight() * scale);
}
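A made-up numeric example of the scaling above (numbers are not from the codebase): with a
1000x2000 surface (ratio 0.50) and a 1500x2000 stream (ratio 0.75), the stream is "larger":

float scale = 0.75f / 0.50f;                    // 1.5
float touchX = 0f;                              // touch on the far left edge of the surface
touchX += 1000 * (scale - 1f) / 2f;             // 250: the surface shows the central 1000px
int referenceWidth = Math.round(1000 * scale);  // 1500: the full stream width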
@ -169,7 +171,8 @@ public class MeterAction extends ActionWrapper {
@SuppressWarnings("SuspiciousNameCombination")
@NonNull
private Size applyPreviewToSensorRotation(@NonNull Size referenceSize, @NonNull PointF referencePoint) {
private Size applyPreviewToSensorRotation(@NonNull Size referenceSize,
@NonNull PointF referencePoint) {
// Not elegant, but the sin/cos way was failing for some reason.
int angle = engine.getAngles().offset(Reference.SENSOR, Reference.VIEW, Axis.ABSOLUTE);
boolean flip = angle % 180 != 0;
@ -194,12 +197,13 @@ public class MeterAction extends ActionWrapper {
}
@NonNull
private Size applyCropRegionCoordinates(@NonNull Size referenceSize, @NonNull PointF referencePoint) {
private Size applyCropRegionCoordinates(@NonNull Size referenceSize,
@NonNull PointF referencePoint) {
// The input point and size refer to the stream rect.
// The stream rect is part of the 'crop region', as described below.
// https://source.android.com/devices/camera/camera3_crop_reprocess.html
Rect cropRect = holder.getBuilder(this).get(CaptureRequest.SCALER_CROP_REGION);
// For now, we don't care about x and y position. Rect should be non-null, but let's be safe.
// For now we don't care about x and y position. Rect should not be null, but let's be safe.
int cropRectWidth = cropRect == null ? referenceSize.getWidth() : cropRect.width();
int cropRectHeight = cropRect == null ? referenceSize.getHeight() : cropRect.height();
// The stream is always centered inside the crop region, and one of the dimensions
@ -210,16 +214,19 @@ public class MeterAction extends ActionWrapper {
}
@NonNull
private Size applyActiveArrayCoordinates(@NonNull Size referenceSize, @NonNull PointF referencePoint) {
private Size applyActiveArrayCoordinates(@NonNull Size referenceSize,
@NonNull PointF referencePoint) {
// The input point and size refer to the scaler crop region.
// We can query for the crop region position inside the active array, so this is easy.
Rect cropRect = holder.getBuilder(this).get(CaptureRequest.SCALER_CROP_REGION);
referencePoint.x += cropRect == null ? 0 : cropRect.left;
referencePoint.y += cropRect == null ? 0 : cropRect.top;
// Finally, get the active rect width and height from characteristics.
Rect activeRect = holder.getCharacteristics(this).get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
Rect activeRect = holder.getCharacteristics(this)
.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
if (activeRect == null) { // Should never happen
activeRect = new Rect(0, 0, referenceSize.getWidth(), referenceSize.getHeight());
activeRect = new Rect(0, 0, referenceSize.getWidth(),
referenceSize.getHeight());
}
return new Size(activeRect.width(), activeRect.height());
}
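A small worked example of the coordinate chain above, with invented numbers: active array
4000x3000 and SCALER_CROP_REGION = (1000, 750, 3000, 2250), i.e. a 2000x1500 crop (2x zoom):

// A point at (500, 375) in crop-region coordinates maps into the active array as:
float x = 500 + 1000;   // += cropRect.left -> 1500
float y = 375 + 750;    // += cropRect.top  -> 1125
// and the reference size becomes the active array size, 4000x3000.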

@ -27,10 +27,13 @@ public class WhiteBalanceMeter extends BaseMeter {
@Override
protected boolean checkIsSupported(@NonNull ActionHolder holder) {
boolean isNotLegacy = readCharacteristic(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL, -1)
boolean isNotLegacy = readCharacteristic(
CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL, -1)
!= CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY;
Integer awbMode = holder.getBuilder(this).get(CaptureRequest.CONTROL_AWB_MODE);
boolean result = isNotLegacy && awbMode != null && awbMode == CaptureRequest.CONTROL_AWB_MODE_AUTO;
boolean result = isNotLegacy
&& awbMode != null
&& awbMode == CaptureRequest.CONTROL_AWB_MODE_AUTO;
LOG.i("checkIsSupported:", result);
return result;
}
@ -46,7 +49,8 @@ public class WhiteBalanceMeter extends BaseMeter {
@Override
protected void onStarted(@NonNull ActionHolder holder, @NonNull List<MeteringRectangle> areas) {
LOG.i("onStarted:", "with areas:", areas);
int maxRegions = readCharacteristic(CameraCharacteristics.CONTROL_MAX_REGIONS_AWB, 0);
int maxRegions = readCharacteristic(CameraCharacteristics.CONTROL_MAX_REGIONS_AWB,
0);
if (!areas.isEmpty() && maxRegions > 0) {
int max = Math.min(maxRegions, areas.size());
holder.getBuilder(this).set(CaptureRequest.CONTROL_AWB_REGIONS,
@ -56,7 +60,8 @@ public class WhiteBalanceMeter extends BaseMeter {
}
@Override
public void onCaptureCompleted(@NonNull ActionHolder holder, @NonNull CaptureRequest request,
public void onCaptureCompleted(@NonNull ActionHolder holder,
@NonNull CaptureRequest request,
@NonNull TotalCaptureResult result) {
super.onCaptureCompleted(holder, request, result);
Integer awbState = result.get(CaptureResult.CONTROL_AWB_STATE);

@ -30,9 +30,11 @@ public class WhiteBalanceReset extends BaseReset {
@Override
protected void onStarted(@NonNull ActionHolder holder, @Nullable MeteringRectangle area) {
LOG.w("onStarted:", "with area:", area);
int maxRegions = readCharacteristic(CameraCharacteristics.CONTROL_MAX_REGIONS_AWB, 0);
int maxRegions = readCharacteristic(CameraCharacteristics.CONTROL_MAX_REGIONS_AWB,
0);
if (area != null && maxRegions > 0) {
holder.getBuilder(this).set(CaptureRequest.CONTROL_AWB_REGIONS, new MeteringRectangle[]{area});
holder.getBuilder(this).set(CaptureRequest.CONTROL_AWB_REGIONS,
new MeteringRectangle[]{area});
holder.applyBuilder(this);
}
setState(STATE_COMPLETED);

@ -21,7 +21,7 @@ public enum Axis {
* This rotation axis takes into account the current
* {@link com.otaliastudios.cameraview.controls.Facing} value.
*
* - for {@link com.otaliastudios.cameraview.controls.Facing#BACK}, this equals {@link #ABSOLUTE}
* - for {@link com.otaliastudios.cameraview.controls.Facing#BACK}, this is {@link #ABSOLUTE}
* - for {@link com.otaliastudios.cameraview.controls.Facing#FRONT}, this is inverted
*/
RELATIVE_TO_SENSOR

@ -36,7 +36,8 @@ import java.nio.FloatBuffer;
* NOTE - the {@link android.graphics.SurfaceTexture} restrictions apply:
* We only support the {@link android.opengl.GLES11Ext#GL_TEXTURE_EXTERNAL_OES} texture target
* and it must be specified in the fragment shader as a samplerExternalOES texture.
* You also have to explicitly require the extension: see {@link #createDefaultFragmentShader(String)}.
* You also have to explicitly require the extension: see
* {@link #createDefaultFragmentShader(String)}.
*
*/
public abstract class BaseFilter implements Filter {
@ -58,24 +59,28 @@ public abstract class BaseFilter implements Filter {
protected final static String DEFAULT_FRAGMENT_TEXTURE_COORDINATE_NAME = "vTextureCoord";
@NonNull
private static String createDefaultVertexShader(@NonNull String vertexPositionName,
private static String createDefaultVertexShader(
@NonNull String vertexPositionName,
@NonNull String vertexTextureCoordinateName,
@NonNull String vertexModelViewProjectionMatrixName,
@NonNull String vertexTransformMatrixName,
@NonNull String fragmentTextureCoordinateName) {
return "uniform mat4 "+vertexModelViewProjectionMatrixName+";\n" +
"uniform mat4 "+vertexTransformMatrixName+";\n" +
"attribute vec4 "+vertexPositionName+";\n" +
"attribute vec4 "+vertexTextureCoordinateName+";\n" +
"varying vec2 "+fragmentTextureCoordinateName+";\n" +
"void main() {\n" +
" gl_Position = "+vertexModelViewProjectionMatrixName+" * "+vertexPositionName+";\n" +
" "+fragmentTextureCoordinateName+" = ("+vertexTransformMatrixName+" * "+vertexTextureCoordinateName+").xy;\n" +
"}\n";
return "uniform mat4 "+vertexModelViewProjectionMatrixName+";\n"
+ "uniform mat4 "+vertexTransformMatrixName+";\n"
+ "attribute vec4 "+vertexPositionName+";\n"
+ "attribute vec4 "+vertexTextureCoordinateName+";\n"
+ "varying vec2 "+fragmentTextureCoordinateName+";\n"
+ "void main() {\n"
+ " gl_Position = " +vertexModelViewProjectionMatrixName+" * "
+ vertexPositionName+";\n"
+ " "+fragmentTextureCoordinateName+" = ("+vertexTransformMatrixName+" * "
+ vertexTextureCoordinateName+").xy;\n"
+ "}\n";
}
@NonNull
private static String createDefaultFragmentShader(@NonNull String fragmentTextureCoordinateName) {
private static String createDefaultFragmentShader(
@NonNull String fragmentTextureCoordinateName) {
return "#extension GL_OES_EGL_image_external : require\n"
+ "precision mediump float;\n"
+ "varying vec2 "+fragmentTextureCoordinateName+";\n"
@ -139,11 +144,15 @@ public abstract class BaseFilter implements Filter {
this.programHandle = programHandle;
vertexPositionLocation = GLES20.glGetAttribLocation(programHandle, vertexPositionName);
GlUtils.checkLocation(vertexPositionLocation, vertexPositionName);
vertexTextureCoordinateLocation = GLES20.glGetAttribLocation(programHandle, vertexTextureCoordinateName);
vertexTextureCoordinateLocation = GLES20.glGetAttribLocation(programHandle,
vertexTextureCoordinateName);
GlUtils.checkLocation(vertexTextureCoordinateLocation, vertexTextureCoordinateName);
vertexModelViewProjectionMatrixLocation = GLES20.glGetUniformLocation(programHandle, vertexModelViewProjectionMatrixName);
GlUtils.checkLocation(vertexModelViewProjectionMatrixLocation, vertexModelViewProjectionMatrixName);
vertexTransformMatrixLocation = GLES20.glGetUniformLocation(programHandle, vertexTransformMatrixName);
vertexModelViewProjectionMatrixLocation = GLES20.glGetUniformLocation(programHandle,
vertexModelViewProjectionMatrixName);
GlUtils.checkLocation(vertexModelViewProjectionMatrixLocation,
vertexModelViewProjectionMatrixName);
vertexTransformMatrixLocation = GLES20.glGetUniformLocation(programHandle,
vertexTransformMatrixName);
GlUtils.checkLocation(vertexTransformMatrixLocation, vertexTransformMatrixName);
}

@ -26,8 +26,9 @@ import java.util.Map;
* New filters can be added at any time through {@link #addFilter(Filter)}, but currently they
* cannot be removed because we cannot easily ensure that they would be correctly released.
*
* The {@link MultiFilter} does also implement {@link OneParameterFilter} and {@link TwoParameterFilter},
* dispatching all the parameter calls to child filters, assuming they support it.
* The {@link MultiFilter} also implements {@link OneParameterFilter} and
* {@link TwoParameterFilter}, dispatching all the parameter calls to child filters,
* assuming they support it.
*
* There are some important technical caveats when using {@link MultiFilter}:
* - each child filter requires the allocation of a GL framebuffer. Using a large number of filters
@ -146,11 +147,17 @@ public class MultiFilter implements Filter, OneParameterFilter, TwoParameterFilt
state.textureId = textureArray[0];
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, state.textureId);
GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA, state.size.getWidth(), state.size.getHeight(), 0, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, null);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA,
state.size.getWidth(), state.size.getHeight(), 0, GLES20.GL_RGBA,
GLES20.GL_UNSIGNED_BYTE, null);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER,
GLES20.GL_LINEAR);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER,
GLES20.GL_LINEAR);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S,
GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T,
GLES20.GL_CLAMP_TO_EDGE);
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, state.framebufferId);
GLES20.glFramebufferTexture2D(GLES20.GL_FRAMEBUFFER,
GLES20.GL_COLOR_ATTACHMENT0,

@ -32,7 +32,8 @@ public class AutoFixFilter extends BaseFilter implements OneParameterFilter {
+ " density_offset = " + (0.5f / 1024f) + ";\n"
+ " density_scale = " + (1023f / 1024f) + ";\n"
+ " const vec3 weights = vec3(0.33333, 0.33333, 0.33333);\n"
+ " vec4 color = texture2D(tex_sampler_0, "+DEFAULT_FRAGMENT_TEXTURE_COORDINATE_NAME+");\n"
+ " vec4 color = texture2D(tex_sampler_0, "+DEFAULT_FRAGMENT_TEXTURE_COORDINATE_NAME
+ ");\n"
+ " float energy = dot(color.rgb, weights);\n"
+ " float mask_value = energy - 0.5;\n"
+ " float alpha;\n"

@ -19,7 +19,8 @@ public class GammaFilter extends BaseFilter implements OneParameterFilter {
+ "uniform samplerExternalOES sTexture;\n"
+ "uniform float gamma;\n"
+ "void main() {\n"
+ " vec4 textureColor = texture2D(sTexture, "+DEFAULT_FRAGMENT_TEXTURE_COORDINATE_NAME+");\n"
+ " vec4 textureColor = texture2D(sTexture, "+DEFAULT_FRAGMENT_TEXTURE_COORDINATE_NAME
+ ");\n"
+ " gl_FragColor = vec4(pow(textureColor.rgb, vec3(gamma)), textureColor.w);\n"
+ "}\n";

@ -40,18 +40,25 @@ public class GrainFilter extends BaseFilter implements OneParameterFilter {
+ "void main() {\n"
+ " seed[0] = " + RANDOM.nextFloat() + ";\n"
+ " seed[1] = " + RANDOM.nextFloat() + ";\n"
+ " float noise = texture2D(tex_sampler_1, "+DEFAULT_FRAGMENT_TEXTURE_COORDINATE_NAME+" + vec2(-stepX, -stepY)).r * 0.224;\n"
+ " noise += texture2D(tex_sampler_1, "+DEFAULT_FRAGMENT_TEXTURE_COORDINATE_NAME+" + vec2(-stepX, stepY)).r * 0.224;\n"
+ " noise += texture2D(tex_sampler_1, "+DEFAULT_FRAGMENT_TEXTURE_COORDINATE_NAME+" + vec2(stepX, -stepY)).r * 0.224;\n"
+ " noise += texture2D(tex_sampler_1, "+DEFAULT_FRAGMENT_TEXTURE_COORDINATE_NAME+" + vec2(stepX, stepY)).r * 0.224;\n"
+ " float noise = texture2D(tex_sampler_1, "+DEFAULT_FRAGMENT_TEXTURE_COORDINATE_NAME
+ " + vec2(-stepX, -stepY)).r * 0.224;\n"
+ " noise += texture2D(tex_sampler_1, "+DEFAULT_FRAGMENT_TEXTURE_COORDINATE_NAME
+ " + vec2(-stepX, stepY)).r * 0.224;\n"
+ " noise += texture2D(tex_sampler_1, "+DEFAULT_FRAGMENT_TEXTURE_COORDINATE_NAME
+ " + vec2(stepX, -stepY)).r * 0.224;\n"
+ " noise += texture2D(tex_sampler_1, "+DEFAULT_FRAGMENT_TEXTURE_COORDINATE_NAME
+ " + vec2(stepX, stepY)).r * 0.224;\n"
+ " noise += 0.4448;\n"
+ " noise *= scale;\n"
+ " vec4 color = texture2D(tex_sampler_0, "+DEFAULT_FRAGMENT_TEXTURE_COORDINATE_NAME+");\n"
+ " vec4 color = texture2D(tex_sampler_0, "+DEFAULT_FRAGMENT_TEXTURE_COORDINATE_NAME
+ ");\n"
+ " float energy = 0.33333 * color.r + 0.33333 * color.g + 0.33333 * color.b;\n"
+ " float mask = (1.0 - sqrt(energy));\n"
+ " float weight = 1.0 - 1.333 * mask * noise;\n"
+ " gl_FragColor = vec4(color.rgb * weight, color.a);\n"
+ " gl_FragColor = gl_FragColor+vec4(rand("+DEFAULT_FRAGMENT_TEXTURE_COORDINATE_NAME+" + seed), rand("+DEFAULT_FRAGMENT_TEXTURE_COORDINATE_NAME+" + seed),rand("+DEFAULT_FRAGMENT_TEXTURE_COORDINATE_NAME+" + seed),1);\n"
+ " gl_FragColor = gl_FragColor+vec4(rand("+DEFAULT_FRAGMENT_TEXTURE_COORDINATE_NAME
+ " + seed), rand("+DEFAULT_FRAGMENT_TEXTURE_COORDINATE_NAME+" + seed),rand("
+ DEFAULT_FRAGMENT_TEXTURE_COORDINATE_NAME+" + seed),1);\n"
+ "}\n";
private float strength = 0.5f;
@ -61,7 +68,6 @@ public class GrainFilter extends BaseFilter implements OneParameterFilter {
private int stepXLocation = -1;
private int stepYLocation = -1;
@SuppressWarnings("WeakerAccess")
public GrainFilter() { }
@Override

@ -26,15 +26,18 @@ public class SaturationFilter extends BaseFilter implements OneParameterFilter {
+ " weights[1] = " + 5f / 8f + ";\n"
+ " weights[2] = " + 1f / 8f + ";\n"
+ " shift = " + 1.0f / 255.0f + ";\n"
+ " vec4 oldcolor = texture2D(sTexture, "+DEFAULT_FRAGMENT_TEXTURE_COORDINATE_NAME+");\n"
+ " vec4 oldcolor = texture2D(sTexture, "+DEFAULT_FRAGMENT_TEXTURE_COORDINATE_NAME
+ ");\n"
+ " float kv = dot(oldcolor.rgb, weights) + shift;\n"
+ " vec3 new_color = scale * oldcolor.rgb + (1.0 - scale) * kv;\n"
+ " gl_FragColor = vec4(new_color, oldcolor.a);\n"
+ " vec4 color = texture2D(sTexture, "+DEFAULT_FRAGMENT_TEXTURE_COORDINATE_NAME+");\n"
+ " vec4 color = texture2D(sTexture, "+DEFAULT_FRAGMENT_TEXTURE_COORDINATE_NAME
+ ");\n"
+ " float de = dot(color.rgb, weights);\n"
+ " float inv_de = 1.0 / de;\n"
+ " vec3 verynew_color = de * pow(color.rgb * inv_de, exponents);\n"
+ " float max_color = max(max(max(verynew_color.r, verynew_color.g), verynew_color.b), 1.0);\n"
+ " float max_color = max(max(max(verynew_color.r, verynew_color.g), "
+ "verynew_color.b), 1.0);\n"
+ " gl_FragColor = gl_FragColor+vec4(verynew_color / max_color, color.a);\n"
+ "}\n";

@ -26,7 +26,8 @@ public class VignetteFilter extends BaseFilter implements TwoParameterFilter {
+ " const float slope = 20.0;\n"
+ " vec2 coord = "+DEFAULT_FRAGMENT_TEXTURE_COORDINATE_NAME+" - vec2(0.5, 0.5);\n"
+ " float dist = length(coord * scale);\n"
+ " float lumen = shade / (1.0 + exp((dist * inv_max_dist - range) * slope)) + (1.0 - shade);\n"
+ " float lumen = shade / (1.0 + exp((dist * inv_max_dist - range) * slope)) "
+ "+ (1.0 - shade);\n"
+ " vec4 color = texture2D(sTexture, "+DEFAULT_FRAGMENT_TEXTURE_COORDINATE_NAME+");\n"
+ " gl_FragColor = vec4(color.rgb * lumen, color.a);\n"
+ "}\n";

@ -44,8 +44,8 @@ public class Frame {
if (!hasContent()) {
LOG.e("Frame is dead! time:", mTime, "lastTime:", mLastTime);
throw new RuntimeException("You should not access a released frame. " +
"If this frame was passed to a FrameProcessor, you can only use its contents synchronously, " +
"for the duration of the process() method.");
"If this frame was passed to a FrameProcessor, you can only use its contents " +
"synchronously, for the duration of the process() method.");
}
}
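To illustrate the contract in the message above, a hedged usage sketch; addFrameProcessor and
getData are assumed method names and should be checked against the actual public API:

cameraView.addFrameProcessor(new FrameProcessor() {
    @Override
    public void process(@NonNull Frame frame) {
        // Use the frame only inside this method; copy the bytes if you need them later.
        byte[] copy = frame.getData().clone();
        // ... analyze 'copy' asynchronously if needed, never 'frame' itself ...
    }
});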

@ -28,18 +28,20 @@ import java.util.concurrent.LinkedBlockingQueue;
* For both byte buffers and frames to get back to the FrameManager pool, all you have to do
* is call {@link Frame#release()} when done.
*
* Other than this, the FrameManager can work in two modes, depending on whether a {@link BufferCallback}
* is passed to the constructor. The modes changes the buffer behavior.
* Other than this, the FrameManager can work in two modes, depending on whether a
* {@link BufferCallback} is passed to the constructor. The modes change the buffer behavior.
*
* 1. {@link #BUFFER_MODE_DISPATCH}: in this mode, as soon as we have a buffer, it is dispatched to
* the {@link BufferCallback}. The callback should then fill the buffer, and finally call
* {@link #getFrame(byte[], long, int)} to receive a frame.
* This is used for Camera1.
*
* 2. {@link #BUFFER_MODE_ENQUEUE}: in this mode, the manager internally keeps a queue of byte buffers,
* instead of handing them to the callback. The users can ask for buffers through {@link #getBuffer()}.
* This buffer can be filled with data and used to get a frame {@link #getFrame(byte[], long, int)},
* or, in case it was not filled, returned to the queue using {@link #onBufferUnused(byte[])}.
* 2. {@link #BUFFER_MODE_ENQUEUE}: in this mode, the manager internally keeps a queue of byte
* buffers, instead of handing them to the callback. The users can ask for buffers through
* {@link #getBuffer()}.
* This buffer can be filled with data and used to get a frame
* {@link #getFrame(byte[], long, int)}, or, in case it was not filled, returned to the queue
* using {@link #onBufferUnused(byte[])}.
* This is used for Camera2.
*/
public class FrameManager {
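A sketch of the BUFFER_MODE_ENQUEUE flow described in the javadoc above; 'manager', the
timestamp and the rotation are illustrative, and the helper that fills the buffer is hypothetical:

byte[] buffer = manager.getBuffer();
if (buffer != null) {
    boolean filled = copyImageInto(buffer);             // hypothetical helper
    if (filled) {
        Frame frame = manager.getFrame(buffer, timestampUs, rotation);
        dispatch(frame);                                 // the consumer calls frame.release()
    } else {
        manager.onBufferUnused(buffer);                  // hand the buffer back to the queue
    }
}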
@ -149,7 +151,8 @@ public class FrameManager {
@Nullable
public byte[] getBuffer() {
if (mBufferMode != BUFFER_MODE_ENQUEUE) {
throw new IllegalStateException("Can't call getBuffer() when not in BUFFER_MODE_ENQUEUE.");
throw new IllegalStateException("Can't call getBuffer() " +
"when not in BUFFER_MODE_ENQUEUE.");
}
return mBufferQueue.poll();
}
@ -161,7 +164,8 @@ public class FrameManager {
*/
public void onBufferUnused(@NonNull byte[] buffer) {
if (mBufferMode != BUFFER_MODE_ENQUEUE) {
throw new IllegalStateException("Can't call onBufferUnused() when not in BUFFER_MODE_ENQUEUE.");
throw new IllegalStateException("Can't call onBufferUnused() " +
"when not in BUFFER_MODE_ENQUEUE.");
}
if (isSetUp()) {
@ -188,7 +192,8 @@ public class FrameManager {
@NonNull
public Frame getFrame(@NonNull byte[] data, long time, int rotation) {
if (!isSetUp()) {
throw new IllegalStateException("Can't call getFrame() after releasing or before setUp.");
throw new IllegalStateException("Can't call getFrame() after releasing " +
"or before setUp.");
}
Frame frame = mFrameQueue.poll();

@ -8,7 +8,8 @@ import androidx.annotation.NonNull;
/**
* Gestures listen to finger gestures over the {@link CameraView} bounds and can be mapped
* to one or more camera controls using XML attributes or {@link CameraView#mapGesture(Gesture, GestureAction)}.
* to one or more camera controls using XML attributes or {@link CameraView#mapGesture(Gesture,
* GestureAction)}.
*
* Not every gesture can control a certain action. For example, pinch gestures can only control
* continuous values, such as zoom or AE correction. Single point gestures, on the other hand,

@ -2,6 +2,7 @@ package com.otaliastudios.cameraview.gesture;
import com.otaliastudios.cameraview.CameraView;
import com.otaliastudios.cameraview.filter.Filter;
import com.otaliastudios.cameraview.markers.AutoFocusMarker;
import androidx.annotation.NonNull;
@ -63,7 +64,7 @@ public enum GestureAction {
EXPOSURE_CORRECTION(4, GestureType.CONTINUOUS),
/**
* Controls the first parameter of a real-time {@link com.otaliastudios.cameraview.filter.Filter},
* Controls the first parameter of a real-time {@link Filter},
* if it accepts one. This action can be mapped to continuous gestures:
*
* - {@link Gesture#PINCH}
@ -73,7 +74,7 @@ public enum GestureAction {
FILTER_CONTROL_1(5, GestureType.CONTINUOUS),
/**
* Controls the second parameter of a real-time {@link com.otaliastudios.cameraview.filter.Filter},
* Controls the second parameter of a real-time {@link Filter},
* if it accepts one. This action can be mapped to continuous gestures:
*
* - {@link Gesture#PINCH}

@ -18,11 +18,17 @@ public class GestureParser {
private int verticalScrollAction;
public GestureParser(@NonNull TypedArray array) {
this.tapAction = array.getInteger(R.styleable.CameraView_cameraGestureTap, GestureAction.DEFAULT_TAP.value());
this.longTapAction = array.getInteger(R.styleable.CameraView_cameraGestureLongTap, GestureAction.DEFAULT_LONG_TAP.value());
this.pinchAction = array.getInteger(R.styleable.CameraView_cameraGesturePinch, GestureAction.DEFAULT_PINCH.value());
this.horizontalScrollAction = array.getInteger(R.styleable.CameraView_cameraGestureScrollHorizontal, GestureAction.DEFAULT_SCROLL_HORIZONTAL.value());
this.verticalScrollAction = array.getInteger(R.styleable.CameraView_cameraGestureScrollVertical, GestureAction.DEFAULT_SCROLL_VERTICAL.value());
tapAction = array.getInteger(R.styleable.CameraView_cameraGestureTap,
GestureAction.DEFAULT_TAP.value());
longTapAction = array.getInteger(R.styleable.CameraView_cameraGestureLongTap,
GestureAction.DEFAULT_LONG_TAP.value());
pinchAction = array.getInteger(R.styleable.CameraView_cameraGesturePinch,
GestureAction.DEFAULT_PINCH.value());
horizontalScrollAction = array.getInteger(
R.styleable.CameraView_cameraGestureScrollHorizontal,
GestureAction.DEFAULT_SCROLL_HORIZONTAL.value());
verticalScrollAction = array.getInteger(R.styleable.CameraView_cameraGestureScrollVertical,
GestureAction.DEFAULT_SCROLL_VERTICAL.value());
}
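The same mappings can also be set at runtime through the public gesture API referenced above;
a hedged sketch (Gesture and mapGesture come from this diff, ZOOM and NONE are assumed values):

cameraView.mapGesture(Gesture.PINCH, GestureAction.ZOOM);
cameraView.mapGesture(Gesture.SCROLL_HORIZONTAL, GestureAction.EXPOSURE_CORRECTION);
cameraView.mapGesture(Gesture.TAP, GestureAction.NONE);   // disable the tap gesture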
private GestureAction get(int which) {

@ -19,7 +19,8 @@ public class PinchGestureFinder extends GestureFinder {
public PinchGestureFinder(@NonNull Controller controller) {
super(controller, 2);
setGesture(Gesture.PINCH);
mDetector = new ScaleGestureDetector(controller.getContext(), new ScaleGestureDetector.SimpleOnScaleGestureListener() {
mDetector = new ScaleGestureDetector(controller.getContext(),
new ScaleGestureDetector.SimpleOnScaleGestureListener() {
@Override
public boolean onScale(ScaleGestureDetector detector) {
mNotify = true;

@ -21,10 +21,14 @@ public class ScrollGestureFinder extends GestureFinder {
public ScrollGestureFinder(final @NonNull Controller controller) {
super(controller, 2);
mDetector = new GestureDetector(controller.getContext(), new GestureDetector.SimpleOnGestureListener() {
mDetector = new GestureDetector(controller.getContext(),
new GestureDetector.SimpleOnGestureListener() {
@Override
public boolean onScroll(MotionEvent e1, MotionEvent e2, float distanceX, float distanceY) {
public boolean onScroll(MotionEvent e1,
MotionEvent e2,
float distanceX,
float distanceY) {
boolean horizontal;
LOG.i("onScroll:", "distanceX="+distanceX, "distanceY="+distanceY);
if (e1 == null || e2 == null) return false; // Got some crashes about this.
@ -39,7 +43,8 @@ public class ScrollGestureFinder extends GestureFinder {
horizontal = getGesture() == Gesture.SCROLL_HORIZONTAL;
}
getPoint(1).set(e2.getX(), e2.getY());
mFactor = horizontal ? (distanceX / controller.getWidth()) : (distanceY / controller.getHeight());
mFactor = horizontal ? (distanceX / controller.getWidth())
: (distanceY / controller.getHeight());
mFactor = horizontal ? -mFactor : mFactor; // When vertical, up = positive
mNotify = true;
return true;

@ -16,7 +16,8 @@ public class TapGestureFinder extends GestureFinder {
public TapGestureFinder(@NonNull Controller controller) {
super(controller, 1);
mDetector = new GestureDetector(controller.getContext(), new GestureDetector.SimpleOnGestureListener() {
mDetector = new GestureDetector(controller.getContext(),
new GestureDetector.SimpleOnGestureListener() {
@Override
public boolean onSingleTapUp(MotionEvent e) {

@ -76,8 +76,10 @@ public class DeviceEncoders {
LOG.i("Enabled. Found video encoder:", mVideoEncoder.getName());
mAudioEncoder = findDeviceEncoder(encoders, audioType, mode);
LOG.i("Enabled. Found audio encoder:", mAudioEncoder.getName());
mVideoCapabilities = mVideoEncoder.getCapabilitiesForType(videoType).getVideoCapabilities();
mAudioCapabilities = mAudioEncoder.getCapabilitiesForType(audioType).getAudioCapabilities();
mVideoCapabilities = mVideoEncoder.getCapabilitiesForType(videoType)
.getVideoCapabilities();
mAudioCapabilities = mAudioEncoder.getCapabilitiesForType(audioType)
.getAudioCapabilities();
} else {
mVideoEncoder = null;
mAudioEncoder = null;
@ -135,7 +137,9 @@ public class DeviceEncoders {
@SuppressLint("NewApi")
@NonNull
@VisibleForTesting
MediaCodecInfo findDeviceEncoder(@NonNull List<MediaCodecInfo> encoders, @NonNull String mimeType, int mode) {
MediaCodecInfo findDeviceEncoder(@NonNull List<MediaCodecInfo> encoders,
@NonNull String mimeType,
int mode) {
ArrayList<MediaCodecInfo> results = new ArrayList<>();
for (MediaCodecInfo encoder : encoders) {
String[] types = encoder.getSupportedTypes();
@ -231,7 +235,9 @@ public class DeviceEncoders {
public int getSupportedVideoBitRate(int bitRate) {
if (!ENABLED) return bitRate;
int newBitRate = mVideoCapabilities.getBitrateRange().clamp(bitRate);
LOG.i("getSupportedVideoBitRate -", "inputRate:", bitRate, "adjustedRate:", newBitRate);
LOG.i("getSupportedVideoBitRate -",
"inputRate:", bitRate,
"adjustedRate:", newBitRate);
return newBitRate;
}
@ -248,7 +254,9 @@ public class DeviceEncoders {
int newFrameRate = (int) (double) mVideoCapabilities
.getSupportedFrameRatesFor(size.getWidth(), size.getHeight())
.clamp((double) frameRate);
LOG.i("getSupportedVideoFrameRate -", "inputRate:", frameRate, "adjustedRate:", newFrameRate);
LOG.i("getSupportedVideoFrameRate -",
"inputRate:", frameRate,
"adjustedRate:", newFrameRate);
return newFrameRate;
}
@ -263,7 +271,9 @@ public class DeviceEncoders {
public int getSupportedAudioBitRate(int bitRate) {
if (!ENABLED) return bitRate;
int newBitRate = mAudioCapabilities.getBitrateRange().clamp(bitRate);
LOG.i("getSupportedAudioBitRate -", "inputRate:", bitRate, "adjustedRate:", newBitRate);
LOG.i("getSupportedAudioBitRate -",
"inputRate:", bitRate,
"adjustedRate:", newBitRate);
return newBitRate;
}
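The clamping above relies on android.util.Range; a tiny illustration with made-up numbers:

Range<Integer> supported = Range.create(64_000, 40_000_000);  // what the encoder reports
int requested = 50_000_000;                                   // what we would like to use
int adjusted = supported.clamp(requested);                    // 40_000_000, the usable value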

@ -26,7 +26,8 @@ public class GlUtils {
public static void checkError(@NonNull String opName) {
int error = GLES20.glGetError();
if (error != GLES20.GL_NO_ERROR) {
String message = LOG.e("Error during", opName, "glError 0x", Integer.toHexString(error));
String message = LOG.e("Error during", opName, "glError 0x",
Integer.toHexString(error));
throw new RuntimeException(message);
}
}
@ -47,7 +48,8 @@ public class GlUtils {
int[] compiled = new int[1];
GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled, 0);
if (compiled[0] == 0) {
LOG.e("Could not compile shader", shaderType, ":", GLES20.glGetShaderInfoLog(shader));
LOG.e("Could not compile shader", shaderType, ":",
GLES20.glGetShaderInfoLog(shader));
GLES20.glDeleteShader(shader);
shader = 0;
}

@ -43,7 +43,8 @@ public class GridLinesLayout extends View {
super(context, attrs);
horiz = new ColorDrawable(gridColor);
vert = new ColorDrawable(gridColor);
width = TypedValue.applyDimension(TypedValue.COMPLEX_UNIT_DIP, 0.9f, context.getResources().getDisplayMetrics());
width = TypedValue.applyDimension(TypedValue.COMPLEX_UNIT_DIP, 0.9f,
context.getResources().getDisplayMetrics());
}
@Override

@ -36,8 +36,9 @@ import com.otaliastudios.cameraview.preview.RendererThread;
* 9B. Publish overlays to GL texture using overlaySurfaceTexture.updateTexImage()
* 9C. GLES - draw textureId
* 9D. GLES - draw overlayTextureId
* Both textures are drawn on the same EGLWindow and we manage to overlay them with {@link GLES20#GL_BLEND}.
* This is the whole procedure and it works for the majority of devices and situations.
* Both textures are drawn on the same EGLWindow and we manage to overlay them with
* {@link GLES20#GL_BLEND}. This is the whole procedure and it works for the majority of
* devices and situations.
*
* ISSUE DESCRIPTION
* The #514 issue can be described as follows:
@ -66,7 +67,8 @@ import com.otaliastudios.cameraview.preview.RendererThread;
*
* So a pixel format conversion takes place, when it shouldn't happen. We can't solve this:
* - It is done at a lower level, there's no real way for us to specify the surface format, but
* it seems that these devices will prefer a YUV format and misunderstand our {@link Canvas} pixels.
* it seems that these devices will prefer a YUV format and misunderstand our {@link Canvas}
* pixels.
* - There is also no way to identify which devices will present this issue, it's a bug somewhere
* and it is implementation specific.
*
@ -74,16 +76,18 @@ import com.otaliastudios.cameraview.preview.RendererThread;
* Hard to say why, but using this class fixes the described issue.
* It seems that when the {@link SurfaceTexture#updateTexImage()} method for the overlay surface
* is called - the one that updates the overlayTextureId - we must ensure that the CURRENTLY
* BOUND TEXTURE ID IS NOT 0. The id we choose to apply might be cameraTextureId, or overlayTextureId,
* or probably whatever other valid id, and should be passed to {@link #Issue514Workaround(int)}.
* BOUND TEXTURE ID IS NOT 0. The id we choose to apply might be cameraTextureId, or
* overlayTextureId, or probably whatever other valid id, and should be passed to
* {@link #Issue514Workaround(int)}.
* [Tested with cameraTextureId and overlayTextureId: both do work.]
* [Tested with invalid id like 9999. This won't work.]
*
* This makes no sense, since overlaySurfaceTexture.updateTexImage() is setting it to overlayTextureId
* anyway, but it fixes the issue. Specifically, after any draw operation with {@link EglViewport},
* the bound texture is reset to 0 so this must be undone here. We offer:
* This makes no sense, since overlaySurfaceTexture.updateTexImage() is setting it to
* overlayTextureId anyway, but it fixes the issue. Specifically, after any draw operation with
* {@link EglViewport}, the bound texture is reset to 0 so this must be undone here. We offer:
*
* - {@link #beforeOverlayUpdateTexImage()} to be called before the {@link SurfaceTexture#updateTexImage()} call
* - {@link #beforeOverlayUpdateTexImage()} to be called before the
* {@link SurfaceTexture#updateTexImage()} call
* - {@link #end()} to release and bring things back to normal state
*
* Since updating and rendering can happen on different threads with a shared EGL context,
@ -93,9 +97,9 @@ import com.otaliastudios.cameraview.preview.RendererThread;
* REFERENCES
* https://github.com/natario1/CameraView/issues/514
* https://android.googlesource.com/platform/frameworks/native/+/5c1139f/libs/gui/SurfaceTexture.cpp
* I can see here that SurfaceTexture does indeed call glBindTexture with the same parameters whenever
* updateTexImage is called, but it also does other gl stuff first. This other gl stuff might be
* breaking when we don't have a bound texture on some specific hardware implementation.
* I can see here that SurfaceTexture does indeed call glBindTexture with the same parameters
* whenever updateTexImage is called, but it also does other gl stuff first. This other gl stuff
* might be breaking when we don't have a bound texture on some specific hardware implementation.
*/
public class Issue514Workaround {
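Given the javadoc above, the intended call order looks roughly like this; field names are
illustrative, and only the constructor, beforeOverlayUpdateTexImage() and end() come from the
class itself:

Issue514Workaround workaround = new Issue514Workaround(cameraTextureId);
workaround.beforeOverlayUpdateTexImage();     // ensure a non-zero texture id is bound
overlaySurfaceTexture.updateTexImage();       // publish the overlay canvas to overlayTextureId
overlaySurfaceTexture.getTransformMatrix(overlayTransform);
// ... draw cameraTextureId and overlayTextureId on the same EGL window with GL_BLEND ...
workaround.end();                             // restore the previous state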

@ -229,7 +229,8 @@ public class EglBaseSurface {
int height = getHeight();
ByteBuffer buf = ByteBuffer.allocateDirect(width * height * 4);
buf.order(ByteOrder.LITTLE_ENDIAN);
GLES20.glReadPixels(0, 0, width, height, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, buf);
GLES20.glReadPixels(0, 0, width, height, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE,
buf);
GlUtils.checkError("glReadPixels");
buf.rewind();

@ -172,8 +172,8 @@ public final class EglCore {
}
EGLConfig[] configs = new EGLConfig[1];
int[] numConfigs = new int[1];
if (!EGL14.eglChooseConfig(mEGLDisplay, attribList, 0, configs, 0, configs.length,
numConfigs, 0)) {
if (!EGL14.eglChooseConfig(mEGLDisplay, attribList, 0, configs, 0,
configs.length, numConfigs, 0)) {
Log.w(TAG, "unable to find RGB8888 / " + version + " EGLConfig");
return null;
}
@ -210,7 +210,8 @@ public final class EglCore {
// the EGL state, so if a surface or context is still current on another
// thread we can't fully release it here. Exceptions thrown from here
// are quietly discarded. Complain in the log file.
Log.w(TAG, "WARNING: EglCore was not explicitly released -- state may be leaked");
Log.w(TAG, "WARNING: EglCore was not explicitly released! " +
"State may be leaked");
release();
}
} finally {

@ -35,7 +35,8 @@ public class EglViewport {
}
private void createProgram() {
mProgramHandle = GlUtils.createProgram(mFilter.getVertexShader(), mFilter.getFragmentShader());
mProgramHandle = GlUtils.createProgram(mFilter.getVertexShader(),
mFilter.getFragmentShader());
mFilter.onCreate(mProgramHandle);
}
@ -57,10 +58,14 @@ public class EglViewport {
GLES20.glBindTexture(mTextureTarget, texId);
GlUtils.checkError("glBindTexture " + texId);
GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER,
GLES20.GL_NEAREST);
GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER,
GLES20.GL_LINEAR);
GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S,
GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T,
GLES20.GL_CLAMP_TO_EDGE);
GlUtils.checkError("glTexParameter");
return texId;

@ -77,25 +77,4 @@ public class EglWindowSurface extends EglBaseSurface {
mSurface = null;
}
}
/**
* Recreate the EGLSurface, using the new EglBase. The caller should have already
* freed the old EGLSurface with releaseEglSurface().
* <p>
* This is useful when we want to update the EGLSurface associated with a Surface.
* For example, if we want to share with a different EGLContext, which can only
* be done by tearing down and recreating the context. (That's handled by the caller;
* this just creates a new EGLSurface for the Surface we were handed earlier.)
* <p>
* If the previous EGLSurface isn't fully destroyed, e.g. it's still current on a
* context somewhere, the create call will fail with complaints from the Surface
* about already being connected.
*/
public void recreate(EglCore newEglCore) {
if (mSurface == null) {
throw new RuntimeException("not yet implemented for SurfaceTexture");
}
mEglCore = newEglCore; // switch to new context
createWindowSurface(mSurface); // create new surface
}
}

@ -31,14 +31,16 @@ public class CamcorderProfiles {
sizeToProfileMap.put(new Size(1280, 720), CamcorderProfile.QUALITY_720P);
sizeToProfileMap.put(new Size(1920, 1080), CamcorderProfile.QUALITY_1080P);
if (Build.VERSION.SDK_INT >= 21) {
sizeToProfileMap.put(new Size(3840, 2160), CamcorderProfile.QUALITY_2160P);
sizeToProfileMap.put(new Size(3840, 2160),
CamcorderProfile.QUALITY_2160P);
}
}
/**
* Returns a CamcorderProfile that's somewhat coherent with the target size,
* to ensure we get acceptable video/audio parameters for MediaRecorders (most notably the bitrate).
* to ensure we get acceptable video/audio parameters for MediaRecorders
* (most notably the bitrate).
*
* @param cameraId the camera2 id
* @param targetSize the target video size
@ -57,7 +59,8 @@ public class CamcorderProfiles {
/**
* Returns a CamcorderProfile that's somewhat coherent with the target size,
* to ensure we get acceptable video/audio parameters for MediaRecorders (most notably the bitrate).
* to ensure we get acceptable video/audio parameters for MediaRecorders
* (most notably the bitrate).
*
* @param cameraId the camera id
* @param targetSize the target video size

@ -67,7 +67,8 @@ public class ImageHelper {
}
if (pixelStride == 2 && rowStride == width && uBuffer.get(0) == vBuffer.get(1)) {
// maybe V an U planes overlap as per NV21, which means vBuffer[1] is alias of uBuffer[0]
// maybe V and U planes overlap as per NV21, which means vBuffer[1]
// is an alias of uBuffer[0]
byte savePixel = vBuffer.get(1);
vBuffer.put(1, (byte)0);
if (uBuffer.get(0) == 0) {

@ -12,7 +12,8 @@ import android.view.WindowManager;
/**
* Helps with keeping track of both device orientation (which changes when device is rotated)
* and the display offset (which depends on the activity orientation wrt the device default orientation).
* and the display offset (which depends on the activity orientation
* with respect to the device's default orientation).
*/
public class OrientationHelper {
@ -35,7 +36,8 @@ public class OrientationHelper {
*/
public OrientationHelper(@NonNull Context context, @NonNull Callback callback) {
mCallback = callback;
mListener = new OrientationEventListener(context.getApplicationContext(), SensorManager.SENSOR_DELAY_NORMAL) {
mListener = new OrientationEventListener(context.getApplicationContext(),
SensorManager.SENSOR_DELAY_NORMAL) {
@SuppressWarnings("ConstantConditions")
@Override
@ -66,7 +68,9 @@ public class OrientationHelper {
* @param context a context
*/
public void enable(@NonNull Context context) {
Display display = ((WindowManager) context.getSystemService(Context.WINDOW_SERVICE)).getDefaultDisplay();
Display display = ((WindowManager) context
.getSystemService(Context.WINDOW_SERVICE))
.getDefaultDisplay();
switch (display.getRotation()) {
case Surface.ROTATION_0: mDisplayOffset = 0; break;
case Surface.ROTATION_90: mDisplayOffset = 90; break;

@ -92,14 +92,16 @@ public class Pool<T> {
synchronized (lock) {
LOG.v("RECYCLE - Recycling item.", this);
if (--activeCount < 0) {
throw new IllegalStateException("Trying to recycle an item which makes activeCount < 0." +
"This means that this or some previous items being recycled were not coming from " +
"this pool, or some item was recycled more than once. " + this);
throw new IllegalStateException("Trying to recycle an item which makes " +
"activeCount < 0. This means that this or some previous items being " +
"recycled were not coming from this pool, or some item was recycled " +
"more than once. " + this);
}
if (!queue.offer(item)) {
throw new IllegalStateException("Trying to recycle an item while the queue is full. " +
"This means that this or some previous items being recycled were not coming from " +
"this pool, or some item was recycled more than once. " + this);
throw new IllegalStateException("Trying to recycle an item while the queue " +
"is full. This means that this or some previous items being recycled " +
"were not coming from this pool, or some item was recycled " +
"more than once. " + this);
}
}
}
@ -121,7 +123,6 @@ public class Pool<T> {
*
* @return count
*/
@SuppressWarnings("WeakerAccess")
public final int count() {
synchronized (lock) {
return activeCount() + recycledCount();

@ -19,7 +19,9 @@ public class RotationHelper {
* @param rotation desired angle
* @return a new yuv array
*/
public static byte[] rotate(@NonNull final byte[] yuv, @NonNull final Size size, final int rotation) {
public static byte[] rotate(@NonNull final byte[] yuv,
@NonNull final Size size,
final int rotation) {
if (rotation == 0) return yuv;
if (rotation % 90 != 0 || rotation < 0 || rotation > 270) {
throw new IllegalArgumentException("0 <= rotation < 360, rotation % 90 == 0");
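A quick usage sketch for the method above (the NV21 buffer and the size are illustrative):

byte[] rotated = RotationHelper.rotate(nv21Data, new Size(1280, 720), 90);
// rotation must be 0, 90, 180 or 270; anything else throws IllegalArgumentException.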

@ -23,8 +23,10 @@ import java.util.concurrent.Executor;
*/
public class WorkerHandler {
private final static CameraLogger LOG = CameraLogger.create(WorkerHandler.class.getSimpleName());
private final static ConcurrentHashMap<String, WeakReference<WorkerHandler>> sCache = new ConcurrentHashMap<>(4);
private final static CameraLogger LOG
= CameraLogger.create(WorkerHandler.class.getSimpleName());
private final static ConcurrentHashMap<String, WeakReference<WorkerHandler>> sCache
= new ConcurrentHashMap<>(4);
private final static String FALLBACK_NAME = "FallbackCameraThread";
@ -50,7 +52,8 @@ public class WorkerHandler {
} else {
// Cleanup the old thread before creating a new one
cached.destroy();
LOG.w("get:", "Thread reference found, but not alive or interrupted. Removing.", name);
LOG.w("get:", "Thread reference found, but not alive or interrupted.",
"Removing.", name);
sCache.remove(name);
}
} else {

@ -32,7 +32,9 @@ public interface AutoFocusMarker extends Marker {
* @param successful whether the operation succeeded
* @param point coordinates
*/
void onAutoFocusEnd(@NonNull AutoFocusTrigger trigger, boolean successful, @NonNull PointF point);
void onAutoFocusEnd(@NonNull AutoFocusTrigger trigger,
boolean successful,
@NonNull PointF point);
}

@ -27,7 +27,8 @@ public class DefaultAutoFocusMarker implements AutoFocusMarker {
@Nullable
@Override
public View onAttach(@NonNull Context context, @NonNull ViewGroup container) {
View view = LayoutInflater.from(context).inflate(R.layout.cameraview_layout_focus_marker, container, false);
View view = LayoutInflater.from(context).inflate(R.layout.cameraview_layout_focus_marker,
container, false);
mContainer = view.findViewById(R.id.focusMarkerContainer);
mFill = view.findViewById(R.id.focusMarkerFill);
return view;
@ -49,18 +50,22 @@ public class DefaultAutoFocusMarker implements AutoFocusMarker {
}
@Override
public void onAutoFocusEnd(@NonNull AutoFocusTrigger trigger, boolean successful, @NonNull PointF point) {
public void onAutoFocusEnd(@NonNull AutoFocusTrigger trigger,
boolean successful,
@NonNull PointF point) {
if (trigger == AutoFocusTrigger.METHOD) return;
if (successful) {
animate(mContainer, 1, 0, 500, 0, null);
animate(mFill, 1, 0, 500, 0, null);
} else {
animate(mFill, 0, 0, 500, 0, null);
animate(mContainer, 1.36f, 1, 500, 0, new AnimatorListenerAdapter() {
animate(mContainer, 1.36f, 1, 500, 0,
new AnimatorListenerAdapter() {
@Override
public void onAnimationEnd(Animator animation) {
super.onAnimationEnd(animation);
animate(mContainer, 1.36f, 0, 200, 1000, null);
animate(mContainer, 1.36f, 0, 200, 1000,
null);
}
});
}

@ -183,9 +183,12 @@ public class OverlayLayout extends FrameLayout implements Overlay {
super(context, attrs);
TypedArray a = context.obtainStyledAttributes(attrs, R.styleable.CameraView_Layout);
try {
drawOnPreview = a.getBoolean(R.styleable.CameraView_Layout_layout_drawOnPreview, false);
drawOnPictureSnapshot = a.getBoolean(R.styleable.CameraView_Layout_layout_drawOnPictureSnapshot, false);
drawOnVideoSnapshot = a.getBoolean(R.styleable.CameraView_Layout_layout_drawOnVideoSnapshot, false);
drawOnPreview = a.getBoolean(R.styleable.CameraView_Layout_layout_drawOnPreview,
false);
drawOnPictureSnapshot = a.getBoolean(
R.styleable.CameraView_Layout_layout_drawOnPictureSnapshot, false);
drawOnVideoSnapshot = a.getBoolean(
R.styleable.CameraView_Layout_layout_drawOnVideoSnapshot, false);
} finally {
a.recycle();
}

@ -54,7 +54,9 @@ public class Full1PictureRecorder extends PictureRecorder {
int exifRotation;
try {
ExifInterface exif = new ExifInterface(new ByteArrayInputStream(data));
int exifOrientation = exif.getAttributeInt(ExifInterface.TAG_ORIENTATION, ExifInterface.ORIENTATION_NORMAL);
int exifOrientation = exif.getAttributeInt(
ExifInterface.TAG_ORIENTATION,
ExifInterface.ORIENTATION_NORMAL);
exifRotation = ExifHelper.readExifOrientation(exifOrientation);
} catch (IOException e) {
exifRotation = 0;

@ -28,7 +28,8 @@ import androidx.exifinterface.media.ExifInterface;
* A {@link PictureResult} that uses standard APIs.
*/
@RequiresApi(Build.VERSION_CODES.LOLLIPOP)
public class Full2PictureRecorder extends PictureRecorder implements ImageReader.OnImageAvailableListener {
public class Full2PictureRecorder extends PictureRecorder
implements ImageReader.OnImageAvailableListener {
private static final String TAG = Full2PictureRecorder.class.getSimpleName();
private static final CameraLogger LOG = CameraLogger.create(TAG);
@ -65,7 +66,8 @@ public class Full2PictureRecorder extends PictureRecorder implements ImageReader
}
@Override
public void onCaptureStarted(@NonNull ActionHolder holder, @NonNull CaptureRequest request) {
public void onCaptureStarted(@NonNull ActionHolder holder,
@NonNull CaptureRequest request) {
super.onCaptureStarted(holder, request);
if (request.getTag() == (Integer) CameraDevice.TEMPLATE_STILL_CAPTURE) {
LOG.i("onCaptureStarted:", "Dispatching picture shutter.");

@ -44,7 +44,8 @@ public abstract class PictureRecorder {
* @param listener a listener
*/
@SuppressWarnings("WeakerAccess")
public PictureRecorder(@NonNull PictureResult.Stub stub, @Nullable PictureResultListener listener) {
public PictureRecorder(@NonNull PictureResult.Stub stub,
@Nullable PictureResultListener listener) {
mResult = stub;
mListener = listener;
}

@ -60,7 +60,8 @@ public class Snapshot1PictureRecorder extends PictureRecorder {
final Size outputSize = mResult.size;
final Size previewStreamSize = mEngine1.getPreviewStreamSize(Reference.SENSOR);
if (previewStreamSize == null) {
throw new IllegalStateException("Preview stream size should never be null here.");
throw new IllegalStateException("Preview stream size " +
"should never be null here.");
}
WorkerHandler.execute(new Runnable() {
@Override
@ -69,7 +70,8 @@ public class Snapshot1PictureRecorder extends PictureRecorder {
// then crop if needed. In both cases, transform yuv to jpeg.
//noinspection deprecation
byte[] data = RotationHelper.rotate(yuv, previewStreamSize, sensorToOutput);
YuvImage yuv = new YuvImage(data, mFormat, outputSize.getWidth(), outputSize.getHeight(), null);
YuvImage yuv = new YuvImage(data, mFormat, outputSize.getWidth(),
outputSize.getHeight(), null);
ByteArrayOutputStream stream = new ByteArrayOutputStream();
Rect outputRect = CropHelper.computeCrop(outputSize, mOutputRatio);

@ -26,18 +26,20 @@ import com.otaliastudios.cameraview.size.AspectRatio;
* Wraps {@link SnapshotGlPictureRecorder} for Camera2.
*
* Camera2 engine supports metering for snapshots and we expect them to correctly fire the flash as well.
* The first idea, and in theory, the most correct one, was to set {@link CaptureRequest#CONTROL_CAPTURE_INTENT}
* to {@link CaptureRequest#CONTROL_CAPTURE_INTENT_STILL_CAPTURE}.
* The first idea, and in theory, the most correct one, was to set
* {@link CaptureRequest#CONTROL_CAPTURE_INTENT} to
* {@link CaptureRequest#CONTROL_CAPTURE_INTENT_STILL_CAPTURE}.
*
* According to documentation, this will automatically trigger the flash if parameters says so.
* In fact this is what happens, but it is a very fast flash that only lasts for 1 or 2 frames.
* It's not easy to call super.take() at the exact time so that we capture the frame that was lit.
* I have tried by comparing {@link SurfaceTexture#getTimestamp()} and {@link CaptureResult#SENSOR_TIMESTAMP}
* to identify the correct frame. These timestamps match, but the frame is not the correct one.
* I have tried by comparing {@link SurfaceTexture#getTimestamp()} and
* {@link CaptureResult#SENSOR_TIMESTAMP} to identify the correct frame. These timestamps match,
* but the frame is not the correct one.
*
* So what we do here is ignore the {@link CaptureRequest#CONTROL_CAPTURE_INTENT} and instead open the
* torch, if requested to do so. Then wait for exposure to settle again and finally take a snapshot.
* I'd still love to use the capture intent instead of this, but was not able yet.
* So what we do here is ignore the {@link CaptureRequest#CONTROL_CAPTURE_INTENT} and instead
* open the torch, if requested to do so. Then wait for exposure to settle again and finally
* take a snapshot. I'd still love to use the capture intent instead of this, but have not managed yet.
*/
@RequiresApi(Build.VERSION_CODES.LOLLIPOP)
public class Snapshot2PictureRecorder extends SnapshotGlPictureRecorder {
@ -52,8 +54,10 @@ public class Snapshot2PictureRecorder extends SnapshotGlPictureRecorder {
protected void onStart(@NonNull ActionHolder holder) {
super.onStart(holder);
LOG.i("FlashAction:", "Parameters locked, opening torch.");
holder.getBuilder(this).set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_TORCH);
holder.getBuilder(this).set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON);
holder.getBuilder(this).set(CaptureRequest.FLASH_MODE,
CaptureRequest.FLASH_MODE_TORCH);
holder.getBuilder(this).set(CaptureRequest.CONTROL_AE_MODE,
CaptureRequest.CONTROL_AE_MODE_ON);
holder.applyBuilder(this);
}
@ -64,10 +68,12 @@ public class Snapshot2PictureRecorder extends SnapshotGlPictureRecorder {
super.onCaptureCompleted(holder, request, result);
Integer flashState = result.get(CaptureResult.FLASH_STATE);
if (flashState == null) {
LOG.w("FlashAction:", "Waiting flash, but flashState is null! Taking snapshot.");
LOG.w("FlashAction:", "Waiting flash, but flashState is null!",
"Taking snapshot.");
setState(STATE_COMPLETED);
} else if (flashState == CaptureResult.FLASH_STATE_FIRED) {
LOG.i("FlashAction:", "Waiting flash and we have FIRED state! Taking snapshot.");
LOG.i("FlashAction:", "Waiting flash and we have FIRED state!",
"Taking snapshot.");
setState(STATE_COMPLETED);
} else {
LOG.i("FlashAction:", "Waiting flash but flashState is",
@ -111,7 +117,8 @@ public class Snapshot2PictureRecorder extends SnapshotGlPictureRecorder {
@Override
public void take() {
if (!mActionNeeded) {
LOG.i("take:", "Engine does no metering or needs no flash, taking fast snapshot.");
LOG.i("take:", "Engine does no metering or needs no flash.",
"Taking fast snapshot.");
super.take();
} else {
LOG.i("take:", "Engine needs flash. Starting action");

@ -46,11 +46,13 @@ import android.view.Surface;
* - We move to another thread, and create a new EGL surface for that EGL context.
* - We make this new surface current, and re-draw the textureId on it
* - [Optional: fill the overlayTextureId and draw it on the same surface]
* - We use glReadPixels (through {@link EglBaseSurface#saveFrameTo(Bitmap.CompressFormat)}) and save to file.
* - We use glReadPixels (through {@link EglBaseSurface#saveFrameTo(Bitmap.CompressFormat)})
* and save to file.
*
* We create a new EGL surface and redraw the frame because:
* 1. We want to go off the renderer thread as soon as possible
* 2. We have overlays to be drawn - we don't want to draw them on the preview surface, not even for a frame.
* 2. We have overlays to be drawn - we don't want to draw them on the preview surface,
* not even for a frame.
*/
public class SnapshotGlPictureRecorder extends PictureRecorder {
@ -102,7 +104,9 @@ public class SnapshotGlPictureRecorder extends PictureRecorder {
@RendererThread
@Override
public void onRendererFrame(@NonNull SurfaceTexture surfaceTexture, final float scaleX, final float scaleY) {
public void onRendererFrame(@NonNull SurfaceTexture surfaceTexture,
final float scaleX,
final float scaleY) {
mPreview.removeRendererFrameCallback(this);
SnapshotGlPictureRecorder.this.onRendererFrame(surfaceTexture, scaleX, scaleY);
}
@ -110,6 +114,7 @@ public class SnapshotGlPictureRecorder extends PictureRecorder {
});
}
@SuppressWarnings("WeakerAccess")
@RendererThread
@TargetApi(Build.VERSION_CODES.KITKAT)
protected void onRendererTextureCreated(int textureId) {
@ -126,12 +131,14 @@ public class SnapshotGlPictureRecorder extends PictureRecorder {
}
}
@SuppressWarnings("WeakerAccess")
@RendererThread
@TargetApi(Build.VERSION_CODES.KITKAT)
protected void onRendererFilterChanged(@NonNull Filter filter) {
mViewport.setFilter(filter.copy());
}
@SuppressWarnings("WeakerAccess")
@RendererThread
@TargetApi(Build.VERSION_CODES.KITKAT)
protected void onRendererFrame(@SuppressWarnings("unused") @NonNull final SurfaceTexture surfaceTexture,
@ -175,9 +182,13 @@ public class SnapshotGlPictureRecorder extends PictureRecorder {
* @param scaleX frame scale x in {@link Reference#VIEW}
* @param scaleY frame scale y in {@link Reference#VIEW}
*/
@SuppressWarnings("WeakerAccess")
@WorkerThread
@TargetApi(Build.VERSION_CODES.KITKAT)
protected void takeFrame(@NonNull SurfaceTexture surfaceTexture, float scaleX, float scaleY, @NonNull EGLContext eglContext) {
protected void takeFrame(@NonNull SurfaceTexture surfaceTexture,
float scaleX,
float scaleY,
@NonNull EGLContext eglContext) {
// 0. EGL window will need an output.
// We create a fake one as explained in javadocs.

@ -26,11 +26,13 @@ import com.otaliastudios.cameraview.size.Size;
* into an output surface that belongs to the view hierarchy.
*
* @param <T> the type of view which hosts the content surface
* @param <Output> the type of output, either {@link android.view.SurfaceHolder} or {@link android.graphics.SurfaceTexture}
* @param <Output> the type of output, either {@link android.view.SurfaceHolder}
* or {@link android.graphics.SurfaceTexture}
*/
public abstract class CameraPreview<T extends View, Output> {
protected final static CameraLogger LOG = CameraLogger.create(CameraPreview.class.getSimpleName());
protected final static CameraLogger LOG
= CameraLogger.create(CameraPreview.class.getSimpleName());
/**
* This is used to notify CameraEngine to recompute its camera Preview size.
@ -267,8 +269,9 @@ public abstract class CameraPreview<T extends View, Output> {
}
/**
* At this point we undo the work that was done during {@link #onCreateView(Context, ViewGroup)},
* which basically means removing the root view from the hierarchy.
* At this point we undo the work that was done during
* {@link #onCreateView(Context, ViewGroup)}, which basically means removing the root view
* from the hierarchy.
*/
@SuppressWarnings("WeakerAccess")
@UiThread

@ -32,9 +32,10 @@ import javax.microedition.khronos.opengles.GL10;
*
* - in the SurfaceTexture constructor we pass the GL texture handle that we have created.
*
* - The SurfaceTexture is linked to the Camera1Engine object. The camera will pass down buffers of data with
* a specified size (that is, the Camera1Engine preview size). For this reason we don't have to specify
* surfaceTexture.setDefaultBufferSize() (like we do, for example, in Snapshot1PictureRecorder).
* - The SurfaceTexture is linked to the Camera1Engine object. The camera will pass down
* buffers of data with a specified size (that is, the Camera1Engine preview size).
* For this reason we don't have to specify surfaceTexture.setDefaultBufferSize()
* (like we do, for example, in Snapshot1PictureRecorder).
*
* - When SurfaceTexture.updateTexImage() is called, it will fetch the latest texture image from the
* camera stream and assign it to the GL texture that was passed.
@ -42,12 +43,13 @@ import javax.microedition.khronos.opengles.GL10;
* the transformation matrix to be applied.
*
* - The easy way to render an OpenGL texture is using the {@link GLSurfaceView} class.
* It manages the GL context, hosts a surface and runs a separated rendering thread that will perform
* the rendering.
 * It manages the GL context, hosts a surface and runs a separate rendering thread that will
 * perform the rendering.
*
* - As per docs, we ask the GLSurfaceView to delegate rendering to us, using
* {@link GLSurfaceView#setRenderer(GLSurfaceView.Renderer)}. We request a render on the SurfaceView
* anytime the SurfaceTexture notifies that it has new data available (see OnFrameAvailableListener below).
* {@link GLSurfaceView#setRenderer(GLSurfaceView.Renderer)}. We request a render on the
* SurfaceView anytime the SurfaceTexture notifies that it has new data available
* (see OnFrameAvailableListener below).
*
* - So in short:
* - The SurfaceTexture has buffers of data of mInputStreamSize
@ -55,9 +57,10 @@ import javax.microedition.khronos.opengles.GL10;
* These are determined by the CameraView.onMeasure method.
* - We have a GL rich texture to be drawn (in the given method and thread).
*
* This class will provide rendering callbacks to anyone who registers a {@link RendererFrameCallback}.
* Callbacks are guaranteed to be called on the renderer thread, which means that we can fetch
* the GL context that was created and is managed by the {@link GLSurfaceView}.
* This class will provide rendering callbacks to anyone who registers a
* {@link RendererFrameCallback}. Callbacks are guaranteed to be called on the renderer thread,
* which means that we can fetch the GL context that was created and is managed
* by the {@link GLSurfaceView}.
*/
public class GlCameraPreview extends FilterCameraPreview<GLSurfaceView, SurfaceTexture> {
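To make the flow described above concrete, here is a minimal sketch (illustrative only, not part of this commit; glView and transformMatrix are hypothetical fields) of the usual SurfaceTexture + GLSurfaceView pattern, using android.graphics.SurfaceTexture and android.opengl.GLES20:

int[] textures = new int[1];
GLES20.glGenTextures(1, textures, 0);                          // renderer thread, GL context ready
int textureId = textures[0];
SurfaceTexture surfaceTexture = new SurfaceTexture(textureId); // camera buffers will land here
surfaceTexture.setOnFrameAvailableListener(new SurfaceTexture.OnFrameAvailableListener() {
    @Override
    public void onFrameAvailable(SurfaceTexture st) {
        glView.requestRender();                                // mark the GLSurfaceView dirty
    }
});
// Later, inside GLSurfaceView.Renderer#onDrawFrame, on the renderer thread:
surfaceTexture.updateTexImage();                               // fetch the latest camera frame
surfaceTexture.getTransformMatrix(transformMatrix);            // float[16] texture transform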
@ -66,7 +69,8 @@ public class GlCameraPreview extends FilterCameraPreview<GLSurfaceView, SurfaceT
private int mOutputTextureId = 0;
private SurfaceTexture mInputSurfaceTexture;
private EglViewport mOutputViewport;
private final Set<RendererFrameCallback> mRendererFrameCallbacks = Collections.synchronizedSet(new HashSet<RendererFrameCallback>());
private final Set<RendererFrameCallback> mRendererFrameCallbacks
= Collections.synchronizedSet(new HashSet<RendererFrameCallback>());
@VisibleForTesting float mCropScaleX = 1F;
@VisibleForTesting float mCropScaleY = 1F;
private View mRootView;
@ -79,7 +83,8 @@ public class GlCameraPreview extends FilterCameraPreview<GLSurfaceView, SurfaceT
@NonNull
@Override
protected GLSurfaceView onCreateView(@NonNull Context context, @NonNull ViewGroup parent) {
ViewGroup root = (ViewGroup) LayoutInflater.from(context).inflate(R.layout.cameraview_gl_view, parent, false);
ViewGroup root = (ViewGroup) LayoutInflater.from(context)
.inflate(R.layout.cameraview_gl_view, parent, false);
GLSurfaceView glView = root.findViewById(R.id.gl_surface_view);
glView.setEGLContextClientVersion(2);
glView.setRenderer(instantiateRenderer());
@ -160,8 +165,8 @@ public class GlCameraPreview extends FilterCameraPreview<GLSurfaceView, SurfaceT
}
});
// Since we are using GLSurfaceView.RENDERMODE_WHEN_DIRTY, we must notify the SurfaceView
// of dirtyness, so that it draws again. This is how it's done.
        // Since we are using GLSurfaceView.RENDERMODE_WHEN_DIRTY, we must notify
        // the SurfaceView of dirtiness, so that it draws again. This is how it's done.
mInputSurfaceTexture.setOnFrameAvailableListener(new SurfaceTexture.OnFrameAvailableListener() {
@Override
public void onFrameAvailable(SurfaceTexture surfaceTexture) {
@ -209,8 +214,10 @@ public class GlCameraPreview extends FilterCameraPreview<GLSurfaceView, SurfaceT
if (isCropping()) {
// Scaling is easy, but we must also translate before:
// If the view is 10x1000 (very tall), it will show only the left strip of the preview (not the center one).
// If the view is 1000x10 (very large), it will show only the bottom strip of the preview (not the center one).
// If the view is 10x1000 (very tall), it will show only the left strip
// of the preview (not the center one).
// If the view is 1000x10 (very large), it will show only the bottom strip
// of the preview (not the center one).
float translX = (1F - mCropScaleX) / 2F;
float translY = (1F - mCropScaleY) / 2F;
Matrix.translateM(mTransformMatrix, 0, translX, translY, 0);
@ -245,21 +252,23 @@ public class GlCameraPreview extends FilterCameraPreview<GLSurfaceView, SurfaceT
}
/**
* To crop in GL, we could actually use view.setScaleX and setScaleY, but only from Android N onward.
* See documentation: https://developer.android.com/reference/android/view/SurfaceView
* To crop in GL, we could actually use view.setScaleX and setScaleY, but only from Android N
* onward. See documentation: https://developer.android.com/reference/android/view/SurfaceView
*
* Note: Starting in platform version Build.VERSION_CODES.N, SurfaceView's window position is updated
* synchronously with other View rendering. This means that translating and scaling a SurfaceView on
* screen will not cause rendering artifacts. Such artifacts may occur on previous versions of the
* platform when its window is positioned asynchronously.
* Note: Starting in platform version Build.VERSION_CODES.N, SurfaceView's window position
* is updated synchronously with other View rendering. This means that translating and scaling
* a SurfaceView on screen will not cause rendering artifacts. Such artifacts may occur on
* previous versions of the platform when its window is positioned asynchronously.
*
* But to support older platforms, this seem to work - computing scale values and requesting a new frame,
* then drawing it with a scaled transformation matrix. See {@link Renderer#onDrawFrame(GL10)}.
 * But to support older platforms, this seems to work - computing scale values and requesting
 * a new frame, then drawing it with a scaled transformation matrix.
 * See {@link Renderer#onDrawFrame(GL10)}.
*/
@Override
protected void crop(@NonNull Op<Void> op) {
op.start();
if (mInputStreamWidth > 0 && mInputStreamHeight > 0 && mOutputSurfaceWidth > 0 && mOutputSurfaceHeight > 0) {
if (mInputStreamWidth > 0 && mInputStreamHeight > 0 && mOutputSurfaceWidth > 0
&& mOutputSurfaceHeight > 0) {
float scaleX = 1f, scaleY = 1f;
AspectRatio current = AspectRatio.of(mOutputSurfaceWidth, mOutputSurfaceHeight);
AspectRatio target = AspectRatio.of(mInputStreamWidth, mInputStreamHeight);
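A worked example of the centered crop described above (illustrative, not part of this diff; it assumes a 4:3 stream shown in a square view, and transformMatrix stands in for the float[16] obtained from SurfaceTexture.getTransformMatrix()):

float streamRatio = 4f / 3f;                    // input stream aspect ratio
float viewRatio = 1f;                           // square view
float cropScaleX = viewRatio / streamRatio;     // = 0.75: keep 3/4 of the stream width
float cropScaleY = 1f;                          // keep the full height
float translX = (1f - cropScaleX) / 2f;         // = 0.125: center the sampled window
float translY = (1f - cropScaleY) / 2f;         // = 0
Matrix.translateM(transformMatrix, 0, translX, translY, 0);   // android.opengl.Matrix
Matrix.scaleM(transformMatrix, 0, cropScaleX, cropScaleY, 1);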

@ -14,14 +14,15 @@ import com.otaliastudios.cameraview.R;
/**
* This is the fallback preview when hardware acceleration is off, and is the last resort.
* Currently does not support cropping, which means that {@link com.otaliastudios.cameraview.CameraView}
* is forced to be wrap_content.
* Currently does not support cropping, which means that
* {@link com.otaliastudios.cameraview.CameraView} is forced to be wrap_content.
*
* Do not use.
*/
public class SurfaceCameraPreview extends CameraPreview<SurfaceView, SurfaceHolder> {
private final static CameraLogger LOG = CameraLogger.create(SurfaceCameraPreview.class.getSimpleName());
private final static CameraLogger LOG
= CameraLogger.create(SurfaceCameraPreview.class.getSimpleName());
private boolean mDispatched;
private View mRootView;
@ -33,7 +34,8 @@ public class SurfaceCameraPreview extends CameraPreview<SurfaceView, SurfaceHold
@NonNull
@Override
protected SurfaceView onCreateView(@NonNull Context context, @NonNull ViewGroup parent) {
View root = LayoutInflater.from(context).inflate(R.layout.cameraview_surface_view, parent, false);
View root = LayoutInflater.from(context).inflate(R.layout.cameraview_surface_view, parent,
false);
parent.addView(root, 0);
SurfaceView surfaceView = root.findViewById(R.id.surface_view);
final SurfaceHolder holder = surfaceView.getHolder();
@ -48,7 +50,10 @@ public class SurfaceCameraPreview extends CameraPreview<SurfaceView, SurfaceHold
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
LOG.i("callback:", "surfaceChanged", "w:", width, "h:", height, "dispatched:", mDispatched);
LOG.i("callback:", "surfaceChanged",
"w:", width,
"h:", height,
"dispatched:", mDispatched);
if (!mDispatched) {
dispatchOnSurfaceAvailable(width, height);
mDispatched = true;

@ -37,7 +37,8 @@ public class TextureCameraPreview extends CameraPreview<TextureView, SurfaceText
@NonNull
@Override
protected TextureView onCreateView(@NonNull Context context, @NonNull ViewGroup parent) {
View root = LayoutInflater.from(context).inflate(R.layout.cameraview_texture_view, parent, false);
View root = LayoutInflater.from(context).inflate(R.layout.cameraview_texture_view, parent,
false);
parent.addView(root, 0);
TextureView texture = root.findViewById(R.id.texture_view);
texture.setSurfaceTextureListener(new TextureView.SurfaceTextureListener() {
@ -145,6 +146,8 @@ public class TextureCameraPreview extends CameraPreview<TextureView, SurfaceText
task.setResult(null);
}
});
try { Tasks.await(task.getTask()); } catch (InterruptedException | ExecutionException ignore) { }
try {
Tasks.await(task.getTask());
} catch (InterruptedException | ExecutionException ignore) { }
}
}

@ -11,7 +11,8 @@ import java.util.HashMap;
*/
public class AspectRatio implements Comparable<AspectRatio> {
@VisibleForTesting final static HashMap<String, AspectRatio> sCache = new HashMap<>(16);
@VisibleForTesting final static HashMap<String, AspectRatio> sCache
= new HashMap<>(16);
/**
* Creates an aspect ratio for the given size.

@ -20,30 +20,48 @@ public class SizeSelectorParser {
public SizeSelectorParser(@NonNull TypedArray array) {
List<SizeSelector> pictureConstraints = new ArrayList<>(3);
if (array.hasValue(R.styleable.CameraView_cameraPictureSizeMinWidth)) {
pictureConstraints.add(SizeSelectors.minWidth(array.getInteger(R.styleable.CameraView_cameraPictureSizeMinWidth, 0)));
pictureConstraints.add(SizeSelectors.minWidth(
array.getInteger(R.styleable.CameraView_cameraPictureSizeMinWidth, 0)
));
}
if (array.hasValue(R.styleable.CameraView_cameraPictureSizeMaxWidth)) {
pictureConstraints.add(SizeSelectors.maxWidth(array.getInteger(R.styleable.CameraView_cameraPictureSizeMaxWidth, 0)));
pictureConstraints.add(SizeSelectors.maxWidth(
array.getInteger(R.styleable.CameraView_cameraPictureSizeMaxWidth, 0)
));
}
if (array.hasValue(R.styleable.CameraView_cameraPictureSizeMinHeight)) {
pictureConstraints.add(SizeSelectors.minHeight(array.getInteger(R.styleable.CameraView_cameraPictureSizeMinHeight, 0)));
pictureConstraints.add(SizeSelectors.minHeight(
array.getInteger(R.styleable.CameraView_cameraPictureSizeMinHeight, 0)
));
}
if (array.hasValue(R.styleable.CameraView_cameraPictureSizeMaxHeight)) {
pictureConstraints.add(SizeSelectors.maxHeight(array.getInteger(R.styleable.CameraView_cameraPictureSizeMaxHeight, 0)));
pictureConstraints.add(SizeSelectors.maxHeight(
array.getInteger(R.styleable.CameraView_cameraPictureSizeMaxHeight, 0)
));
}
if (array.hasValue(R.styleable.CameraView_cameraPictureSizeMinArea)) {
pictureConstraints.add(SizeSelectors.minArea(array.getInteger(R.styleable.CameraView_cameraPictureSizeMinArea, 0)));
pictureConstraints.add(SizeSelectors.minArea(
array.getInteger(R.styleable.CameraView_cameraPictureSizeMinArea, 0)
));
}
if (array.hasValue(R.styleable.CameraView_cameraPictureSizeMaxArea)) {
pictureConstraints.add(SizeSelectors.maxArea(array.getInteger(R.styleable.CameraView_cameraPictureSizeMaxArea, 0)));
pictureConstraints.add(SizeSelectors.maxArea(
array.getInteger(R.styleable.CameraView_cameraPictureSizeMaxArea, 0)
));
}
if (array.hasValue(R.styleable.CameraView_cameraPictureSizeAspectRatio)) {
//noinspection ConstantConditions
pictureConstraints.add(SizeSelectors.aspectRatio(AspectRatio.parse(array.getString(R.styleable.CameraView_cameraPictureSizeAspectRatio)), 0));
pictureConstraints.add(SizeSelectors.aspectRatio(
AspectRatio.parse(array.getString(
R.styleable.CameraView_cameraPictureSizeAspectRatio)), 0));
}
if (array.getBoolean(R.styleable.CameraView_cameraPictureSizeSmallest, false)) pictureConstraints.add(SizeSelectors.smallest());
if (array.getBoolean(R.styleable.CameraView_cameraPictureSizeBiggest, false)) pictureConstraints.add(SizeSelectors.biggest());
if (array.getBoolean(R.styleable.CameraView_cameraPictureSizeSmallest, false)) {
pictureConstraints.add(SizeSelectors.smallest());
}
if (array.getBoolean(R.styleable.CameraView_cameraPictureSizeBiggest, false)) {
pictureConstraints.add(SizeSelectors.biggest());
}
pictureSizeSelector = !pictureConstraints.isEmpty() ?
SizeSelectors.and(pictureConstraints.toArray(new SizeSelector[0])) :
SizeSelectors.biggest();
@ -51,29 +69,47 @@ public class SizeSelectorParser {
// Video size selector
List<SizeSelector> videoConstraints = new ArrayList<>(3);
if (array.hasValue(R.styleable.CameraView_cameraVideoSizeMinWidth)) {
videoConstraints.add(SizeSelectors.minWidth(array.getInteger(R.styleable.CameraView_cameraVideoSizeMinWidth, 0)));
videoConstraints.add(SizeSelectors.minWidth(
array.getInteger(R.styleable.CameraView_cameraVideoSizeMinWidth, 0)
));
}
if (array.hasValue(R.styleable.CameraView_cameraVideoSizeMaxWidth)) {
videoConstraints.add(SizeSelectors.maxWidth(array.getInteger(R.styleable.CameraView_cameraVideoSizeMaxWidth, 0)));
videoConstraints.add(SizeSelectors.maxWidth(
array.getInteger(R.styleable.CameraView_cameraVideoSizeMaxWidth, 0)
));
}
if (array.hasValue(R.styleable.CameraView_cameraVideoSizeMinHeight)) {
videoConstraints.add(SizeSelectors.minHeight(array.getInteger(R.styleable.CameraView_cameraVideoSizeMinHeight, 0)));
videoConstraints.add(SizeSelectors.minHeight(
array.getInteger(R.styleable.CameraView_cameraVideoSizeMinHeight, 0)
));
}
if (array.hasValue(R.styleable.CameraView_cameraVideoSizeMaxHeight)) {
videoConstraints.add(SizeSelectors.maxHeight(array.getInteger(R.styleable.CameraView_cameraVideoSizeMaxHeight, 0)));
videoConstraints.add(SizeSelectors.maxHeight(
array.getInteger(R.styleable.CameraView_cameraVideoSizeMaxHeight, 0)
));
}
if (array.hasValue(R.styleable.CameraView_cameraVideoSizeMinArea)) {
videoConstraints.add(SizeSelectors.minArea(array.getInteger(R.styleable.CameraView_cameraVideoSizeMinArea, 0)));
videoConstraints.add(SizeSelectors.minArea(
array.getInteger(R.styleable.CameraView_cameraVideoSizeMinArea, 0)
));
}
if (array.hasValue(R.styleable.CameraView_cameraVideoSizeMaxArea)) {
videoConstraints.add(SizeSelectors.maxArea(array.getInteger(R.styleable.CameraView_cameraVideoSizeMaxArea, 0)));
videoConstraints.add(SizeSelectors.maxArea(
array.getInteger(R.styleable.CameraView_cameraVideoSizeMaxArea, 0)
));
}
if (array.hasValue(R.styleable.CameraView_cameraVideoSizeAspectRatio)) {
//noinspection ConstantConditions
videoConstraints.add(SizeSelectors.aspectRatio(AspectRatio.parse(array.getString(R.styleable.CameraView_cameraVideoSizeAspectRatio)), 0));
videoConstraints.add(SizeSelectors.aspectRatio(
AspectRatio.parse(array.getString(
R.styleable.CameraView_cameraVideoSizeAspectRatio)), 0));
}
if (array.getBoolean(R.styleable.CameraView_cameraVideoSizeSmallest, false)) {
videoConstraints.add(SizeSelectors.smallest());
}
if (array.getBoolean(R.styleable.CameraView_cameraVideoSizeBiggest, false)) {
videoConstraints.add(SizeSelectors.biggest());
}
if (array.getBoolean(R.styleable.CameraView_cameraVideoSizeSmallest, false)) videoConstraints.add(SizeSelectors.smallest());
if (array.getBoolean(R.styleable.CameraView_cameraVideoSizeBiggest, false)) videoConstraints.add(SizeSelectors.biggest());
videoSizeSelector = !videoConstraints.isEmpty() ?
SizeSelectors.and(videoConstraints.toArray(new SizeSelector[0])) :
SizeSelectors.biggest();

@ -34,7 +34,8 @@ public class Full1VideoRecorder extends FullVideoRecorder {
}
@Override
protected boolean onPrepareMediaRecorder(@NonNull VideoResult.Stub stub, @NonNull MediaRecorder mediaRecorder) {
protected boolean onPrepareMediaRecorder(@NonNull VideoResult.Stub stub,
@NonNull MediaRecorder mediaRecorder) {
mediaRecorder.setCamera(mCamera);
mediaRecorder.setVideoSource(MediaRecorder.VideoSource.CAMERA);

@ -71,13 +71,15 @@ public class Full2VideoRecorder extends FullVideoRecorder {
@SuppressLint("NewApi")
@Override
protected boolean onPrepareMediaRecorder(@NonNull VideoResult.Stub stub, @NonNull MediaRecorder mediaRecorder) {
protected boolean onPrepareMediaRecorder(@NonNull VideoResult.Stub stub,
@NonNull MediaRecorder mediaRecorder) {
mediaRecorder.setVideoSource(MediaRecorder.VideoSource.SURFACE);
Size size = stub.rotation % 180 != 0 ? stub.size.flip() : stub.size;
mProfile = CamcorderProfiles.get(mCameraId, size);
// This was an option: get the surface from outside this class, using MediaCodec.createPersistentInputSurface()
// But it doesn't really help since the Camera2 engine refuses a surface that has not been configured,
// so even with that trick we would have to attach the surface to this recorder before creating the CameraSession.
// This was an option: get the surface from outside this class, using
// MediaCodec.createPersistentInputSurface(). But it doesn't really help since the
// Camera2 engine refuses a surface that has not been configured, so even with that trick
// we would have to attach the surface to this recorder before creating the CameraSession.
// mediaRecorder.setInputSurface(mInputSurface);
return super.onPrepareMediaRecorder(stub, mediaRecorder);
}

@ -16,7 +16,8 @@ import androidx.annotation.Nullable;
/**
* A {@link VideoRecorder} that uses {@link android.media.MediaRecorder} APIs.
*
* When started, the media recorder will be prepared in {@link #onPrepareMediaRecorder(VideoResult.Stub, MediaRecorder)}.
* When started, the media recorder will be prepared in
* {@link #onPrepareMediaRecorder(VideoResult.Stub, MediaRecorder)}.
* Subclasses should override this method and, before calling super(), do two things:
* - set the media recorder VideoSource
* - define {@link #mProfile}
@ -45,7 +46,8 @@ public abstract class FullVideoRecorder extends VideoRecorder {
return onPrepareMediaRecorder(stub, new MediaRecorder());
}
protected boolean onPrepareMediaRecorder(@NonNull VideoResult.Stub stub, @NonNull MediaRecorder mediaRecorder) {
protected boolean onPrepareMediaRecorder(@NonNull VideoResult.Stub stub,
@NonNull MediaRecorder mediaRecorder) {
mMediaRecorder = mediaRecorder;
boolean hasAudio = stub.audio == Audio.ON
|| stub.audio == Audio.MONO
@ -74,8 +76,12 @@ public abstract class FullVideoRecorder extends VideoRecorder {
// https://android.googlesource.com/platform/frameworks/av/+/master/media/libmediaplayerservice/StagefrightRecorder.cpp#1650
// https://github.com/MrAlex94/Waterfox-Old/blob/master/media/libstagefright/frameworks/av/media/libstagefright/MediaDefs.cpp
String videoType;
if (stub.videoCodec == VideoCodec.H_264) mProfile.videoCodec = MediaRecorder.VideoEncoder.H264;
if (stub.videoCodec == VideoCodec.H_263) mProfile.videoCodec = MediaRecorder.VideoEncoder.H263;
if (stub.videoCodec == VideoCodec.H_264) {
mProfile.videoCodec = MediaRecorder.VideoEncoder.H264;
}
if (stub.videoCodec == VideoCodec.H_263) {
mProfile.videoCodec = MediaRecorder.VideoEncoder.H263;
}
switch (mProfile.videoCodec) {
case MediaRecorder.VideoEncoder.H263: videoType = "video/3gpp"; break;
case MediaRecorder.VideoEncoder.H264: videoType = "video/avc"; break;
@ -87,14 +93,16 @@ public abstract class FullVideoRecorder extends VideoRecorder {
}
// Merge stub and profile
stub.videoFrameRate = stub.videoFrameRate > 0 ? stub.videoFrameRate : mProfile.videoFrameRate;
stub.videoFrameRate = stub.videoFrameRate > 0 ? stub.videoFrameRate
: mProfile.videoFrameRate;
stub.videoBitRate = stub.videoBitRate > 0 ? stub.videoBitRate : mProfile.videoBitRate;
if (hasAudio) {
stub.audioBitRate = stub.audioBitRate > 0 ? stub.audioBitRate : mProfile.audioBitRate;
}
// Check DeviceEncoders support
DeviceEncoders encoders = new DeviceEncoders(videoType, audioType, DeviceEncoders.MODE_TAKE_FIRST);
DeviceEncoders encoders = new DeviceEncoders(videoType, audioType,
DeviceEncoders.MODE_TAKE_FIRST);
boolean flip = stub.rotation % 180 != 0;
if (flip) stub.size = stub.size.flip();
stub.size = encoders.getSupportedVideoSize(stub.size);
@ -192,8 +200,9 @@ public abstract class FullVideoRecorder extends VideoRecorder {
mMediaRecorder.stop();
} catch (Exception e) {
LOG.w("stop:", "Error while closing media recorder.", e);
// This can happen if stopVideo() is called right after takeVideo() (in which case we don't care)
// Or when prepare()/start() have failed for some reason and we are not allowed to call stop.
// This can happen if stopVideo() is called right after takeVideo()
// (in which case we don't care). Or when prepare()/start() have failed for
// some reason and we are not allowed to call stop.
// Make sure we don't override the error if one exists already.
mResult = null;
if (mError == null) mError = e;

@ -114,13 +114,16 @@ public class SnapshotVideoRecorder extends VideoRecorder implements RendererFram
@RendererThread
@Override
public void onRendererFrame(@NonNull SurfaceTexture surfaceTexture, float scaleX, float scaleY) {
public void onRendererFrame(@NonNull SurfaceTexture surfaceTexture,
float scaleX,
float scaleY) {
if (mCurrentState == STATE_NOT_RECORDING && mDesiredState == STATE_RECORDING) {
LOG.i("Starting the encoder engine.");
// Set default options
if (mResult.videoFrameRate <= 0) mResult.videoFrameRate = DEFAULT_VIDEO_FRAMERATE;
if (mResult.videoBitRate <= 0) mResult.videoBitRate = estimateVideoBitRate(mResult.size, mResult.videoFrameRate);
if (mResult.videoBitRate <= 0) mResult.videoBitRate
= estimateVideoBitRate(mResult.size, mResult.videoFrameRate);
if (mResult.audioBitRate <= 0) mResult.audioBitRate = DEFAULT_AUDIO_BITRATE;
// Define mime types
@ -133,11 +136,13 @@ public class SnapshotVideoRecorder extends VideoRecorder implements RendererFram
String audioType = "audio/mp4a-latm";
// Check the availability of values
DeviceEncoders deviceEncoders = new DeviceEncoders(videoType, audioType, DeviceEncoders.MODE_PREFER_HARDWARE);
DeviceEncoders deviceEncoders = new DeviceEncoders(videoType, audioType,
DeviceEncoders.MODE_PREFER_HARDWARE);
mResult.size = deviceEncoders.getSupportedVideoSize(mResult.size);
mResult.videoBitRate = deviceEncoders.getSupportedVideoBitRate(mResult.videoBitRate);
mResult.audioBitRate = deviceEncoders.getSupportedAudioBitRate(mResult.audioBitRate);
mResult.videoFrameRate = deviceEncoders.getSupportedVideoFrameRate(mResult.size, mResult.videoFrameRate);
mResult.videoFrameRate = deviceEncoders.getSupportedVideoFrameRate(mResult.size,
mResult.videoFrameRate);
// Video
TextureConfig videoConfig = new TextureConfig();
@ -168,7 +173,8 @@ public class SnapshotVideoRecorder extends VideoRecorder implements RendererFram
// Audio
AudioMediaEncoder audioEncoder = null;
if (mResult.audio == Audio.ON || mResult.audio == Audio.MONO || mResult.audio == Audio.STEREO) {
if (mResult.audio == Audio.ON || mResult.audio == Audio.MONO
|| mResult.audio == Audio.STEREO) {
AudioConfig audioConfig = new AudioConfig();
audioConfig.bitRate = mResult.audioBitRate;
if (mResult.audio == Audio.MONO) audioConfig.channels = 1;
@ -191,10 +197,12 @@ public class SnapshotVideoRecorder extends VideoRecorder implements RendererFram
if (mCurrentState == STATE_RECORDING) {
LOG.v("dispatching frame.");
TextureMediaEncoder textureEncoder = (TextureMediaEncoder) mEncoderEngine.getVideoEncoder();
TextureMediaEncoder textureEncoder
= (TextureMediaEncoder) mEncoderEngine.getVideoEncoder();
TextureMediaEncoder.Frame frame = textureEncoder.acquireFrame();
frame.timestampNanos = surfaceTexture.getTimestamp();
frame.timestampMillis = System.currentTimeMillis(); // NOTE: this is an approximation but it seems to work.
// NOTE: this is an approximation but it seems to work:
frame.timestampMillis = System.currentTimeMillis();
surfaceTexture.getTransformMatrix(frame.transform);
if (mEncoderEngine != null) { // Can happen on teardown. At least it used to.
mEncoderEngine.notify(TextureMediaEncoder.FRAME_EVENT, frame);

@ -21,7 +21,8 @@ public class AudioConfig {
final int encoding = AudioFormat.ENCODING_PCM_16BIT; // Determines the sampleSizePerChannel
// The 44.1KHz frequency is the only setting guaranteed to be available on all devices.
final int samplingFrequency = 44100; // samples/sec
final int sampleSizePerChannel = 2; // byte/sample/channel [16bit]. If this changes, review noise introduction
// If sampleSizePerChannel changes, review noise introduction
final int sampleSizePerChannel = 2; // byte/sample/channel [16bit].
final int byteRatePerChannel = samplingFrequency * sampleSizePerChannel; // byte/sec/channel
@NonNull
@ -84,10 +85,10 @@ public class AudioConfig {
/**
* We allocate buffers of {@link #frameSize()} each, which is not much.
*
* This value indicates the maximum number of these buffers that we can allocate at a given instant.
* This value is the number of runnables that the encoder thread is allowed to be 'behind'
* the recorder thread. It's not safe to have it very large or we can end encoding A LOT AFTER
* the actual recording. It's better to reduce this and skip recording at all.
     * This value indicates the maximum number of these buffers that we can allocate at a given
     * instant. This value is the number of runnables that the encoder thread is allowed to be
     * 'behind' the recorder thread. It's not safe to have it very large or we can end up encoding
     * A LOT AFTER the actual recording. It's better to reduce this and skip recording instead.
*
* Should be coordinated with {@link #frameSize()}.
*

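A quick worked example of the rates above (the 2048-byte frame size is hypothetical, not taken from this diff):

int samplingFrequency = 44100;                                // samples/sec
int sampleSizePerChannel = 2;                                 // bytes per sample per channel (16 bit)
int byteRatePerChannel = samplingFrequency * sampleSizePerChannel;  // 88200 bytes/sec/channel
// With a hypothetical 2048-byte mono frame, each buffer covers 2048 / 88200 ≈ 23 ms of audio,
// so a pool of N such buffers lets the encoder lag at most about N * 23 ms behind the
// recorder before frames start being skipped.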
@ -67,7 +67,8 @@ public class AudioMediaEncoder extends MediaEncoder {
mConfig.mimeType,
mConfig.samplingFrequency,
mConfig.channels);
audioFormat.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC);
audioFormat.setInteger(MediaFormat.KEY_AAC_PROFILE,
MediaCodecInfo.CodecProfileLevel.AACObjectLC);
audioFormat.setInteger(MediaFormat.KEY_CHANNEL_MASK, mConfig.audioFormatChannels());
audioFormat.setInteger(MediaFormat.KEY_BIT_RATE, mConfig.bitRate);
try {
@ -79,7 +80,8 @@ public class AudioMediaEncoder extends MediaEncoder {
} catch (IOException e) {
throw new RuntimeException(e);
}
mMediaCodec.configure(audioFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
mMediaCodec.configure(audioFormat, null, null,
MediaCodec.CONFIGURE_FLAG_ENCODE);
mMediaCodec.start();
mByteBufferPool = new ByteBufferPool(mConfig.frameSize(), mConfig.bufferPoolMaxSize());
mAudioNoise = new AudioNoise(mConfig);
@ -199,15 +201,16 @@ public class AudioMediaEncoder extends MediaEncoder {
mCurrentBuffer = mByteBufferPool.get();
if (mCurrentBuffer == null) {
// This can happen and it means that encoding is slow with respect to recording.
// One might be tempted to fix precisely the next frame presentation time when this happens,
// but this is not needed because the current increaseTime() algorithm will consider delays
// when they get large.
// One might be tempted to fix precisely the next frame presentation time when
// this happens, but this is not needed because the current increaseTime()
// algorithm will consider delays when they get large.
// Sleeping before returning is a good way of balancing the two operations.
// However, if endOfStream, we CAN'T lose this frame!
if (endOfStream) {
LOG.v("read thread - eos: true - No buffer, retrying.");
} else {
LOG.w("read thread - eos: false - Skipping audio frame, encoding is too slow.");
LOG.w("read thread - eos: false - Skipping audio frame,",
"encoding is too slow.");
skipFrames(6); // sleep a bit
}
return false;
@ -220,23 +223,27 @@ public class AudioMediaEncoder extends MediaEncoder {
mCurrentReadBytes = mAudioRecord.read(mCurrentBuffer, mConfig.frameSize());
long after = System.nanoTime();
float delayMillis = (after - before) / 1000000F;
float durationMillis = AudioTimestamp.bytesToMillis(mCurrentReadBytes, mConfig.byteRate());
float durationMillis = AudioTimestamp.bytesToMillis(mCurrentReadBytes,
mConfig.byteRate());
LOG.v("read thread - reading took:", delayMillis,
"should be:", durationMillis,
"delay:", delayMillis - durationMillis);
} else {
mCurrentReadBytes = mAudioRecord.read(mCurrentBuffer, mConfig.frameSize());
}
LOG.i("read thread - eos:", endOfStream, "- Read new audio frame. Bytes:", mCurrentReadBytes);
LOG.i("read thread - eos:", endOfStream, "- Read new audio frame. Bytes:",
mCurrentReadBytes);
if (mCurrentReadBytes > 0) { // Good read: increase PTS.
increaseTime(mCurrentReadBytes, endOfStream);
LOG.i("read thread - eos:", endOfStream, "- mLastTimeUs:", mLastTimeUs);
mCurrentBuffer.limit(mCurrentReadBytes);
enqueue(mCurrentBuffer, mLastTimeUs, endOfStream);
} else if (mCurrentReadBytes == AudioRecord.ERROR_INVALID_OPERATION) {
LOG.e("read thread - eos:", endOfStream, "- Got AudioRecord.ERROR_INVALID_OPERATION");
LOG.e("read thread - eos:", endOfStream,
"- Got AudioRecord.ERROR_INVALID_OPERATION");
} else if (mCurrentReadBytes == AudioRecord.ERROR_BAD_VALUE) {
LOG.e("read thread - eos:", endOfStream, "- Got AudioRecord.ERROR_BAD_VALUE");
LOG.e("read thread - eos:", endOfStream,
"- Got AudioRecord.ERROR_BAD_VALUE");
}
return true;
}
@ -264,7 +271,8 @@ public class AudioMediaEncoder extends MediaEncoder {
if (!hasReachedMaxLength()) {
boolean didReachMaxLength = (mLastTimeUs - mFirstTimeUs) > getMaxLengthUs();
if (didReachMaxLength && !endOfStream) {
LOG.w("read thread - this frame reached the maxLength! deltaUs:", mLastTimeUs - mFirstTimeUs);
LOG.w("read thread - this frame reached the maxLength! deltaUs:",
mLastTimeUs - mFirstTimeUs);
notifyMaxLengthReached();
}
}
@ -273,7 +281,9 @@ public class AudioMediaEncoder extends MediaEncoder {
maybeAddNoise();
}
private void enqueue(@NonNull ByteBuffer byteBuffer, long timestamp, boolean isEndOfStream) {
private void enqueue(@NonNull ByteBuffer byteBuffer,
long timestamp,
boolean isEndOfStream) {
if (PERFORMANCE_DEBUG) {
mDebugSendStartMap.put(timestamp, System.nanoTime() / 1000000);
}
@ -312,7 +322,8 @@ public class AudioMediaEncoder extends MediaEncoder {
long gapStart = mTimestamp.getGapStartUs(mLastTimeUs);
long frameUs = AudioTimestamp.bytesToUs(mConfig.frameSize(), mConfig.byteRate());
LOG.w("read thread - GAPS: trying to add", gaps, "noise buffers. PERFORMANCE_MAX_GAPS:", PERFORMANCE_MAX_GAPS);
LOG.w("read thread - GAPS: trying to add", gaps,
"noise buffers. PERFORMANCE_MAX_GAPS:", PERFORMANCE_MAX_GAPS);
for (int i = 0; i < Math.min(gaps, PERFORMANCE_MAX_GAPS); i++) {
ByteBuffer noiseBuffer = mByteBufferPool.get();
if (noiseBuffer == null) {
@ -347,7 +358,8 @@ public class AudioMediaEncoder extends MediaEncoder {
if (mInputBufferQueue.isEmpty()) {
skipFrames(2);
} else {
LOG.i("encoding thread - performing", mInputBufferQueue.size(), "pending operations.");
LOG.i("encoding thread - performing", mInputBufferQueue.size(),
"pending operations.");
InputBuffer inputBuffer;
while ((inputBuffer = mInputBufferQueue.peek()) != null) {
@ -357,10 +369,13 @@ public class AudioMediaEncoder extends MediaEncoder {
Long sendStart = mDebugSendStartMap.remove(inputBuffer.timestamp);
//noinspection StatementWithEmptyBody
if (sendStart != null) {
mDebugSendAvgDelay = ((mDebugSendAvgDelay * mDebugSendCount) + (sendEnd - sendStart)) / (++mDebugSendCount);
LOG.v("send delay millis:", sendEnd - sendStart, "average:", mDebugSendAvgDelay);
mDebugSendAvgDelay = ((mDebugSendAvgDelay * mDebugSendCount)
+ (sendEnd - sendStart)) / (++mDebugSendCount);
LOG.v("send delay millis:", sendEnd - sendStart,
"average:", mDebugSendAvgDelay);
} else {
// This input buffer was already processed (but tryAcquire failed for now).
// This input buffer was already processed
// (but tryAcquire failed for now).
}
}
@ -381,23 +396,29 @@ public class AudioMediaEncoder extends MediaEncoder {
mInputBufferPool.clear();
if (PERFORMANCE_DEBUG) {
// After latest changes, the count here is not so different between MONO and STEREO.
// We get about 400 frames in both cases (430 for MONO, but doesn't seem like a big issue).
LOG.e("EXECUTE DELAY MILLIS:", mDebugExecuteAvgDelay, "COUNT:", mDebugExecuteCount);
LOG.e("SEND DELAY MILLIS:", mDebugSendAvgDelay, "COUNT:", mDebugSendCount);
// We get about 400 frames in both cases (430 for MONO, but doesn't seem like
// a big issue).
LOG.e("EXECUTE DELAY MILLIS:", mDebugExecuteAvgDelay,
"COUNT:", mDebugExecuteCount);
LOG.e("SEND DELAY MILLIS:", mDebugSendAvgDelay,
"COUNT:", mDebugSendCount);
}
}
private void encode(@NonNull InputBuffer buffer) {
long executeStart = System.nanoTime() / 1000000;
LOG.i("encoding thread - performing pending operation for timestamp:", buffer.timestamp, "- encoding.");
buffer.data.put(buffer.source); // NOTE: this copy is prob. the worst part here for performance
LOG.i("encoding thread - performing pending operation for timestamp:",
buffer.timestamp, "- encoding.");
// NOTE: this copy is prob. the worst part here for performance
buffer.data.put(buffer.source);
mByteBufferPool.recycle(buffer.source);
mInputBufferQueue.remove(buffer);
encodeInputBuffer(buffer);
boolean eos = buffer.isEndOfStream;
mInputBufferPool.recycle(buffer);
LOG.i("encoding thread - performing pending operation for timestamp:", buffer.timestamp, "- draining.");
LOG.i("encoding thread - performing pending operation for timestamp:",
buffer.timestamp, "- draining.");
// NOTE: can consider calling this drainOutput on yet another thread, which would let us
// use an even smaller BUFFER_POOL_MAX_SIZE without losing audio frames. But this way
// we can accumulate delay on this new thread without noticing (no pool getting empty).
@ -405,8 +426,10 @@ public class AudioMediaEncoder extends MediaEncoder {
if (PERFORMANCE_DEBUG) {
long executeEnd = System.nanoTime() / 1000000;
mDebugExecuteAvgDelay = ((mDebugExecuteAvgDelay * mDebugExecuteCount) + (executeEnd - executeStart)) / (++mDebugExecuteCount);
LOG.v("execute delay millis:", executeEnd - executeStart, "average:", mDebugExecuteAvgDelay);
mDebugExecuteAvgDelay = ((mDebugExecuteAvgDelay * mDebugExecuteCount)
+ (executeEnd - executeStart)) / (++mDebugExecuteCount);
LOG.v("execute delay millis:", executeEnd - executeStart,
"average:", mDebugExecuteAvgDelay);
}
}
}

@ -69,8 +69,8 @@ class AudioTimestamp {
//noinspection StatementWithEmptyBody
if (correctionUs < 0) {
// This means that this method is being called too often, so that the expected start
// time for this buffer is BEFORE the last buffer end. So, respect the last buffer end
// instead.
// time for this buffer is BEFORE the last buffer end. So, respect the last buffer
// end instead.
}
mGapUs = 0;
mBytesSinceBaseTime += readBytes;

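A sketch of the bytes-to-time conversion used above (assumed implementation, since the actual method bodies are not shown in this diff):

static long bytesToUs(long bytes, int byteRate) {             // byteRate is bytes/sec
    return (bytes * 1_000_000L) / byteRate;
}
static float bytesToMillis(long bytes, int byteRate) {
    return (bytes * 1000F) / byteRate;
}
// Example: 2048 bytes at 88200 bytes/sec -> ~23.2 ms, the "should be" duration that the
// read-timing log above compares against the measured AudioRecord.read() delay.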
@ -8,8 +8,8 @@ import java.nio.ByteBuffer;
import androidx.annotation.RequiresApi;
/**
* A Wrapper to MediaCodec that facilitates the use of API-dependent get{Input/Output}Buffer methods,
* in order to prevent: http://stackoverflow.com/q/30646885
 * A wrapper around MediaCodec that facilitates the use of API-dependent get{Input/Output}Buffer
 * methods, in order to prevent: http://stackoverflow.com/q/30646885
*/
@RequiresApi(Build.VERSION_CODES.JELLY_BEAN_MR2)
class MediaCodecBuffers {

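A minimal sketch of the API-dependent access this wrapper hides (an assumption based on the linked question, not code from this diff):

ByteBuffer getInputBuffer(MediaCodec codec, ByteBuffer[] legacyInputBuffers, int index) {
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
        return codec.getInputBuffer(index);            // API 21+: buffer is returned cleared
    } else {
        ByteBuffer buffer = legacyInputBuffers[index]; // cached once from codec.getInputBuffers()
        buffer.clear();                                // must be cleared manually on old APIs
        return buffer;
    }
}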
@ -76,15 +76,15 @@ import java.util.concurrent.atomic.AtomicInteger;
* milliseconds of the first frame in the {@link System#currentTimeMillis()} reference, so
* something that we can coordinate on.
*/
// https://github.com/saki4510t/AudioVideoRecordingSample/blob/master/app/src/main/java/com/serenegiant/encoder/MediaEncoder.java
@RequiresApi(api = Build.VERSION_CODES.JELLY_BEAN_MR2)
public abstract class MediaEncoder {
private final static String TAG = MediaEncoder.class.getSimpleName();
private final static CameraLogger LOG = CameraLogger.create(TAG);
// Did some test to see which value would maximize our performance in the current setup (infinite audio pool).
// Measured the time it would take to write a 30 seconds video. Based on this, we'll go with TIMEOUT=0 for now.
// Did some test to see which value would maximize our performance in the current setup
// (infinite audio pool). Measured the time it would take to write a 30 seconds video.
// Based on this, we'll go with TIMEOUT=0 for now.
// INPUT_TIMEOUT_US 10000: 46 seconds
// INPUT_TIMEOUT_US 1000: 37 seconds
// INPUT_TIMEOUT_US 100: 33 seconds
@ -171,7 +171,8 @@ public abstract class MediaEncoder {
* works, we might have {@link #onStop()} or {@link #onStopped()} to be executed before
* the previous step has completed.
*/
final void prepare(@NonNull final MediaEncoderEngine.Controller controller, final long maxLengthUs) {
final void prepare(@NonNull final MediaEncoderEngine.Controller controller,
final long maxLengthUs) {
if (mState >= STATE_PREPARING) {
LOG.e(mName, "Wrong state while preparing. Aborting.", mState);
return;
@ -230,14 +231,16 @@ public abstract class MediaEncoder {
*/
@SuppressWarnings("ConstantConditions")
final void notify(final @NonNull String event, final @Nullable Object data) {
if (!mPendingEvents.containsKey(event)) mPendingEvents.put(event, new AtomicInteger(0));
if (!mPendingEvents.containsKey(event)) mPendingEvents.put(event,
new AtomicInteger(0));
final AtomicInteger pendingEvents = mPendingEvents.get(event);
pendingEvents.incrementAndGet();
LOG.v(mName, "Notify was called. Posting. pendingEvents:", pendingEvents.intValue());
mWorker.post(new Runnable() {
@Override
public void run() {
LOG.v(mName, "Notify was called. Executing. pendingEvents:", pendingEvents.intValue());
LOG.v(mName, "Notify was called. Executing. pendingEvents:",
pendingEvents.intValue());
onEvent(event, data);
pendingEvents.decrementAndGet();
}
@ -280,7 +283,8 @@ public abstract class MediaEncoder {
* @param maxLengthUs the maxLength in microseconds
*/
@EncoderThread
protected abstract void onPrepare(@NonNull MediaEncoderEngine.Controller controller, long maxLengthUs);
protected abstract void onPrepare(@NonNull MediaEncoderEngine.Controller controller,
long maxLengthUs);
/**
* Start recording. This might be a lightweight operation
@ -329,8 +333,8 @@ public abstract class MediaEncoder {
}
/**
* Returns a new input buffer and index, waiting at most {@link #INPUT_TIMEOUT_US} if none is available.
* Callers should check the boolean result - true if the buffer was filled.
* Returns a new input buffer and index, waiting at most {@link #INPUT_TIMEOUT_US} if none
* is available. Callers should check the boolean result - true if the buffer was filled.
*
* @param holder the input buffer holder
* @return true if acquired
@ -413,7 +417,9 @@ public abstract class MediaEncoder {
} else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
// should happen before receiving buffers, and should only happen once
if (mController.isStarted()) throw new RuntimeException("MediaFormat changed twice.");
if (mController.isStarted()) {
throw new RuntimeException("MediaFormat changed twice.");
}
MediaFormat newFormat = mMediaCodec.getOutputFormat();
mTrackIndex = mController.notifyStarted(newFormat);
setState(STATE_STARTED);
@ -424,9 +430,10 @@ public abstract class MediaEncoder {
} else {
ByteBuffer encodedData = mBuffers.getOutputBuffer(encoderStatus);
// Codec config means that config data was pulled out and fed to the muxer when we got
// the INFO_OUTPUT_FORMAT_CHANGED status. Ignore it.
boolean isCodecConfig = (mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0;
// Codec config means that config data was pulled out and fed to the muxer
// when we got the INFO_OUTPUT_FORMAT_CHANGED status. Ignore it.
boolean isCodecConfig = (mBufferInfo.flags
& MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0;
if (!isCodecConfig && mController.isStarted() && mBufferInfo.size != 0) {
// adjust the ByteBuffer values to match BufferInfo (not needed?)
@ -437,7 +444,8 @@ public abstract class MediaEncoder {
// reached and stop recording when needed.
if (mFirstTimeUs == Long.MIN_VALUE) {
mFirstTimeUs = mBufferInfo.presentationTimeUs;
LOG.w(mName, "DRAINING - Got the first presentation time:", mFirstTimeUs);
LOG.w(mName, "DRAINING - Got the first presentation time:",
mFirstTimeUs);
}
mLastTimeUs = mBufferInfo.presentationTimeUs;
@ -447,10 +455,12 @@ public abstract class MediaEncoder {
// To address this, encoders are required to call notifyFirstFrameMillis
// so we can adjust here - moving to 1970 reference.
// Extra benefit: we never pass a pts equal to 0, which some encoders refuse.
mBufferInfo.presentationTimeUs = (mStartTimeMillis * 1000) + mLastTimeUs - mFirstTimeUs;
mBufferInfo.presentationTimeUs = (mStartTimeMillis * 1000)
+ mLastTimeUs - mFirstTimeUs;
// Write.
LOG.i(mName, "DRAINING - About to write(). Adjusted presentation:", mBufferInfo.presentationTimeUs);
LOG.i(mName, "DRAINING - About to write(). Adjusted presentation:",
mBufferInfo.presentationTimeUs);
OutputBuffer buffer = mOutputBufferPool.get();
//noinspection ConstantConditions
buffer.info = mBufferInfo;

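A worked numeric example of the presentation-time adjustment above (all values are made up):

long startTimeMillis = 1_560_000_000_000L;   // System.currentTimeMillis() at the first frame
long firstTimeUs = 9_000_000L;               // codec pts of the first drained buffer
long lastTimeUs = 9_500_000L;                // codec pts of the current buffer, 0.5s later
long adjustedUs = (startTimeMillis * 1000) + lastTimeUs - firstTimeUs;
// adjustedUs = 1_560_000_000_000_000 + 500_000: half a second after the recording start,
// expressed in the 1970 epoch reference, and never equal to 0.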
@ -134,7 +134,8 @@ public class MediaEncoderEngine {
mEncoders.add(audioEncoder);
}
try {
mMediaMuxer = new MediaMuxer(file.toString(), MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
mMediaMuxer = new MediaMuxer(file.toString(),
MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
} catch (IOException e) {
throw new RuntimeException(e);
}
@ -151,7 +152,8 @@ public class MediaEncoderEngine {
long maxDurationUs = maxDuration * 1000L;
long finalMaxDurationUs = Long.MAX_VALUE;
if (maxSize > 0 && maxDuration > 0) {
mPossibleEndReason = sizeMaxDurationUs < maxDurationUs ? END_BY_MAX_SIZE : END_BY_MAX_DURATION;
mPossibleEndReason = sizeMaxDurationUs < maxDurationUs ? END_BY_MAX_SIZE
: END_BY_MAX_DURATION;
finalMaxDurationUs = Math.min(sizeMaxDurationUs, maxDurationUs);
} else if (maxSize > 0) {
mPossibleEndReason = END_BY_MAX_SIZE;
@ -207,8 +209,9 @@ public class MediaEncoderEngine {
}
/**
* Called after all encoders have requested a release using {@link Controller#notifyStopped(int)}.
* At this point we will do cleanup and notify the listener.
* Called after all encoders have requested a release using
* {@link Controller#notifyStopped(int)}. At this point we will do cleanup and notify
* the listener.
*/
private void end() {
LOG.i("end:", "Releasing muxer after all encoders have been released.");
@ -284,9 +287,11 @@ public class MediaEncoderEngine {
throw new IllegalStateException("Trying to start but muxer started already");
}
int track = mMediaMuxer.addTrack(format);
LOG.w("notifyStarted:", "Assigned track", track, "to format", format.getString(MediaFormat.KEY_MIME));
LOG.w("notifyStarted:", "Assigned track", track, "to format",
format.getString(MediaFormat.KEY_MIME));
if (++mStartedEncodersCount == mEncoders.size()) {
LOG.w("notifyStarted:", "All encoders have started. Starting muxer and dispatching onEncodingStart().");
LOG.w("notifyStarted:", "All encoders have started.",
"Starting muxer and dispatching onEncodingStart().");
// Go out of this thread since it might be very important for the
// encoders and we don't want to perform expensive operations here.
mControllerThread.run(new Runnable() {
@ -347,7 +352,8 @@ public class MediaEncoderEngine {
LOG.v("write:", "Writing into muxer -",
"track:", buffer.trackIndex,
"presentation:", buffer.info.presentationTimeUs,
"readable:", calendar.get(Calendar.SECOND) + ":" + calendar.get(Calendar.MILLISECOND),
"readable:", calendar.get(Calendar.SECOND) + ":"
+ calendar.get(Calendar.MILLISECOND),
"count:", count);
} else {
LOG.v("write:", "Writing into muxer -",
@ -360,9 +366,9 @@ public class MediaEncoderEngine {
/**
* Requests that the engine stops. This is not executed until all encoders call
* this method, so it is a kind of soft request, just like {@link #notifyStarted(MediaFormat)}.
* To be used when maxLength / maxSize constraints are reached, for example.
*
* this method, so it is a kind of soft request, just like
* {@link #notifyStarted(MediaFormat)}. To be used when maxLength / maxSize constraints
* are reached, for example.
* When this succeeds, {@link MediaEncoder#stop()} is called.
*
* @param track track
@ -371,7 +377,8 @@ public class MediaEncoderEngine {
synchronized (mControllerLock) {
LOG.w("requestStop:", "Called for track", track);
if (--mStartedEncodersCount == 0) {
LOG.w("requestStop:", "All encoders have requested a stop. Stopping them.");
LOG.w("requestStop:", "All encoders have requested a stop.",
"Stopping them.");
mEndReason = mPossibleEndReason;
// Go out of this thread since it might be very important for the
// encoders and we don't want to perform expensive operations here.
@ -395,7 +402,8 @@ public class MediaEncoderEngine {
synchronized (mControllerLock) {
LOG.w("notifyStopped:", "Called for track", track);
if (++mStoppedEncodersCount == mEncoders.size()) {
LOG.w("requestStop:", "All encoders have been stopped. Stopping the muxer.");
LOG.w("requestStop:", "All encoders have been stopped.",
"Stopping the muxer.");
// Go out of this thread since it might be very important for the
// encoders and we don't want to perform expensive operations here.
mControllerThread.run(new Runnable() {

@ -80,7 +80,8 @@ public class TextureMediaEncoder extends VideoMediaEncoder<TextureConfig> {
@NonNull
public Frame acquireFrame() {
if (mFramePool.isEmpty()) {
throw new RuntimeException("Need more frames than this! Please increase the pool size.");
throw new RuntimeException("Need more frames than this! " +
"Please increase the pool size.");
} else {
//noinspection ConstantConditions
return mFramePool.get();
@ -123,7 +124,7 @@ public class TextureMediaEncoder extends VideoMediaEncoder<TextureConfig> {
// Always render the first few frames, or muxer fails.
return true;
} else if (getPendingEvents(FRAME_EVENT) > 2) {
LOG.w("shouldRenderFrame - Dropping frame because we already have too many pending events:",
LOG.w("shouldRenderFrame - Dropping, we already have too many pending events:",
getPendingEvents(FRAME_EVENT));
return false;
} else {
@ -188,8 +189,9 @@ public class TextureMediaEncoder extends VideoMediaEncoder<TextureConfig> {
"timestampUs:", frame.timestampUs(),
"- rendering.");
// 1. We must scale this matrix like GlCameraPreview does, because it might have some cropping.
// Scaling takes place with respect to the (0, 0, 0) point, so we must apply a Translation to compensate.
// 1. We must scale this matrix like GlCameraPreview does, because it might have some
// cropping. Scaling takes place with respect to the (0, 0, 0) point, so we must apply
// a Translation to compensate.
float[] transform = frame.transform;
float scaleX = mConfig.scaleX;
float scaleY = mConfig.scaleY;
@ -198,10 +200,10 @@ public class TextureMediaEncoder extends VideoMediaEncoder<TextureConfig> {
Matrix.translateM(transform, 0, scaleTranslX, scaleTranslY, 0);
Matrix.scaleM(transform, 0, scaleX, scaleY, 1);
// 2. We also must rotate this matrix. In GlCameraPreview it is not needed because it is a live
// stream, but the output video, must be correctly rotated based on the device rotation at the moment.
// Rotation also takes place with respect to the origin (the Z axis), so we must
// translate to origin, rotate, then back to where we were.
        // 2. We also must rotate this matrix. In GlCameraPreview it is not needed because it is
        // a live stream, but the output video must be correctly rotated based on the device
        // rotation at the moment. Rotation also takes place with respect to the origin
        // (the Z axis), so we must translate to origin, rotate, then back to where we were.
Matrix.translateM(transform, 0, 0.5F, 0.5F, 0);
Matrix.rotateM(transform, 0, mTransformRotation, 0, 0, 1);
Matrix.translateM(transform, 0, -0.5F, -0.5F, 0);
@ -209,9 +211,12 @@ public class TextureMediaEncoder extends VideoMediaEncoder<TextureConfig> {
// 3. Do the same for overlays with their own rotation.
if (mConfig.hasOverlay()) {
mConfig.overlayDrawer.draw(mConfig.overlayTarget);
Matrix.translateM(mConfig.overlayDrawer.getTransform(), 0, 0.5F, 0.5F, 0);
Matrix.rotateM(mConfig.overlayDrawer.getTransform(), 0, mConfig.overlayRotation, 0, 0, 1);
Matrix.translateM(mConfig.overlayDrawer.getTransform(), 0, -0.5F, -0.5F, 0);
Matrix.translateM(mConfig.overlayDrawer.getTransform(),
0, 0.5F, 0.5F, 0);
Matrix.rotateM(mConfig.overlayDrawer.getTransform(), 0, mConfig.overlayRotation,
0, 0, 1);
Matrix.translateM(mConfig.overlayDrawer.getTransform(),
0, -0.5F, -0.5F, 0);
}
mViewport.drawFrame(frame.timestampUs(), mConfig.textureId, transform);
if (mConfig.hasOverlay()) {

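A quick self-contained check (illustrative, not part of this commit) that the translate-rotate-translate step above keeps the texture center fixed:

float[] m = new float[16];
Matrix.setIdentityM(m, 0);
Matrix.translateM(m, 0, 0.5F, 0.5F, 0);
Matrix.rotateM(m, 0, 90, 0, 0, 1);              // any rotation around the Z axis
Matrix.translateM(m, 0, -0.5F, -0.5F, 0);
float[] center = {0.5F, 0.5F, 0F, 1F};
float[] rotated = new float[4];
Matrix.multiplyMV(rotated, 0, m, 0, center, 0);
// rotated ≈ {0.5, 0.5, 0, 1}: the center is unchanged, so the frame rotates in place
// instead of spinning around the (0, 0) texture corner.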
@ -54,13 +54,17 @@ abstract class VideoMediaEncoder<C extends VideoConfig> extends MediaEncoder {
@EncoderThread
@Override
protected void onPrepare(@NonNull MediaEncoderEngine.Controller controller, long maxLengthUs) {
MediaFormat format = MediaFormat.createVideoFormat(mConfig.mimeType, mConfig.width, mConfig.height);
// Failing to specify some of these can cause the MediaCodec configure() call to throw an unhelpful exception.
// About COLOR_FormatSurface, see https://stackoverflow.com/q/28027858/4288782
// This just means it is an opaque, implementation-specific format that the device GPU prefers.
// So as long as we use the GPU to draw, the format will match what the encoder expects.
format.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
MediaFormat format = MediaFormat.createVideoFormat(mConfig.mimeType, mConfig.width,
mConfig.height);
// Failing to specify some of these can cause the MediaCodec configure() call to throw an
// unhelpful exception. About COLOR_FormatSurface, see
// https://stackoverflow.com/q/28027858/4288782
// This just means it is an opaque, implementation-specific format that the device
// GPU prefers. So as long as we use the GPU to draw, the format will match what
// the encoder expects.
format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
format.setInteger(MediaFormat.KEY_BIT_RATE, mConfig.bitRate);
format.setInteger(MediaFormat.KEY_FRAME_RATE, mConfig.frameRate);
format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1); // seconds between key frames!

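A minimal sketch of the Surface-input encoder setup that COLOR_FormatSurface implies (illustrative only; exception handling omitted, and the 720x1280 / bitrate / frame-rate values are arbitrary):

MediaFormat format = MediaFormat.createVideoFormat("video/avc", 720, 1280);
format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
        MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
format.setInteger(MediaFormat.KEY_BIT_RATE, 2_000_000);
format.setInteger(MediaFormat.KEY_FRAME_RATE, 30);
format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);
MediaCodec codec = MediaCodec.createEncoderByType("video/avc");
codec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
Surface inputSurface = codec.createInputSurface();  // only valid between configure() and start()
codec.start();
// Whatever is drawn into inputSurface via EGL/GLES reaches the encoder in the opaque,
// GPU-preferred format that COLOR_FormatSurface stands for.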