Small improvements (#588)

* Abort actions on exit

* Fix VideoRecorder state

* Add missing metering tests

* Fix default vertex shader definition

* Fix #582

* Make Camera2Engine more open

* Add timestampUs in Filter.draw signature

* Add timestampUs in BaseFilter.onPreDraw, onDraw, onPostDraw

* Change README

* Fix FrameProcessing segmentation fault

* Fix #590 by throwing

* Fix StackOverflowErrors in AudioMediaEncoder (Fixes #594)

* Enforce 100 character limits

* Improve encoder selection

* Simplify DeviceEncoders class

* Improve overlay add/remove

* Add changelog info

* Add more info
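
The Filter API entries above ("Add timestampUs in Filter.draw signature" and "Add timestampUs in BaseFilter.onPreDraw, onDraw, onPostDraw") mean custom filters now receive the frame timestamp (in microseconds) at every draw step, as the updated tests in this diff show. A minimal sketch of a custom filter using the new hooks; the class name and the shader variable names are illustrative assumptions, not part of this commit:

```java
import androidx.annotation.NonNull;
import com.otaliastudios.cameraview.filter.BaseFilter;

// Hypothetical example, not part of this PR: a filter that records the
// per-frame timestamp now passed through the draw pipeline.
public class TimestampAwareFilter extends BaseFilter {

    private long lastFrameTimestampUs;

    @NonNull
    @Override
    public String getFragmentShader() {
        // Simple passthrough fragment shader for the external camera texture.
        // Variable names follow the library defaults (assumed here).
        return "#extension GL_OES_EGL_image_external : require\n"
                + "precision mediump float;\n"
                + "varying vec2 vTextureCoord;\n"
                + "uniform samplerExternalOES sTexture;\n"
                + "void main() {\n"
                + "  gl_FragColor = texture2D(sTexture, vTextureCoord);\n"
                + "}\n";
    }

    @Override
    protected void onPreDraw(long timestampUs, @NonNull float[] transformMatrix) {
        super.onPreDraw(timestampUs, transformMatrix);
        // The frame timestamp is now available before drawing,
        // e.g. to drive time-based uniforms or animations.
        lastFrameTimestampUs = timestampUs;
    }

    @Override
    protected void onDraw(long timestampUs) {
        super.onDraw(timestampUs);
    }

    @Override
    protected void onPostDraw(long timestampUs) {
        super.onPostDraw(timestampUs);
    }
}
```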
Mattia Iavarone committed via GitHub (5 years ago)
parent 4ddd2af731
commit 5619dbdca5
  1. README.md (2)
  2. cameraview/src/androidTest/java/com/otaliastudios/cameraview/CameraViewTest.java (27)
  3. cameraview/src/androidTest/java/com/otaliastudios/cameraview/engine/CameraIntegration1Test.java (5)
  4. cameraview/src/androidTest/java/com/otaliastudios/cameraview/engine/CameraIntegration2Test.java (5)
  5. cameraview/src/androidTest/java/com/otaliastudios/cameraview/engine/CameraIntegrationTest.java (40)
  6. cameraview/src/androidTest/java/com/otaliastudios/cameraview/filter/BaseFilterTest.java (21)
  7. cameraview/src/androidTest/java/com/otaliastudios/cameraview/filter/MultiFilterTest.java (17)
  8. cameraview/src/androidTest/java/com/otaliastudios/cameraview/internal/DeviceEncodersTest.java (13)
  9. cameraview/src/androidTest/java/com/otaliastudios/cameraview/overlay/OverlayDrawerTest.java (3)
  10. cameraview/src/main/java/com/otaliastudios/cameraview/BitmapCallback.java (2)
  11. cameraview/src/main/java/com/otaliastudios/cameraview/CameraListener.java (13)
  12. cameraview/src/main/java/com/otaliastudios/cameraview/CameraLogger.java (10)
  13. cameraview/src/main/java/com/otaliastudios/cameraview/CameraOptions.java (47)
  14. cameraview/src/main/java/com/otaliastudios/cameraview/CameraUtils.java (45)
  15. cameraview/src/main/java/com/otaliastudios/cameraview/CameraView.java (154)
  16. cameraview/src/main/java/com/otaliastudios/cameraview/PictureResult.java (3)
  17. cameraview/src/main/java/com/otaliastudios/cameraview/VideoResult.java (3)
  18. cameraview/src/main/java/com/otaliastudios/cameraview/controls/ControlParser.java (23)
  19. cameraview/src/main/java/com/otaliastudios/cameraview/engine/Camera1Engine.java (86)
  20. cameraview/src/main/java/com/otaliastudios/cameraview/engine/Camera2Engine.java (342)
  21. cameraview/src/main/java/com/otaliastudios/cameraview/engine/CameraEngine.java (181)
  22. cameraview/src/main/java/com/otaliastudios/cameraview/engine/LogAction.java (35)
  23. cameraview/src/main/java/com/otaliastudios/cameraview/engine/Step.java (20)
  24. cameraview/src/main/java/com/otaliastudios/cameraview/engine/action/Action.java (21)
  25. cameraview/src/main/java/com/otaliastudios/cameraview/engine/action/ActionHolder.java (3)
  26. cameraview/src/main/java/com/otaliastudios/cameraview/engine/action/ActionWrapper.java (8)
  27. cameraview/src/main/java/com/otaliastudios/cameraview/engine/action/BaseAction.java (10)
  28. cameraview/src/main/java/com/otaliastudios/cameraview/engine/action/SequenceAction.java (6)
  29. cameraview/src/main/java/com/otaliastudios/cameraview/engine/action/TogetherAction.java (6)
  30. cameraview/src/main/java/com/otaliastudios/cameraview/engine/lock/ExposureLock.java (13)
  31. cameraview/src/main/java/com/otaliastudios/cameraview/engine/lock/FocusLock.java (13)
  32. cameraview/src/main/java/com/otaliastudios/cameraview/engine/lock/WhiteBalanceLock.java (11)
  33. cameraview/src/main/java/com/otaliastudios/cameraview/engine/meter/BaseReset.java (3)
  34. cameraview/src/main/java/com/otaliastudios/cameraview/engine/meter/ExposureMeter.java (13)
  35. cameraview/src/main/java/com/otaliastudios/cameraview/engine/meter/ExposureReset.java (15)
  36. cameraview/src/main/java/com/otaliastudios/cameraview/engine/meter/FocusMeter.java (3)
  37. cameraview/src/main/java/com/otaliastudios/cameraview/engine/meter/FocusReset.java (3)
  38. cameraview/src/main/java/com/otaliastudios/cameraview/engine/meter/MeterAction.java (29)
  39. cameraview/src/main/java/com/otaliastudios/cameraview/engine/meter/WhiteBalanceMeter.java (13)
  40. cameraview/src/main/java/com/otaliastudios/cameraview/engine/meter/WhiteBalanceReset.java (6)
  41. cameraview/src/main/java/com/otaliastudios/cameraview/engine/offset/Axis.java (2)
  42. cameraview/src/main/java/com/otaliastudios/cameraview/filter/BaseFilter.java (94)
  43. cameraview/src/main/java/com/otaliastudios/cameraview/filter/Filter.java (3)
  44. cameraview/src/main/java/com/otaliastudios/cameraview/filter/MultiFilter.java (27)
  45. cameraview/src/main/java/com/otaliastudios/cameraview/filters/AutoFixFilter.java (9)
  46. cameraview/src/main/java/com/otaliastudios/cameraview/filters/BlackAndWhiteFilter.java (4)
  47. cameraview/src/main/java/com/otaliastudios/cameraview/filters/BrightnessFilter.java (8)
  48. cameraview/src/main/java/com/otaliastudios/cameraview/filters/ContrastFilter.java (8)
  49. cameraview/src/main/java/com/otaliastudios/cameraview/filters/CrossProcessFilter.java (4)
  50. cameraview/src/main/java/com/otaliastudios/cameraview/filters/DocumentaryFilter.java (12)
  51. cameraview/src/main/java/com/otaliastudios/cameraview/filters/DuotoneFilter.java (8)
  52. cameraview/src/main/java/com/otaliastudios/cameraview/filters/FillLightFilter.java (8)
  53. cameraview/src/main/java/com/otaliastudios/cameraview/filters/GammaFilter.java (9)
  54. cameraview/src/main/java/com/otaliastudios/cameraview/filters/GrainFilter.java (26)
  55. cameraview/src/main/java/com/otaliastudios/cameraview/filters/GrayscaleFilter.java (4)
  56. cameraview/src/main/java/com/otaliastudios/cameraview/filters/HueFilter.java (8)
  57. cameraview/src/main/java/com/otaliastudios/cameraview/filters/InvertColorsFilter.java (4)
  58. cameraview/src/main/java/com/otaliastudios/cameraview/filters/LomoishFilter.java (29)
  59. cameraview/src/main/java/com/otaliastudios/cameraview/filters/PosterizeFilter.java (4)
  60. cameraview/src/main/java/com/otaliastudios/cameraview/filters/SaturationFilter.java (15)
  61. cameraview/src/main/java/com/otaliastudios/cameraview/filters/SepiaFilter.java (4)
  62. cameraview/src/main/java/com/otaliastudios/cameraview/filters/SharpnessFilter.java (24)
  63. cameraview/src/main/java/com/otaliastudios/cameraview/filters/TemperatureFilter.java (8)
  64. cameraview/src/main/java/com/otaliastudios/cameraview/filters/TintFilter.java (8)
  65. cameraview/src/main/java/com/otaliastudios/cameraview/filters/VignetteFilter.java (13)
  66. cameraview/src/main/java/com/otaliastudios/cameraview/frame/Frame.java (4)
  67. cameraview/src/main/java/com/otaliastudios/cameraview/frame/FrameManager.java (23)
  68. cameraview/src/main/java/com/otaliastudios/cameraview/gesture/Gesture.java (3)
  69. cameraview/src/main/java/com/otaliastudios/cameraview/gesture/GestureAction.java (5)
  70. cameraview/src/main/java/com/otaliastudios/cameraview/gesture/GestureParser.java (16)
  71. cameraview/src/main/java/com/otaliastudios/cameraview/gesture/PinchGestureFinder.java (3)
  72. cameraview/src/main/java/com/otaliastudios/cameraview/gesture/ScrollGestureFinder.java (11)
  73. cameraview/src/main/java/com/otaliastudios/cameraview/gesture/TapGestureFinder.java (3)
  74. cameraview/src/main/java/com/otaliastudios/cameraview/internal/DeviceEncoders.java (87)
  75. cameraview/src/main/java/com/otaliastudios/cameraview/internal/GlUtils.java (6)
  76. cameraview/src/main/java/com/otaliastudios/cameraview/internal/GridLinesLayout.java (3)
  77. cameraview/src/main/java/com/otaliastudios/cameraview/internal/Issue514Workaround.java (28)
  78. cameraview/src/main/java/com/otaliastudios/cameraview/internal/egl/EglBaseSurface.java (3)
  79. cameraview/src/main/java/com/otaliastudios/cameraview/internal/egl/EglCore.java (7)
  80. cameraview/src/main/java/com/otaliastudios/cameraview/internal/egl/EglViewport.java (19)
  81. cameraview/src/main/java/com/otaliastudios/cameraview/internal/egl/EglWindowSurface.java (21)
  82. cameraview/src/main/java/com/otaliastudios/cameraview/internal/utils/CamcorderProfiles.java (9)
  83. cameraview/src/main/java/com/otaliastudios/cameraview/internal/utils/ImageHelper.java (3)
  84. cameraview/src/main/java/com/otaliastudios/cameraview/internal/utils/OrientationHelper.java (10)
  85. cameraview/src/main/java/com/otaliastudios/cameraview/internal/utils/Pool.java (15)
  86. cameraview/src/main/java/com/otaliastudios/cameraview/internal/utils/RotationHelper.java (4)
  87. cameraview/src/main/java/com/otaliastudios/cameraview/internal/utils/WorkerHandler.java (9)
  88. cameraview/src/main/java/com/otaliastudios/cameraview/markers/AutoFocusMarker.java (4)
  89. cameraview/src/main/java/com/otaliastudios/cameraview/markers/DefaultAutoFocusMarker.java (13)
  90. cameraview/src/main/java/com/otaliastudios/cameraview/overlay/OverlayDrawer.java (8)
  91. cameraview/src/main/java/com/otaliastudios/cameraview/overlay/OverlayLayout.java (9)
  92. cameraview/src/main/java/com/otaliastudios/cameraview/picture/Full1PictureRecorder.java (4)
  93. cameraview/src/main/java/com/otaliastudios/cameraview/picture/Full2PictureRecorder.java (6)
  94. cameraview/src/main/java/com/otaliastudios/cameraview/picture/PictureRecorder.java (3)
  95. cameraview/src/main/java/com/otaliastudios/cameraview/picture/Snapshot1PictureRecorder.java (6)
  96. cameraview/src/main/java/com/otaliastudios/cameraview/picture/Snapshot2PictureRecorder.java (31)
  97. cameraview/src/main/java/com/otaliastudios/cameraview/picture/SnapshotGlPictureRecorder.java (26)
  98. cameraview/src/main/java/com/otaliastudios/cameraview/preview/CameraPreview.java (11)
  99. cameraview/src/main/java/com/otaliastudios/cameraview/preview/GlCameraPreview.java (62)
  100. cameraview/src/main/java/com/otaliastudios/cameraview/preview/SurfaceCameraPreview.java (15)
  Some files were not shown because too many files have changed in this diff.

@ -54,7 +54,7 @@ If you like the project, make profit from it, or simply want to thank back, plea
[donating](https://natario1.github.io/CameraView/extra/donate) now! You can either make a one time
donation or become a sponsor, in which case your company logo will immediately show up below.
CameraView is trusted and supported by [ShareChat](https://sharechat.com/), a social media app with over 14 million daily active users.
CameraView is trusted and supported by [ShareChat](https://sharechat.com/), a social media app with over 100 million downloads.
<p align="center">
<img src="docs/static/sharechat.png" width="100%">

@ -930,6 +930,33 @@ public class CameraViewTest extends BaseTest {
verify(cameraView.mOverlayLayout, never()).addView(overlay, params);
}
@Test
public void testOverlays_removeOverlayView() {
// First add one.
cameraView.mOverlayLayout = spy(cameraView.mOverlayLayout);
View overlay = new View(getContext());
OverlayLayout.LayoutParams params = new OverlayLayout.LayoutParams(10, 10);
cameraView.addView(overlay, 0, params);
reset(cameraView.mOverlayLayout);
// Then remove.
cameraView.removeView(overlay);
verify(cameraView.mOverlayLayout, times(1)).isOverlay(params);
verify(cameraView.mOverlayLayout, times(1)).removeView(overlay);
}
@Test
public void testOverlays_dontRemoveOverlayView() {
// First add a view.
cameraView.mOverlayLayout = spy(cameraView.mOverlayLayout);
View notOverlay = new View(getContext());
ViewGroup.LayoutParams notOverlayParams = new ViewGroup.LayoutParams(10, 10);
cameraView.addView(notOverlay, notOverlayParams);
reset(cameraView.mOverlayLayout);
// Then remove.
cameraView.removeView(notOverlay);
verify(cameraView.mOverlayLayout, never()).removeView(notOverlay);
}
//endregion
// TODO: test permissions
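
These new tests cover the removeView override added to CameraView.java further down in this diff: overlay children can now be removed at runtime as well as added. A minimal sketch of the round trip from application code; the helper class is hypothetical, real apps usually declare overlays in XML, and direct use of OverlayLayout.LayoutParams here is an assumption taken from the test above:

```java
import android.content.Context;
import android.view.View;
import com.otaliastudios.cameraview.CameraView;
import com.otaliastudios.cameraview.overlay.OverlayLayout;

class OverlayExample {
    // Mirrors testOverlays_removeOverlayView: add an overlay child, then remove it.
    static void addAndRemoveWatermark(Context context, CameraView cameraView) {
        View watermark = new View(context);
        // Overlay layout params mark the child as an overlay (usually set from XML).
        OverlayLayout.LayoutParams params = new OverlayLayout.LayoutParams(10, 10);
        cameraView.addView(watermark, params);  // routed to the internal OverlayLayout
        cameraView.removeView(watermark);       // now also routed there (new in this PR)
    }
}
```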

@ -27,4 +27,9 @@ public class CameraIntegration1Test extends CameraIntegrationTest {
protected Engine getEngine() {
return Engine.CAMERA1;
}
@Override
protected long getMeteringTimeoutMillis() {
return Camera1Engine.AUTOFOCUS_END_DELAY_MILLIS;
}
}

@ -55,4 +55,9 @@ public class CameraIntegration2Test extends CameraIntegrationTest {
}.start(engine);
try { latch.await(); } catch (InterruptedException ignore) {}
}
@Override
protected long getMeteringTimeoutMillis() {
return Camera2Engine.METER_TIMEOUT;
}
}

@ -658,8 +658,8 @@ public abstract class CameraIntegrationTest extends BaseTest {
doEndOp(focus, 1).when(listener).onAutoFocusEnd(anyBoolean(), any(PointF.class));
camera.startAutoFocus(1, 1);
// Stop is not guaranteed to be called, we use a delay. So wait at least the delay time.
PointF point = focus.await(1000 + Camera1Engine.AUTOFOCUS_END_DELAY_MILLIS);
// Stop routine can fail, so engines use a timeout. So wait at least the timeout time.
PointF point = focus.await(1000 + getMeteringTimeoutMillis());
if (o.isAutoFocusSupported()) {
assertNotNull(point);
assertEquals(point, new PointF(1, 1));
@ -668,6 +668,8 @@ public abstract class CameraIntegrationTest extends BaseTest {
}
}
protected abstract long getMeteringTimeoutMillis();
//endregion
//region capture
@ -717,6 +719,24 @@ public abstract class CameraIntegrationTest extends BaseTest {
openSync(true);
camera.takePicture();
waitForUiException();
camera.takePicture();
}
@Test
public void testCapturePicture_withMetering() {
openSync(true);
camera.setPictureMetering(true);
camera.takePicture();
waitForPictureResult(true);
}
@Test
public void testCapturePicture_withoutMetering() {
openSync(true);
camera.setPictureMetering(false);
camera.takePicture();
waitForPictureResult(true);
}
@Test
@ -759,6 +779,22 @@ public abstract class CameraIntegrationTest extends BaseTest {
assertTrue(result.isSnapshot());
}
@Test
public void testCaptureSnapshot_withMetering() {
openSync(true);
camera.setPictureSnapshotMetering(true);
camera.takePictureSnapshot();
waitForPictureResult(true);
}
@Test
public void testCaptureSnapshot_withoutMetering() {
openSync(true);
camera.setPictureSnapshotMetering(false);
camera.takePictureSnapshot();
waitForPictureResult(true);
}
//endregion
//region Frame Processing
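
The metering tests added above exercise the picture and snapshot metering toggles. For reference, a minimal sketch of how an app drives the same flags; the method calls are taken from the test code, while the helper class is hypothetical:

```java
import com.otaliastudios.cameraview.CameraView;

class MeteringExample {
    // Capture a full picture and a snapshot, with metering enabled or skipped.
    static void capture(CameraView camera, boolean meterBeforeCapture) {
        // Full-quality pictures: run (or skip) the pre-capture metering sequence.
        camera.setPictureMetering(meterBeforeCapture);
        camera.takePicture();

        // Picture snapshots have their own, independent flag.
        camera.setPictureSnapshotMetering(meterBeforeCapture);
        camera.takePictureSnapshot();
    }
}
```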

@ -8,21 +8,16 @@ import androidx.test.ext.junit.runners.AndroidJUnit4;
import androidx.test.filters.SmallTest;
import com.otaliastudios.cameraview.BaseEglTest;
import com.otaliastudios.cameraview.BaseTest;
import com.otaliastudios.cameraview.internal.GlUtils;
import com.otaliastudios.cameraview.internal.egl.EglViewport;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import java.util.HashSet;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyInt;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.times;
@ -108,10 +103,10 @@ public class BaseFilterTest extends BaseEglTest {
public void testDraw_whenInvalid() {
filter = spy(new TestFilter());
float[] matrix = new float[16];
filter.draw(matrix);
verify(filter, never()).onPreDraw(matrix);
verify(filter, never()).onDraw();
verify(filter, never()).onPostDraw();
filter.draw(0L, matrix);
verify(filter, never()).onPreDraw(0L, matrix);
verify(filter, never()).onDraw(0L);
verify(filter, never()).onPostDraw(0L);
}
@Test
@ -122,10 +117,10 @@ public class BaseFilterTest extends BaseEglTest {
int texture = viewport.createTexture();
float[] matrix = new float[16];
viewport.drawFrame(texture, matrix);
verify(filter, times(1)).onPreDraw(matrix);
verify(filter, times(1)).onDraw();
verify(filter, times(1)).onPostDraw();
viewport.drawFrame(0L, texture, matrix);
verify(filter, times(1)).onPreDraw(0L, matrix);
verify(filter, times(1)).onDraw(0L);
verify(filter, times(1)).onPostDraw(0L);
viewport.release();
}

@ -3,7 +3,6 @@ package com.otaliastudios.cameraview.filter;
import android.opengl.GLES20;
import androidx.annotation.NonNull;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import androidx.test.filters.SmallTest;
@ -30,13 +29,13 @@ import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyInt;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.atLeastOnce;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
@RunWith(AndroidJUnit4.class)
@ -146,7 +145,7 @@ public class MultiFilterTest extends BaseEglTest {
EglViewport viewport = new EglViewport(multiFilter);
int texture = viewport.createTexture();
float[] matrix = new float[16];
viewport.drawFrame(texture, matrix);
viewport.drawFrame(0L, texture, matrix);
viewport.release();
// The child should have experienced the whole lifecycle.
@ -154,7 +153,7 @@ public class MultiFilterTest extends BaseEglTest {
verify(filter, atLeastOnce()).getFragmentShader();
verify(filter, atLeastOnce()).setSize(anyInt(), anyInt());
verify(filter, times(1)).onCreate(anyInt());
verify(filter, times(1)).draw(matrix);
verify(filter, times(1)).draw(0L, matrix);
verify(filter, times(1)).onDestroy();
}
@ -181,7 +180,7 @@ public class MultiFilterTest extends BaseEglTest {
assertTrue(result[0] != 0);
return null;
}
}).when(filter1).draw(matrix);
}).when(filter1).draw(0L, matrix);
// Note: second filter is drawn with the identity matrix!
doAnswer(new Answer() {
@ -198,16 +197,16 @@ public class MultiFilterTest extends BaseEglTest {
return null;
}
}).when(filter2).draw(any(float[].class));
}).when(filter2).draw(eq(0L), any(float[].class));
EglViewport viewport = new EglViewport(multiFilter);
int texture = viewport.createTexture();
viewport.drawFrame(texture, matrix);
viewport.drawFrame(0L, texture, matrix);
viewport.release();
// Verify that both are drawn.
verify(filter1, times(1)).draw(matrix);
verify(filter2, times(1)).draw(any(float[].class));
verify(filter1, times(1)).draw(0L, matrix);
verify(filter2, times(1)).draw(eq(0L), any(float[].class));
}
}

@ -6,17 +6,13 @@ import android.media.MediaCodecInfo;
import androidx.annotation.NonNull;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import androidx.test.filters.MediumTest;
import androidx.test.rule.ActivityTestRule;
import com.otaliastudios.cameraview.BaseTest;
import com.otaliastudios.cameraview.TestActivity;
import com.otaliastudios.cameraview.controls.Grid;
import com.otaliastudios.cameraview.size.AspectRatio;
import com.otaliastudios.cameraview.size.Size;
import org.junit.After;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.runner.RunWith;
@ -30,8 +26,6 @@ import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertSame;
import static org.junit.Assert.assertTrue;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
@RunWith(AndroidJUnit4.class)
@MediumTest
@ -55,10 +49,11 @@ public class DeviceEncodersTest extends BaseTest {
@NonNull
private DeviceEncoders create() {
return new DeviceEncoders(
return new DeviceEncoders(DeviceEncoders.MODE_RESPECT_ORDER,
"video/avc",
"audio/mp4a-latm",
DeviceEncoders.MODE_TAKE_FIRST);
0,
0);
}
@Test
@ -87,7 +82,7 @@ public class DeviceEncodersTest extends BaseTest {
if (DeviceEncoders.ENABLED) {
List<MediaCodecInfo> allEncoders = deviceEncoders.getDeviceEncoders();
MediaCodecInfo encoder = deviceEncoders.findDeviceEncoder(allEncoders,
"video/avc", DeviceEncoders.MODE_TAKE_FIRST);
"video/avc", DeviceEncoders.MODE_RESPECT_ORDER, 0);
assertNotNull(encoder);
List<String> encoderTypes = Arrays.asList(encoder.getSupportedTypes());
assertTrue(encoderTypes.contains("video/avc"));

@ -81,8 +81,9 @@ public class OverlayDrawerTest extends BaseEglTest {
OverlayDrawer drawer = new OverlayDrawer(mock(Overlay.class), new Size(WIDTH, HEIGHT));
drawer.mViewport = spy(drawer.mViewport);
drawer.draw(Overlay.Target.PICTURE_SNAPSHOT);
drawer.render();
drawer.render(0L);
verify(drawer.mViewport, times(1)).drawFrame(
0L,
drawer.mTextureId,
drawer.getTransform()
);

@ -10,7 +10,7 @@ import androidx.annotation.UiThread;
public interface BitmapCallback {
/**
* Notifies that the bitmap was succesfully decoded.
* Notifies that the bitmap was successfully decoded.
* This is run on the UI thread.
* Returns a null object if a {@link OutOfMemoryError} was encountered.
*

@ -48,8 +48,9 @@ public abstract class CameraListener {
* Notifies that a picture previously captured with {@link CameraView#takePicture()}
* or {@link CameraView#takePictureSnapshot()} is ready to be shown or saved to file.
*
* If planning to show a bitmap, you can use {@link PictureResult#toBitmap(int, int, BitmapCallback)}
* to decode the byte array taking care about orientation and threading.
* If planning to show a bitmap, you can use
* {@link PictureResult#toBitmap(int, int, BitmapCallback)} to decode the byte array
* taking care about orientation and threading.
*
* @param result captured picture
*/
@ -114,7 +115,9 @@ public abstract class CameraListener {
* @param fingers finger positions that caused the event, null if not caused by touch
*/
@UiThread
public void onZoomChanged(float newValue, @NonNull float[] bounds, @Nullable PointF[] fingers) { }
public void onZoomChanged(float newValue,
@NonNull float[] bounds,
@Nullable PointF[] fingers) { }
/**
@ -126,7 +129,9 @@ public abstract class CameraListener {
* @param fingers finger positions that caused the event, null if not caused by touch
*/
@UiThread
public void onExposureCorrectionChanged(float newValue, @NonNull float[] bounds, @Nullable PointF[] fingers) { }
public void onExposureCorrectionChanged(float newValue,
@NonNull float[] bounds,
@Nullable PointF[] fingers) { }
/**

@ -49,7 +49,10 @@ public final class CameraLogger {
* @param message the log message
* @param throwable an optional throwable
*/
void log(@LogLevel int level, @NonNull String tag, @NonNull String message, @Nullable Throwable throwable);
void log(@LogLevel int level,
@NonNull String tag,
@NonNull String message,
@Nullable Throwable throwable);
}
@VisibleForTesting static String lastMessage;
@ -60,7 +63,10 @@ public final class CameraLogger {
@VisibleForTesting static Logger sAndroidLogger = new Logger() {
@Override
public void log(int level, @NonNull String tag, @NonNull String message, @Nullable Throwable throwable) {
public void log(int level,
@NonNull String tag,
@NonNull String message,
@Nullable Throwable throwable) {
switch (level) {
case LEVEL_VERBOSE: Log.v(tag, message, throwable); break;
case LEVEL_INFO: Log.i(tag, message, throwable); break;

@ -107,7 +107,8 @@ public class CameraOptions {
zoomSupported = params.isZoomSupported();
// autofocus
autoFocusSupported = params.getSupportedFocusModes().contains(Camera.Parameters.FOCUS_MODE_AUTO);
autoFocusSupported = params.getSupportedFocusModes()
.contains(Camera.Parameters.FOCUS_MODE_AUTO);
// Exposure correction
float step = params.getExposureCompensationStep();
@ -135,7 +136,8 @@ public class CameraOptions {
supportedVideoAspectRatio.add(AspectRatio.of(width, height));
}
} else {
// StackOverflow threads seems to agree that if getSupportedVideoSizes is null, previews can be used.
// StackOverflow threads seems to agree that if getSupportedVideoSizes is null,
// previews can be used.
List<Camera.Size> fallback = params.getSupportedPreviewSizes();
for (Camera.Size size : fallback) {
int width = flipSizes ? size.height : size.width;
@ -148,13 +150,16 @@ public class CameraOptions {
// Camera2Engine constructor.
@RequiresApi(Build.VERSION_CODES.LOLLIPOP)
public CameraOptions(@NonNull CameraManager manager, @NonNull String cameraId, boolean flipSizes) throws CameraAccessException {
public CameraOptions(@NonNull CameraManager manager,
@NonNull String cameraId,
boolean flipSizes) throws CameraAccessException {
Camera2Mapper mapper = Camera2Mapper.get();
CameraCharacteristics cameraCharacteristics = manager.getCameraCharacteristics(cameraId);
// Facing
for (String cameraId1 : manager.getCameraIdList()) {
CameraCharacteristics cameraCharacteristics1 = manager.getCameraCharacteristics(cameraId1);
CameraCharacteristics cameraCharacteristics1 = manager
.getCameraCharacteristics(cameraId1);
Integer cameraFacing = cameraCharacteristics1.get(CameraCharacteristics.LENS_FACING);
if (cameraFacing != null) {
Facing value = mapper.unmapFacing(cameraFacing);
@ -163,7 +168,8 @@ public class CameraOptions {
}
// WB
int[] awbModes = cameraCharacteristics.get(CameraCharacteristics.CONTROL_AWB_AVAILABLE_MODES);
int[] awbModes = cameraCharacteristics.get(
CameraCharacteristics.CONTROL_AWB_AVAILABLE_MODES);
//noinspection ConstantConditions
for (int awbMode : awbModes) {
WhiteBalance value = mapper.unmapWhiteBalance(awbMode);
@ -174,7 +180,8 @@ public class CameraOptions {
supportedFlash.add(Flash.OFF);
Boolean hasFlash = cameraCharacteristics.get(CameraCharacteristics.FLASH_INFO_AVAILABLE);
if (hasFlash != null && hasFlash) {
int[] aeModes = cameraCharacteristics.get(CameraCharacteristics.CONTROL_AE_AVAILABLE_MODES);
int[] aeModes = cameraCharacteristics.get(
CameraCharacteristics.CONTROL_AE_AVAILABLE_MODES);
//noinspection ConstantConditions
for (int aeMode : aeModes) {
Set<Flash> flashes = mapper.unmapFlash(aeMode);
@ -184,7 +191,8 @@ public class CameraOptions {
// HDR
supportedHdr.add(Hdr.OFF);
int[] sceneModes = cameraCharacteristics.get(CameraCharacteristics.CONTROL_AVAILABLE_SCENE_MODES);
int[] sceneModes = cameraCharacteristics.get(
CameraCharacteristics.CONTROL_AVAILABLE_SCENE_MODES);
//noinspection ConstantConditions
for (int sceneMode : sceneModes) {
Hdr value = mapper.unmapHdr(sceneMode);
@ -192,7 +200,8 @@ public class CameraOptions {
}
// Zoom
Float maxZoom = cameraCharacteristics.get(CameraCharacteristics.SCALER_AVAILABLE_MAX_DIGITAL_ZOOM);
Float maxZoom = cameraCharacteristics.get(
CameraCharacteristics.SCALER_AVAILABLE_MAX_DIGITAL_ZOOM);
if(maxZoom != null) {
zoomSupported = maxZoom > 1;
}
@ -205,24 +214,31 @@ public class CameraOptions {
// What really matters in my opinion is the availability of regions.
Integer afRegions = cameraCharacteristics.get(CameraCharacteristics.CONTROL_MAX_REGIONS_AF);
Integer aeRegions = cameraCharacteristics.get(CameraCharacteristics.CONTROL_MAX_REGIONS_AE);
Integer awbRegions = cameraCharacteristics.get(CameraCharacteristics.CONTROL_MAX_REGIONS_AWB);
Integer awbRegions = cameraCharacteristics.get(
CameraCharacteristics.CONTROL_MAX_REGIONS_AWB);
autoFocusSupported = (afRegions != null && afRegions > 0)
|| (aeRegions != null && aeRegions > 0)
|| (awbRegions != null && awbRegions > 0);
// Exposure correction
Range<Integer> exposureRange = cameraCharacteristics.get(CameraCharacteristics.CONTROL_AE_COMPENSATION_RANGE);
Rational exposureStep = cameraCharacteristics.get(CameraCharacteristics.CONTROL_AE_COMPENSATION_STEP);
Range<Integer> exposureRange = cameraCharacteristics.get(
CameraCharacteristics.CONTROL_AE_COMPENSATION_RANGE);
Rational exposureStep = cameraCharacteristics.get(
CameraCharacteristics.CONTROL_AE_COMPENSATION_STEP);
if (exposureRange != null && exposureStep != null && exposureStep.floatValue() != 0) {
exposureCorrectionMinValue = exposureRange.getLower() / exposureStep.floatValue();
exposureCorrectionMaxValue = exposureRange.getUpper() / exposureStep.floatValue();
}
exposureCorrectionSupported = exposureCorrectionMinValue != 0 && exposureCorrectionMaxValue != 0;
exposureCorrectionSupported = exposureCorrectionMinValue != 0
&& exposureCorrectionMaxValue != 0;
// Picture Sizes
StreamConfigurationMap streamMap = cameraCharacteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
if (streamMap == null) throw new RuntimeException("StreamConfigurationMap is null. Should not happen.");
StreamConfigurationMap streamMap = cameraCharacteristics.get(
CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
if (streamMap == null) {
throw new RuntimeException("StreamConfigurationMap is null. Should not happen.");
}
android.util.Size[] psizes = streamMap.getOutputSizes(ImageFormat.JPEG);
for (android.util.Size size : psizes) {
int width = flipSizes ? size.getHeight() : size.getWidth();
@ -238,7 +254,8 @@ public class CameraOptions {
Size videoMaxSize = new Size(profile.videoFrameWidth, profile.videoFrameHeight);
android.util.Size[] vsizes = streamMap.getOutputSizes(MediaRecorder.class);
for (android.util.Size size : vsizes) {
if (size.getWidth() <= videoMaxSize.getWidth() && size.getHeight() <= videoMaxSize.getHeight()) {
if (size.getWidth() <= videoMaxSize.getWidth()
&& size.getHeight() <= videoMaxSize.getHeight()) {
int width = flipSizes ? size.getHeight() : size.getWidth();
int height = flipSizes ? size.getWidth() : size.getHeight();
supportedVideoSizes.add(new Size(width, height));

@ -108,7 +108,9 @@ public class CameraUtils {
* @param callback a callback
*/
@SuppressWarnings("WeakerAccess")
public static void writeToFile(@NonNull final byte[] data, @NonNull final File file, @NonNull final FileCallback callback) {
public static void writeToFile(@NonNull final byte[] data,
@NonNull final File file,
@NonNull final FileCallback callback) {
final Handler ui = new Handler();
WorkerHandler.execute(new Runnable() {
@Override
@ -149,7 +151,8 @@ public class CameraUtils {
* @param callback a callback to be notified
*/
@SuppressWarnings("WeakerAccess")
public static void decodeBitmap(@NonNull final byte[] source, @NonNull final BitmapCallback callback) {
public static void decodeBitmap(@NonNull final byte[] source,
@NonNull final BitmapCallback callback) {
decodeBitmap(source, Integer.MAX_VALUE, Integer.MAX_VALUE, callback);
}
@ -167,7 +170,10 @@ public class CameraUtils {
* @param callback a callback to be notified
*/
@SuppressWarnings("WeakerAccess")
public static void decodeBitmap(@NonNull final byte[] source, final int maxWidth, final int maxHeight, @NonNull final BitmapCallback callback) {
public static void decodeBitmap(@NonNull final byte[] source,
final int maxWidth,
final int maxHeight,
@NonNull final BitmapCallback callback) {
decodeBitmap(source, maxWidth, maxHeight, new BitmapFactory.Options(), callback);
}
@ -186,12 +192,20 @@ public class CameraUtils {
* @param callback a callback to be notified
*/
@SuppressWarnings("WeakerAccess")
public static void decodeBitmap(@NonNull final byte[] source, final int maxWidth, final int maxHeight, @NonNull final BitmapFactory.Options options, @NonNull final BitmapCallback callback) {
public static void decodeBitmap(@NonNull final byte[] source,
final int maxWidth,
final int maxHeight,
@NonNull final BitmapFactory.Options options,
@NonNull final BitmapCallback callback) {
decodeBitmap(source, maxWidth, maxHeight, options, -1, callback);
}
@SuppressWarnings("WeakerAccess")
static void decodeBitmap(@NonNull final byte[] source, final int maxWidth, final int maxHeight, @NonNull final BitmapFactory.Options options, final int rotation, @NonNull final BitmapCallback callback) {
static void decodeBitmap(@NonNull final byte[] source,
final int maxWidth,
final int maxHeight,
@NonNull final BitmapFactory.Options options,
final int rotation,
@NonNull final BitmapCallback callback) {
final Handler ui = new Handler();
WorkerHandler.execute(new Runnable() {
@Override
@ -239,10 +253,13 @@ public class CameraUtils {
* @param options the options to be passed to decodeByteArray
* @return decoded bitmap or null if error is encountered
*/
@SuppressWarnings({"SuspiciousNameCombination", "WeakerAccess"})
@SuppressWarnings("WeakerAccess")
@Nullable
@WorkerThread
public static Bitmap decodeBitmap(@NonNull byte[] source, int maxWidth, int maxHeight, @NonNull BitmapFactory.Options options) {
public static Bitmap decodeBitmap(@NonNull byte[] source,
int maxWidth,
int maxHeight,
@NonNull BitmapFactory.Options options) {
return decodeBitmap(source, maxWidth, maxHeight, options, -1);
}
@ -250,7 +267,11 @@ public class CameraUtils {
// Ignores flipping, but it should be super rare.
@SuppressWarnings("TryFinallyCanBeTryWithResources")
@Nullable
private static Bitmap decodeBitmap(@NonNull byte[] source, int maxWidth, int maxHeight, @NonNull BitmapFactory.Options options, int rotation) {
private static Bitmap decodeBitmap(@NonNull byte[] source,
int maxWidth,
int maxHeight,
@NonNull BitmapFactory.Options options,
int rotation) {
if (maxWidth <= 0) maxWidth = Integer.MAX_VALUE;
if (maxHeight <= 0) maxHeight = Integer.MAX_VALUE;
int orientation;
@ -261,7 +282,8 @@ public class CameraUtils {
// http://sylvana.net/jpegcrop/exif_orientation.html
stream = new ByteArrayInputStream(source);
ExifInterface exif = new ExifInterface(stream);
int exifOrientation = exif.getAttributeInt(ExifInterface.TAG_ORIENTATION, ExifInterface.ORIENTATION_NORMAL);
int exifOrientation = exif.getAttributeInt(ExifInterface.TAG_ORIENTATION,
ExifInterface.ORIENTATION_NORMAL);
orientation = ExifHelper.readExifOrientation(exifOrientation);
flip = exifOrientation == ExifInterface.ORIENTATION_FLIP_HORIZONTAL ||
exifOrientation == ExifInterface.ORIENTATION_FLIP_VERTICAL ||
@ -310,7 +332,8 @@ public class CameraUtils {
Matrix matrix = new Matrix();
matrix.setRotate(orientation);
Bitmap temp = bitmap;
bitmap = Bitmap.createBitmap(bitmap, 0, 0, bitmap.getWidth(), bitmap.getHeight(), matrix, true);
bitmap = Bitmap.createBitmap(bitmap, 0, 0, bitmap.getWidth(),
bitmap.getHeight(), matrix, true);
temp.recycle();
}
} catch (OutOfMemoryError e) {

@ -167,25 +167,36 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
if (mInEditor) return;
setWillNotDraw(false);
TypedArray a = context.getTheme().obtainStyledAttributes(attrs, R.styleable.CameraView, 0, 0);
TypedArray a = context.getTheme().obtainStyledAttributes(attrs, R.styleable.CameraView,
0, 0);
ControlParser controls = new ControlParser(context, a);
// Self managed
boolean playSounds = a.getBoolean(R.styleable.CameraView_cameraPlaySounds, DEFAULT_PLAY_SOUNDS);
boolean useDeviceOrientation = a.getBoolean(R.styleable.CameraView_cameraUseDeviceOrientation, DEFAULT_USE_DEVICE_ORIENTATION);
boolean playSounds = a.getBoolean(R.styleable.CameraView_cameraPlaySounds,
DEFAULT_PLAY_SOUNDS);
boolean useDeviceOrientation = a.getBoolean(
R.styleable.CameraView_cameraUseDeviceOrientation, DEFAULT_USE_DEVICE_ORIENTATION);
mExperimental = a.getBoolean(R.styleable.CameraView_cameraExperimental, false);
mPreview = controls.getPreview();
mEngine = controls.getEngine();
// Camera engine params
int gridColor = a.getColor(R.styleable.CameraView_cameraGridColor, GridLinesLayout.DEFAULT_COLOR);
long videoMaxSize = (long) a.getFloat(R.styleable.CameraView_cameraVideoMaxSize, 0);
int videoMaxDuration = a.getInteger(R.styleable.CameraView_cameraVideoMaxDuration, 0);
int gridColor = a.getColor(R.styleable.CameraView_cameraGridColor,
GridLinesLayout.DEFAULT_COLOR);
long videoMaxSize = (long) a.getFloat(R.styleable.CameraView_cameraVideoMaxSize,
0);
int videoMaxDuration = a.getInteger(R.styleable.CameraView_cameraVideoMaxDuration,
0);
int videoBitRate = a.getInteger(R.styleable.CameraView_cameraVideoBitRate, 0);
int audioBitRate = a.getInteger(R.styleable.CameraView_cameraAudioBitRate, 0);
long autoFocusResetDelay = (long) a.getInteger(R.styleable.CameraView_cameraAutoFocusResetDelay, (int) DEFAULT_AUTOFOCUS_RESET_DELAY_MILLIS);
boolean pictureMetering = a.getBoolean(R.styleable.CameraView_cameraPictureMetering, DEFAULT_PICTURE_METERING);
boolean pictureSnapshotMetering = a.getBoolean(R.styleable.CameraView_cameraPictureSnapshotMetering, DEFAULT_PICTURE_SNAPSHOT_METERING);
long autoFocusResetDelay = (long) a.getInteger(
R.styleable.CameraView_cameraAutoFocusResetDelay,
(int) DEFAULT_AUTOFOCUS_RESET_DELAY_MILLIS);
boolean pictureMetering = a.getBoolean(R.styleable.CameraView_cameraPictureMetering,
DEFAULT_PICTURE_METERING);
boolean pictureSnapshotMetering = a.getBoolean(
R.styleable.CameraView_cameraPictureSnapshotMetering,
DEFAULT_PICTURE_SNAPSHOT_METERING);
// Size selectors and gestures
SizeSelectorParser sizeSelectors = new SizeSelectorParser(a);
@ -265,7 +276,8 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
private void doInstantiateEngine() {
LOG.w("doInstantiateEngine:", "instantiating. engine:", mEngine);
mCameraEngine = instantiateCameraEngine(mEngine, mCameraCallbacks);
LOG.w("doInstantiateEngine:", "instantiated. engine:", mCameraEngine.getClass().getSimpleName());
LOG.w("doInstantiateEngine:", "instantiated. engine:",
mCameraEngine.getClass().getSimpleName());
mCameraEngine.setOverlay(mOverlayLayout);
}
@ -278,7 +290,8 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
void doInstantiatePreview() {
LOG.w("doInstantiateEngine:", "instantiating. preview:", mPreview);
mCameraPreview = instantiatePreview(mPreview, getContext(), this);
LOG.w("doInstantiateEngine:", "instantiated. preview:", mCameraPreview.getClass().getSimpleName());
LOG.w("doInstantiateEngine:", "instantiated. preview:",
mCameraPreview.getClass().getSimpleName());
mCameraEngine.setPreview(mCameraPreview);
if (mPendingFilter != null) {
setFilter(mPendingFilter);
@ -294,8 +307,11 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
* @return the engine
*/
@NonNull
protected CameraEngine instantiateCameraEngine(@NonNull Engine engine, @NonNull CameraEngine.Callback callback) {
if (mExperimental && engine == Engine.CAMERA2 && Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
protected CameraEngine instantiateCameraEngine(@NonNull Engine engine,
@NonNull CameraEngine.Callback callback) {
if (mExperimental
&& engine == Engine.CAMERA2
&& Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
return new Camera2Engine(callback);
} else {
mEngine = Engine.CAMERA1;
@ -312,7 +328,9 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
* @return the preview
*/
@NonNull
protected CameraPreview instantiatePreview(@NonNull Preview preview, @NonNull Context context, @NonNull ViewGroup container) {
protected CameraPreview instantiatePreview(@NonNull Preview preview,
@NonNull Context context,
@NonNull ViewGroup container) {
switch (preview) {
case SURFACE:
return new SurfaceCameraPreview(context, container);
@ -414,13 +432,15 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
if (heightMode == AT_MOST && lp.height == MATCH_PARENT) heightMode = EXACTLY;
}
LOG.i("onMeasure:", "requested dimensions are", "(" + widthValue + "[" + ms(widthMode) + "]x" +
heightValue + "[" + ms(heightMode) + "])");
LOG.i("onMeasure:", "previewSize is", "(" + previewWidth + "x" + previewHeight + ")");
LOG.i("onMeasure:", "requested dimensions are (" + widthValue + "[" + ms(widthMode)
+ "]x" + heightValue + "[" + ms(heightMode) + "])");
LOG.i("onMeasure:", "previewSize is", "(" + previewWidth + "x"
+ previewHeight + ")");
// (1) If we have fixed dimensions (either 300dp or MATCH_PARENT), there's nothing we should do,
// other than respect it. The preview will eventually be cropped at the sides (by PreviewImpl scaling)
// except the case in which these fixed dimensions manage to fit exactly the preview aspect ratio.
// (1) If we have fixed dimensions (either 300dp or MATCH_PARENT), there's nothing we
// should do, other than respect it. The preview will eventually be cropped at the sides
// (by Preview scaling) except the case in which these fixed dimensions manage to fit
// exactly the preview aspect ratio.
if (widthMode == EXACTLY && heightMode == EXACTLY) {
LOG.i("onMeasure:", "both are MATCH_PARENT or fixed value. We adapt.",
"This means CROP_CENTER.", "(" + widthValue + "x" + heightValue + ")");
@ -440,8 +460,8 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
return;
}
// It's sure now that at least one dimension can be determined (either because EXACTLY or AT_MOST).
// This starts to seem a pleasant situation.
// It's sure now that at least one dimension can be determined (either because EXACTLY
// or AT_MOST). This starts to seem a pleasant situation.
// (3) If one of the dimension is completely free (e.g. in a scrollable container),
// take the other and fit the ratio.
@ -457,16 +477,17 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
width = widthValue;
height = Math.round(width * ratio);
}
LOG.i("onMeasure:", "one dimension was free, we adapted it to fit the aspect ratio.",
LOG.i("onMeasure:", "one dimension was free, we adapted it to fit the ratio.",
"(" + width + "x" + height + ")");
super.onMeasure(MeasureSpec.makeMeasureSpec(width, EXACTLY),
MeasureSpec.makeMeasureSpec(height, EXACTLY));
return;
}
// (4) At this point both dimensions are either AT_MOST-AT_MOST, EXACTLY-AT_MOST or AT_MOST-EXACTLY.
// Let's manage this sanely. If only one is EXACTLY, we can TRY to fit the aspect ratio,
// but it is not guaranteed to succeed. It depends on the AT_MOST value of the other dimensions.
// (4) At this point both dimensions are either AT_MOST-AT_MOST, EXACTLY-AT_MOST or
// AT_MOST-EXACTLY. Let's manage this sanely. If only one is EXACTLY, we can TRY to fit
// the aspect ratio, but it is not guaranteed to succeed. It depends on the AT_MOST
// value of the other dimensions.
if (widthMode == EXACTLY || heightMode == EXACTLY) {
boolean freeWidth = widthMode == AT_MOST;
int height, width;
@ -724,8 +745,10 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
boolean needsCamera = true;
boolean needsAudio = audio == Audio.ON || audio == Audio.MONO || audio == Audio.STEREO;
needsCamera = needsCamera && c.checkSelfPermission(Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED;
needsAudio = needsAudio && c.checkSelfPermission(Manifest.permission.RECORD_AUDIO) != PackageManager.PERMISSION_GRANTED;
needsCamera = needsCamera && c.checkSelfPermission(Manifest.permission.CAMERA)
!= PackageManager.PERMISSION_GRANTED;
needsAudio = needsAudio && c.checkSelfPermission(Manifest.permission.RECORD_AUDIO)
!= PackageManager.PERMISSION_GRANTED;
if (needsCamera || needsAudio) {
requestPermissions(needsCamera, needsAudio);
@ -742,13 +765,14 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
if (audio == Audio.ON || audio == Audio.MONO || audio == Audio.STEREO) {
try {
PackageManager manager = getContext().getPackageManager();
PackageInfo info = manager.getPackageInfo(getContext().getPackageName(), PackageManager.GET_PERMISSIONS);
PackageInfo info = manager.getPackageInfo(getContext().getPackageName(),
PackageManager.GET_PERMISSIONS);
for (String requestedPermission : info.requestedPermissions) {
if (requestedPermission.equals(Manifest.permission.RECORD_AUDIO)) {
return;
}
}
String message = LOG.e("Permission error:", "When audio is enabled (Audio.ON),",
String message = LOG.e("Permission error: when audio is enabled (Audio.ON)" +
" the RECORD_AUDIO permission should be added to the app manifest file.");
throw new IllegalStateException(message);
} catch (PackageManager.NameNotFoundException e) {
@ -1272,8 +1296,12 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
* @param y should be between 0 and getHeight()
*/
public void startAutoFocus(float x, float y) {
if (x < 0 || x > getWidth()) throw new IllegalArgumentException("x should be >= 0 and <= getWidth()");
if (y < 0 || y > getHeight()) throw new IllegalArgumentException("y should be >= 0 and <= getHeight()");
if (x < 0 || x > getWidth()) {
throw new IllegalArgumentException("x should be >= 0 and <= getWidth()");
}
if (y < 0 || y > getHeight()) {
throw new IllegalArgumentException("y should be >= 0 and <= getHeight()");
}
mCameraEngine.startAutoFocus(null, new PointF(x, y));
}
@ -1510,9 +1538,6 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
* This will trigger {@link CameraListener#onPictureTaken(PictureResult)} if a listener
* was registered.
*
* Note that if sessionType is {@link Mode#VIDEO}, this
* might fall back to {@link #takePictureSnapshot()} (that is, we might capture a preview frame).
*
* @see #takePictureSnapshot()
*/
public void takePicture() {
@ -2000,7 +2025,8 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
}
@Override
public void dispatchOnFocusStart(@Nullable final Gesture gesture, @NonNull final PointF point) {
public void dispatchOnFocusStart(@Nullable final Gesture gesture,
@NonNull final PointF point) {
mLogger.i("dispatchOnFocusStart", gesture, point);
mUiHandler.post(new Runnable() {
@Override
@ -2020,7 +2046,8 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
}
@Override
public void dispatchOnFocusEnd(@Nullable final Gesture gesture, final boolean success,
public void dispatchOnFocusEnd(@Nullable final Gesture gesture,
final boolean success,
@NonNull final PointF point) {
mLogger.i("dispatchOnFocusEnd", gesture, success, point);
mUiHandler.post(new Runnable() {
@ -2096,7 +2123,8 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
@Override
public void dispatchFrame(@NonNull final Frame frame) {
// The getTime() below might crash if developers incorrectly release frames asynchronously.
// The getTime() below might crash if developers incorrectly release
// frames asynchronously.
mLogger.v("dispatchFrame:", frame.getTime(), "processors:", mFrameProcessors.size());
if (mFrameProcessors.isEmpty()) {
// Mark as released. This instance will be reused.
@ -2106,7 +2134,8 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
mFrameProcessorsHandler.run(new Runnable() {
@Override
public void run() {
mLogger.v("dispatchFrame: dispatching", frame.getTime(), "to processors.");
mLogger.v("dispatchFrame: dispatching", frame.getTime(),
"to processors.");
for (FrameProcessor processor : mFrameProcessors) {
try {
processor.process(frame);
@ -2173,7 +2202,6 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
return super.generateLayoutParams(attributeSet);
}
// We don't support removeView on overlays for now.
@Override
public void addView(View child, int index, ViewGroup.LayoutParams params) {
if (!mInEditor && mOverlayLayout.isOverlay(params)) {
@ -2183,6 +2211,16 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
}
}
@Override
public void removeView(View view) {
ViewGroup.LayoutParams params = view.getLayoutParams();
if (!mInEditor && params != null && mOverlayLayout.isOverlay(params)) {
mOverlayLayout.removeView(view);
} else {
super.removeView(view);
}
}
//endregion
//region Filters
@ -2198,8 +2236,8 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
* Use {@link NoFilter} to clear the existing filter,
* and take a look at the {@link Filters} class for commonly used filters.
*
* This method will throw an exception if the current preview does not support real-time filters.
* Make sure you use {@link Preview#GL_SURFACE} (the default).
* This method will throw an exception if the current preview does not support real-time
* filters. Make sure you use {@link Preview#GL_SURFACE} (the default).
*
* @see Filters
* @param filter a new filter
@ -2207,20 +2245,32 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
public void setFilter(@NonNull Filter filter) {
if (mCameraPreview == null) {
mPendingFilter = filter;
} else if (!(filter instanceof NoFilter) && !mExperimental) {
throw new RuntimeException("Filters are an experimental features and need the experimental flag set.");
} else if (mCameraPreview instanceof FilterCameraPreview) {
((FilterCameraPreview) mCameraPreview).setFilter(filter);
} else {
throw new RuntimeException("Filters are only supported by the GL_SURFACE preview. Current:" + mPreview);
boolean isNoFilter = filter instanceof NoFilter;
boolean isFilterPreview = mCameraPreview instanceof FilterCameraPreview;
// If not experimental, we only allow NoFilter (called on creation).
if (!isNoFilter && !mExperimental) {
throw new RuntimeException("Filters are an experimental features and" +
" need the experimental flag set.");
}
// If not a filter preview, we only allow NoFilter (called on creation).
if (!isNoFilter && !isFilterPreview) {
throw new RuntimeException("Filters are only supported by the GL_SURFACE preview." +
" Current preview:" + mPreview);
}
// If we have a filter preview, apply.
if (isFilterPreview) {
((FilterCameraPreview) mCameraPreview).setFilter(filter);
}
// No-op: !isFilterPreview && isNoPreview
}
}
/**
* Returns the current real-time filter applied to the camera preview.
*
* This method will throw an exception if the current preview does not support real-time filters.
* Make sure you use {@link Preview#GL_SURFACE} (the default).
* This method will throw an exception if the current preview does not support real-time
* filters. Make sure you use {@link Preview#GL_SURFACE} (the default).
*
* @see #setFilter(Filter)
* @return the current filter
@ -2228,13 +2278,15 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
@NonNull
public Filter getFilter() {
if (!mExperimental) {
throw new RuntimeException("Filters are an experimental features and need the experimental flag set.");
throw new RuntimeException("Filters are an experimental features and need " +
"the experimental flag set.");
} else if (mCameraPreview == null) {
return mPendingFilter;
} else if (mCameraPreview instanceof FilterCameraPreview) {
return ((FilterCameraPreview) mCameraPreview).getCurrentFilter();
} else {
throw new RuntimeException("Filters are only supported by the GL_SURFACE preview. Current:" + mPreview);
throw new RuntimeException("Filters are only supported by the GL_SURFACE preview. " +
"Current:" + mPreview);
}
}

@ -137,7 +137,8 @@ public class PictureResult {
* @param callback a callback to be notified of image decoding
*/
public void toBitmap(int maxWidth, int maxHeight, @NonNull BitmapCallback callback) {
CameraUtils.decodeBitmap(getData(), maxWidth, maxHeight, new BitmapFactory.Options(), rotation, callback);
CameraUtils.decodeBitmap(getData(), maxWidth, maxHeight, new BitmapFactory.Options(),
rotation, callback);
}
/**

@ -185,7 +185,8 @@ public class VideoResult {
/**
* Returns the reason why the recording was stopped.
* @return one of {@link #REASON_USER}, {@link #REASON_MAX_DURATION_REACHED} or {@link #REASON_MAX_SIZE_REACHED}.
* @return one of {@link #REASON_USER}, {@link #REASON_MAX_DURATION_REACHED}
* or {@link #REASON_MAX_SIZE_REACHED}.
*/
public int getTerminationReason() {
return endReason;

@ -25,16 +25,19 @@ public class ControlParser {
private int engine;
public ControlParser(@NonNull Context context, @NonNull TypedArray array) {
this.preview = array.getInteger(R.styleable.CameraView_cameraPreview, Preview.DEFAULT.value());
this.facing = array.getInteger(R.styleable.CameraView_cameraFacing, Facing.DEFAULT(context).value());
this.flash = array.getInteger(R.styleable.CameraView_cameraFlash, Flash.DEFAULT.value());
this.grid = array.getInteger(R.styleable.CameraView_cameraGrid, Grid.DEFAULT.value());
this.whiteBalance = array.getInteger(R.styleable.CameraView_cameraWhiteBalance, WhiteBalance.DEFAULT.value());
this.mode = array.getInteger(R.styleable.CameraView_cameraMode, Mode.DEFAULT.value());
this.hdr = array.getInteger(R.styleable.CameraView_cameraHdr, Hdr.DEFAULT.value());
this.audio = array.getInteger(R.styleable.CameraView_cameraAudio, Audio.DEFAULT.value());
this.videoCodec = array.getInteger(R.styleable.CameraView_cameraVideoCodec, VideoCodec.DEFAULT.value());
this.engine = array.getInteger(R.styleable.CameraView_cameraEngine, Engine.DEFAULT.value());
preview = array.getInteger(R.styleable.CameraView_cameraPreview, Preview.DEFAULT.value());
facing = array.getInteger(R.styleable.CameraView_cameraFacing,
Facing.DEFAULT(context).value());
flash = array.getInteger(R.styleable.CameraView_cameraFlash, Flash.DEFAULT.value());
grid = array.getInteger(R.styleable.CameraView_cameraGrid, Grid.DEFAULT.value());
whiteBalance = array.getInteger(R.styleable.CameraView_cameraWhiteBalance,
WhiteBalance.DEFAULT.value());
mode = array.getInteger(R.styleable.CameraView_cameraMode, Mode.DEFAULT.value());
hdr = array.getInteger(R.styleable.CameraView_cameraHdr, Hdr.DEFAULT.value());
audio = array.getInteger(R.styleable.CameraView_cameraAudio, Audio.DEFAULT.value());
videoCodec = array.getInteger(R.styleable.CameraView_cameraVideoCodec,
VideoCodec.DEFAULT.value());
engine = array.getInteger(R.styleable.CameraView_cameraEngine, Engine.DEFAULT.value());
}
@NonNull

@ -76,7 +76,8 @@ public class Camera1Engine extends CameraEngine implements
public void onError(int error, Camera camera) {
if (error == Camera.CAMERA_ERROR_SERVER_DIED) {
// Looks like this is recoverable.
LOG.w("Recoverable error inside the onError callback.", "CAMERA_ERROR_SERVER_DIED");
LOG.w("Recoverable error inside the onError callback.",
"CAMERA_ERROR_SERVER_DIED");
restart();
return;
}
@ -118,7 +119,10 @@ public class Camera1Engine extends CameraEngine implements
@Override
protected boolean collectCameraInfo(@NonNull Facing facing) {
int internalFacing = mMapper.mapFacing(facing);
LOG.i("collectCameraInfo", "Facing:", facing, "Internal:", internalFacing, "Cameras:", Camera.getNumberOfCameras());
LOG.i("collectCameraInfo",
"Facing:", facing,
"Internal:", internalFacing,
"Cameras:", Camera.getNumberOfCameras());
Camera.CameraInfo cameraInfo = new Camera.CameraInfo();
for (int i = 0, count = Camera.getNumberOfCameras(); i < count; i++) {
Camera.getCameraInfo(i, cameraInfo);
@ -150,10 +154,12 @@ public class Camera1Engine extends CameraEngine implements
// Set parameters that might have been set before the camera was opened.
LOG.i("onStartEngine:", "Applying default parameters.");
Camera.Parameters params = mCamera.getParameters();
mCameraOptions = new CameraOptions(params, getAngles().flip(Reference.SENSOR, Reference.VIEW));
mCameraOptions = new CameraOptions(params, getAngles()
.flip(Reference.SENSOR, Reference.VIEW));
applyAllParameters(params);
mCamera.setParameters(params);
mCamera.setDisplayOrientation(getAngles().offset(Reference.SENSOR, Reference.VIEW, Axis.ABSOLUTE)); // <- not allowed during preview
mCamera.setDisplayOrientation(getAngles().offset(Reference.SENSOR, Reference.VIEW,
Axis.ABSOLUTE)); // <- not allowed during preview
LOG.i("onStartEngine:", "Ended");
return Tasks.forResult(null);
}
@ -194,14 +200,19 @@ public class Camera1Engine extends CameraEngine implements
mPreview.setStreamSize(previewSize.getWidth(), previewSize.getHeight());
Camera.Parameters params = mCamera.getParameters();
params.setPreviewFormat(ImageFormat.NV21); // should be the default, but let's make sure, since YuvImage will only support this & a few others
params.setPreviewSize(mPreviewStreamSize.getWidth(), mPreviewStreamSize.getHeight()); // not allowed during preview
// NV21 should be the default, but let's make sure, since YuvImage will only support this
// and a few others
params.setPreviewFormat(ImageFormat.NV21);
// setPreviewSize is not allowed during preview
params.setPreviewSize(mPreviewStreamSize.getWidth(), mPreviewStreamSize.getHeight());
if (getMode() == Mode.PICTURE) {
params.setPictureSize(mCaptureSize.getWidth(), mCaptureSize.getHeight()); // allowed during preview
// setPictureSize is allowed during preview
params.setPictureSize(mCaptureSize.getWidth(), mCaptureSize.getHeight());
} else {
// mCaptureSize in this case is a video size. The available video sizes are not necessarily
// a subset of the picture sizes, so we can't use the mCaptureSize value: it might crash.
// However, the setPictureSize() passed here is useless : we don't allow HQ pictures in video mode.
// mCaptureSize in this case is a video size. The available video sizes are not
// necessarily a subset of the picture sizes, so we can't use the mCaptureSize value:
// it might crash. However, the setPictureSize() passed here is useless : we don't allow
// HQ pictures in video mode.
// While this might be lifted in the future, for now, just use a picture capture size.
Size pictureSize = computeCaptureSize(Mode.PICTURE);
params.setPictureSize(pictureSize.getWidth(), pictureSize.getHeight());
@ -298,7 +309,8 @@ public class Camera1Engine extends CameraEngine implements
@WorkerThread
@Override
protected void onTakePicture(@NonNull PictureResult.Stub stub, boolean doMetering) {
stub.rotation = getAngles().offset(Reference.SENSOR, Reference.OUTPUT,
Axis.RELATIVE_TO_SENSOR);
stub.size = getPictureSize(Reference.OUTPUT);
mPictureRecorder = new Full1PictureRecorder(stub, Camera1Engine.this, mCamera);
mPictureRecorder.take();
@ -306,10 +318,13 @@ public class Camera1Engine extends CameraEngine implements
@WorkerThread
@Override
protected void onTakePictureSnapshot(@NonNull PictureResult.Stub stub,
@NonNull AspectRatio outputRatio,
boolean doMetering) {
// Not the real size: it will be cropped to match the view ratio
stub.size = getUncroppedSnapshotSize(Reference.OUTPUT);
// Actually it will be rotated and set to 0.
stub.rotation = getAngles().offset(Reference.SENSOR, Reference.OUTPUT, Axis.RELATIVE_TO_SENSOR);
if (mPreview instanceof GlCameraPreview && Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT) {
mPictureRecorder = new SnapshotGlPictureRecorder(stub, this, (GlCameraPreview) mPreview, outputRatio);
} else {
@ -324,8 +339,10 @@ public class Camera1Engine extends CameraEngine implements
@Override
protected void onTakeVideo(@NonNull VideoResult.Stub stub) {
stub.rotation = getAngles().offset(Reference.SENSOR, Reference.OUTPUT,
Axis.RELATIVE_TO_SENSOR);
stub.size = getAngles().flip(Reference.SENSOR, Reference.OUTPUT) ? mCaptureSize.flip()
: mCaptureSize;
// Unlock the camera and start recording.
try {
mCamera.unlock();
@ -342,12 +359,13 @@ public class Camera1Engine extends CameraEngine implements
@SuppressLint("NewApi")
@WorkerThread
@Override
protected void onTakeVideoSnapshot(@NonNull VideoResult.Stub stub,
@NonNull AspectRatio outputRatio) {
if (!(mPreview instanceof GlCameraPreview)) {
throw new IllegalStateException("Video snapshots are only supported with GL_SURFACE.");
}
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.JELLY_BEAN_MR2) {
throw new IllegalStateException("Video snapshots are only supported on API 18+.");
}
GlCameraPreview glPreview = (GlCameraPreview) mPreview;
Size outputSize = getUncroppedSnapshotSize(Reference.OUTPUT);
@ -370,7 +388,8 @@ public class Camera1Engine extends CameraEngine implements
LOG.i("onTakeVideoSnapshot", "rotation:", stub.rotation, "size:", stub.size);
// Start.
mVideoRecorder = new SnapshotVideoRecorder(Camera1Engine.this, glPreview,
getOverlay(), stub.rotation);
mVideoRecorder.start(stub);
}
@ -494,7 +513,8 @@ public class Camera1Engine extends CameraEngine implements
});
}
private boolean applyWhiteBalance(@NonNull Camera.Parameters params,
@NonNull WhiteBalance oldWhiteBalance) {
if (mCameraOptions.supports(mWhiteBalance)) {
params.setWhiteBalance(mMapper.mapWhiteBalance(mWhiteBalance));
return true;
@ -573,7 +593,8 @@ public class Camera1Engine extends CameraEngine implements
if (applyExposureCorrection(params, old)) {
mCamera.setParameters(params);
if (notify) {
mCallback.dispatchOnExposureCorrectionChanged(mExposureCorrectionValue,
bounds, points);
}
}
}
@ -582,7 +603,8 @@ public class Camera1Engine extends CameraEngine implements
});
}
private boolean applyExposureCorrection(@NonNull Camera.Parameters params,
float oldExposureCorrection) {
if (mCameraOptions.isExposureCorrectionSupported()) {
// Just make sure we're inside boundaries.
float max = mCameraOptions.getExposureCorrectionMaxValue();
@ -591,7 +613,8 @@ public class Camera1Engine extends CameraEngine implements
val = val < min ? min : val > max ? max : val; // cap
mExposureCorrectionValue = val;
// Apply.
int indexValue = (int) (mExposureCorrectionValue
/ params.getExposureCompensationStep());
params.setExposureCompensation(indexValue);
return true;
}
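For reference, a small sketch of the conversion above, assuming a device that reports a 0.5 EV compensation step (the step and the EV value below are made-up numbers, not library defaults):
float step = 0.5f;                   // params.getExposureCompensationStep(), device-reported
float cappedEv = 1.5f;               // mExposureCorrectionValue after capping to [min, max]
int index = (int) (cappedEv / step); // 3, the value passed to params.setExposureCompensation()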
@ -692,7 +715,8 @@ public class Camera1Engine extends CameraEngine implements
viewWidthF, viewHeightF, offset);
List<Camera.Area> meteringAreas1 = meteringAreas2.subList(0, 1);
// At this point we are sure that camera supports auto focus... right?
// Look at CameraView.onTouchEvent().
Camera.Parameters params = mCamera.getParameters();
int maxAF = params.getMaxNumFocusAreas();
int maxAE = params.getMaxNumMeteringAreas();
@ -702,8 +726,8 @@ public class Camera1Engine extends CameraEngine implements
mCamera.setParameters(params);
mCallback.dispatchOnFocusStart(gesture, p);
// The auto focus callback is not guaranteed to be called, but we really want it
// to be. So we remove the old runnable if still present and post a new one.
if (mFocusEndRunnable != null) mHandler.remove(mFocusEndRunnable);
mFocusEndRunnable = new Runnable() {
@Override
@ -778,7 +802,11 @@ public class Camera1Engine extends CameraEngine implements
int bottom = (int) Math.min(centerY + delta, 1000);
int left = (int) Math.max(centerX - delta, -1000);
int right = (int) Math.min(centerX + delta, 1000);
LOG.i("focus:", "computeMeteringArea:",
"top:", top,
"left:", left,
"bottom:", bottom,
"right:", right);
return new Rect(left, top, right, bottom);
}
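To illustrate the clamping above, a sketch with made-up inputs (a touch mapped near the top-right corner of the [-1000, 1000] Camera1 metering space):
double centerX = 900, centerY = -900, delta = 300;   // hypothetical values
int top = (int) Math.max(centerY - delta, -1000);    // -1000 (clamped)
int bottom = (int) Math.min(centerY + delta, 1000);  // -600
int left = (int) Math.max(centerX - delta, -1000);   // 600
int right = (int) Math.min(centerX + delta, 1000);   // 1000 (clamped)
Rect area = new Rect(left, top, right, bottom);      // Rect(600, -1000, 1000, -600)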

@ -27,6 +27,7 @@ import android.view.SurfaceHolder;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.annotation.RequiresApi;
import androidx.annotation.VisibleForTesting;
import androidx.annotation.WorkerThread;
import com.google.android.gms.tasks.Task;
@ -81,7 +82,7 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
private static final int FRAME_PROCESSING_FORMAT = ImageFormat.NV21;
private static final int FRAME_PROCESSING_INPUT_FORMAT = ImageFormat.YUV_420_888;
@VisibleForTesting static final long METER_TIMEOUT = 2500;
private final CameraManager mManager;
private String mCameraId;
@ -96,17 +97,19 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
private Size mFrameProcessingSize;
private ImageReader mFrameProcessingReader; // need this or the reader surface is collected
private final WorkerHandler mFrameConversionHandler;
private final Object mFrameProcessingImageLock = new Object();
private Surface mFrameProcessingSurface;
// Preview
private Surface mPreviewStreamSurface;
// Video recording
// When takeVideo is called, we restart the session.
private VideoResult.Stub mFullVideoPendingStub;
// Picture capturing
private ImageReader mPictureReader;
private final boolean mPictureCaptureStopsPreview = false; // could be made configurable at some point
// Actions
private final List<Action> mActions = new ArrayList<>();
@ -135,32 +138,46 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
return value == null ? fallback : value;
}
@SuppressWarnings("DuplicateBranchesInSwitch")
@NonNull
private CameraException createCameraException(@NonNull CameraAccessException exception) {
int reason;
switch (exception.getReason()) {
case CameraAccessException.CAMERA_DISABLED:
case CameraAccessException.CAMERA_IN_USE:
case CameraAccessException.MAX_CAMERAS_IN_USE: {
reason = CameraException.REASON_FAILED_TO_CONNECT;
break;
}
case CameraAccessException.CAMERA_ERROR:
case CameraAccessException.CAMERA_DISCONNECTED: {
reason = CameraException.REASON_DISCONNECTED;
break;
}
default: {
reason = CameraException.REASON_UNKNOWN;
break;
}
}
return new CameraException(exception, reason);
}
@SuppressWarnings("DuplicateBranchesInSwitch")
@NonNull
private CameraException createCameraException(int stateCallbackError) {
int reason;
switch (stateCallbackError) {
case CameraDevice.StateCallback.ERROR_CAMERA_DISABLED: // Device policy
case CameraDevice.StateCallback.ERROR_CAMERA_DEVICE: // Fatal error
case CameraDevice.StateCallback.ERROR_CAMERA_SERVICE: // Fatal error, might have to
// restart the device
case CameraDevice.StateCallback.ERROR_CAMERA_IN_USE:
case CameraDevice.StateCallback.ERROR_MAX_CAMERAS_IN_USE: {
reason = CameraException.REASON_FAILED_TO_CONNECT;
break;
}
default: {
reason = CameraException.REASON_UNKNOWN;
break;
}
}
return new CameraException(reason);
}
@ -173,7 +190,8 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
*/
@SuppressWarnings("UnusedReturnValue")
@NonNull
private CaptureRequest.Builder createRepeatingRequestBuilder(int template)
throws CameraAccessException {
CaptureRequest.Builder oldBuilder = mRepeatingRequestBuilder;
mRepeatingRequestBuilder = mCamera.createCaptureRequest(template);
mRepeatingRequestBuilder.setTag(template);
@ -199,8 +217,7 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
}
/**
* Removes default surfaces from the repeating request builder.
*/
private void removeRepeatingRequestBuilderSurfaces() {
mRepeatingRequestBuilder.removeTarget(mPreviewStreamSurface);
@ -209,17 +226,6 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
}
}
private void applyRepeatingRequestBuilderAsSingle() {
if (getPreviewState() == STATE_STARTED) {
try {
mSession.capture(mRepeatingRequestBuilder.build(),
mRepeatingRequestCallback, null);
} catch (CameraAccessException e) {
throw createCameraException(e);
}
}
}
/**
* Applies the repeating request builder to the preview, assuming we actually have a preview
* running. Can be called after changing parameters to the builder.
@ -228,7 +234,8 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
* it should be set before calling this method, for example by calling
* {@link #createRepeatingRequestBuilder(int)}.
*/
@SuppressWarnings("WeakerAccess")
protected void applyRepeatingRequestBuilder() {
applyRepeatingRequestBuilder(true, CameraException.REASON_DISCONNECTED);
}
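A minimal sketch of the intended call order, based on the contract described in the javadoc above (not verbatim library code; TEMPLATE_PREVIEW is the standard CameraDevice constant):
createRepeatingRequestBuilder(CameraDevice.TEMPLATE_PREVIEW); // (re)create the builder, throws CameraAccessException
mRepeatingRequestBuilder.set(CaptureRequest.CONTROL_AE_LOCK, false); // tweak any parameter
applyRepeatingRequestBuilder(); // push the builder to the running preview session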
@ -284,7 +291,8 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
protected List<Size> getPreviewStreamAvailableSizes() {
try {
CameraCharacteristics characteristics = mManager.getCameraCharacteristics(mCameraId);
StreamConfigurationMap streamMap =
characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
if (streamMap == null) {
throw new RuntimeException("StreamConfigurationMap is null. Should not happen.");
}
@ -309,7 +317,7 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
}
@Override
protected final boolean collectCameraInfo(@NonNull Facing facing) {
int internalFacing = mMapper.mapFacing(facing);
String[] cameraIds = null;
try {
@ -325,7 +333,8 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
for (String cameraId : cameraIds) {
try {
CameraCharacteristics characteristics = mManager.getCameraCharacteristics(cameraId);
if (internalFacing == readCharacteristic(characteristics,
CameraCharacteristics.LENS_FACING, -99)) {
mCameraId = cameraId;
int sensorOffset = readCharacteristic(characteristics,
CameraCharacteristics.SENSOR_ORIENTATION, 0);
@ -372,12 +381,13 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
@Override
public void onDisconnected(@NonNull CameraDevice camera) {
// Not sure if this is called INSTEAD of onOpened() or can be called after
// as well. However, using trySetException should address this problem -
// it will only trigger if the task has no result.
//
// Docs say to release this camera instance, however, since we throw an
// unrecoverable CameraException, this will trigger a stop() through the
// exception handler.
task.trySetException(new CameraException(CameraException.REASON_DISCONNECTED));
}
@ -463,8 +473,11 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
// 4. FRAME PROCESSING
if (hasFrameProcessors()) {
// Choose the size.
StreamConfigurationMap streamMap = mCameraCharacteristics
.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
if (streamMap == null) {
throw new RuntimeException("StreamConfigurationMap is null. Should not happen.");
}
android.util.Size[] aSizes = streamMap.getOutputSizes(FRAME_PROCESSING_INPUT_FORMAT);
List<Size> sizes = new ArrayList<>();
for (android.util.Size aSize : aSizes) {
@ -479,7 +492,8 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
mFrameProcessingSize.getHeight(),
FRAME_PROCESSING_INPUT_FORMAT,
2);
mFrameProcessingReader.setOnImageAvailableListener(this,
mFrameConversionHandler.getHandler());
mFrameProcessingSurface = mFrameProcessingReader.getSurface();
outputSurfaces.add(mFrameProcessingSurface);
} else {
@ -500,7 +514,7 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
@Override
public void onConfigureFailed(@NonNull CameraCaptureSession session) {
// This SHOULD be a library error so we throw a RuntimeException.
String message = LOG.e("onConfigureFailed! Session", session);
throw new RuntimeException(message);
}
@ -529,13 +543,14 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
LOG.i("onStartPreview", "Starting preview.");
addRepeatingRequestBuilderSurfaces();
applyRepeatingRequestBuilder(false,
CameraException.REASON_FAILED_TO_START_PREVIEW);
LOG.i("onStartPreview", "Started preview.");
// Start delayed video if needed.
if (mFullVideoPendingStub != null) {
// Do not call takeVideo/onTakeVideo. It will reset some stub parameters that
// the recorder sets. Also we are posting so that doTakeVideo sees a started preview.
LOG.i("onStartPreview", "Posting doTakeVideo call.");
final VideoResult.Stub stub = mFullVideoPendingStub;
mFullVideoPendingStub = null;
@ -594,7 +609,12 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
mCaptureSize = null;
mFrameProcessingSize = null;
if (mFrameProcessingReader != null) {
synchronized (mFrameProcessingImageLock) {
// This call synchronously releases all Images and their underlying properties.
// This can cause a segmentation fault while converting the Image to NV21.
// So we use this lock for the two operations.
mFrameProcessingReader.close();
}
mFrameProcessingReader = null;
}
if (mPictureReader != null) {
@ -611,7 +631,6 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
@NonNull
@Override
protected Task<Void> onStopEngine() {
LOG.i("onStopEngine:", "About to clean up.");
try {
LOG.i("onStopEngine:", "Clean up.", "Releasing camera.");
mCamera.close();
@ -620,6 +639,16 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
LOG.w("onStopEngine:", "Clean up.", "Exception while releasing camera.", e);
}
mCamera = null;
// After engine is stopping, the repeating request builder will be null,
// so the ActionHolder.getBuilder() contract would be broken. Same for characteristics.
// This can cause crashes if some ongoing Action queries the holder. So we abort them.
LOG.i("onStopEngine:", "Aborting actions.");
for (Action action : mActions) {
action.abort(this);
}
mCameraCharacteristics = null;
mCameraOptions = null;
mVideoRecorder = null;
mRepeatingRequestBuilder = null;
@ -633,7 +662,9 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
@WorkerThread
@Override
protected void onTakePictureSnapshot(@NonNull final PictureResult.Stub stub,
@NonNull final AspectRatio outputRatio,
boolean doMetering) {
if (doMetering) {
LOG.i("onTakePictureSnapshot:", "doMetering is true. Delaying.");
Action action = Actions.timeout(METER_TIMEOUT, createMeterAction(null));
@ -653,7 +684,8 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
// stub.size is not the real size: it will be cropped to the given ratio
// stub.rotation will be set to 0 - we rotate the texture instead.
stub.size = getUncroppedSnapshotSize(Reference.OUTPUT);
stub.rotation = getAngles().offset(Reference.SENSOR, Reference.OUTPUT,
Axis.RELATIVE_TO_SENSOR);
mPictureRecorder = new Snapshot2PictureRecorder(stub, this,
(GlCameraPreview) mPreview, outputRatio);
mPictureRecorder.take();
@ -674,21 +706,24 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
action.start(this);
} else {
LOG.i("onTakePicture:", "doMetering is false. Performing.");
stub.rotation = getAngles().offset(Reference.SENSOR, Reference.OUTPUT,
Axis.RELATIVE_TO_SENSOR);
stub.size = getPictureSize(Reference.OUTPUT);
try {
if (mPictureCaptureStopsPreview) {
// These two are present in official samples and are probably meant to
// speed things up? But from my tests, they actually make everything slower.
// So this is disabled by default with a boolean flag. Maybe in the future
// we can make this configurable as some people might want to stop the preview
// while picture is being taken even if it increases the latency.
mSession.stopRepeating();
mSession.abortCaptures();
}
CaptureRequest.Builder builder
= mCamera.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
applyAllParameters(builder, mRepeatingRequestBuilder);
mPictureRecorder = new Full2PictureRecorder(stub, this, builder,
mPictureReader);
mPictureRecorder.take();
} catch (CameraAccessException e) {
throw createCameraException(e);
@ -721,8 +756,10 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
@Override
protected void onTakeVideo(@NonNull VideoResult.Stub stub) {
LOG.i("onTakeVideo", "called.");
stub.rotation = getAngles().offset(Reference.SENSOR, Reference.OUTPUT,
Axis.RELATIVE_TO_SENSOR);
stub.size = getAngles().flip(Reference.SENSOR, Reference.OUTPUT) ?
mCaptureSize.flip() : mCaptureSize;
// We must restart the session each time.
// Save the pending data and restart the session.
LOG.w("onTakeVideo", "calling restartBind.");
@ -752,9 +789,10 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
@WorkerThread
@Override
protected void onTakeVideoSnapshot(@NonNull VideoResult.Stub stub,
@NonNull AspectRatio outputRatio) {
if (!(mPreview instanceof GlCameraPreview)) {
throw new IllegalStateException("Video snapshots are only supported with GL_SURFACE.");
}
GlCameraPreview glPreview = (GlCameraPreview) mPreview;
Size outputSize = getUncroppedSnapshotSize(Reference.OUTPUT);
@ -779,18 +817,21 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
// Start.
// The overlay rotation should always be VIEW-OUTPUT, just like Camera1Engine.
int overlayRotation = getAngles().offset(Reference.VIEW, Reference.OUTPUT, Axis.ABSOLUTE);
mVideoRecorder = new SnapshotVideoRecorder(this, glPreview, getOverlay(),
overlayRotation);
mVideoRecorder.start(stub);
}
/**
* When video ends we must stop the recorder and remove the recorder surface from
* camera outputs. This is done in onVideoResult. However, on some devices, order matters.
* If we stop the recorder and AFTER that keep sending camera frames to it, the camera will
* try to fill the recorder's "abandoned" Surface, and on some devices with a poor internal
* implementation (HW_LEVEL_LEGACY) this crashes. So if the conditions are met, we restore
* here. Issue #549.
*/
@Override
public void onVideoRecordingEnd() {
super.onVideoRecordingEnd();
boolean needsIssue549Workaround = (mVideoRecorder instanceof Full2VideoRecorder) ||
(readCharacteristic(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL, -1)
== CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY);
@ -806,7 +847,7 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
}
/**
* Video recorders might change the camera template to {@link CameraDevice#TEMPLATE_RECORD}.
* After the video is taken, we should restore the template preview, which also means that
* we'll remove any extra surface target that was added by the video recorder.
*
@ -846,26 +887,34 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
// We might be in a metering operation, or the old builder might have some special
// metering parameters. Copy these special keys over to the new builder.
// These are the keys changed by metering.Parameters, or by us in applyFocusForMetering.
builder.set(CaptureRequest.CONTROL_AF_REGIONS,
oldBuilder.get(CaptureRequest.CONTROL_AF_REGIONS));
builder.set(CaptureRequest.CONTROL_AE_REGIONS,
oldBuilder.get(CaptureRequest.CONTROL_AE_REGIONS));
builder.set(CaptureRequest.CONTROL_AWB_REGIONS,
oldBuilder.get(CaptureRequest.CONTROL_AWB_REGIONS));
builder.set(CaptureRequest.CONTROL_AF_MODE,
oldBuilder.get(CaptureRequest.CONTROL_AF_MODE));
// Do NOT copy exposure or focus triggers!
}
}
@SuppressWarnings("WeakerAccess")
protected void applyDefaultFocus(@NonNull CaptureRequest.Builder builder) {
int[] modesArray = readCharacteristic(CameraCharacteristics.CONTROL_AF_AVAILABLE_MODES,
new int[]{});
List<Integer> modes = new ArrayList<>();
for (int mode : modesArray) { modes.add(mode); }
if (getMode() == Mode.VIDEO &&
modes.contains(CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_VIDEO)) {
builder.set(CaptureRequest.CONTROL_AF_MODE,
CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_VIDEO);
return;
}
if (modes.contains(CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE)) {
builder.set(CaptureRequest.CONTROL_AF_MODE,
CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
return;
}
@ -882,10 +931,18 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
}
}
/**
* All focus modes support the AF trigger, except OFF and EDOF.
* However, unlike the preview, we'd prefer AUTO to any CONTINUOUS value.
* An AUTO value means that focus is locked unless we run the focus trigger,
* which is what metering does.
*
* @param builder builder
*/
@SuppressWarnings("WeakerAccess")
protected void applyFocusForMetering(@NonNull CaptureRequest.Builder builder) {
int[] modesArray = readCharacteristic(CameraCharacteristics.CONTROL_AF_AVAILABLE_MODES,
new int[]{});
List<Integer> modes = new ArrayList<>();
for (int mode : modesArray) { modes.add(mode); }
if (modes.contains(CaptureRequest.CONTROL_AF_MODE_AUTO)) {
@ -894,12 +951,14 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
}
if (getMode() == Mode.VIDEO &&
modes.contains(CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_VIDEO)) {
builder.set(CaptureRequest.CONTROL_AF_MODE,
CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_VIDEO);
return;
}
if (modes.contains(CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE)) {
builder.set(CaptureRequest.CONTROL_AF_MODE,
CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
//noinspection UnnecessaryReturnStatement
return;
}
@ -916,13 +975,14 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
boolean shouldApply = applyFlash(mRepeatingRequestBuilder, old);
boolean needsWorkaround = getPreviewState() == STATE_STARTED;
if (needsWorkaround) {
// Runtime changes to the flash value are not correctly handled by the
// driver. See https://stackoverflow.com/q/53003383/4288782 for example.
// For this reason, we go back to OFF, capture once, then go to the new one.
mFlash = Flash.OFF;
applyFlash(mRepeatingRequestBuilder, old);
try {
mSession.capture(mRepeatingRequestBuilder.build(), null,
null);
} catch (CameraAccessException e) {
throw createCameraException(e);
}
@ -946,17 +1006,18 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
* - {@link CaptureRequest#CONTROL_AE_MODE_ON_ALWAYS_FLASH}
*
* The API offers a high level control through {@link CaptureRequest#CONTROL_AE_MODE},
* which is what the mapper looks at. It will trigger (if specified) flash only for
* still captures which is exactly what we want.
*
* However, we set CONTROL_AE_MODE to ON/OFF (depending
* on which is available) with both {@link Flash#OFF} and {@link Flash#TORCH}.
*
* When CONTROL_AE_MODE is ON or OFF, the low level control, called
* {@link CaptureRequest#FLASH_MODE}, becomes effective, and that's where we can actually
* distinguish between a turned off flash and a torch flash.
*/
@SuppressWarnings("WeakerAccess")
protected boolean applyFlash(@NonNull CaptureRequest.Builder builder,
@NonNull Flash oldFlash) {
if (mCameraOptions.supports(mFlash)) {
int[] availableAeModesArray = readCharacteristic(
@ -996,7 +1057,8 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
});
}
@SuppressWarnings("WeakerAccess")
protected boolean applyLocation(@NonNull CaptureRequest.Builder builder,
@SuppressWarnings("unused") @Nullable Location oldLocation) {
if (mLocation != null) {
builder.set(CaptureRequest.JPEG_GPS_LOCATION, mLocation);
@ -1021,7 +1083,8 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
});
}
@SuppressWarnings("WeakerAccess")
protected boolean applyWhiteBalance(@NonNull CaptureRequest.Builder builder,
@NonNull WhiteBalance oldWhiteBalance) {
if (mCameraOptions.supports(mWhiteBalance)) {
int whiteBalance = mMapper.mapWhiteBalance(mWhiteBalance);
@ -1049,8 +1112,8 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
});
}
@SuppressWarnings("WeakerAccess")
protected boolean applyHdr(@NonNull CaptureRequest.Builder builder, @NonNull Hdr oldHdr) {
if (mCameraOptions.supports(mHdr)) {
int hdr = mMapper.mapHdr(mHdr);
builder.set(CaptureRequest.CONTROL_SCENE_MODE, hdr);
@ -1080,9 +1143,11 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
});
}
@SuppressWarnings("WeakerAccess")
protected boolean applyZoom(@NonNull CaptureRequest.Builder builder, float oldZoom) {
if (mCameraOptions.isZoomSupported()) {
float maxZoom = readCharacteristic(
CameraCharacteristics.SCALER_AVAILABLE_MAX_DIGITAL_ZOOM, 1F);
// converting 0.0f-1.0f zoom scale to the actual camera digital zoom scale
// (which will be for example, 1.0-10.0)
float calculatedZoom = (mZoomValue * (maxZoom - 1.0f)) + 1.0f;
@ -1096,8 +1161,8 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
@NonNull
private Rect getZoomRect(float zoomLevel, float maxDigitalZoom) {
Rect activeRect = readCharacteristic(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE,
new Rect());
int minW = (int) (activeRect.width() / maxDigitalZoom);
int minH = (int) (activeRect.height() / maxDigitalZoom);
int difW = activeRect.width() - minW;
@ -1107,11 +1172,15 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
// When zoom is maxZoom, we want to return a centered rect with minW and minH
int cropW = (int) (difW * (zoomLevel - 1) / (maxDigitalZoom - 1) / 2F);
int cropH = (int) (difH * (zoomLevel - 1) / (maxDigitalZoom - 1) / 2F);
return new Rect(cropW, cropH, activeRect.width() - cropW,
activeRect.height() - cropH);
}
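A worked example of the mapping in applyZoom() and getZoomRect() above, with a hypothetical 4000x3000 active array, 4x max digital zoom and a UI zoom value of 0.5:
float maxZoom = 4f;                                        // SCALER_AVAILABLE_MAX_DIGITAL_ZOOM (assumed)
float calculatedZoom = (0.5f * (maxZoom - 1f)) + 1f;       // 2.5
int minW = (int) (4000 / maxZoom), minH = (int) (3000 / maxZoom);     // 1000 x 750 at max zoom
int difW = 4000 - minW, difH = 3000 - minH;                // 3000, 2250
int cropW = (int) (difW * (calculatedZoom - 1) / (maxZoom - 1) / 2F); // 750
int cropH = (int) (difH * (calculatedZoom - 1) / (maxZoom - 1) / 2F); // 562
Rect cropRegion = new Rect(cropW, cropH, 4000 - cropW, 3000 - cropH); // 2500 x 1876, centered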
@Override
public void setExposureCorrection(final float EVvalue,
@NonNull final float[] bounds,
@Nullable final PointF[] points,
final boolean notify) {
final float old = mExposureCorrectionValue;
mExposureCorrectionValue = EVvalue;
mHandler.run(new Runnable() {
@ -1130,11 +1199,15 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
});
}
@SuppressWarnings("WeakerAccess")
protected boolean applyExposureCorrection(@NonNull CaptureRequest.Builder builder,
float oldEVvalue) {
if (mCameraOptions.isExposureCorrectionSupported()) {
Rational exposureCorrectionStep = readCharacteristic(
CameraCharacteristics.CONTROL_AE_COMPENSATION_STEP,
new Rational(1, 1));
int exposureCorrectionSteps = Math.round(mExposureCorrectionValue
* exposureCorrectionStep.floatValue());
builder.set(CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION, exposureCorrectionSteps);
return true;
}
@ -1165,6 +1238,7 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
LOG.w("onImageAvailable", "no byte buffer!");
return;
}
LOG.v("onImageAvailable", "trying to acquire Image.");
Image image = null;
try {
image = reader.acquireLatestImage();
@ -1174,9 +1248,11 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
getFrameManager().onBufferUnused(data);
return;
}
LOG.v("onImageAvailable", "we have both a byte buffer and an Image.");
try {
synchronized (mFrameProcessingImageLock) {
ImageHelper.convertToNV21(image, data);
}
} catch (Exception e) {
LOG.w("onImageAvailable", "error while converting.");
getFrameManager().onBufferUnused(data);
@ -1188,7 +1264,8 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
// After preview, the frame manager is correctly set up
Frame frame = getFrameManager().getFrame(data,
System.currentTimeMillis(),
getAngles().offset(Reference.SENSOR, Reference.OUTPUT, Axis.RELATIVE_TO_SENSOR));
getAngles().offset(Reference.SENSOR, Reference.OUTPUT,
Axis.RELATIVE_TO_SENSOR));
mCallback.dispatchFrame(frame);
} else {
getFrameManager().onBufferUnused(data);
@ -1197,18 +1274,35 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
@Override
public void setHasFrameProcessors(final boolean hasFrameProcessors) {
LOG.i("setHasFrameProcessors", "changing to", hasFrameProcessors, "posting.");
Camera2Engine.super.setHasFrameProcessors(hasFrameProcessors);
mHandler.run(new Runnable() {
@Override
public void run() {
LOG.i("setHasFrameProcessors", "changing to", hasFrameProcessors,
"executing. BindState:", getBindState(),
"PreviewState:", getPreviewState());
// Frame processing is set up partially when binding and partially when starting
// the preview. We don't want to only check bind state or startPreview can fail.
if (getBindState() == STATE_STOPPED) {
LOG.i("setHasFrameProcessors", "not bound so won't restart.");
} else if (getPreviewState() == STATE_STARTED) {
// This needs a restartBind(). NOTE: if taking video, this stops it.
LOG.i("setHasFrameProcessors", "bound with preview.",
"Calling restartBind().");
restartBind();
} else {
// Bind+Preview is neither completely started nor completely stopped.
// This can happen if the user adds a frame processor in onCameraOpened().
// Supporting this would add a lot of complexity to this class, and
// this should be discouraged anyway since changing the frame processor number
// at this time requires restarting the camera when it was just opened.
// For these reasons, let's throw.
throw new IllegalStateException("Added/removed a FrameProcessor at illegal " +
"time. These operations should be done before opening the camera, or " +
"before closing it - NOT when it just opened, for example during the " +
"onCameraOpened() callback.");
}
}
});
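Given the new IllegalStateException above, a hedged usage sketch from the app side (assuming the public CameraView frame-processor API; the view id is a placeholder):
CameraView cameraView = findViewById(R.id.camera);     // hypothetical view id
cameraView.addFrameProcessor(new FrameProcessor() {    // safe: camera not opened yet
    @Override
    public void process(@NonNull Frame frame) {
        // inspect frame.getData(), frame.getRotation(), ...
    }
});
cameraView.open();
// Adding/removing processors is also fine once the preview is fully started (it rebinds),
// but not from onCameraOpened(), which now throws.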
@ -1225,8 +1319,9 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
@Override
public void run() {
LOG.i("startAutoFocus", "executing. Preview state:", getPreviewState());
// This will only work when we have a preview, since it launches the preview
// in the end. Even without this it would need the bind state at least,
// since we need the preview size.
if (getPreviewState() < STATE_STARTED) return;
// The camera options API still has the auto focus API but it really
@ -1244,7 +1339,8 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
mCallback.dispatchOnFocusEnd(gesture, action.isSuccessful(), point);
mHandler.remove(mUnlockAndResetMeteringRunnable);
if (shouldResetAutoFocus()) {
mHandler.post(getAutoFocusResetDelay(),
mUnlockAndResetMeteringRunnable);
}
}
});
@ -1263,7 +1359,8 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
// The last one is under our control because the library has no focus API.
// So let's set a good af mode here. This operation is reverted during onMeteringReset().
applyFocusForMetering(mRepeatingRequestBuilder);
mMeterAction = new MeterAction(Camera2Engine.this, point,
point == null);
return mMeterAction;
}
@ -1282,8 +1379,10 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
protected void onStart(@NonNull ActionHolder holder) {
super.onStart(holder);
applyDefaultFocus(holder.getBuilder(this));
holder.getBuilder(this)
.set(CaptureRequest.CONTROL_AE_LOCK, false);
holder.getBuilder(this)
.set(CaptureRequest.CONTROL_AWB_LOCK, false);
holder.applyBuilder(this);
setState(STATE_COMPLETED);
// TODO should wait results?
@ -1348,7 +1447,8 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
}
@Override
public void applyBuilder(@NonNull Action source, @NonNull CaptureRequest.Builder builder)
throws CameraAccessException {
mSession.capture(builder.build(), mRepeatingRequestCallback, null);
}

@ -64,8 +64,10 @@ import java.util.concurrent.TimeUnit;
* Setting up the Camera is usually a 4 steps process:
* 1. Setting up the Surface. Done by {@link CameraPreview}.
* 2. Starting the camera. Done by us. See {@link #startEngine()}, {@link #onStartEngine()}.
* 3. Binding the camera to the surface. Done by us. See {@link #startBind()},
* {@link #onStartBind()}
* 4. Streaming the camera preview. Done by us. See {@link #startPreview()},
* {@link #onStartPreview()}
*
* The first two steps can actually happen at the same time, anyway
* the order is not guaranteed, we just get a callback from the Preview when 1 happens.
@ -80,14 +82,16 @@ import java.util.concurrent.TimeUnit;
* STATE
* We only expose generic {@link #start()} and {@link #stop()} calls to the outside.
* The external users of this class are most likely interested in whether we have completed step 2
* or not, since that tells us if we can act on the camera or not, rather than knowing about
* steps 3 and 4.
*
* So in the {@link CameraEngine} notation,
* - {@link #start()}: ASYNC - starts the engine (S2). When possible, at a later time,
* S3 and S4 are also performed.
* - {@link #stop()}: ASYNC - stops everything: undoes S4, then S3, then S2.
* - {@link #restart()}: ASYNC - completes a stop then a start.
* - {@link #destroy()}: SYNC - performs a {@link #stop()} that will go on no matter the exceptions,
* without throwing. Makes the engine unusable and clears resources.
*
* For example, we expose the engine (S2) state through {@link #getEngineState()}. It will be:
* - {@link #STATE_STARTING} if we're into step 2
@ -109,13 +113,13 @@ import java.util.concurrent.TimeUnit;
*
*
* ERROR HANDLING
* The {@link #mHandler} thread has a special {@link Thread.UncaughtExceptionHandler} that handles
* exceptions and dispatches errors to the callback (instead of crashing the app).
* This lets subclasses run code safely and directly throw {@link CameraException}s when needed.
*
* For convenience, the two main methods {@link #onStartEngine()} and {@link #onStopEngine()}
* are already called on the engine thread, but they can still be asynchronous by returning
* Google's {@link com.google.android.gms.tasks.Task}.
*/
public abstract class CameraEngine implements
CameraPreview.SurfaceCallback,
@ -133,7 +137,9 @@ public abstract class CameraEngine implements
void dispatchOnFocusStart(@Nullable Gesture trigger, @NonNull PointF where);
void dispatchOnFocusEnd(@Nullable Gesture trigger, boolean success, @NonNull PointF where);
void dispatchOnZoomChanged(final float newValue, @Nullable final PointF[] fingers);
void dispatchOnExposureCorrectionChanged(float newValue,
@NonNull float[] bounds,
@Nullable PointF[] fingers);
void dispatchFrame(@NonNull Frame frame);
void dispatchError(CameraException exception);
void dispatchOnVideoRecordingStart();
@ -186,8 +192,10 @@ public abstract class CameraEngine implements
private int mAudioBitRate;
private boolean mHasFrameProcessors;
private long mAutoFocusResetDelayMillis;
// in REF_VIEW, for consistency with SizeSelectors
private int mSnapshotMaxWidth = Integer.MAX_VALUE;
// in REF_VIEW, for consistency with SizeSelectors
private int mSnapshotMaxHeight = Integer.MAX_VALUE;
private Overlay overlay;
// Steps
@ -205,9 +213,11 @@ public abstract class CameraEngine implements
// Ops used for testing.
@VisibleForTesting(otherwise = VisibleForTesting.PROTECTED) Op<Void> mZoomOp = new Op<>();
@VisibleForTesting(otherwise = VisibleForTesting.PROTECTED) Op<Void> mExposureCorrectionOp
= new Op<>();
@VisibleForTesting(otherwise = VisibleForTesting.PROTECTED) Op<Void> mFlashOp = new Op<>();
@VisibleForTesting(otherwise = VisibleForTesting.PROTECTED) Op<Void> mWhiteBalanceOp
= new Op<>();
@VisibleForTesting(otherwise = VisibleForTesting.PROTECTED) Op<Void> mHdrOp = new Op<>();
@VisibleForTesting(otherwise = VisibleForTesting.PROTECTED) Op<Void> mLocationOp = new Op<>();
@VisibleForTesting(otherwise = VisibleForTesting.PROTECTED) Op<Void> mPlaySoundsOp = new Op<>();
@ -258,9 +268,9 @@ public abstract class CameraEngine implements
/**
* Handles exceptions coming from either runtime errors on the {@link #mHandler} code that is
* not caught (using the {@link CrashExceptionHandler}), as might happen during standard
* mHandler.post() operations that subclasses might do, OR for errors caught by tasks and
* continuations that we launch here.
*
* In the first case, the thread is about to be terminated. In the second case,
* we can actually keep using it.
@ -269,7 +279,9 @@ public abstract class CameraEngine implements
* @param throwable the throwable
* @param fromExceptionHandler true if coming from exception handler
*/
private void handleException(@NonNull Thread thread,
final @NonNull Throwable throwable,
final boolean fromExceptionHandler) {
if (!(throwable instanceof CameraException)) {
// This is unexpected, either a bug or something the developer should know.
// Release and crash the UI thread so we get bug reports.
@ -290,7 +302,8 @@ public abstract class CameraEngine implements
}
final CameraException cameraException = (CameraException) throwable;
LOG.e("uncaughtException:", "Got CameraException:", cameraException,
"on engine state:", getEngineStateName());
if (fromExceptionHandler) {
// Got to restart the handler.
thread.interrupt();
@ -478,7 +491,8 @@ public abstract class CameraEngine implements
@Override
public void run() {
LOG.w("restartBind", "executing stopPreview.");
stopPreview(false).continueWithTask(mHandler.getExecutor(),
new Continuation<Void, Task<Void>>() {
@Override
public Task<Void> then(@NonNull Task<Void> task) {
LOG.w("restartBind", "executing stopBind.");
@ -525,7 +539,9 @@ public abstract class CameraEngine implements
@NonNull
@WorkerThread
private Task<Void> stopPreview(boolean swallowExceptions) {
LOG.i("stopPreview",
"needsStopPreview:", needsStopPreview(),
"swallowExceptions:", swallowExceptions);
if (needsStopPreview()) {
mPreviewStep.doStop(swallowExceptions, new Callable<Task<Void>>() {
@Override
@ -595,7 +611,8 @@ public abstract class CameraEngine implements
@Override
public final void onSurfaceChanged() {
LOG.i("onSurfaceChanged:", "Size is", getPreviewSurfaceSize(Reference.VIEW),
"Posting.");
mHandler.run(new Runnable() {
@Override
public void run() {
@ -608,9 +625,11 @@ public abstract class CameraEngine implements
// Compute a new camera preview size and apply.
Size newSize = computePreviewStreamSize();
if (newSize.equals(mPreviewStreamSize)) {
LOG.i("onSurfaceChanged:",
"The computed preview size is identical. No op.");
} else {
LOG.i("onSurfaceChanged:",
"Computed a new preview size. Calling onPreviewStreamSizeChanged().");
mPreviewStreamSize = newSize;
onPreviewStreamSizeChanged();
}
@ -633,7 +652,8 @@ public abstract class CameraEngine implements
mHandler.run(new Runnable() {
@Override
public void run() {
stopPreview(false).onSuccessTask(mHandler.getExecutor(),
new SuccessContinuation<Void, Void>() {
@NonNull
@Override
public Task<Void> then(@Nullable Void aVoid) {
@ -663,7 +683,8 @@ public abstract class CameraEngine implements
// Stop if needed, synchronously and silently.
// Cannot use Tasks.await() because we might be on the UI thread.
final CountDownLatch latch = new CountDownLatch(1);
stop(true).addOnCompleteListener(mHandler.getExecutor(),
new OnCompleteListener<Void>() {
@Override
public void onComplete(@NonNull Task<Void> task) {
latch.countDown();
@ -695,14 +716,17 @@ public abstract class CameraEngine implements
@Override
public void run() {
LOG.w("Start:", "executing runnable. AllState is", mAllStep.getState());
// It's better to schedule anyway. allStep might be STARTING and we might be
// tempted to early return here, but the truth is that there might be a stop
// already scheduled when the STARTING op ends.
// if (mAllStep.isStoppingOrStopped()) {
// LOG.i("Start:", "executing runnable. AllState is STOPPING or STOPPED,
// so we schedule a start.");
mAllStep.doStart(false, new Callable<Task<Void>>() {
@Override
public Task<Void> call() {
return startEngine().addOnFailureListener(mHandler.getExecutor(),
new OnFailureListener() {
@Override
public void onFailure(@NonNull Exception e) {
outTask.trySetException(e);
@ -725,7 +749,8 @@ public abstract class CameraEngine implements
});
// } else {
// // NOTE: this returns early if we were STARTING.
// LOG.i("Start:",
// "executing runnable. AllState is STARTING or STARTED, so we return early.");
// outTask.trySetResult(null);
// }
}
@ -746,14 +771,17 @@ public abstract class CameraEngine implements
@Override
public void run() {
LOG.w("Stop:", "executing runnable. AllState is", mAllStep.getState());
// It's better to schedule anyway. allStep might be STOPPING and we might be
// tempted to early return here, but the truth is that there might be a start
// already scheduled when the STOPPING op ends.
// if (mAllStep.isStartedOrStarting()) {
// LOG.i("Stop:", "executing runnable. AllState is STARTING or STARTED,
// so we schedule a stop.");
mAllStep.doStop(swallowExceptions, new Callable<Task<Void>>() {
@Override
public Task<Void> call() {
return stopPreview(swallowExceptions).continueWithTask(
mHandler.getExecutor(), new Continuation<Void, Task<Void>>() {
@Override
public Task<Void> then(@NonNull Task<Void> task) {
return stopBind(swallowExceptions);
@ -779,7 +807,8 @@ public abstract class CameraEngine implements
});
// } else {
// // NOTE: this returns early if we were STOPPING.
// LOG.i("Stop:", "executing runnable.
// AllState is STOPPING or STOPPED, so we return early.");
// outTask.trySetResult(null);
// }
}
@ -800,7 +829,6 @@ public abstract class CameraEngine implements
return overlay;
}
@SuppressWarnings("WeakerAccess")
public final Angles getAngles() {
return mAngles;
}
@ -888,9 +916,13 @@ public abstract class CameraEngine implements
return mSnapshotMaxHeight;
}
public final void setAutoFocusResetDelay(long delayMillis) {
mAutoFocusResetDelayMillis = delayMillis;
}
public final long getAutoFocusResetDelay() {
return mAutoFocusResetDelayMillis;
}
/**
* Sets a new facing value. This will restart the session (if there's any)
@ -1039,8 +1071,8 @@ public abstract class CameraEngine implements
* Camera is about to be opened. Implementors should look into available cameras
* and see if anyone matches the given {@link Facing value}.
*
* If so, implementors should set {@link Angles#setSensorOffset(Facing, int)} and any other information
* (like camera ID) needed to start the engine.
* If so, implementors should set {@link Angles#setSensorOffset(Facing, int)}
* and any other information (like camera ID) needed to start the engine.
*
* @param facing the facing value
* @return true if we have one
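A rough sketch of how a Camera1-based implementor could satisfy this contract (the exact method name is not visible in this hunk, so collectCameraInfo and the mCameraId field below are assumptions):

private int mCameraId = -1; // assumed field: remembers which camera to open later

@Override
protected boolean collectCameraInfo(@NonNull Facing facing) {
    int internalFacing = facing == Facing.BACK
            ? Camera.CameraInfo.CAMERA_FACING_BACK
            : Camera.CameraInfo.CAMERA_FACING_FRONT;
    Camera.CameraInfo info = new Camera.CameraInfo();
    for (int i = 0, count = Camera.getNumberOfCameras(); i < count; i++) {
        Camera.getCameraInfo(i, info);
        if (info.facing == internalFacing) {
            // Sensor offset and camera id are what we need to start the engine later.
            getAngles().setSensorOffset(facing, info.orientation);
            mCameraId = i;
            return true;
        }
    }
    return false; // no available camera matches the requested facing
}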
@ -1059,7 +1091,10 @@ public abstract class CameraEngine implements
public abstract void setZoom(float zoom, @Nullable PointF[] points, boolean notify);
// If closed, no-op. If opened, check supported and apply.
public abstract void setExposureCorrection(float EVvalue, @NonNull float[] bounds, @Nullable PointF[] points, boolean notify);
public abstract void setExposureCorrection(float EVvalue,
@NonNull float[] bounds,
@Nullable PointF[] points,
boolean notify);
// If closed, keep. If opened, check supported and apply.
public abstract void setFlash(@NonNull Flash flash);
@ -1091,7 +1126,8 @@ public abstract class CameraEngine implements
mHandler.run(new Runnable() {
@Override
public void run() {
LOG.v("takePicture", "performing. BindState:", getBindState(), "isTakingPicture:", isTakingPicture());
LOG.v("takePicture", "performing. BindState:", getBindState(),
"isTakingPicture:", isTakingPicture());
if (mMode == Mode.VIDEO) {
throw new IllegalStateException("Can't take hq pictures while in VIDEO mode");
}
@ -1115,7 +1151,8 @@ public abstract class CameraEngine implements
mHandler.run(new Runnable() {
@Override
public void run() {
LOG.v("takePictureSnapshot", "performing. BindState:", getBindState(), "isTakingPicture:", isTakingPicture());
LOG.v("takePictureSnapshot", "performing. BindState:",
getBindState(), "isTakingPicture:", isTakingPicture());
if (getBindState() < STATE_STARTED) return;
if (isTakingPicture()) return;
stub.location = mLocation;
@ -1141,7 +1178,8 @@ public abstract class CameraEngine implements
mCallback.dispatchOnPictureTaken(result);
} else {
LOG.e("onPictureResult", "result is null: something went wrong.", error);
mCallback.dispatchError(new CameraException(error, CameraException.REASON_PICTURE_FAILED));
mCallback.dispatchError(new CameraException(error,
CameraException.REASON_PICTURE_FAILED));
}
}
@ -1154,7 +1192,8 @@ public abstract class CameraEngine implements
mHandler.run(new Runnable() {
@Override
public void run() {
LOG.v("takeVideo", "performing. BindState:", getBindState(), "isTakingVideo:", isTakingVideo());
LOG.v("takeVideo", "performing. BindState:", getBindState(),
"isTakingVideo:", isTakingVideo());
if (getBindState() < STATE_STARTED) return;
if (isTakingVideo()) return;
if (mMode == Mode.PICTURE) {
@ -1179,12 +1218,14 @@ public abstract class CameraEngine implements
* @param stub a video stub
* @param file the output file
*/
public final void takeVideoSnapshot(final @NonNull VideoResult.Stub stub, @NonNull final File file) {
public final void takeVideoSnapshot(@NonNull final VideoResult.Stub stub,
@NonNull final File file) {
LOG.v("takeVideoSnapshot", "scheduling");
mHandler.run(new Runnable() {
@Override
public void run() {
LOG.v("takeVideoSnapshot", "performing. BindState:", getBindState(), "isTakingVideo:", isTakingVideo());
LOG.v("takeVideoSnapshot", "performing. BindState:", getBindState(),
"isTakingVideo:", isTakingVideo());
if (getBindState() < STATE_STARTED) return;
if (isTakingVideo()) return;
stub.file = file;
@ -1215,10 +1256,13 @@ public abstract class CameraEngine implements
});
}
@SuppressWarnings("WeakerAccess")
protected void onStopVideo() {
if (mVideoRecorder != null) {
mVideoRecorder.stop(false);
mVideoRecorder = null;
// Do not null this, so we respond correctly to isTakingVideo(),
// which checks for recorder presence and recorder.isRecording().
// It will be nulled in onVideoResult.
}
}
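As a consequence, isTakingVideo() keeps answering correctly between stop(false) and onVideoResult. A minimal sketch of such a check, assuming the recorder exposes isRecording() as the comment states:

public boolean isTakingVideo() {
    // True while a recorder exists and is still recording; nulled only in onVideoResult.
    return mVideoRecorder != null && mVideoRecorder.isRecording();
}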
@ -1230,7 +1274,8 @@ public abstract class CameraEngine implements
mCallback.dispatchOnVideoTaken(result);
} else {
LOG.e("onVideoResult", "result is null: something went wrong.", exception);
mCallback.dispatchError(new CameraException(exception, CameraException.REASON_VIDEO_FAILED));
mCallback.dispatchError(new CameraException(exception,
CameraException.REASON_VIDEO_FAILED));
}
}
@ -1248,10 +1293,13 @@ public abstract class CameraEngine implements
protected abstract void onTakePicture(@NonNull PictureResult.Stub stub, boolean doMetering);
@WorkerThread
protected abstract void onTakePictureSnapshot(@NonNull PictureResult.Stub stub, @NonNull AspectRatio outputRatio, boolean doMetering);
protected abstract void onTakePictureSnapshot(@NonNull PictureResult.Stub stub,
@NonNull AspectRatio outputRatio,
boolean doMetering);
@WorkerThread
protected abstract void onTakeVideoSnapshot(@NonNull VideoResult.Stub stub, @NonNull AspectRatio outputRatio);
protected abstract void onTakeVideoSnapshot(@NonNull VideoResult.Stub stub,
@NonNull AspectRatio outputRatio);
@WorkerThread
protected abstract void onTakeVideo(@NonNull VideoResult.Stub stub);
@ -1286,7 +1334,8 @@ public abstract class CameraEngine implements
private Size getPreviewSurfaceSize(@NonNull Reference reference) {
CameraPreview preview = mPreview;
if (preview == null) return null;
return getAngles().flip(Reference.VIEW, reference) ? preview.getSurfaceSize().flip() : preview.getSurfaceSize();
return getAngles().flip(Reference.VIEW, reference) ? preview.getSurfaceSize().flip()
: preview.getSurfaceSize();
}
/**
@ -1295,7 +1344,7 @@ public abstract class CameraEngine implements
* levels so we don't want to perform the op here.
*
* The base snapshot size is based on PreviewStreamSize (later cropped with view ratio). Why?
* One might be tempted to say that it is the SurfaceSize (which already matches the view ratio).
* One might be tempted to say that it's the SurfaceSize (which already matches the view ratio).
*
* The camera sensor will capture preview frames with PreviewStreamSize and that's it. Then they
* are hardware-scaled by the preview surface, but this does not affect the snapshot, as the
@ -1367,7 +1416,8 @@ public abstract class CameraEngine implements
List<Size> list = new ArrayList<>(sizes);
Size result = selector.select(list).get(0);
if (!list.contains(result)) {
throw new RuntimeException("SizeSelectors must not return Sizes other than those in the input list.");
throw new RuntimeException("SizeSelectors must not return Sizes other than " +
"those in the input list.");
}
LOG.i("computeCaptureSize:", "result:", result, "flip:", flip, "mode:", mode);
if (flip) result = result.flip(); // Go back to REF_SENSOR
@ -1395,13 +1445,17 @@ public abstract class CameraEngine implements
sizes.add(flip ? size.flip() : size);
}
// Create our own default selector, which will be used if the external mPreviewStreamSizeSelector
// is null, or if it fails in finding a size.
// Create our own default selector, which will be used if the external
// mPreviewStreamSizeSelector is null, or if it fails in finding a size.
Size targetMinSize = getPreviewSurfaceSize(Reference.VIEW);
if (targetMinSize == null) throw new IllegalStateException("targetMinSize should not be null here.");
if (targetMinSize == null) {
throw new IllegalStateException("targetMinSize should not be null here.");
}
AspectRatio targetRatio = AspectRatio.of(mCaptureSize.getWidth(), mCaptureSize.getHeight());
if (flip) targetRatio = targetRatio.flip();
LOG.i("computePreviewStreamSize:", "targetRatio:", targetRatio, "targetMinSize:", targetMinSize);
LOG.i("computePreviewStreamSize:",
"targetRatio:", targetRatio,
"targetMinSize:", targetMinSize);
SizeSelector matchRatio = SizeSelectors.and( // Match this aspect ratio and sort by biggest
SizeSelectors.aspectRatio(targetRatio, 0),
SizeSelectors.biggest());
@ -1426,7 +1480,8 @@ public abstract class CameraEngine implements
}
Size result = selector.select(sizes).get(0);
if (!sizes.contains(result)) {
throw new RuntimeException("SizeSelectors must not return Sizes other than those in the input list.");
throw new RuntimeException("SizeSelectors must not return Sizes other than " +
"those in the input list.");
}
if (flip) result = result.flip();
LOG.i("computePreviewStreamSize:", "result:", result, "flip:", flip);

@ -15,12 +15,14 @@ import com.otaliastudios.cameraview.engine.action.BaseAction;
@RequiresApi(Build.VERSION_CODES.LOLLIPOP)
class LogAction extends BaseAction {
private final static CameraLogger LOG = CameraLogger.create(Camera2Engine.class.getSimpleName());
private final static CameraLogger LOG
= CameraLogger.create(Camera2Engine.class.getSimpleName());
private String lastLog;
@Override
public void onCaptureCompleted(@NonNull ActionHolder holder, @NonNull CaptureRequest request,
public void onCaptureCompleted(@NonNull ActionHolder holder,
@NonNull CaptureRequest request,
@NonNull TotalCaptureResult result) {
super.onCaptureCompleted(holder, request, result);
Integer aeMode = result.get(CaptureResult.CONTROL_AE_MODE);
@ -34,29 +36,14 @@ class LogAction extends BaseAction {
" afState: " + afState + " afTriggerState: " + afTriggerState;
if (!log.equals(lastLog)) {
lastLog = log;
LOG.w(log);
LOG.i(log);
}
}
// START
// aeMode: 3 aeLock: false aeState: 4 aeTriggerState: 0 afState: 2 afTriggerState: 0
//
// DURING metering (focus skips)
// aeMode: 3 aeLock: false aeState: 4 aeTriggerState: 0 afState: 0 afTriggerState: 0
// aeMode: 3 aeLock: false aeState: 5 aeTriggerState: 1 afState: 0 afTriggerState: 0
//
// DURING locking (focus skips)
// aeMode: 3 aeLock: false aeState: 4 aeTriggerState: 1 afState: 0 afTriggerState: 0
// aeMode: 3 aeLock: true aeState: 5 aeTriggerState: 1 afState: 0 afTriggerState: 0
//
// AFTER locked
// aeMode: 3 aeLock: true aeState: 3 aeTriggerState: 1 afState: 0 afTriggerState: 0
//
// AFTER super.take() called
// aeMode: 1 aeLock: true aeState: 5 aeTriggerState: 1 afState: 0 afTriggerState: 0
// aeMode: 1 aeLock: true aeState: 3 aeTriggerState: 1 afState: 0 afTriggerState: 0
//
// Reverting flash changes + reset lock + reset metering
// aeMode: 3 aeLock: false aeState: 4 aeTriggerState: 2(1 now) afState: 2 afTriggerState: 0
// aeMode: 3 aeLock: false aeState: 1 aeTriggerState: 2(1 now) afState: 2 afTriggerState: 0
@Override
protected void onCompleted(@NonNull ActionHolder holder) {
super.onCompleted(holder);
setState(0); // set another state.
start(holder); // restart.
}
}

@ -105,17 +105,21 @@ class Step {
return doStart(swallowExceptions, op, null);
}
Task<Void> doStart(final boolean swallowExceptions, final @NonNull Callable<Task<Void>> op, final @Nullable Runnable onStarted) {
Task<Void> doStart(final boolean swallowExceptions,
final @NonNull Callable<Task<Void>> op,
final @Nullable Runnable onStarted) {
LOG.i(name, "doStart", "Called. Enqueuing.");
task = task.continueWithTask(callback.getExecutor(), new Continuation<Void, Task<Void>>() {
@Override
public Task<Void> then(@NonNull Task<Void> task) throws Exception {
LOG.i(name, "doStart", "About to start. Setting state to STARTING");
setState(STATE_STARTING);
return op.call().addOnFailureListener(callback.getExecutor(), new OnFailureListener() {
return op.call().addOnFailureListener(callback.getExecutor(),
new OnFailureListener() {
@Override
public void onFailure(@NonNull Exception e) {
LOG.w(name, "doStart", "Failed with error", e, "Setting state to STOPPED");
LOG.w(name, "doStart", "Failed with error", e,
"Setting state to STOPPED");
setState(STATE_STOPPED);
if (!swallowExceptions) callback.handleException(e);
}
@ -139,17 +143,21 @@ class Step {
return doStop(swallowExceptions, op, null);
}
Task<Void> doStop(final boolean swallowExceptions, final @NonNull Callable<Task<Void>> op, final @Nullable Runnable onStopped) {
Task<Void> doStop(final boolean swallowExceptions,
final @NonNull Callable<Task<Void>> op,
final @Nullable Runnable onStopped) {
LOG.i(name, "doStop", "Called. Enqueuing.");
task = task.continueWithTask(callback.getExecutor(), new Continuation<Void, Task<Void>>() {
@Override
public Task<Void> then(@NonNull Task<Void> task) throws Exception {
LOG.i(name, "doStop", "About to stop. Setting state to STOPPING");
state = STATE_STOPPING;
return op.call().addOnFailureListener(callback.getExecutor(), new OnFailureListener() {
return op.call().addOnFailureListener(callback.getExecutor(),
new OnFailureListener() {
@Override
public void onFailure(@NonNull Exception e) {
LOG.w(name, "doStop", "Failed with error", e, "Setting state to STOPPED");
LOG.w(name, "doStop", "Failed with error", e,
"Setting state to STOPPED");
state = STATE_STOPPED;
if (!swallowExceptions) callback.handleException(e);
}

@ -13,8 +13,8 @@ import androidx.annotation.RequiresApi;
/**
* The Action class encapsulates logic for completing an action in a Camera2 environment.
* In this case, we are often interested in constantly receiving the {@link CaptureResult}
* and {@link CaptureRequest} callbacks, as well as applying changes to a {@link CaptureRequest.Builder}
* and having them applied to the sensor.
* and {@link CaptureRequest} callbacks, as well as applying changes to a
* {@link CaptureRequest.Builder} and having them applied to the sensor.
*
* The Action class receives the given callbacks and can operate over the engine
* through the {@link ActionHolder} object.
@ -61,25 +61,32 @@ public interface Action {
void removeCallback(@NonNull ActionCallback callback);
/**
* Called from {@link CaptureCallback#onCaptureStarted(CameraCaptureSession, CaptureRequest, long, long)}.
* Called from {@link CaptureCallback#onCaptureStarted(CameraCaptureSession, CaptureRequest,
* long, long)}.
* @param holder the holder
* @param request the request
*/
void onCaptureStarted(@NonNull ActionHolder holder, @NonNull CaptureRequest request);
/**
* Called from {@link CaptureCallback#onCaptureProgressed(CameraCaptureSession, CaptureRequest, CaptureResult)}.
* Called from {@link CaptureCallback#onCaptureProgressed(CameraCaptureSession, CaptureRequest,
* CaptureResult)}.
* @param holder the holder
* @param request the request
* @param result the result
*/
void onCaptureProgressed(@NonNull ActionHolder holder, @NonNull CaptureRequest request, @NonNull CaptureResult result);
void onCaptureProgressed(@NonNull ActionHolder holder,
@NonNull CaptureRequest request,
@NonNull CaptureResult result);
/**
* Called from {@link CaptureCallback#onCaptureCompleted(CameraCaptureSession, CaptureRequest, TotalCaptureResult)}.
* Called from {@link CaptureCallback#onCaptureCompleted(CameraCaptureSession, CaptureRequest,
* TotalCaptureResult)}.
* @param holder the holder
* @param request the request
* @param result the result
*/
void onCaptureCompleted(@NonNull ActionHolder holder, @NonNull CaptureRequest request, @NonNull TotalCaptureResult result);
void onCaptureCompleted(@NonNull ActionHolder holder,
@NonNull CaptureRequest request,
@NonNull TotalCaptureResult result);
}
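To make the contract above concrete, a minimal hypothetical Action built on BaseAction (the class name is made up; the hooks follow the signatures in this diff). It waits for the first completed capture and then detaches itself:

@RequiresApi(Build.VERSION_CODES.LOLLIPOP)
class FirstResultAction extends BaseAction {
    @Override
    public void onCaptureCompleted(@NonNull ActionHolder holder,
                                   @NonNull CaptureRequest request,
                                   @NonNull TotalCaptureResult result) {
        super.onCaptureCompleted(holder, request, result);
        // Inspect whatever is needed from the result (AE/AF/AWB state, timestamps...),
        // then complete so the holder stops forwarding callbacks to this action.
        setState(STATE_COMPLETED);
    }
}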

@ -77,5 +77,6 @@ public interface ActionHolder {
* @param builder builder
* @throws CameraAccessException camera exception
*/
void applyBuilder(@NonNull Action source, @NonNull CaptureRequest.Builder builder) throws CameraAccessException;
void applyBuilder(@NonNull Action source, @NonNull CaptureRequest.Builder builder)
throws CameraAccessException;
}

@ -50,13 +50,17 @@ public abstract class ActionWrapper extends BaseAction {
}
@Override
public void onCaptureProgressed(@NonNull ActionHolder holder, @NonNull CaptureRequest request, @NonNull CaptureResult result) {
public void onCaptureProgressed(@NonNull ActionHolder holder,
@NonNull CaptureRequest request,
@NonNull CaptureResult result) {
super.onCaptureProgressed(holder, request, result);
getAction().onCaptureProgressed(holder, request, result);
}
@Override
public void onCaptureCompleted(@NonNull ActionHolder holder, @NonNull CaptureRequest request, @NonNull TotalCaptureResult result) {
public void onCaptureCompleted(@NonNull ActionHolder holder,
@NonNull CaptureRequest request,
@NonNull TotalCaptureResult result) {
super.onCaptureCompleted(holder, request, result);
getAction().onCaptureCompleted(holder, request, result);
}

@ -78,12 +78,16 @@ public abstract class BaseAction implements Action {
}
@Override
public void onCaptureProgressed(@NonNull ActionHolder holder, @NonNull CaptureRequest request, @NonNull CaptureResult result) {
public void onCaptureProgressed(@NonNull ActionHolder holder,
@NonNull CaptureRequest request,
@NonNull CaptureResult result) {
// Overrideable
}
@Override
public void onCaptureCompleted(@NonNull ActionHolder holder, @NonNull CaptureRequest request, @NonNull TotalCaptureResult result) {
public void onCaptureCompleted(@NonNull ActionHolder holder,
@NonNull CaptureRequest request,
@NonNull TotalCaptureResult result) {
// Overrideable
}
@ -92,7 +96,7 @@ public abstract class BaseAction implements Action {
* this removes this action from the holder.
* @param newState new state
*/
protected void setState(int newState) {
protected final void setState(int newState) {
if (newState != state) {
state = newState;
for (ActionCallback callback : callbacks) {

@ -75,7 +75,8 @@ class SequenceAction extends BaseAction {
}
@Override
public void onCaptureProgressed(@NonNull ActionHolder holder, @NonNull CaptureRequest request,
public void onCaptureProgressed(@NonNull ActionHolder holder,
@NonNull CaptureRequest request,
@NonNull CaptureResult result) {
super.onCaptureProgressed(holder, request, result);
if (runningAction >= 0) {
@ -84,7 +85,8 @@ class SequenceAction extends BaseAction {
}
@Override
public void onCaptureCompleted(@NonNull ActionHolder holder, @NonNull CaptureRequest request,
public void onCaptureCompleted(@NonNull ActionHolder holder,
@NonNull CaptureRequest request,
@NonNull TotalCaptureResult result) {
super.onCaptureCompleted(holder, request, result);
if (runningAction >= 0) {

@ -65,7 +65,8 @@ class TogetherAction extends BaseAction {
}
@Override
public void onCaptureProgressed(@NonNull ActionHolder holder, @NonNull CaptureRequest request,
public void onCaptureProgressed(@NonNull ActionHolder holder,
@NonNull CaptureRequest request,
@NonNull CaptureResult result) {
super.onCaptureProgressed(holder, request, result);
for (BaseAction action : actions) {
@ -74,7 +75,8 @@ class TogetherAction extends BaseAction {
}
@Override
public void onCaptureCompleted(@NonNull ActionHolder holder, @NonNull CaptureRequest request,
public void onCaptureCompleted(@NonNull ActionHolder holder,
@NonNull CaptureRequest request,
@NonNull TotalCaptureResult result) {
super.onCaptureCompleted(holder, request, result);
for (BaseAction action : actions) {

@ -20,7 +20,8 @@ public class ExposureLock extends BaseLock {
@Override
protected boolean checkIsSupported(@NonNull ActionHolder holder) {
boolean isNotLegacy = readCharacteristic(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL, -1)
boolean isNotLegacy = readCharacteristic(
CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL, -1)
!= CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY;
// Not sure we should check aeMode as well, probably all aeModes support locking,
// but this should not be a big issue since we're not even using different AE modes.
@ -30,7 +31,8 @@ public class ExposureLock extends BaseLock {
|| aeMode == CameraCharacteristics.CONTROL_AE_MODE_ON_ALWAYS_FLASH
|| aeMode == CameraCharacteristics.CONTROL_AE_MODE_ON_AUTO_FLASH
|| aeMode == CameraCharacteristics.CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE
|| aeMode == 5 /* CameraCharacteristics.CONTROL_AE_MODE_ON_EXTERNAL_FLASH, API 28 */);
|| aeMode == 5
/* CameraCharacteristics.CONTROL_AE_MODE_ON_EXTERNAL_FLASH, API 28 */);
boolean result = isNotLegacy && isAEOn;
LOG.i("checkIsSupported:", result);
return result;
@ -49,13 +51,16 @@ public class ExposureLock extends BaseLock {
int cancelTrigger = Build.VERSION.SDK_INT >= 23
? CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER_CANCEL
: CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
holder.getBuilder(this).set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER, cancelTrigger);
holder.getBuilder(this).set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER,
cancelTrigger);
holder.getBuilder(this).set(CaptureRequest.CONTROL_AE_LOCK, true);
holder.applyBuilder(this);
}
@Override
public void onCaptureCompleted(@NonNull ActionHolder holder, @NonNull CaptureRequest request, @NonNull TotalCaptureResult result) {
public void onCaptureCompleted(@NonNull ActionHolder holder,
@NonNull CaptureRequest request,
@NonNull TotalCaptureResult result) {
super.onCaptureCompleted(holder, request, result);
Integer aeState = result.get(CaptureResult.CONTROL_AE_STATE);
LOG.i("processCapture:", "aeState:", aeState);

@ -22,7 +22,8 @@ public class FocusLock extends BaseLock {
protected boolean checkIsSupported(@NonNull ActionHolder holder) {
// We'll lock by changing the AF mode to AUTO.
// In that mode, AF won't change unless someone starts a trigger operation.
int[] modes = readCharacteristic(CameraCharacteristics.CONTROL_AF_AVAILABLE_MODES, new int[]{});
int[] modes = readCharacteristic(CameraCharacteristics.CONTROL_AF_AVAILABLE_MODES,
new int[]{});
for (int mode : modes) {
if (mode == CameraCharacteristics.CONTROL_AF_MODE_AUTO) {
return true;
@ -50,13 +51,17 @@ public class FocusLock extends BaseLock {
@Override
protected void onStarted(@NonNull ActionHolder holder) {
holder.getBuilder(this).set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_AUTO);
holder.getBuilder(this).set(CaptureRequest.CONTROL_AF_TRIGGER, CaptureRequest.CONTROL_AF_TRIGGER_CANCEL);
holder.getBuilder(this).set(CaptureRequest.CONTROL_AF_MODE,
CaptureRequest.CONTROL_AF_MODE_AUTO);
holder.getBuilder(this).set(CaptureRequest.CONTROL_AF_TRIGGER,
CaptureRequest.CONTROL_AF_TRIGGER_CANCEL);
holder.applyBuilder(this);
}
@Override
public void onCaptureCompleted(@NonNull ActionHolder holder, @NonNull CaptureRequest request, @NonNull TotalCaptureResult result) {
public void onCaptureCompleted(@NonNull ActionHolder holder,
@NonNull CaptureRequest request,
@NonNull TotalCaptureResult result) {
super.onCaptureCompleted(holder, request, result);
Integer afState = result.get(CaptureResult.CONTROL_AF_STATE);
Integer afMode = result.get(CaptureResult.CONTROL_AF_MODE);

@ -20,10 +20,13 @@ public class WhiteBalanceLock extends BaseLock {
@Override
protected boolean checkIsSupported(@NonNull ActionHolder holder) {
boolean isNotLegacy = readCharacteristic(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL, -1)
boolean isNotLegacy = readCharacteristic(
CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL, -1)
!= CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY;
Integer awbMode = holder.getBuilder(this).get(CaptureRequest.CONTROL_AWB_MODE);
boolean result = isNotLegacy && awbMode != null && awbMode == CaptureRequest.CONTROL_AWB_MODE_AUTO;
boolean result = isNotLegacy
&& awbMode != null
&& awbMode == CaptureRequest.CONTROL_AWB_MODE_AUTO;
LOG.i("checkIsSupported:", result);
return result;
}
@ -43,7 +46,9 @@ public class WhiteBalanceLock extends BaseLock {
}
@Override
public void onCaptureCompleted(@NonNull ActionHolder holder, @NonNull CaptureRequest request, @NonNull TotalCaptureResult result) {
public void onCaptureCompleted(@NonNull ActionHolder holder,
@NonNull CaptureRequest request,
@NonNull TotalCaptureResult result) {
super.onCaptureCompleted(holder, request, result);
Integer awbState = result.get(CaptureResult.CONTROL_AWB_STATE);
LOG.i("processCapture:", "awbState:", awbState);

@ -29,7 +29,8 @@ public abstract class BaseReset extends BaseAction {
super.onStart(holder);
MeteringRectangle area = null;
if (resetArea) {
Rect rect = readCharacteristic(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE, new Rect());
Rect rect = readCharacteristic(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE,
new Rect());
area = new MeteringRectangle(rect, MeteringRectangle.METERING_WEIGHT_DONT_CARE);
}
onStarted(holder, area);

@ -32,7 +32,8 @@ public class ExposureMeter extends BaseMeter {
@Override
protected boolean checkIsSupported(@NonNull ActionHolder holder) {
// In our case, this means checking if we support the AE precapture trigger.
boolean isNotLegacy = readCharacteristic(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL, -1)
boolean isNotLegacy = readCharacteristic(
CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL, -1)
!= CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY;
Integer aeMode = holder.getBuilder(this).get(CaptureRequest.CONTROL_AE_MODE);
boolean isAEOn = aeMode != null &&
@ -40,7 +41,8 @@ public class ExposureMeter extends BaseMeter {
|| aeMode == CameraCharacteristics.CONTROL_AE_MODE_ON_ALWAYS_FLASH
|| aeMode == CameraCharacteristics.CONTROL_AE_MODE_ON_AUTO_FLASH
|| aeMode == CameraCharacteristics.CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE
|| aeMode == 5 /* CameraCharacteristics.CONTROL_AE_MODE_ON_EXTERNAL_FLASH, API 28 */);
|| aeMode == 5
/* CameraCharacteristics.CONTROL_AE_MODE_ON_EXTERNAL_FLASH, API 28 */);
boolean result = isNotLegacy && isAEOn;
LOG.i("checkIsSupported:", result);
return result;
@ -63,7 +65,8 @@ public class ExposureMeter extends BaseMeter {
CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER_START);
// Check the regions.
int maxRegions = readCharacteristic(CameraCharacteristics.CONTROL_MAX_REGIONS_AE, 0);
int maxRegions = readCharacteristic(CameraCharacteristics.CONTROL_MAX_REGIONS_AE,
0);
if (!areas.isEmpty() && maxRegions > 0) {
int max = Math.min(maxRegions, areas.size());
holder.getBuilder(this).set(CaptureRequest.CONTROL_AE_REGIONS,
@ -103,8 +106,8 @@ public class ExposureMeter extends BaseMeter {
// PRECAPTURE is a transient state. Being here might mean that precapture ran
// and was successful, OR that the trigger was not even received yet. To
// distinguish, check the trigger state.
if (aeTriggerState != null
&& aeTriggerState == CaptureResult.CONTROL_AE_PRECAPTURE_TRIGGER_START) {
if (aeTriggerState != null && aeTriggerState
== CaptureResult.CONTROL_AE_PRECAPTURE_TRIGGER_START) {
setSuccessful(true);
setState(STATE_COMPLETED);
}

@ -29,29 +29,32 @@ public class ExposureReset extends BaseReset {
@Override
protected void onStarted(@NonNull ActionHolder holder, @Nullable MeteringRectangle area) {
int maxRegions = readCharacteristic(CameraCharacteristics.CONTROL_MAX_REGIONS_AE, 0);
int maxRegions = readCharacteristic(CameraCharacteristics.CONTROL_MAX_REGIONS_AE,
0);
if (area != null && maxRegions > 0) {
holder.getBuilder(this).set(CaptureRequest.CONTROL_AE_REGIONS,
new MeteringRectangle[]{area});
}
// NOTE: precapture might not be supported, in which case I think it will be ignored.
Integer trigger = holder.getLastResult(this).get(CaptureResult.CONTROL_AE_PRECAPTURE_TRIGGER);
Integer trigger = holder.getLastResult(this)
.get(CaptureResult.CONTROL_AE_PRECAPTURE_TRIGGER);
LOG.i("onStarted:", "last precapture trigger is", trigger);
if (trigger != null && trigger == CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER_START) {
LOG.i("onStarted:", "canceling precapture.");
int newTrigger = Build.VERSION.SDK_INT >= 23
? CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER_CANCEL
: CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
holder.getBuilder(this).set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER, newTrigger);
holder.getBuilder(this).set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER,
newTrigger);
}
// Documentation about CONTROL_AE_PRECAPTURE_TRIGGER says that, if it was started but not
// followed by a CAPTURE_INTENT_STILL_PICTURE request, the internal AE routine might remain
// locked unless we unlock manually.
// This is often the case for us, since the snapshot picture recorder does not use the intent
// and anyway we use the precapture sequence for touch metering as well.
// To reset, docs suggest the use of CONTROL_AE_PRECAPTURE_TRIGGER_CANCEL, which we do above,
// This is often the case for us, since the snapshot picture recorder does not use the
// intent and anyway we use the precapture sequence for touch metering as well.
// To reset, docs suggest CONTROL_AE_PRECAPTURE_TRIGGER_CANCEL, which we do above,
// or the technique used below: locking then unlocking. This proved to be the ONLY method
// to unlock reliably, unlike the cancel trigger (which we'll run anyway).
holder.getBuilder(this).set(CaptureRequest.CONTROL_AE_LOCK, true);
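The unlock half of that lock-then-unlock trick is not visible in this hunk; a hedged sketch of what it could look like once the lock is reported back (standard Camera2 constants, library hook names taken from this diff):

@Override
public void onCaptureCompleted(@NonNull ActionHolder holder,
                               @NonNull CaptureRequest request,
                               @NonNull TotalCaptureResult result) {
    super.onCaptureCompleted(holder, request, result);
    Integer aeState = result.get(CaptureResult.CONTROL_AE_STATE);
    if (aeState != null && aeState == CaptureResult.CONTROL_AE_STATE_LOCKED) {
        // AE has acknowledged the lock: release it so the routine runs freely again.
        holder.getBuilder(this).set(CaptureRequest.CONTROL_AE_LOCK, false);
        holder.applyBuilder(this);
        setState(STATE_COMPLETED);
    }
}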

@ -53,7 +53,8 @@ public class FocusMeter extends BaseMeter {
LOG.i("onStarted:", "with areas:", areas);
holder.getBuilder(this).set(CaptureRequest.CONTROL_AF_TRIGGER,
CaptureRequest.CONTROL_AF_TRIGGER_START);
int maxRegions = readCharacteristic(CameraCharacteristics.CONTROL_MAX_REGIONS_AF, 0);
int maxRegions = readCharacteristic(CameraCharacteristics.CONTROL_MAX_REGIONS_AF,
0);
if (!areas.isEmpty() && maxRegions > 0) {
int max = Math.min(maxRegions, areas.size());
holder.getBuilder(this).set(CaptureRequest.CONTROL_AF_REGIONS,

@ -27,7 +27,8 @@ public class FocusReset extends BaseReset {
@Override
protected void onStarted(@NonNull ActionHolder holder, @Nullable MeteringRectangle area) {
boolean changed = false;
int maxRegions = readCharacteristic(CameraCharacteristics.CONTROL_MAX_REGIONS_AF, 0);
int maxRegions = readCharacteristic(CameraCharacteristics.CONTROL_MAX_REGIONS_AF,
0);
if (area != null && maxRegions > 0) {
holder.getBuilder(this).set(CaptureRequest.CONTROL_AF_REGIONS,
new MeteringRectangle[]{area});

@ -82,8 +82,8 @@ public class MeterAction extends ActionWrapper {
List<MeteringRectangle> areas = new ArrayList<>();
if (point != null) {
// This is a good Q/A. https://stackoverflow.com/a/33181620/4288782
// At first, the point is relative to the View system and does not account our own cropping.
// Will keep updating these two below.
// At first, the point is relative to the View system and does not account for
// our own cropping. We will keep updating these two below.
final PointF referencePoint = new PointF(point.x, point.y);
Size referenceSize = engine.getPreview().getSurfaceSize();
@ -132,7 +132,7 @@ public class MeterAction extends ActionWrapper {
Size previewStreamSize = engine.getPreviewStreamSize(Reference.VIEW);
Size previewSurfaceSize = referenceSize;
if (previewStreamSize == null) {
throw new IllegalStateException("getPreviewStreamSize should not be null at this point.");
throw new IllegalStateException("getPreviewStreamSize should not be null here.");
}
int referenceWidth = previewSurfaceSize.getWidth();
int referenceHeight = previewSurfaceSize.getHeight();
@ -142,13 +142,15 @@ public class MeterAction extends ActionWrapper {
if (previewStreamAspectRatio.toFloat() > previewSurfaceAspectRatio.toFloat()) {
// Stream is larger. The x coordinate must be increased: a touch on the left side
// of the surface is not on the left side of the stream (it's more to the right).
float scale = previewStreamAspectRatio.toFloat() / previewSurfaceAspectRatio.toFloat();
float scale = previewStreamAspectRatio.toFloat()
/ previewSurfaceAspectRatio.toFloat();
referencePoint.x += previewSurfaceSize.getWidth() * (scale - 1F) / 2F;
referenceWidth = Math.round(previewSurfaceSize.getWidth() * scale);
} else {
// Stream is taller. The y coordinate must be increased: a touch on the top side
// of the surface is not on the top side of the stream (it's a bit lower).
float scale = previewSurfaceAspectRatio.toFloat() / previewStreamAspectRatio.toFloat();
float scale = previewSurfaceAspectRatio.toFloat()
/ previewStreamAspectRatio.toFloat();
referencePoint.y += previewSurfaceSize.getHeight() * (scale - 1F) / 2F;
referenceHeight = Math.round(previewSurfaceSize.getHeight() * scale);
}
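A worked example with made-up numbers: take a 1000x1000 surface (aspect ratio 1.0), a stream ratio of 2.0, and a touch on the surface's left edge.

float scale = 2.0f / 1.0f;                      // stream ratio / surface ratio
float x = 0f;                                   // touch on the surface's left edge
x += 1000 * (scale - 1f) / 2f;                  // x == 500
int referenceWidth = Math.round(1000 * scale);  // referenceWidth == 2000
// The point now sits 25% into the full stream width: "more to the right", as above.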
@ -169,7 +171,8 @@ public class MeterAction extends ActionWrapper {
@SuppressWarnings("SuspiciousNameCombination")
@NonNull
private Size applyPreviewToSensorRotation(@NonNull Size referenceSize, @NonNull PointF referencePoint) {
private Size applyPreviewToSensorRotation(@NonNull Size referenceSize,
@NonNull PointF referencePoint) {
// Not elegant, but the sin/cos way was failing for some reason.
int angle = engine.getAngles().offset(Reference.SENSOR, Reference.VIEW, Axis.ABSOLUTE);
boolean flip = angle % 180 != 0;
@ -194,12 +197,13 @@ public class MeterAction extends ActionWrapper {
}
@NonNull
private Size applyCropRegionCoordinates(@NonNull Size referenceSize, @NonNull PointF referencePoint) {
private Size applyCropRegionCoordinates(@NonNull Size referenceSize,
@NonNull PointF referencePoint) {
// The input point and size refer to the stream rect.
// The stream rect is part of the 'crop region', as described below.
// https://source.android.com/devices/camera/camera3_crop_reprocess.html
Rect cropRect = holder.getBuilder(this).get(CaptureRequest.SCALER_CROP_REGION);
// For now, we don't care about x and y position. Rect should be non-null, but let's be safe.
// For now, we don't care about x and y position. Rect should not be null, but let's be safe.
int cropRectWidth = cropRect == null ? referenceSize.getWidth() : cropRect.width();
int cropRectHeight = cropRect == null ? referenceSize.getHeight() : cropRect.height();
// The stream is always centered inside the crop region, and one of the dimensions
@ -210,16 +214,19 @@ public class MeterAction extends ActionWrapper {
}
@NonNull
private Size applyActiveArrayCoordinates(@NonNull Size referenceSize, @NonNull PointF referencePoint) {
private Size applyActiveArrayCoordinates(@NonNull Size referenceSize,
@NonNull PointF referencePoint) {
// The input point and size refer to the scaler crop region.
// We can query for the crop region position inside the active array, so this is easy.
Rect cropRect = holder.getBuilder(this).get(CaptureRequest.SCALER_CROP_REGION);
referencePoint.x += cropRect == null ? 0 : cropRect.left;
referencePoint.y += cropRect == null ? 0 : cropRect.top;
// Finally, get the active rect width and height from characteristics.
Rect activeRect = holder.getCharacteristics(this).get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
Rect activeRect = holder.getCharacteristics(this)
.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
if (activeRect == null) { // Should never happen
activeRect = new Rect(0, 0, referenceSize.getWidth(), referenceSize.getHeight());
activeRect = new Rect(0, 0, referenceSize.getWidth(),
referenceSize.getHeight());
}
return new Size(activeRect.width(), activeRect.height());
}

@ -27,10 +27,13 @@ public class WhiteBalanceMeter extends BaseMeter {
@Override
protected boolean checkIsSupported(@NonNull ActionHolder holder) {
boolean isNotLegacy = readCharacteristic(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL, -1)
boolean isNotLegacy = readCharacteristic(
CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL, -1)
!= CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY;
Integer awbMode = holder.getBuilder(this).get(CaptureRequest.CONTROL_AWB_MODE);
boolean result = isNotLegacy && awbMode != null && awbMode == CaptureRequest.CONTROL_AWB_MODE_AUTO;
boolean result = isNotLegacy
&& awbMode != null
&& awbMode == CaptureRequest.CONTROL_AWB_MODE_AUTO;
LOG.i("checkIsSupported:", result);
return result;
}
@ -46,7 +49,8 @@ public class WhiteBalanceMeter extends BaseMeter {
@Override
protected void onStarted(@NonNull ActionHolder holder, @NonNull List<MeteringRectangle> areas) {
LOG.i("onStarted:", "with areas:", areas);
int maxRegions = readCharacteristic(CameraCharacteristics.CONTROL_MAX_REGIONS_AWB, 0);
int maxRegions = readCharacteristic(CameraCharacteristics.CONTROL_MAX_REGIONS_AWB,
0);
if (!areas.isEmpty() && maxRegions > 0) {
int max = Math.min(maxRegions, areas.size());
holder.getBuilder(this).set(CaptureRequest.CONTROL_AWB_REGIONS,
@ -56,7 +60,8 @@ public class WhiteBalanceMeter extends BaseMeter {
}
@Override
public void onCaptureCompleted(@NonNull ActionHolder holder, @NonNull CaptureRequest request,
public void onCaptureCompleted(@NonNull ActionHolder holder,
@NonNull CaptureRequest request,
@NonNull TotalCaptureResult result) {
super.onCaptureCompleted(holder, request, result);
Integer awbState = result.get(CaptureResult.CONTROL_AWB_STATE);

@ -30,9 +30,11 @@ public class WhiteBalanceReset extends BaseReset {
@Override
protected void onStarted(@NonNull ActionHolder holder, @Nullable MeteringRectangle area) {
LOG.w("onStarted:", "with area:", area);
int maxRegions = readCharacteristic(CameraCharacteristics.CONTROL_MAX_REGIONS_AWB, 0);
int maxRegions = readCharacteristic(CameraCharacteristics.CONTROL_MAX_REGIONS_AWB,
0);
if (area != null && maxRegions > 0) {
holder.getBuilder(this).set(CaptureRequest.CONTROL_AWB_REGIONS, new MeteringRectangle[]{area});
holder.getBuilder(this).set(CaptureRequest.CONTROL_AWB_REGIONS,
new MeteringRectangle[]{area});
holder.applyBuilder(this);
}
setState(STATE_COMPLETED);

@ -21,7 +21,7 @@ public enum Axis {
* This rotation axis takes into account the current
* {@link com.otaliastudios.cameraview.controls.Facing} value.
*
* - for {@link com.otaliastudios.cameraview.controls.Facing#BACK}, this equals {@link #ABSOLUTE}
* - for {@link com.otaliastudios.cameraview.controls.Facing#BACK}, this is {@link #ABSOLUTE}
* - for {@link com.otaliastudios.cameraview.controls.Facing#FRONT}, this is inverted
*/
RELATIVE_TO_SENSOR

@ -36,7 +36,8 @@ import java.nio.FloatBuffer;
* NOTE - the {@link android.graphics.SurfaceTexture} restrictions apply:
* We only support the {@link android.opengl.GLES11Ext#GL_TEXTURE_EXTERNAL_OES} texture target
* and it must be specified in the fragment shader as a samplerExternalOES texture.
* You also have to explicitly require the extension: see {@link #createDefaultFragmentShader(String)}.
* You also have to explicitly require the extension: see
* {@link #createDefaultFragmentShader(String)}.
*
*/
public abstract class BaseFilter implements Filter {
@ -44,31 +45,42 @@ public abstract class BaseFilter implements Filter {
private final static String TAG = BaseFilter.class.getSimpleName();
private final static CameraLogger LOG = CameraLogger.create(TAG);
private final static String DEFAULT_VERTEX_POSITION_NAME = "aPosition";
private final static String DEFAULT_VERTEX_TEXTURE_COORDINATE_NAME = "aTextureCoord";
private final static String DEFAULT_VERTEX_MVP_MATRIX_NAME = "uMVPMatrix";
private final static String DEFAULT_VERTEX_TRANSFORM_MATRIX_NAME = "uTexMatrix";
private final static String DEFAULT_FRAGMENT_TEXTURE_COORDINATE_NAME = "vTextureCoord";
@SuppressWarnings("WeakerAccess")
protected final static String DEFAULT_VERTEX_POSITION_NAME = "aPosition";
@SuppressWarnings("WeakerAccess")
protected final static String DEFAULT_VERTEX_TEXTURE_COORDINATE_NAME = "aTextureCoord";
@SuppressWarnings("WeakerAccess")
protected final static String DEFAULT_VERTEX_MVP_MATRIX_NAME = "uMVPMatrix";
@SuppressWarnings("WeakerAccess")
protected final static String DEFAULT_VERTEX_TRANSFORM_MATRIX_NAME = "uTexMatrix";
protected final static String DEFAULT_FRAGMENT_TEXTURE_COORDINATE_NAME = "vTextureCoord";
@NonNull
private static String createDefaultVertexShader(@NonNull String vertexPositionName,
private static String createDefaultVertexShader(
@NonNull String vertexPositionName,
@NonNull String vertexTextureCoordinateName,
@NonNull String vertexModelViewProjectionMatrixName,
@NonNull String vertexTransformMatrixName,
@NonNull String fragmentTextureCoordinateName) {
return "uniform mat4 "+vertexModelViewProjectionMatrixName+";\n" +
"uniform mat4 "+vertexTransformMatrixName+";\n" +
"attribute vec4 "+vertexPositionName+";\n" +
"attribute vec4 "+vertexTextureCoordinateName+";\n" +
"varying vec2 "+fragmentTextureCoordinateName+";\n" +
"void main() {\n" +
" gl_Position = "+vertexModelViewProjectionMatrixName+" * "+vertexPositionName+";\n" +
" vTextureCoord = ("+vertexTransformMatrixName+" * "+vertexTextureCoordinateName+").xy;\n" +
"}\n";
return "uniform mat4 "+vertexModelViewProjectionMatrixName+";\n"
+ "uniform mat4 "+vertexTransformMatrixName+";\n"
+ "attribute vec4 "+vertexPositionName+";\n"
+ "attribute vec4 "+vertexTextureCoordinateName+";\n"
+ "varying vec2 "+fragmentTextureCoordinateName+";\n"
+ "void main() {\n"
+ " gl_Position = " +vertexModelViewProjectionMatrixName+" * "
+ vertexPositionName+";\n"
+ " "+fragmentTextureCoordinateName+" = ("+vertexTransformMatrixName+" * "
+ vertexTextureCoordinateName+").xy;\n"
+ "}\n";
}
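With the default names above (aPosition, aTextureCoord, uMVPMatrix, uTexMatrix, vTextureCoord), the string returned by the new implementation expands to the following vertex shader, shown here as a comment for reference:

// uniform mat4 uMVPMatrix;
// uniform mat4 uTexMatrix;
// attribute vec4 aPosition;
// attribute vec4 aTextureCoord;
// varying vec2 vTextureCoord;
// void main() {
//     gl_Position = uMVPMatrix * aPosition;
//     vTextureCoord = (uTexMatrix * aTextureCoord).xy;
// }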
@NonNull
private static String createDefaultFragmentShader(@NonNull String fragmentTextureCoordinateName) {
private static String createDefaultFragmentShader(
@NonNull String fragmentTextureCoordinateName) {
return "#extension GL_OES_EGL_image_external : require\n"
+ "precision mediump float;\n"
+ "varying vec2 "+fragmentTextureCoordinateName+";\n"
@ -94,7 +106,7 @@ public abstract class BaseFilter implements Filter {
});
private int vertexModelViewProjectionMatrixLocation = -1;
private int vertexTranformMatrixLocation = -1;
private int vertexTransformMatrixLocation = -1;
private int vertexPositionLocation = -1;
private int vertexTextureCoordinateLocation = -1;
@VisibleForTesting int programHandle = -1;
@ -121,6 +133,7 @@ public abstract class BaseFilter implements Filter {
fragmentTextureCoordinateName);
}
@SuppressWarnings("WeakerAccess")
@NonNull
protected String createDefaultFragmentShader() {
return createDefaultFragmentShader(fragmentTextureCoordinateName);
@ -131,12 +144,16 @@ public abstract class BaseFilter implements Filter {
this.programHandle = programHandle;
vertexPositionLocation = GLES20.glGetAttribLocation(programHandle, vertexPositionName);
GlUtils.checkLocation(vertexPositionLocation, vertexPositionName);
vertexTextureCoordinateLocation = GLES20.glGetAttribLocation(programHandle, vertexTextureCoordinateName);
vertexTextureCoordinateLocation = GLES20.glGetAttribLocation(programHandle,
vertexTextureCoordinateName);
GlUtils.checkLocation(vertexTextureCoordinateLocation, vertexTextureCoordinateName);
vertexModelViewProjectionMatrixLocation = GLES20.glGetUniformLocation(programHandle, vertexModelViewProjectionMatrixName);
GlUtils.checkLocation(vertexModelViewProjectionMatrixLocation, vertexModelViewProjectionMatrixName);
vertexTranformMatrixLocation = GLES20.glGetUniformLocation(programHandle, vertexTransformMatrixName);
GlUtils.checkLocation(vertexTranformMatrixLocation, vertexTransformMatrixName);
vertexModelViewProjectionMatrixLocation = GLES20.glGetUniformLocation(programHandle,
vertexModelViewProjectionMatrixName);
GlUtils.checkLocation(vertexModelViewProjectionMatrixLocation,
vertexModelViewProjectionMatrixName);
vertexTransformMatrixLocation = GLES20.glGetUniformLocation(programHandle,
vertexTransformMatrixName);
GlUtils.checkLocation(vertexTransformMatrixLocation, vertexTransformMatrixName);
}
@Override
@ -145,7 +162,7 @@ public abstract class BaseFilter implements Filter {
vertexPositionLocation = -1;
vertexTextureCoordinateLocation = -1;
vertexModelViewProjectionMatrixLocation = -1;
vertexTranformMatrixLocation = -1;
vertexTransformMatrixLocation = -1;
}
@NonNull
@ -160,48 +177,53 @@ public abstract class BaseFilter implements Filter {
}
@Override
public void draw(float[] transformMatrix) {
public void draw(long timestampUs, float[] transformMatrix) {
if (programHandle == -1) {
LOG.w("Filter.draw() called after destroying the filter. This can happen rarely because of threading.");
LOG.w("Filter.draw() called after destroying the filter. " +
"This can happen rarely because of threading.");
} else {
onPreDraw(transformMatrix);
onDraw();
onPostDraw();
onPreDraw(timestampUs, transformMatrix);
onDraw(timestampUs);
onPostDraw(timestampUs);
}
}
protected void onPreDraw(float[] transformMatrix) {
protected void onPreDraw(long timestampUs, float[] transformMatrix) {
// Copy the model / view / projection matrix over.
GLES20.glUniformMatrix4fv(vertexModelViewProjectionMatrixLocation, 1, false, GlUtils.IDENTITY_MATRIX, 0);
GLES20.glUniformMatrix4fv(vertexModelViewProjectionMatrixLocation, 1,
false, GlUtils.IDENTITY_MATRIX, 0);
GlUtils.checkError("glUniformMatrix4fv");
// Copy the texture transformation matrix over.
GLES20.glUniformMatrix4fv(vertexTranformMatrixLocation, 1, false, transformMatrix, 0);
GLES20.glUniformMatrix4fv(vertexTransformMatrixLocation, 1,
false, transformMatrix, 0);
GlUtils.checkError("glUniformMatrix4fv");
// Enable the "aPosition" vertex attribute.
// Connect vertexBuffer to "aPosition".
GLES20.glEnableVertexAttribArray(vertexPositionLocation);
GlUtils.checkError("glEnableVertexAttribArray: " + vertexPositionLocation);
GLES20.glVertexAttribPointer(vertexPositionLocation, 2, GLES20.GL_FLOAT, false, 8, vertexPosition);
GLES20.glVertexAttribPointer(vertexPositionLocation, 2, GLES20.GL_FLOAT,
false, 8, vertexPosition);
GlUtils.checkError("glVertexAttribPointer");
// Enable the "aTextureCoord" vertex attribute.
// Connect texBuffer to "aTextureCoord".
GLES20.glEnableVertexAttribArray(vertexTextureCoordinateLocation);
GlUtils.checkError("glEnableVertexAttribArray");
GLES20.glVertexAttribPointer(vertexTextureCoordinateLocation, 2, GLES20.GL_FLOAT, false, 8, textureCoordinates);
GLES20.glVertexAttribPointer(vertexTextureCoordinateLocation, 2, GLES20.GL_FLOAT,
false, 8, textureCoordinates);
GlUtils.checkError("glVertexAttribPointer");
}
@SuppressWarnings("WeakerAccess")
protected void onDraw() {
protected void onDraw(long timestampUs) {
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
GlUtils.checkError("glDrawArrays");
}
@SuppressWarnings("WeakerAccess")
protected void onPostDraw() {
protected void onPostDraw(long timestampUs) {
GLES20.glDisableVertexAttribArray(vertexPositionLocation);
GLES20.glDisableVertexAttribArray(vertexTextureCoordinateLocation);
}
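Since timestampUs now flows through onPreDraw/onDraw/onPostDraw, a custom filter can drive time-based effects. A small hypothetical override (the "time" uniform and timeLocation are illustrative, not part of the library):

@Override
protected void onPreDraw(long timestampUs, float[] transformMatrix) {
    super.onPreDraw(timestampUs, transformMatrix);
    // Feed the frame timestamp, converted to seconds, into a hypothetical "time" uniform.
    float seconds = timestampUs / 1_000_000f;
    GLES20.glUniform1f(timeLocation, seconds);
    GlUtils.checkError("glUniform1f");
}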

@ -67,9 +67,10 @@ public interface Filter {
* Called to render the actual texture. The given transformation matrix
* should be applied.
*
* @param timestampUs timestamp in microseconds
* @param transformMatrix matrix
*/
void draw(float[] transformMatrix);
void draw(long timestampUs, float[] transformMatrix);
/**
* Called anytime the output size changes.

@ -26,8 +26,9 @@ import java.util.Map;
* New filters can be added at any time through {@link #addFilter(Filter)}, but currently they
* can not be removed because we can not easily ensure that they would be correctly released.
*
* The {@link MultiFilter} does also implement {@link OneParameterFilter} and {@link TwoParameterFilter},
* dispatching all the parameter calls to child filters, assuming they support it.
* The {@link MultiFilter} also implements {@link OneParameterFilter} and
* {@link TwoParameterFilter}, dispatching all the parameter calls to child filters,
* assuming they support it.
*
* There are some important technical caveats when using {@link MultiFilter}:
* - each child filter requires the allocation of a GL framebuffer. Using a large number of filters
@ -146,11 +147,17 @@ public class MultiFilter implements Filter, OneParameterFilter, TwoParameterFilt
state.textureId = textureArray[0];
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, state.textureId);
GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA, state.size.getWidth(), state.size.getHeight(), 0, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, null);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA,
state.size.getWidth(), state.size.getHeight(), 0, GLES20.GL_RGBA,
GLES20.GL_UNSIGNED_BYTE, null);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER,
GLES20.GL_LINEAR);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER,
GLES20.GL_LINEAR);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S,
GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T,
GLES20.GL_CLAMP_TO_EDGE);
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, state.framebufferId);
GLES20.glFramebufferTexture2D(GLES20.GL_FRAMEBUFFER,
GLES20.GL_COLOR_ATTACHMENT0,
@ -230,7 +237,7 @@ public class MultiFilter implements Filter, OneParameterFilter, TwoParameterFilt
}
@Override
public void draw(float[] transformMatrix) {
public void draw(long timestampUs, float[] transformMatrix) {
synchronized (lock) {
for (int i = 0; i < filters.size(); i++) {
boolean isFirst = i == 0;
@ -258,9 +265,9 @@ public class MultiFilter implements Filter, OneParameterFilter, TwoParameterFilt
// The first filter should apply all the transformations. Then,
// since they are applied, we should use a no-op matrix.
if (isFirst) {
filter.draw(transformMatrix);
filter.draw(timestampUs, transformMatrix);
} else {
filter.draw(GlUtils.IDENTITY_MATRIX);
filter.draw(timestampUs, GlUtils.IDENTITY_MATRIX);
}
// Set the input for the next cycle:

@ -24,7 +24,7 @@ public class AutoFixFilter extends BaseFilter implements OneParameterFilter {
+ "float hist_scale;\n"
+ "float density_offset;\n"
+ "float density_scale;\n"
+ "varying vec2 vTextureCoord;\n"
+ "varying vec2 "+DEFAULT_FRAGMENT_TEXTURE_COORDINATE_NAME+";\n"
+ "void main() {\n"
+ " shift_scale = " + (1.0f / 256f) + ";\n"
+ " hist_offset = " + (0.5f / 766f) + ";\n"
@ -32,7 +32,8 @@ public class AutoFixFilter extends BaseFilter implements OneParameterFilter {
+ " density_offset = " + (0.5f / 1024f) + ";\n"
+ " density_scale = " + (1023f / 1024f) + ";\n"
+ " const vec3 weights = vec3(0.33333, 0.33333, 0.33333);\n"
+ " vec4 color = texture2D(tex_sampler_0, vTextureCoord);\n"
+ " vec4 color = texture2D(tex_sampler_0, "+DEFAULT_FRAGMENT_TEXTURE_COORDINATE_NAME
+ ");\n"
+ " float energy = dot(color.rgb, weights);\n"
+ " float mask_value = energy - 0.5;\n"
+ " float alpha;\n"
@ -116,8 +117,8 @@ public class AutoFixFilter extends BaseFilter implements OneParameterFilter {
}
@Override
protected void onPreDraw(float[] transformMatrix) {
super.onPreDraw(transformMatrix);
protected void onPreDraw(long timestampUs, float[] transformMatrix) {
super.onPreDraw(timestampUs, transformMatrix);
GLES20.glUniform1f(scaleLocation, scale);
GlUtils.checkError("glUniform1f");
}

@ -11,9 +11,9 @@ public class BlackAndWhiteFilter extends BaseFilter {
private final static String FRAGMENT_SHADER = "#extension GL_OES_EGL_image_external : require\n"
+ "precision mediump float;\n"
+ "varying vec2 vTextureCoord;\n"
+ "varying vec2 "+DEFAULT_FRAGMENT_TEXTURE_COORDINATE_NAME+";\n"
+ "uniform samplerExternalOES sTexture;\n" + "void main() {\n"
+ " vec4 color = texture2D(sTexture, vTextureCoord);\n"
+ " vec4 color = texture2D(sTexture, "+DEFAULT_FRAGMENT_TEXTURE_COORDINATE_NAME+");\n"
+ " float colorR = (color.r + color.g + color.b) / 3.0;\n"
+ " float colorG = (color.r + color.g + color.b) / 3.0;\n"
+ " float colorB = (color.r + color.g + color.b) / 3.0;\n"

@ -17,9 +17,9 @@ public class BrightnessFilter extends BaseFilter implements OneParameterFilter {
+ "precision mediump float;\n"
+ "uniform samplerExternalOES sTexture;\n"
+ "uniform float brightness;\n"
+ "varying vec2 vTextureCoord;\n"
+ "varying vec2 "+DEFAULT_FRAGMENT_TEXTURE_COORDINATE_NAME+";\n"
+ "void main() {\n"
+ " vec4 color = texture2D(sTexture, vTextureCoord);\n"
+ " vec4 color = texture2D(sTexture, "+DEFAULT_FRAGMENT_TEXTURE_COORDINATE_NAME+");\n"
+ " gl_FragColor = brightness * color;\n"
+ "}\n";
@ -86,8 +86,8 @@ public class BrightnessFilter extends BaseFilter implements OneParameterFilter {
}
@Override
protected void onPreDraw(float[] transformMatrix) {
super.onPreDraw(transformMatrix);
protected void onPreDraw(long timestampUs, float[] transformMatrix) {
super.onPreDraw(timestampUs, transformMatrix);
GLES20.glUniform1f(brightnessLocation, brightness);
GlUtils.checkError("glUniform1f");
}

@ -17,9 +17,9 @@ public class ContrastFilter extends BaseFilter implements OneParameterFilter {
+ "precision mediump float;\n"
+ "uniform samplerExternalOES sTexture;\n"
+ "uniform float contrast;\n"
+ "varying vec2 vTextureCoord;\n"
+ "varying vec2 "+DEFAULT_FRAGMENT_TEXTURE_COORDINATE_NAME+";\n"
+ "void main() {\n"
+ " vec4 color = texture2D(sTexture, vTextureCoord);\n"
+ " vec4 color = texture2D(sTexture, "+DEFAULT_FRAGMENT_TEXTURE_COORDINATE_NAME+");\n"
+ " color -= 0.5;\n"
+ " color *= contrast;\n"
+ " color += 0.5;\n"
@ -88,8 +88,8 @@ public class ContrastFilter extends BaseFilter implements OneParameterFilter {
}
@Override
protected void onPreDraw(float[] transformMatrix) {
super.onPreDraw(transformMatrix);
protected void onPreDraw(long timestampUs, float[] transformMatrix) {
super.onPreDraw(timestampUs, transformMatrix);
GLES20.glUniform1f(contrastLocation, contrast);
GlUtils.checkError("glUniform1f");
}

@ -13,9 +13,9 @@ public class CrossProcessFilter extends BaseFilter {
private final static String FRAGMENT_SHADER = "#extension GL_OES_EGL_image_external : require\n"
+ "precision mediump float;\n"
+ "uniform samplerExternalOES sTexture;\n"
+ "varying vec2 vTextureCoord;\n"
+ "varying vec2 "+DEFAULT_FRAGMENT_TEXTURE_COORDINATE_NAME+";\n"
+ "void main() {\n"
+ " vec4 color = texture2D(sTexture, vTextureCoord);\n"
+ " vec4 color = texture2D(sTexture, "+DEFAULT_FRAGMENT_TEXTURE_COORDINATE_NAME+");\n"
+ " vec3 ncolor = vec3(0.0, 0.0, 0.0);\n"
+ " float value;\n"
+ " if (color.r < 0.5) {\n"

@ -22,7 +22,7 @@ public class DocumentaryFilter extends BaseFilter {
+ "float stepsize;\n"
+ "uniform float inv_max_dist;\n"
+ "uniform vec2 scale;\n"
+ "varying vec2 vTextureCoord;\n"
+ "varying vec2 "+DEFAULT_FRAGMENT_TEXTURE_COORDINATE_NAME+";\n"
+ "float rand(vec2 loc) {\n"
+ " float theta1 = dot(loc, vec2(0.9898, 0.233));\n"
+ " float theta2 = dot(loc, vec2(12.0, 78.0));\n"
@ -41,8 +41,8 @@ public class DocumentaryFilter extends BaseFilter {
+ " stepsize = " + 1.0f / 255.0f + ";\n"
// black white
+ " vec4 color = texture2D(sTexture, vTextureCoord);\n"
+ " float dither = rand(vTextureCoord + seed);\n"
+ " vec4 color = texture2D(sTexture, "+DEFAULT_FRAGMENT_TEXTURE_COORDINATE_NAME+");\n"
+ " float dither = rand("+DEFAULT_FRAGMENT_TEXTURE_COORDINATE_NAME+" + seed);\n"
+ " vec3 xform = clamp(2.0 * color.rgb, 0.0, 1.0);\n"
+ " vec3 temp = clamp(2.0 * (color.rgb + stepsize), 0.0, 1.0);\n"
+ " vec3 new_color = clamp(xform + (temp - xform) * (dither - 0.5), 0.0, 1.0);\n"
@ -50,7 +50,7 @@ public class DocumentaryFilter extends BaseFilter {
+ " float gray = dot(new_color, vec3(0.299, 0.587, 0.114));\n"
+ " new_color = vec3(gray, gray, gray);\n"
// vignette
+ " vec2 coord = vTextureCoord - vec2(0.5, 0.5);\n"
+ " vec2 coord = "+DEFAULT_FRAGMENT_TEXTURE_COORDINATE_NAME+" - vec2(0.5, 0.5);\n"
+ " float dist = length(coord * scale);\n"
+ " float lumen = 0.85 / (1.0 + exp((dist * inv_max_dist - 0.83) * 20.0)) + 0.15;\n"
+ " gl_FragColor = vec4(new_color * lumen, color.a);\n"
@ -93,8 +93,8 @@ public class DocumentaryFilter extends BaseFilter {
}
@Override
protected void onPreDraw(float[] transformMatrix) {
super.onPreDraw(transformMatrix);
protected void onPreDraw(long timestampUs, float[] transformMatrix) {
super.onPreDraw(timestampUs, transformMatrix);
float[] scale = new float[2];
if (mWidth > mHeight) {
scale[0] = 1f;

@ -20,9 +20,9 @@ public class DuotoneFilter extends BaseFilter implements TwoParameterFilter {
+ "uniform samplerExternalOES sTexture;\n"
+ "uniform vec3 first;\n"
+ "uniform vec3 second;\n"
+ "varying vec2 vTextureCoord;\n"
+ "varying vec2 "+DEFAULT_FRAGMENT_TEXTURE_COORDINATE_NAME+";\n"
+ "void main() {\n"
+ " vec4 color = texture2D(sTexture, vTextureCoord);\n"
+ " vec4 color = texture2D(sTexture, "+DEFAULT_FRAGMENT_TEXTURE_COORDINATE_NAME+");\n"
+ " float energy = (color.r + color.g + color.b) * 0.3333;\n"
+ " vec3 new_color = (1.0 - energy) * first + energy * second;\n"
+ " gl_FragColor = vec4(new_color.rgb, color.a);\n"
@ -137,8 +137,8 @@ public class DuotoneFilter extends BaseFilter implements TwoParameterFilter {
}
@Override
protected void onPreDraw(float[] transformMatrix) {
super.onPreDraw(transformMatrix);
protected void onPreDraw(long timestampUs, float[] transformMatrix) {
super.onPreDraw(timestampUs, transformMatrix);
float[] first = new float[]{
Color.red(mFirstColor) / 255f,
Color.green(mFirstColor) / 255f,

@ -18,10 +18,10 @@ public class FillLightFilter extends BaseFilter implements OneParameterFilter {
+ "uniform samplerExternalOES sTexture;\n"
+ "uniform float mult;\n"
+ "uniform float igamma;\n"
+ "varying vec2 vTextureCoord;\n"
+ "varying vec2 "+DEFAULT_FRAGMENT_TEXTURE_COORDINATE_NAME+";\n"
+ "void main() {\n"
+ " const vec3 color_weights = vec3(0.25, 0.5, 0.25);\n"
+ " vec4 color = texture2D(sTexture, vTextureCoord);\n"
+ " vec4 color = texture2D(sTexture, "+DEFAULT_FRAGMENT_TEXTURE_COORDINATE_NAME+");\n"
+ " float lightmask = dot(color.rgb, color_weights);\n"
+ " float backmask = (1.0 - lightmask);\n"
+ " vec3 ones = vec3(1.0, 1.0, 1.0);\n"
@ -95,8 +95,8 @@ public class FillLightFilter extends BaseFilter implements OneParameterFilter {
}
@Override
protected void onPreDraw(float[] transformMatrix) {
super.onPreDraw(transformMatrix);
protected void onPreDraw(long timestampUs, float[] transformMatrix) {
super.onPreDraw(timestampUs, transformMatrix);
float amount = 1.0f - strength;
float multiplier = 1.0f / (amount * 0.7f + 0.3f);
GLES20.glUniform1f(multiplierLocation, multiplier);

@ -15,11 +15,12 @@ public class GammaFilter extends BaseFilter implements OneParameterFilter {
private final static String FRAGMENT_SHADER = "#extension GL_OES_EGL_image_external : require\n"
+ "precision mediump float;\n"
+ "varying vec2 vTextureCoord;\n"
+ "varying vec2 "+DEFAULT_FRAGMENT_TEXTURE_COORDINATE_NAME+";\n"
+ "uniform samplerExternalOES sTexture;\n"
+ "uniform float gamma;\n"
+ "void main() {\n"
+ " vec4 textureColor = texture2D(sTexture, vTextureCoord);\n"
+ " vec4 textureColor = texture2D(sTexture, "+DEFAULT_FRAGMENT_TEXTURE_COORDINATE_NAME
+ ");\n"
+ " gl_FragColor = vec4(pow(textureColor.rgb, vec3(gamma)), textureColor.w);\n"
+ "}\n";
@ -82,8 +83,8 @@ public class GammaFilter extends BaseFilter implements OneParameterFilter {
}
@Override
protected void onPreDraw(float[] transformMatrix) {
super.onPreDraw(transformMatrix);
protected void onPreDraw(long timestampUs, float[] transformMatrix) {
super.onPreDraw(timestampUs, transformMatrix);
GLES20.glUniform1f(gammaLocation, gamma);
GlUtils.checkError("glUniform1f");
}

@ -19,7 +19,7 @@ public class GrainFilter extends BaseFilter implements OneParameterFilter {
private final static String FRAGMENT_SHADER = "#extension GL_OES_EGL_image_external : require\n"
+ "precision mediump float;\n"
+ "vec2 seed;\n"
+ "varying vec2 vTextureCoord;\n"
+ "varying vec2 "+DEFAULT_FRAGMENT_TEXTURE_COORDINATE_NAME+";\n"
+ "uniform samplerExternalOES tex_sampler_0;\n"
+ "uniform samplerExternalOES tex_sampler_1;\n"
+ "uniform float scale;\n"
@ -40,18 +40,25 @@ public class GrainFilter extends BaseFilter implements OneParameterFilter {
+ "void main() {\n"
+ " seed[0] = " + RANDOM.nextFloat() + ";\n"
+ " seed[1] = " + RANDOM.nextFloat() + ";\n"
+ " float noise = texture2D(tex_sampler_1, vTextureCoord + vec2(-stepX, -stepY)).r * 0.224;\n"
+ " noise += texture2D(tex_sampler_1, vTextureCoord + vec2(-stepX, stepY)).r * 0.224;\n"
+ " noise += texture2D(tex_sampler_1, vTextureCoord + vec2(stepX, -stepY)).r * 0.224;\n"
+ " noise += texture2D(tex_sampler_1, vTextureCoord + vec2(stepX, stepY)).r * 0.224;\n"
+ " float noise = texture2D(tex_sampler_1, "+DEFAULT_FRAGMENT_TEXTURE_COORDINATE_NAME
+ " + vec2(-stepX, -stepY)).r * 0.224;\n"
+ " noise += texture2D(tex_sampler_1, "+DEFAULT_FRAGMENT_TEXTURE_COORDINATE_NAME
+ " + vec2(-stepX, stepY)).r * 0.224;\n"
+ " noise += texture2D(tex_sampler_1, "+DEFAULT_FRAGMENT_TEXTURE_COORDINATE_NAME
+ " + vec2(stepX, -stepY)).r * 0.224;\n"
+ " noise += texture2D(tex_sampler_1, "+DEFAULT_FRAGMENT_TEXTURE_COORDINATE_NAME
+ " + vec2(stepX, stepY)).r * 0.224;\n"
+ " noise += 0.4448;\n"
+ " noise *= scale;\n"
+ " vec4 color = texture2D(tex_sampler_0, vTextureCoord);\n"
+ " vec4 color = texture2D(tex_sampler_0, "+DEFAULT_FRAGMENT_TEXTURE_COORDINATE_NAME
+ ");\n"
+ " float energy = 0.33333 * color.r + 0.33333 * color.g + 0.33333 * color.b;\n"
+ " float mask = (1.0 - sqrt(energy));\n"
+ " float weight = 1.0 - 1.333 * mask * noise;\n"
+ " gl_FragColor = vec4(color.rgb * weight, color.a);\n"
+ " gl_FragColor = gl_FragColor+vec4(rand(vTextureCoord + seed), rand(vTextureCoord + seed),rand(vTextureCoord + seed),1);\n"
+ " gl_FragColor = gl_FragColor+vec4(rand("+DEFAULT_FRAGMENT_TEXTURE_COORDINATE_NAME
+ " + seed), rand("+DEFAULT_FRAGMENT_TEXTURE_COORDINATE_NAME+" + seed),rand("
+ DEFAULT_FRAGMENT_TEXTURE_COORDINATE_NAME+" + seed),1);\n"
+ "}\n";
private float strength = 0.5f;
@ -61,7 +68,6 @@ public class GrainFilter extends BaseFilter implements OneParameterFilter {
private int stepXLocation = -1;
private int stepYLocation = -1;
@SuppressWarnings("WeakerAccess")
public GrainFilter() { }
@Override
@ -132,8 +138,8 @@ public class GrainFilter extends BaseFilter implements OneParameterFilter {
}
@Override
protected void onPreDraw(float[] transformMatrix) {
super.onPreDraw(transformMatrix);
protected void onPreDraw(long timestampUs, float[] transformMatrix) {
super.onPreDraw(timestampUs, transformMatrix);
GLES20.glUniform1f(strengthLocation, strength);
GlUtils.checkError("glUniform1f");
GLES20.glUniform1f(stepXLocation, 0.5f / width);

@ -12,9 +12,9 @@ public class GrayscaleFilter extends BaseFilter {
private final static String FRAGMENT_SHADER = "#extension GL_OES_EGL_image_external : require\n"
+ "precision mediump float;\n"
+ "uniform samplerExternalOES sTexture;\n"
+ "varying vec2 vTextureCoord;\n"
+ "varying vec2 "+DEFAULT_FRAGMENT_TEXTURE_COORDINATE_NAME+";\n"
+ "void main() {\n"
+ " vec4 color = texture2D(sTexture, vTextureCoord);\n"
+ " vec4 color = texture2D(sTexture, "+DEFAULT_FRAGMENT_TEXTURE_COORDINATE_NAME+");\n"
+ " float y = dot(color, vec4(0.299, 0.587, 0.114, 0));\n"
+ " gl_FragColor = vec4(y, y, y, color.a);\n"
+ "}\n";

@ -15,7 +15,7 @@ public class HueFilter extends BaseFilter implements OneParameterFilter {
private final static String FRAGMENT_SHADER = "#extension GL_OES_EGL_image_external : require\n"
+ "precision mediump float;\n"
+ "varying vec2 vTextureCoord;\n"
+ "varying vec2 "+DEFAULT_FRAGMENT_TEXTURE_COORDINATE_NAME+";\n"
+ "uniform samplerExternalOES sTexture;\n"
+ "uniform float hue;\n"
+ "void main() {\n"
@ -25,7 +25,7 @@ public class HueFilter extends BaseFilter implements OneParameterFilter {
+ " vec4 kYIQToR = vec4 (1.0, 0.9563, 0.6210, 0.0);\n"
+ " vec4 kYIQToG = vec4 (1.0, -0.2721, -0.6474, 0.0);\n"
+ " vec4 kYIQToB = vec4 (1.0, -1.1070, 1.7046, 0.0);\n"
+ " vec4 color = texture2D(sTexture, vTextureCoord);\n"
+ " vec4 color = texture2D(sTexture, "+DEFAULT_FRAGMENT_TEXTURE_COORDINATE_NAME+");\n"
+ " float YPrime = dot(color, kRGBToYPrime);\n"
+ " float I = dot(color, kRGBToI);\n"
+ " float Q = dot(color, kRGBToQ);\n"
@ -96,8 +96,8 @@ public class HueFilter extends BaseFilter implements OneParameterFilter {
}
@Override
protected void onPreDraw(float[] transformMatrix) {
super.onPreDraw(transformMatrix);
protected void onPreDraw(long timestampUs, float[] transformMatrix) {
super.onPreDraw(timestampUs, transformMatrix);
// map it on 360 degree circle
float shaderHue = ((hue - 45) / 45f + 0.5f) * -1;
GLES20.glUniform1f(hueLocation, shaderHue);

@ -11,10 +11,10 @@ public class InvertColorsFilter extends BaseFilter {
private final static String FRAGMENT_SHADER = "#extension GL_OES_EGL_image_external : require\n"
+ "precision mediump float;\n"
+ "varying vec2 vTextureCoord;\n"
+ "varying vec2 "+DEFAULT_FRAGMENT_TEXTURE_COORDINATE_NAME+";\n"
+ "uniform samplerExternalOES sTexture;\n"
+ "void main() {\n"
+ " vec4 color = texture2D(sTexture, vTextureCoord);\n"
+ " vec4 color = texture2D(sTexture, "+DEFAULT_FRAGMENT_TEXTURE_COORDINATE_NAME+");\n"
+ " float colorR = (1.0 - color.r) / 1.0;\n"
+ " float colorG = (1.0 - color.g) / 1.0;\n"
+ " float colorB = (1.0 - color.b) / 1.0;\n"

@ -7,7 +7,6 @@ import androidx.annotation.NonNull;
import com.otaliastudios.cameraview.filter.BaseFilter;
import com.otaliastudios.cameraview.internal.GlUtils;
import java.util.Date;
import java.util.Random;
/**
@ -25,7 +24,7 @@ public class LomoishFilter extends BaseFilter {
+ "uniform float inv_max_dist;\n"
+ "vec2 seed;\n"
+ "float stepsize;\n"
+ "varying vec2 vTextureCoord;\n"
+ "varying vec2 "+DEFAULT_FRAGMENT_TEXTURE_COORDINATE_NAME+";\n"
+ "float rand(vec2 loc) {\n"
+ " float theta1 = dot(loc, vec2(0.9898, 0.233));\n"
+ " float theta2 = dot(loc, vec2(12.0, 78.0));\n"
@ -44,18 +43,18 @@ public class LomoishFilter extends BaseFilter {
// sharpen
+ " vec3 nbr_color = vec3(0.0, 0.0, 0.0);\n"
+ " vec2 coord;\n"
+ " vec4 color = texture2D(sTexture, vTextureCoord);\n"
+ " coord.x = vTextureCoord.x - 0.5 * stepsizeX;\n"
+ " coord.y = vTextureCoord.y - stepsizeY;\n"
+ " vec4 color = texture2D(sTexture, "+DEFAULT_FRAGMENT_TEXTURE_COORDINATE_NAME+");\n"
+ " coord.x = "+DEFAULT_FRAGMENT_TEXTURE_COORDINATE_NAME+".x - 0.5 * stepsizeX;\n"
+ " coord.y = "+DEFAULT_FRAGMENT_TEXTURE_COORDINATE_NAME+".y - stepsizeY;\n"
+ " nbr_color += texture2D(sTexture, coord).rgb - color.rgb;\n"
+ " coord.x = vTextureCoord.x - stepsizeX;\n"
+ " coord.y = vTextureCoord.y + 0.5 * stepsizeY;\n"
+ " coord.x = "+DEFAULT_FRAGMENT_TEXTURE_COORDINATE_NAME+".x - stepsizeX;\n"
+ " coord.y = "+DEFAULT_FRAGMENT_TEXTURE_COORDINATE_NAME+".y + 0.5 * stepsizeY;\n"
+ " nbr_color += texture2D(sTexture, coord).rgb - color.rgb;\n"
+ " coord.x = vTextureCoord.x + stepsizeX;\n"
+ " coord.y = vTextureCoord.y - 0.5 * stepsizeY;\n"
+ " coord.x = "+DEFAULT_FRAGMENT_TEXTURE_COORDINATE_NAME+".x + stepsizeX;\n"
+ " coord.y = "+DEFAULT_FRAGMENT_TEXTURE_COORDINATE_NAME+".y - 0.5 * stepsizeY;\n"
+ " nbr_color += texture2D(sTexture, coord).rgb - color.rgb;\n"
+ " coord.x = vTextureCoord.x + stepsizeX;\n"
+ " coord.y = vTextureCoord.y + 0.5 * stepsizeY;\n"
+ " coord.x = "+DEFAULT_FRAGMENT_TEXTURE_COORDINATE_NAME+".x + stepsizeX;\n"
+ " coord.y = "+DEFAULT_FRAGMENT_TEXTURE_COORDINATE_NAME+".y + 0.5 * stepsizeY;\n"
+ " nbr_color += texture2D(sTexture, coord).rgb - color.rgb;\n"
+ " vec3 s_color = vec3(color.rgb + 0.3 * nbr_color);\n"
// cross process
@ -85,12 +84,12 @@ public class LomoishFilter extends BaseFilter {
+ " }\n"
+ " c_color.b = s_color.b * 0.5 + 0.25;\n"
// blackwhite
+ " float dither = rand(vTextureCoord + seed);\n"
+ " float dither = rand("+DEFAULT_FRAGMENT_TEXTURE_COORDINATE_NAME+" + seed);\n"
+ " vec3 xform = clamp((c_color.rgb - 0.15) * 1.53846, 0.0, 1.0);\n"
+ " vec3 temp = clamp((color.rgb + stepsize - 0.15) * 1.53846, 0.0, 1.0);\n"
+ " vec3 bw_color = clamp(xform + (temp - xform) * (dither - 0.5), 0.0, 1.0);\n"
// vignette
+ " coord = vTextureCoord - vec2(0.5, 0.5);\n"
+ " coord = "+DEFAULT_FRAGMENT_TEXTURE_COORDINATE_NAME+" - vec2(0.5, 0.5);\n"
+ " float dist = length(coord * scale);\n"
+ " float lumen = 0.85 / (1.0 + exp((dist * inv_max_dist - 0.73) * 20.0)) + 0.15;\n"
+ " gl_FragColor = vec4(bw_color * lumen, color.a);\n"
@ -142,8 +141,8 @@ public class LomoishFilter extends BaseFilter {
}
@Override
protected void onPreDraw(float[] transformMatrix) {
super.onPreDraw(transformMatrix);
protected void onPreDraw(long timestampUs, float[] transformMatrix) {
super.onPreDraw(timestampUs, transformMatrix);
float[] scale = new float[2];
if (width > height) {
scale[0] = 1f;

@ -12,8 +12,8 @@ public class PosterizeFilter extends BaseFilter {
private final static String FRAGMENT_SHADER = "#extension GL_OES_EGL_image_external : require\n"
+ "precision mediump float;\n"
+ "uniform samplerExternalOES sTexture;\n"
+ "varying vec2 vTextureCoord;\n" + "void main() {\n"
+ " vec4 color = texture2D(sTexture, vTextureCoord);\n"
+ "varying vec2 "+DEFAULT_FRAGMENT_TEXTURE_COORDINATE_NAME+";\n" + "void main() {\n"
+ " vec4 color = texture2D(sTexture, "+DEFAULT_FRAGMENT_TEXTURE_COORDINATE_NAME+");\n"
+ " vec3 pcolor;\n"
+ " pcolor.r = (color.r >= 0.5) ? 0.75 : 0.25;\n"
+ " pcolor.g = (color.g >= 0.5) ? 0.75 : 0.25;\n"

@ -20,21 +20,24 @@ public class SaturationFilter extends BaseFilter implements OneParameterFilter {
+ "uniform vec3 exponents;\n"
+ "float shift;\n"
+ "vec3 weights;\n"
+ "varying vec2 vTextureCoord;\n"
+ "varying vec2 "+DEFAULT_FRAGMENT_TEXTURE_COORDINATE_NAME+";\n"
+ "void main() {\n"
+ " weights[0] = " + 2f / 8f + ";\n"
+ " weights[1] = " + 5f / 8f + ";\n"
+ " weights[2] = " + 1f / 8f + ";\n"
+ " shift = " + 1.0f / 255.0f + ";\n"
+ " vec4 oldcolor = texture2D(sTexture, vTextureCoord);\n"
+ " vec4 oldcolor = texture2D(sTexture, "+DEFAULT_FRAGMENT_TEXTURE_COORDINATE_NAME
+ ");\n"
+ " float kv = dot(oldcolor.rgb, weights) + shift;\n"
+ " vec3 new_color = scale * oldcolor.rgb + (1.0 - scale) * kv;\n"
+ " gl_FragColor = vec4(new_color, oldcolor.a);\n"
+ " vec4 color = texture2D(sTexture, vTextureCoord);\n"
+ " vec4 color = texture2D(sTexture, "+DEFAULT_FRAGMENT_TEXTURE_COORDINATE_NAME
+ ");\n"
+ " float de = dot(color.rgb, weights);\n"
+ " float inv_de = 1.0 / de;\n"
+ " vec3 verynew_color = de * pow(color.rgb * inv_de, exponents);\n"
+ " float max_color = max(max(max(verynew_color.r, verynew_color.g), verynew_color.b), 1.0);\n"
+ " float max_color = max(max(max(verynew_color.r, verynew_color.g), "
+ "verynew_color.b), 1.0);\n"
+ " gl_FragColor = gl_FragColor+vec4(verynew_color / max_color, color.a);\n"
+ "}\n";
@ -103,8 +106,8 @@ public class SaturationFilter extends BaseFilter implements OneParameterFilter {
}
@Override
protected void onPreDraw(float[] transformMatrix) {
super.onPreDraw(transformMatrix);
protected void onPreDraw(long timestampUs, float[] transformMatrix) {
super.onPreDraw(timestampUs, transformMatrix);
if (scale > 0.0f) {
GLES20.glUniform1f(scaleLocation, 0F);
GlUtils.checkError("glUniform1f");

@ -13,7 +13,7 @@ public class SepiaFilter extends BaseFilter {
+ "precision mediump float;\n"
+ "uniform samplerExternalOES sTexture;\n"
+ "mat3 matrix;\n"
+ "varying vec2 vTextureCoord;\n"
+ "varying vec2 "+DEFAULT_FRAGMENT_TEXTURE_COORDINATE_NAME+";\n"
+ "void main() {\n"
+ " matrix[0][0]=" + 805.0f / 2048.0f + ";\n"
+ " matrix[0][1]=" + 715.0f / 2048.0f + ";\n"
@ -24,7 +24,7 @@ public class SepiaFilter extends BaseFilter {
+ " matrix[2][0]=" + 387.0f / 2048.0f + ";\n"
+ " matrix[2][1]=" + 344.0f / 2048.0f + ";\n"
+ " matrix[2][2]=" + 268.0f / 2048.0f + ";\n"
+ " vec4 color = texture2D(sTexture, vTextureCoord);\n"
+ " vec4 color = texture2D(sTexture, "+DEFAULT_FRAGMENT_TEXTURE_COORDINATE_NAME+");\n"
+ " vec3 new_color = min(matrix * color.rgb, 1.0);\n"
+ " gl_FragColor = vec4(new_color.rgb, color.a);\n"
+ "}\n";

@ -19,22 +19,22 @@ public class SharpnessFilter extends BaseFilter implements OneParameterFilter {
+ "uniform float scale;\n"
+ "uniform float stepsizeX;\n"
+ "uniform float stepsizeY;\n"
+ "varying vec2 vTextureCoord;\n"
+ "varying vec2 "+DEFAULT_FRAGMENT_TEXTURE_COORDINATE_NAME+";\n"
+ "void main() {\n"
+ " vec3 nbr_color = vec3(0.0, 0.0, 0.0);\n"
+ " vec2 coord;\n"
+ " vec4 color = texture2D(sTexture, vTextureCoord);\n"
+ " coord.x = vTextureCoord.x - 0.5 * stepsizeX;\n"
+ " coord.y = vTextureCoord.y - stepsizeY;\n"
+ " vec4 color = texture2D(sTexture, "+DEFAULT_FRAGMENT_TEXTURE_COORDINATE_NAME+");\n"
+ " coord.x = "+DEFAULT_FRAGMENT_TEXTURE_COORDINATE_NAME+".x - 0.5 * stepsizeX;\n"
+ " coord.y = "+DEFAULT_FRAGMENT_TEXTURE_COORDINATE_NAME+".y - stepsizeY;\n"
+ " nbr_color += texture2D(sTexture, coord).rgb - color.rgb;\n"
+ " coord.x = vTextureCoord.x - stepsizeX;\n"
+ " coord.y = vTextureCoord.y + 0.5 * stepsizeY;\n"
+ " coord.x = "+DEFAULT_FRAGMENT_TEXTURE_COORDINATE_NAME+".x - stepsizeX;\n"
+ " coord.y = "+DEFAULT_FRAGMENT_TEXTURE_COORDINATE_NAME+".y + 0.5 * stepsizeY;\n"
+ " nbr_color += texture2D(sTexture, coord).rgb - color.rgb;\n"
+ " coord.x = vTextureCoord.x + stepsizeX;\n"
+ " coord.y = vTextureCoord.y - 0.5 * stepsizeY;\n"
+ " coord.x = "+DEFAULT_FRAGMENT_TEXTURE_COORDINATE_NAME+".x + stepsizeX;\n"
+ " coord.y = "+DEFAULT_FRAGMENT_TEXTURE_COORDINATE_NAME+".y - 0.5 * stepsizeY;\n"
+ " nbr_color += texture2D(sTexture, coord).rgb - color.rgb;\n"
+ " coord.x = vTextureCoord.x + stepsizeX;\n"
+ " coord.y = vTextureCoord.y + 0.5 * stepsizeY;\n"
+ " coord.x = "+DEFAULT_FRAGMENT_TEXTURE_COORDINATE_NAME+".x + stepsizeX;\n"
+ " coord.y = "+DEFAULT_FRAGMENT_TEXTURE_COORDINATE_NAME+".y + 0.5 * stepsizeY;\n"
+ " nbr_color += texture2D(sTexture, coord).rgb - color.rgb;\n"
+ " gl_FragColor = vec4(color.rgb - 2.0 * scale * nbr_color, color.a);\n"
+ "}\n";
@ -116,8 +116,8 @@ public class SharpnessFilter extends BaseFilter implements OneParameterFilter {
}
@Override
protected void onPreDraw(float[] transformMatrix) {
super.onPreDraw(transformMatrix);
protected void onPreDraw(long timestampUs, float[] transformMatrix) {
super.onPreDraw(timestampUs, transformMatrix);
GLES20.glUniform1f(scaleLocation, scale);
GlUtils.checkError("glUniform1f");
GLES20.glUniform1f(stepSizeXLocation, 1.0F / width);

@ -17,9 +17,9 @@ public class TemperatureFilter extends BaseFilter implements OneParameterFilter
+ "precision mediump float;\n"
+ "uniform samplerExternalOES sTexture;\n"
+ "uniform float scale;\n"
+ "varying vec2 vTextureCoord;\n"
+ "varying vec2 "+DEFAULT_FRAGMENT_TEXTURE_COORDINATE_NAME+";\n"
+ "void main() {\n"
+ " vec4 color = texture2D(sTexture, vTextureCoord);\n"
+ " vec4 color = texture2D(sTexture, "+DEFAULT_FRAGMENT_TEXTURE_COORDINATE_NAME+");\n"
+ " vec3 new_color = color.rgb;\n"
+ " new_color.r = color.r + color.r * ( 1.0 - color.r) * scale;\n"
+ " new_color.b = color.b - color.b * ( 1.0 - color.b) * scale;\n"
@ -94,8 +94,8 @@ public class TemperatureFilter extends BaseFilter implements OneParameterFilter
}
@Override
protected void onPreDraw(float[] transformMatrix) {
super.onPreDraw(transformMatrix);
protected void onPreDraw(long timestampUs, float[] transformMatrix) {
super.onPreDraw(timestampUs, transformMatrix);
GLES20.glUniform1f(scaleLocation, scale);
GlUtils.checkError("glUniform1f");
}

@ -21,12 +21,12 @@ public class TintFilter extends BaseFilter implements OneParameterFilter {
+ "uniform samplerExternalOES sTexture;\n"
+ "uniform vec3 tint;\n"
+ "vec3 color_ratio;\n"
+ "varying vec2 vTextureCoord;\n"
+ "varying vec2 "+DEFAULT_FRAGMENT_TEXTURE_COORDINATE_NAME+";\n"
+ "void main() {\n"
+ " color_ratio[0] = " + 0.21f + ";\n"
+ " color_ratio[1] = " + 0.71f + ";\n"
+ " color_ratio[2] = " + 0.07f + ";\n"
+ " vec4 color = texture2D(sTexture, vTextureCoord);\n"
+ " vec4 color = texture2D(sTexture, "+DEFAULT_FRAGMENT_TEXTURE_COORDINATE_NAME+");\n"
+ " float avg_color = dot(color_ratio, color.rgb);\n"
+ " vec3 new_color = min(0.8 * avg_color + 0.2 * tint, 1.0);\n"
+ " gl_FragColor = vec4(new_color.rgb, color.a);\n" + "}\n";
@ -91,8 +91,8 @@ public class TintFilter extends BaseFilter implements OneParameterFilter {
}
@Override
protected void onPreDraw(float[] transformMatrix) {
super.onPreDraw(transformMatrix);
protected void onPreDraw(long timestampUs, float[] transformMatrix) {
super.onPreDraw(timestampUs, transformMatrix);
float[] channels = new float[]{
Color.red(tint) / 255f,
Color.green(tint) / 255f,

@ -21,13 +21,14 @@ public class VignetteFilter extends BaseFilter implements TwoParameterFilter {
+ "uniform float inv_max_dist;\n"
+ "uniform float shade;\n"
+ "uniform vec2 scale;\n"
+ "varying vec2 vTextureCoord;\n"
+ "varying vec2 "+DEFAULT_FRAGMENT_TEXTURE_COORDINATE_NAME+";\n"
+ "void main() {\n"
+ " const float slope = 20.0;\n"
+ " vec2 coord = vTextureCoord - vec2(0.5, 0.5);\n"
+ " vec2 coord = "+DEFAULT_FRAGMENT_TEXTURE_COORDINATE_NAME+" - vec2(0.5, 0.5);\n"
+ " float dist = length(coord * scale);\n"
+ " float lumen = shade / (1.0 + exp((dist * inv_max_dist - range) * slope)) + (1.0 - shade);\n"
+ " vec4 color = texture2D(sTexture, vTextureCoord);\n"
+ " float lumen = shade / (1.0 + exp((dist * inv_max_dist - range) * slope)) "
+ "+ (1.0 - shade);\n"
+ " vec4 color = texture2D(sTexture, "+DEFAULT_FRAGMENT_TEXTURE_COORDINATE_NAME+");\n"
+ " gl_FragColor = vec4(color.rgb * lumen, color.a);\n"
+ "}\n";
@ -144,8 +145,8 @@ public class VignetteFilter extends BaseFilter implements TwoParameterFilter {
}
@Override
protected void onPreDraw(float[] transformMatrix) {
super.onPreDraw(transformMatrix);
protected void onPreDraw(long timestampUs, float[] transformMatrix) {
super.onPreDraw(timestampUs, transformMatrix);
float[] scale = new float[2];
if (mWidth > mHeight) {
scale[0] = 1f;

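Note on the filter changes above: every filter now receives the frame timestamp in onPreDraw. A minimal sketch of a custom filter written against the new signature follows; the shader, the "pulse" uniform and the effect itself are invented for illustration, only the overridden methods and the DEFAULT_FRAGMENT_TEXTURE_COORDINATE_NAME constant come from this codebase.
import android.opengl.GLES20;
import androidx.annotation.NonNull;
import com.otaliastudios.cameraview.filter.BaseFilter;
import com.otaliastudios.cameraview.internal.GlUtils;
public class PulseFilter extends BaseFilter {
    // Illustrative shader: scales brightness by a time-driven "pulse" uniform.
    private final static String FRAGMENT_SHADER = "#extension GL_OES_EGL_image_external : require\n"
            + "precision mediump float;\n"
            + "uniform samplerExternalOES sTexture;\n"
            + "uniform float pulse;\n"
            + "varying vec2 " + DEFAULT_FRAGMENT_TEXTURE_COORDINATE_NAME + ";\n"
            + "void main() {\n"
            + "  vec4 color = texture2D(sTexture, " + DEFAULT_FRAGMENT_TEXTURE_COORDINATE_NAME
            + ");\n"
            + "  gl_FragColor = vec4(color.rgb * pulse, color.a);\n"
            + "}\n";
    private int pulseLocation = -1;
    @NonNull
    @Override
    public String getFragmentShader() {
        return FRAGMENT_SHADER;
    }
    @Override
    public void onCreate(int programHandle) {
        super.onCreate(programHandle);
        pulseLocation = GLES20.glGetUniformLocation(programHandle, "pulse");
    }
    @Override
    protected void onPreDraw(long timestampUs, float[] transformMatrix) {
        super.onPreDraw(timestampUs, transformMatrix);
        // The new timestampUs parameter keeps time-based effects in sync with the frame being drawn.
        float pulse = 0.75f + 0.25f * (float) Math.sin(timestampUs / 1e6);
        GLES20.glUniform1f(pulseLocation, pulse);
        GlUtils.checkError("glUniform1f");
    }
}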
@ -44,8 +44,8 @@ public class Frame {
if (!hasContent()) {
LOG.e("Frame is dead! time:", mTime, "lastTime:", mLastTime);
throw new RuntimeException("You should not access a released frame. " +
"If this frame was passed to a FrameProcessor, you can only use its contents synchronously, " +
"for the duration of the process() method.");
"If this frame was passed to a FrameProcessor, you can only use its contents " +
"synchronously, for the duration of the process() method.");
}
}

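In practice, the contract enforced by this message looks like the following on the consumer side; addFrameProcessor and getData() are assumed from the public API and are not part of this diff.
cameraView.addFrameProcessor(new FrameProcessor() {
    @Override
    public void process(@NonNull Frame frame) {
        // Frame contents are only valid synchronously, inside process().
        byte[] data = frame.getData(); // assumed getter
        byte[] copy = data.clone();    // copy if the data must outlive process()
        // Once process() returns (or release() is called), accessing the frame
        // throws the "Frame is dead" RuntimeException above.
    }
});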
@ -28,18 +28,20 @@ import java.util.concurrent.LinkedBlockingQueue;
* For both byte buffers and frames to get back to the FrameManager pool, all you have to do
* is call {@link Frame#release()} when done.
*
* Other than this, the FrameManager can work in two modes, depending on whether a {@link BufferCallback}
* is passed to the constructor. The modes changes the buffer behavior.
* Other than this, the FrameManager can work in two modes, depending on whether a
* {@link BufferCallback} is passed to the constructor. The modes change the buffer behavior.

*
* 1. {@link #BUFFER_MODE_DISPATCH}: in this mode, as soon as we have a buffer, it is dispatched to
* the {@link BufferCallback}. The callback should then fill the buffer, and finally call
* {@link #getFrame(byte[], long, int)} to receive a frame.
* This is used for Camera1.
*
* 2. {@link #BUFFER_MODE_ENQUEUE}: in this mode, the manager internally keeps a queue of byte buffers,
* instead of handing them to the callback. The users can ask for buffers through {@link #getBuffer()}.
* This buffer can be filled with data and used to get a frame {@link #getFrame(byte[], long, int)},
* or, in case it was not filled, returned to the queue using {@link #onBufferUnused(byte[])}.
* 2. {@link #BUFFER_MODE_ENQUEUE}: in this mode, the manager internally keeps a queue of byte
* buffers, instead of handing them to the callback. The users can ask for buffers through
* {@link #getBuffer()}.
* This buffer can be filled with data and used to get a frame
* {@link #getFrame(byte[], long, int)}, or, in case it was not filled, returned to the queue
* using {@link #onBufferUnused(byte[])}.
* This is used for Camera2.
*/
public class FrameManager {
@ -149,7 +151,8 @@ public class FrameManager {
@Nullable
public byte[] getBuffer() {
if (mBufferMode != BUFFER_MODE_ENQUEUE) {
throw new IllegalStateException("Can't call getBuffer() when not in BUFFER_MODE_ENQUEUE.");
throw new IllegalStateException("Can't call getBuffer() " +
"when not in BUFFER_MODE_ENQUEUE.");
}
return mBufferQueue.poll();
}
@ -161,7 +164,8 @@ public class FrameManager {
*/
public void onBufferUnused(@NonNull byte[] buffer) {
if (mBufferMode != BUFFER_MODE_ENQUEUE) {
throw new IllegalStateException("Can't call onBufferUnused() when not in BUFFER_MODE_ENQUEUE.");
throw new IllegalStateException("Can't call onBufferUnused() " +
"when not in BUFFER_MODE_ENQUEUE.");
}
if (isSetUp()) {
@ -188,7 +192,8 @@ public class FrameManager {
@NonNull
public Frame getFrame(@NonNull byte[] data, long time, int rotation) {
if (!isSetUp()) {
throw new IllegalStateException("Can't call getFrame() after releasing or before setUp.");
throw new IllegalStateException("Can't call getFrame() after releasing " +
"or before setUp.");
}
Frame frame = mFrameQueue.poll();

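A rough sketch of the BUFFER_MODE_ENQUEUE flow described in the javadoc above, using only the methods of this class; the image-copy step and the downstream consumer are placeholders.
byte[] buffer = frameManager.getBuffer();
if (buffer != null) {
    boolean filled = copyCurrentImageInto(buffer); // placeholder: fill with the latest camera data
    if (filled) {
        Frame frame = frameManager.getFrame(buffer, timestampUs, rotation);
        dispatch(frame); // placeholder: whoever consumes it calls frame.release() when done
    } else {
        frameManager.onBufferUnused(buffer); // untouched buffer goes back to the queue
    }
}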
@ -8,7 +8,8 @@ import androidx.annotation.NonNull;
/**
* Gestures listen to finger gestures over the {@link CameraView} bounds and can be mapped
* to one or more camera controls using XML attributes or {@link CameraView#mapGesture(Gesture, GestureAction)}.
* to one or more camera controls using XML attributes or {@link CameraView#mapGesture(Gesture,
* GestureAction)}.
*
* Not every gesture can control a certain action. For example, pinch gestures can only control
* continuous values, such as zoom or AE correction. Single point gestures, on the other hand,

@ -2,6 +2,7 @@ package com.otaliastudios.cameraview.gesture;
import com.otaliastudios.cameraview.CameraView;
import com.otaliastudios.cameraview.filter.Filter;
import com.otaliastudios.cameraview.markers.AutoFocusMarker;
import androidx.annotation.NonNull;
@ -63,7 +64,7 @@ public enum GestureAction {
EXPOSURE_CORRECTION(4, GestureType.CONTINUOUS),
/**
* Controls the first parameter of a real-time {@link com.otaliastudios.cameraview.filter.Filter},
* Controls the first parameter of a real-time {@link Filter},
* if it accepts one. This action can be mapped to continuous gestures:
*
* - {@link Gesture#PINCH}
@ -73,7 +74,7 @@ public enum GestureAction {
FILTER_CONTROL_1(5, GestureType.CONTINUOUS),
/**
* Controls the second parameter of a real-time {@link com.otaliastudios.cameraview.filter.Filter},
* Controls the second parameter of a real-time {@link Filter},
* if it accepts one. This action can be mapped to continuous gestures:
*
* - {@link Gesture#PINCH}

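For reference, these actions can be mapped from code as well as from the XML attributes parsed below, e.g.:
cameraView.mapGesture(Gesture.PINCH, GestureAction.FILTER_CONTROL_1);
cameraView.mapGesture(Gesture.SCROLL_HORIZONTAL, GestureAction.EXPOSURE_CORRECTION);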
@ -18,11 +18,17 @@ public class GestureParser {
private int verticalScrollAction;
public GestureParser(@NonNull TypedArray array) {
this.tapAction = array.getInteger(R.styleable.CameraView_cameraGestureTap, GestureAction.DEFAULT_TAP.value());
this.longTapAction = array.getInteger(R.styleable.CameraView_cameraGestureLongTap, GestureAction.DEFAULT_LONG_TAP.value());
this.pinchAction = array.getInteger(R.styleable.CameraView_cameraGesturePinch, GestureAction.DEFAULT_PINCH.value());
this.horizontalScrollAction = array.getInteger(R.styleable.CameraView_cameraGestureScrollHorizontal, GestureAction.DEFAULT_SCROLL_HORIZONTAL.value());
this.verticalScrollAction = array.getInteger(R.styleable.CameraView_cameraGestureScrollVertical, GestureAction.DEFAULT_SCROLL_VERTICAL.value());
tapAction = array.getInteger(R.styleable.CameraView_cameraGestureTap,
GestureAction.DEFAULT_TAP.value());
longTapAction = array.getInteger(R.styleable.CameraView_cameraGestureLongTap,
GestureAction.DEFAULT_LONG_TAP.value());
pinchAction = array.getInteger(R.styleable.CameraView_cameraGesturePinch,
GestureAction.DEFAULT_PINCH.value());
horizontalScrollAction = array.getInteger(
R.styleable.CameraView_cameraGestureScrollHorizontal,
GestureAction.DEFAULT_SCROLL_HORIZONTAL.value());
verticalScrollAction = array.getInteger(R.styleable.CameraView_cameraGestureScrollVertical,
GestureAction.DEFAULT_SCROLL_VERTICAL.value());
}
private GestureAction get(int which) {

@ -19,7 +19,8 @@ public class PinchGestureFinder extends GestureFinder {
public PinchGestureFinder(@NonNull Controller controller) {
super(controller, 2);
setGesture(Gesture.PINCH);
mDetector = new ScaleGestureDetector(controller.getContext(), new ScaleGestureDetector.SimpleOnScaleGestureListener() {
mDetector = new ScaleGestureDetector(controller.getContext(),
new ScaleGestureDetector.SimpleOnScaleGestureListener() {
@Override
public boolean onScale(ScaleGestureDetector detector) {
mNotify = true;

@ -21,10 +21,14 @@ public class ScrollGestureFinder extends GestureFinder {
public ScrollGestureFinder(final @NonNull Controller controller) {
super(controller, 2);
mDetector = new GestureDetector(controller.getContext(), new GestureDetector.SimpleOnGestureListener() {
mDetector = new GestureDetector(controller.getContext(),
new GestureDetector.SimpleOnGestureListener() {
@Override
public boolean onScroll(MotionEvent e1, MotionEvent e2, float distanceX, float distanceY) {
public boolean onScroll(MotionEvent e1,
MotionEvent e2,
float distanceX,
float distanceY) {
boolean horizontal;
LOG.i("onScroll:", "distanceX="+distanceX, "distanceY="+distanceY);
if (e1 == null || e2 == null) return false; // Got some crashes about this.
@ -39,7 +43,8 @@ public class ScrollGestureFinder extends GestureFinder {
horizontal = getGesture() == Gesture.SCROLL_HORIZONTAL;
}
getPoint(1).set(e2.getX(), e2.getY());
mFactor = horizontal ? (distanceX / controller.getWidth()) : (distanceY / controller.getHeight());
mFactor = horizontal ? (distanceX / controller.getWidth())
: (distanceY / controller.getHeight());
mFactor = horizontal ? -mFactor : mFactor; // When vertical, up = positive
mNotify = true;
return true;

@ -16,7 +16,8 @@ public class TapGestureFinder extends GestureFinder {
public TapGestureFinder(@NonNull Controller controller) {
super(controller, 1);
mDetector = new GestureDetector(controller.getContext(), new GestureDetector.SimpleOnGestureListener() {
mDetector = new GestureDetector(controller.getContext(),
new GestureDetector.SimpleOnGestureListener() {
@Override
public boolean onSingleTapUp(MotionEvent e) {

@ -20,9 +20,18 @@ import java.util.List;
/**
* Checks the capabilities of device encoders and adjusts parameters to ensure
* that they'll be supported by the final encoder.
* This can choose the encoder in two ways, based on the mode flag:
*
* 1. {@link #MODE_TAKE_FIRST}
* Methods in this class might throw either a {@link VideoException} or an {@link AudioException}.
* Throwing this exception means that the given parameters will not be supported by the encoder
* for that type, and cannot be tweaked to be.
*
* When this happens, users should retry with a new {@link DeviceEncoders} instance, but with
* the audio or video encoder offset incremented. This offset is the position in the encoder list
* from which we'll choose the potential encoder.
*
* This class will inspect the encoders list in two ways, based on the mode flag:
*
* 1. {@link #MODE_RESPECT_ORDER}
*
* Chooses the encoder as the first one that matches the given mime type.
* This is what {@link android.media.MediaCodec#createEncoderByType(String)} does,
@ -40,11 +49,12 @@ import java.util.List;
* - MediaCodecList (https://android.googlesource.com/platform/frameworks/av/+/master/media/libstagefright/MediaCodecList.cpp#322)
*
* To be fair, what {@link android.media.MediaRecorder} does is actually choose the first one
* that configures itself without errors. We currently do not offer this option here. TODO
* that configures itself without errors. We currently do not offer this option here.
* TODO add a tryConfigure() step that throws AudioException/VideoException?
*
* 2. {@link #MODE_PREFER_HARDWARE}
*
* This takes the list - as ordered by the vendor - and just sorts it such that hardware encoders
* This takes the list - as ordered by the vendor - and just sorts it so that hardware encoders
* are preferred over software ones. It's questionable whether this is good or not. Some vendors
* might forget to put hardware encoders first in the list, some others might put poor hardware
* encoders on the bottom of the list on purpose.
@ -56,9 +66,29 @@ public class DeviceEncoders {
@VisibleForTesting static boolean ENABLED = Build.VERSION.SDK_INT >= 21;
public final static int MODE_TAKE_FIRST = 0;
public final static int MODE_RESPECT_ORDER = 0;
public final static int MODE_PREFER_HARDWARE = 1;
/**
* Exception thrown when trying to find appropriate values
* for a video encoder.
*/
public class VideoException extends RuntimeException {
private VideoException(@NonNull String message) {
super(message);
}
}
/**
* Exception thrown when trying to find appropriate values
* for an audio encoder. Currently never thrown.
*/
public class AudioException extends RuntimeException {
private AudioException(@NonNull String message) {
super(message);
}
}
@SuppressWarnings("FieldCanBeLocal")
private final MediaCodecInfo mVideoEncoder;
@SuppressWarnings("FieldCanBeLocal")
@ -67,17 +97,23 @@ public class DeviceEncoders {
private final MediaCodecInfo.AudioCapabilities mAudioCapabilities;
@SuppressLint("NewApi")
public DeviceEncoders(@NonNull String videoType, @NonNull String audioType, int mode) {
public DeviceEncoders(int mode,
@NonNull String videoType,
@NonNull String audioType,
int videoOffset,
int audioOffset) {
// We could still get a list of MediaCodecInfo for API >= 16, but it seems that the APIs
// for querying the availability of a specified MediaFormat were only added in 21 anyway.
if (ENABLED) {
List<MediaCodecInfo> encoders = getDeviceEncoders();
mVideoEncoder = findDeviceEncoder(encoders, videoType, mode);
mVideoEncoder = findDeviceEncoder(encoders, videoType, mode, videoOffset);
LOG.i("Enabled. Found video encoder:", mVideoEncoder.getName());
mAudioEncoder = findDeviceEncoder(encoders, audioType, mode);
mAudioEncoder = findDeviceEncoder(encoders, audioType, mode, audioOffset);
LOG.i("Enabled. Found audio encoder:", mAudioEncoder.getName());
mVideoCapabilities = mVideoEncoder.getCapabilitiesForType(videoType).getVideoCapabilities();
mAudioCapabilities = mAudioEncoder.getCapabilitiesForType(audioType).getAudioCapabilities();
mVideoCapabilities = mVideoEncoder.getCapabilitiesForType(videoType)
.getVideoCapabilities();
mAudioCapabilities = mAudioEncoder.getCapabilitiesForType(audioType)
.getAudioCapabilities();
} else {
mVideoEncoder = null;
mAudioEncoder = null;
@ -123,7 +159,7 @@ public class DeviceEncoders {
/**
* Finds the encoder we'll be using, depending on the given mode flag:
* - {@link #MODE_TAKE_FIRST} will just take the first of the list
* - {@link #MODE_RESPECT_ORDER} will just take the first of the list
* - {@link #MODE_PREFER_HARDWARE} will prefer hardware encoders
* Throws if we find no encoder for this type.
*
@ -135,7 +171,10 @@ public class DeviceEncoders {
@SuppressLint("NewApi")
@NonNull
@VisibleForTesting
MediaCodecInfo findDeviceEncoder(@NonNull List<MediaCodecInfo> encoders, @NonNull String mimeType, int mode) {
MediaCodecInfo findDeviceEncoder(@NonNull List<MediaCodecInfo> encoders,
@NonNull String mimeType,
int mode,
int offset) {
ArrayList<MediaCodecInfo> results = new ArrayList<>();
for (MediaCodecInfo encoder : encoders) {
String[] types = encoder.getSupportedTypes();
@ -160,10 +199,12 @@ public class DeviceEncoders {
}
});
}
if (results.isEmpty()) {
if (results.size() < offset + 1) {
// This should not be a VideoException or AudioException - we want the process
// to crash here.
throw new RuntimeException("No encoders for type:" + mimeType);
}
return results.get(0);
return results.get(offset);
}
/**
@ -199,19 +240,19 @@ public class DeviceEncoders {
// It's still possible that we're BELOW the lower.
if (!mVideoCapabilities.getSupportedWidths().contains(width)) {
throw new RuntimeException("Width not supported after adjustment." +
throw new VideoException("Width not supported after adjustment." +
" Desired:" + width +
" Range:" + mVideoCapabilities.getSupportedWidths());
}
if (!mVideoCapabilities.getSupportedHeights().contains(height)) {
throw new RuntimeException("Height not supported after adjustment." +
throw new VideoException("Height not supported after adjustment." +
" Desired:" + height +
" Range:" + mVideoCapabilities.getSupportedHeights());
}
// It's still possible that we're unsupported for other reasons.
if (!mVideoCapabilities.isSizeSupported(width, height)) {
throw new RuntimeException("Size not supported for unknown reason." +
throw new VideoException("Size not supported for unknown reason." +
" Might be an aspect ratio issue." +
" Desired size:" + new Size(width, height));
}
@ -231,7 +272,9 @@ public class DeviceEncoders {
public int getSupportedVideoBitRate(int bitRate) {
if (!ENABLED) return bitRate;
int newBitRate = mVideoCapabilities.getBitrateRange().clamp(bitRate);
LOG.i("getSupportedVideoBitRate -", "inputRate:", bitRate, "adjustedRate:", newBitRate);
LOG.i("getSupportedVideoBitRate -",
"inputRate:", bitRate,
"adjustedRate:", newBitRate);
return newBitRate;
}
@ -248,7 +291,9 @@ public class DeviceEncoders {
int newFrameRate = (int) (double) mVideoCapabilities
.getSupportedFrameRatesFor(size.getWidth(), size.getHeight())
.clamp((double) frameRate);
LOG.i("getSupportedVideoFrameRate -", "inputRate:", frameRate, "adjustedRate:", newFrameRate);
LOG.i("getSupportedVideoFrameRate -",
"inputRate:", frameRate,
"adjustedRate:", newFrameRate);
return newFrameRate;
}
@ -263,7 +308,9 @@ public class DeviceEncoders {
public int getSupportedAudioBitRate(int bitRate) {
if (!ENABLED) return bitRate;
int newBitRate = mAudioCapabilities.getBitrateRange().clamp(bitRate);
LOG.i("getSupportedAudioBitRate -", "inputRate:", bitRate, "adjustedRate:", newBitRate);
LOG.i("getSupportedAudioBitRate -",
"inputRate:", bitRate,
"adjustedRate:", newBitRate);
return newBitRate;
}

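The retry pattern described in the new DeviceEncoders javadoc, sketched from the consumer side; mime types and starting bit rates are illustrative.
int videoOffset = 0;
int audioOffset = 0;
int videoBitRate = 2000000; // illustrative starting values
int audioBitRate = 64000;
boolean configured = false;
while (!configured) {
    DeviceEncoders encoders = new DeviceEncoders(DeviceEncoders.MODE_RESPECT_ORDER,
            "video/avc", "audio/mp4a-latm", videoOffset, audioOffset);
    try {
        videoBitRate = encoders.getSupportedVideoBitRate(videoBitRate);
        audioBitRate = encoders.getSupportedAudioBitRate(audioBitRate);
        // Size and frame rate are adjusted through the other getSupportedXxx helpers,
        // which are the ones that may throw VideoException after adjustment.
        configured = true;
    } catch (DeviceEncoders.VideoException e) {
        videoOffset++; // try the next video encoder in the list
    } catch (DeviceEncoders.AudioException e) {
        audioOffset++; // next audio encoder (currently never thrown)
    }
}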
@ -26,7 +26,8 @@ public class GlUtils {
public static void checkError(@NonNull String opName) {
int error = GLES20.glGetError();
if (error != GLES20.GL_NO_ERROR) {
String message = LOG.e("Error during", opName, "glError 0x", Integer.toHexString(error));
String message = LOG.e("Error during", opName, "glError 0x",
Integer.toHexString(error));
throw new RuntimeException(message);
}
}
@ -47,7 +48,8 @@ public class GlUtils {
int[] compiled = new int[1];
GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled, 0);
if (compiled[0] == 0) {
LOG.e("Could not compile shader", shaderType, ":", GLES20.glGetShaderInfoLog(shader));
LOG.e("Could not compile shader", shaderType, ":",
GLES20.glGetShaderInfoLog(shader));
GLES20.glDeleteShader(shader);
shader = 0;
}

@ -43,7 +43,8 @@ public class GridLinesLayout extends View {
super(context, attrs);
horiz = new ColorDrawable(gridColor);
vert = new ColorDrawable(gridColor);
width = TypedValue.applyDimension(TypedValue.COMPLEX_UNIT_DIP, 0.9f, context.getResources().getDisplayMetrics());
width = TypedValue.applyDimension(TypedValue.COMPLEX_UNIT_DIP, 0.9f,
context.getResources().getDisplayMetrics());
}
@Override

@ -36,8 +36,9 @@ import com.otaliastudios.cameraview.preview.RendererThread;
* 9B. Publish overlays to GL texture using overlaySurfaceTexture.updateTexImage()
* 9C. GLES - draw textureId
* 9D. GLES - draw overlayTextureId
* Both textures are drawn on the same EGLWindow and we manage to overlay them with {@link GLES20#GL_BLEND}.
* This is the whole procedure and it works for the majority of devices and situations.
* Both textures are drawn on the same EGLWindow and we manage to overlay them with
* {@link GLES20#GL_BLEND}. This is the whole procedure and it works for the majority of
* devices and situations.
*
* ISSUE DESCRIPTION
* The #514 issue can be described as follows:
@ -66,7 +67,8 @@ import com.otaliastudios.cameraview.preview.RendererThread;
*
* So a pixel format conversion takes place, when it shouldn't happen. We can't solve this:
* - It is done at a lower level, there's no real way for us to specify the surface format, but
* it seems that these devices will prefer a YUV format and misunderstand our {@link Canvas} pixels.
* it seems that these devices will prefer a YUV format and misunderstand our {@link Canvas}
* pixels.
* - There is also no way to identify which devices will present this issue, it's a bug somewhere
* and it is implementation specific.
*
@ -74,16 +76,18 @@ import com.otaliastudios.cameraview.preview.RendererThread;
* Hard to say why, but using this class fixes the described issue.
* It seems that when the {@link SurfaceTexture#updateTexImage()} method for the overlay surface
* is called - the one that updates the overlayTextureId - we must ensure that the CURRENTLY
* BOUND TEXTURE ID IS NOT 0. The id we choose to apply might be cameraTextureId, or overlayTextureId,
* or probably whatever other valid id, and should be passed to {@link #Issue514Workaround(int)}.
* BOUND TEXTURE ID IS NOT 0. The id we choose to apply might be cameraTextureId, or
* overlayTextureId, or probably whatever other valid id, and should be passed to
* {@link #Issue514Workaround(int)}.
* [Tested with cameraTextureId and overlayTextureId: both do work.]
* [Tested with invalid id like 9999. This won't work.]
*
* This makes no sense, since overlaySurfaceTexture.updateTexImage() is setting it to overlayTextureId
* anyway, but it fixes the issue. Specifically, after any draw operation with {@link EglViewport},
* the bound texture is reset to 0 so this must be undone here. We offer:
* This makes no sense, since overlaySurfaceTexture.updateTexImage() is setting it to
* overlayTextureId anyway, but it fixes the issue. Specifically, after any draw operation with
* {@link EglViewport}, the bound texture is reset to 0 so this must be undone here. We offer:
*
* - {@link #beforeOverlayUpdateTexImage()} to be called before the {@link SurfaceTexture#updateTexImage()} call
* - {@link #beforeOverlayUpdateTexImage()} to be called before the
* {@link SurfaceTexture#updateTexImage()} call
* - {@link #end()} to release and bring things back to normal state
*
* Since updating and rendering can happen on different threads with a shared EGL context,
@ -93,9 +97,9 @@ import com.otaliastudios.cameraview.preview.RendererThread;
* REFERENCES
* https://github.com/natario1/CameraView/issues/514
* https://android.googlesource.com/platform/frameworks/native/+/5c1139f/libs/gui/SurfaceTexture.cpp
* I can see here that SurfaceTexture does indeed call glBindTexture with the same parameters whenever
* updateTexImage is called, but it also does other gl stuff first. This other gl stuff might be
* breaking when we don't have a bound texture on some specific hardware implementation.
* I can see here that SurfaceTexture does indeed call glBindTexture with the same parameters
* whenever updateTexImage is called, but it also does other gl stuff first. This other gl stuff
* might be breaking when we don't have a bound texture on some specific hardware implementation.
*/
public class Issue514Workaround {

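The workaround described above reduces to this call pattern; the surface and texture variable names are illustrative.
// Setup: any valid (non-zero) texture id works; cameraTextureId and overlayTextureId were both tested.
Issue514Workaround issue514Workaround = new Issue514Workaround(cameraTextureId);
// On the thread that updates the overlay texture:
issue514Workaround.beforeOverlayUpdateTexImage();
overlaySurfaceTexture.updateTexImage();
overlaySurfaceTexture.getTransformMatrix(overlayTransform);
// When tearing down:
issue514Workaround.end();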
@ -229,7 +229,8 @@ public class EglBaseSurface {
int height = getHeight();
ByteBuffer buf = ByteBuffer.allocateDirect(width * height * 4);
buf.order(ByteOrder.LITTLE_ENDIAN);
GLES20.glReadPixels(0, 0, width, height, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, buf);
GLES20.glReadPixels(0, 0, width, height, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE,
buf);
GlUtils.checkError("glReadPixels");
buf.rewind();

@ -172,8 +172,8 @@ public final class EglCore {
}
EGLConfig[] configs = new EGLConfig[1];
int[] numConfigs = new int[1];
if (!EGL14.eglChooseConfig(mEGLDisplay, attribList, 0, configs, 0, configs.length,
numConfigs, 0)) {
if (!EGL14.eglChooseConfig(mEGLDisplay, attribList, 0, configs, 0,
configs.length, numConfigs, 0)) {
Log.w(TAG, "unable to find RGB8888 / " + version + " EGLConfig");
return null;
}
@ -210,7 +210,8 @@ public final class EglCore {
// the EGL state, so if a surface or context is still current on another
// thread we can't fully release it here. Exceptions thrown from here
// are quietly discarded. Complain in the log file.
Log.w(TAG, "WARNING: EglCore was not explicitly released -- state may be leaked");
Log.w(TAG, "WARNING: EglCore was not explicitly released! " +
"State may be leaked");
release();
}
} finally {

@ -35,7 +35,8 @@ public class EglViewport {
}
private void createProgram() {
mProgramHandle = GlUtils.createProgram(mFilter.getVertexShader(), mFilter.getFragmentShader());
mProgramHandle = GlUtils.createProgram(mFilter.getVertexShader(),
mFilter.getFragmentShader());
mFilter.onCreate(mProgramHandle);
}
@ -57,10 +58,14 @@ public class EglViewport {
GLES20.glBindTexture(mTextureTarget, texId);
GlUtils.checkError("glBindTexture " + texId);
GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER,
GLES20.GL_NEAREST);
GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER,
GLES20.GL_LINEAR);
GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S,
GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T,
GLES20.GL_CLAMP_TO_EDGE);
GlUtils.checkError("glTexParameter");
return texId;
@ -72,7 +77,7 @@ public class EglViewport {
mPendingFilter = filter;
}
public void drawFrame(int textureId, float[] textureMatrix) {
public void drawFrame(long timestampUs, int textureId, float[] textureMatrix) {
if (mPendingFilter != null) {
release();
mFilter = mPendingFilter;
@ -89,7 +94,7 @@ public class EglViewport {
GLES20.glBindTexture(mTextureTarget, textureId);
// Draw.
mFilter.draw(textureMatrix);
mFilter.draw(timestampUs, textureMatrix);
// Release.
GLES20.glBindTexture(mTextureTarget, 0);

@ -77,25 +77,4 @@ public class EglWindowSurface extends EglBaseSurface {
mSurface = null;
}
}
/**
* Recreate the EGLSurface, using the new EglBase. The caller should have already
* freed the old EGLSurface with releaseEglSurface().
* <p>
* This is useful when we want to update the EGLSurface associated with a Surface.
* For example, if we want to share with a different EGLContext, which can only
* be done by tearing down and recreating the context. (That's handled by the caller;
* this just creates a new EGLSurface for the Surface we were handed earlier.)
* <p>
* If the previous EGLSurface isn't fully destroyed, e.g. it's still current on a
* context somewhere, the create call will fail with complaints from the Surface
* about already being connected.
*/
public void recreate(EglCore newEglCore) {
if (mSurface == null) {
throw new RuntimeException("not yet implemented for SurfaceTexture");
}
mEglCore = newEglCore; // switch to new context
createWindowSurface(mSurface); // create new surface
}
}

@ -31,14 +31,16 @@ public class CamcorderProfiles {
sizeToProfileMap.put(new Size(1280, 720), CamcorderProfile.QUALITY_720P);
sizeToProfileMap.put(new Size(1920, 1080), CamcorderProfile.QUALITY_1080P);
if (Build.VERSION.SDK_INT >= 21) {
sizeToProfileMap.put(new Size(3840, 2160), CamcorderProfile.QUALITY_2160P);
sizeToProfileMap.put(new Size(3840, 2160),
CamcorderProfile.QUALITY_2160P);
}
}
/**
* Returns a CamcorderProfile that's somewhat coherent with the target size,
* to ensure we get acceptable video/audio parameters for MediaRecorders (most notably the bitrate).
* to ensure we get acceptable video/audio parameters for MediaRecorders
* (most notably the bitrate).
*
* @param cameraId the camera2 id
* @param targetSize the target video size
@ -57,7 +59,8 @@ public class CamcorderProfiles {
/**
* Returns a CamcorderProfile that's somewhat coherent with the target size,
* to ensure we get acceptable video/audio parameters for MediaRecorders (most notably the bitrate).
* to ensure we get acceptable video/audio parameters for MediaRecorders
* (most notably the bitrate).
*
* @param cameraId the camera id
* @param targetSize the target video size

@ -67,7 +67,8 @@ public class ImageHelper {
}
if (pixelStride == 2 && rowStride == width && uBuffer.get(0) == vBuffer.get(1)) {
// maybe V an U planes overlap as per NV21, which means vBuffer[1] is alias of uBuffer[0]
// maybe V and U planes overlap as per NV21, which means vBuffer[1]
// is an alias of uBuffer[0]
byte savePixel = vBuffer.get(1);
vBuffer.put(1, (byte)0);
if (uBuffer.get(0) == 0) {

@ -12,7 +12,8 @@ import android.view.WindowManager;
/**
* Helps with keeping track of both device orientation (which changes when device is rotated)
* and the display offset (which depends on the activity orientation wrt the device default orientation).
* and the display offset (which depends on the activity orientation
* wrt the device default orientation).
*/
public class OrientationHelper {
@ -35,7 +36,8 @@ public class OrientationHelper {
*/
public OrientationHelper(@NonNull Context context, @NonNull Callback callback) {
mCallback = callback;
mListener = new OrientationEventListener(context.getApplicationContext(), SensorManager.SENSOR_DELAY_NORMAL) {
mListener = new OrientationEventListener(context.getApplicationContext(),
SensorManager.SENSOR_DELAY_NORMAL) {
@SuppressWarnings("ConstantConditions")
@Override
@ -66,7 +68,9 @@ public class OrientationHelper {
* @param context a context
*/
public void enable(@NonNull Context context) {
Display display = ((WindowManager) context.getSystemService(Context.WINDOW_SERVICE)).getDefaultDisplay();
Display display = ((WindowManager) context
.getSystemService(Context.WINDOW_SERVICE))
.getDefaultDisplay();
switch (display.getRotation()) {
case Surface.ROTATION_0: mDisplayOffset = 0; break;
case Surface.ROTATION_90: mDisplayOffset = 90; break;

@ -92,14 +92,16 @@ public class Pool<T> {
synchronized (lock) {
LOG.v("RECYCLE - Recycling item.", this);
if (--activeCount < 0) {
throw new IllegalStateException("Trying to recycle an item which makes activeCount < 0." +
"This means that this or some previous items being recycled were not coming from " +
"this pool, or some item was recycled more than once. " + this);
throw new IllegalStateException("Trying to recycle an item which makes " +
"activeCount < 0. This means that this or some previous items being " +
"recycled were not coming from this pool, or some item was recycled " +
"more than once. " + this);
}
if (!queue.offer(item)) {
throw new IllegalStateException("Trying to recycle an item while the queue is full. " +
"This means that this or some previous items being recycled were not coming from " +
"this pool, or some item was recycled more than once. " + this);
throw new IllegalStateException("Trying to recycle an item while the queue " +
"is full. This means that this or some previous items being recycled " +
"were not coming from this pool, or some item was recycled " +
"more than once. " + this);
}
}
}
@ -121,7 +123,6 @@ public class Pool<T> {
*
* @return count
*/
@SuppressWarnings("WeakerAccess")
public final int count() {
synchronized (lock) {
return activeCount() + recycledCount();

@ -19,7 +19,9 @@ public class RotationHelper {
* @param rotation desired angle
* @return a new yuv array
*/
public static byte[] rotate(@NonNull final byte[] yuv, @NonNull final Size size, final int rotation) {
public static byte[] rotate(@NonNull final byte[] yuv,
@NonNull final Size size,
final int rotation) {
if (rotation == 0) return yuv;
if (rotation % 90 != 0 || rotation < 0 || rotation > 270) {
throw new IllegalArgumentException("0 <= rotation < 360, rotation % 90 == 0");

@ -23,8 +23,10 @@ import java.util.concurrent.Executor;
*/
public class WorkerHandler {
private final static CameraLogger LOG = CameraLogger.create(WorkerHandler.class.getSimpleName());
private final static ConcurrentHashMap<String, WeakReference<WorkerHandler>> sCache = new ConcurrentHashMap<>(4);
private final static CameraLogger LOG
= CameraLogger.create(WorkerHandler.class.getSimpleName());
private final static ConcurrentHashMap<String, WeakReference<WorkerHandler>> sCache
= new ConcurrentHashMap<>(4);
private final static String FALLBACK_NAME = "FallbackCameraThread";
@ -50,7 +52,8 @@ public class WorkerHandler {
} else {
// Cleanup the old thread before creating a new one
cached.destroy();
LOG.w("get:", "Thread reference found, but not alive or interrupted. Removing.", name);
LOG.w("get:", "Thread reference found, but not alive or interrupted.",
"Removing.", name);
sCache.remove(name);
}
} else {

@ -32,7 +32,9 @@ public interface AutoFocusMarker extends Marker {
* @param successful whether the operation succeeded
* @param point coordinates
*/
void onAutoFocusEnd(@NonNull AutoFocusTrigger trigger, boolean successful, @NonNull PointF point);
void onAutoFocusEnd(@NonNull AutoFocusTrigger trigger,
boolean successful,
@NonNull PointF point);
}

@ -27,7 +27,8 @@ public class DefaultAutoFocusMarker implements AutoFocusMarker {
@Nullable
@Override
public View onAttach(@NonNull Context context, @NonNull ViewGroup container) {
View view = LayoutInflater.from(context).inflate(R.layout.cameraview_layout_focus_marker, container, false);
View view = LayoutInflater.from(context).inflate(R.layout.cameraview_layout_focus_marker,
container, false);
mContainer = view.findViewById(R.id.focusMarkerContainer);
mFill = view.findViewById(R.id.focusMarkerFill);
return view;
@ -49,18 +50,22 @@ public class DefaultAutoFocusMarker implements AutoFocusMarker {
}
@Override
public void onAutoFocusEnd(@NonNull AutoFocusTrigger trigger, boolean successful, @NonNull PointF point) {
public void onAutoFocusEnd(@NonNull AutoFocusTrigger trigger,
boolean successful,
@NonNull PointF point) {
if (trigger == AutoFocusTrigger.METHOD) return;
if (successful) {
animate(mContainer, 1, 0, 500, 0, null);
animate(mFill, 1, 0, 500, 0, null);
} else {
animate(mFill, 0, 0, 500, 0, null);
animate(mContainer, 1.36f, 1, 500, 0, new AnimatorListenerAdapter() {
animate(mContainer, 1.36f, 1, 500, 0,
new AnimatorListenerAdapter() {
@Override
public void onAnimationEnd(Animator animation) {
super.onAnimationEnd(animation);
animate(mContainer, 1.36f, 0, 200, 1000, null);
animate(mContainer, 1.36f, 0, 200, 1000,
null);
}
});
}

@ -24,7 +24,7 @@ import java.nio.Buffer;
*
* - Provides a {@link Canvas} to be passed to the Overlay
* - Lets the overlay draw there: {@link #draw(Overlay.Target)}
* - Renders this into the current EGL window: {@link #render()}
* - Renders this into the current EGL window: {@link #render(long)}
* - Applies the {@link Issue514Workaround} the correct way
*
* In the future we might want to use a different approach than {@link EglViewport},
@ -93,8 +93,10 @@ public class OverlayDrawer {
* Renders the drawn content in the current EGL surface, assuming there is one.
* Should be called after {@link #draw(Overlay.Target)} and any {@link #getTransform()}
* modification.
*
* @param timestampUs frame timestamp
*/
public void render() {
public void render(long timestampUs) {
// Enable blending
// Reference http://www.learnopengles.com/android-lesson-five-an-introduction-to-blending/
GLES20.glDisable(GLES20.GL_CULL_FACE);
@ -103,7 +105,7 @@ public class OverlayDrawer {
GLES20.glBlendFunc(GLES20.GL_SRC_ALPHA, GLES20.GL_ONE_MINUS_SRC_ALPHA);
synchronized (mIssue514WorkaroundLock) {
mViewport.drawFrame(mTextureId, mTransform);
mViewport.drawFrame(timestampUs, mTextureId, mTransform);
}
}

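With the new signature, the typical renderer-thread sequence for this class becomes the following; the Target constant is assumed from the layout attributes below and the matrix tweak is optional.
overlayDrawer.draw(Overlay.Target.PICTURE_SNAPSHOT); // let the overlay draw on our surface
float[] transform = overlayDrawer.getTransform();
android.opengl.Matrix.scaleM(transform, 0, 1f, -1f, 1f); // optional: adjust before rendering
overlayDrawer.render(timestampUs); // timestampUs: timestamp of the frame being drawn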
@ -183,9 +183,12 @@ public class OverlayLayout extends FrameLayout implements Overlay {
super(context, attrs);
TypedArray a = context.obtainStyledAttributes(attrs, R.styleable.CameraView_Layout);
try {
drawOnPreview = a.getBoolean(R.styleable.CameraView_Layout_layout_drawOnPreview, false);
drawOnPictureSnapshot = a.getBoolean(R.styleable.CameraView_Layout_layout_drawOnPictureSnapshot, false);
drawOnVideoSnapshot = a.getBoolean(R.styleable.CameraView_Layout_layout_drawOnVideoSnapshot, false);
drawOnPreview = a.getBoolean(R.styleable.CameraView_Layout_layout_drawOnPreview,
false);
drawOnPictureSnapshot = a.getBoolean(
R.styleable.CameraView_Layout_layout_drawOnPictureSnapshot, false);
drawOnVideoSnapshot = a.getBoolean(
R.styleable.CameraView_Layout_layout_drawOnVideoSnapshot, false);
} finally {
a.recycle();
}

@ -54,7 +54,9 @@ public class Full1PictureRecorder extends PictureRecorder {
int exifRotation;
try {
ExifInterface exif = new ExifInterface(new ByteArrayInputStream(data));
int exifOrientation = exif.getAttributeInt(ExifInterface.TAG_ORIENTATION, ExifInterface.ORIENTATION_NORMAL);
int exifOrientation = exif.getAttributeInt(
ExifInterface.TAG_ORIENTATION,
ExifInterface.ORIENTATION_NORMAL);
exifRotation = ExifHelper.readExifOrientation(exifOrientation);
} catch (IOException e) {
exifRotation = 0;

@ -28,7 +28,8 @@ import androidx.exifinterface.media.ExifInterface;
* A {@link PictureResult} that uses standard APIs.
*/
@RequiresApi(Build.VERSION_CODES.LOLLIPOP)
public class Full2PictureRecorder extends PictureRecorder implements ImageReader.OnImageAvailableListener {
public class Full2PictureRecorder extends PictureRecorder
implements ImageReader.OnImageAvailableListener {
private static final String TAG = Full2PictureRecorder.class.getSimpleName();
private static final CameraLogger LOG = CameraLogger.create(TAG);
@ -65,7 +66,8 @@ public class Full2PictureRecorder extends PictureRecorder implements ImageReader
}
@Override
public void onCaptureStarted(@NonNull ActionHolder holder, @NonNull CaptureRequest request) {
public void onCaptureStarted(@NonNull ActionHolder holder,
@NonNull CaptureRequest request) {
super.onCaptureStarted(holder, request);
if (request.getTag() == (Integer) CameraDevice.TEMPLATE_STILL_CAPTURE) {
LOG.i("onCaptureStarted:", "Dispatching picture shutter.");

@ -44,7 +44,8 @@ public abstract class PictureRecorder {
* @param listener a listener
*/
@SuppressWarnings("WeakerAccess")
public PictureRecorder(@NonNull PictureResult.Stub stub, @Nullable PictureResultListener listener) {
public PictureRecorder(@NonNull PictureResult.Stub stub,
@Nullable PictureResultListener listener) {
mResult = stub;
mListener = listener;
}

@ -60,7 +60,8 @@ public class Snapshot1PictureRecorder extends PictureRecorder {
final Size outputSize = mResult.size;
final Size previewStreamSize = mEngine1.getPreviewStreamSize(Reference.SENSOR);
if (previewStreamSize == null) {
throw new IllegalStateException("Preview stream size should never be null here.");
throw new IllegalStateException("Preview stream size " +
"should never be null here.");
}
WorkerHandler.execute(new Runnable() {
@Override
@ -69,7 +70,8 @@ public class Snapshot1PictureRecorder extends PictureRecorder {
// then crop if needed. In both cases, transform yuv to jpeg.
//noinspection deprecation
byte[] data = RotationHelper.rotate(yuv, previewStreamSize, sensorToOutput);
YuvImage yuv = new YuvImage(data, mFormat, outputSize.getWidth(), outputSize.getHeight(), null);
YuvImage yuv = new YuvImage(data, mFormat, outputSize.getWidth(),
outputSize.getHeight(), null);
ByteArrayOutputStream stream = new ByteArrayOutputStream();
Rect outputRect = CropHelper.computeCrop(outputSize, mOutputRatio);

@ -26,18 +26,20 @@ import com.otaliastudios.cameraview.size.AspectRatio;
* Wraps {@link SnapshotGlPictureRecorder} for Camera2.
*
* Camera2 engine supports metering for snapshots and we expect them to correctly fire flash as well.
* The first idea, and in theory, the most correct one, was to set {@link CaptureRequest#CONTROL_CAPTURE_INTENT}
* to {@link CaptureRequest#CONTROL_CAPTURE_INTENT_STILL_CAPTURE}.
* The first idea, and in theory, the most correct one, was to set
* {@link CaptureRequest#CONTROL_CAPTURE_INTENT} to
* {@link CaptureRequest#CONTROL_CAPTURE_INTENT_STILL_CAPTURE}.
*
* According to documentation, this will automatically trigger the flash if parameters say so.
* In fact this is what happens, but it is a very fast flash that only lasts for 1 or 2 frames.
* It's not easy to call super.take() at the exact time so that we capture the frame that was lit.
* I have tried by comparing {@link SurfaceTexture#getTimestamp()} and {@link CaptureResult#SENSOR_TIMESTAMP}
* to identify the correct frame. These timestamps match, but the frame is not the correct one.
* I have tried comparing {@link SurfaceTexture#getTimestamp()} and
* {@link CaptureResult#SENSOR_TIMESTAMP} to identify the correct frame. These timestamps match,
* but the frame is not the correct one.
*
* So what we do here is ignore the {@link CaptureRequest#CONTROL_CAPTURE_INTENT} and instead open the
* torch, if requested to do so. Then wait for exposure to settle again and finally take a snapshot.
* I'd still love to use the capture intent instead of this, but was not able yet.
* So what we do here is ignore the {@link CaptureRequest#CONTROL_CAPTURE_INTENT} and instead
* open the torch, if requested to do so. Then wait for exposure to settle again and finally
* take a snapshot. I'd still love to use the capture intent instead, but have not managed to yet.
*/
@RequiresApi(Build.VERSION_CODES.LOLLIPOP)
public class Snapshot2PictureRecorder extends SnapshotGlPictureRecorder {
@ -52,8 +54,10 @@ public class Snapshot2PictureRecorder extends SnapshotGlPictureRecorder {
protected void onStart(@NonNull ActionHolder holder) {
super.onStart(holder);
LOG.i("FlashAction:", "Parameters locked, opening torch.");
holder.getBuilder(this).set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_TORCH);
holder.getBuilder(this).set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON);
holder.getBuilder(this).set(CaptureRequest.FLASH_MODE,
CaptureRequest.FLASH_MODE_TORCH);
holder.getBuilder(this).set(CaptureRequest.CONTROL_AE_MODE,
CaptureRequest.CONTROL_AE_MODE_ON);
holder.applyBuilder(this);
}
@ -64,10 +68,12 @@ public class Snapshot2PictureRecorder extends SnapshotGlPictureRecorder {
super.onCaptureCompleted(holder, request, result);
Integer flashState = result.get(CaptureResult.FLASH_STATE);
if (flashState == null) {
LOG.w("FlashAction:", "Waiting flash, but flashState is null! Taking snapshot.");
LOG.w("FlashAction:", "Waiting flash, but flashState is null!",
"Taking snapshot.");
setState(STATE_COMPLETED);
} else if (flashState == CaptureResult.FLASH_STATE_FIRED) {
LOG.i("FlashAction:", "Waiting flash and we have FIRED state! Taking snapshot.");
LOG.i("FlashAction:", "Waiting flash and we have FIRED state!",
"Taking snapshot.");
setState(STATE_COMPLETED);
} else {
LOG.i("FlashAction:", "Waiting flash but flashState is",
@ -111,7 +117,8 @@ public class Snapshot2PictureRecorder extends SnapshotGlPictureRecorder {
@Override
public void take() {
if (!mActionNeeded) {
LOG.i("take:", "Engine does no metering or needs no flash, taking fast snapshot.");
LOG.i("take:", "Engine does no metering or needs no flash.",
"Taking fast snapshot.");
super.take();
} else {
LOG.i("take:", "Engine needs flash. Starting action");

@ -46,11 +46,13 @@ import android.view.Surface;
* - We move to another thread, and create a new EGL surface for that EGL context.
* - We make this new surface current, and re-draw the textureId on it
* - [Optional: fill the overlayTextureId and draw it on the same surface]
* - We use glReadPixels (through {@link EglBaseSurface#saveFrameTo(Bitmap.CompressFormat)}) and save to file.
* - We use glReadPixels (through {@link EglBaseSurface#saveFrameTo(Bitmap.CompressFormat)})
* and save to file.
*
* We create a new EGL surface and redraw the frame because:
* 1. We want to go off the renderer thread as soon as possible
* 2. We have overlays to be drawn - we don't want to draw them on the preview surface, not even for a frame.
* 2. We have overlays to be drawn - we don't want to draw them on the preview surface,
* not even for a frame.
*/
public class SnapshotGlPictureRecorder extends PictureRecorder {
@ -102,7 +104,9 @@ public class SnapshotGlPictureRecorder extends PictureRecorder {
@RendererThread
@Override
public void onRendererFrame(@NonNull SurfaceTexture surfaceTexture, final float scaleX, final float scaleY) {
public void onRendererFrame(@NonNull SurfaceTexture surfaceTexture,
final float scaleX,
final float scaleY) {
mPreview.removeRendererFrameCallback(this);
SnapshotGlPictureRecorder.this.onRendererFrame(surfaceTexture, scaleX, scaleY);
}
@ -110,6 +114,7 @@ public class SnapshotGlPictureRecorder extends PictureRecorder {
});
}
@SuppressWarnings("WeakerAccess")
@RendererThread
@TargetApi(Build.VERSION_CODES.KITKAT)
protected void onRendererTextureCreated(int textureId) {
@ -126,12 +131,14 @@ public class SnapshotGlPictureRecorder extends PictureRecorder {
}
}
@SuppressWarnings("WeakerAccess")
@RendererThread
@TargetApi(Build.VERSION_CODES.KITKAT)
protected void onRendererFilterChanged(@NonNull Filter filter) {
mViewport.setFilter(filter.copy());
}
@SuppressWarnings("WeakerAccess")
@RendererThread
@TargetApi(Build.VERSION_CODES.KITKAT)
protected void onRendererFrame(@SuppressWarnings("unused") @NonNull final SurfaceTexture surfaceTexture,
@ -175,9 +182,13 @@ public class SnapshotGlPictureRecorder extends PictureRecorder {
* @param scaleX frame scale x in {@link Reference#VIEW}
* @param scaleY frame scale y in {@link Reference#VIEW}
*/
@SuppressWarnings("WeakerAccess")
@WorkerThread
@TargetApi(Build.VERSION_CODES.KITKAT)
protected void takeFrame(@NonNull SurfaceTexture surfaceTexture, float scaleX, float scaleY, @NonNull EGLContext eglContext) {
protected void takeFrame(@NonNull SurfaceTexture surfaceTexture,
float scaleX,
float scaleY,
@NonNull EGLContext eglContext) {
// 0. EGL window will need an output.
// We create a fake one as explained in javadocs.
@ -223,9 +234,10 @@ public class SnapshotGlPictureRecorder extends PictureRecorder {
}
// 5. Draw and save
LOG.i("takeFrame:", "timestamp:", surfaceTexture.getTimestamp());
mViewport.drawFrame(mTextureId, mTransform);
if (mHasOverlay) mOverlayDrawer.render();
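// SurfaceTexture.getTimestamp() returns nanoseconds; the draw calls below take
// microseconds (timestampUs), hence the division by 1000.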
long timestampUs = surfaceTexture.getTimestamp() / 1000L;
LOG.i("takeFrame:", "timestampUs:", timestampUs);
mViewport.drawFrame(timestampUs, mTextureId, mTransform);
if (mHasOverlay) mOverlayDrawer.render(timestampUs);
mResult.format = PictureResult.FORMAT_JPEG;
mResult.data = eglSurface.saveFrameTo(Bitmap.CompressFormat.JPEG);
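The redraw-and-read-back step described in the javadoc above can be sketched with plain EGL14/GLES20 calls. The recorder itself goes through the library's Egl* wrappers (EglBaseSurface.saveFrameTo) and an EGL window surface with a fake output, but a pbuffer makes the idea clear; width, height, display, config and sharedContext are assumed to come from the preview's EGL setup:

    int[] attrs = { EGL14.EGL_WIDTH, width, EGL14.EGL_HEIGHT, height, EGL14.EGL_NONE };
    EGLSurface surface = EGL14.eglCreatePbufferSurface(display, config, attrs, 0);
    EGL14.eglMakeCurrent(display, surface, surface, sharedContext);

    // ... draw the camera texture (and overlays) into this surface ...

    // Read the pixels back and wrap them in a Bitmap.
    ByteBuffer pixels = ByteBuffer.allocateDirect(width * height * 4)
            .order(ByteOrder.LITTLE_ENDIAN);
    GLES20.glReadPixels(0, 0, width, height, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, pixels);
    pixels.rewind();
    Bitmap bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
    bitmap.copyPixelsFromBuffer(pixels);
    // Note: GL rows come out bottom-up, so the bitmap usually needs a vertical flip.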

@ -26,11 +26,13 @@ import com.otaliastudios.cameraview.size.Size;
* into an output surface that belongs to the view hierarchy.
*
* @param <T> the type of view which hosts the content surface
* @param <Output> the type of output, either {@link android.view.SurfaceHolder} or {@link android.graphics.SurfaceTexture}
* @param <Output> the type of output, either {@link android.view.SurfaceHolder}
* or {@link android.graphics.SurfaceTexture}
*/
public abstract class CameraPreview<T extends View, Output> {
protected final static CameraLogger LOG = CameraLogger.create(CameraPreview.class.getSimpleName());
protected final static CameraLogger LOG
= CameraLogger.create(CameraPreview.class.getSimpleName());
/**
* This is used to notify CameraEngine to recompute its camera Preview size.
@ -267,8 +269,9 @@ public abstract class CameraPreview<T extends View, Output> {
}
/**
* At this point we undo the work that was done during {@link #onCreateView(Context, ViewGroup)},
* which basically means removing the root view from the hierarchy.
* At this point we undo the work that was done during
* {@link #onCreateView(Context, ViewGroup)}, which basically means removing the root view
* from the hierarchy.
*/
@SuppressWarnings("WeakerAccess")
@UiThread

@ -32,9 +32,10 @@ import javax.microedition.khronos.opengles.GL10;
*
* - in the SurfaceTexture constructor we pass the GL texture handle that we have created.
*
* - The SurfaceTexture is linked to the Camera1Engine object. The camera will pass down buffers of data with
* a specified size (that is, the Camera1Engine preview size). For this reason we don't have to specify
* surfaceTexture.setDefaultBufferSize() (like we do, for example, in Snapshot1PictureRecorder).
* - The SurfaceTexture is linked to the Camera1Engine object. The camera will pass down
* buffers of data with a specified size (that is, the Camera1Engine preview size).
* For this reason we don't have to specify surfaceTexture.setDefaultBufferSize()
* (like we do, for example, in Snapshot1PictureRecorder).
*
* - When SurfaceTexture.updateTexImage() is called, it will fetch the latest texture image from the
* camera stream and assign it to the GL texture that was passed.
@ -42,12 +43,13 @@ import javax.microedition.khronos.opengles.GL10;
* the transformation matrix to be applied.
*
* - The easy way to render an OpenGL texture is using the {@link GLSurfaceView} class.
* It manages the GL context, hosts a surface and runs a separated rendering thread that will perform
* the rendering.
* It manages the GL context, hosts a surface and runs a separate rendering thread that will
* perform the rendering.
*
* - As per docs, we ask the GLSurfaceView to delegate rendering to us, using
* {@link GLSurfaceView#setRenderer(GLSurfaceView.Renderer)}. We request a render on the SurfaceView
* anytime the SurfaceTexture notifies that it has new data available (see OnFrameAvailableListener below).
* {@link GLSurfaceView#setRenderer(GLSurfaceView.Renderer)}. We request a render on the
* SurfaceView anytime the SurfaceTexture notifies that it has new data available
* (see OnFrameAvailableListener below).
*
* - So in short:
* - The SurfaceTexture has buffers of data of mInputStreamSize
@ -55,9 +57,10 @@ import javax.microedition.khronos.opengles.GL10;
* These are determined by the CameraView.onMeasure method.
* - We have a GL rich texture to be drawn (in the given method and thread).
*
* This class will provide rendering callbacks to anyone who registers a {@link RendererFrameCallback}.
* Callbacks are guaranteed to be called on the renderer thread, which means that we can fetch
* the GL context that was created and is managed by the {@link GLSurfaceView}.
* This class will provide rendering callbacks to anyone who registers a
* {@link RendererFrameCallback}. Callbacks are guaranteed to be called on the renderer thread,
* which means that we can fetch the GL context that was created and is managed
* by the {@link GLSurfaceView}.
*/
public class GlCameraPreview extends FilterCameraPreview<GLSurfaceView, SurfaceTexture> {
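The pipeline described in the javadoc above boils down to a handful of standard calls. A minimal sketch with assumed names (glSurfaceView is the hosting view), not this class's code:

    // Renderer thread: create an OES texture and back a SurfaceTexture with it.
    int[] textures = new int[1];
    GLES20.glGenTextures(1, textures, 0);
    int textureId = textures[0];
    GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textureId);
    final SurfaceTexture surfaceTexture = new SurfaceTexture(textureId);

    // Redraw only when the camera pushes a new buffer (RENDERMODE_WHEN_DIRTY).
    surfaceTexture.setOnFrameAvailableListener(new SurfaceTexture.OnFrameAvailableListener() {
        @Override
        public void onFrameAvailable(SurfaceTexture st) {
            glSurfaceView.requestRender();
        }
    });

    // Renderer thread, in onDrawFrame: latch the newest buffer and its transform.
    float[] transform = new float[16];
    surfaceTexture.updateTexImage();
    surfaceTexture.getTransformMatrix(transform);
    // ... draw textureId with "transform" through the viewport / filter ...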
@ -66,7 +69,8 @@ public class GlCameraPreview extends FilterCameraPreview<GLSurfaceView, SurfaceT
private int mOutputTextureId = 0;
private SurfaceTexture mInputSurfaceTexture;
private EglViewport mOutputViewport;
private final Set<RendererFrameCallback> mRendererFrameCallbacks = Collections.synchronizedSet(new HashSet<RendererFrameCallback>());
private final Set<RendererFrameCallback> mRendererFrameCallbacks
= Collections.synchronizedSet(new HashSet<RendererFrameCallback>());
@VisibleForTesting float mCropScaleX = 1F;
@VisibleForTesting float mCropScaleY = 1F;
private View mRootView;
@ -79,7 +83,8 @@ public class GlCameraPreview extends FilterCameraPreview<GLSurfaceView, SurfaceT
@NonNull
@Override
protected GLSurfaceView onCreateView(@NonNull Context context, @NonNull ViewGroup parent) {
ViewGroup root = (ViewGroup) LayoutInflater.from(context).inflate(R.layout.cameraview_gl_view, parent, false);
ViewGroup root = (ViewGroup) LayoutInflater.from(context)
.inflate(R.layout.cameraview_gl_view, parent, false);
GLSurfaceView glView = root.findViewById(R.id.gl_surface_view);
glView.setEGLContextClientVersion(2);
glView.setRenderer(instantiateRenderer());
@ -160,8 +165,8 @@ public class GlCameraPreview extends FilterCameraPreview<GLSurfaceView, SurfaceT
}
});
// Since we are using GLSurfaceView.RENDERMODE_WHEN_DIRTY, we must notify the SurfaceView
// of dirtyness, so that it draws again. This is how it's done.
// Since we are using GLSurfaceView.RENDERMODE_WHEN_DIRTY, we must notify
// the SurfaceView of dirtiness, so that it draws again. This is how it's done.
mInputSurfaceTexture.setOnFrameAvailableListener(new SurfaceTexture.OnFrameAvailableListener() {
@Override
public void onFrameAvailable(SurfaceTexture surfaceTexture) {
@ -209,14 +214,17 @@ public class GlCameraPreview extends FilterCameraPreview<GLSurfaceView, SurfaceT
if (isCropping()) {
// Scaling is easy, but we must also translate before:
// If the view is 10x1000 (very tall), it will show only the left strip of the preview (not the center one).
// If the view is 1000x10 (very large), it will show only the bottom strip of the preview (not the center one).
// If the view is 10x1000 (very tall), it will show only the left strip
// of the preview (not the center one).
// If the view is 1000x10 (very wide), it will show only the bottom strip
// of the preview (not the center one).
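// e.g. with mCropScaleX = 0.5, translX = (1 - 0.5) / 2 = 0.25, centering the visible half.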
float translX = (1F - mCropScaleX) / 2F;
float translY = (1F - mCropScaleY) / 2F;
Matrix.translateM(mTransformMatrix, 0, translX, translY, 0);
Matrix.scaleM(mTransformMatrix, 0, mCropScaleX, mCropScaleY, 1);
}
mOutputViewport.drawFrame(mOutputTextureId, mTransformMatrix);
mOutputViewport.drawFrame(mInputSurfaceTexture.getTimestamp() / 1000L,
mOutputTextureId, mTransformMatrix);
synchronized (mRendererFrameCallbacks) {
// Need to synchronize when iterating the Collections.synchronizedSet
for (RendererFrameCallback callback : mRendererFrameCallbacks) {
@ -244,21 +252,23 @@ public class GlCameraPreview extends FilterCameraPreview<GLSurfaceView, SurfaceT
}
/**
* To crop in GL, we could actually use view.setScaleX and setScaleY, but only from Android N onward.
* See documentation: https://developer.android.com/reference/android/view/SurfaceView
* To crop in GL, we could actually use view.setScaleX and setScaleY, but only from Android N
* onward. See documentation: https://developer.android.com/reference/android/view/SurfaceView
*
* Note: Starting in platform version Build.VERSION_CODES.N, SurfaceView's window position is updated
* synchronously with other View rendering. This means that translating and scaling a SurfaceView on
* screen will not cause rendering artifacts. Such artifacts may occur on previous versions of the
* platform when its window is positioned asynchronously.
* Note: Starting in platform version Build.VERSION_CODES.N, SurfaceView's window position
* is updated synchronously with other View rendering. This means that translating and scaling
* a SurfaceView on screen will not cause rendering artifacts. Such artifacts may occur on
* previous versions of the platform when its window is positioned asynchronously.
*
* But to support older platforms, this seem to work - computing scale values and requesting a new frame,
* then drawing it with a scaled transformation matrix. See {@link Renderer#onDrawFrame(GL10)}.
* But to support older platforms, this seems to work - computing scale values and requesting
* a new frame, then drawing it with a scaled transformation matrix.
* See {@link Renderer#onDrawFrame(GL10)}.
*/
@Override
protected void crop(@NonNull Op<Void> op) {
op.start();
if (mInputStreamWidth > 0 && mInputStreamHeight > 0 && mOutputSurfaceWidth > 0 && mOutputSurfaceHeight > 0) {
if (mInputStreamWidth > 0 && mInputStreamHeight > 0 && mOutputSurfaceWidth > 0
&& mOutputSurfaceHeight > 0) {
float scaleX = 1f, scaleY = 1f;
AspectRatio current = AspectRatio.of(mOutputSurfaceWidth, mOutputSurfaceHeight);
AspectRatio target = AspectRatio.of(mInputStreamWidth, mInputStreamHeight);
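As a hedged illustration of the computation the crop javadoc above refers to (one way to derive the crop scales; not necessarily the exact code that follows in crop()):

    float current = mOutputSurfaceWidth / (float) mOutputSurfaceHeight; // view surface ratio
    float target = mInputStreamWidth / (float) mInputStreamHeight;      // camera stream ratio
    float cropScaleX = 1F, cropScaleY = 1F;
    if (current >= target) {
        // Surface is wider than the stream: full width is shown, top and bottom are cropped.
        cropScaleY = target / current;
    } else {
        // Surface is taller than the stream: full height is shown, sides are cropped.
        cropScaleX = current / target;
    }
    // Example: a 16:9 surface over a 4:3 stream gives cropScaleY = (4/3) / (16/9) = 0.75,
    // so 75% of the stream height is sampled and translY = (1 - 0.75) / 2 = 0.125.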

@ -14,14 +14,15 @@ import com.otaliastudios.cameraview.R;
/**
* This is the fallback preview when hardware acceleration is off, and is the last resort.
* Currently does not support cropping, which means that {@link com.otaliastudios.cameraview.CameraView}
* is forced to be wrap_content.
* Currently does not support cropping, which means that
* {@link com.otaliastudios.cameraview.CameraView} is forced to be wrap_content.
*
* Do not use.
*/
public class SurfaceCameraPreview extends CameraPreview<SurfaceView, SurfaceHolder> {
private final static CameraLogger LOG = CameraLogger.create(SurfaceCameraPreview.class.getSimpleName());
private final static CameraLogger LOG
= CameraLogger.create(SurfaceCameraPreview.class.getSimpleName());
private boolean mDispatched;
private View mRootView;
@ -33,7 +34,8 @@ public class SurfaceCameraPreview extends CameraPreview<SurfaceView, SurfaceHold
@NonNull
@Override
protected SurfaceView onCreateView(@NonNull Context context, @NonNull ViewGroup parent) {
View root = LayoutInflater.from(context).inflate(R.layout.cameraview_surface_view, parent, false);
View root = LayoutInflater.from(context).inflate(R.layout.cameraview_surface_view, parent,
false);
parent.addView(root, 0);
SurfaceView surfaceView = root.findViewById(R.id.surface_view);
final SurfaceHolder holder = surfaceView.getHolder();
@ -48,7 +50,10 @@ public class SurfaceCameraPreview extends CameraPreview<SurfaceView, SurfaceHold
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
LOG.i("callback:", "surfaceChanged", "w:", width, "h:", height, "dispatched:", mDispatched);
LOG.i("callback:", "surfaceChanged",
"w:", width,
"h:", height,
"dispatched:", mDispatched);
if (!mDispatched) {
dispatchOnSurfaceAvailable(width, height);
mDispatched = true;
