Merge branch 'feature-filters' of github.com:natario1/CameraView into feature-filters

pull/527/head
Suneet Agrawal 6 years ago
commit fb77c2d9d9
66 changed files (changed line counts in parentheses, BIN = binary):
  1. README.md (15)
  2. cameraview/src/androidTest/java/com/otaliastudios/cameraview/engine/CameraIntegrationTest.java (4)
  3. cameraview/src/androidTest/java/com/otaliastudios/cameraview/engine/MockCameraEngine.java (4)
  4. cameraview/src/androidTest/java/com/otaliastudios/cameraview/internal/utils/WorkerHandlerTest.java (14)
  5. cameraview/src/androidTest/java/com/otaliastudios/cameraview/overlay/OverlayDrawerTest.java (120)
  6. cameraview/src/androidTest/java/com/otaliastudios/cameraview/video/VideoRecorderTest.java (4)
  7. cameraview/src/main/java/com/otaliastudios/cameraview/CameraView.java (18)
  8. cameraview/src/main/java/com/otaliastudios/cameraview/engine/Camera1Engine.java (8)
  9. cameraview/src/main/java/com/otaliastudios/cameraview/engine/Camera2Engine.java (12)
  10. cameraview/src/main/java/com/otaliastudios/cameraview/engine/CameraEngine.java (20)
  11. cameraview/src/main/java/com/otaliastudios/cameraview/internal/Issue514Workaround.java (119)
  12. cameraview/src/main/java/com/otaliastudios/cameraview/internal/egl/EglBaseSurface.java (2)
  13. cameraview/src/main/java/com/otaliastudios/cameraview/internal/egl/EglViewport.java (19)
  14. cameraview/src/main/java/com/otaliastudios/cameraview/internal/utils/CropHelper.java (10)
  15. cameraview/src/main/java/com/otaliastudios/cameraview/internal/utils/WorkerHandler.java (66)
  16. cameraview/src/main/java/com/otaliastudios/cameraview/overlay/OverlayDrawer.java (131)
  17. cameraview/src/main/java/com/otaliastudios/cameraview/picture/SnapshotGlPictureRecorder.java (203)
  18. cameraview/src/main/java/com/otaliastudios/cameraview/size/AspectRatio.java (8)
  19. cameraview/src/main/java/com/otaliastudios/cameraview/video/FullVideoRecorder.java (10)
  20. cameraview/src/main/java/com/otaliastudios/cameraview/video/SnapshotVideoRecorder.java (69)
  21. cameraview/src/main/java/com/otaliastudios/cameraview/video/VideoRecorder.java (9)
  22. cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/AudioConfig.java (6)
  23. cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/AudioMediaEncoder.java (138)
  24. cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/AudioNoise.java (59)
  25. cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/MediaEncoder.java (69)
  26. cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/MediaEncoderEngine.java (69)
  27. cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/TextureConfig.java (16)
  28. cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/TextureMediaEncoder.java (89)
  29. cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/VideoMediaEncoder.java (57)
  30. demo/src/main/AndroidManifest.xml (7)
  31. demo/src/main/ic_launcher-web.png (BIN)
  32. demo/src/main/java/com/otaliastudios/cameraview/demo/CameraActivity.java (7)
  33. demo/src/main/java/com/otaliastudios/cameraview/demo/PicturePreviewActivity.java (20)
  34. demo/src/main/java/com/otaliastudios/cameraview/demo/VideoPreviewActivity.java (25)
  35. demo/src/main/res/layout/activity_camera.xml (18)
  36. demo/src/main/res/mipmap-anydpi-v26/logo.xml (5)
  37. demo/src/main/res/mipmap-hdpi/cameraview.png (BIN)
  38. demo/src/main/res/mipmap-hdpi/logo.png (BIN)
  39. demo/src/main/res/mipmap-hdpi/logo_background.png (BIN)
  40. demo/src/main/res/mipmap-hdpi/logo_foreground.png (BIN)
  41. demo/src/main/res/mipmap-mdpi/cameraview.png (BIN)
  42. demo/src/main/res/mipmap-mdpi/logo.png (BIN)
  43. demo/src/main/res/mipmap-mdpi/logo_background.png (BIN)
  44. demo/src/main/res/mipmap-mdpi/logo_foreground.png (BIN)
  45. demo/src/main/res/mipmap-xhdpi/cameraview.png (BIN)
  46. demo/src/main/res/mipmap-xhdpi/logo.png (BIN)
  47. demo/src/main/res/mipmap-xhdpi/logo_background.png (BIN)
  48. demo/src/main/res/mipmap-xhdpi/logo_foreground.png (BIN)
  49. demo/src/main/res/mipmap-xxhdpi/cameraview.png (BIN)
  50. demo/src/main/res/mipmap-xxhdpi/logo.png (BIN)
  51. demo/src/main/res/mipmap-xxhdpi/logo_background.png (BIN)
  52. demo/src/main/res/mipmap-xxhdpi/logo_foreground.png (BIN)
  53. demo/src/main/res/mipmap-xxxhdpi/cameraview.png (BIN)
  54. demo/src/main/res/mipmap-xxxhdpi/logo.png (BIN)
  55. demo/src/main/res/mipmap-xxxhdpi/logo_background.png (BIN)
  56. demo/src/main/res/mipmap-xxxhdpi/logo_foreground.png (BIN)
  57. demo/src/main/res/values/colors.xml (6)
  58. docs/index.md (8)
  59. docs/static/banner.png (BIN)
  60. docs/static/icon.png (BIN)
  61. docs/static/screen1.jpg (BIN)
  62. docs/static/screen1.png (BIN)
  63. docs/static/screen2.jpg (BIN)
  64. docs/static/screen2.png (BIN)
  65. docs/static/screen3.jpg (BIN)
  66. docs/static/screen3.png (BIN)

@@ -4,9 +4,10 @@
 [![Issues](https://img.shields.io/github/issues-raw/natario1/CameraView.svg)](https://github.com/natario1/CameraView/issues)
 [![Funding](https://img.shields.io/opencollective/all/CameraView.svg?colorB=r)](https://natario1.github.io/CameraView/extra/donate)
+&#10240; <!-- Hack to add whitespace -->
 <p align="center">
-<img src="docs/static/icon.png" vspace="10" width="250" height="250">
+<img src="docs/static/banner.png" width="100%">
 </p>
 # CameraView
@@ -15,7 +16,7 @@ CameraView is a well documented, high-level library that makes capturing picture
 addressing most of the common issues and needs, and still leaving you with flexibility where needed.
 ```groovy
-api 'com.otaliastudios:cameraview:2.0.0-rc1'
+api 'com.otaliastudios:cameraview:2.0.0-rc2'
 ```
 - Fast & reliable
@@ -36,12 +37,14 @@ Read the [official website](https://natario1.github.io/CameraView) for setup ins
 You might also be interested in [changelog](https://natario1.github.io/CameraView/about/changelog.html)
 or in the [v1 migration guide](https://natario1.github.io/CameraView/extra/v1-migration-guide.html).
-<p>
-<img src="docs/static/screen1.jpg" width="250" vspace="20" hspace="5">
-<img src="docs/static/screen2.jpg" width="250" vspace="20" hspace="5">
-<img src="docs/static/screen3.jpg" width="250" vspace="20" hspace="5">
+&#10240; <!-- Hack to add whitespace -->
+<p align="center">
+<img src="docs/static/screen1.png" width="250" hspace="5"><img src="docs/static/screen2.png" width="250" hspace="5"><img src="docs/static/screen3.png" width="250" hspace="5">
 </p>
+&#10240; <!-- Hack to add whitespace -->
 If you like the project, use it with profit, or simply want to thank back, please consider [donating
 to the project](https://natario1.github.io/CameraView/extra/donate) now! You can either make a one time
 donation or become a sponsor, in which case your company logo will immediately show up here.

@@ -86,7 +86,7 @@ public abstract class CameraIntegrationTest extends BaseTest {
     @Before
     public void setUp() {
         LOG.e("Test started. Setting up camera.");
-        WorkerHandler.destroy();
+        WorkerHandler.destroyAll();
         uiSync(new Runnable() {
             @Override
@@ -126,7 +126,7 @@ public abstract class CameraIntegrationTest extends BaseTest {
     public void tearDown() {
         LOG.e("Test ended. Tearing down camera.");
         camera.destroy();
-        WorkerHandler.destroy();
+        WorkerHandler.destroyAll();
     }
     private void waitForUiException() throws Throwable {

@@ -128,7 +128,7 @@ public class MockCameraEngine extends CameraEngine {
     }
     @Override
-    protected void onTakePictureSnapshot(@NonNull PictureResult.Stub stub, @NonNull AspectRatio viewAspectRatio) {
+    protected void onTakePictureSnapshot(@NonNull PictureResult.Stub stub, @NonNull AspectRatio outputRatio) {
     }
@@ -138,7 +138,7 @@ public class MockCameraEngine extends CameraEngine {
     }
     @Override
-    protected void onTakeVideoSnapshot(@NonNull VideoResult.Stub stub, @NonNull AspectRatio viewAspectRatio) {
+    protected void onTakeVideoSnapshot(@NonNull VideoResult.Stub stub, @NonNull AspectRatio outputRatio) {
     }

@@ -13,6 +13,7 @@ import org.junit.Test;
 import org.junit.runner.RunWith;
 import java.util.concurrent.Callable;
+import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.ExecutionException;
 import java.util.concurrent.Executor;
@@ -218,7 +219,7 @@ public class WorkerHandlerTest extends BaseTest {
     public void testDestroy() {
         final WorkerHandler handler = WorkerHandler.get("handler");
         assertTrue(handler.getThread().isAlive());
-        WorkerHandler.destroy();
+        handler.destroy();
         // Wait for the thread to die.
         try { handler.getThread().join(500); } catch (InterruptedException ignore) {}
         assertFalse(handler.getThread().isAlive());
@@ -226,4 +227,15 @@ public class WorkerHandlerTest extends BaseTest {
         assertNotSame(handler, newHandler);
         assertTrue(newHandler.getThread().isAlive());
     }
+
+    @Test
+    public void testDestroyAll() {
+        final WorkerHandler handler1 = WorkerHandler.get("handler1");
+        final WorkerHandler handler2 = WorkerHandler.get("handler2");
+        WorkerHandler.destroyAll();
+        WorkerHandler newHandler1 = WorkerHandler.get("handler1");
+        WorkerHandler newHandler2 = WorkerHandler.get("handler2");
+        assertNotSame(handler1, newHandler1);
+        assertNotSame(handler2, newHandler2);
+    }
 }

@@ -0,0 +1,120 @@
package com.otaliastudios.cameraview.overlay;
import android.content.res.XmlResourceParser;
import android.graphics.Canvas;
import android.util.AttributeSet;
import android.util.Xml;
import android.view.View;
import android.view.ViewGroup;
import androidx.annotation.NonNull;
import androidx.test.annotation.UiThreadTest;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import androidx.test.filters.SmallTest;
import com.otaliastudios.cameraview.BaseTest;
import com.otaliastudios.cameraview.internal.egl.EglBaseSurface;
import com.otaliastudios.cameraview.internal.egl.EglCore;
import com.otaliastudios.cameraview.internal.egl.EglViewport;
import com.otaliastudios.cameraview.size.Size;
import org.hamcrest.BaseMatcher;
import org.hamcrest.Description;
import org.hamcrest.Matcher;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mockito;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyFloat;
import static org.mockito.ArgumentMatchers.anyLong;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.reset;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
@RunWith(AndroidJUnit4.class)
@SmallTest
public class OverlayDrawerTest extends BaseTest {
private final static int WIDTH = 100;
private final static int HEIGHT = 100;
private EglCore eglCore;
private EglBaseSurface eglSurface;
@Before
public void setUp() {
eglCore = new EglCore(null, EglCore.FLAG_RECORDABLE);
eglSurface = new EglBaseSurface(eglCore);
eglSurface.createOffscreenSurface(WIDTH, HEIGHT);
eglSurface.makeCurrent();
}
@After
public void tearDown() {
eglSurface.releaseEglSurface();
eglSurface = null;
eglCore.release();
eglCore = null;
}
@Test
public void testDraw() {
Overlay overlay = mock(Overlay.class);
OverlayDrawer drawer = new OverlayDrawer(overlay, new Size(WIDTH, HEIGHT));
drawer.draw(Overlay.Target.PICTURE_SNAPSHOT);
verify(overlay, times(1)).drawOn(
eq(Overlay.Target.PICTURE_SNAPSHOT),
any(Canvas.class));
}
@Test
public void testGetTransform() {
// We'll check that the transform is not all zeros, which is highly unlikely
// (the default transform should be the identity matrix)
OverlayDrawer drawer = new OverlayDrawer(mock(Overlay.class), new Size(WIDTH, HEIGHT));
drawer.draw(Overlay.Target.PICTURE_SNAPSHOT);
assertThat(drawer.getTransform(), new BaseMatcher<float[]>() {
public void describeTo(Description description) { }
public boolean matches(Object item) {
float[] array = (float[]) item;
for (float value : array) {
if (value != 0.0F) return true;
}
return false;
}
});
}
@Test
public void testRender() {
OverlayDrawer drawer = new OverlayDrawer(mock(Overlay.class), new Size(WIDTH, HEIGHT));
drawer.mViewport = spy(drawer.mViewport);
drawer.draw(Overlay.Target.PICTURE_SNAPSHOT);
drawer.render();
verify(drawer.mViewport, times(1)).drawFrame(
drawer.mTextureId,
drawer.getTransform()
);
}
@Test
public void testRelease() {
OverlayDrawer drawer = new OverlayDrawer(mock(Overlay.class), new Size(WIDTH, HEIGHT));
EglViewport viewport = spy(drawer.mViewport);
drawer.mViewport = viewport;
drawer.release();
verify(viewport, times(1)).release();
}
}

@@ -29,7 +29,7 @@ public class VideoRecorderTest extends BaseTest {
     }
     @Override
-    protected void onStop() {
+    protected void onStop(boolean isCameraShutdown) {
         dispatchVideoRecordingEnd();
         dispatchResult();
     }
@@ -37,7 +37,7 @@ public class VideoRecorderTest extends BaseTest {
         recorder.start(result);
         Mockito.verify(listener, Mockito.times(1))
                 .onVideoRecordingStart();
-        recorder.stop();
+        recorder.stop(false);
         Mockito.verify(listener, Mockito.times(1))
                 .onVideoRecordingEnd();
         Mockito.verify(listener, Mockito.times(1))

@@ -426,10 +426,10 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
             int height, width;
             if (freeWidth) {
                 height = heightValue;
-                width = (int) (height / ratio);
+                width = Math.round(height / ratio);
             } else {
                 width = widthValue;
-                height = (int) (width * ratio);
+                height = Math.round(width * ratio);
             }
             LOG.i("onMeasure:", "one dimension was free, we adapted it to fit the aspect ratio.",
                     "(" + width + "x" + height + ")");
@@ -446,10 +446,10 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
             int height, width;
             if (freeWidth) {
                 height = heightValue;
-                width = Math.min((int) (height / ratio), widthValue);
+                width = Math.min(Math.round(height / ratio), widthValue);
             } else {
                 width = widthValue;
-                height = Math.min((int) (width * ratio), heightValue);
+                height = Math.min(Math.round(width * ratio), heightValue);
             }
             LOG.i("onMeasure:", "one dimension was EXACTLY, another AT_MOST.",
                     "We have TRIED to fit the aspect ratio, but it's not guaranteed.",
@@ -466,10 +466,10 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
             if (atMostRatio >= ratio) {
                 // We must reduce height.
                 width = widthValue;
-                height = (int) (width * ratio);
+                height = Math.round(width * ratio);
             } else {
                 height = heightValue;
-                width = (int) (height / ratio);
+                width = Math.round(height / ratio);
             }
             LOG.i("onMeasure:", "both dimension were AT_MOST.",
                     "We fit the preview aspect ratio.",
@@ -1467,9 +1467,8 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
      * @see #takePicture()
      */
     public void takePictureSnapshot() {
-        if (getWidth() == 0 || getHeight() == 0) return;
         PictureResult.Stub stub = new PictureResult.Stub();
-        mCameraEngine.takePictureSnapshot(stub, AspectRatio.of(getWidth(), getHeight()));
+        mCameraEngine.takePictureSnapshot(stub);
     }
@@ -1501,9 +1500,8 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
      * @param file a file where the video will be saved
      */
     public void takeVideoSnapshot(@NonNull File file) {
-        if (getWidth() == 0 || getHeight() == 0) return;
         VideoResult.Stub stub = new VideoResult.Stub();
-        mCameraEngine.takeVideoSnapshot(stub, file, AspectRatio.of(getWidth(), getHeight()));
+        mCameraEngine.takeVideoSnapshot(stub, file);
         mUiHandler.post(new Runnable() {
             @Override
             public void run() {
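Reviewer note: with this change callers no longer pass the view size to the snapshot APIs; the engine derives the output ratio from its own preview surface. A minimal caller-side sketch, assuming a demo-style Activity; `R.id.camera`, `R.layout.activity_camera` and the file name are illustrative and not part of this diff:

```java
import android.os.Bundle;
import androidx.annotation.NonNull;
import androidx.appcompat.app.AppCompatActivity;
import com.otaliastudios.cameraview.CameraListener;
import com.otaliastudios.cameraview.CameraView;
import com.otaliastudios.cameraview.PictureResult;
import java.io.File;

// Hypothetical caller: the view no longer needs a non-zero measured size before
// a snapshot is requested, since the engine resolves the output ratio internally.
public class SnapshotCallerSketch extends AppCompatActivity {
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_camera);
        final CameraView camera = findViewById(R.id.camera);
        camera.setLifecycleOwner(this);
        camera.addCameraListener(new CameraListener() {
            @Override
            public void onPictureTaken(@NonNull PictureResult result) {
                // result.getSize() already reflects the crop applied by the engine.
            }
        });
        camera.takePictureSnapshot();
        camera.takeVideoSnapshot(new File(getFilesDir(), "snapshot.mp4"));
    }
}
```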

@@ -231,7 +231,7 @@ public class Camera1Engine extends CameraEngine implements
     @Override
     protected Task<Void> onStopPreview() {
         if (mVideoRecorder != null) {
-            mVideoRecorder.stop();
+            mVideoRecorder.stop(true);
             mVideoRecorder = null;
         }
         mPictureRecorder = null;
@@ -306,10 +306,9 @@ public class Camera1Engine extends CameraEngine implements
     @WorkerThread
     @Override
-    protected void onTakePictureSnapshot(@NonNull PictureResult.Stub stub, @NonNull AspectRatio viewAspectRatio) {
+    protected void onTakePictureSnapshot(@NonNull PictureResult.Stub stub, @NonNull AspectRatio outputRatio) {
         stub.size = getUncroppedSnapshotSize(Reference.OUTPUT); // Not the real size: it will be cropped to match the view ratio
         stub.rotation = getAngles().offset(Reference.SENSOR, Reference.OUTPUT, Axis.RELATIVE_TO_SENSOR); // Actually it will be rotated and set to 0.
-        AspectRatio outputRatio = getAngles().flip(Reference.OUTPUT, Reference.VIEW) ? viewAspectRatio.flip() : viewAspectRatio;
         if (mPreview instanceof GlCameraPreview && Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT) {
             mPictureRecorder = new SnapshotGlPictureRecorder(stub, this, (GlCameraPreview) mPreview, outputRatio, getOverlay());
@@ -343,7 +342,7 @@ public class Camera1Engine extends CameraEngine implements
     @SuppressLint("NewApi")
     @WorkerThread
     @Override
-    protected void onTakeVideoSnapshot(@NonNull VideoResult.Stub stub, @NonNull AspectRatio viewAspectRatio) {
+    protected void onTakeVideoSnapshot(@NonNull VideoResult.Stub stub, @NonNull AspectRatio outputRatio) {
         if (!(mPreview instanceof GlCameraPreview)) {
             throw new IllegalStateException("Video snapshots are only supported with GlCameraPreview.");
         }
@@ -355,7 +354,6 @@ public class Camera1Engine extends CameraEngine implements
         if (outputSize == null) {
             throw new IllegalStateException("outputSize should not be null.");
         }
-        AspectRatio outputRatio = getAngles().flip(Reference.VIEW, Reference.OUTPUT) ? viewAspectRatio.flip() : viewAspectRatio;
         Rect outputCrop = CropHelper.computeCrop(outputSize, outputRatio);
         outputSize = new Size(outputCrop.width(), outputCrop.height());
         stub.size = outputSize;

@@ -535,7 +535,7 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
         if (mVideoRecorder != null) {
             // This should synchronously call onVideoResult that will reset the repeating builder
             // to the PREVIEW template. This is very important.
-            mVideoRecorder.stop();
+            mVideoRecorder.stop(true);
             mVideoRecorder = null;
         }
         mPictureRecorder = null;
@@ -610,10 +610,9 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
     @WorkerThread
     @Override
-    protected void onTakePictureSnapshot(@NonNull PictureResult.Stub stub, @NonNull AspectRatio viewAspectRatio) {
+    protected void onTakePictureSnapshot(@NonNull PictureResult.Stub stub, @NonNull AspectRatio outputRatio) {
         stub.size = getUncroppedSnapshotSize(Reference.OUTPUT); // Not the real size: it will be cropped to match the view ratio
         stub.rotation = getAngles().offset(Reference.SENSOR, Reference.OUTPUT, Axis.RELATIVE_TO_SENSOR); // Actually it will be rotated and set to 0.
-        AspectRatio outputRatio = getAngles().flip(Reference.OUTPUT, Reference.VIEW) ? viewAspectRatio.flip() : viewAspectRatio;
         if (mPreview instanceof GlCameraPreview) {
             mPictureRecorder = new SnapshotGlPictureRecorder(stub, this, (GlCameraPreview) mPreview, outputRatio, getOverlay());
         } else {
@@ -695,7 +694,7 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
     @WorkerThread
     @Override
-    protected void onTakeVideoSnapshot(@NonNull VideoResult.Stub stub, @NonNull AspectRatio viewAspectRatio) {
+    protected void onTakeVideoSnapshot(@NonNull VideoResult.Stub stub, @NonNull AspectRatio outputRatio) {
         if (!(mPreview instanceof GlCameraPreview)) {
             throw new IllegalStateException("Video snapshots are only supported with GlCameraPreview.");
         }
@@ -704,7 +703,6 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
         if (outputSize == null) {
             throw new IllegalStateException("outputSize should not be null.");
         }
-        AspectRatio outputRatio = getAngles().flip(Reference.VIEW, Reference.OUTPUT) ? viewAspectRatio.flip() : viewAspectRatio;
         Rect outputCrop = CropHelper.computeCrop(outputSize, outputRatio);
         outputSize = new Size(outputCrop.width(), outputCrop.height());
         stub.size = outputSize;
@@ -1257,8 +1255,8 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
     private void onAutoFocusCapture(@NonNull CaptureResult result) {
         Integer afState = result.get(CaptureResult.CONTROL_AF_STATE);
         if (afState == null) {
-            LOG.e("onAutoFocusCapture", "afState is null! Assuming AF failed.");
-            afState = CaptureResult.CONTROL_AF_STATE_NOT_FOCUSED_LOCKED;
+            LOG.i("onAutoFocusCapture", "afState is null! This can happen for partial results. Waiting.");
+            return;
         }
         switch (afState) {
             case CaptureRequest.CONTROL_AF_STATE_FOCUSED_LOCKED: {

@@ -1087,9 +1087,8 @@ public abstract class CameraEngine implements
      * The snapshot size is the {@link #getPreviewStreamSize(Reference)}, but cropped based on the
      * view/surface aspect ratio.
      * @param stub a picture stub
-     * @param viewAspectRatio the view aspect ratio
      */
-    public final void takePictureSnapshot(final @NonNull PictureResult.Stub stub, @NonNull final AspectRatio viewAspectRatio) {
+    public final void takePictureSnapshot(final @NonNull PictureResult.Stub stub) {
         LOG.v("takePictureSnapshot", "scheduling");
         mHandler.run(new Runnable() {
             @Override
@@ -1101,7 +1100,9 @@ public abstract class CameraEngine implements
                 stub.isSnapshot = true;
                 stub.facing = mFacing;
                 // Leave the other parameters to subclasses.
-                onTakePictureSnapshot(stub, viewAspectRatio);
+                //noinspection ConstantConditions
+                AspectRatio ratio = AspectRatio.of(getPreviewSurfaceSize(Reference.OUTPUT));
+                onTakePictureSnapshot(stub, ratio);
             }
         });
     }
@@ -1155,9 +1156,8 @@ public abstract class CameraEngine implements
     /**
      * @param stub a video stub
      * @param file the output file
-     * @param viewAspectRatio the view aspect ratio
      */
-    public final void takeVideoSnapshot(final @NonNull VideoResult.Stub stub, @NonNull final File file, @NonNull final AspectRatio viewAspectRatio) {
+    public final void takeVideoSnapshot(final @NonNull VideoResult.Stub stub, @NonNull final File file) {
         LOG.v("takeVideoSnapshot", "scheduling");
         mHandler.run(new Runnable() {
             @Override
@@ -1175,7 +1175,9 @@ public abstract class CameraEngine implements
                 stub.audio = mAudio;
                 stub.maxSize = mVideoMaxSize;
                 stub.maxDuration = mVideoMaxDuration;
-                onTakeVideoSnapshot(stub, viewAspectRatio);
+                //noinspection ConstantConditions
+                AspectRatio ratio = AspectRatio.of(getPreviewSurfaceSize(Reference.OUTPUT));
+                onTakeVideoSnapshot(stub, ratio);
             }
         });
     }
@@ -1187,7 +1189,7 @@ public abstract class CameraEngine implements
             public void run() {
                 LOG.i("stopVideo", "executing.", "isTakingVideo?", isTakingVideo());
                 if (mVideoRecorder != null) {
-                    mVideoRecorder.stop();
+                    mVideoRecorder.stop(false);
                     mVideoRecorder = null;
                 }
             }
@@ -1220,10 +1222,10 @@ public abstract class CameraEngine implements
     protected abstract void onTakePicture(@NonNull PictureResult.Stub stub);
     @WorkerThread
-    protected abstract void onTakePictureSnapshot(@NonNull PictureResult.Stub stub, @NonNull AspectRatio viewAspectRatio);
+    protected abstract void onTakePictureSnapshot(@NonNull PictureResult.Stub stub, @NonNull AspectRatio outputRatio);
     @WorkerThread
-    protected abstract void onTakeVideoSnapshot(@NonNull VideoResult.Stub stub, @NonNull AspectRatio viewAspectRatio);
+    protected abstract void onTakeVideoSnapshot(@NonNull VideoResult.Stub stub, @NonNull AspectRatio outputRatio);
     @WorkerThread
     protected abstract void onTakeVideo(@NonNull VideoResult.Stub stub);

@@ -0,0 +1,119 @@
package com.otaliastudios.cameraview.internal;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.SurfaceTexture;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import android.view.Surface;
import com.otaliastudios.cameraview.internal.egl.EglViewport;
import com.otaliastudios.cameraview.preview.RendererThread;
/**
* Fixes an issue for some devices with snapshot picture and video recording.
* This is so unclear that I wanted to have a separate class holding code and comments.
*
* WHEN TO USE THIS CLASS
* There is actually no need of this class in some cases:
* - when we don't have overlays, everything works
* - on the majority of devices, everything works
* But some devices will show the issue #514 and so they need this class to fix it.
* We will use this always since it should have close to none performance impact.
*
* SNAPSHOT PROCEDURE
* The issue is about picture and video snapshots with overlays. In both cases, we:
* 1. Take textureId from the camera preview
* 2. Take EGLContext from the camera preview thread ({@link RendererThread})
* 3. Create an overlayTextureId
* 4. Create an overlaySurfaceTexture
* 5. Create an overlaySurface
* 6. Move to another thread
* 7. Create a new EGLContext using the old context as a shared context so we have texture data
* 8. Create a new EGLWindow using some surface as output
* 9. For each frame:
* 9A. Draw overlays on the overlaySurface.lockCanvas() / unlockCanvasAndPost()
* 9B. Publish overlays to GL texture using overlaySurfaceTexture.updateTexImage()
* 9C. GLES - draw textureId
* 9D. GLES - draw overlayTextureId
* Both textures are drawn on the same EGLWindow and we manage to overlay them with {@link GLES20#GL_BLEND}.
* This is the whole procedure and it works for the majority of devices and situations.
*
* ISSUE DESCRIPTION
* The #514 issue can be described as follows:
* - Overlays have no transparency: background is {@link Color#BLACK} and covers the video
* - Overlays have distorted colors: {@link Color#RED} becomes greenish,
* {@link Color#GREEN} becomes blueish,
* {@link Color#BLUE} becomes reddish
*
* ISSUE INSIGHTS
* After painful debugging, we have reached these conclusions:
* 1. Overlays are drawn on {@link Canvas} with the correct format
* This can be checked for example by applying alpha to one overlay. The final color will
* be faded out, although on a black background. So the {@link Canvas} drawing step works well.
* 2. The GLES shader will always receive pixels in RGBA
* This seems to be a constant in Android - someone does the conversion for us at a lower level.
* This was confirmed for example by forcing A=0.5 and seeing the video frames behind the overlay
* black background, or by forcing to 0.0 some of the channels and seeing the output.
* 3. The {@link Canvas} / {@link Surface} pixels are wrongly treated as YUV!
* On problematic devices, some component down there thinks that our overlays RGBA are in YUV,
* and will CONVERT THEM TO RGBA. This means:
* 3A. Original alpha is dropped. The algorithm thinks we have passed YUV.
* 3B. Original colors are messed up. For example, (255,0,0,255,RGBA) is treated as (255,0,0,YUV)
* and converted back to rgb becoming greenish (74,255,27,255,RGBA).
* Doing the same conversion for {@link Color#GREEN} and {@link Color#BLUE} confirms what we
* were seeing in the issue screenshots.
*
* So a pixel format conversion takes place, when it shouldn't happen. We can't solve this:
* - It is done at a lower level, there's no real way for us to specify the surface format, but
* it seems that these devices will prefer a YUV format and misunderstand our {@link Canvas} pixels.
* - There is also no way to identify which devices will present this issue, it's a bug somewhere
* and it is implementation specific.
*
* THE MAGIC
* Hard to say why, but using this class fixes the described issue.
* It seems that when the {@link SurfaceTexture#updateTexImage()} method for the overlay surface
* is called - the one that updates the overlayTextureId - we must ensure that the CURRENTLY
* BOUND TEXTURE ID IS NOT 0. The id we choose to apply might be cameraTextureId, or overlayTextureId,
* or probably whatever other valid id, and should be passed to {@link #Issue514Workaround(int)}.
* [Tested with cameraTextureId and overlayTextureId: both do work.]
* [Tested with invalid id like 9999. This won't work.]
*
* This makes no sense, since overlaySurfaceTexture.updateTexImage() is setting it to overlayTextureId
* anyway, but it fixes the issue. Specifically, after any draw operation with {@link EglViewport},
* the bound texture is reset to 0 so this must be undone here. We offer:
*
* - {@link #beforeOverlayUpdateTexImage()} to be called before the {@link SurfaceTexture#updateTexImage()} call
* - {@link #end()} to release and bring things back to normal state
*
* Since updating and rendering can happen on different threads with a shared EGL context,
* in case they do, the {@link #beforeOverlayUpdateTexImage()}, the actual updateTexImage() and
* finally the {@link EglViewport} drawing operations should be synchronized with a lock.
*
* REFERENCES
* https://github.com/natario1/CameraView/issues/514
* https://android.googlesource.com/platform/frameworks/native/+/5c1139f/libs/gui/SurfaceTexture.cpp
* I can see here that SurfaceTexture does indeed call glBindTexture with the same parameters whenever
* updateTexImage is called, but it also does other gl stuff first. This other gl stuff might be
* breaking when we don't have a bound texture on some specific hardware implementation.
*/
public class Issue514Workaround {
private final int textureId;
public Issue514Workaround(int textureId) {
this.textureId = textureId;
}
public void beforeOverlayUpdateTexImage() {
bindTexture(textureId);
}
public void end() {
bindTexture(0);
}
private void bindTexture(int textureId) {
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textureId);
}
}
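Reviewer aid: a minimal sketch of how the workaround is meant to be wired around the overlay texture update, mirroring what OverlayDrawer does further down. The class, parameter and field names here are illustrative, not part of this diff:

```java
import android.graphics.SurfaceTexture;
import com.otaliastudios.cameraview.internal.Issue514Workaround;
import com.otaliastudios.cameraview.internal.egl.EglViewport;

// Sketch only: in this PR the real wiring lives in OverlayDrawer.
class Issue514WorkaroundUsageSketch {
    private final Object lock = new Object();

    void updateAndRender(SurfaceTexture overlaySurfaceTexture,
                         EglViewport viewport,
                         int overlayTextureId,
                         float[] transform) {
        Issue514Workaround workaround = new Issue514Workaround(overlayTextureId);
        synchronized (lock) {
            // Bind a non-zero texture id before updateTexImage(), as described above.
            workaround.beforeOverlayUpdateTexImage();
            overlaySurfaceTexture.updateTexImage();
        }
        overlaySurfaceTexture.getTransformMatrix(transform);
        synchronized (lock) {
            // drawFrame() resets the bound texture to 0, hence the shared lock
            // when updating and rendering happen on different threads.
            viewport.drawFrame(overlayTextureId, transform);
        }
        workaround.end(); // restore the clean state: texture 0 bound
    }
}
```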

@@ -51,7 +51,7 @@ public class EglBaseSurface extends EglElement {
     private int mWidth = -1;
     private int mHeight = -1;
-    protected EglBaseSurface(EglCore eglCore) {
+    public EglBaseSurface(EglCore eglCore) {
         mEglCore = eglCore;
     }

@@ -44,6 +44,8 @@ public class EglViewport extends EglElement {
     // Stuff from Texture2dProgram
     private int mProgramHandle;
     private int mTextureTarget;
+    private int mTextureUnit;
     // Program attributes
     private int muMVPMatrixLocation;
     private int muTexMatrixLocation;
@@ -60,20 +62,16 @@ public class EglViewport extends EglElement {
     public EglViewport() {
         mTextureTarget = GLES11Ext.GL_TEXTURE_EXTERNAL_OES;
+        mTextureUnit = GLES20.GL_TEXTURE0;
         //init the default shader effect
         mShaderEffect = new NoFilterEffect();
         initProgram();
     }
-    private void initProgram(){
+    private void initProgram() {
         release();
-        mTextureTarget = GLES11Ext.GL_TEXTURE_EXTERNAL_OES;
         mProgramHandle = createProgram(mShaderEffect.getVertexShader(), mShaderEffect.getFragmentShader());
         maPositionLocation = GLES20.glGetAttribLocation(mProgramHandle, mShaderEffect.getPositionVariableName());
         checkLocation(maPositionLocation, mShaderEffect.getPositionVariableName());
         maTextureCoordLocation = GLES20.glGetAttribLocation(mProgramHandle, mShaderEffect.getTexttureCoordinateVariableName());
@@ -99,6 +97,7 @@ public class EglViewport extends EglElement {
         check("glGenTextures");
         int texId = textures[0];
+        GLES20.glActiveTexture(mTextureUnit);
         GLES20.glBindTexture(mTextureTarget, texId);
         check("glBindTexture " + texId);
@@ -152,14 +151,8 @@ public class EglViewport extends EglElement {
         GLES20.glUseProgram(mProgramHandle);
         check("glUseProgram");
-        // enable blending, from: http://www.learnopengles.com/android-lesson-five-an-introduction-to-blending/
-        GLES20.glDisable(GLES20.GL_CULL_FACE);
-        GLES20.glDisable(GLES20.GL_DEPTH_TEST);
-        GLES20.glEnable(GLES20.GL_BLEND);
-        GLES20.glBlendFunc(GLES20.GL_SRC_ALPHA, GLES20.GL_ONE_MINUS_SRC_ALPHA);
         // Set the texture.
-        GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
+        GLES20.glActiveTexture(mTextureUnit);
         GLES20.glBindTexture(mTextureTarget, textureId);
         // Copy the model / view / projection matrix over.

@@ -17,7 +17,7 @@ public class CropHelper {
     public static Rect computeCrop(@NonNull Size currentSize, @NonNull AspectRatio targetRatio) {
         int currentWidth = currentSize.getWidth();
         int currentHeight = currentSize.getHeight();
-        if (targetRatio.matches(currentSize)) {
+        if (targetRatio.matches(currentSize, 0.0005F)) {
             return new Rect(0, 0, currentWidth, currentHeight);
         }
@@ -26,13 +26,13 @@ public class CropHelper {
         int x, y, width, height;
         if (currentRatio.toFloat() > targetRatio.toFloat()) {
             height = currentHeight;
-            width = (int) (height * targetRatio.toFloat());
+            width = Math.round(height * targetRatio.toFloat());
             y = 0;
-            x = (currentWidth - width) / 2;
+            x = Math.round((currentWidth - width) / 2F);
         } else {
             width = currentWidth;
-            height = (int) (width / targetRatio.toFloat());
+            height = Math.round(width / targetRatio.toFloat());
-            y = (currentHeight - height) / 2;
+            y = Math.round((currentHeight - height) / 2F);
             x = 0;
         }
         return new Rect(x, y, x + width, y + height);
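A quick worked example of the crop math above, with hypothetical sizes that are not part of this diff:

```java
import android.graphics.Rect;
import com.otaliastudios.cameraview.internal.utils.CropHelper;
import com.otaliastudios.cameraview.size.AspectRatio;
import com.otaliastudios.cameraview.size.Size;

class CropMathSketch {
    static Rect example() {
        // 1920x1080 (~1.78) is wider than the 4:3 target (~1.33), so height is kept:
        //   width = Math.round(1080 * 4f / 3f) = 1440
        //   x     = Math.round((1920 - 1440) / 2F) = 240, y = 0
        // so the returned rect is (240, 0, 1680, 1080).
        return CropHelper.computeCrop(new Size(1920, 1080), AspectRatio.of(4, 3));
    }
}
```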

@@ -14,6 +14,7 @@ import androidx.annotation.NonNull;
 import java.lang.ref.WeakReference;
 import java.util.concurrent.Callable;
 import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.Executor;
 /**
@@ -25,6 +26,13 @@ public class WorkerHandler {
     private final static CameraLogger LOG = CameraLogger.create(WorkerHandler.class.getSimpleName());
     private final static ConcurrentHashMap<String, WeakReference<WorkerHandler>> sCache = new ConcurrentHashMap<>(4);
+    private final static String FALLBACK_NAME = "FallbackCameraThread";
+    // Store a hard reference to the fallback handler. We never use this, only update it
+    // anytime get() is called. This should ensure that this instance is not collected.
+    @SuppressWarnings("FieldCanBeLocal")
+    private static WorkerHandler sFallbackHandler;
     /**
      * Gets a possibly cached handler with the given name.
      * @param name the handler name
@@ -36,15 +44,20 @@ public class WorkerHandler {
             //noinspection ConstantConditions
             WorkerHandler cached = sCache.get(name).get();
             if (cached != null) {
-                HandlerThread thread = cached.mThread;
-                if (thread.isAlive() && !thread.isInterrupted()) {
+                if (cached.getThread().isAlive() && !cached.getThread().isInterrupted()) {
                     LOG.w("get:", "Reusing cached worker handler.", name);
                     return cached;
+                } else {
+                    // Cleanup the old thread before creating a new one
+                    cached.destroy();
+                    LOG.w("get:", "Thread reference found, but not alive or interrupted. Removing.", name);
+                    sCache.remove(name);
                 }
-            }
-            LOG.w("get:", "Thread reference died, removing.", name);
-            sCache.remove(name);
+            } else {
+                LOG.w("get:", "Thread reference died. Removing.", name);
+                sCache.remove(name);
+            }
         }
         LOG.i("get:", "Creating new handler.", name);
         WorkerHandler handler = new WorkerHandler(name);
@@ -58,7 +71,8 @@
      */
     @NonNull
     public static WorkerHandler get() {
-        return get("FallbackCameraThread");
+        sFallbackHandler = get(FALLBACK_NAME);
+        return sFallbackHandler;
     }
     /**
@@ -87,6 +101,20 @@
                 WorkerHandler.this.run(command);
             }
         };
+        // HandlerThreads/Handlers sometimes have a significant warmup time.
+        // We want to spend this time here so when this object is built, it
+        // is fully operational.
+        final CountDownLatch latch = new CountDownLatch(1);
+        post(new Runnable() {
+            @Override
+            public void run() {
+                latch.countDown();
+            }
+        });
+        try {
+            latch.await();
+        } catch (InterruptedException ignore) {}
     }
     /**
@@ -183,6 +211,7 @@
      * Returns the android backing {@link Looper}.
      * @return the looper
      */
+    @SuppressWarnings("WeakerAccess")
     @NonNull
     public Looper getLooper() {
         return mThread.getLooper();
@@ -197,21 +226,34 @@
         return mExecutor;
     }
+    /**
+     * Destroys this handler and its thread. After this method returns, the handler
+     * should be considered unusable.
+     *
+     * Internal note: this does not remove the thread from our cache, but it does
+     * interrupt it, so the next {@link #get(String)} call will remove it.
+     * In any case, we only store weak references.
+     */
+    public void destroy() {
+        HandlerThread thread = getThread();
+        if (thread.isAlive()) {
+            thread.interrupt();
+            thread.quit();
+            // after quit(), the thread will die at some point in the future. Might take some ms.
+            // try { handler.getThread().join(); } catch (InterruptedException ignore) {}
+        }
+    }
     /**
      * Destroys all handlers, interrupting their work and
      * removing them from our cache.
      */
-    public static void destroy() {
+    public static void destroyAll() {
         for (String key : sCache.keySet()) {
             WeakReference<WorkerHandler> ref = sCache.get(key);
             //noinspection ConstantConditions
             WorkerHandler handler = ref.get();
-            if (handler != null && handler.getThread().isAlive()) {
-                handler.getThread().interrupt();
-                handler.getThread().quit();
-                // after quit(), the thread will die at some point in the future. Might take some ms.
-                // try { handler.getThread().join(); } catch (InterruptedException ignore) {}
-            }
+            if (handler != null) handler.destroy();
             ref.clear();
         }
         sCache.clear();
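Reviewer aid: a short sketch of the new lifecycle this hunk introduces; per-instance destroy() replaces the old static destroy(), and destroyAll() is what the integration tests now call between runs. The thread name below is illustrative:

```java
import com.otaliastudios.cameraview.internal.utils.WorkerHandler;

class WorkerHandlerLifecycleSketch {
    static void example() {
        WorkerHandler worker = WorkerHandler.get("FrameProcessing"); // name is illustrative
        worker.run(new Runnable() {
            @Override
            public void run() {
                // background work executed on the "FrameProcessing" thread
            }
        });
        worker.destroy();           // stops just this handler's thread; a later get() recreates it
        WorkerHandler.destroyAll(); // stops every cached handler, as CameraIntegrationTest now does
    }
}
```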

@@ -0,0 +1,131 @@
package com.otaliastudios.cameraview.overlay;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.PorterDuff;
import android.graphics.SurfaceTexture;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import android.view.Surface;
import androidx.annotation.NonNull;
import androidx.annotation.VisibleForTesting;
import com.otaliastudios.cameraview.CameraLogger;
import com.otaliastudios.cameraview.internal.Issue514Workaround;
import com.otaliastudios.cameraview.internal.egl.EglViewport;
import com.otaliastudios.cameraview.size.Size;
import java.nio.Buffer;
/**
* Draws overlays through {@link Overlay}.
*
* - Provides a {@link Canvas} to be passed to the Overlay
* - Lets the overlay draw there: {@link #draw(Overlay.Target)}
* - Renders this into the current EGL window: {@link #render()}
* - Applies the {@link Issue514Workaround} the correct way
*
* In the future we might want to use a different approach than {@link EglViewport},
* {@link SurfaceTexture} and {@link GLES11Ext#GL_TEXTURE_EXTERNAL_OES},
* for example by using a regular {@link GLES20#GL_TEXTURE_2D} that might
* be filled through {@link GLES20#glTexImage2D(int, int, int, int, int, int, int, int, Buffer)}.
*
* The current approach has some issues, for example see {@link Issue514Workaround}.
*/
public class OverlayDrawer {
private static final String TAG = OverlayDrawer.class.getSimpleName();
private static final CameraLogger LOG = CameraLogger.create(TAG);
private Overlay mOverlay;
@VisibleForTesting int mTextureId;
private SurfaceTexture mSurfaceTexture;
private Surface mSurface;
private float[] mTransform = new float[16];
@VisibleForTesting EglViewport mViewport;
private Issue514Workaround mIssue514Workaround;
private final Object mIssue514WorkaroundLock = new Object();
public OverlayDrawer(@NonNull Overlay overlay, @NonNull Size size) {
mOverlay = overlay;
mViewport = new EglViewport();
mTextureId = mViewport.createTexture();
mSurfaceTexture = new SurfaceTexture(mTextureId);
mSurfaceTexture.setDefaultBufferSize(size.getWidth(), size.getHeight());
mSurface = new Surface(mSurfaceTexture);
mIssue514Workaround = new Issue514Workaround(mTextureId);
}
/**
* Should be called to draw the {@link Overlay} on the given {@link Overlay.Target}.
* This will provide a working {@link Canvas} to the overlay and also update the
* drawn contents to a GLES texture.
* @param target the target
*/
public void draw(@NonNull Overlay.Target target) {
try {
final Canvas surfaceCanvas = mSurface.lockCanvas(null);
surfaceCanvas.drawColor(Color.TRANSPARENT, PorterDuff.Mode.CLEAR);
mOverlay.drawOn(target, surfaceCanvas);
mSurface.unlockCanvasAndPost(surfaceCanvas);
} catch (Surface.OutOfResourcesException e) {
LOG.w("Got Surface.OutOfResourcesException while drawing video overlays", e);
}
synchronized (mIssue514WorkaroundLock) {
mIssue514Workaround.beforeOverlayUpdateTexImage();
mSurfaceTexture.updateTexImage();
}
mSurfaceTexture.getTransformMatrix(mTransform);
}
/**
* Returns the transform that should be used to render the drawn content.
* This should be called after {@link #draw(Overlay.Target)} and can be modified.
* @return the transform matrix
*/
public float[] getTransform() {
return mTransform;
}
/**
* Renders the drawn content in the current EGL surface, assuming there is one.
* Should be called after {@link #draw(Overlay.Target)} and any {@link #getTransform()}
* modification.
*/
public void render() {
// Enable blending
// Reference http://www.learnopengles.com/android-lesson-five-an-introduction-to-blending/
GLES20.glDisable(GLES20.GL_CULL_FACE);
GLES20.glDisable(GLES20.GL_DEPTH_TEST);
GLES20.glEnable(GLES20.GL_BLEND);
GLES20.glBlendFunc(GLES20.GL_SRC_ALPHA, GLES20.GL_ONE_MINUS_SRC_ALPHA);
synchronized (mIssue514WorkaroundLock) {
mViewport.drawFrame(mTextureId, mTransform);
}
}
/**
* Releases resources.
*/
public void release() {
if (mIssue514Workaround != null) {
mIssue514Workaround.end();
mIssue514Workaround = null;
}
if (mSurfaceTexture != null) {
mSurfaceTexture.release();
mSurfaceTexture = null;
}
if (mSurface != null) {
mSurface.release();
mSurface = null;
}
if (mViewport != null) {
mViewport.release();
mViewport = null;
}
}
}
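A minimal usage sketch of the new OverlayDrawer, assuming an EGL surface is already current on the calling thread (as OverlayDrawerTest sets up above with EglCore/EglBaseSurface); the size value is illustrative:

```java
import com.otaliastudios.cameraview.overlay.Overlay;
import com.otaliastudios.cameraview.overlay.OverlayDrawer;
import com.otaliastudios.cameraview.size.Size;

class OverlayDrawerUsageSketch {
    // Sketch only: an EGL context and surface must already be current on this thread.
    static void drawOverlays(Overlay overlay) {
        OverlayDrawer drawer = new OverlayDrawer(overlay, new Size(1080, 1920));
        drawer.draw(Overlay.Target.PICTURE_SNAPSHOT); // let the Overlay draw on a Canvas, then push it to a GL texture
        float[] transform = drawer.getTransform();    // can be adjusted before rendering if needed
        drawer.render();                              // blends the overlay into the current EGL surface
        drawer.release();                             // frees texture, surface and viewport
    }
}
```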

@@ -14,6 +14,8 @@ import android.os.Build;
 import com.otaliastudios.cameraview.CameraLogger;
 import com.otaliastudios.cameraview.PictureResult;
+import com.otaliastudios.cameraview.internal.Issue514Workaround;
+import com.otaliastudios.cameraview.internal.egl.EglBaseSurface;
 import com.otaliastudios.cameraview.overlay.Overlay;
 import com.otaliastudios.cameraview.controls.Facing;
 import com.otaliastudios.cameraview.engine.CameraEngine;
@@ -24,6 +26,7 @@ import com.otaliastudios.cameraview.internal.egl.EglViewport;
 import com.otaliastudios.cameraview.internal.egl.EglWindowSurface;
 import com.otaliastudios.cameraview.internal.utils.CropHelper;
 import com.otaliastudios.cameraview.internal.utils.WorkerHandler;
+import com.otaliastudios.cameraview.overlay.OverlayDrawer;
 import com.otaliastudios.cameraview.preview.GlCameraPreview;
 import com.otaliastudios.cameraview.preview.RendererFrameCallback;
 import com.otaliastudios.cameraview.preview.RendererThread;
@@ -36,6 +39,23 @@ import androidx.annotation.Nullable;
 import android.view.Surface;
+/**
+ * API 19.
+ * Records a picture snapshots from the {@link GlCameraPreview}. It works as follows:
+ *
+ * - We register a one time {@link RendererFrameCallback} on the preview
+ * - We get the textureId and the frame callback on the {@link RendererThread}
+ * - [Optional: we construct another textureId for overlays]
+ * - We take a handle of the EGL context from the {@link RendererThread}
+ * - We move to another thread, and create a new EGL surface for that EGL context.
+ * - We make this new surface current, and re-draw the textureId on it
+ * - [Optional: fill the overlayTextureId and draw it on the same surface]
+ * - We use glReadPixels (through {@link EglBaseSurface#saveFrameTo(Bitmap.CompressFormat)}) and save to file.
+ *
+ * We create a new EGL surface and redraw the frame because:
+ * 1. We want to go off the renderer thread as soon as possible
+ * 2. We have overlays to be drawn - we don't want to draw them on the preview surface, not even for a frame.
+ */
 public class SnapshotGlPictureRecorder extends PictureRecorder {
     private static final String TAG = SnapshotGlPictureRecorder.class.getSimpleName();
@@ -47,6 +67,13 @@ public class SnapshotGlPictureRecorder extends PictureRecorder {
     private Overlay mOverlay;
     private boolean mHasOverlay;
+    private OverlayDrawer mOverlayDrawer;
+    private int mTextureId;
+    private float[] mTransform;
+    private EglViewport mViewport;
     public SnapshotGlPictureRecorder(
             @NonNull PictureResult.Stub stub,
@ -67,78 +94,84 @@ public class SnapshotGlPictureRecorder extends PictureRecorder {
public void take() { public void take() {
mPreview.addRendererFrameCallback(new RendererFrameCallback() { mPreview.addRendererFrameCallback(new RendererFrameCallback() {
int mTextureId; @RendererThread
SurfaceTexture mSurfaceTexture; public void onRendererTextureCreated(int textureId) {
float[] mTransform; SnapshotGlPictureRecorder.this.onRendererTextureCreated(textureId);
}
int mOverlayTextureId = 0;
SurfaceTexture mOverlaySurfaceTexture;
Surface mOverlaySurface;
float[] mOverlayTransform;
EglViewport mViewport; @RendererThread
@Override
public void onRendererFrame(@NonNull SurfaceTexture surfaceTexture, final float scaleX, final float scaleY, Filter shaderEffect) {
mPreview.removeRendererFrameCallback(this);
SnapshotGlPictureRecorder.this.onRendererFrame(surfaceTexture, scaleX, scaleY, shaderEffect);
}
});
}
@RendererThread @RendererThread
public void onRendererTextureCreated(int textureId) { @TargetApi(Build.VERSION_CODES.KITKAT)
private void onRendererTextureCreated(int textureId) {
mTextureId = textureId; mTextureId = textureId;
mViewport = new EglViewport(); mViewport = new EglViewport();
mSurfaceTexture = new SurfaceTexture(mTextureId, true);
// Need to crop the size. // Need to crop the size.
Rect crop = CropHelper.computeCrop(mResult.size, mOutputRatio); Rect crop = CropHelper.computeCrop(mResult.size, mOutputRatio);
mResult.size = new Size(crop.width(), crop.height()); mResult.size = new Size(crop.width(), crop.height());
mSurfaceTexture.setDefaultBufferSize(mResult.size.getWidth(), mResult.size.getHeight());
mTransform = new float[16]; mTransform = new float[16];
Matrix.setIdentityM(mTransform, 0);
if (mHasOverlay) { if (mHasOverlay) {
mOverlayTextureId = mViewport.createTexture(); mOverlayDrawer = new OverlayDrawer(mOverlay, mResult.size);
mOverlaySurfaceTexture = new SurfaceTexture(mOverlayTextureId, true);
mOverlaySurfaceTexture.setDefaultBufferSize(mResult.size.getWidth(), mResult.size.getHeight());
mOverlaySurface = new Surface(mOverlaySurfaceTexture);
mOverlayTransform = new float[16];
} }
} }
/**
* The tricky part here is the EGL surface creation.
*
* We don't have a real output window for the EGL surface - we will use glReadPixels()
* and never call swapBuffers(), so what we draw is never published.
*
* 1. One option is to use a pbuffer EGL surface. This works, we just have to pass
* the correct width and height. However, it is significantly slower than the current
* solution.
*
* 2. Another option is to create the EGL surface out of a ImageReader.getSurface()
* and use the reader to create a JPEG. In this case, we would have to publish
* the frame with swapBuffers(). However, currently ImageReader does not support
* all formats, it's risky. This is an example error that we get:
* "RGBA override BLOB format buffer should have height == width"
*
* The third option, which we are using, is to create the EGL surface using whatever
* {@link Surface} or {@link SurfaceTexture} we have at hand. Since we never call
* swapBuffers(), the frame will not actually be rendered. This is the fastest.
*
* @param scaleX frame scale x in {@link Reference#VIEW}
* @param scaleY frame scale y in {@link Reference#VIEW}
*/
    @RendererThread
-    @Override
-    public void onRendererFrame(@NonNull SurfaceTexture surfaceTexture, final float scaleX, final float scaleY, Filter shaderEffect) {
-        mPreview.removeRendererFrameCallback(this);
-        // This kinda work but has drawbacks:
-        // - output is upside down due to coordinates in GL: need to flip the byte[] someway
-        // - output is not rotated as we would like to: need to create a bitmap copy...
-        // - works only in the renderer thread, where it allocates the buffer and reads pixels. Bad!
-        /*
-        ByteBuffer buffer = ByteBuffer.allocateDirect(width * height * 4);
-        buffer.order(ByteOrder.LITTLE_ENDIAN);
-        GLES20.glReadPixels(0, 0, width, height, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, buffer);
-        buffer.rewind();
-        ByteArrayOutputStream bos = new ByteArrayOutputStream(buffer.array().length);
-        Bitmap bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
-        bitmap.copyPixelsFromBuffer(buffer);
-        bitmap.compress(Bitmap.CompressFormat.JPEG, 90, bos);
-        bitmap.recycle(); */
-        // For this reason it is better to create a new surface,
-        // and draw the last frame again there.
+    @TargetApi(Build.VERSION_CODES.KITKAT)
+    private void onRendererFrame(final @NonNull SurfaceTexture surfaceTexture, final float scaleX, final float scaleY, @NonNull BaseShaderEffect effect) {
+        mViewport.changeShaderEffect(effect);
+        // Get egl context from the RendererThread, which is the one in which we have created
+        // the textureId and the overlayTextureId, managed by the GlSurfaceView.
+        // Next operations can then be performed on different threads using this handle.
        final EGLContext eglContext = EGL14.eglGetCurrentContext();
-        final EglCore core = new EglCore(eglContext, EglCore.FLAG_RECORDABLE);
-        //set the current shader before taking the snapshot
-        mViewport.changeShaderEffect(shaderEffect);
-        // final EGLSurface oldSurface = EGL14.eglGetCurrentSurface(EGL14.EGL_DRAW);
-        // final EGLDisplay oldDisplay = EGL14.eglGetCurrentDisplay();
+        // Calling this invalidates the rotation/scale logic below:
+        // surfaceTexture.getTransformMatrix(mTransform); // TODO activate and fix the logic.
        WorkerHandler.execute(new Runnable() {
            @Override
            public void run() {
-                // 1. Get latest texture
-                EglWindowSurface surface = new EglWindowSurface(core, mSurfaceTexture);
-                surface.makeCurrent();
-                mSurfaceTexture.updateTexImage();
-                mSurfaceTexture.getTransformMatrix(mTransform);
-                // 2. Apply scale and crop:
-                // scaleX and scaleY are in REF_VIEW, while our input appears to be in REF_SENSOR.
+                // 0. EGL window will need an output.
+                // We create a fake one as explained in javadocs.
+                final int fakeOutputTextureId = 9999;
+                SurfaceTexture fakeOutputSurface = new SurfaceTexture(fakeOutputTextureId);
+                fakeOutputSurface.setDefaultBufferSize(mResult.size.getWidth(), mResult.size.getHeight());
+                // 1. Create an EGL surface
+                final EglCore core = new EglCore(eglContext, EglCore.FLAG_RECORDABLE);
+                final EglBaseSurface eglSurface = new EglWindowSurface(core, fakeOutputSurface);
+                eglSurface.makeCurrent();
+                // 2. Apply scale and crop
                boolean flip = mEngine.getAngles().flip(Reference.VIEW, Reference.SENSOR);
                float realScaleX = flip ? scaleY : scaleX;
                float realScaleY = flip ? scaleX : scaleY;
@ -147,69 +180,45 @@ public class SnapshotGlPictureRecorder extends PictureRecorder {
                Matrix.translateM(mTransform, 0, scaleTranslX, scaleTranslY, 0);
                Matrix.scaleM(mTransform, 0, realScaleX, realScaleY, 1);
-                // 3. Go back to 0,0 so that rotate and flip work well.
-                Matrix.translateM(mTransform, 0, 0.5F, 0.5F, 0);
-                // 4. Apply rotation:
-                // Not sure why we need the minus here.
-                Matrix.rotateM(mTransform, 0, -mResult.rotation, 0, 0, 1);
+                // 3. Apply rotation and flip
+                Matrix.translateM(mTransform, 0, 0.5F, 0.5F, 0); // Go back to 0,0
+                Matrix.rotateM(mTransform, 0, -mResult.rotation, 0, 0, 1); // Rotate (not sure why we need the minus)
                mResult.rotation = 0;
-                // 5. Flip horizontally for front camera:
-                if (mResult.facing == Facing.FRONT) {
+                if (mResult.facing == Facing.FRONT) { // 5. Flip horizontally for front camera
                    Matrix.scaleM(mTransform, 0, -1, 1, 1);
                }
-                // 6. Go back to old position.
-                Matrix.translateM(mTransform, 0, -0.5F, -0.5F, 0);
-                // 7. Do pretty much the same for overlays, though with
-                // some differences.
+                Matrix.translateM(mTransform, 0, -0.5F, -0.5F, 0); // Go back to old position
+                // 4. Do pretty much the same for overlays
                if (mHasOverlay) {
-                    // 1. First we must draw on the texture and get latest image.
-                    try {
-                        final Canvas surfaceCanvas = mOverlaySurface.lockCanvas(null);
-                        surfaceCanvas.drawColor(Color.TRANSPARENT, PorterDuff.Mode.CLEAR);
-                        mOverlay.drawOn(Overlay.Target.PICTURE_SNAPSHOT, surfaceCanvas);
-                        mOverlaySurface.unlockCanvasAndPost(surfaceCanvas);
-                    } catch (Surface.OutOfResourcesException e) {
-                        LOG.w("Got Surface.OutOfResourcesException while drawing picture overlays", e);
-                    }
-                    mOverlaySurfaceTexture.updateTexImage();
-                    mOverlaySurfaceTexture.getTransformMatrix(mOverlayTransform);
-                    // 2. Then we can apply the transformations.
+                    // 1. First we must draw on the texture and get latest image
+                    mOverlayDrawer.draw(Overlay.Target.PICTURE_SNAPSHOT);
+                    // 2. Then we can apply the transformations
                    int rotation = mEngine.getAngles().offset(Reference.VIEW, Reference.OUTPUT, Axis.ABSOLUTE);
-                    Matrix.translateM(mOverlayTransform, 0, 0.5F, 0.5F, 0);
-                    Matrix.rotateM(mOverlayTransform, 0, rotation, 0, 0, 1);
+                    Matrix.translateM(mOverlayDrawer.getTransform(), 0, 0.5F, 0.5F, 0);
+                    Matrix.rotateM(mOverlayDrawer.getTransform(), 0, rotation, 0, 0, 1);
                    // No need to flip the x axis for front camera, but need to flip the y axis always.
-                    Matrix.scaleM(mOverlayTransform, 0, 1, -1, 1);
-                    Matrix.translateM(mOverlayTransform, 0, -0.5F, -0.5F, 0);
+                    Matrix.scaleM(mOverlayDrawer.getTransform(), 0, 1, -1, 1);
+                    Matrix.translateM(mOverlayDrawer.getTransform(), 0, -0.5F, -0.5F, 0);
                }
-                // 8. Draw and save
+                // 5. Draw and save
                mViewport.drawFrame(mTextureId, mTransform);
-                if (mHasOverlay) mViewport.drawFrame(mOverlayTextureId, mOverlayTransform);
-                // don't - surface.swapBuffers();
-                mResult.data = surface.saveFrameTo(Bitmap.CompressFormat.JPEG);
+                if (mHasOverlay) mOverlayDrawer.render();
                mResult.format = PictureResult.FORMAT_JPEG;
+                mResult.data = eglSurface.saveFrameTo(Bitmap.CompressFormat.JPEG);
-                // 9. Cleanup
-                mSurfaceTexture.releaseTexImage();
-                surface.release();
+                // 6. Cleanup
+                eglSurface.releaseEglSurface();
                mViewport.release();
-                mSurfaceTexture.release();
-                if (mHasOverlay) {
-                    mOverlaySurface.release();
-                    mOverlaySurfaceTexture.release();
-                }
+                fakeOutputSurface.release();
+                if (mHasOverlay) mOverlayDrawer.release();
                core.release();
                dispatchResult();
            }
        });
    }
-        });
-    }
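For readers who want the essence of the trick described in the javadoc above, here is a condensed sketch (not part of the diff) built only from the internal helpers that appear in this change: EglCore, EglWindowSurface/EglBaseSurface, EglViewport and saveFrameTo(). The texture name 9999 and the width/height/viewport variables are illustrative placeholders; the point is that swapBuffers() is never called, so the drawn frame is read back but never published.

    // Sketch, assuming we run on a worker thread holding a shared EGLContext.
    SurfaceTexture fakeOutput = new SurfaceTexture(9999);           // arbitrary texture name, never consumed
    fakeOutput.setDefaultBufferSize(width, height);                 // the cropped snapshot size
    EglCore core = new EglCore(sharedEglContext, EglCore.FLAG_RECORDABLE);
    EglBaseSurface surface = new EglWindowSurface(core, fakeOutput);
    surface.makeCurrent();
    viewport.drawFrame(textureId, transform);                       // draw the last camera frame
    byte[] jpeg = surface.saveFrameTo(Bitmap.CompressFormat.JPEG);  // read pixels; no swapBuffers()
    surface.releaseEglSurface();
    fakeOutput.release();
    core.release();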
    @Override
    protected void dispatchResult() {

@ -19,7 +19,7 @@ public class AspectRatio implements Comparable<AspectRatio> {
     * @return a (possibly cached) aspect ratio
     */
    @NonNull
-    public static AspectRatio of(Size size) {
+    public static AspectRatio of(@NonNull Size size) {
        return AspectRatio.of(size.getWidth(), size.getHeight());
    }
@ -78,7 +78,6 @@ public class AspectRatio implements Comparable<AspectRatio> {
        return mY;
    }

-    @SuppressWarnings("WeakerAccess")
    public boolean matches(@NonNull Size size) {
        int gcd = gcd(size.getWidth(), size.getHeight());
        int x = size.getWidth() / gcd;
@ -86,6 +85,10 @@ public class AspectRatio implements Comparable<AspectRatio> {
        return mX == x && mY == y;
    }

+    public boolean matches(@NonNull Size size, float tolerance) {
+        return Math.abs(toFloat() - (float) size.getWidth() / size.getHeight()) <= tolerance;
+    }

    @Override
    public boolean equals(Object o) {
        if (o == null) {
@ -107,7 +110,6 @@ public class AspectRatio implements Comparable<AspectRatio> {
        return mX + ":" + mY;
    }

-    @SuppressWarnings("WeakerAccess")
    public float toFloat() {
        return (float) mX / mY;
    }

@ -103,11 +103,11 @@ public abstract class FullVideoRecorder extends VideoRecorder {
        switch (what) {
            case MediaRecorder.MEDIA_RECORDER_INFO_MAX_DURATION_REACHED:
                mResult.endReason = VideoResult.REASON_MAX_DURATION_REACHED;
-                stop();
+                stop(false);
                break;
            case MediaRecorder.MEDIA_RECORDER_INFO_MAX_FILESIZE_REACHED:
                mResult.endReason = VideoResult.REASON_MAX_SIZE_REACHED;
-                stop();
+                stop(false);
                break;
        }
    }
@ -130,7 +130,7 @@ public abstract class FullVideoRecorder extends VideoRecorder {
    protected void onStart() {
        if (!prepareMediaRecorder(mResult)) {
            mResult = null;
-            stop();
+            stop(false);
            return;
        }
@ -141,12 +141,12 @@ public abstract class FullVideoRecorder extends VideoRecorder {
            LOG.w("start:", "Error while starting media recorder.", e);
            mResult = null;
            mError = e;
-            stop();
+            stop(false);
        }
    }

    @Override
-    protected void onStop() {
+    protected void onStop(boolean isCameraShutdown) {
        if (mMediaRecorder != null) {
            dispatchVideoRecordingEnd();
            try {

@ -1,19 +1,15 @@
package com.otaliastudios.cameraview.video;

-import android.graphics.Canvas;
-import android.graphics.Color;
-import android.graphics.PorterDuff;
import android.graphics.SurfaceTexture;
import android.opengl.EGL14;
import android.os.Build;
-import android.view.Surface;

import com.otaliastudios.cameraview.CameraLogger;
import com.otaliastudios.cameraview.overlay.Overlay;
import com.otaliastudios.cameraview.VideoResult;
import com.otaliastudios.cameraview.controls.Audio;
import com.otaliastudios.cameraview.engine.CameraEngine;
-import com.otaliastudios.cameraview.internal.egl.EglViewport;
+import com.otaliastudios.cameraview.overlay.OverlayDrawer;
import com.otaliastudios.cameraview.preview.GlCameraPreview;
import com.otaliastudios.cameraview.preview.RendererFrameCallback;
import com.otaliastudios.cameraview.preview.RendererThread;
@ -60,15 +56,11 @@ public class SnapshotVideoRecorder extends VideoRecorder implements RendererFram
    private int mDesiredState = STATE_NOT_RECORDING;
    private int mTextureId = 0;
-    private int mOverlayTextureId = 0;
-    private SurfaceTexture mOverlaySurfaceTexture;
-    private Surface mOverlaySurface;
    private Overlay mOverlay;
+    private OverlayDrawer mOverlayDrawer;
    private boolean mHasOverlay;
    private int mOverlayRotation;
-    private EglViewport mViewport;

    public SnapshotVideoRecorder(@NonNull CameraEngine engine,
                                 @NonNull GlCameraPreview preview,
                                 @Nullable Overlay overlay,
@ -87,20 +79,24 @@ public class SnapshotVideoRecorder extends VideoRecorder implements RendererFram
    }

    @Override
-    protected void onStop() {
+    protected void onStop(boolean isCameraShutdown) {
+        if (isCameraShutdown) {
+            // The renderer callback might never be called. From my tests, it's not.
+            LOG.i("Stopping the encoder engine from isCameraShutdown.");
+            mDesiredState = STATE_NOT_RECORDING;
+            mCurrentState = STATE_NOT_RECORDING;
+            mEncoderEngine.stop();
+        } else {
            mDesiredState = STATE_NOT_RECORDING;
        }
+    }

    @RendererThread
    @Override
    public void onRendererTextureCreated(int textureId) {
        mTextureId = textureId;
        if (mHasOverlay) {
-            mViewport = new EglViewport();
-            mOverlayTextureId = mViewport.createTexture();
-            mOverlaySurfaceTexture = new SurfaceTexture(mOverlayTextureId);
-            mOverlaySurfaceTexture.setDefaultBufferSize(mResult.size.getWidth(), mResult.size.getHeight());
-            mOverlaySurface = new Surface(mOverlaySurfaceTexture);
+            mOverlayDrawer = new OverlayDrawer(mOverlay, mResult.size);
        }
    }
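Both recorders now delegate overlay work to OverlayDrawer. Pieced together from the calls visible elsewhere in this diff, its lifecycle is roughly the following sketch (variable names are illustrative, not the recorder's actual fields):

    OverlayDrawer drawer = new OverlayDrawer(overlay, outputSize);        // once the output size is known
    drawer.draw(Overlay.Target.VIDEO_SNAPSHOT);                           // or PICTURE_SNAPSHOT: render overlays to its texture
    Matrix.rotateM(drawer.getTransform(), 0, overlayRotation, 0, 0, 1);   // optionally adjust its transform
    drawer.render();                                                      // draw the overlay texture on the current EGL surface
    drawer.release();                                                     // on teardown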
@ -110,9 +106,6 @@ public class SnapshotVideoRecorder extends VideoRecorder implements RendererFram
        if (mCurrentState == STATE_NOT_RECORDING && mDesiredState == STATE_RECORDING) {
            LOG.i("Starting the encoder engine.");
-            //set current shader effect
-            mViewport.changeShaderEffect(shaderEffect);
            // Set default options
            if (mResult.videoFrameRate <= 0) mResult.videoFrameRate = DEFAULT_VIDEO_FRAMERATE;
            if (mResult.videoBitRate <= 0) mResult.videoBitRate = estimateVideoBitRate(mResult.size, mResult.videoFrameRate);
@ -141,9 +134,13 @@ public class SnapshotVideoRecorder extends VideoRecorder implements RendererFram
            videoConfig.textureId = mTextureId;
            videoConfig.scaleX = scaleX;
            videoConfig.scaleY = scaleY;
+            // Get egl context from the RendererThread, which is the one in which we have created
+            // the textureId and the overlayTextureId, managed by the GlSurfaceView.
+            // Next operations can then be performed on different threads using this handle.
            videoConfig.eglContext = EGL14.eglGetCurrentContext();
            if (mHasOverlay) {
-                videoConfig.overlayTextureId = mOverlayTextureId;
+                videoConfig.overlayTarget = Overlay.Target.VIDEO_SNAPSHOT;
+                videoConfig.overlayDrawer = mOverlayDrawer;
                videoConfig.overlayRotation = mOverlayRotation;
            }
            TextureMediaEncoder videoEncoder = new TextureMediaEncoder(videoConfig);
@ -170,28 +167,10 @@ public class SnapshotVideoRecorder extends VideoRecorder implements RendererFram
            LOG.v("dispatching frame.");
            TextureMediaEncoder textureEncoder = (TextureMediaEncoder) mEncoderEngine.getVideoEncoder();
            TextureMediaEncoder.Frame frame = textureEncoder.acquireFrame();
-            frame.timestamp = surfaceTexture.getTimestamp();
+            frame.timestampNanos = surfaceTexture.getTimestamp();
            frame.timestampMillis = System.currentTimeMillis(); // NOTE: this is an approximation but it seems to work.
            surfaceTexture.getTransformMatrix(frame.transform);
-            // get overlay
-            if (mHasOverlay) {
-                try {
-                    final Canvas surfaceCanvas = mOverlaySurface.lockCanvas(null);
-                    surfaceCanvas.drawColor(Color.TRANSPARENT, PorterDuff.Mode.CLEAR);
-                    mOverlay.drawOn(Overlay.Target.VIDEO_SNAPSHOT, surfaceCanvas);
-                    mOverlaySurface.unlockCanvasAndPost(surfaceCanvas);
-                } catch (Surface.OutOfResourcesException e) {
-                    LOG.w("Got Surface.OutOfResourcesException while drawing video overlays", e);
-                }
-                mOverlaySurfaceTexture.updateTexImage();
-                mOverlaySurfaceTexture.getTransformMatrix(frame.overlayTransform);
-            }
-            if (mEncoderEngine != null) {
-                // Can happen on teardown. At least it used to.
-                // NOTE: If this still happens, I would say we can still crash on mOverlaySurface
-                // calls above. We might have to add some synchronization.
+            if (mEncoderEngine != null) { // Can happen on teardown. At least it used to.
                mEncoderEngine.notify(TextureMediaEncoder.FRAME_EVENT, frame);
            }
        }
@ -239,13 +218,9 @@ public class SnapshotVideoRecorder extends VideoRecorder implements RendererFram
            mDesiredState = STATE_NOT_RECORDING;
            mPreview.removeRendererFrameCallback(SnapshotVideoRecorder.this);
            mPreview = null;
-            if (mOverlaySurfaceTexture != null) {
-                mOverlaySurfaceTexture.release();
-                mOverlaySurfaceTexture = null;
-            }
-            if (mOverlaySurface != null) {
-                mOverlaySurface.release();
-                mOverlaySurface = null;
+            if (mOverlayDrawer != null) {
+                mOverlayDrawer.release();
+                mOverlayDrawer = null;
            }
            mEncoderEngine = null;
            dispatchResult();

@ -64,9 +64,10 @@ public abstract class VideoRecorder {
    /**
     * Stops recording.
+     * @param isCameraShutdown whether this is a full shutdown, camera is being closed
     */
-    public final void stop() {
-        onStop();
+    public final void stop(boolean isCameraShutdown) {
+        onStop(isCameraShutdown);
    }

    /**
@ -79,13 +80,12 @@ public abstract class VideoRecorder {
    protected abstract void onStart();

-    protected abstract void onStop();
+    protected abstract void onStop(boolean isCameraShutdown);

    /**
     * Subclasses can call this to notify that the result was obtained,
     * either with some error (null result) or with the actual stub, filled.
     */
-    @SuppressWarnings("WeakerAccess")
    @CallSuper
    protected void dispatchResult() {
        mIsRecording = false;
@ -112,6 +112,7 @@ public abstract class VideoRecorder {
     * Subclasses can call this to notify that the video recording has ended,
     * although the video result might still be processed.
     */
+    @SuppressWarnings("WeakerAccess")
    @CallSuper
    protected void dispatchVideoRecordingEnd() {
        if (mListener != null) {

@ -20,7 +20,7 @@ public class AudioConfig {
    final int encoding = AudioFormat.ENCODING_PCM_16BIT; // Determines the sampleSizePerChannel
    // The 44.1KHz frequency is the only setting guaranteed to be available on all devices.
    final int samplingFrequency = 44100; // samples/sec
-    final int sampleSizePerChannel = 2; // byte/sample/channel [16bit]
+    final int sampleSizePerChannel = 2; // byte/sample/channel [16bit]. If this changes, review noise introduction
    final int byteRatePerChannel = samplingFrequency * sampleSizePerChannel; // byte/sec/channel

    @NonNull
@ -75,7 +75,7 @@ public class AudioConfig {
     * @return the number of frames
     */
    int audioRecordBufferFrames() {
-        return 25;
+        return 50;
    }

    /**
@ -91,6 +91,6 @@ public class AudioConfig {
     * @return the buffer pool max size
     */
    int bufferPoolMaxSize() {
-        return 80;
+        return 500;
    }
}
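For context on these defaults, the constants above give the following rates (simple arithmetic on the values shown; the stereo figure assumes two channels, which is an assumption of the example):

    int byteRatePerChannel = 44100 * 2;          // 88200 bytes/sec/channel
    int stereoByteRate = byteRatePerChannel * 2; // 176400 bytes/sec

Doubling audioRecordBufferFrames() to 50 and raising bufferPoolMaxSize() to 500 therefore allows more raw audio frames to sit between the recording thread and the encoder before the pool runs dry.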

@ -1,6 +1,5 @@
package com.otaliastudios.cameraview.video.encoding;

-import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
@ -15,8 +14,10 @@ import androidx.annotation.RequiresApi;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.util.HashMap;
import java.util.Map;
import java.util.Random;
import java.util.concurrent.LinkedBlockingQueue;

/**
@ -30,23 +31,24 @@ public class AudioMediaEncoder extends MediaEncoder {
    private static final boolean PERFORMANCE_DEBUG = false;
    private static final boolean PERFORMANCE_FILL_GAPS = true;
+    private static final int PERFORMANCE_MAX_GAPS = 8;

    private boolean mRequestStop = false;
    private AudioEncodingThread mEncoder;
    private AudioRecordingThread mRecorder;
    private ByteBufferPool mByteBufferPool;
-    private ByteBuffer mZeroBuffer;
    private final AudioTimestamp mTimestamp;
    private AudioConfig mConfig;
    private InputBufferPool mInputBufferPool = new InputBufferPool();
    private final LinkedBlockingQueue<InputBuffer> mInputBufferQueue = new LinkedBlockingQueue<>();
+    private AudioNoise mAudioNoise;

    // Just to debug performance.
-    private int mSendCount = 0;
-    private int mExecuteCount = 0;
-    private long mAvgSendDelay = 0;
-    private long mAvgExecuteDelay = 0;
-    private Map<Long, Long> mSendStartMap = new HashMap<>();
+    private int mDebugSendCount = 0;
+    private int mDebugExecuteCount = 0;
+    private long mDebugSendAvgDelay = 0;
+    private long mDebugExecuteAvgDelay = 0;
+    private Map<Long, Long> mDebugSendStartMap = new HashMap<>();

    public AudioMediaEncoder(@NonNull AudioConfig config) {
        super("AudioEncoder");
@ -76,7 +78,7 @@ public class AudioMediaEncoder extends MediaEncoder {
        mMediaCodec.configure(audioFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        mMediaCodec.start();
        mByteBufferPool = new ByteBufferPool(mConfig.frameSize(), mConfig.bufferPoolMaxSize());
-        mZeroBuffer = ByteBuffer.allocateDirect(mConfig.frameSize());
+        mAudioNoise = new AudioNoise(mConfig);
    }

    @EncoderThread
@ -130,11 +132,13 @@ public class AudioMediaEncoder extends MediaEncoder {
        private AudioRecord mAudioRecord;
        private ByteBuffer mCurrentBuffer;
-        private int mReadBytes;
+        private int mCurrentReadBytes;
        private long mLastTimeUs;
        private long mFirstTimeUs = Long.MIN_VALUE;

        private AudioRecordingThread() {
+            setPriority(Thread.MAX_PRIORITY);
            final int minBufferSize = AudioRecord.getMinBufferSize(
                    mConfig.samplingFrequency,
                    mConfig.audioFormatChannels(),
@ -152,14 +156,22 @@ public class AudioMediaEncoder extends MediaEncoder {
                    mConfig.audioFormatChannels(),
                    mConfig.encoding,
                    bufferSize);
-            setPriority(Thread.MAX_PRIORITY);
        }

        @Override
        public void run() {
            mAudioRecord.startRecording();
            while (!mRequestStop) {
+                if (!hasReachedMaxLength()) {
                    read(false);
+                } else {
+                    // We have reached the max length, so stop reading.
+                    // However, do not get out of the loop - the controller
+                    // will call stop() on us soon. It's not our responsibility
+                    // to stop ourselves.
+                    //noinspection UnnecessaryContinue
+                    continue;
+                }
            }
            LOG.w("Stop was requested. We're out of the loop. Will post an endOfStream.");
            // Last input with 0 length. This will signal the endOfStream.
@ -192,25 +204,25 @@ public class AudioMediaEncoder extends MediaEncoder {
            // with left and right bytes. https://stackoverflow.com/q/20594750/4288782
            if (PERFORMANCE_DEBUG) {
                long before = System.nanoTime();
-                mReadBytes = mAudioRecord.read(mCurrentBuffer, mConfig.frameSize());
+                mCurrentReadBytes = mAudioRecord.read(mCurrentBuffer, mConfig.frameSize());
                long after = System.nanoTime();
                float delayMillis = (after - before) / 1000000F;
-                float durationMillis = AudioTimestamp.bytesToMillis(mReadBytes, mConfig.byteRate());
+                float durationMillis = AudioTimestamp.bytesToMillis(mCurrentReadBytes, mConfig.byteRate());
                LOG.v("read thread - reading took:", delayMillis,
                        "should be:", durationMillis,
                        "delay:", delayMillis - durationMillis);
            } else {
-                mReadBytes = mAudioRecord.read(mCurrentBuffer, mConfig.frameSize());
+                mCurrentReadBytes = mAudioRecord.read(mCurrentBuffer, mConfig.frameSize());
            }
-            LOG.i("read thread - eos:", endOfStream, "- Read new audio frame. Bytes:", mReadBytes);
-            if (mReadBytes > 0) { // Good read: increase PTS.
-                increaseTime(mReadBytes, endOfStream);
+            LOG.i("read thread - eos:", endOfStream, "- Read new audio frame. Bytes:", mCurrentReadBytes);
+            if (mCurrentReadBytes > 0) { // Good read: increase PTS.
+                increaseTime(mCurrentReadBytes, endOfStream);
                LOG.i("read thread - eos:", endOfStream, "- mLastTimeUs:", mLastTimeUs);
-                mCurrentBuffer.limit(mReadBytes);
+                mCurrentBuffer.limit(mCurrentReadBytes);
                enqueue(mCurrentBuffer, mLastTimeUs, endOfStream);
-            } else if (mReadBytes == AudioRecord.ERROR_INVALID_OPERATION) {
+            } else if (mCurrentReadBytes == AudioRecord.ERROR_INVALID_OPERATION) {
                LOG.e("read thread - eos:", endOfStream, "- Got AudioRecord.ERROR_INVALID_OPERATION");
-            } else if (mReadBytes == AudioRecord.ERROR_BAD_VALUE) {
+            } else if (mCurrentReadBytes == AudioRecord.ERROR_BAD_VALUE) {
                LOG.e("read thread - eos:", endOfStream, "- Got AudioRecord.ERROR_BAD_VALUE");
            }
        }
@ -235,43 +247,21 @@ public class AudioMediaEncoder extends MediaEncoder {
            }

            // See if we reached the max length value.
+            if (!hasReachedMaxLength()) {
                boolean didReachMaxLength = (mLastTimeUs - mFirstTimeUs) > getMaxLengthMillis() * 1000L;
                if (didReachMaxLength && !endOfStream) {
                    LOG.w("read thread - this frame reached the maxLength! deltaUs:", mLastTimeUs - mFirstTimeUs);
                    notifyMaxLengthReached();
                }
+            }
-            // Add zeroes if we have huge gaps. Even if timestamps are correct, if we have gaps between
-            // them, the encoder might shrink all timestamps to have a continuous audio. This results
-            // in a video that is fast-forwarded.
-            // Adding zeroes does not solve the gaps issue - audio will still be distorted. But at
-            // least we get a video that has the correct playback speed.
-            if (PERFORMANCE_FILL_GAPS) {
-                int gaps = mTimestamp.getGapCount(mConfig.frameSize());
-                if (gaps > 0) {
-                    long gapStart = mTimestamp.getGapStartUs(mLastTimeUs);
-                    long frameUs = AudioTimestamp.bytesToUs(mConfig.frameSize(), mConfig.byteRate());
-                    LOG.w("read thread - GAPS: trying to add", gaps, "zeroed buffers");
-                    for (int i = 0; i < gaps; i++) {
-                        ByteBuffer zeroBuffer = mByteBufferPool.get();
-                        if (zeroBuffer == null) {
-                            LOG.e("read thread - GAPS: aborting because we have no free buffer.");
-                            break;
-                        }
-                        ;
-                        zeroBuffer.position(0);
-                        zeroBuffer.put(mZeroBuffer);
-                        zeroBuffer.clear();
-                        enqueue(zeroBuffer, gapStart, false);
-                        gapStart += frameUs;
-                    }
-                }
-            }
+            // Maybe add noise.
+            maybeAddNoise();
        }

        private void enqueue(@NonNull ByteBuffer byteBuffer, long timestamp, boolean isEndOfStream) {
            if (PERFORMANCE_DEBUG) {
-                mSendStartMap.put(timestamp, System.nanoTime() / 1000000);
+                mDebugSendStartMap.put(timestamp, System.nanoTime() / 1000000);
            }
            int readBytes = byteBuffer.remaining();
            InputBuffer inputBuffer = mInputBufferPool.get();
@ -283,6 +273,45 @@ public class AudioMediaEncoder extends MediaEncoder {
            mInputBufferQueue.add(inputBuffer);
        }
/**
* If our {@link AudioTimestamp} detected huge gap, and the performance flag is enabled,
* we can add noise to fill them.
*
* Even if we always pass the correct timestamps, if there are big gaps between the frames,
* the encoder implementation might shrink all timestamps to have a continuous audio.
* This results in a video that is fast-forwarded.
*
* Adding noise does not solve the gaps issue, we'll still have distorted audio, but
* at least we get a video that has the correct playback speed.
*
* NOTE: this MUST be fast!
* If this operation is slow, we make the {@link AudioRecordingThread} busy, so we'll
* read the next frame with a delay, so we'll have even more gaps at the next call
* and spend even more time here. The result might be recording no audio at all - just
* random noise.
* This is the reason why we have a {@link #PERFORMANCE_MAX_GAPS} number.
*/
private void maybeAddNoise() {
if (!PERFORMANCE_FILL_GAPS) return;
int gaps = mTimestamp.getGapCount(mConfig.frameSize());
if (gaps <= 0) return;
long gapStart = mTimestamp.getGapStartUs(mLastTimeUs);
long frameUs = AudioTimestamp.bytesToUs(mConfig.frameSize(), mConfig.byteRate());
LOG.w("read thread - GAPS: trying to add", gaps, "noise buffers. PERFORMANCE_MAX_GAPS:", PERFORMANCE_MAX_GAPS);
for (int i = 0; i < Math.min(gaps, PERFORMANCE_MAX_GAPS); i++) {
ByteBuffer noiseBuffer = mByteBufferPool.get();
if (noiseBuffer == null) {
LOG.e("read thread - GAPS: aborting because we have no free buffer.");
break;
}
noiseBuffer.clear();
mAudioNoise.fill(noiseBuffer);
noiseBuffer.rewind();
enqueue(noiseBuffer, gapStart, false);
gapStart += frameUs;
}
}
    }
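As a rough sense of the arithmetic behind the gap filling (illustrative only; AudioTimestamp owns the real computation), one frame of frameSize() bytes lasts:

    long frameUs = AudioTimestamp.bytesToUs(config.frameSize(), config.byteRate());

and maybeAddNoise() enqueues at most PERFORMANCE_MAX_GAPS (8) such frames per call, so a stall longer than 8 * frameUs still yields distorted audio, while the overall playback speed stays correct.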
    /**
@ -311,10 +340,11 @@ public class AudioMediaEncoder extends MediaEncoder {
            // Performance logging
            if (PERFORMANCE_DEBUG) {
                long sendEnd = System.nanoTime() / 1000000;
-                Long sendStart = mSendStartMap.remove(inputBuffer.timestamp);
+                Long sendStart = mDebugSendStartMap.remove(inputBuffer.timestamp);
+                //noinspection StatementWithEmptyBody
                if (sendStart != null) {
-                    mAvgSendDelay = ((mAvgSendDelay * mSendCount) + (sendEnd - sendStart)) / (++mSendCount);
-                    LOG.v("send delay millis:", sendEnd - sendStart, "average:", mAvgSendDelay);
+                    mDebugSendAvgDelay = ((mDebugSendAvgDelay * mDebugSendCount) + (sendEnd - sendStart)) / (++mDebugSendCount);
+                    LOG.v("send delay millis:", sendEnd - sendStart, "average:", mDebugSendAvgDelay);
                } else {
                    // This input buffer was already processed (but tryAcquire failed for now).
                }
@ -338,8 +368,8 @@ public class AudioMediaEncoder extends MediaEncoder {
            if (PERFORMANCE_DEBUG) {
                // After latest changes, the count here is not so different between MONO and STEREO.
                // We get about 400 frames in both cases (430 for MONO, but doesn't seem like a big issue).
-                LOG.e("EXECUTE DELAY MILLIS:", mAvgExecuteDelay, "COUNT:", mExecuteCount);
-                LOG.e("SEND DELAY MILLIS:", mAvgSendDelay, "COUNT:", mSendCount);
+                LOG.e("EXECUTE DELAY MILLIS:", mDebugExecuteAvgDelay, "COUNT:", mDebugExecuteCount);
+                LOG.e("SEND DELAY MILLIS:", mDebugSendAvgDelay, "COUNT:", mDebugSendCount);
            }
        }
@ -357,12 +387,12 @@ public class AudioMediaEncoder extends MediaEncoder {
                // NOTE: can consider calling this drainOutput on yet another thread, which would let us
                // use an even smaller BUFFER_POOL_MAX_SIZE without losing audio frames. But this way
                // we can accumulate delay on this new thread without noticing (no pool getting empty).
-                drainOutput(buffer.isEndOfStream);
+                drainOutput(eos);

                if (PERFORMANCE_DEBUG) {
                    long executeEnd = System.nanoTime() / 1000000;
-                    mAvgExecuteDelay = ((mAvgExecuteDelay * mExecuteCount) + (executeEnd - executeStart)) / (++mExecuteCount);
-                    LOG.v("execute delay millis:", executeEnd - executeStart, "average:", mAvgExecuteDelay);
+                    mDebugExecuteAvgDelay = ((mDebugExecuteAvgDelay * mDebugExecuteCount) + (executeEnd - executeStart)) / (++mDebugExecuteCount);
+                    LOG.v("execute delay millis:", executeEnd - executeStart, "average:", mDebugExecuteAvgDelay);
                }
            }
        }

@ -0,0 +1,59 @@
package com.otaliastudios.cameraview.video.encoding;
import androidx.annotation.NonNull;
import java.nio.Buffer;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.ShortBuffer;
import java.util.Random;
/**
* An AudioNoise instance offers buffers of noise that we can use when recording
* some samples failed for some reason.
*
* Since we can't create noise anytime it's needed - that would be expensive and
* slow down the recording thread - we create a big noise buffer at start time.
*
* We'd like to work with {@link ShortBuffer}s, but this requires converting the
* input buffer to ShortBuffer each time, and this can be expensive.
*/
class AudioNoise {
private final static int FRAMES = 1; // After testing, it looks like this is the best setup
private final static Random RANDOM = new Random();
private final ByteBuffer mNoiseBuffer;
AudioNoise(@NonNull AudioConfig config) {
//noinspection ConstantConditions
if (config.sampleSizePerChannel != 2) {
throw new IllegalArgumentException("AudioNoise expects 2bytes-1short samples.");
}
mNoiseBuffer = ByteBuffer
.allocateDirect(config.frameSize() * FRAMES)
.order(ByteOrder.nativeOrder());
double i = 0;
double frequency = config.frameSize() / 2D; // each X samples, the signal repeats
double step = Math.PI / frequency; // the increase in radians
double max = 10; // might choose this from 0 to Short.MAX_VALUE
while (mNoiseBuffer.hasRemaining()) {
short noise = (short) (Math.sin(++i * step) * max);
mNoiseBuffer.put((byte) noise);
mNoiseBuffer.put((byte) (noise >> 8));
}
mNoiseBuffer.rewind();
}
void fill(@NonNull ByteBuffer outBuffer) {
mNoiseBuffer.clear();
if (mNoiseBuffer.capacity() == outBuffer.remaining()) {
mNoiseBuffer.position(0); // Happens if FRAMES = 1.
} else {
mNoiseBuffer.position(RANDOM.nextInt(mNoiseBuffer.capacity()
- outBuffer.remaining()));
}
mNoiseBuffer.limit(mNoiseBuffer.position() + outBuffer.remaining());
outBuffer.put(mNoiseBuffer);
}
}
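Typical usage, sketched from what the recording thread above does with this class (the config and byteBufferPool variables are placeholders for the example):

    AudioNoise noise = new AudioNoise(config);   // built once, in onPrepare()
    ByteBuffer buffer = byteBufferPool.get();    // a pooled, frameSize()-sized buffer
    if (buffer != null) {
        buffer.clear();
        noise.fill(buffer);                      // copies a random slice of the pre-generated noise
        buffer.rewind();
        // enqueue(buffer, gapStartUs, false);
    }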

@ -14,6 +14,9 @@ import com.otaliastudios.cameraview.CameraLogger;
import com.otaliastudios.cameraview.internal.utils.WorkerHandler;

import java.nio.ByteBuffer;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.concurrent.atomic.AtomicInteger;

/**
 * Base class for single-track encoders, coordinated by a {@link MediaEncoderEngine}.
@ -117,12 +120,13 @@ public abstract class MediaEncoder {
    private OutputBufferPool mOutputBufferPool;
    private MediaCodec.BufferInfo mBufferInfo;
    private MediaCodecBuffers mBuffers;
+    private final Map<String, AtomicInteger> mPendingEvents = new HashMap<>();

    private long mMaxLengthMillis;
    private boolean mMaxLengthReached;

    private long mStartTimeMillis = 0; // In System.currentTimeMillis()
-    private long mStartTimeUs = Long.MIN_VALUE; // In unknown reference
+    private long mFirstTimeUs = Long.MIN_VALUE; // In unknown reference
    private long mLastTimeUs = 0;

    private long mDebugSetStateTimestamp = Long.MIN_VALUE;
@ -176,6 +180,7 @@ public abstract class MediaEncoder {
        mBufferInfo = new MediaCodec.BufferInfo();
        mMaxLengthMillis = maxLengthMillis;
        mWorker = WorkerHandler.get(mName);
+        mWorker.getThread().setPriority(Thread.MAX_PRIORITY);
        LOG.i(mName, "Prepare was called. Posting.");
        mWorker.post(new Runnable() {
            @Override
@ -223,13 +228,18 @@ public abstract class MediaEncoder {
     * @param event what happened
     * @param data object
     */
+    @SuppressWarnings("ConstantConditions")
    final void notify(final @NonNull String event, final @Nullable Object data) {
-        LOG.v(mName, "Notify was called. Posting.");
+        if (!mPendingEvents.containsKey(event)) mPendingEvents.put(event, new AtomicInteger(0));
+        final AtomicInteger pendingEvents = mPendingEvents.get(event);
+        pendingEvents.incrementAndGet();
+        LOG.v(mName, "Notify was called. Posting. pendingEvents:", pendingEvents.intValue());
        mWorker.post(new Runnable() {
            @Override
            public void run() {
-                LOG.v(mName, "Notify was called. Executing.");
+                LOG.v(mName, "Notify was called. Executing. pendingEvents:", pendingEvents.intValue());
                onEvent(event, data);
+                pendingEvents.decrementAndGet();
            }
        });
    }
@ -315,6 +325,7 @@ public abstract class MediaEncoder {
        mOutputBufferPool = null;
        mBuffers = null;
        setState(STATE_STOPPED);
+        mWorker.destroy();
    }

    /**
@ -357,7 +368,9 @@ public abstract class MediaEncoder {
     */
    @SuppressWarnings("WeakerAccess")
    protected void encodeInputBuffer(InputBuffer buffer) {
-        LOG.v(mName, "ENCODING - Buffer:", buffer.index, "Bytes:", buffer.length, "Presentation:", buffer.timestamp);
+        LOG.v(mName, "ENCODING - Buffer:", buffer.index,
+                "Bytes:", buffer.length,
+                "Presentation:", buffer.timestamp);
        if (buffer.isEndOfStream) { // send EOS
            mMediaCodec.queueInputBuffer(buffer.index, 0, 0,
                    buffer.timestamp, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
@ -379,8 +392,8 @@ public abstract class MediaEncoder {
     */
    @SuppressLint("LogNotTimber")
    @SuppressWarnings("WeakerAccess")
-    protected void drainOutput(boolean drainAll) {
-        LOG.v(mName, "DRAINING - EOS:", drainAll);
+    protected final void drainOutput(boolean drainAll) {
+        LOG.i(mName, "DRAINING - EOS:", drainAll);
        if (mMediaCodec == null) {
            LOG.e("drain() was called before prepare() or after releasing.");
            return;
@ -422,9 +435,9 @@ public abstract class MediaEncoder {
            // Store mStartTimeUs and mLastTimeUs, useful to detect the max length
            // reached and stop recording when needed.
-            if (mStartTimeUs == Long.MIN_VALUE) {
-                mStartTimeUs = mBufferInfo.presentationTimeUs;
-                LOG.w(mName, "DRAINING - Got the first presentation time:", mStartTimeUs);
+            if (mFirstTimeUs == Long.MIN_VALUE) {
+                mFirstTimeUs = mBufferInfo.presentationTimeUs;
+                LOG.w(mName, "DRAINING - Got the first presentation time:", mFirstTimeUs);
            }
            mLastTimeUs = mBufferInfo.presentationTimeUs;
@ -434,16 +447,16 @@ public abstract class MediaEncoder {
            // To address this, encoders are required to call notifyFirstFrameMillis
            // so we can adjust here - moving to 1970 reference.
            // Extra benefit: we never pass a pts equal to 0, which some encoders refuse.
-            mBufferInfo.presentationTimeUs = (mStartTimeMillis * 1000) + mLastTimeUs - mStartTimeUs;
+            mBufferInfo.presentationTimeUs = (mStartTimeMillis * 1000) + mLastTimeUs - mFirstTimeUs;

            // Write.
-            LOG.v(mName, "DRAINING - About to write(). Adjusted presentation:", mBufferInfo.presentationTimeUs);
+            LOG.i(mName, "DRAINING - About to write(). Adjusted presentation:", mBufferInfo.presentationTimeUs);
            OutputBuffer buffer = mOutputBufferPool.get();
            //noinspection ConstantConditions
            buffer.info = mBufferInfo;
            buffer.trackIndex = mTrackIndex;
            buffer.data = encodedData;
-            mController.write(mOutputBufferPool, buffer);
+            onWriteOutput(mOutputBufferPool, buffer);
        }
        mMediaCodec.releaseOutputBuffer(encoderStatus, false);
@ -451,10 +464,11 @@ public abstract class MediaEncoder {
        // Not needed if drainAll because we already were asked to stop
        if (!drainAll
                && !mMaxLengthReached
-                && mStartTimeUs != Long.MIN_VALUE
-                && mLastTimeUs - mStartTimeUs > mMaxLengthMillis * 1000) {
+                && mFirstTimeUs != Long.MIN_VALUE
+                && mLastTimeUs - mFirstTimeUs > mMaxLengthMillis * 1000) {
            LOG.w(mName, "DRAINING - Reached maxLength! mLastTimeUs:", mLastTimeUs,
-                    "mStartTimeUs:", mStartTimeUs,
+                    "mStartTimeUs:", mFirstTimeUs,
+                    "mDeltaUs:", mLastTimeUs - mFirstTimeUs,
                    "mMaxLengthUs:", mMaxLengthMillis * 1000);
            onMaxLengthReached();
            break;
@ -470,6 +484,11 @@ public abstract class MediaEncoder {
        }
    }
@CallSuper
protected void onWriteOutput(@NonNull OutputBufferPool pool, @NonNull OutputBuffer buffer) {
mController.write(pool, buffer);
}
    protected abstract int getEncodedBitRate();

    /**
@ -494,6 +513,11 @@ public abstract class MediaEncoder {
        onMaxLengthReached();
    }
@SuppressWarnings("WeakerAccess")
protected boolean hasReachedMaxLength() {
return mMaxLengthReached;
}
    /**
     * Called by us (during {@link #drainOutput(boolean)}) or by subclasses
     * (through {@link #notifyMaxLengthReached()}) to notify that we reached the
@ -520,7 +544,20 @@ public abstract class MediaEncoder {
     * @param firstFrameMillis the milliseconds of the first frame presentation
     */
    @SuppressWarnings("WeakerAccess")
-    protected void notifyFirstFrameMillis(long firstFrameMillis) {
+    protected final void notifyFirstFrameMillis(long firstFrameMillis) {
        mStartTimeMillis = firstFrameMillis;
    }
/**
* Returns the number of events (see {@link #onEvent(String, Object)}) that were scheduled
* but still not passed to that function. Could be used to drop some of them if this
* number is too high.
*
* @param event the event type
* @return the pending events number
*/
@SuppressWarnings({"SameParameterValue", "ConstantConditions", "WeakerAccess"})
protected final int getPendingEvents(@NonNull String event) {
return mPendingEvents.get(event).intValue();
}
}
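The pending-event bookkeeping added in this file reduces to the following pattern (a simplified sketch, not the full class; worker and onEvent stand in for the real members):

    private final Map<String, AtomicInteger> pending = new HashMap<>();

    void notify(final String event, final Object data) {
        if (!pending.containsKey(event)) pending.put(event, new AtomicInteger(0));
        final AtomicInteger counter = pending.get(event);
        counter.incrementAndGet();              // counted when the event is posted...
        worker.post(new Runnable() {
            @Override
            public void run() {
                onEvent(event, data);
                counter.decrementAndGet();      // ...and released only after it was handled
            }
        });
    }

getPendingEvents(event) then simply reads the counter, which is what lets TextureMediaEncoder drop frames when too many FRAME_EVENTs are queued.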

@ -7,6 +7,7 @@ import android.os.Build;
import android.text.format.DateFormat;

import com.otaliastudios.cameraview.CameraLogger;
+import com.otaliastudios.cameraview.internal.utils.WorkerHandler;

import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
@ -68,8 +69,9 @@ public class MediaEncoderEngine {
        void onEncodingStart();

        /**
-         * Called when encoding stopped. At this point the mxuer might still be processing,
-         * but we have stopped receiving input (recording video and audio frames).
+         * Called when encoding stopped. At this point the muxer or the encoders might still be
+         * processing data, but we have stopped receiving input (recording video and audio frames).
+         * Actually, we will stop very soon.
         *
         * The {@link #onEncodingEnd(int, Exception)} callback will soon be called
         * with the results.
@ -96,17 +98,18 @@ public class MediaEncoderEngine {
    public final static int END_BY_MAX_DURATION = 1;
    public final static int END_BY_MAX_SIZE = 2;

-    private List<MediaEncoder> mEncoders;
+    private final List<MediaEncoder> mEncoders = new ArrayList<>();
    private MediaMuxer mMediaMuxer;
-    private int mStartedEncodersCount;
-    private int mReleasedEncodersCount;
-    private boolean mMediaMuxerStarted;
+    private int mStartedEncodersCount = 0;
+    private int mStoppedEncodersCount = 0;
+    private boolean mMediaMuxerStarted = false;
    @SuppressWarnings("FieldCanBeLocal")
-    private Controller mController;
+    private final Controller mController = new Controller();
+    private final WorkerHandler mControllerThread = WorkerHandler.get("EncoderEngine");
+    private final Object mControllerLock = new Object();
    private Listener mListener;
    private int mEndReason = END_BY_USER;
    private int mPossibleEndReason;
-    private final Object mControllerLock = new Object();

    /**
     * Creates a new engine for the given file, with the given encoders and max limits,
@ -126,8 +129,6 @@ public class MediaEncoderEngine {
                              final long maxSize,
                              @Nullable Listener listener) {
        mListener = listener;
-        mController = new Controller();
-        mEncoders = new ArrayList<>();
        mEncoders.add(videoEncoder);
        if (audioEncoder != null) {
            mEncoders.add(audioEncoder);
@ -137,9 +138,6 @@ public class MediaEncoderEngine {
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
-        mStartedEncodersCount = 0;
-        mMediaMuxerStarted = false;
-        mReleasedEncodersCount = 0;

        // Trying to convert the size constraints to duration constraints,
        // because they are super easy to check.
@ -203,6 +201,9 @@ public class MediaEncoderEngine {
        for (MediaEncoder encoder : mEncoders) {
            encoder.stop();
        }
+        if (mListener != null) {
+            mListener.onEncodingStop();
+        }
    }

    /**
@ -218,10 +219,14 @@ public class MediaEncoderEngine {
            // went wrong, and we propagate that to the listener.
            try {
                mMediaMuxer.stop();
-                mMediaMuxer.release();
            } catch (Exception e) {
                error = e;
            }
+            try {
+                mMediaMuxer.release();
+            } catch (Exception e) {
+                if (error == null) error = e;
+            }
            mMediaMuxer = null;
        }
        LOG.w("end:", "Dispatching end to listener - reason:", mEndReason, "error:", error);
@ -231,8 +236,9 @@ public class MediaEncoderEngine {
        }
        mEndReason = END_BY_USER;
        mStartedEncodersCount = 0;
-        mReleasedEncodersCount = 0;
+        mStoppedEncodersCount = 0;
        mMediaMuxerStarted = false;
+        mControllerThread.destroy();
        LOG.i("end:", "Completed.");
    }
@ -281,12 +287,19 @@ public class MediaEncoderEngine {
LOG.w("notifyStarted:", "Assigned track", track, "to format", format.getString(MediaFormat.KEY_MIME)); LOG.w("notifyStarted:", "Assigned track", track, "to format", format.getString(MediaFormat.KEY_MIME));
if (++mStartedEncodersCount == mEncoders.size()) { if (++mStartedEncodersCount == mEncoders.size()) {
LOG.w("notifyStarted:", "All encoders have started. Starting muxer and dispatching onEncodingStart()."); LOG.w("notifyStarted:", "All encoders have started. Starting muxer and dispatching onEncodingStart().");
// Go out of this thread since it might be very important for the
// encoders and we don't want to perform expensive operations here.
mControllerThread.run(new Runnable() {
@Override
public void run() {
mMediaMuxer.start(); mMediaMuxer.start();
mMediaMuxerStarted = true; mMediaMuxerStarted = true;
if (mListener != null) { if (mListener != null) {
mListener.onEncodingStart(); mListener.onEncodingStart();
} }
} }
});
}
return track; return track;
} }
} }
@ -322,10 +335,6 @@ public class MediaEncoderEngine {
* large differences. * large differences.
*/ */
public void write(@NonNull OutputBufferPool pool, @NonNull OutputBuffer buffer) { public void write(@NonNull OutputBufferPool pool, @NonNull OutputBuffer buffer) {
if (!mMediaMuxerStarted) {
throw new IllegalStateException("Trying to write before muxer started");
}
if (DEBUG_PERFORMANCE) { if (DEBUG_PERFORMANCE) {
// When AUDIO = mono, this is called about twice the time. (200 vs 100 for 5 sec). // When AUDIO = mono, this is called about twice the time. (200 vs 100 for 5 sec).
Integer count = mDebugCount.get(buffer.trackIndex); Integer count = mDebugCount.get(buffer.trackIndex);
@ -342,7 +351,6 @@ public class MediaEncoderEngine {
"track:", buffer.trackIndex, "track:", buffer.trackIndex,
"presentation:", buffer.info.presentationTimeUs); "presentation:", buffer.info.presentationTimeUs);
} }
mMediaMuxer.writeSampleData(buffer.trackIndex, buffer.data, buffer.info); mMediaMuxer.writeSampleData(buffer.trackIndex, buffer.data, buffer.info);
pool.recycle(buffer); pool.recycle(buffer);
} }
@ -360,8 +368,15 @@ public class MediaEncoderEngine {
            if (--mStartedEncodersCount == 0) {
                LOG.w("requestStop:", "All encoders have requested a stop. Stopping them.");
                mEndReason = mPossibleEndReason;
+                // Go out of this thread since it might be very important for the
+                // encoders and we don't want to perform expensive operations here.
+                mControllerThread.run(new Runnable() {
+                    @Override
+                    public void run() {
                        stop();
                    }
+                });
+            }
        }
    }
@ -372,13 +387,17 @@ public class MediaEncoderEngine {
        public void notifyStopped(int track) {
            synchronized (mControllerLock) {
                LOG.w("notifyStopped:", "Called for track", track);
-                if (++mReleasedEncodersCount == mEncoders.size()) {
-                    LOG.w("requestStop:", "All encoders have been released. Stopping the muxer.");
-                    if (mListener != null) {
-                        mListener.onEncodingStop();
-                    }
+                if (++mStoppedEncodersCount == mEncoders.size()) {
+                    LOG.w("requestStop:", "All encoders have been stopped. Stopping the muxer.");
+                    // Go out of this thread since it might be very important for the
+                    // encoders and we don't want to perform expensive operations here.
+                    mControllerThread.run(new Runnable() {
+                        @Override
+                        public void run() {
                            end();
                        }
+                    });
+                }
            }
        }
    }
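The recurring change in this Controller is the same idea three times: muxer start/stop/end work is pushed onto the dedicated "EncoderEngine" WorkerHandler so that the encoder thread calling notifyStarted(), requestStop() or notifyStopped() can return immediately. In sketch form (the comment marks where the expensive calls go):

    private final WorkerHandler controllerThread = WorkerHandler.get("EncoderEngine");

    // Called on an encoder's own thread:
    controllerThread.run(new Runnable() {
        @Override
        public void run() {
            // expensive work: mMediaMuxer.start(), stop(), end(), listener callbacks...
        }
    });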

@ -4,16 +4,19 @@ import android.opengl.EGLContext;
import androidx.annotation.NonNull;

+import com.otaliastudios.cameraview.internal.Issue514Workaround;
import com.otaliastudios.cameraview.overlay.Overlay;
+import com.otaliastudios.cameraview.overlay.OverlayDrawer;

/**
 * Video configuration to be passed as input to the constructor
 * of a {@link TextureMediaEncoder}.
 */
public class TextureConfig extends VideoConfig {

-    private final static int NO_TEXTURE = Integer.MIN_VALUE;
-    public int textureId = NO_TEXTURE;
-    public int overlayTextureId = NO_TEXTURE;
+    public int textureId;
+    public Overlay.Target overlayTarget;
+    public OverlayDrawer overlayDrawer;
    public int overlayRotation;
    public float scaleX;
    public float scaleY;
@ -24,7 +27,8 @@ public class TextureConfig extends VideoConfig {
        TextureConfig copy = new TextureConfig();
        copy(copy);
        copy.textureId = this.textureId;
-        copy.overlayTextureId = this.overlayTextureId;
+        copy.overlayDrawer = this.overlayDrawer;
+        copy.overlayTarget = this.overlayTarget;
        copy.overlayRotation = this.overlayRotation;
        copy.scaleX = this.scaleX;
        copy.scaleY = this.scaleY;
@ -33,6 +37,6 @@ public class TextureConfig extends VideoConfig {
    }

    boolean hasOverlay() {
-        return overlayTextureId != NO_TEXTURE;
+        return overlayDrawer != null;
    }
}

@ -36,6 +36,8 @@ public class TextureMediaEncoder extends VideoMediaEncoder<TextureConfig> {
        }
    });

+    private long mFirstTimeUs = Long.MIN_VALUE;

    public TextureMediaEncoder(@NonNull TextureConfig config) {
        super(config.copy());
    }
@ -51,7 +53,7 @@ public class TextureMediaEncoder extends VideoMediaEncoder<TextureConfig> {
         * Nanoseconds, in no meaningful time-base. Will be used for offsets only.
         * Typically this comes from {@link SurfaceTexture#getTimestamp()}.
         */
-        public long timestamp;
+        public long timestampNanos;

        /**
         * Milliseconds in the {@link System#currentTimeMillis()} reference.
@ -64,10 +66,9 @@ public class TextureMediaEncoder extends VideoMediaEncoder<TextureConfig> {
         */
        public float[] transform = new float[16];

-        /**
-         * The transformation matrix for the overlay texture, if any.
-         */
-        public float[] overlayTransform = new float[16];
+        private long timestampUs() {
+            return timestampNanos / 1000L;
+        }
    }

    /**
@ -94,11 +95,40 @@ public class TextureMediaEncoder extends VideoMediaEncoder<TextureConfig> {
        super.onPrepare(controller, maxLengthMillis);
        mEglCore = new EglCore(mConfig.eglContext, EglCore.FLAG_RECORDABLE);
        mWindow = new EglWindowSurface(mEglCore, mSurface, true);
-        mWindow.makeCurrent(); // drawing will happen on the InputWindowSurface, which
-        // is backed by mVideoEncoder.getInputSurface()
+        mWindow.makeCurrent();
        mViewport = new EglViewport();
    }
/**
* Any number of pending events > 1 means that we should skip this frame.
* To avoid skipping too many frames, we'll use 2 for now, but this just means
* that we'll be drawing the same frame twice.
*
* When an event is posted, the textureId data has already been updated so we're
* too late to draw the old one and it should be skipped.
*
* This is especially important if we perform overlay drawing here, since that
* makes this class thread busy and slows down the event dispatching.
*
* @param timestampUs frame timestamp
* @return true to render
*/
@Override
protected boolean shouldRenderFrame(long timestampUs) {
if (!super.shouldRenderFrame(timestampUs)) {
return false;
} else if (mFrameNumber <= 10) {
// Always render the first few frames, or muxer fails.
return true;
} else if (getPendingEvents(FRAME_EVENT) > 2) {
LOG.w("shouldRenderFrame - Dropping frame because we already have too many pending events:",
getPendingEvents(FRAME_EVENT));
return false;
} else {
return true;
}
}
@EncoderThread @EncoderThread
@Override @Override
protected void onEvent(@NonNull String event, @Nullable Object data) { protected void onEvent(@NonNull String event, @Nullable Object data) {
@@ -107,30 +137,46 @@ public class TextureMediaEncoder extends VideoMediaEncoder<TextureConfig> {
         if (frame == null) {
             throw new IllegalArgumentException("Got null frame for FRAME_EVENT.");
         }
-        if (frame.timestamp == 0) { // grafika
-            mFramePool.recycle(frame);
-            return;
-        }
-        if (mFrameNumber < 0) { // We were asked to stop.
+        if (!shouldRenderFrame(frame.timestampUs())) {
             mFramePool.recycle(frame);
             return;
         }
-        mFrameNumber++;
 
+        // Notify we got the first frame and its absolute time.
         if (mFrameNumber == 1) {
             notifyFirstFrameMillis(frame.timestampMillis);
         }
 
+        // Notify we have reached the max length value.
+        if (mFirstTimeUs == Long.MIN_VALUE) mFirstTimeUs = frame.timestampUs();
+        if (!hasReachedMaxLength()) {
+            boolean didReachMaxLength = (frame.timestampUs() - mFirstTimeUs) > getMaxLengthMillis() * 1000L;
+            if (didReachMaxLength) {
+                LOG.w("onEvent -",
+                        "frameNumber:", mFrameNumber,
+                        "timestampUs:", frame.timestampUs(),
+                        "firstTimeUs:", mFirstTimeUs,
+                        "- reached max length! deltaUs:", frame.timestampUs() - mFirstTimeUs);
+                notifyMaxLengthReached();
+            }
+        }
+
         // First, drain any previous data.
-        LOG.i("onEvent", "frameNumber:", mFrameNumber, "timestamp:", frame.timestamp, "- draining.");
+        LOG.i("onEvent -",
+                "frameNumber:", mFrameNumber,
+                "timestampUs:", frame.timestampUs(),
+                "- draining.");
         drainOutput(false);
 
         // Then draw on the surface.
-        LOG.i("onEvent", "frameNumber:", mFrameNumber, "timestamp:", frame.timestamp, "- drawing.");
+        LOG.i("onEvent -",
+                "frameNumber:", mFrameNumber,
+                "timestampUs:", frame.timestampUs(),
+                "- rendering.");
 
         // 1. We must scale this matrix like GlCameraPreview does, because it might have some cropping.
         // Scaling takes place with respect to the (0, 0, 0) point, so we must apply a Translation to compensate.
         float[] transform = frame.transform;
-        float[] overlayTransform = frame.overlayTransform;
         float scaleX = mConfig.scaleX;
         float scaleY = mConfig.scaleY;
         float scaleTranslX = (1F - scaleX) / 2F;
@@ -148,15 +194,16 @@ public class TextureMediaEncoder extends VideoMediaEncoder<TextureConfig> {
         // 3. Do the same for overlays with their own rotation.
         if (mConfig.hasOverlay()) {
-            Matrix.translateM(overlayTransform, 0, 0.5F, 0.5F, 0);
-            Matrix.rotateM(overlayTransform, 0, mConfig.overlayRotation, 0, 0, 1);
-            Matrix.translateM(overlayTransform, 0, -0.5F, -0.5F, 0);
+            mConfig.overlayDrawer.draw(mConfig.overlayTarget);
+            Matrix.translateM(mConfig.overlayDrawer.getTransform(), 0, 0.5F, 0.5F, 0);
+            Matrix.rotateM(mConfig.overlayDrawer.getTransform(), 0, mConfig.overlayRotation, 0, 0, 1);
+            Matrix.translateM(mConfig.overlayDrawer.getTransform(), 0, -0.5F, -0.5F, 0);
         }
         mViewport.drawFrame(mConfig.textureId, transform);
         if (mConfig.hasOverlay()) {
-            mViewport.drawFrame(mConfig.overlayTextureId, overlayTransform);
+            mConfig.overlayDrawer.render();
         }
-        mWindow.setPresentationTime(frame.timestamp);
+        mWindow.setPresentationTime(frame.timestampNanos);
         mWindow.swapBuffers();
         mFramePool.recycle(frame);
     }
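To make the max-length check above concrete, with made-up numbers: a 30 second limit, a first frame stamped at 1.0 s and a current frame stamped at 31.5 s:

    long maxLengthMillis = 30_000L;
    long firstTimeUs     = 1_000_000L;
    long frameTimeUs     = 31_500_000L;
    boolean didReachMaxLength = (frameTimeUs - firstTimeUs) > maxLengthMillis * 1000L;
    // deltaUs is 30_500_000 > 30_000_000, so notifyMaxLengthReached() would be called for this frame.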

@@ -7,6 +7,7 @@ import android.os.Build;
 import androidx.annotation.NonNull;
 import androidx.annotation.RequiresApi;
 
+import android.os.Bundle;
 import android.view.Surface;
 
 import com.otaliastudios.cameraview.CameraLogger;

@@ -43,6 +44,8 @@ abstract class VideoMediaEncoder<C extends VideoConfig> extends MediaEncoder {
     @SuppressWarnings("WeakerAccess")
     protected int mFrameNumber = -1;
 
+    private boolean mSyncFrameFound = false;
+
     VideoMediaEncoder(@NonNull C config) {
         super("VideoEncoder");
         mConfig = config;

@@ -53,16 +56,16 @@ abstract class VideoMediaEncoder<C extends VideoConfig> extends MediaEncoder {
     protected void onPrepare(@NonNull MediaEncoderEngine.Controller controller, long maxLengthMillis) {
         MediaFormat format = MediaFormat.createVideoFormat(mConfig.mimeType, mConfig.width, mConfig.height);
 
-        // Set some properties. Failing to specify some of these can cause the MediaCodec
-        // configure() call to throw an unhelpful exception.
+        // Failing to specify some of these can cause the MediaCodec configure() call to throw an unhelpful exception.
+        // About COLOR_FormatSurface, see https://stackoverflow.com/q/28027858/4288782
+        // This just means it is an opaque, implementation-specific format that the device GPU prefers.
+        // So as long as we use the GPU to draw, the format will match what the encoder expects.
         format.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
         format.setInteger(MediaFormat.KEY_BIT_RATE, mConfig.bitRate);
         format.setInteger(MediaFormat.KEY_FRAME_RATE, mConfig.frameRate);
-        format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 2);
+        format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1); // seconds between key frames!
         format.setInteger("rotation-degrees", mConfig.rotation);
-
-        // Create a MediaCodec encoder, and configure it with our format. Get a Surface
-        // we can use for input and wrap it with a class that handles the EGL work.
         try {
             mMediaCodec = MediaCodec.createEncoderByType(mConfig.mimeType);
         } catch (IOException e) {

@@ -92,8 +95,52 @@ abstract class VideoMediaEncoder<C extends VideoConfig> extends MediaEncoder {
         drainOutput(true);
     }
 
+    /**
+     * The first frame that we write MUST have the BUFFER_FLAG_SYNC_FRAME flag set.
+     * It sometimes doesn't because we might drop some frames in {@link #drainOutput(boolean)},
+     * basically if, at the time, the muxer was not started yet, due to Audio setup being slow.
+     *
+     * We can't add the BUFFER_FLAG_SYNC_FRAME flag to the first frame just because we'd like to.
+     * But we can drop frames until we get a sync one.
+     *
+     * @param pool the buffer pool
+     * @param buffer the buffer
+     */
+    @Override
+    protected void onWriteOutput(@NonNull OutputBufferPool pool, @NonNull OutputBuffer buffer) {
+        if (!mSyncFrameFound) {
+            LOG.w("onWriteOutput:", "sync frame not found yet. Checking.");
+            int flag = MediaCodec.BUFFER_FLAG_SYNC_FRAME;
+            boolean hasFlag = (buffer.info.flags & flag) == flag;
+            if (hasFlag) {
+                LOG.w("onWriteOutput:", "SYNC FRAME FOUND!");
+                mSyncFrameFound = true;
+                super.onWriteOutput(pool, buffer);
+            } else {
+                LOG.w("onWriteOutput:", "DROPPING FRAME and requesting a sync frame soon.");
+                if (Build.VERSION.SDK_INT >= 19) {
+                    Bundle params = new Bundle();
+                    params.putInt(MediaCodec.PARAMETER_KEY_REQUEST_SYNC_FRAME, 0);
+                    mMediaCodec.setParameters(params);
+                }
+                pool.recycle(buffer);
+            }
+        } else {
+            super.onWriteOutput(pool, buffer);
+        }
+    }
+
     @Override
     protected int getEncodedBitRate() {
         return mConfig.bitRate;
     }
+
+    @SuppressWarnings("BooleanMethodIsAlwaysInverted")
+    protected boolean shouldRenderFrame(long timestampUs) {
+        if (timestampUs == 0) return false; // grafika said so
+        if (mFrameNumber < 0) return false; // We were asked to stop.
+        if (hasReachedMaxLength()) return false; // We were not asked yet, but we'll be soon.
+        mFrameNumber++;
+        return true;
+    }
 }
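The sync-frame handling above uses only stock MediaCodec APIs; the same flag test and key-frame request can be sketched in isolation (bufferInfo and mediaCodec are assumed to come from an ordinary encoding loop):

    boolean isSyncFrame = (bufferInfo.flags & MediaCodec.BUFFER_FLAG_SYNC_FRAME) != 0;
    if (!isSyncFrame && Build.VERSION.SDK_INT >= 19) {
        // Ask the codec to produce a key frame as soon as possible; this affects a future
        // buffer, so the current non-sync buffer still has to be dropped or written as-is.
        Bundle params = new Bundle();
        params.putInt(MediaCodec.PARAMETER_KEY_REQUEST_SYNC_FRAME, 0);
        mediaCodec.setParameters(params);
    }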

@@ -1,16 +1,17 @@
 <?xml version="1.0" encoding="utf-8"?>
 <manifest xmlns:android="http://schemas.android.com/apk/res/android"
+    xmlns:tools="http://schemas.android.com/tools"
     package="com.otaliastudios.cameraview.demo">
 
     <uses-permission android:name="android.permission.RECORD_AUDIO"/>
 
     <application
         android:allowBackup="false"
-        android:icon="@mipmap/cameraview"
+        android:icon="@mipmap/logo"
         android:label="@string/app_name"
-        android:roundIcon="@mipmap/cameraview"
         android:supportsRtl="true"
-        android:theme="@style/AppTheme">
+        android:theme="@style/AppTheme"
+        tools:ignore="GoogleAppIndexingWarning">
         <activity
             android:name=".CameraActivity"

@@ -226,6 +226,13 @@ public class CameraActivity extends AppCompatActivity implements View.OnClickLis
             super.onVideoRecordingStart();
             LOG.w("onVideoRecordingStart!");
         }
+
+        @Override
+        public void onVideoRecordingEnd() {
+            super.onVideoRecordingEnd();
+            message("Video taken. Processing...", false);
+            LOG.w("onVideoRecordingEnd!");
+        }
     }
 
     @Override

@@ -13,30 +13,30 @@ import com.otaliastudios.cameraview.size.AspectRatio;
 import com.otaliastudios.cameraview.BitmapCallback;
 import com.otaliastudios.cameraview.PictureResult;
 
-import java.lang.ref.WeakReference;
-
 public class PicturePreviewActivity extends Activity {
 
-    private static WeakReference<PictureResult> image;
+    private static PictureResult picture;
 
-    public static void setPictureResult(@Nullable PictureResult im) {
-        image = im != null ? new WeakReference<>(im) : null;
+    public static void setPictureResult(@Nullable PictureResult pictureResult) {
+        picture = pictureResult;
     }
 
     @Override
     protected void onCreate(@Nullable Bundle savedInstanceState) {
         super.onCreate(savedInstanceState);
         setContentView(R.layout.activity_picture_preview);
-        final ImageView imageView = findViewById(R.id.image);
-        final MessageView captureResolution = findViewById(R.id.nativeCaptureResolution);
-        final MessageView captureLatency = findViewById(R.id.captureLatency);
-        final MessageView exifRotation = findViewById(R.id.exifRotation);
-        PictureResult result = image == null ? null : image.get();
+        final PictureResult result = picture;
         if (result == null) {
             finish();
             return;
         }
+        final ImageView imageView = findViewById(R.id.image);
+        final MessageView captureResolution = findViewById(R.id.nativeCaptureResolution);
+        final MessageView captureLatency = findViewById(R.id.captureLatency);
+        final MessageView exifRotation = findViewById(R.id.exifRotation);
         final long delay = getIntent().getLongExtra("delay", 0);
         AspectRatio ratio = AspectRatio.of(result.getSize());
         captureLatency.setTitleAndMessage("Approx. latency", delay + " milliseconds");

@@ -13,23 +13,28 @@ import android.widget.MediaController;
 import android.widget.VideoView;
 
 import com.otaliastudios.cameraview.VideoResult;
+import com.otaliastudios.cameraview.size.AspectRatio;
 
-import java.lang.ref.WeakReference;
-
 public class VideoPreviewActivity extends Activity {
 
     private VideoView videoView;
-    private static WeakReference<VideoResult> videoResult;
+    private static VideoResult videoResult;
 
     public static void setVideoResult(@Nullable VideoResult result) {
-        videoResult = result != null ? new WeakReference<>(result) : null;
+        videoResult = result;
     }
 
     @Override
     protected void onCreate(@Nullable Bundle savedInstanceState) {
         super.onCreate(savedInstanceState);
         setContentView(R.layout.activity_video_preview);
+        final VideoResult result = videoResult;
+        if (result == null) {
+            finish();
+            return;
+        }
         videoView = findViewById(R.id.video);
         videoView.setOnClickListener(new View.OnClickListener() {
             @Override

@@ -46,13 +51,8 @@ public class VideoPreviewActivity extends Activity {
         final MessageView videoBitRate = findViewById(R.id.videoBitRate);
         final MessageView videoFrameRate = findViewById(R.id.videoFrameRate);
 
-        final VideoResult result = videoResult == null ? null : videoResult.get();
-        if (result == null) {
-            finish();
-            return;
-        }
-        actualResolution.setTitleAndMessage("Size", result.getSize() + "");
+        AspectRatio ratio = AspectRatio.of(result.getSize());
+        actualResolution.setTitleAndMessage("Size", result.getSize() + " (" + ratio + ")");
         isSnapshot.setTitleAndMessage("Snapshot", result.isSnapshot() + "");
         rotation.setTitleAndMessage("Rotation", result.getRotation() + "");
         audio.setTitleAndMessage("Audio", result.getAudio().name());

@@ -85,9 +85,10 @@ public class VideoPreviewActivity extends Activity {
     }
 
     void playVideo() {
-        if (videoView.isPlaying()) return;
-        videoView.start();
+        if (!videoView.isPlaying()) {
+            videoView.start();
+        }
     }
 
     @Override
     protected void onDestroy() {

@@ -9,7 +9,6 @@
     <!-- Camera -->
     <com.otaliastudios.cameraview.CameraView
-        xmlns:app="http://schemas.android.com/apk/res-auto"
         android:id="@+id/camera"
         android:layout_width="wrap_content"
         android:layout_height="wrap_content"

@@ -23,7 +22,7 @@
         app:cameraGrid="off"
         app:cameraFlash="off"
         app:cameraAudio="on"
-        app:cameraFacing="front"
+        app:cameraFacing="back"
         app:cameraGestureTap="autoFocus"
         app:cameraGestureLongTap="none"
         app:cameraGesturePinch="zoom"

@@ -35,14 +34,13 @@
         <!-- Watermark -->
         <ImageView
             android:id="@+id/watermark"
-            android:layout_width="wrap_content"
-            android:layout_height="wrap_content"
+            android:layout_width="72dp"
+            android:layout_height="72dp"
             android:layout_gravity="bottom|end"
             app:layout_drawOnPreview="true"
             app:layout_drawOnVideoSnapshot="true"
             app:layout_drawOnPictureSnapshot="true"
-            android:src="@mipmap/cameraview"
-            android:padding="8dp"/>
+            android:src="@mipmap/logo_foreground"/>
 
     </com.otaliastudios.cameraview.CameraView>

@@ -59,6 +57,7 @@
             android:layout_height="56dp"
             android:layout_margin="16dp"
             android:background="@drawable/background"
+            android:elevation="3dp"
             app:srcCompat="@drawable/ic_switch" />
 
         <ImageButton

@@ -67,14 +66,10 @@
             android:layout_height="56dp"
             android:layout_margin="16dp"
             android:background="@drawable/background"
+            android:elevation="3dp"
             app:srcCompat="@drawable/ic_filters" />
     </LinearLayout>
 
     <!-- Controls -->
     <LinearLayout
         android:layout_width="match_parent"

@@ -176,6 +171,7 @@
         app:behavior_hideable="true"
         app:behavior_peekHeight="300dp"
         app:behavior_skipCollapsed="false"
+        android:elevation="4dp"
         android:layout_width="match_parent"
         android:layout_height="wrap_content">
         <LinearLayout

@@ -0,0 +1,5 @@
+<?xml version="1.0" encoding="utf-8"?>
+<adaptive-icon xmlns:android="http://schemas.android.com/apk/res/android">
+    <background android:drawable="@mipmap/logo_background"/>
+    <foreground android:drawable="@mipmap/logo_foreground"/>
+</adaptive-icon>

@@ -1,6 +1,6 @@
 <?xml version="1.0" encoding="utf-8"?>
 <resources>
-    <color name="colorPrimary">#009966</color>
-    <color name="colorPrimaryDark">#00734d</color>
-    <color name="colorAccent">#57db27</color>
+    <color name="colorPrimary">#FFA000</color>
+    <color name="colorPrimaryDark">#F57C00</color>
+    <color name="colorAccent">#40C4FF</color>
 </resources>

@@ -8,6 +8,10 @@ title: "CameraView v2"
 CameraView is a well documented, high-level library that makes capturing pictures and videos easy,
 addressing most of the common issues and needs, and still leaving you with flexibility where needed.
 
+<p align="center">
+  <img src="static/banner.png" vspace="10" width="100%">
+</p>
+
 - Fast & reliable
 - Gestures support [[docs]](docs/gestures.html)
 - Camera1 or Camera2 powered engine [[docs]](docs/previews.html)

@@ -22,10 +26,6 @@ addressing most of the common issues and needs, and still leaving you with flexi
 - Works down to API level 15
 - Well tested
 
-<p align="center">
-  <img src="static/icon.png" vspace="10" width="200" height="200">
-</p>
-
 ### Get started
 
 Get started with [install info](about/install.html), [quick setup](about/getting-started.html), or

BIN  docs/static/icon.png (vendored): binary file not shown; 122 KiB before, 302 KiB after.
