diff --git a/README.md b/README.md
index 0a3aa1c4..9ad40d1f 100644
--- a/README.md
+++ b/README.md
@@ -4,9 +4,10 @@
[](https://github.com/natario1/CameraView/issues)
[](https://natario1.github.io/CameraView/extra/donate)
+⠀
-
+
# CameraView
@@ -15,7 +16,7 @@ CameraView is a well documented, high-level library that makes capturing picture
addressing most of the common issues and needs, and still leaving you with flexibility where needed.
```groovy
-api 'com.otaliastudios:cameraview:2.0.0-rc1'
+api 'com.otaliastudios:cameraview:2.0.0-rc2'
```
- Fast & reliable
@@ -36,12 +37,14 @@ Read the [official website](https://natario1.github.io/CameraView) for setup ins
You might also be interested in [changelog](https://natario1.github.io/CameraView/about/changelog.html)
or in the [v1 migration guide](https://natario1.github.io/CameraView/extra/v1-migration-guide.html).
-
-
-
-
+⠀
+
+
+ 

+⠀
+
If you like the project, use it with profit, or simply want to thank back, please consider [donating
to the project](https://natario1.github.io/CameraView/extra/donate) now! You can either make a one time
donation or become a sponsor, in which case your company logo will immediately show up here.
diff --git a/cameraview/src/androidTest/java/com/otaliastudios/cameraview/engine/CameraIntegrationTest.java b/cameraview/src/androidTest/java/com/otaliastudios/cameraview/engine/CameraIntegrationTest.java
index 5971d98d..86172788 100644
--- a/cameraview/src/androidTest/java/com/otaliastudios/cameraview/engine/CameraIntegrationTest.java
+++ b/cameraview/src/androidTest/java/com/otaliastudios/cameraview/engine/CameraIntegrationTest.java
@@ -86,7 +86,7 @@ public abstract class CameraIntegrationTest extends BaseTest {
@Before
public void setUp() {
LOG.e("Test started. Setting up camera.");
- WorkerHandler.destroy();
+ WorkerHandler.destroyAll();
uiSync(new Runnable() {
@Override
@@ -126,7 +126,7 @@ public abstract class CameraIntegrationTest extends BaseTest {
public void tearDown() {
LOG.e("Test ended. Tearing down camera.");
camera.destroy();
- WorkerHandler.destroy();
+ WorkerHandler.destroyAll();
}
private void waitForUiException() throws Throwable {
diff --git a/cameraview/src/androidTest/java/com/otaliastudios/cameraview/engine/MockCameraEngine.java b/cameraview/src/androidTest/java/com/otaliastudios/cameraview/engine/MockCameraEngine.java
index 9b3d4566..3b320da7 100644
--- a/cameraview/src/androidTest/java/com/otaliastudios/cameraview/engine/MockCameraEngine.java
+++ b/cameraview/src/androidTest/java/com/otaliastudios/cameraview/engine/MockCameraEngine.java
@@ -128,7 +128,7 @@ public class MockCameraEngine extends CameraEngine {
}
@Override
- protected void onTakePictureSnapshot(@NonNull PictureResult.Stub stub, @NonNull AspectRatio viewAspectRatio) {
+ protected void onTakePictureSnapshot(@NonNull PictureResult.Stub stub, @NonNull AspectRatio outputRatio) {
}
@@ -138,7 +138,7 @@ public class MockCameraEngine extends CameraEngine {
}
@Override
- protected void onTakeVideoSnapshot(@NonNull VideoResult.Stub stub, @NonNull AspectRatio viewAspectRatio) {
+ protected void onTakeVideoSnapshot(@NonNull VideoResult.Stub stub, @NonNull AspectRatio outputRatio) {
}
diff --git a/cameraview/src/androidTest/java/com/otaliastudios/cameraview/internal/utils/WorkerHandlerTest.java b/cameraview/src/androidTest/java/com/otaliastudios/cameraview/internal/utils/WorkerHandlerTest.java
index acf53a49..fd5349be 100644
--- a/cameraview/src/androidTest/java/com/otaliastudios/cameraview/internal/utils/WorkerHandlerTest.java
+++ b/cameraview/src/androidTest/java/com/otaliastudios/cameraview/internal/utils/WorkerHandlerTest.java
@@ -13,6 +13,7 @@ import org.junit.Test;
import org.junit.runner.RunWith;
import java.util.concurrent.Callable;
+import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Executor;
@@ -218,7 +219,7 @@ public class WorkerHandlerTest extends BaseTest {
public void testDestroy() {
final WorkerHandler handler = WorkerHandler.get("handler");
assertTrue(handler.getThread().isAlive());
- WorkerHandler.destroy();
+ handler.destroy();
// Wait for the thread to die.
try { handler.getThread().join(500); } catch (InterruptedException ignore) {}
assertFalse(handler.getThread().isAlive());
@@ -226,4 +227,15 @@ public class WorkerHandlerTest extends BaseTest {
assertNotSame(handler, newHandler);
assertTrue(newHandler.getThread().isAlive());
}
+
+ @Test
+ public void testDestroyAll() {
+ final WorkerHandler handler1 = WorkerHandler.get("handler1");
+ final WorkerHandler handler2 = WorkerHandler.get("handler2");
+ WorkerHandler.destroyAll();
+ WorkerHandler newHandler1 = WorkerHandler.get("handler1");
+ WorkerHandler newHandler2 = WorkerHandler.get("handler2");
+ assertNotSame(handler1, newHandler1);
+ assertNotSame(handler2, newHandler2);
+ }
}
diff --git a/cameraview/src/androidTest/java/com/otaliastudios/cameraview/overlay/OverlayDrawerTest.java b/cameraview/src/androidTest/java/com/otaliastudios/cameraview/overlay/OverlayDrawerTest.java
new file mode 100644
index 00000000..de620f05
--- /dev/null
+++ b/cameraview/src/androidTest/java/com/otaliastudios/cameraview/overlay/OverlayDrawerTest.java
@@ -0,0 +1,120 @@
+package com.otaliastudios.cameraview.overlay;
+
+
+import android.content.res.XmlResourceParser;
+import android.graphics.Canvas;
+import android.util.AttributeSet;
+import android.util.Xml;
+import android.view.View;
+import android.view.ViewGroup;
+
+import androidx.annotation.NonNull;
+import androidx.test.annotation.UiThreadTest;
+import androidx.test.ext.junit.runners.AndroidJUnit4;
+import androidx.test.filters.SmallTest;
+
+import com.otaliastudios.cameraview.BaseTest;
+import com.otaliastudios.cameraview.internal.egl.EglBaseSurface;
+import com.otaliastudios.cameraview.internal.egl.EglCore;
+import com.otaliastudios.cameraview.internal.egl.EglViewport;
+import com.otaliastudios.cameraview.size.Size;
+
+import org.hamcrest.BaseMatcher;
+import org.hamcrest.Description;
+import org.hamcrest.Matcher;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.mockito.Mockito;
+
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertThat;
+import static org.junit.Assert.assertTrue;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.ArgumentMatchers.anyFloat;
+import static org.mockito.ArgumentMatchers.anyLong;
+import static org.mockito.ArgumentMatchers.eq;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.never;
+import static org.mockito.Mockito.reset;
+import static org.mockito.Mockito.spy;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+
+@RunWith(AndroidJUnit4.class)
+@SmallTest
+public class OverlayDrawerTest extends BaseTest {
+
+ private final static int WIDTH = 100;
+ private final static int HEIGHT = 100;
+
+ private EglCore eglCore;
+ private EglBaseSurface eglSurface;
+
+ @Before
+ public void setUp() {
+ eglCore = new EglCore(null, EglCore.FLAG_RECORDABLE);
+ eglSurface = new EglBaseSurface(eglCore);
+ eglSurface.createOffscreenSurface(WIDTH, HEIGHT);
+ eglSurface.makeCurrent();
+ }
+
+ @After
+ public void tearDown() {
+ eglSurface.releaseEglSurface();
+ eglSurface = null;
+ eglCore.release();
+ eglCore = null;
+ }
+
+ @Test
+ public void testDraw() {
+ Overlay overlay = mock(Overlay.class);
+ OverlayDrawer drawer = new OverlayDrawer(overlay, new Size(WIDTH, HEIGHT));
+ drawer.draw(Overlay.Target.PICTURE_SNAPSHOT);
+ verify(overlay, times(1)).drawOn(
+ eq(Overlay.Target.PICTURE_SNAPSHOT),
+ any(Canvas.class));
+ }
+
+ @Test
+ public void testGetTransform() {
+ // We'll check that the transform is not all zeros, which is highly unlikely
+ // (the default transform should be the identity matrix)
+ OverlayDrawer drawer = new OverlayDrawer(mock(Overlay.class), new Size(WIDTH, HEIGHT));
+ drawer.draw(Overlay.Target.PICTURE_SNAPSHOT);
+ assertThat(drawer.getTransform(), new BaseMatcher<float[]>() {
+ public void describeTo(Description description) { }
+ public boolean matches(Object item) {
+ float[] array = (float[]) item;
+ for (float value : array) {
+ if (value != 0.0F) return true;
+ }
+ return false;
+ }
+ });
+ }
+
+ @Test
+ public void testRender() {
+ OverlayDrawer drawer = new OverlayDrawer(mock(Overlay.class), new Size(WIDTH, HEIGHT));
+ drawer.mViewport = spy(drawer.mViewport);
+ drawer.draw(Overlay.Target.PICTURE_SNAPSHOT);
+ drawer.render();
+ verify(drawer.mViewport, times(1)).drawFrame(
+ drawer.mTextureId,
+ drawer.getTransform()
+ );
+ }
+
+ @Test
+ public void testRelease() {
+ OverlayDrawer drawer = new OverlayDrawer(mock(Overlay.class), new Size(WIDTH, HEIGHT));
+ EglViewport viewport = spy(drawer.mViewport);
+ drawer.mViewport = viewport;
+ drawer.release();
+ verify(viewport, times(1)).release();
+ }
+}
diff --git a/cameraview/src/androidTest/java/com/otaliastudios/cameraview/video/VideoRecorderTest.java b/cameraview/src/androidTest/java/com/otaliastudios/cameraview/video/VideoRecorderTest.java
index 73175225..edafd44c 100644
--- a/cameraview/src/androidTest/java/com/otaliastudios/cameraview/video/VideoRecorderTest.java
+++ b/cameraview/src/androidTest/java/com/otaliastudios/cameraview/video/VideoRecorderTest.java
@@ -29,7 +29,7 @@ public class VideoRecorderTest extends BaseTest {
}
@Override
- protected void onStop() {
+ protected void onStop(boolean isCameraShutdown) {
dispatchVideoRecordingEnd();
dispatchResult();
}
@@ -37,7 +37,7 @@ public class VideoRecorderTest extends BaseTest {
recorder.start(result);
Mockito.verify(listener,Mockito.times(1) )
.onVideoRecordingStart();
- recorder.stop();
+ recorder.stop(false);
Mockito.verify(listener, Mockito.times(1))
.onVideoRecordingEnd();
Mockito.verify(listener, Mockito.times(1))
diff --git a/cameraview/src/main/java/com/otaliastudios/cameraview/CameraView.java b/cameraview/src/main/java/com/otaliastudios/cameraview/CameraView.java
index 45a382be..a978c184 100644
--- a/cameraview/src/main/java/com/otaliastudios/cameraview/CameraView.java
+++ b/cameraview/src/main/java/com/otaliastudios/cameraview/CameraView.java
@@ -428,10 +428,10 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
int height, width;
if (freeWidth) {
height = heightValue;
- width = (int) (height / ratio);
+ width = Math.round(height / ratio);
} else {
width = widthValue;
- height = (int) (width * ratio);
+ height = Math.round(width * ratio);
}
LOG.i("onMeasure:", "one dimension was free, we adapted it to fit the aspect ratio.",
"(" + width + "x" + height + ")");
@@ -448,10 +448,10 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
int height, width;
if (freeWidth) {
height = heightValue;
- width = Math.min((int) (height / ratio), widthValue);
+ width = Math.min(Math.round(height / ratio), widthValue);
} else {
width = widthValue;
- height = Math.min((int) (width * ratio), heightValue);
+ height = Math.min(Math.round(width * ratio), heightValue);
}
LOG.i("onMeasure:", "one dimension was EXACTLY, another AT_MOST.",
"We have TRIED to fit the aspect ratio, but it's not guaranteed.",
@@ -468,10 +468,10 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
if (atMostRatio >= ratio) {
// We must reduce height.
width = widthValue;
- height = (int) (width * ratio);
+ height = Math.round(width * ratio);
} else {
height = heightValue;
- width = (int) (height / ratio);
+ width = Math.round(height / ratio);
}
LOG.i("onMeasure:", "both dimension were AT_MOST.",
"We fit the preview aspect ratio.",
@@ -1469,9 +1469,8 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
* @see #takePicture()
*/
public void takePictureSnapshot() {
- if (getWidth() == 0 || getHeight() == 0) return;
PictureResult.Stub stub = new PictureResult.Stub();
- mCameraEngine.takePictureSnapshot(stub, AspectRatio.of(getWidth(), getHeight()));
+ mCameraEngine.takePictureSnapshot(stub);
}
@@ -1503,9 +1502,8 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
* @param file a file where the video will be saved
*/
public void takeVideoSnapshot(@NonNull File file) {
- if (getWidth() == 0 || getHeight() == 0) return;
VideoResult.Stub stub = new VideoResult.Stub();
- mCameraEngine.takeVideoSnapshot(stub, file, AspectRatio.of(getWidth(), getHeight()));
+ mCameraEngine.takeVideoSnapshot(stub, file);
mUiHandler.post(new Runnable() {
@Override
public void run() {
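
A note on the rounding change above: an (int) cast truncates toward zero, while Math.round() picks the nearest integer, so the adapted dimension tracks the aspect ratio more faithfully. A minimal sketch with made-up values:

```java
float ratio = 9F / 16F;                      // hypothetical height/width ratio
int height = 1001;
int castWidth  = (int) (height / ratio);     // 1779 (1779.55... truncated)
int roundWidth = Math.round(height / ratio); // 1780 (nearest integer)
```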
diff --git a/cameraview/src/main/java/com/otaliastudios/cameraview/engine/Camera1Engine.java b/cameraview/src/main/java/com/otaliastudios/cameraview/engine/Camera1Engine.java
index 5ee4a480..5689fcf9 100644
--- a/cameraview/src/main/java/com/otaliastudios/cameraview/engine/Camera1Engine.java
+++ b/cameraview/src/main/java/com/otaliastudios/cameraview/engine/Camera1Engine.java
@@ -231,7 +231,7 @@ public class Camera1Engine extends CameraEngine implements
@Override
protected Task<Void> onStopPreview() {
if (mVideoRecorder != null) {
- mVideoRecorder.stop();
+ mVideoRecorder.stop(true);
mVideoRecorder = null;
}
mPictureRecorder = null;
@@ -306,10 +306,9 @@ public class Camera1Engine extends CameraEngine implements
@WorkerThread
@Override
- protected void onTakePictureSnapshot(@NonNull PictureResult.Stub stub, @NonNull AspectRatio viewAspectRatio) {
+ protected void onTakePictureSnapshot(@NonNull PictureResult.Stub stub, @NonNull AspectRatio outputRatio) {
stub.size = getUncroppedSnapshotSize(Reference.OUTPUT); // Not the real size: it will be cropped to match the view ratio
stub.rotation = getAngles().offset(Reference.SENSOR, Reference.OUTPUT, Axis.RELATIVE_TO_SENSOR); // Actually it will be rotated and set to 0.
- AspectRatio outputRatio = getAngles().flip(Reference.OUTPUT, Reference.VIEW) ? viewAspectRatio.flip() : viewAspectRatio;
if (mPreview instanceof GlCameraPreview && Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT) {
mPictureRecorder = new SnapshotGlPictureRecorder(stub, this, (GlCameraPreview) mPreview, outputRatio, getOverlay());
@@ -343,7 +342,7 @@ public class Camera1Engine extends CameraEngine implements
@SuppressLint("NewApi")
@WorkerThread
@Override
- protected void onTakeVideoSnapshot(@NonNull VideoResult.Stub stub, @NonNull AspectRatio viewAspectRatio) {
+ protected void onTakeVideoSnapshot(@NonNull VideoResult.Stub stub, @NonNull AspectRatio outputRatio) {
if (!(mPreview instanceof GlCameraPreview)) {
throw new IllegalStateException("Video snapshots are only supported with GlCameraPreview.");
}
@@ -355,7 +354,6 @@ public class Camera1Engine extends CameraEngine implements
if (outputSize == null) {
throw new IllegalStateException("outputSize should not be null.");
}
- AspectRatio outputRatio = getAngles().flip(Reference.VIEW, Reference.OUTPUT) ? viewAspectRatio.flip() : viewAspectRatio;
Rect outputCrop = CropHelper.computeCrop(outputSize, outputRatio);
outputSize = new Size(outputCrop.width(), outputCrop.height());
stub.size = outputSize;
diff --git a/cameraview/src/main/java/com/otaliastudios/cameraview/engine/Camera2Engine.java b/cameraview/src/main/java/com/otaliastudios/cameraview/engine/Camera2Engine.java
index 24c36554..2b471bf7 100644
--- a/cameraview/src/main/java/com/otaliastudios/cameraview/engine/Camera2Engine.java
+++ b/cameraview/src/main/java/com/otaliastudios/cameraview/engine/Camera2Engine.java
@@ -535,7 +535,7 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
if (mVideoRecorder != null) {
// This should synchronously call onVideoResult that will reset the repeating builder
// to the PREVIEW template. This is very important.
- mVideoRecorder.stop();
+ mVideoRecorder.stop(true);
mVideoRecorder = null;
}
mPictureRecorder = null;
@@ -610,10 +610,9 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
@WorkerThread
@Override
- protected void onTakePictureSnapshot(@NonNull PictureResult.Stub stub, @NonNull AspectRatio viewAspectRatio) {
+ protected void onTakePictureSnapshot(@NonNull PictureResult.Stub stub, @NonNull AspectRatio outputRatio) {
stub.size = getUncroppedSnapshotSize(Reference.OUTPUT); // Not the real size: it will be cropped to match the view ratio
stub.rotation = getAngles().offset(Reference.SENSOR, Reference.OUTPUT, Axis.RELATIVE_TO_SENSOR); // Actually it will be rotated and set to 0.
- AspectRatio outputRatio = getAngles().flip(Reference.OUTPUT, Reference.VIEW) ? viewAspectRatio.flip() : viewAspectRatio;
if (mPreview instanceof GlCameraPreview) {
mPictureRecorder = new SnapshotGlPictureRecorder(stub, this, (GlCameraPreview) mPreview, outputRatio, getOverlay());
} else {
@@ -695,7 +694,7 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
@WorkerThread
@Override
- protected void onTakeVideoSnapshot(@NonNull VideoResult.Stub stub, @NonNull AspectRatio viewAspectRatio) {
+ protected void onTakeVideoSnapshot(@NonNull VideoResult.Stub stub, @NonNull AspectRatio outputRatio) {
if (!(mPreview instanceof GlCameraPreview)) {
throw new IllegalStateException("Video snapshots are only supported with GlCameraPreview.");
}
@@ -704,7 +703,6 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
if (outputSize == null) {
throw new IllegalStateException("outputSize should not be null.");
}
- AspectRatio outputRatio = getAngles().flip(Reference.VIEW, Reference.OUTPUT) ? viewAspectRatio.flip() : viewAspectRatio;
Rect outputCrop = CropHelper.computeCrop(outputSize, outputRatio);
outputSize = new Size(outputCrop.width(), outputCrop.height());
stub.size = outputSize;
@@ -1257,8 +1255,8 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
private void onAutoFocusCapture(@NonNull CaptureResult result) {
Integer afState = result.get(CaptureResult.CONTROL_AF_STATE);
if (afState == null) {
- LOG.e("onAutoFocusCapture", "afState is null! Assuming AF failed.");
- afState = CaptureResult.CONTROL_AF_STATE_NOT_FOCUSED_LOCKED;
+ LOG.i("onAutoFocusCapture", "afState is null! This can happen for partial results. Waiting.");
+ return;
}
switch (afState) {
case CaptureRequest.CONTROL_AF_STATE_FOCUSED_LOCKED: {
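
For context on the afState change above: camera2 delivers partial results, where any key (including CONTROL_AF_STATE) may be null and the value arrives in a later partial or in the final total result. Returning instead of assuming failure lets a later callback finish the focus routine. A condensed sketch of the callback flow, with android.hardware.camera2 types (not code from this diff):

```java
CameraCaptureSession.CaptureCallback callback = new CameraCaptureSession.CaptureCallback() {
    @Override
    public void onCaptureProgressed(@NonNull CameraCaptureSession session,
                                    @NonNull CaptureRequest request,
                                    @NonNull CaptureResult partialResult) {
        process(partialResult); // partials may miss CONTROL_AF_STATE
    }

    @Override
    public void onCaptureCompleted(@NonNull CameraCaptureSession session,
                                   @NonNull CaptureRequest request,
                                   @NonNull TotalCaptureResult result) {
        process(result); // total results carry every key
    }

    private void process(@NonNull CaptureResult result) {
        Integer afState = result.get(CaptureResult.CONTROL_AF_STATE);
        if (afState == null) return; // wait for a more complete result
        // ... handle CONTROL_AF_STATE_FOCUSED_LOCKED / NOT_FOCUSED_LOCKED ...
    }
};
```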
diff --git a/cameraview/src/main/java/com/otaliastudios/cameraview/engine/CameraEngine.java b/cameraview/src/main/java/com/otaliastudios/cameraview/engine/CameraEngine.java
index 2d2b1d60..8a05f7d3 100644
--- a/cameraview/src/main/java/com/otaliastudios/cameraview/engine/CameraEngine.java
+++ b/cameraview/src/main/java/com/otaliastudios/cameraview/engine/CameraEngine.java
@@ -1087,9 +1087,8 @@ public abstract class CameraEngine implements
* The snapshot size is the {@link #getPreviewStreamSize(Reference)}, but cropped based on the
* view/surface aspect ratio.
* @param stub a picture stub
- * @param viewAspectRatio the view aspect ratio
*/
- public final void takePictureSnapshot(final @NonNull PictureResult.Stub stub, @NonNull final AspectRatio viewAspectRatio) {
+ public final void takePictureSnapshot(final @NonNull PictureResult.Stub stub) {
LOG.v("takePictureSnapshot", "scheduling");
mHandler.run(new Runnable() {
@Override
@@ -1101,7 +1100,9 @@ public abstract class CameraEngine implements
stub.isSnapshot = true;
stub.facing = mFacing;
// Leave the other parameters to subclasses.
- onTakePictureSnapshot(stub, viewAspectRatio);
+ //noinspection ConstantConditions
+ AspectRatio ratio = AspectRatio.of(getPreviewSurfaceSize(Reference.OUTPUT));
+ onTakePictureSnapshot(stub, ratio);
}
});
}
@@ -1155,9 +1156,8 @@ public abstract class CameraEngine implements
/**
* @param stub a video stub
* @param file the output file
- * @param viewAspectRatio the view aspect ratio
*/
- public final void takeVideoSnapshot(final @NonNull VideoResult.Stub stub, @NonNull final File file, @NonNull final AspectRatio viewAspectRatio) {
+ public final void takeVideoSnapshot(final @NonNull VideoResult.Stub stub, @NonNull final File file) {
LOG.v("takeVideoSnapshot", "scheduling");
mHandler.run(new Runnable() {
@Override
@@ -1175,7 +1175,9 @@ public abstract class CameraEngine implements
stub.audio = mAudio;
stub.maxSize = mVideoMaxSize;
stub.maxDuration = mVideoMaxDuration;
- onTakeVideoSnapshot(stub, viewAspectRatio);
+ //noinspection ConstantConditions
+ AspectRatio ratio = AspectRatio.of(getPreviewSurfaceSize(Reference.OUTPUT));
+ onTakeVideoSnapshot(stub, ratio);
}
});
}
@@ -1187,7 +1189,7 @@ public abstract class CameraEngine implements
public void run() {
LOG.i("stopVideo", "executing.", "isTakingVideo?", isTakingVideo());
if (mVideoRecorder != null) {
- mVideoRecorder.stop();
+ mVideoRecorder.stop(false);
mVideoRecorder = null;
}
}
@@ -1220,10 +1222,10 @@ public abstract class CameraEngine implements
protected abstract void onTakePicture(@NonNull PictureResult.Stub stub);
@WorkerThread
- protected abstract void onTakePictureSnapshot(@NonNull PictureResult.Stub stub, @NonNull AspectRatio viewAspectRatio);
+ protected abstract void onTakePictureSnapshot(@NonNull PictureResult.Stub stub, @NonNull AspectRatio outputRatio);
@WorkerThread
- protected abstract void onTakeVideoSnapshot(@NonNull VideoResult.Stub stub, @NonNull AspectRatio viewAspectRatio);
+ protected abstract void onTakeVideoSnapshot(@NonNull VideoResult.Stub stub, @NonNull AspectRatio outputRatio);
@WorkerThread
protected abstract void onTakeVideo(@NonNull VideoResult.Stub stub);
diff --git a/cameraview/src/main/java/com/otaliastudios/cameraview/internal/Issue514Workaround.java b/cameraview/src/main/java/com/otaliastudios/cameraview/internal/Issue514Workaround.java
new file mode 100644
index 00000000..13745800
--- /dev/null
+++ b/cameraview/src/main/java/com/otaliastudios/cameraview/internal/Issue514Workaround.java
@@ -0,0 +1,119 @@
+package com.otaliastudios.cameraview.internal;
+
+import android.graphics.Canvas;
+import android.graphics.Color;
+import android.graphics.SurfaceTexture;
+import android.opengl.GLES11Ext;
+import android.opengl.GLES20;
+import android.view.Surface;
+import com.otaliastudios.cameraview.internal.egl.EglViewport;
+import com.otaliastudios.cameraview.preview.RendererThread;
+
+
+/**
+ * Fixes an issue that affects snapshot pictures and video recording on some devices.
+ * The root cause is so unclear that I wanted a separate class holding the code and comments.
+ *
+ * WHEN TO USE THIS CLASS
+ * There is actually no need for this class in some cases:
+ * - when we don't have overlays, everything works
+ * - on the majority of devices, everything works
+ * But some devices show issue #514, so they need this class to fix it.
+ * We always use it, since it should have close to no performance impact.
+ *
+ * SNAPSHOT PROCEDURE
+ * The issue is about picture and video snapshots with overlays. In both cases, we:
+ * 1. Take textureId from the camera preview
+ * 2. Take EGLContext from the camera preview thread ({@link RendererThread})
+ * 3. Create an overlayTextureId
+ * 4. Create an overlaySurfaceTexture
+ * 5. Create an overlaySurface
+ * 6. Move to another thread
+ * 7. Create a new EGLContext using the old context as a shared context so we have texture data
+ * 8. Create a new EGLWindow using some surface as output
+ * 9. For each frame:
+ * 9A. Draw overlays on the overlaySurface.lockCanvas() / unlockCanvasAndPost()
+ * 9B. Publish overlays to GL texture using overlaySurfaceTexture.updateTexImage()
+ * 9C. GLES - draw textureId
+ * 9D. GLES - draw overlayTextureId
+ * Both textures are drawn on the same EGLWindow and we manage to overlay them with {@link GLES20#GL_BLEND}.
+ * This is the whole procedure and it works for the majority of devices and situations.
+ *
+ * ISSUE DESCRIPTION
+ * The #514 issue can be described as follows:
+ * - Overlays have no transparency: background is {@link Color#BLACK} and covers the video
+ * - Overlays have distorted colors: {@link Color#RED} becomes greenish,
+ * {@link Color#GREEN} becomes blueish,
+ * {@link Color#BLUE} becomes reddish
+ *
+ * ISSUE INSIGHTS
+ * After painful debugging, we have reached these conclusions:
+ * 1. Overlays are drawn on {@link Canvas} with the correct format
+ * This can be checked for example by applying alpha to one overlay. The final color will
+ * be faded out, although on a black background. So the {@link Canvas} drawing step works well.
+ * 2. The GLES shader will always receive pixels in RGBA
+ * This seems to be a constant in Android - someone does the conversion for us at a lower level.
+ * This was confirmed for example by forcing A=0.5 and seeing the video frames behind the overlay
+ * black background, or by forcing to 0.0 some of the channels and seeing the output.
+ * 3. The {@link Canvas} / {@link Surface} pixels are wrongly treated as YUV!
+ * On problematic devices, some lower-level component treats our overlay RGBA pixels as YUV,
+ * and will CONVERT THEM TO RGBA. This means:
+ * 3A. Original alpha is dropped. The algorithm thinks we have passed YUV.
+ * 3B. Original colors are messed up. For example, (255,0,0,255,RGBA) is treated as (255,0,0,YUV)
+ * and converted back to RGB, becoming greenish (74,255,27,255,RGBA).
+ * Doing the same conversion for {@link Color#GREEN} and {@link Color#BLUE} confirms what we
+ * were seeing in the issue screenshots.
+ *
+ * So a pixel format conversion takes place when it shouldn't. We can't solve this:
+ * - It is done at a lower level and there's no real way for us to specify the surface format;
+ * it seems that these devices prefer a YUV format and misinterpret our {@link Canvas} pixels.
+ * - There is also no way to identify which devices will present this issue; it's a bug somewhere
+ * and it is implementation specific.
+ *
+ * THE MAGIC
+ * Hard to say why, but using this class fixes the described issue.
+ * It seems that when the {@link SurfaceTexture#updateTexImage()} method for the overlay surface
+ * is called - the one that updates the overlayTextureId - we must ensure that the CURRENTLY
+ * BOUND TEXTURE ID IS NOT 0. The id we choose to bind might be cameraTextureId, or overlayTextureId,
+ * or probably any other valid id, and should be passed to {@link #Issue514Workaround(int)}.
+ * [Tested with cameraTextureId and overlayTextureId: both do work.]
+ * [Tested with invalid id like 9999. This won't work.]
+ *
+ * This makes no sense, since overlaySurfaceTexture.updateTexImage() sets it to overlayTextureId
+ * anyway, but it fixes the issue. Specifically, after any draw operation with {@link EglViewport},
+ * the bound texture is reset to 0, so this must be undone here. We offer:
+ *
+ * - {@link #beforeOverlayUpdateTexImage()} to be called before the {@link SurfaceTexture#updateTexImage()} call
+ * - {@link #end()} to release and bring things back to normal state
+ *
+ * Since updating and rendering can happen on different threads with a shared EGL context,
+ * in case they do, the {@link #beforeOverlayUpdateTexImage()}, the actual updateTexImage() and
+ * finally the {@link EglViewport} drawing operations should be synchronized with a lock.
+ *
+ * REFERENCES
+ * https://github.com/natario1/CameraView/issues/514
+ * https://android.googlesource.com/platform/frameworks/native/+/5c1139f/libs/gui/SurfaceTexture.cpp
+ * I can see here that SurfaceTexture does indeed call glBindTexture with the same parameters whenever
+ * updateTexImage is called, but it also does other GL work first. That other GL work might
+ * break when we don't have a bound texture, on some specific hardware implementations.
+ */
+public class Issue514Workaround {
+
+ private final int textureId;
+
+ public Issue514Workaround(int textureId) {
+ this.textureId = textureId;
+ }
+
+ public void beforeOverlayUpdateTexImage() {
+ bindTexture(textureId);
+ }
+
+ public void end() {
+ bindTexture(0);
+ }
+
+ private void bindTexture(int textureId) {
+ GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textureId);
+ }
+}
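
Putting the javadoc together, the intended call order is roughly the following (a sketch; cameraTextureId, overlaySurfaceTexture, viewport and the transform are assumed to exist, as in OverlayDrawer below):

```java
Issue514Workaround workaround = new Issue514Workaround(cameraTextureId);
final Object lock = new Object();

// Update path (possibly on another thread sharing the EGL context):
synchronized (lock) {
    workaround.beforeOverlayUpdateTexImage(); // ensure a non-zero texture id is bound
    overlaySurfaceTexture.updateTexImage();   // publish Canvas pixels to overlayTextureId
}

// Render path:
synchronized (lock) {
    viewport.drawFrame(overlayTextureId, overlayTransform); // resets the binding to 0
}

// When done:
workaround.end(); // bind back to 0
```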
diff --git a/cameraview/src/main/java/com/otaliastudios/cameraview/internal/egl/EglBaseSurface.java b/cameraview/src/main/java/com/otaliastudios/cameraview/internal/egl/EglBaseSurface.java
index baff79e6..de2919fa 100644
--- a/cameraview/src/main/java/com/otaliastudios/cameraview/internal/egl/EglBaseSurface.java
+++ b/cameraview/src/main/java/com/otaliastudios/cameraview/internal/egl/EglBaseSurface.java
@@ -51,7 +51,7 @@ public class EglBaseSurface extends EglElement {
private int mWidth = -1;
private int mHeight = -1;
- protected EglBaseSurface(EglCore eglCore) {
+ public EglBaseSurface(EglCore eglCore) {
mEglCore = eglCore;
}
diff --git a/cameraview/src/main/java/com/otaliastudios/cameraview/internal/egl/EglViewport.java b/cameraview/src/main/java/com/otaliastudios/cameraview/internal/egl/EglViewport.java
index e0bee9d2..1cf8b53e 100644
--- a/cameraview/src/main/java/com/otaliastudios/cameraview/internal/egl/EglViewport.java
+++ b/cameraview/src/main/java/com/otaliastudios/cameraview/internal/egl/EglViewport.java
@@ -44,6 +44,8 @@ public class EglViewport extends EglElement {
// Stuff from Texture2dProgram
private int mProgramHandle;
private int mTextureTarget;
+ private int mTextureUnit;
+
// Program attributes
private int muMVPMatrixLocation;
private int muTexMatrixLocation;
@@ -60,20 +62,16 @@ public class EglViewport extends EglElement {
public EglViewport() {
mTextureTarget = GLES11Ext.GL_TEXTURE_EXTERNAL_OES;
+ mTextureUnit = GLES20.GL_TEXTURE0;
//init the default shader effect
mShaderEffect = new NoFilterEffect();
initProgram();
}
- private void initProgram(){
-
+ private void initProgram() {
release();
-
- mTextureTarget = GLES11Ext.GL_TEXTURE_EXTERNAL_OES;
-
mProgramHandle = createProgram(mShaderEffect.getVertexShader(), mShaderEffect.getFragmentShader());
-
maPositionLocation = GLES20.glGetAttribLocation(mProgramHandle, mShaderEffect.getPositionVariableName());
checkLocation(maPositionLocation, mShaderEffect.getPositionVariableName());
maTextureCoordLocation = GLES20.glGetAttribLocation(mProgramHandle, mShaderEffect.getTexttureCoordinateVariableName());
@@ -99,6 +97,7 @@ public class EglViewport extends EglElement {
check("glGenTextures");
int texId = textures[0];
+ GLES20.glActiveTexture(mTextureUnit);
GLES20.glBindTexture(mTextureTarget, texId);
check("glBindTexture " + texId);
@@ -152,14 +151,8 @@ public class EglViewport extends EglElement {
GLES20.glUseProgram(mProgramHandle);
check("glUseProgram");
- // enable blending, from: http://www.learnopengles.com/android-lesson-five-an-introduction-to-blending/
- GLES20.glDisable(GLES20.GL_CULL_FACE);
- GLES20.glDisable(GLES20.GL_DEPTH_TEST);
- GLES20.glEnable(GLES20.GL_BLEND);
- GLES20.glBlendFunc(GLES20.GL_SRC_ALPHA, GLES20.GL_ONE_MINUS_SRC_ALPHA);
-
// Set the texture.
- GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
+ GLES20.glActiveTexture(mTextureUnit);
GLES20.glBindTexture(mTextureTarget, textureId);
// Copy the model / view / projection matrix over.
diff --git a/cameraview/src/main/java/com/otaliastudios/cameraview/internal/utils/CropHelper.java b/cameraview/src/main/java/com/otaliastudios/cameraview/internal/utils/CropHelper.java
index 59586120..f7f71ee7 100644
--- a/cameraview/src/main/java/com/otaliastudios/cameraview/internal/utils/CropHelper.java
+++ b/cameraview/src/main/java/com/otaliastudios/cameraview/internal/utils/CropHelper.java
@@ -17,7 +17,7 @@ public class CropHelper {
public static Rect computeCrop(@NonNull Size currentSize, @NonNull AspectRatio targetRatio) {
int currentWidth = currentSize.getWidth();
int currentHeight = currentSize.getHeight();
- if (targetRatio.matches(currentSize)) {
+ if (targetRatio.matches(currentSize, 0.0005F)) {
return new Rect(0, 0, currentWidth, currentHeight);
}
@@ -26,13 +26,13 @@ public class CropHelper {
int x, y, width, height;
if (currentRatio.toFloat() > targetRatio.toFloat()) {
height = currentHeight;
- width = (int) (height * targetRatio.toFloat());
+ width = Math.round(height * targetRatio.toFloat());
y = 0;
- x = (currentWidth - width) / 2;
+ x = Math.round((currentWidth - width) / 2F);
} else {
width = currentWidth;
- height = (int) (width / targetRatio.toFloat());
- y = (currentHeight - height) / 2;
+ height = Math.round(width / targetRatio.toFloat());
+ y = Math.round((currentHeight - height) / 2F);
x = 0;
}
return new Rect(x, y, x + width, y + height);
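
A worked example of computeCrop() with the new rounding, cropping a 1920x1080 frame to a 1:1 target (illustrative numbers):

```java
Size current = new Size(1920, 1080);       // currentRatio = 16:9 ≈ 1.778
AspectRatio target = AspectRatio.of(1, 1); // targetRatio = 1.0
// currentRatio > targetRatio, so height is kept:
//   height = 1080
//   width  = Math.round(1080 * 1.0F)        = 1080
//   x      = Math.round((1920 - 1080) / 2F) = 420
//   y      = 0
Rect crop = CropHelper.computeCrop(current, target); // Rect(420, 0, 1500, 1080)
```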
diff --git a/cameraview/src/main/java/com/otaliastudios/cameraview/internal/utils/WorkerHandler.java b/cameraview/src/main/java/com/otaliastudios/cameraview/internal/utils/WorkerHandler.java
index 0b932842..6d0a2f49 100644
--- a/cameraview/src/main/java/com/otaliastudios/cameraview/internal/utils/WorkerHandler.java
+++ b/cameraview/src/main/java/com/otaliastudios/cameraview/internal/utils/WorkerHandler.java
@@ -14,6 +14,7 @@ import androidx.annotation.NonNull;
import java.lang.ref.WeakReference;
import java.util.concurrent.Callable;
import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.CountDownLatch;
import java.util.concurrent.Executor;
/**
@@ -25,6 +26,13 @@ public class WorkerHandler {
private final static CameraLogger LOG = CameraLogger.create(WorkerHandler.class.getSimpleName());
private final static ConcurrentHashMap<String, WeakReference<WorkerHandler>> sCache = new ConcurrentHashMap<>(4);
+ private final static String FALLBACK_NAME = "FallbackCameraThread";
+
+ // Store a hard reference to the fallback handler. We never use this, only update it
+ // anytime get() is called. This should ensure that this instance is not collected.
+ @SuppressWarnings("FieldCanBeLocal")
+ private static WorkerHandler sFallbackHandler;
+
/**
* Gets a possibly cached handler with the given name.
* @param name the handler name
@@ -36,14 +44,19 @@ public class WorkerHandler {
//noinspection ConstantConditions
WorkerHandler cached = sCache.get(name).get();
if (cached != null) {
- HandlerThread thread = cached.mThread;
- if (thread.isAlive() && !thread.isInterrupted()) {
+ if (cached.getThread().isAlive() && !cached.getThread().isInterrupted()) {
LOG.w("get:", "Reusing cached worker handler.", name);
return cached;
+ } else {
+ // Clean up the old thread before creating a new one
+ cached.destroy();
+ LOG.w("get:", "Thread reference found, but not alive or interrupted. Removing.", name);
+ sCache.remove(name);
}
+ } else {
+ LOG.w("get:", "Thread reference died. Removing.", name);
+ sCache.remove(name);
}
- LOG.w("get:", "Thread reference died, removing.", name);
- sCache.remove(name);
}
LOG.i("get:", "Creating new handler.", name);
@@ -58,7 +71,8 @@ public class WorkerHandler {
*/
@NonNull
public static WorkerHandler get() {
- return get("FallbackCameraThread");
+ sFallbackHandler = get(FALLBACK_NAME);
+ return sFallbackHandler;
}
/**
@@ -87,6 +101,20 @@ public class WorkerHandler {
WorkerHandler.this.run(command);
}
};
+
+ // HandlerThreads/Handlers sometimes have a significant warmup time.
+ // We want to spend this time here, so that when this object is built, it
+ // is fully operational.
+ final CountDownLatch latch = new CountDownLatch(1);
+ post(new Runnable() {
+ @Override
+ public void run() {
+ latch.countDown();
+ }
+ });
+ try {
+ latch.await();
+ } catch (InterruptedException ignore) {}
}
/**
@@ -183,6 +211,7 @@ public class WorkerHandler {
* Returns the android backing {@link Looper}.
* @return the looper
*/
+ @SuppressWarnings("WeakerAccess")
@NonNull
public Looper getLooper() {
return mThread.getLooper();
@@ -197,21 +226,34 @@ public class WorkerHandler {
return mExecutor;
}
+ /**
+ * Destroys this handler and its thread. After this method returns, the handler
+ * should be considered unusable.
+ *
+ * Internal note: this does not remove the thread from our cache, but it does
+ * interrupt it, so the next {@link #get(String)} call will remove it.
+ * In any case, we only store weak references.
+ */
+ public void destroy() {
+ HandlerThread thread = getThread();
+ if (thread.isAlive()) {
+ thread.interrupt();
+ thread.quit();
+ // after quit(), the thread will die at some point in the future. Might take some ms.
+ // try { handler.getThread().join(); } catch (InterruptedException ignore) {}
+ }
+ }
+
/**
* Destroys all handlers, interrupting their work and
* removing them from our cache.
*/
- public static void destroy() {
+ public static void destroyAll() {
for (String key : sCache.keySet()) {
WeakReference<WorkerHandler> ref = sCache.get(key);
//noinspection ConstantConditions
WorkerHandler handler = ref.get();
- if (handler != null && handler.getThread().isAlive()) {
- handler.getThread().interrupt();
- handler.getThread().quit();
- // after quit(), the thread will die at some point in the future. Might take some ms.
- // try { handler.getThread().join(); } catch (InterruptedException ignore) {}
- }
+ if (handler != null) handler.destroy();
ref.clear();
}
sCache.clear();
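
To summarize the renamed API: destroy() is now an instance method that quits a single handler's thread, while the old static destroy() becomes destroyAll(). A usage sketch (the "Uploader" name is just an example):

```java
WorkerHandler uploader = WorkerHandler.get("Uploader");
uploader.run(new Runnable() {
    @Override
    public void run() {
        // background work on the cached HandlerThread
    }
});
uploader.destroy();         // quits this handler's thread only; the next
                            // get("Uploader") call will detect it and recreate
WorkerHandler.destroyAll(); // destroys every cached handler and clears the cache
```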
diff --git a/cameraview/src/main/java/com/otaliastudios/cameraview/overlay/OverlayDrawer.java b/cameraview/src/main/java/com/otaliastudios/cameraview/overlay/OverlayDrawer.java
new file mode 100644
index 00000000..ab9b1c2a
--- /dev/null
+++ b/cameraview/src/main/java/com/otaliastudios/cameraview/overlay/OverlayDrawer.java
@@ -0,0 +1,131 @@
+package com.otaliastudios.cameraview.overlay;
+
+import android.graphics.Canvas;
+import android.graphics.Color;
+import android.graphics.PorterDuff;
+import android.graphics.SurfaceTexture;
+import android.opengl.GLES11Ext;
+import android.opengl.GLES20;
+import android.view.Surface;
+
+import androidx.annotation.NonNull;
+import androidx.annotation.VisibleForTesting;
+
+import com.otaliastudios.cameraview.CameraLogger;
+import com.otaliastudios.cameraview.internal.Issue514Workaround;
+import com.otaliastudios.cameraview.internal.egl.EglViewport;
+import com.otaliastudios.cameraview.size.Size;
+
+import java.nio.Buffer;
+
+
+/**
+ * Draws overlays through {@link Overlay}.
+ *
+ * - Provides a {@link Canvas} to be passed to the Overlay
+ * - Lets the overlay draw there: {@link #draw(Overlay.Target)}
+ * - Renders this into the current EGL window: {@link #render()}
+ * - Applies the {@link Issue514Workaround} the correct way
+ *
+ * In the future we might want to use a different approach than {@link EglViewport},
+ * {@link SurfaceTexture} and {@link GLES11Ext#GL_TEXTURE_EXTERNAL_OES},
+ * for example by using a regular {@link GLES20#GL_TEXTURE_2D} that might
+ * be filled through {@link GLES20#glTexImage2D(int, int, int, int, int, int, int, int, Buffer)}.
+ *
+ * The current approach has some issues, for example see {@link Issue514Workaround}.
+ */
+public class OverlayDrawer {
+
+ private static final String TAG = OverlayDrawer.class.getSimpleName();
+ private static final CameraLogger LOG = CameraLogger.create(TAG);
+
+ private Overlay mOverlay;
+ @VisibleForTesting int mTextureId;
+ private SurfaceTexture mSurfaceTexture;
+ private Surface mSurface;
+ private float[] mTransform = new float[16];
+ @VisibleForTesting EglViewport mViewport;
+ private Issue514Workaround mIssue514Workaround;
+ private final Object mIssue514WorkaroundLock = new Object();
+
+ public OverlayDrawer(@NonNull Overlay overlay, @NonNull Size size) {
+ mOverlay = overlay;
+ mViewport = new EglViewport();
+ mTextureId = mViewport.createTexture();
+ mSurfaceTexture = new SurfaceTexture(mTextureId);
+ mSurfaceTexture.setDefaultBufferSize(size.getWidth(), size.getHeight());
+ mSurface = new Surface(mSurfaceTexture);
+ mIssue514Workaround = new Issue514Workaround(mTextureId);
+ }
+
+ /**
+ * Should be called to draw the {@link Overlay} on the given {@link Overlay.Target}.
+ * This will provide a working {@link Canvas} to the overlay and also upload the
+ * drawn contents into a GLES texture.
+ * @param target the target
+ */
+ public void draw(@NonNull Overlay.Target target) {
+ try {
+ final Canvas surfaceCanvas = mSurface.lockCanvas(null);
+ surfaceCanvas.drawColor(Color.TRANSPARENT, PorterDuff.Mode.CLEAR);
+ mOverlay.drawOn(target, surfaceCanvas);
+ mSurface.unlockCanvasAndPost(surfaceCanvas);
+ } catch (Surface.OutOfResourcesException e) {
+ LOG.w("Got Surface.OutOfResourcesException while drawing video overlays", e);
+ }
+ synchronized (mIssue514WorkaroundLock) {
+ mIssue514Workaround.beforeOverlayUpdateTexImage();
+ mSurfaceTexture.updateTexImage();
+ }
+ mSurfaceTexture.getTransformMatrix(mTransform);
+ }
+
+ /**
+ * Returns the transform that should be used to render the drawn content.
+ * This should be called after {@link #draw(Overlay.Target)}; the returned matrix can be modified.
+ * @return the transform matrix
+ */
+ public float[] getTransform() {
+ return mTransform;
+ }
+
+ /**
+ * Renders the drawn content in the current EGL surface, assuming there is one.
+ * Should be called after {@link #draw(Overlay.Target)} and any {@link #getTransform()}
+ * modification.
+ */
+ public void render() {
+ // Enable blending
+ // Reference http://www.learnopengles.com/android-lesson-five-an-introduction-to-blending/
+ GLES20.glDisable(GLES20.GL_CULL_FACE);
+ GLES20.glDisable(GLES20.GL_DEPTH_TEST);
+ GLES20.glEnable(GLES20.GL_BLEND);
+ GLES20.glBlendFunc(GLES20.GL_SRC_ALPHA, GLES20.GL_ONE_MINUS_SRC_ALPHA);
+
+ synchronized (mIssue514WorkaroundLock) {
+ mViewport.drawFrame(mTextureId, mTransform);
+ }
+ }
+
+ /**
+ * Releases resources.
+ */
+ public void release() {
+ if (mIssue514Workaround != null) {
+ mIssue514Workaround.end();
+ mIssue514Workaround = null;
+ }
+ if (mSurfaceTexture != null) {
+ mSurfaceTexture.release();
+ mSurfaceTexture = null;
+ }
+ if (mSurface != null) {
+ mSurface.release();
+ mSurface = null;
+ }
+ if (mViewport != null) {
+ mViewport.release();
+ mViewport = null;
+ }
+ }
+}
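
The expected lifecycle, as consumed by the snapshot recorders below (a condensed sketch; overlay, width and height are assumed):

```java
OverlayDrawer drawer = new OverlayDrawer(overlay, new Size(width, height));
drawer.draw(Overlay.Target.PICTURE_SNAPSHOT); // Canvas pass + updateTexImage()
float[] transform = drawer.getTransform();    // optionally adjust before rendering
drawer.render();                              // blended draw into the current EGL surface
drawer.release();                             // frees texture, SurfaceTexture, Surface, viewport
```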
diff --git a/cameraview/src/main/java/com/otaliastudios/cameraview/picture/SnapshotGlPictureRecorder.java b/cameraview/src/main/java/com/otaliastudios/cameraview/picture/SnapshotGlPictureRecorder.java
index 494e1f1a..3fdf2bec 100644
--- a/cameraview/src/main/java/com/otaliastudios/cameraview/picture/SnapshotGlPictureRecorder.java
+++ b/cameraview/src/main/java/com/otaliastudios/cameraview/picture/SnapshotGlPictureRecorder.java
@@ -14,6 +14,8 @@ import android.os.Build;
import com.otaliastudios.cameraview.CameraLogger;
import com.otaliastudios.cameraview.PictureResult;
+import com.otaliastudios.cameraview.internal.Issue514Workaround;
+import com.otaliastudios.cameraview.internal.egl.EglBaseSurface;
import com.otaliastudios.cameraview.overlay.Overlay;
import com.otaliastudios.cameraview.controls.Facing;
import com.otaliastudios.cameraview.engine.CameraEngine;
@@ -24,6 +26,7 @@ import com.otaliastudios.cameraview.internal.egl.EglViewport;
import com.otaliastudios.cameraview.internal.egl.EglWindowSurface;
import com.otaliastudios.cameraview.internal.utils.CropHelper;
import com.otaliastudios.cameraview.internal.utils.WorkerHandler;
+import com.otaliastudios.cameraview.overlay.OverlayDrawer;
import com.otaliastudios.cameraview.preview.GlCameraPreview;
import com.otaliastudios.cameraview.preview.RendererFrameCallback;
import com.otaliastudios.cameraview.preview.RendererThread;
@@ -36,6 +39,23 @@ import androidx.annotation.Nullable;
import android.view.Surface;
+/**
+ * API 19.
+ * Records picture snapshots from the {@link GlCameraPreview}. It works as follows:
+ *
+ * - We register a one time {@link RendererFrameCallback} on the preview
+ * - We get the textureId and the frame callback on the {@link RendererThread}
+ * - [Optional: we construct another textureId for overlays]
+ * - We take a handle of the EGL context from the {@link RendererThread}
+ * - We move to another thread, and create a new EGL surface for that EGL context.
+ * - We make this new surface current, and re-draw the textureId on it
+ * - [Optional: fill the overlayTextureId and draw it on the same surface]
+ * - We use glReadPixels (through {@link EglBaseSurface#saveFrameTo(Bitmap.CompressFormat)}) and save to file.
+ *
+ * We create a new EGL surface and redraw the frame because:
+ * 1. We want to go off the renderer thread as soon as possible
+ * 2. We have overlays to be drawn - we don't want to draw them on the preview surface, not even for a frame.
+ */
public class SnapshotGlPictureRecorder extends PictureRecorder {
private static final String TAG = SnapshotGlPictureRecorder.class.getSimpleName();
@@ -47,6 +67,13 @@ public class SnapshotGlPictureRecorder extends PictureRecorder {
private Overlay mOverlay;
private boolean mHasOverlay;
+ private OverlayDrawer mOverlayDrawer;
+
+ private int mTextureId;
+ private float[] mTransform;
+
+
+ private EglViewport mViewport;
public SnapshotGlPictureRecorder(
@NonNull PictureResult.Stub stub,
@@ -67,146 +94,128 @@ public class SnapshotGlPictureRecorder extends PictureRecorder {
public void take() {
mPreview.addRendererFrameCallback(new RendererFrameCallback() {
- int mTextureId;
- SurfaceTexture mSurfaceTexture;
- float[] mTransform;
-
- int mOverlayTextureId = 0;
- SurfaceTexture mOverlaySurfaceTexture;
- Surface mOverlaySurface;
- float[] mOverlayTransform;
-
- EglViewport mViewport;
-
@RendererThread
public void onRendererTextureCreated(int textureId) {
- mTextureId = textureId;
- mViewport = new EglViewport();
- mSurfaceTexture = new SurfaceTexture(mTextureId, true);
- // Need to crop the size.
- Rect crop = CropHelper.computeCrop(mResult.size, mOutputRatio);
- mResult.size = new Size(crop.width(), crop.height());
- mSurfaceTexture.setDefaultBufferSize(mResult.size.getWidth(), mResult.size.getHeight());
- mTransform = new float[16];
-
- if (mHasOverlay) {
- mOverlayTextureId = mViewport.createTexture();
- mOverlaySurfaceTexture = new SurfaceTexture(mOverlayTextureId, true);
- mOverlaySurfaceTexture.setDefaultBufferSize(mResult.size.getWidth(), mResult.size.getHeight());
- mOverlaySurface = new Surface(mOverlaySurfaceTexture);
- mOverlayTransform = new float[16];
- }
+ SnapshotGlPictureRecorder.this.onRendererTextureCreated(textureId);
}
@RendererThread
@Override
public void onRendererFrame(@NonNull SurfaceTexture surfaceTexture, final float scaleX, final float scaleY, BaseShaderEffect shaderEffect) {
mPreview.removeRendererFrameCallback(this);
+ SnapshotGlPictureRecorder.this.onRendererFrame(surfaceTexture, scaleX, scaleY, shaderEffect);
+ }
+ });
+ }
- // This kinda work but has drawbacks:
- // - output is upside down due to coordinates in GL: need to flip the byte[] someway
- // - output is not rotated as we would like to: need to create a bitmap copy...
- // - works only in the renderer thread, where it allocates the buffer and reads pixels. Bad!
- /*
- ByteBuffer buffer = ByteBuffer.allocateDirect(width * height * 4);
- buffer.order(ByteOrder.LITTLE_ENDIAN);
- GLES20.glReadPixels(0, 0, width, height, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, buffer);
- buffer.rewind();
- ByteArrayOutputStream bos = new ByteArrayOutputStream(buffer.array().length);
- Bitmap bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
- bitmap.copyPixelsFromBuffer(buffer);
- bitmap.compress(Bitmap.CompressFormat.JPEG, 90, bos);
- bitmap.recycle(); */
-
- // For this reason it is better to create a new surface,
- // and draw the last frame again there.
- final EGLContext eglContext = EGL14.eglGetCurrentContext();
+ @RendererThread
+ @TargetApi(Build.VERSION_CODES.KITKAT)
+ private void onRendererTextureCreated(int textureId) {
+ mTextureId = textureId;
+ mViewport = new EglViewport();
+ // Need to crop the size.
+ Rect crop = CropHelper.computeCrop(mResult.size, mOutputRatio);
+ mResult.size = new Size(crop.width(), crop.height());
+ mTransform = new float[16];
+ Matrix.setIdentityM(mTransform, 0);
+
+ if (mHasOverlay) {
+ mOverlayDrawer = new OverlayDrawer(mOverlay, mResult.size);
+ }
+ }
+
+ /**
+ * The tricky part here is the EGL surface creation.
+ *
+ * We don't have a real output window for the EGL surface - we will use glReadPixels()
+ * and never call swapBuffers(), so what we draw is never published.
+ *
+ * 1. One option is to use a pbuffer EGL surface. This works, we just have to pass
+ * the correct width and height. However, it is significantly slower than the current
+ * solution.
+ *
+ * 2. Another option is to create the EGL surface out of an ImageReader.getSurface()
+ * and use the reader to create a JPEG. In this case, we would have to publish
+ * the frame with swapBuffers(). However, ImageReader currently does not support
+ * all formats, so it's risky. This is an example error that we get:
+ * "RGBA override BLOB format buffer should have height == width"
+ *
+ * The third option, which we are using, is to create the EGL surface using whatever
+ * {@link Surface} or {@link SurfaceTexture} we have at hand. Since we never call
+ * swapBuffers(), the frame will not actually be published. This is the fastest.
+ *
+ * @param scaleX frame scale x in {@link Reference#VIEW}
+ * @param scaleY frame scale y in {@link Reference#VIEW}
+ */
+ @RendererThread
+ @TargetApi(Build.VERSION_CODES.KITKAT)
+ private void onRendererFrame(final @NonNull SurfaceTexture surfaceTexture, final float scaleX, final float scaleY, @NonNull BaseShaderEffect effect) {
+ mViewport.changeShaderEffect(effect);
+ // Get the EGL context from the RendererThread, which is the one in which we have created
+ // the textureId and the overlayTextureId, managed by the GlSurfaceView.
+ // Next operations can then be performed on different threads using this handle.
+ final EGLContext eglContext = EGL14.eglGetCurrentContext();
+ // Calling this invalidates the rotation/scale logic below:
+ // surfaceTexture.getTransformMatrix(mTransform); // TODO activate and fix the logic.
+ WorkerHandler.execute(new Runnable() {
+ @Override
+ public void run() {
+ // 0. EGL window will need an output.
+ // We create a fake one as explained in javadocs.
+ final int fakeOutputTextureId = 9999;
+ SurfaceTexture fakeOutputSurface = new SurfaceTexture(fakeOutputTextureId);
+ fakeOutputSurface.setDefaultBufferSize(mResult.size.getWidth(), mResult.size.getHeight());
+
+ // 1. Create an EGL surface
final EglCore core = new EglCore(eglContext, EglCore.FLAG_RECORDABLE);
+ final EglBaseSurface eglSurface = new EglWindowSurface(core, fakeOutputSurface);
+ eglSurface.makeCurrent();
+
+ // 2. Apply scale and crop
+ boolean flip = mEngine.getAngles().flip(Reference.VIEW, Reference.SENSOR);
+ float realScaleX = flip ? scaleY : scaleX;
+ float realScaleY = flip ? scaleX : scaleY;
+ float scaleTranslX = (1F - realScaleX) / 2F;
+ float scaleTranslY = (1F - realScaleY) / 2F;
+ Matrix.translateM(mTransform, 0, scaleTranslX, scaleTranslY, 0);
+ Matrix.scaleM(mTransform, 0, realScaleX, realScaleY, 1);
+
+ // 3. Apply rotation and flip
+ Matrix.translateM(mTransform, 0, 0.5F, 0.5F, 0); // Go back to 0,0
+ Matrix.rotateM(mTransform, 0, -mResult.rotation, 0, 0, 1); // Rotate (not sure why we need the minus)
+ mResult.rotation = 0;
+ if (mResult.facing == Facing.FRONT) { // Flip horizontally for front camera
+ Matrix.scaleM(mTransform, 0, -1, 1, 1);
+ }
+ Matrix.translateM(mTransform, 0, -0.5F, -0.5F, 0); // Go back to old position
+
+ // 4. Do pretty much the same for overlays
+ if (mHasOverlay) {
+ // 1. First we must draw on the texture and get latest image
+ mOverlayDrawer.draw(Overlay.Target.PICTURE_SNAPSHOT);
+
+ // 2. Then we can apply the transformations
+ int rotation = mEngine.getAngles().offset(Reference.VIEW, Reference.OUTPUT, Axis.ABSOLUTE);
+ Matrix.translateM(mOverlayDrawer.getTransform(), 0, 0.5F, 0.5F, 0);
+ Matrix.rotateM(mOverlayDrawer.getTransform(), 0, rotation, 0, 0, 1);
+ // No need to flip the x axis for front camera, but need to flip the y axis always.
+ Matrix.scaleM(mOverlayDrawer.getTransform(), 0, 1, -1, 1);
+ Matrix.translateM(mOverlayDrawer.getTransform(), 0, -0.5F, -0.5F, 0);
+ }
- //set the current shader before taking the snapshot
- mViewport.changeShaderEffect(shaderEffect);
-
- // final EGLSurface oldSurface = EGL14.eglGetCurrentSurface(EGL14.EGL_DRAW);
- // final EGLDisplay oldDisplay = EGL14.eglGetCurrentDisplay();
- WorkerHandler.execute(new Runnable() {
- @Override
- public void run() {
- // 1. Get latest texture
- EglWindowSurface surface = new EglWindowSurface(core, mSurfaceTexture);
- surface.makeCurrent();
- mSurfaceTexture.updateTexImage();
- mSurfaceTexture.getTransformMatrix(mTransform);
-
- // 2. Apply scale and crop:
- // scaleX and scaleY are in REF_VIEW, while our input appears to be in REF_SENSOR.
- boolean flip = mEngine.getAngles().flip(Reference.VIEW, Reference.SENSOR);
- float realScaleX = flip ? scaleY : scaleX;
- float realScaleY = flip ? scaleX : scaleY;
- float scaleTranslX = (1F - realScaleX) / 2F;
- float scaleTranslY = (1F - realScaleY) / 2F;
- Matrix.translateM(mTransform, 0, scaleTranslX, scaleTranslY, 0);
- Matrix.scaleM(mTransform, 0, realScaleX, realScaleY, 1);
-
- // 3. Go back to 0,0 so that rotate and flip work well.
- Matrix.translateM(mTransform, 0, 0.5F, 0.5F, 0);
-
- // 4. Apply rotation:
- // Not sure why we need the minus here.
- Matrix.rotateM(mTransform, 0, -mResult.rotation, 0, 0, 1);
- mResult.rotation = 0;
-
- // 5. Flip horizontally for front camera:
- if (mResult.facing == Facing.FRONT) {
- Matrix.scaleM(mTransform, 0, -1, 1, 1);
- }
-
- // 6. Go back to old position.
- Matrix.translateM(mTransform, 0, -0.5F, -0.5F, 0);
-
- // 7. Do pretty much the same for overlays, though with
- // some differences.
- if (mHasOverlay) {
- // 1. First we must draw on the texture and get latest image.
- try {
- final Canvas surfaceCanvas = mOverlaySurface.lockCanvas(null);
- surfaceCanvas.drawColor(Color.TRANSPARENT, PorterDuff.Mode.CLEAR);
- mOverlay.drawOn(Overlay.Target.PICTURE_SNAPSHOT, surfaceCanvas);
- mOverlaySurface.unlockCanvasAndPost(surfaceCanvas);
- } catch (Surface.OutOfResourcesException e) {
- LOG.w("Got Surface.OutOfResourcesException while drawing picture overlays", e);
- }
- mOverlaySurfaceTexture.updateTexImage();
- mOverlaySurfaceTexture.getTransformMatrix(mOverlayTransform);
-
- // 2. Then we can apply the transformations.
- int rotation = mEngine.getAngles().offset(Reference.VIEW, Reference.OUTPUT, Axis.ABSOLUTE);
- Matrix.translateM(mOverlayTransform, 0, 0.5F, 0.5F, 0);
- Matrix.rotateM(mOverlayTransform, 0, rotation, 0, 0, 1);
- // No need to flip the x axis for front camera, but need to flip the y axis always.
- Matrix.scaleM(mOverlayTransform, 0, 1, -1, 1);
- Matrix.translateM(mOverlayTransform, 0, -0.5F, -0.5F, 0);
- }
-
- // 8. Draw and save
- mViewport.drawFrame(mTextureId, mTransform);
- if (mHasOverlay) mViewport.drawFrame(mOverlayTextureId, mOverlayTransform);
- // don't - surface.swapBuffers();
- mResult.data = surface.saveFrameTo(Bitmap.CompressFormat.JPEG);
- mResult.format = PictureResult.FORMAT_JPEG;
-
- // 9. Cleanup
- mSurfaceTexture.releaseTexImage();
- surface.release();
- mViewport.release();
- mSurfaceTexture.release();
- if (mHasOverlay) {
- mOverlaySurface.release();
- mOverlaySurfaceTexture.release();
- }
- core.release();
- dispatchResult();
- }
- });
+ // 5. Draw and save
+ mViewport.drawFrame(mTextureId, mTransform);
+ if (mHasOverlay) mOverlayDrawer.render();
+ mResult.format = PictureResult.FORMAT_JPEG;
+ mResult.data = eglSurface.saveFrameTo(Bitmap.CompressFormat.JPEG);
+
+ // 6. Cleanup
+ eglSurface.releaseEglSurface();
+ mViewport.release();
+ fakeOutputSurface.release();
+ if (mHasOverlay) mOverlayDrawer.release();
+ core.release();
+ dispatchResult();
}
});
}
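
The translate-rotate-translate sequence above is the usual way to transform about the texture center, since GL texture coordinates run from 0 to 1 and all these Matrix operations pivot on the origin. In isolation (a sketch, using a 90-degree rotation as the example):

```java
float[] m = new float[16];
Matrix.setIdentityM(m, 0);
Matrix.translateM(m, 0, 0.5F, 0.5F, 0);   // move the pivot to the texture center
Matrix.rotateM(m, 0, 90, 0, 0, 1);        // rotate about the Z axis
Matrix.scaleM(m, 0, -1, 1, 1);            // optional horizontal flip (front camera)
Matrix.translateM(m, 0, -0.5F, -0.5F, 0); // move the pivot back
```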
diff --git a/cameraview/src/main/java/com/otaliastudios/cameraview/size/AspectRatio.java b/cameraview/src/main/java/com/otaliastudios/cameraview/size/AspectRatio.java
index 71a47572..e13b73c1 100644
--- a/cameraview/src/main/java/com/otaliastudios/cameraview/size/AspectRatio.java
+++ b/cameraview/src/main/java/com/otaliastudios/cameraview/size/AspectRatio.java
@@ -19,7 +19,7 @@ public class AspectRatio implements Comparable<AspectRatio> {
* @return a (possibly cached) aspect ratio
*/
@NonNull
- public static AspectRatio of(Size size) {
+ public static AspectRatio of(@NonNull Size size) {
return AspectRatio.of(size.getWidth(), size.getHeight());
}
@@ -78,7 +78,6 @@ public class AspectRatio implements Comparable {
return mY;
}
- @SuppressWarnings("WeakerAccess")
public boolean matches(@NonNull Size size) {
int gcd = gcd(size.getWidth(), size.getHeight());
int x = size.getWidth() / gcd;
@@ -86,6 +85,10 @@ public class AspectRatio implements Comparable {
return mX == x && mY == y;
}
+ public boolean matches(@NonNull Size size, float tolerance) {
+ return Math.abs(toFloat() - (float) size.getWidth() / size.getHeight()) <= tolerance;
+ }
+
@Override
public boolean equals(Object o) {
if (o == null) {
@@ -107,7 +110,6 @@ public class AspectRatio implements Comparable {
return mX + ":" + mY;
}
- @SuppressWarnings("WeakerAccess")
public float toFloat() {
return (float) mX / mY;
}
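
The new overload compares ratios as floats instead of requiring an exact reduced-fraction match; CropHelper above relies on it with a 0.0005 tolerance. For example:

```java
AspectRatio target = AspectRatio.of(16, 9);
target.matches(new Size(1920, 1080));        // true:  reduces exactly to 16:9
target.matches(new Size(1920, 1082));        // false: reduces to 960:541
target.matches(new Size(1920, 1082), 0.01F); // true:  |1.7778 - 1.7745| <= 0.01
```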
diff --git a/cameraview/src/main/java/com/otaliastudios/cameraview/video/FullVideoRecorder.java b/cameraview/src/main/java/com/otaliastudios/cameraview/video/FullVideoRecorder.java
index abd73fcd..9c92deb3 100644
--- a/cameraview/src/main/java/com/otaliastudios/cameraview/video/FullVideoRecorder.java
+++ b/cameraview/src/main/java/com/otaliastudios/cameraview/video/FullVideoRecorder.java
@@ -103,11 +103,11 @@ public abstract class FullVideoRecorder extends VideoRecorder {
switch (what) {
case MediaRecorder.MEDIA_RECORDER_INFO_MAX_DURATION_REACHED:
mResult.endReason = VideoResult.REASON_MAX_DURATION_REACHED;
- stop();
+ stop(false);
break;
case MediaRecorder.MEDIA_RECORDER_INFO_MAX_FILESIZE_REACHED:
mResult.endReason = VideoResult.REASON_MAX_SIZE_REACHED;
- stop();
+ stop(false);
break;
}
}
@@ -130,7 +130,7 @@ public abstract class FullVideoRecorder extends VideoRecorder {
protected void onStart() {
if (!prepareMediaRecorder(mResult)) {
mResult = null;
- stop();
+ stop(false);
return;
}
@@ -141,12 +141,12 @@ public abstract class FullVideoRecorder extends VideoRecorder {
LOG.w("start:", "Error while starting media recorder.", e);
mResult = null;
mError = e;
- stop();
+ stop(false);
}
}
@Override
- protected void onStop() {
+ protected void onStop(boolean isCameraShutdown) {
if (mMediaRecorder != null) {
dispatchVideoRecordingEnd();
try {
diff --git a/cameraview/src/main/java/com/otaliastudios/cameraview/video/SnapshotVideoRecorder.java b/cameraview/src/main/java/com/otaliastudios/cameraview/video/SnapshotVideoRecorder.java
index fa28259e..74437d54 100644
--- a/cameraview/src/main/java/com/otaliastudios/cameraview/video/SnapshotVideoRecorder.java
+++ b/cameraview/src/main/java/com/otaliastudios/cameraview/video/SnapshotVideoRecorder.java
@@ -1,19 +1,15 @@
package com.otaliastudios.cameraview.video;
-import android.graphics.Canvas;
-import android.graphics.Color;
-import android.graphics.PorterDuff;
import android.graphics.SurfaceTexture;
import android.opengl.EGL14;
import android.os.Build;
-import android.view.Surface;
import com.otaliastudios.cameraview.CameraLogger;
import com.otaliastudios.cameraview.overlay.Overlay;
import com.otaliastudios.cameraview.VideoResult;
import com.otaliastudios.cameraview.controls.Audio;
import com.otaliastudios.cameraview.engine.CameraEngine;
-import com.otaliastudios.cameraview.internal.egl.EglViewport;
+import com.otaliastudios.cameraview.overlay.OverlayDrawer;
import com.otaliastudios.cameraview.preview.GlCameraPreview;
import com.otaliastudios.cameraview.preview.RendererFrameCallback;
import com.otaliastudios.cameraview.preview.RendererThread;
@@ -60,15 +56,11 @@ public class SnapshotVideoRecorder extends VideoRecorder implements RendererFram
private int mDesiredState = STATE_NOT_RECORDING;
private int mTextureId = 0;
- private int mOverlayTextureId = 0;
- private SurfaceTexture mOverlaySurfaceTexture;
- private Surface mOverlaySurface;
private Overlay mOverlay;
+ private OverlayDrawer mOverlayDrawer;
private boolean mHasOverlay;
private int mOverlayRotation;
- private EglViewport mViewport;
-
public SnapshotVideoRecorder(@NonNull CameraEngine engine,
@NonNull GlCameraPreview preview,
@Nullable Overlay overlay,
@@ -87,8 +79,16 @@ public class SnapshotVideoRecorder extends VideoRecorder implements RendererFram
}
@Override
- protected void onStop() {
- mDesiredState = STATE_NOT_RECORDING;
+ protected void onStop(boolean isCameraShutdown) {
+ if (isCameraShutdown) {
+ // The renderer callback might never be called. In my tests, it is not.
+ LOG.i("Stopping the encoder engine from isCameraShutdown.");
+ mDesiredState = STATE_NOT_RECORDING;
+ mCurrentState = STATE_NOT_RECORDING;
+ mEncoderEngine.stop();
+ } else {
+ mDesiredState = STATE_NOT_RECORDING;
+ }
}
@RendererThread
@@ -96,11 +96,7 @@ public class SnapshotVideoRecorder extends VideoRecorder implements RendererFram
public void onRendererTextureCreated(int textureId) {
mTextureId = textureId;
if (mHasOverlay) {
- mViewport = new EglViewport();
- mOverlayTextureId = mViewport.createTexture();
- mOverlaySurfaceTexture = new SurfaceTexture(mOverlayTextureId);
- mOverlaySurfaceTexture.setDefaultBufferSize(mResult.size.getWidth(), mResult.size.getHeight());
- mOverlaySurface = new Surface(mOverlaySurfaceTexture);
+ mOverlayDrawer = new OverlayDrawer(mOverlay, mResult.size);
}
}
@@ -110,9 +106,6 @@ public class SnapshotVideoRecorder extends VideoRecorder implements RendererFram
if (mCurrentState == STATE_NOT_RECORDING && mDesiredState == STATE_RECORDING) {
LOG.i("Starting the encoder engine.");
- //set current shader effect
- mViewport.changeShaderEffect(shaderEffect);
-
// Set default options
if (mResult.videoFrameRate <= 0) mResult.videoFrameRate = DEFAULT_VIDEO_FRAMERATE;
if (mResult.videoBitRate <= 0) mResult.videoBitRate = estimateVideoBitRate(mResult.size, mResult.videoFrameRate);
@@ -141,9 +134,13 @@ public class SnapshotVideoRecorder extends VideoRecorder implements RendererFram
videoConfig.textureId = mTextureId;
videoConfig.scaleX = scaleX;
videoConfig.scaleY = scaleY;
+ // Get the EGL context from the RendererThread, which is the one in which we have created
+ // the textureId and the overlayTextureId, managed by the GlSurfaceView.
+ // Next operations can then be performed on different threads using this handle.
videoConfig.eglContext = EGL14.eglGetCurrentContext();
if (mHasOverlay) {
- videoConfig.overlayTextureId = mOverlayTextureId;
+ videoConfig.overlayTarget = Overlay.Target.VIDEO_SNAPSHOT;
+ videoConfig.overlayDrawer = mOverlayDrawer;
videoConfig.overlayRotation = mOverlayRotation;
}
TextureMediaEncoder videoEncoder = new TextureMediaEncoder(videoConfig);
@@ -170,28 +167,10 @@ public class SnapshotVideoRecorder extends VideoRecorder implements RendererFram
LOG.v("dispatching frame.");
TextureMediaEncoder textureEncoder = (TextureMediaEncoder) mEncoderEngine.getVideoEncoder();
TextureMediaEncoder.Frame frame = textureEncoder.acquireFrame();
- frame.timestamp = surfaceTexture.getTimestamp();
+ frame.timestampNanos = surfaceTexture.getTimestamp();
frame.timestampMillis = System.currentTimeMillis(); // NOTE: this is an approximation but it seems to work.
surfaceTexture.getTransformMatrix(frame.transform);
-
- // get overlay
- if (mHasOverlay) {
- try {
- final Canvas surfaceCanvas = mOverlaySurface.lockCanvas(null);
- surfaceCanvas.drawColor(Color.TRANSPARENT, PorterDuff.Mode.CLEAR);
- mOverlay.drawOn(Overlay.Target.VIDEO_SNAPSHOT, surfaceCanvas);
- mOverlaySurface.unlockCanvasAndPost(surfaceCanvas);
- } catch (Surface.OutOfResourcesException e) {
- LOG.w("Got Surface.OutOfResourcesException while drawing video overlays", e);
- }
- mOverlaySurfaceTexture.updateTexImage();
- mOverlaySurfaceTexture.getTransformMatrix(frame.overlayTransform);
- }
-
- if (mEncoderEngine != null) {
- // Can happen on teardown. At least it used to.
- // NOTE: If this still happens, I would say we can still crash on mOverlaySurface
- // calls above. We might have to add some synchronization.
+ if (mEncoderEngine != null) { // Can happen on teardown. At least it used to.
mEncoderEngine.notify(TextureMediaEncoder.FRAME_EVENT, frame);
}
}
@@ -239,13 +218,9 @@ public class SnapshotVideoRecorder extends VideoRecorder implements RendererFram
mDesiredState = STATE_NOT_RECORDING;
mPreview.removeRendererFrameCallback(SnapshotVideoRecorder.this);
mPreview = null;
- if (mOverlaySurfaceTexture != null) {
- mOverlaySurfaceTexture.release();
- mOverlaySurfaceTexture = null;
- }
- if (mOverlaySurface != null) {
- mOverlaySurface.release();
- mOverlaySurface = null;
+ if (mOverlayDrawer != null) {
+ mOverlayDrawer.release();
+ mOverlayDrawer = null;
}
mEncoderEngine = null;
dispatchResult();
diff --git a/cameraview/src/main/java/com/otaliastudios/cameraview/video/VideoRecorder.java b/cameraview/src/main/java/com/otaliastudios/cameraview/video/VideoRecorder.java
index 02b52876..394cfbbe 100644
--- a/cameraview/src/main/java/com/otaliastudios/cameraview/video/VideoRecorder.java
+++ b/cameraview/src/main/java/com/otaliastudios/cameraview/video/VideoRecorder.java
@@ -64,9 +64,10 @@ public abstract class VideoRecorder {
/**
* Stops recording.
+ * @param isCameraShutdown whether this is a full shutdown because the camera is being closed
*/
- public final void stop() {
- onStop();
+ public final void stop(boolean isCameraShutdown) {
+ onStop(isCameraShutdown);
}
/**
@@ -79,13 +80,12 @@ public abstract class VideoRecorder {
protected abstract void onStart();
- protected abstract void onStop();
+ protected abstract void onStop(boolean isCameraShutdown);
/**
* Subclasses can call this to notify that the result was obtained,
* either with some error (null result) or with the actual stub, filled.
*/
- @SuppressWarnings("WeakerAccess")
@CallSuper
protected void dispatchResult() {
mIsRecording = false;
@@ -112,6 +112,7 @@ public abstract class VideoRecorder {
* Subclasses can call this to notify that the video recording has ended,
* although the video result might still be processed.
*/
+ @SuppressWarnings("WeakerAccess")
@CallSuper
protected void dispatchVideoRecordingEnd() {
if (mListener != null) {
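
With the flag threaded through, a subclass override now looks roughly like this. This is a minimal sketch; `stopNowAndRelease` and `requestGracefulStop` are hypothetical helpers standing in for a recorder's real teardown code:

```java
@Override
protected void onStop(boolean isCameraShutdown) {
    if (isCameraShutdown) {
        // The camera is going away: release right now, later callbacks may never fire.
        stopNowAndRelease();   // hypothetical helper
    } else {
        // User-initiated stop: let the pipeline drain and dispatch the result.
        requestGracefulStop(); // hypothetical helper
    }
}
```
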
diff --git a/cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/AudioConfig.java b/cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/AudioConfig.java
index 3d09b51e..fb663566 100644
--- a/cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/AudioConfig.java
+++ b/cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/AudioConfig.java
@@ -20,7 +20,7 @@ public class AudioConfig {
final int encoding = AudioFormat.ENCODING_PCM_16BIT; // Determines the sampleSizePerChannel
// The 44.1KHz frequency is the only setting guaranteed to be available on all devices.
final int samplingFrequency = 44100; // samples/sec
- final int sampleSizePerChannel = 2; // byte/sample/channel [16bit]
+ final int sampleSizePerChannel = 2; // byte/sample/channel [16bit]. If this changes, review noise introduction
final int byteRatePerChannel = samplingFrequency * sampleSizePerChannel; // byte/sec/channel
@NonNull
@@ -75,7 +75,7 @@ public class AudioConfig {
* @return the number of frames
*/
int audioRecordBufferFrames() {
- return 25;
+ return 50;
}
/**
@@ -91,6 +91,6 @@ public class AudioConfig {
* @return the buffer pool max size
*/
int bufferPoolMaxSize() {
- return 80;
+ return 500;
}
}
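
For reference, the arithmetic these fields encode, as a standalone sketch (mono case):

```java
int samplingFrequency = 44100;   // samples/sec
int sampleSizePerChannel = 2;    // bytes per sample (16 bit PCM)
int byteRatePerChannel = samplingFrequency * sampleSizePerChannel; // 88200 bytes/sec
int stereoByteRate = 2 * byteRatePerChannel;                       // 176400 bytes/sec
```

The bump from 25 to 50 recording frames and from 80 to 500 pooled buffers presumably just gives the recording thread more slack before the pool runs dry.
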
diff --git a/cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/AudioMediaEncoder.java b/cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/AudioMediaEncoder.java
index 62a060f1..9e9dac09 100644
--- a/cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/AudioMediaEncoder.java
+++ b/cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/AudioMediaEncoder.java
@@ -1,6 +1,5 @@
package com.otaliastudios.cameraview.video.encoding;
-import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
@@ -15,8 +14,10 @@ import androidx.annotation.RequiresApi;
import java.io.IOException;
import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
import java.util.HashMap;
import java.util.Map;
+import java.util.Random;
import java.util.concurrent.LinkedBlockingQueue;
/**
@@ -30,23 +31,24 @@ public class AudioMediaEncoder extends MediaEncoder {
private static final boolean PERFORMANCE_DEBUG = false;
private static final boolean PERFORMANCE_FILL_GAPS = true;
+ private static final int PERFORMANCE_MAX_GAPS = 8;
private boolean mRequestStop = false;
private AudioEncodingThread mEncoder;
private AudioRecordingThread mRecorder;
private ByteBufferPool mByteBufferPool;
- private ByteBuffer mZeroBuffer;
private final AudioTimestamp mTimestamp;
private AudioConfig mConfig;
private InputBufferPool mInputBufferPool = new InputBufferPool();
private final LinkedBlockingQueue<InputBuffer> mInputBufferQueue = new LinkedBlockingQueue<>();
+ private AudioNoise mAudioNoise;
// Just to debug performance.
- private int mSendCount = 0;
- private int mExecuteCount = 0;
- private long mAvgSendDelay = 0;
- private long mAvgExecuteDelay = 0;
- private Map<Long, Long> mSendStartMap = new HashMap<>();
+ private int mDebugSendCount = 0;
+ private int mDebugExecuteCount = 0;
+ private long mDebugSendAvgDelay = 0;
+ private long mDebugExecuteAvgDelay = 0;
+ private Map<Long, Long> mDebugSendStartMap = new HashMap<>();
public AudioMediaEncoder(@NonNull AudioConfig config) {
super("AudioEncoder");
@@ -76,7 +78,7 @@ public class AudioMediaEncoder extends MediaEncoder {
mMediaCodec.configure(audioFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
mMediaCodec.start();
mByteBufferPool = new ByteBufferPool(mConfig.frameSize(), mConfig.bufferPoolMaxSize());
- mZeroBuffer = ByteBuffer.allocateDirect(mConfig.frameSize());
+ mAudioNoise = new AudioNoise(mConfig);
}
@EncoderThread
@@ -130,11 +132,13 @@ public class AudioMediaEncoder extends MediaEncoder {
private AudioRecord mAudioRecord;
private ByteBuffer mCurrentBuffer;
- private int mReadBytes;
+ private int mCurrentReadBytes;
+
private long mLastTimeUs;
private long mFirstTimeUs = Long.MIN_VALUE;
private AudioRecordingThread() {
+ setPriority(Thread.MAX_PRIORITY);
final int minBufferSize = AudioRecord.getMinBufferSize(
mConfig.samplingFrequency,
mConfig.audioFormatChannels(),
@@ -152,14 +156,22 @@ public class AudioMediaEncoder extends MediaEncoder {
mConfig.audioFormatChannels(),
mConfig.encoding,
bufferSize);
- setPriority(Thread.MAX_PRIORITY);
}
@Override
public void run() {
mAudioRecord.startRecording();
while (!mRequestStop) {
- read(false);
+ if (!hasReachedMaxLength()) {
+ read(false);
+ } else {
+ // We have reached the max length, so stop reading.
+ // However, do not get out of the loop - the controller
+ // will call stop() on us soon. It's not our responsibility
+ // to stop ourselves.
+ //noinspection UnnecessaryContinue
+ continue;
+ }
}
LOG.w("Stop was requested. We're out of the loop. Will post an endOfStream.");
// Last input with 0 length. This will signal the endOfStream.
@@ -192,25 +204,25 @@ public class AudioMediaEncoder extends MediaEncoder {
// with left and right bytes. https://stackoverflow.com/q/20594750/4288782
if (PERFORMANCE_DEBUG) {
long before = System.nanoTime();
- mReadBytes = mAudioRecord.read(mCurrentBuffer, mConfig.frameSize());
+ mCurrentReadBytes = mAudioRecord.read(mCurrentBuffer, mConfig.frameSize());
long after = System.nanoTime();
float delayMillis = (after - before) / 1000000F;
- float durationMillis = AudioTimestamp.bytesToMillis(mReadBytes, mConfig.byteRate());
+ float durationMillis = AudioTimestamp.bytesToMillis(mCurrentReadBytes, mConfig.byteRate());
LOG.v("read thread - reading took:", delayMillis,
"should be:", durationMillis,
"delay:", delayMillis - durationMillis);
} else {
- mReadBytes = mAudioRecord.read(mCurrentBuffer, mConfig.frameSize());
+ mCurrentReadBytes = mAudioRecord.read(mCurrentBuffer, mConfig.frameSize());
}
- LOG.i("read thread - eos:", endOfStream, "- Read new audio frame. Bytes:", mReadBytes);
- if (mReadBytes > 0) { // Good read: increase PTS.
- increaseTime(mReadBytes, endOfStream);
+ LOG.i("read thread - eos:", endOfStream, "- Read new audio frame. Bytes:", mCurrentReadBytes);
+ if (mCurrentReadBytes > 0) { // Good read: increase PTS.
+ increaseTime(mCurrentReadBytes, endOfStream);
LOG.i("read thread - eos:", endOfStream, "- mLastTimeUs:", mLastTimeUs);
- mCurrentBuffer.limit(mReadBytes);
+ mCurrentBuffer.limit(mCurrentReadBytes);
enqueue(mCurrentBuffer, mLastTimeUs, endOfStream);
- } else if (mReadBytes == AudioRecord.ERROR_INVALID_OPERATION) {
+ } else if (mCurrentReadBytes == AudioRecord.ERROR_INVALID_OPERATION) {
LOG.e("read thread - eos:", endOfStream, "- Got AudioRecord.ERROR_INVALID_OPERATION");
- } else if (mReadBytes == AudioRecord.ERROR_BAD_VALUE) {
+ } else if (mCurrentReadBytes == AudioRecord.ERROR_BAD_VALUE) {
LOG.e("read thread - eos:", endOfStream, "- Got AudioRecord.ERROR_BAD_VALUE");
}
}
@@ -235,43 +247,21 @@ public class AudioMediaEncoder extends MediaEncoder {
}
// See if we reached the max length value.
- boolean didReachMaxLength = (mLastTimeUs - mFirstTimeUs) > getMaxLengthMillis() * 1000L;
- if (didReachMaxLength && !endOfStream) {
- LOG.w("read thread - this frame reached the maxLength! deltaUs:", mLastTimeUs - mFirstTimeUs);
- notifyMaxLengthReached();
- }
-
- // Add zeroes if we have huge gaps. Even if timestamps are correct, if we have gaps between
- // them, the encoder might shrink all timestamps to have a continuous audio. This results
- // in a video that is fast-forwarded.
- // Adding zeroes does not solve the gaps issue - audio will still be distorted. But at
- // least we get a video that has the correct playback speed.
- if (PERFORMANCE_FILL_GAPS) {
- int gaps = mTimestamp.getGapCount(mConfig.frameSize());
- if (gaps > 0) {
- long gapStart = mTimestamp.getGapStartUs(mLastTimeUs);
- long frameUs = AudioTimestamp.bytesToUs(mConfig.frameSize(), mConfig.byteRate());
- LOG.w("read thread - GAPS: trying to add", gaps, "zeroed buffers");
- for (int i = 0; i < gaps; i++) {
- ByteBuffer zeroBuffer = mByteBufferPool.get();
- if (zeroBuffer == null) {
- LOG.e("read thread - GAPS: aborting because we have no free buffer.");
- break;
- }
- ;
- zeroBuffer.position(0);
- zeroBuffer.put(mZeroBuffer);
- zeroBuffer.clear();
- enqueue(zeroBuffer, gapStart, false);
- gapStart += frameUs;
- }
+ if (!hasReachedMaxLength()) {
+ boolean didReachMaxLength = (mLastTimeUs - mFirstTimeUs) > getMaxLengthMillis() * 1000L;
+ if (didReachMaxLength && !endOfStream) {
+ LOG.w("read thread - this frame reached the maxLength! deltaUs:", mLastTimeUs - mFirstTimeUs);
+ notifyMaxLengthReached();
}
}
+
+ // Maybe add noise.
+ maybeAddNoise();
}
private void enqueue(@NonNull ByteBuffer byteBuffer, long timestamp, boolean isEndOfStream) {
if (PERFORMANCE_DEBUG) {
- mSendStartMap.put(timestamp, System.nanoTime() / 1000000);
+ mDebugSendStartMap.put(timestamp, System.nanoTime() / 1000000);
}
int readBytes = byteBuffer.remaining();
InputBuffer inputBuffer = mInputBufferPool.get();
@@ -283,6 +273,45 @@ public class AudioMediaEncoder extends MediaEncoder {
mInputBufferQueue.add(inputBuffer);
}
+ /**
+ * If our {@link AudioTimestamp} detected huge gaps, and the performance flag is enabled,
+ * we can add noise to fill them.
+ *
+ * Even if we always pass the correct timestamps, if there are big gaps between the frames,
+ * the encoder implementation might shrink all timestamps to have a continuous audio.
+ * This results in a video that is fast-forwarded.
+ *
+ * Adding noise does not solve the gaps issue, we'll still have distorted audio, but
+ * at least we get a video that has the correct playback speed.
+ *
+ * NOTE: this MUST be fast!
+ * If this operation is slow, we make the {@link AudioRecordingThread} busy, so we'll
+ * read the next frame with a delay, so we'll have even more gaps at the next call
+ * and spend even more time here. The result might be recording no audio at all - just
+ * random noise.
+ * This is the reason why we have a {@link #PERFORMANCE_MAX_GAPS} number.
+ */
+ private void maybeAddNoise() {
+ if (!PERFORMANCE_FILL_GAPS) return;
+ int gaps = mTimestamp.getGapCount(mConfig.frameSize());
+ if (gaps <= 0) return;
+
+ long gapStart = mTimestamp.getGapStartUs(mLastTimeUs);
+ long frameUs = AudioTimestamp.bytesToUs(mConfig.frameSize(), mConfig.byteRate());
+ LOG.w("read thread - GAPS: trying to add", gaps, "noise buffers. PERFORMANCE_MAX_GAPS:", PERFORMANCE_MAX_GAPS);
+ for (int i = 0; i < Math.min(gaps, PERFORMANCE_MAX_GAPS); i++) {
+ ByteBuffer noiseBuffer = mByteBufferPool.get();
+ if (noiseBuffer == null) {
+ LOG.e("read thread - GAPS: aborting because we have no free buffer.");
+ break;
+ }
+ noiseBuffer.clear();
+ mAudioNoise.fill(noiseBuffer);
+ noiseBuffer.rewind();
+ enqueue(noiseBuffer, gapStart, false);
+ gapStart += frameUs;
+ }
+ }
}
/**
@@ -311,10 +340,11 @@ public class AudioMediaEncoder extends MediaEncoder {
// Performance logging
if (PERFORMANCE_DEBUG) {
long sendEnd = System.nanoTime() / 1000000;
- Long sendStart = mSendStartMap.remove(inputBuffer.timestamp);
+ Long sendStart = mDebugSendStartMap.remove(inputBuffer.timestamp);
+ //noinspection StatementWithEmptyBody
if (sendStart != null) {
- mAvgSendDelay = ((mAvgSendDelay * mSendCount) + (sendEnd - sendStart)) / (++mSendCount);
- LOG.v("send delay millis:", sendEnd - sendStart, "average:", mAvgSendDelay);
+ mDebugSendAvgDelay = ((mDebugSendAvgDelay * mDebugSendCount) + (sendEnd - sendStart)) / (++mDebugSendCount);
+ LOG.v("send delay millis:", sendEnd - sendStart, "average:", mDebugSendAvgDelay);
} else {
// This input buffer was already processed (but tryAcquire failed for now).
}
@@ -338,8 +368,8 @@ public class AudioMediaEncoder extends MediaEncoder {
if (PERFORMANCE_DEBUG) {
// After latest changes, the count here is not so different between MONO and STEREO.
// We get about 400 frames in both cases (430 for MONO, but doesn't seem like a big issue).
- LOG.e("EXECUTE DELAY MILLIS:", mAvgExecuteDelay, "COUNT:", mExecuteCount);
- LOG.e("SEND DELAY MILLIS:", mAvgSendDelay, "COUNT:", mSendCount);
+ LOG.e("EXECUTE DELAY MILLIS:", mDebugExecuteAvgDelay, "COUNT:", mDebugExecuteCount);
+ LOG.e("SEND DELAY MILLIS:", mDebugSendAvgDelay, "COUNT:", mDebugSendCount);
}
}
@@ -357,12 +387,12 @@ public class AudioMediaEncoder extends MediaEncoder {
// NOTE: can consider calling this drainOutput on yet another thread, which would let us
// use an even smaller BUFFER_POOL_MAX_SIZE without losing audio frames. But this way
// we can accumulate delay on this new thread without noticing (no pool getting empty).
- drainOutput(buffer.isEndOfStream);
+ drainOutput(eos);
if (PERFORMANCE_DEBUG) {
long executeEnd = System.nanoTime() / 1000000;
- mAvgExecuteDelay = ((mAvgExecuteDelay * mExecuteCount) + (executeEnd - executeStart)) / (++mExecuteCount);
- LOG.v("execute delay millis:", executeEnd - executeStart, "average:", mAvgExecuteDelay);
+ mDebugExecuteAvgDelay = ((mDebugExecuteAvgDelay * mDebugExecuteCount) + (executeEnd - executeStart)) / (++mDebugExecuteCount);
+ LOG.v("execute delay millis:", executeEnd - executeStart, "average:", mDebugExecuteAvgDelay);
}
}
}
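
To put numbers on the gap logic above - assuming `AudioTimestamp.bytesToUs(bytes, byteRate)` is the usual bytes-to-microseconds conversion, which is what its call sites imply:

```java
// Hypothetical frame size; the conversion formula is an assumption based on
// how bytesToUs is used above, not a quote of its implementation.
long byteRate = 88200;  // mono, 44.1kHz, 16 bit
long frameBytes = 1024;
long frameUs = frameBytes * 1_000_000L / byteRate; // ~11610 us, i.e. ~11.6 ms per frame
// A ~100 ms stall in the read loop therefore spans 8-9 frames, the same order
// of magnitude as the PERFORMANCE_MAX_GAPS = 8 cap.
```
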
diff --git a/cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/AudioNoise.java b/cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/AudioNoise.java
new file mode 100644
index 00000000..ec60645c
--- /dev/null
+++ b/cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/AudioNoise.java
@@ -0,0 +1,59 @@
+package com.otaliastudios.cameraview.video.encoding;
+
+import androidx.annotation.NonNull;
+
+import java.nio.Buffer;
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+import java.nio.ShortBuffer;
+import java.util.Random;
+
+/**
+ * An AudioNoise instance offers buffers of noise that we can use when the recording
+ * of some samples has failed for some reason.
+ *
+ * Since we can't create noise every time it's needed - that would be expensive and
+ * slow down the recording thread - we create a big noise buffer at start time.
+ *
+ * We'd like to work with {@link ShortBuffer}s, but this requires converting the
+ * input buffer to ShortBuffer each time, and this can be expensive.
+ */
+class AudioNoise {
+
+ private final static int FRAMES = 1; // After testing, it looks like this is the best setup
+ private final static Random RANDOM = new Random();
+
+ private final ByteBuffer mNoiseBuffer;
+
+ AudioNoise(@NonNull AudioConfig config) {
+ //noinspection ConstantConditions
+ if (config.sampleSizePerChannel != 2) {
+ throw new IllegalArgumentException("AudioNoise expects 2bytes-1short samples.");
+ }
+ mNoiseBuffer = ByteBuffer
+ .allocateDirect(config.frameSize() * FRAMES)
+ .order(ByteOrder.nativeOrder());
+ double i = 0;
+ double frequency = config.frameSize() / 2D; // every X samples, the signal repeats
+ double step = Math.PI / frequency; // the increase in radians
+ double max = 10; // might choose this from 0 to Short.MAX_VALUE
+ while (mNoiseBuffer.hasRemaining()) {
+ short noise = (short) (Math.sin(++i * step) * max);
+ mNoiseBuffer.put((byte) noise);
+ mNoiseBuffer.put((byte) (noise >> 8));
+ }
+ mNoiseBuffer.rewind();
+ }
+
+ void fill(@NonNull ByteBuffer outBuffer) {
+ mNoiseBuffer.clear();
+ if (mNoiseBuffer.capacity() == outBuffer.remaining()) {
+ mNoiseBuffer.position(0); // Happens if FRAMES = 1.
+ } else {
+ mNoiseBuffer.position(RANDOM.nextInt(mNoiseBuffer.capacity()
+ - outBuffer.remaining()));
+ }
+ mNoiseBuffer.limit(mNoiseBuffer.position() + outBuffer.remaining());
+ outBuffer.put(mNoiseBuffer);
+ }
+}
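
One detail worth spelling out: the filler tone above uses an amplitude of 10 against a 16-bit full scale of 32767, which is roughly -70 dBFS, quiet enough to be inaudible next to real microphone input. As a sanity check:

```java
double dbfs = 20 * Math.log10(10.0 / 32767.0);
System.out.println(dbfs); // ≈ -70.3
```
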
diff --git a/cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/MediaEncoder.java b/cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/MediaEncoder.java
index d3efef0d..12a52743 100644
--- a/cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/MediaEncoder.java
+++ b/cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/MediaEncoder.java
@@ -14,6 +14,9 @@ import com.otaliastudios.cameraview.CameraLogger;
import com.otaliastudios.cameraview.internal.utils.WorkerHandler;
import java.nio.ByteBuffer;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.concurrent.atomic.AtomicInteger;
/**
* Base class for single-track encoders, coordinated by a {@link MediaEncoderEngine}.
@@ -117,12 +120,13 @@ public abstract class MediaEncoder {
private OutputBufferPool mOutputBufferPool;
private MediaCodec.BufferInfo mBufferInfo;
private MediaCodecBuffers mBuffers;
+ private final Map<String, AtomicInteger> mPendingEvents = new HashMap<>();
private long mMaxLengthMillis;
private boolean mMaxLengthReached;
private long mStartTimeMillis = 0; // In System.currentTimeMillis()
- private long mStartTimeUs = Long.MIN_VALUE; // In unknown reference
+ private long mFirstTimeUs = Long.MIN_VALUE; // In unknown reference
private long mLastTimeUs = 0;
private long mDebugSetStateTimestamp = Long.MIN_VALUE;
@@ -176,6 +180,7 @@ public abstract class MediaEncoder {
mBufferInfo = new MediaCodec.BufferInfo();
mMaxLengthMillis = maxLengthMillis;
mWorker = WorkerHandler.get(mName);
+ mWorker.getThread().setPriority(Thread.MAX_PRIORITY);
LOG.i(mName, "Prepare was called. Posting.");
mWorker.post(new Runnable() {
@Override
@@ -223,13 +228,18 @@ public abstract class MediaEncoder {
* @param event what happened
* @param data object
*/
+ @SuppressWarnings("ConstantConditions")
final void notify(final @NonNull String event, final @Nullable Object data) {
- LOG.v(mName, "Notify was called. Posting.");
+ if (!mPendingEvents.containsKey(event)) mPendingEvents.put(event, new AtomicInteger(0));
+ final AtomicInteger pendingEvents = mPendingEvents.get(event);
+ pendingEvents.incrementAndGet();
+ LOG.v(mName, "Notify was called. Posting. pendingEvents:", pendingEvents.intValue());
mWorker.post(new Runnable() {
@Override
public void run() {
- LOG.v(mName, "Notify was called. Executing.");
+ LOG.v(mName, "Notify was called. Executing. pendingEvents:", pendingEvents.intValue());
onEvent(event, data);
+ pendingEvents.decrementAndGet();
}
});
}
@@ -315,6 +325,7 @@ public abstract class MediaEncoder {
mOutputBufferPool = null;
mBuffers = null;
setState(STATE_STOPPED);
+ mWorker.destroy();
}
/**
@@ -357,7 +368,9 @@ public abstract class MediaEncoder {
*/
@SuppressWarnings("WeakerAccess")
protected void encodeInputBuffer(InputBuffer buffer) {
- LOG.v(mName, "ENCODING - Buffer:", buffer.index, "Bytes:", buffer.length, "Presentation:", buffer.timestamp);
+ LOG.v(mName, "ENCODING - Buffer:", buffer.index,
+ "Bytes:", buffer.length,
+ "Presentation:", buffer.timestamp);
if (buffer.isEndOfStream) { // send EOS
mMediaCodec.queueInputBuffer(buffer.index, 0, 0,
buffer.timestamp, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
@@ -379,8 +392,8 @@ public abstract class MediaEncoder {
*/
@SuppressLint("LogNotTimber")
@SuppressWarnings("WeakerAccess")
- protected void drainOutput(boolean drainAll) {
- LOG.v(mName, "DRAINING - EOS:", drainAll);
+ protected final void drainOutput(boolean drainAll) {
+ LOG.i(mName, "DRAINING - EOS:", drainAll);
if (mMediaCodec == null) {
LOG.e("drain() was called before prepare() or after releasing.");
return;
@@ -422,9 +435,9 @@ public abstract class MediaEncoder {
// Store mStartTimeUs and mLastTimeUs, useful to detect the max length
// reached and stop recording when needed.
- if (mStartTimeUs == Long.MIN_VALUE) {
- mStartTimeUs = mBufferInfo.presentationTimeUs;
- LOG.w(mName, "DRAINING - Got the first presentation time:", mStartTimeUs);
+ if (mFirstTimeUs == Long.MIN_VALUE) {
+ mFirstTimeUs = mBufferInfo.presentationTimeUs;
+ LOG.w(mName, "DRAINING - Got the first presentation time:", mFirstTimeUs);
}
mLastTimeUs = mBufferInfo.presentationTimeUs;
@@ -434,16 +447,16 @@ public abstract class MediaEncoder {
// To address this, encoders are required to call notifyFirstFrameMillis
// so we can adjust here - moving to 1970 reference.
// Extra benefit: we never pass a pts equal to 0, which some encoders refuse.
- mBufferInfo.presentationTimeUs = (mStartTimeMillis * 1000) + mLastTimeUs - mStartTimeUs;
+ mBufferInfo.presentationTimeUs = (mStartTimeMillis * 1000) + mLastTimeUs - mFirstTimeUs;
// Write.
- LOG.v(mName, "DRAINING - About to write(). Adjusted presentation:", mBufferInfo.presentationTimeUs);
+ LOG.i(mName, "DRAINING - About to write(). Adjusted presentation:", mBufferInfo.presentationTimeUs);
OutputBuffer buffer = mOutputBufferPool.get();
//noinspection ConstantConditions
buffer.info = mBufferInfo;
buffer.trackIndex = mTrackIndex;
buffer.data = encodedData;
- mController.write(mOutputBufferPool, buffer);
+ onWriteOutput(mOutputBufferPool, buffer);
}
mMediaCodec.releaseOutputBuffer(encoderStatus, false);
@@ -451,10 +464,11 @@ public abstract class MediaEncoder {
// Not needed if drainAll because we already were asked to stop
if (!drainAll
&& !mMaxLengthReached
- && mStartTimeUs != Long.MIN_VALUE
- && mLastTimeUs - mStartTimeUs > mMaxLengthMillis * 1000) {
+ && mFirstTimeUs != Long.MIN_VALUE
+ && mLastTimeUs - mFirstTimeUs > mMaxLengthMillis * 1000) {
LOG.w(mName, "DRAINING - Reached maxLength! mLastTimeUs:", mLastTimeUs,
- "mStartTimeUs:", mStartTimeUs,
+ "mStartTimeUs:", mFirstTimeUs,
+ "mDeltaUs:", mLastTimeUs - mFirstTimeUs,
"mMaxLengthUs:", mMaxLengthMillis * 1000);
onMaxLengthReached();
break;
@@ -470,6 +484,11 @@ public abstract class MediaEncoder {
}
}
+ @CallSuper
+ protected void onWriteOutput(@NonNull OutputBufferPool pool, @NonNull OutputBuffer buffer) {
+ mController.write(pool, buffer);
+ }
+
protected abstract int getEncodedBitRate();
/**
@@ -494,6 +513,11 @@ public abstract class MediaEncoder {
onMaxLengthReached();
}
+ @SuppressWarnings("WeakerAccess")
+ protected boolean hasReachedMaxLength() {
+ return mMaxLengthReached;
+ }
+
/**
* Called by us (during {@link #drainOutput(boolean)}) or by subclasses
* (through {@link #notifyMaxLengthReached()}) to notify that we reached the
@@ -520,7 +544,20 @@ public abstract class MediaEncoder {
* @param firstFrameMillis the milliseconds of the first frame presentation
*/
@SuppressWarnings("WeakerAccess")
- protected void notifyFirstFrameMillis(long firstFrameMillis) {
+ protected final void notifyFirstFrameMillis(long firstFrameMillis) {
mStartTimeMillis = firstFrameMillis;
}
+
+ /**
+ * Returns the number of events (see {@link #onEvent(String, Object)}) that were scheduled
+ * but still not passed to that function. Could be used to drop some of them if this
+ * number is too high.
+ *
+ * @param event the event type
+ * @return the pending events number
+ */
+ @SuppressWarnings({"SameParameterValue", "ConstantConditions", "WeakerAccess"})
+ protected final int getPendingEvents(@NonNull String event) {
+ return mPendingEvents.get(event).intValue();
+ }
}
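
The pending-events bookkeeping added here is a small backpressure probe: increment when an event is posted, decrement once it has been handled, and let producers peek at the difference to decide whether to drop work. The same pattern, sketched outside this class (the `Executor` stands in for the `WorkerHandler` used above, and the threshold of 2 mirrors what TextureMediaEncoder uses below):

```java
import java.util.concurrent.Executor;
import java.util.concurrent.atomic.AtomicInteger;

// Generic sketch of the notify()/getPendingEvents() pattern, not library code.
class PendingCounter {
    private final AtomicInteger pending = new AtomicInteger(0);

    void post(Executor executor, final Runnable task) {
        pending.incrementAndGet();
        executor.execute(new Runnable() {
            @Override
            public void run() {
                try {
                    task.run();
                } finally {
                    pending.decrementAndGet();
                }
            }
        });
    }

    boolean isBacklogged() {
        return pending.get() > 2; // producers can drop work when this is true
    }
}
```
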
diff --git a/cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/MediaEncoderEngine.java b/cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/MediaEncoderEngine.java
index 6e2725dc..0ee9e60a 100644
--- a/cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/MediaEncoderEngine.java
+++ b/cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/MediaEncoderEngine.java
@@ -7,6 +7,7 @@ import android.os.Build;
import android.text.format.DateFormat;
import com.otaliastudios.cameraview.CameraLogger;
+import com.otaliastudios.cameraview.internal.utils.WorkerHandler;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
@@ -68,8 +69,9 @@ public class MediaEncoderEngine {
void onEncodingStart();
/**
- * Called when encoding stopped. At this point the mxuer might still be processing,
- * but we have stopped receiving input (recording video and audio frames).
+ * Called when encoding stopped. At this point the muxer or the encoders might still be
+ * processing data, but we have stopped receiving input (recording video and audio frames).
+ * Actually, we will stop very soon.
*
* The {@link #onEncodingEnd(int, Exception)} callback will soon be called
* with the results.
@@ -96,17 +98,18 @@ public class MediaEncoderEngine {
public final static int END_BY_MAX_DURATION = 1;
public final static int END_BY_MAX_SIZE = 2;
- private List<MediaEncoder> mEncoders;
+ private final List<MediaEncoder> mEncoders = new ArrayList<>();
private MediaMuxer mMediaMuxer;
- private int mStartedEncodersCount;
- private int mReleasedEncodersCount;
- private boolean mMediaMuxerStarted;
+ private int mStartedEncodersCount = 0;
+ private int mStoppedEncodersCount = 0;
+ private boolean mMediaMuxerStarted = false;
@SuppressWarnings("FieldCanBeLocal")
- private Controller mController;
+ private final Controller mController = new Controller();
+ private final WorkerHandler mControllerThread = WorkerHandler.get("EncoderEngine");
+ private final Object mControllerLock = new Object();
private Listener mListener;
private int mEndReason = END_BY_USER;
private int mPossibleEndReason;
- private final Object mControllerLock = new Object();
/**
* Creates a new engine for the given file, with the given encoders and max limits,
@@ -126,8 +129,6 @@ public class MediaEncoderEngine {
final long maxSize,
@Nullable Listener listener) {
mListener = listener;
- mController = new Controller();
- mEncoders = new ArrayList<>();
mEncoders.add(videoEncoder);
if (audioEncoder != null) {
mEncoders.add(audioEncoder);
@@ -137,9 +138,6 @@ public class MediaEncoderEngine {
} catch (IOException e) {
throw new RuntimeException(e);
}
- mStartedEncodersCount = 0;
- mMediaMuxerStarted = false;
- mReleasedEncodersCount = 0;
// Trying to convert the size constraints to duration constraints,
// because they are super easy to check.
@@ -203,6 +201,9 @@ public class MediaEncoderEngine {
for (MediaEncoder encoder : mEncoders) {
encoder.stop();
}
+ if (mListener != null) {
+ mListener.onEncodingStop();
+ }
}
/**
@@ -218,10 +219,14 @@ public class MediaEncoderEngine {
// went wrong, and we propagate that to the listener.
try {
mMediaMuxer.stop();
- mMediaMuxer.release();
} catch (Exception e) {
error = e;
}
+ try {
+ mMediaMuxer.release();
+ } catch (Exception e) {
+ if (error == null) error = e;
+ }
mMediaMuxer = null;
}
LOG.w("end:", "Dispatching end to listener - reason:", mEndReason, "error:", error);
@@ -231,8 +236,9 @@ public class MediaEncoderEngine {
}
mEndReason = END_BY_USER;
mStartedEncodersCount = 0;
- mReleasedEncodersCount = 0;
+ mStoppedEncodersCount = 0;
mMediaMuxerStarted = false;
+ mControllerThread.destroy();
LOG.i("end:", "Completed.");
}
@@ -281,11 +287,18 @@ public class MediaEncoderEngine {
LOG.w("notifyStarted:", "Assigned track", track, "to format", format.getString(MediaFormat.KEY_MIME));
if (++mStartedEncodersCount == mEncoders.size()) {
LOG.w("notifyStarted:", "All encoders have started. Starting muxer and dispatching onEncodingStart().");
- mMediaMuxer.start();
- mMediaMuxerStarted = true;
- if (mListener != null) {
- mListener.onEncodingStart();
- }
+ // Post on a different thread, since this one might be very important for the
+ // encoders and we don't want to perform expensive operations here.
+ mControllerThread.run(new Runnable() {
+ @Override
+ public void run() {
+ mMediaMuxer.start();
+ mMediaMuxerStarted = true;
+ if (mListener != null) {
+ mListener.onEncodingStart();
+ }
+ }
+ });
}
return track;
}
@@ -322,10 +335,6 @@ public class MediaEncoderEngine {
* large differences.
*/
public void write(@NonNull OutputBufferPool pool, @NonNull OutputBuffer buffer) {
- if (!mMediaMuxerStarted) {
- throw new IllegalStateException("Trying to write before muxer started");
- }
-
if (DEBUG_PERFORMANCE) {
// When AUDIO = mono, this is called about twice the time. (200 vs 100 for 5 sec).
Integer count = mDebugCount.get(buffer.trackIndex);
@@ -342,7 +351,6 @@ public class MediaEncoderEngine {
"track:", buffer.trackIndex,
"presentation:", buffer.info.presentationTimeUs);
}
-
mMediaMuxer.writeSampleData(buffer.trackIndex, buffer.data, buffer.info);
pool.recycle(buffer);
}
@@ -360,7 +368,14 @@ public class MediaEncoderEngine {
if (--mStartedEncodersCount == 0) {
LOG.w("requestStop:", "All encoders have requested a stop. Stopping them.");
mEndReason = mPossibleEndReason;
- stop();
+ // Post on a different thread, since this one might be very important for the
+ // encoders and we don't want to perform expensive operations here.
+ mControllerThread.run(new Runnable() {
+ @Override
+ public void run() {
+ stop();
+ }
+ });
}
}
}
@@ -372,12 +387,16 @@ public class MediaEncoderEngine {
public void notifyStopped(int track) {
synchronized (mControllerLock) {
LOG.w("notifyStopped:", "Called for track", track);
- if (++mReleasedEncodersCount == mEncoders.size()) {
- LOG.w("requestStop:", "All encoders have been released. Stopping the muxer.");
- if (mListener != null) {
- mListener.onEncodingStop();
- }
- end();
+ if (++mStoppedEncodersCount == mEncoders.size()) {
+ LOG.w("requestStop:", "All encoders have been stopped. Stopping the muxer.");
+ // Post on a different thread, since this one might be very important for the
+ // encoders and we don't want to perform expensive operations here.
+ mControllerThread.run(new Runnable() {
+ @Override
+ public void run() {
+ end();
+ }
+ });
}
}
}
diff --git a/cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/TextureConfig.java b/cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/TextureConfig.java
index 4f022713..67ada078 100644
--- a/cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/TextureConfig.java
+++ b/cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/TextureConfig.java
@@ -4,16 +4,19 @@ import android.opengl.EGLContext;
import androidx.annotation.NonNull;
+import com.otaliastudios.cameraview.internal.Issue514Workaround;
+import com.otaliastudios.cameraview.overlay.Overlay;
+import com.otaliastudios.cameraview.overlay.OverlayDrawer;
+
/**
* Video configuration to be passed as input to the constructor
* of a {@link TextureMediaEncoder}.
*/
public class TextureConfig extends VideoConfig {
- private final static int NO_TEXTURE = Integer.MIN_VALUE;
-
- public int textureId = NO_TEXTURE;
- public int overlayTextureId = NO_TEXTURE;
+ public int textureId;
+ public Overlay.Target overlayTarget;
+ public OverlayDrawer overlayDrawer;
public int overlayRotation;
public float scaleX;
public float scaleY;
@@ -24,7 +27,8 @@ public class TextureConfig extends VideoConfig {
TextureConfig copy = new TextureConfig();
copy(copy);
copy.textureId = this.textureId;
- copy.overlayTextureId = this.overlayTextureId;
+ copy.overlayDrawer = this.overlayDrawer;
+ copy.overlayTarget = this.overlayTarget;
copy.overlayRotation = this.overlayRotation;
copy.scaleX = this.scaleX;
copy.scaleY = this.scaleY;
@@ -33,6 +37,6 @@ public class TextureConfig extends VideoConfig {
}
boolean hasOverlay() {
- return overlayTextureId != NO_TEXTURE;
+ return overlayDrawer != null;
}
}
diff --git a/cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/TextureMediaEncoder.java b/cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/TextureMediaEncoder.java
index fcd78cd4..c3ce7377 100644
--- a/cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/TextureMediaEncoder.java
+++ b/cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/TextureMediaEncoder.java
@@ -36,6 +36,8 @@ public class TextureMediaEncoder extends VideoMediaEncoder<TextureConfig> {
}
});
+ private long mFirstTimeUs = Long.MIN_VALUE;
+
public TextureMediaEncoder(@NonNull TextureConfig config) {
super(config.copy());
}
@@ -51,7 +53,7 @@ public class TextureMediaEncoder extends VideoMediaEncoder {
* Nanoseconds, in no meaningful time-base. Will be used for offsets only.
* Typically this comes from {@link SurfaceTexture#getTimestamp()}.
*/
- public long timestamp;
+ public long timestampNanos;
/**
* Milliseconds in the {@link System#currentTimeMillis()} reference.
@@ -64,10 +66,9 @@ public class TextureMediaEncoder extends VideoMediaEncoder {
*/
public float[] transform = new float[16];
- /**
- * The transformation matrix for the overlay texture, if any.
- */
- public float[] overlayTransform = new float[16];
+ private long timestampUs() {
+ return timestampNanos / 1000L;
+ }
}
/**
@@ -94,11 +95,40 @@ public class TextureMediaEncoder extends VideoMediaEncoder {
super.onPrepare(controller, maxLengthMillis);
mEglCore = new EglCore(mConfig.eglContext, EglCore.FLAG_RECORDABLE);
mWindow = new EglWindowSurface(mEglCore, mSurface, true);
- mWindow.makeCurrent(); // drawing will happen on the InputWindowSurface, which
- // is backed by mVideoEncoder.getInputSurface()
+ mWindow.makeCurrent();
mViewport = new EglViewport();
}
+ /**
+ * Any number of pending events greater than 1 means that we should skip this frame.
+ * To avoid skipping too many frames, we'll accept up to 2 for now; this just means
+ * that we might draw the same frame twice.
+ *
+ * When an event is posted, the textureId data has already been updated so we're
+ * too late to draw the old one and it should be skipped.
+ *
+ * This is especially important if we perform overlay drawing here, since that
+ * makes this class thread busy and slows down the event dispatching.
+ *
+ * @param timestampUs frame timestamp
+ * @return true to render
+ */
+ @Override
+ protected boolean shouldRenderFrame(long timestampUs) {
+ if (!super.shouldRenderFrame(timestampUs)) {
+ return false;
+ } else if (mFrameNumber <= 10) {
+ // Always render the first few frames, or muxer fails.
+ return true;
+ } else if (getPendingEvents(FRAME_EVENT) > 2) {
+ LOG.w("shouldRenderFrame - Dropping frame because we already have too many pending events:",
+ getPendingEvents(FRAME_EVENT));
+ return false;
+ } else {
+ return true;
+ }
+ }
+
@EncoderThread
@Override
protected void onEvent(@NonNull String event, @Nullable Object data) {
@@ -107,30 +137,46 @@ public class TextureMediaEncoder extends VideoMediaEncoder {
if (frame == null) {
throw new IllegalArgumentException("Got null frame for FRAME_EVENT.");
}
- if (frame.timestamp == 0) { // grafika
- mFramePool.recycle(frame);
- return;
- }
- if (mFrameNumber < 0) { // We were asked to stop.
+ if (!shouldRenderFrame(frame.timestampUs())) {
mFramePool.recycle(frame);
return;
}
- mFrameNumber++;
+
+ // Notify we've got the first frame and its absolute time.
if (mFrameNumber == 1) {
notifyFirstFrameMillis(frame.timestampMillis);
}
+ // Check if we have reached the max length value, and notify if so.
+ if (mFirstTimeUs == Long.MIN_VALUE) mFirstTimeUs = frame.timestampUs();
+ if (!hasReachedMaxLength()) {
+ boolean didReachMaxLength = (frame.timestampUs() - mFirstTimeUs) > getMaxLengthMillis() * 1000L;
+ if (didReachMaxLength) {
+ LOG.w("onEvent -",
+ "frameNumber:", mFrameNumber,
+ "timestampUs:", frame.timestampUs(),
+ "firstTimeUs:", mFirstTimeUs,
+ "- reached max length! deltaUs:", frame.timestampUs() - mFirstTimeUs);
+ notifyMaxLengthReached();
+ }
+ }
+
// First, drain any previous data.
- LOG.i("onEvent", "frameNumber:", mFrameNumber, "timestamp:", frame.timestamp, "- draining.");
+ LOG.i("onEvent -",
+ "frameNumber:", mFrameNumber,
+ "timestampUs:", frame.timestampUs(),
+ "- draining.");
drainOutput(false);
// Then draw on the surface.
- LOG.i("onEvent", "frameNumber:", mFrameNumber, "timestamp:", frame.timestamp, "- drawing.");
+ LOG.i("onEvent -",
+ "frameNumber:", mFrameNumber,
+ "timestampUs:", frame.timestampUs(),
+ "- rendering.");
// 1. We must scale this matrix like GlCameraPreview does, because it might have some cropping.
// Scaling takes place with respect to the (0, 0, 0) point, so we must apply a Translation to compensate.
float[] transform = frame.transform;
- float[] overlayTransform = frame.overlayTransform;
float scaleX = mConfig.scaleX;
float scaleY = mConfig.scaleY;
float scaleTranslX = (1F - scaleX) / 2F;
@@ -148,15 +194,16 @@ public class TextureMediaEncoder extends VideoMediaEncoder {
// 3. Do the same for overlays with their own rotation.
if (mConfig.hasOverlay()) {
- Matrix.translateM(overlayTransform, 0, 0.5F, 0.5F, 0);
- Matrix.rotateM(overlayTransform, 0, mConfig.overlayRotation, 0, 0, 1);
- Matrix.translateM(overlayTransform, 0, -0.5F, -0.5F, 0);
+ mConfig.overlayDrawer.draw(mConfig.overlayTarget);
+ Matrix.translateM(mConfig.overlayDrawer.getTransform(), 0, 0.5F, 0.5F, 0);
+ Matrix.rotateM(mConfig.overlayDrawer.getTransform(), 0, mConfig.overlayRotation, 0, 0, 1);
+ Matrix.translateM(mConfig.overlayDrawer.getTransform(), 0, -0.5F, -0.5F, 0);
}
mViewport.drawFrame(mConfig.textureId, transform);
if (mConfig.hasOverlay()) {
- mViewport.drawFrame(mConfig.overlayTextureId, overlayTransform);
+ mConfig.overlayDrawer.render();
}
- mWindow.setPresentationTime(frame.timestamp);
+ mWindow.setPresentationTime(frame.timestampNanos);
mWindow.swapBuffers();
mFramePool.recycle(frame);
}
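
Both the crop scaling and the overlay rotation above rely on the same trick: `android.opengl.Matrix` operations pivot around the origin, while texture coordinates live in [0, 1] with their visual center at (0.5, 0.5), so every scale or rotation is sandwiched between two translations. In isolation (the 90-degree rotation and 0.8 scale are arbitrary example values):

```java
float[] m = new float[16];
android.opengl.Matrix.setIdentityM(m, 0);
// The translate pair makes the rotation and scale pivot at (0.5, 0.5),
// the center of texture coordinate space, instead of at the origin.
android.opengl.Matrix.translateM(m, 0, 0.5F, 0.5F, 0);
android.opengl.Matrix.rotateM(m, 0, 90, 0, 0, 1);
android.opengl.Matrix.scaleM(m, 0, 0.8F, 0.8F, 1);
android.opengl.Matrix.translateM(m, 0, -0.5F, -0.5F, 0);
```
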
diff --git a/cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/VideoMediaEncoder.java b/cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/VideoMediaEncoder.java
index bc5e98f8..b542a882 100644
--- a/cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/VideoMediaEncoder.java
+++ b/cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/VideoMediaEncoder.java
@@ -7,6 +7,7 @@ import android.os.Build;
import androidx.annotation.NonNull;
import androidx.annotation.RequiresApi;
+import android.os.Bundle;
import android.view.Surface;
import com.otaliastudios.cameraview.CameraLogger;
@@ -43,6 +44,8 @@ abstract class VideoMediaEncoder<C extends VideoConfig> extends MediaEncoder {
@SuppressWarnings("WeakerAccess")
protected int mFrameNumber = -1;
+ private boolean mSyncFrameFound = false;
+
VideoMediaEncoder(@NonNull C config) {
super("VideoEncoder");
mConfig = config;
@@ -53,16 +56,16 @@ abstract class VideoMediaEncoder extends MediaEncoder {
protected void onPrepare(@NonNull MediaEncoderEngine.Controller controller, long maxLengthMillis) {
MediaFormat format = MediaFormat.createVideoFormat(mConfig.mimeType, mConfig.width, mConfig.height);
- // Set some properties. Failing to specify some of these can cause the MediaCodec
- // configure() call to throw an unhelpful exception.
+ // Failing to specify some of these can cause the MediaCodec configure() call to throw an unhelpful exception.
+ // About COLOR_FormatSurface, see https://stackoverflow.com/q/28027858/4288782
+ // This just means it is an opaque, implementation-specific format that the device GPU prefers.
+ // So as long as we use the GPU to draw, the format will match what the encoder expects.
format.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
format.setInteger(MediaFormat.KEY_BIT_RATE, mConfig.bitRate);
format.setInteger(MediaFormat.KEY_FRAME_RATE, mConfig.frameRate);
- format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 2);
+ format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1); // seconds between key frames!
format.setInteger("rotation-degrees", mConfig.rotation);
- // Create a MediaCodec encoder, and configure it with our format. Get a Surface
- // we can use for input and wrap it with a class that handles the EGL work.
try {
mMediaCodec = MediaCodec.createEncoderByType(mConfig.mimeType);
} catch (IOException e) {
@@ -92,8 +95,52 @@ abstract class VideoMediaEncoder extends MediaEncoder {
drainOutput(true);
}
+ /**
+ * The first frame that we write MUST have the BUFFER_FLAG_SYNC_FRAME flag set.
+ * It sometimes doesn't, because we might drop some frames in {@link #drainOutput(boolean)} -
+ * basically when, at the time, the muxer was not started yet, typically because the audio setup is slow.
+ *
+ * We can't add the BUFFER_FLAG_SYNC_FRAME flag to the first frame just because we'd like to.
+ * But we can drop frames until we get a sync one.
+ *
+ * @param pool the buffer pool
+ * @param buffer the buffer
+ */
+ @Override
+ protected void onWriteOutput(@NonNull OutputBufferPool pool, @NonNull OutputBuffer buffer) {
+ if (!mSyncFrameFound) {
+ LOG.w("onWriteOutput:", "sync frame not found yet. Checking.");
+ int flag = MediaCodec.BUFFER_FLAG_SYNC_FRAME;
+ boolean hasFlag = (buffer.info.flags & flag) == flag;
+ if (hasFlag) {
+ LOG.w("onWriteOutput:", "SYNC FRAME FOUND!");
+ mSyncFrameFound = true;
+ super.onWriteOutput(pool, buffer);
+ } else {
+ LOG.w("onWriteOutput:", "DROPPING FRAME and requesting a sync frame soon.");
+ if (Build.VERSION.SDK_INT >= 19) {
+ Bundle params = new Bundle();
+ params.putInt(MediaCodec.PARAMETER_KEY_REQUEST_SYNC_FRAME, 0);
+ mMediaCodec.setParameters(params);
+ }
+ pool.recycle(buffer);
+ }
+ } else {
+ super.onWriteOutput(pool, buffer);
+ }
+ }
+
@Override
protected int getEncodedBitRate() {
return mConfig.bitRate;
}
+
+ @SuppressWarnings("BooleanMethodIsAlwaysInverted")
+ protected boolean shouldRenderFrame(long timestampUs) {
+ if (timestampUs == 0) return false; // grafika said so
+ if (mFrameNumber < 0) return false; // We were asked to stop.
+ if (hasReachedMaxLength()) return false; // We were not asked yet, but we'll be soon.
+ mFrameNumber++;
+ return true;
+ }
}
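
For context, the format that onPrepare() ends up configuring looks roughly like this. A sketch only; the concrete width, height, bit rate, frame rate and rotation are placeholders:

```java
import android.media.MediaCodecInfo;
import android.media.MediaFormat;

MediaFormat format = MediaFormat.createVideoFormat("video/avc", 1280, 720);
format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
        MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface); // opaque GPU-preferred format
format.setInteger(MediaFormat.KEY_BIT_RATE, 2_000_000);
format.setInteger(MediaFormat.KEY_FRAME_RATE, 30);
format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1); // a key frame every second
format.setInteger("rotation-degrees", 90);
```
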
diff --git a/demo/src/main/AndroidManifest.xml b/demo/src/main/AndroidManifest.xml
index 18f0871a..77976f9a 100644
--- a/demo/src/main/AndroidManifest.xml
+++ b/demo/src/main/AndroidManifest.xml
@@ -1,16 +1,17 @@
+ android:theme="@style/AppTheme"
+ tools:ignore="GoogleAppIndexingWarning">
diff --git a/demo/src/main/java/com/otaliastudios/cameraview/demo/PicturePreviewActivity.java b/demo/src/main/java/com/otaliastudios/cameraview/demo/PicturePreviewActivity.java
--- a/demo/src/main/java/com/otaliastudios/cameraview/demo/PicturePreviewActivity.java
+++ b/demo/src/main/java/com/otaliastudios/cameraview/demo/PicturePreviewActivity.java
- private static WeakReference<PictureResult> image;
+ private static PictureResult picture;
- public static void setPictureResult(@Nullable PictureResult im) {
- image = im != null ? new WeakReference<>(im) : null;
+ public static void setPictureResult(@Nullable PictureResult pictureResult) {
+ picture = pictureResult;
}
@Override
protected void onCreate(@Nullable Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_picture_preview);
- final ImageView imageView = findViewById(R.id.image);
- final MessageView captureResolution = findViewById(R.id.nativeCaptureResolution);
- final MessageView captureLatency = findViewById(R.id.captureLatency);
- final MessageView exifRotation = findViewById(R.id.exifRotation);
- PictureResult result = image == null ? null : image.get();
+ final PictureResult result = picture;
if (result == null) {
finish();
return;
}
+
+ final ImageView imageView = findViewById(R.id.image);
+ final MessageView captureResolution = findViewById(R.id.nativeCaptureResolution);
+ final MessageView captureLatency = findViewById(R.id.captureLatency);
+ final MessageView exifRotation = findViewById(R.id.exifRotation);
+
final long delay = getIntent().getLongExtra("delay", 0);
AspectRatio ratio = AspectRatio.of(result.getSize());
captureLatency.setTitleAndMessage("Approx. latency", delay + " milliseconds");
diff --git a/demo/src/main/java/com/otaliastudios/cameraview/demo/VideoPreviewActivity.java b/demo/src/main/java/com/otaliastudios/cameraview/demo/VideoPreviewActivity.java
index d7921671..1d26fcc9 100644
--- a/demo/src/main/java/com/otaliastudios/cameraview/demo/VideoPreviewActivity.java
+++ b/demo/src/main/java/com/otaliastudios/cameraview/demo/VideoPreviewActivity.java
@@ -13,23 +13,28 @@ import android.widget.MediaController;
import android.widget.VideoView;
import com.otaliastudios.cameraview.VideoResult;
-
-import java.lang.ref.WeakReference;
+import com.otaliastudios.cameraview.size.AspectRatio;
public class VideoPreviewActivity extends Activity {
private VideoView videoView;
- private static WeakReference<VideoResult> videoResult;
+ private static VideoResult videoResult;
public static void setVideoResult(@Nullable VideoResult result) {
- videoResult = result != null ? new WeakReference<>(result) : null;
+ videoResult = result;
}
@Override
protected void onCreate(@Nullable Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_video_preview);
+ final VideoResult result = videoResult;
+ if (result == null) {
+ finish();
+ return;
+ }
+
videoView = findViewById(R.id.video);
videoView.setOnClickListener(new View.OnClickListener() {
@Override
@@ -46,13 +51,8 @@ public class VideoPreviewActivity extends Activity {
final MessageView videoBitRate = findViewById(R.id.videoBitRate);
final MessageView videoFrameRate = findViewById(R.id.videoFrameRate);
- final VideoResult result = videoResult == null ? null : videoResult.get();
- if (result == null) {
- finish();
- return;
- }
-
- actualResolution.setTitleAndMessage("Size", result.getSize() + "");
+ AspectRatio ratio = AspectRatio.of(result.getSize());
+ actualResolution.setTitleAndMessage("Size", result.getSize() + " (" + ratio + ")");
isSnapshot.setTitleAndMessage("Snapshot", result.isSnapshot() + "");
rotation.setTitleAndMessage("Rotation", result.getRotation() + "");
audio.setTitleAndMessage("Audio", result.getAudio().name());
@@ -85,8 +85,9 @@ public class VideoPreviewActivity extends Activity {
}
void playVideo() {
- if (videoView.isPlaying()) return;
- videoView.start();
+ if (!videoView.isPlaying()) {
+ videoView.start();
+ }
}
@Override
diff --git a/demo/src/main/res/layout/activity_camera.xml b/demo/src/main/res/layout/activity_camera.xml
index ca9c6879..d54b82c5 100644
--- a/demo/src/main/res/layout/activity_camera.xml
+++ b/demo/src/main/res/layout/activity_camera.xml
@@ -9,7 +9,6 @@
+ android:src="@mipmap/logo_foreground"/>
@@ -59,6 +57,7 @@
android:layout_height="56dp"
android:layout_margin="16dp"
android:background="@drawable/background"
+ android:elevation="3dp"
app:srcCompat="@drawable/ic_switch" />
-
-
-
-
-
+
+
+
+
\ No newline at end of file
diff --git a/demo/src/main/res/mipmap-hdpi/cameraview.png b/demo/src/main/res/mipmap-hdpi/cameraview.png
deleted file mode 100644
index 976baba8..00000000
Binary files a/demo/src/main/res/mipmap-hdpi/cameraview.png and /dev/null differ
diff --git a/demo/src/main/res/mipmap-hdpi/logo.png b/demo/src/main/res/mipmap-hdpi/logo.png
new file mode 100644
index 00000000..fbac9bc0
Binary files /dev/null and b/demo/src/main/res/mipmap-hdpi/logo.png differ
diff --git a/demo/src/main/res/mipmap-hdpi/logo_background.png b/demo/src/main/res/mipmap-hdpi/logo_background.png
new file mode 100644
index 00000000..22af45a3
Binary files /dev/null and b/demo/src/main/res/mipmap-hdpi/logo_background.png differ
diff --git a/demo/src/main/res/mipmap-hdpi/logo_foreground.png b/demo/src/main/res/mipmap-hdpi/logo_foreground.png
new file mode 100644
index 00000000..7ae9b406
Binary files /dev/null and b/demo/src/main/res/mipmap-hdpi/logo_foreground.png differ
diff --git a/demo/src/main/res/mipmap-mdpi/cameraview.png b/demo/src/main/res/mipmap-mdpi/cameraview.png
deleted file mode 100644
index b7a326cd..00000000
Binary files a/demo/src/main/res/mipmap-mdpi/cameraview.png and /dev/null differ
diff --git a/demo/src/main/res/mipmap-mdpi/logo.png b/demo/src/main/res/mipmap-mdpi/logo.png
new file mode 100644
index 00000000..c1a6c39b
Binary files /dev/null and b/demo/src/main/res/mipmap-mdpi/logo.png differ
diff --git a/demo/src/main/res/mipmap-mdpi/logo_background.png b/demo/src/main/res/mipmap-mdpi/logo_background.png
new file mode 100644
index 00000000..88ffec4d
Binary files /dev/null and b/demo/src/main/res/mipmap-mdpi/logo_background.png differ
diff --git a/demo/src/main/res/mipmap-mdpi/logo_foreground.png b/demo/src/main/res/mipmap-mdpi/logo_foreground.png
new file mode 100644
index 00000000..583ba997
Binary files /dev/null and b/demo/src/main/res/mipmap-mdpi/logo_foreground.png differ
diff --git a/demo/src/main/res/mipmap-xhdpi/cameraview.png b/demo/src/main/res/mipmap-xhdpi/cameraview.png
deleted file mode 100644
index 81965431..00000000
Binary files a/demo/src/main/res/mipmap-xhdpi/cameraview.png and /dev/null differ
diff --git a/demo/src/main/res/mipmap-xhdpi/logo.png b/demo/src/main/res/mipmap-xhdpi/logo.png
new file mode 100644
index 00000000..a4766714
Binary files /dev/null and b/demo/src/main/res/mipmap-xhdpi/logo.png differ
diff --git a/demo/src/main/res/mipmap-xhdpi/logo_background.png b/demo/src/main/res/mipmap-xhdpi/logo_background.png
new file mode 100644
index 00000000..e3d91073
Binary files /dev/null and b/demo/src/main/res/mipmap-xhdpi/logo_background.png differ
diff --git a/demo/src/main/res/mipmap-xhdpi/logo_foreground.png b/demo/src/main/res/mipmap-xhdpi/logo_foreground.png
new file mode 100644
index 00000000..9811118d
Binary files /dev/null and b/demo/src/main/res/mipmap-xhdpi/logo_foreground.png differ
diff --git a/demo/src/main/res/mipmap-xxhdpi/cameraview.png b/demo/src/main/res/mipmap-xxhdpi/cameraview.png
deleted file mode 100644
index 706d7738..00000000
Binary files a/demo/src/main/res/mipmap-xxhdpi/cameraview.png and /dev/null differ
diff --git a/demo/src/main/res/mipmap-xxhdpi/logo.png b/demo/src/main/res/mipmap-xxhdpi/logo.png
new file mode 100644
index 00000000..bfdd8329
Binary files /dev/null and b/demo/src/main/res/mipmap-xxhdpi/logo.png differ
diff --git a/demo/src/main/res/mipmap-xxhdpi/logo_background.png b/demo/src/main/res/mipmap-xxhdpi/logo_background.png
new file mode 100644
index 00000000..b342f295
Binary files /dev/null and b/demo/src/main/res/mipmap-xxhdpi/logo_background.png differ
diff --git a/demo/src/main/res/mipmap-xxhdpi/logo_foreground.png b/demo/src/main/res/mipmap-xxhdpi/logo_foreground.png
new file mode 100644
index 00000000..1159ab59
Binary files /dev/null and b/demo/src/main/res/mipmap-xxhdpi/logo_foreground.png differ
diff --git a/demo/src/main/res/mipmap-xxxhdpi/cameraview.png b/demo/src/main/res/mipmap-xxxhdpi/cameraview.png
deleted file mode 100644
index f14b3915..00000000
Binary files a/demo/src/main/res/mipmap-xxxhdpi/cameraview.png and /dev/null differ
diff --git a/demo/src/main/res/mipmap-xxxhdpi/logo.png b/demo/src/main/res/mipmap-xxxhdpi/logo.png
new file mode 100644
index 00000000..4f396ea1
Binary files /dev/null and b/demo/src/main/res/mipmap-xxxhdpi/logo.png differ
diff --git a/demo/src/main/res/mipmap-xxxhdpi/logo_background.png b/demo/src/main/res/mipmap-xxxhdpi/logo_background.png
new file mode 100644
index 00000000..4c81022f
Binary files /dev/null and b/demo/src/main/res/mipmap-xxxhdpi/logo_background.png differ
diff --git a/demo/src/main/res/mipmap-xxxhdpi/logo_foreground.png b/demo/src/main/res/mipmap-xxxhdpi/logo_foreground.png
new file mode 100644
index 00000000..9f3d1ffe
Binary files /dev/null and b/demo/src/main/res/mipmap-xxxhdpi/logo_foreground.png differ
diff --git a/demo/src/main/res/values/colors.xml b/demo/src/main/res/values/colors.xml
index 269ab9ab..20ecab1c 100644
--- a/demo/src/main/res/values/colors.xml
+++ b/demo/src/main/res/values/colors.xml
@@ -1,6 +1,6 @@
-    <color name="colorPrimary">#009966</color>
-    <color name="colorPrimaryDark">#00734d</color>
-    <color name="colorAccent">#57db27</color>
+    <color name="colorPrimary">#FFA000</color>
+    <color name="colorPrimaryDark">#F57C00</color>
+    <color name="colorAccent">#40C4FF</color>
diff --git a/docs/index.md b/docs/index.md
index eab56e4c..4081cf1d 100644
--- a/docs/index.md
+++ b/docs/index.md
@@ -8,6 +8,10 @@ title: "CameraView v2"
CameraView is a well documented, high-level library that makes capturing pictures and videos easy,
addressing most of the common issues and needs, and still leaving you with flexibility where needed.
+
+
+
+
- Fast & reliable
- Gestures support [[docs]](docs/gestures.html)
- Camera1 or Camera2 powered engine [[docs]](docs/previews.html)
@@ -22,10 +26,6 @@ addressing most of the common issues and needs, and still leaving you with flexi
- Works down to API level 15
- Well tested
-
-
-
-
### Get started
Get started with [install info](about/install.html), [quick setup](about/getting-started.html), or
diff --git a/docs/static/banner.png b/docs/static/banner.png
new file mode 100644
index 00000000..b42e3e94
Binary files /dev/null and b/docs/static/banner.png differ
diff --git a/docs/static/icon.png b/docs/static/icon.png
index 50c84432..cad21694 100644
Binary files a/docs/static/icon.png and b/docs/static/icon.png differ
diff --git a/docs/static/screen1.jpg b/docs/static/screen1.jpg
deleted file mode 100644
index 4c132942..00000000
Binary files a/docs/static/screen1.jpg and /dev/null differ
diff --git a/docs/static/screen1.png b/docs/static/screen1.png
new file mode 100644
index 00000000..0399da03
Binary files /dev/null and b/docs/static/screen1.png differ
diff --git a/docs/static/screen2.jpg b/docs/static/screen2.jpg
deleted file mode 100644
index 2f0d3a29..00000000
Binary files a/docs/static/screen2.jpg and /dev/null differ
diff --git a/docs/static/screen2.png b/docs/static/screen2.png
new file mode 100644
index 00000000..bb19cb36
Binary files /dev/null and b/docs/static/screen2.png differ
diff --git a/docs/static/screen3.jpg b/docs/static/screen3.jpg
deleted file mode 100644
index 5abf710b..00000000
Binary files a/docs/static/screen3.jpg and /dev/null differ
diff --git a/docs/static/screen3.png b/docs/static/screen3.png
new file mode 100644
index 00000000..1a30c706
Binary files /dev/null and b/docs/static/screen3.png differ