* get overlay working

* fix overlay drawing

* allow disabling overlay in pictures or videos

* Fix picture snapshot colors when there is an overlay

* Bug fixes

* Update example with watermark

* Fix bug

* Fix overlay orientation in pictures

* Fix overlay orientation in videos

* Fix overlay when changing preview size

* Fix bug

* Experiment

* Refactor EglViewport

* Refactor SnapshotPictureRecorder

* Use single EglViewport

* Refactor SnapshotVideoRecorder

* Bug fix

* fix some of the requested changes

* clean adding View to OverlayLayout

* Specify where to draw the overlay

* Refactor

* Remove unnecessary variable from CameraPreview

* Use mWithOverlay in SnapshotVideoRecorder

* Use multiple OverlayLayout

* Add explanation for OverlayLayoutManager

* override removeView
pull/502/head
Giacomo Randazzo 6 years ago committed by Mattia Iavarone
parent 7d87d4af61
commit c7edfe9408
Changed files (number of changed lines in parentheses):

  1. cameraview/src/main/java/com/otaliastudios/cameraview/CameraOptions.java (23)
  2. cameraview/src/main/java/com/otaliastudios/cameraview/CameraView.java (117)
  3. cameraview/src/main/java/com/otaliastudios/cameraview/DisableOverlayFor.java (57)
  4. cameraview/src/main/java/com/otaliastudios/cameraview/OverlayLayout.java (27)
  5. cameraview/src/main/java/com/otaliastudios/cameraview/OverlayLayoutManager.java (145)
  6. cameraview/src/main/java/com/otaliastudios/cameraview/SurfaceDrawer.java (8)
  7. cameraview/src/main/java/com/otaliastudios/cameraview/engine/Camera1Engine.java (4)
  8. cameraview/src/main/java/com/otaliastudios/cameraview/engine/Camera2Engine.java (4)
  9. cameraview/src/main/java/com/otaliastudios/cameraview/engine/CameraEngine.java (33)
  10. cameraview/src/main/java/com/otaliastudios/cameraview/internal/egl/EglViewport.java (6)
  11. cameraview/src/main/java/com/otaliastudios/cameraview/picture/SnapshotGlPictureRecorder.java (80)
  12. cameraview/src/main/java/com/otaliastudios/cameraview/preview/GlCameraPreview.java (2)
  13. cameraview/src/main/java/com/otaliastudios/cameraview/preview/SurfaceCameraPreview.java (2)
  14. cameraview/src/main/java/com/otaliastudios/cameraview/preview/TextureCameraPreview.java (2)
  15. cameraview/src/main/java/com/otaliastudios/cameraview/video/SnapshotVideoRecorder.java (56)
  16. cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/TextureMediaEncoder.java (15)
  17. cameraview/src/main/res/values/attrs.xml (8)
  18. demo/src/main/java/com/otaliastudios/cameraview/demo/CameraActivity.java (4)
  19. demo/src/main/res/layout/activity_camera.xml (26)

@@ -55,6 +55,7 @@ public class CameraOptions {
     private Set<Size> supportedVideoSizes = new HashSet<>(5);
     private Set<AspectRatio> supportedPictureAspectRatio = new HashSet<>(4);
     private Set<AspectRatio> supportedVideoAspectRatio = new HashSet<>(3);
+    private Set<DisableOverlayFor> supportedDisableOverlayFor = new HashSet<>(3);

     private boolean zoomSupported;
     private boolean exposureCorrectionSupported;

@@ -147,6 +148,12 @@ public class CameraOptions {
                 supportedVideoAspectRatio.add(AspectRatio.of(width, height));
             }
         }
+
+        // Disable overlay for
+        supportedDisableOverlayFor.add(DisableOverlayFor.NONE);
+        supportedDisableOverlayFor.add(DisableOverlayFor.PICTURE);
+        supportedDisableOverlayFor.add(DisableOverlayFor.VIDEO);
     }

     // Camera2Engine constructor.

@@ -312,6 +319,8 @@ public class CameraOptions {
             return (Collection<T>) Arrays.asList(Engine.values());
         } else if (controlClass.equals(Preview.class)) {
             return (Collection<T>) Arrays.asList(Preview.values());
+        } else if (controlClass.equals(DisableOverlayFor.class)) {
+            return (Collection<T>) getSupportedDisableOverlayFor();
         }
         // Unrecognized control.
         return Collections.emptyList();

@@ -486,4 +495,18 @@ public class CameraOptions {
     public float getExposureCorrectionMaxValue() {
         return exposureCorrectionMaxValue;
     }
+
+    /**
+     * Set of supported mode values for which the overlay can be disabled.
+     *
+     * @see DisableOverlayFor#NONE
+     * @see DisableOverlayFor#PICTURE
+     * @see DisableOverlayFor#VIDEO
+     * @return a collection of supported values.
+     */
+    @SuppressWarnings("WeakerAccess")
+    @NonNull
+    public Collection<DisableOverlayFor> getSupportedDisableOverlayFor() {
+        return Collections.unmodifiableSet(supportedDisableOverlayFor);
+    }
 }

@@ -28,6 +28,7 @@ import androidx.annotation.NonNull;
 import androidx.annotation.Nullable;
 import android.util.AttributeSet;
 import android.view.MotionEvent;
+import android.view.View;
 import android.view.ViewGroup;
 import android.widget.FrameLayout;

@@ -131,6 +132,10 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
     @SuppressWarnings({"FieldCanBeLocal", "unused"})
     private boolean mExperimental;

+    // Overlays
+    private OverlayLayoutManager mOverlayLayoutManager; // see OverlayLayoutManager for why we keep two of them
+    private OverlayLayoutManager mOverlayLayoutManagerBelow;
+
     // Threading
     private Handler mUiHandler;
     private WorkerHandler mFrameProcessorsHandler;

@@ -187,9 +192,13 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
         // Views
         mGridLinesLayout = new GridLinesLayout(context);
+        mOverlayLayoutManager = new OverlayLayoutManager(context);
+        mOverlayLayoutManagerBelow = new OverlayLayoutManager(context);
         mMarkerLayout = new MarkerLayout(context);
         addView(mGridLinesLayout);
         addView(mMarkerLayout);
+        addView(mOverlayLayoutManager);
+        addView(mOverlayLayoutManagerBelow, 0); // put it at the bottom of the FrameLayout

         // Create the engine
         doInstantiateEngine();

@@ -300,9 +309,14 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
     protected void onAttachedToWindow() {
         super.onAttachedToWindow();
         if (mCameraPreview == null) {
             // isHardwareAccelerated will return the real value only after we are
             // attached. That's why we instantiate the preview here.
             doInstantiatePreview();
+            mCameraEngine.addPictureSurfaceDrawer(mOverlayLayoutManager);
+            mCameraEngine.addPictureSurfaceDrawer(mOverlayLayoutManagerBelow);
+            mCameraEngine.addVideoSurfaceDrawer(mOverlayLayoutManager);
+            mCameraEngine.addVideoSurfaceDrawer(mOverlayLayoutManagerBelow);
         }
         if (!isInEditMode()) {
             mOrientationHelper.enable(getContext());

@@ -317,6 +331,49 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
         super.onDetachedFromWindow();
     }

+    @Override
+    public void addView(View child, ViewGroup.LayoutParams params) {
+        if (params instanceof OverlayLayoutParams) {
+            if (((OverlayLayoutParams) params).drawInPreview) {
+                mOverlayLayoutManager.addView(child, params);
+            } else {
+                mOverlayLayoutManagerBelow.addView(child, params);
+            }
+        } else {
+            super.addView(child, params);
+        }
+    }
+
+    @Override
+    public void removeView(View child) {
+        if (child.getLayoutParams() instanceof OverlayLayoutParams) {
+            if (((OverlayLayoutParams) child.getLayoutParams()).drawInPreview) {
+                mOverlayLayoutManager.removeView(child);
+            } else {
+                mOverlayLayoutManagerBelow.removeView(child);
+            }
+        } else {
+            super.removeView(child);
+        }
+    }
+
+    @Override
+    public LayoutParams generateLayoutParams(AttributeSet attributeSet) {
+        OverlayLayoutParams toBeChecked = new OverlayLayoutParams(this.getContext(), attributeSet);
+        if (toBeChecked.isOverlay()) {
+            return toBeChecked;
+        }
+        return super.generateLayoutParams(attributeSet);
+    }
+
+    @Override
+    protected ViewGroup.LayoutParams generateLayoutParams(ViewGroup.LayoutParams p) {
+        if (p instanceof OverlayLayoutParams) {
+            return p;
+        }
+        return super.generateLayoutParams(p);
+    }
+
     //endregion

     //region Measuring behavior

@@ -1218,6 +1275,7 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
         mCameraEngine.setAutoFocusResetDelay(delayMillis);
     }
+
     /**
      * Returns the current delay in milliseconds to reset the focus after an autofocus process.
      * @return the current autofocus reset delay in milliseconds.

@@ -2077,5 +2135,64 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
         }
     }

+    public static class OverlayLayoutParams extends FrameLayout.LayoutParams {
+
+        private boolean drawInPreview = false;
+        private boolean drawInPictureSnapshot = false;
+        private boolean drawInVideoSnapshot = false;
+
+        public OverlayLayoutParams(Context context, AttributeSet attributeSet) {
+            super(context, attributeSet);
+            this.readStyleParameters(context, attributeSet);
+        }
+
+        public OverlayLayoutParams(int width, int height) {
+            super(width, height);
+        }
+
+        public OverlayLayoutParams(ViewGroup.LayoutParams layoutParams) {
+            super(layoutParams);
+        }
+
+        private void readStyleParameters(Context context, AttributeSet attributeSet) {
+            TypedArray a = context.obtainStyledAttributes(attributeSet, R.styleable.CameraView_Layout);
+            try {
+                this.drawInPreview = a.getBoolean(R.styleable.CameraView_Layout_layout_drawInPreview, false);
+                this.drawInPictureSnapshot = a.getBoolean(R.styleable.CameraView_Layout_layout_drawInPictureSnapshot, false);
+                this.drawInVideoSnapshot = a.getBoolean(R.styleable.CameraView_Layout_layout_drawInVideoSnapshot, false);
+            } finally {
+                a.recycle();
+            }
+        }
+
+        public boolean isDrawInPreview() {
+            return drawInPreview;
+        }
+
+        public void setDrawInPreview(boolean drawInPreview) {
+            this.drawInPreview = drawInPreview;
+        }
+
+        public boolean isDrawInPictureSnapshot() {
+            return drawInPictureSnapshot;
+        }
+
+        public void setDrawInPictureSnapshot(boolean drawInPictureSnapshot) {
+            this.drawInPictureSnapshot = drawInPictureSnapshot;
+        }
+
+        public boolean isDrawInVideoSnapshot() {
+            return drawInVideoSnapshot;
+        }
+
+        public void setDrawInVideoSnapshot(boolean drawInVideoSnapshot) {
+            this.drawInVideoSnapshot = drawInVideoSnapshot;
+        }
+
+        public boolean isOverlay() {
+            return drawInPreview || drawInPictureSnapshot || drawInVideoSnapshot;
+        }
+    }
+
     //endregion
 }
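
For reference, a minimal sketch of how an app could add an overlay programmatically with the OverlayLayoutParams API above. The helper class name and values are illustrative, not part of this changeset, and the final public API may differ.

import android.content.Context;
import android.graphics.Color;
import android.view.ViewGroup;
import android.widget.TextView;

import com.otaliastudios.cameraview.CameraView;

class WatermarkHelper {

    // Adds a snapshot-only watermark to the given CameraView (sketch; names are hypothetical).
    static void addWatermark(Context context, CameraView cameraView) {
        TextView watermark = new TextView(context);
        watermark.setText("CameraView");
        watermark.setTextColor(Color.WHITE);

        CameraView.OverlayLayoutParams params = new CameraView.OverlayLayoutParams(
                ViewGroup.LayoutParams.WRAP_CONTENT,
                ViewGroup.LayoutParams.WRAP_CONTENT);
        params.setDrawInPreview(false);          // not visible in the live preview
        params.setDrawInPictureSnapshot(true);   // drawn into picture snapshots
        params.setDrawInVideoSnapshot(true);     // drawn into video snapshots

        // The CameraView#addView(View, LayoutParams) override added above routes
        // overlay params to the appropriate OverlayLayoutManager.
        cameraView.addView(watermark, params);
    }
}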

@@ -0,0 +1,57 @@
package com.otaliastudios.cameraview;

import androidx.annotation.Nullable;

import com.otaliastudios.cameraview.controls.Control;

/**
 * The DisableOverlayFor value allows the user to prevent the overlay from being recorded.
 *
 * @see CameraView#setDisableOverlayFor(DisableOverlayFor)
 */
public enum DisableOverlayFor implements Control {

    /**
     * Record the overlay in both picture and video snapshots.
     */
    NONE(0),

    /**
     * The picture snapshots will not contain the overlay.
     *
     * @see CameraOptions#getSupportedDisableOverlayFor()
     */
    PICTURE(1),

    /**
     * The video snapshots will not contain the overlay.
     *
     * @see CameraOptions#getSupportedDisableOverlayFor()
     */
    VIDEO(2);

    static final DisableOverlayFor DEFAULT = NONE;

    private int value;

    DisableOverlayFor(int value) {
        this.value = value;
    }

    int value() {
        return value;
    }

    @Nullable
    static DisableOverlayFor fromValue(int value) {
        DisableOverlayFor[] list = DisableOverlayFor.values();
        for (DisableOverlayFor action : list) {
            if (action.value() == value) {
                return action;
            }
        }
        return null;
    }
}
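
A hedged usage sketch for the new control: getSupportedDisableOverlayFor() comes from the CameraOptions changes above, while the setDisableOverlayFor(DisableOverlayFor) setter is only referenced by the javadoc's @see tag and is not part of this diff, so that call is an assumption about the intended API.

import com.otaliastudios.cameraview.CameraOptions;
import com.otaliastudios.cameraview.CameraView;
import com.otaliastudios.cameraview.DisableOverlayFor;

class OverlayControlExample {

    // Disables the overlay for picture snapshots, but only if the options report support.
    static void disableOverlayForPictures(CameraView cameraView, CameraOptions options) {
        if (options.getSupportedDisableOverlayFor().contains(DisableOverlayFor.PICTURE)) {
            // Assumption: setter referenced in the DisableOverlayFor javadoc, not shown in this diff.
            cameraView.setDisableOverlayFor(DisableOverlayFor.PICTURE);
        }
    }
}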

@@ -0,0 +1,27 @@
package com.otaliastudios.cameraview;

import android.content.Context;
import android.graphics.Canvas;
import android.util.AttributeSet;
import android.widget.FrameLayout;

import androidx.annotation.NonNull;
import androidx.annotation.Nullable;

class OverlayLayout extends FrameLayout {

    public OverlayLayout(@NonNull Context context) {
        super(context);
        setWillNotDraw(false);
    }

    public OverlayLayout(@NonNull Context context, @Nullable AttributeSet attrs) {
        super(context, attrs);
        setWillNotDraw(false);
    }

    public void drawOverlay(Canvas canvas) {
        super.draw(canvas);
    }
}

@@ -0,0 +1,145 @@
package com.otaliastudios.cameraview;

import android.content.Context;
import android.graphics.Canvas;
import android.util.AttributeSet;
import android.view.View;
import android.view.ViewGroup;
import android.widget.FrameLayout;

import androidx.annotation.NonNull;
import androidx.annotation.Nullable;

import java.util.HashMap;
import java.util.Map;

/**
 * This class manages {@link OverlayLayout}s.
 * The necessity for this class comes from two features of {@link View}s:
 * - a {@link View} can only have one parent
 * - the View framework does not provide a straightforward way for a {@link ViewGroup} to draw
 *   only a subset of its children.
 * We have three possible targets for an overlay {@link View} to be drawn on:
 * - camera preview
 * - picture snapshot
 * - video snapshot
 * Given the two constraints above, in order to draw exclusively on a subset of targets we need a
 * different {@link OverlayLayout} for each subset of targets. This class manages those different
 * {@link OverlayLayout}s.
 *
 * A problem remains: the views are drawn on the preview when {@link #draw(Canvas)} is called on
 * this class, so if we want to skip the preview but still draw on the picture snapshot, for
 * instance, we cannot simply change the child's visibility.
 * One way to solve this problem is to have two instances of {@link OverlayLayoutManager} and layer
 * them so that the one below is covered and hidden by the camera preview. This way only the top
 * {@link OverlayLayoutManager} is shown on top of the camera preview and we can still access the
 * bottom one's {@link OverlayLayout#draw(Canvas)} for drawing on picture snapshots.
 */
class OverlayLayoutManager extends FrameLayout implements SurfaceDrawer {

    private Map<OverlayType, OverlayLayout> mLayouts = new HashMap<>();

    public OverlayLayoutManager(@NonNull Context context) {
        super(context);
    }

    public OverlayLayoutManager(@NonNull Context context, @Nullable AttributeSet attrs) {
        super(context, attrs);
    }

    @Override
    public void addView(View child, ViewGroup.LayoutParams params) {
        // params must be an instance of OverlayLayoutParams
        if (!(params instanceof CameraView.OverlayLayoutParams)) {
            return;
        }
        OverlayType viewOverlayType = new OverlayType((CameraView.OverlayLayoutParams) params);
        if (mLayouts.containsKey(viewOverlayType)) {
            mLayouts.get(viewOverlayType).addView(child, params);
        } else {
            OverlayLayout newLayout = new OverlayLayout(getContext());
            newLayout.addView(child, params);
            super.addView(newLayout);
            mLayouts.put(viewOverlayType, newLayout);
        }
    }

    @Override
    public void removeView(View child) {
        // params must be an instance of OverlayLayoutParams
        if (!(child.getLayoutParams() instanceof CameraView.OverlayLayoutParams)) {
            return;
        }
        OverlayType viewOverlayType = new OverlayType((CameraView.OverlayLayoutParams) child.getLayoutParams());
        if (mLayouts.containsKey(viewOverlayType)) {
            mLayouts.get(viewOverlayType).removeView(child);
        }
    }

    @Override
    public void drawOnSurfaceForPictureSnapshot(Canvas surfaceCanvas) {
        surfaceCanvas.save();
        // scale factor between canvas width and this View's width
        float widthScale = surfaceCanvas.getWidth() / (float) getWidth();
        // scale factor between canvas height and this View's height
        float heightScale = surfaceCanvas.getHeight() / (float) getHeight();
        surfaceCanvas.scale(widthScale, heightScale);
        for (Map.Entry<OverlayType, OverlayLayout> entry : mLayouts.entrySet()) {
            if (entry.getKey().pictureSnapshot) {
                entry.getValue().drawOverlay(surfaceCanvas);
            }
        }
        surfaceCanvas.restore();
    }

    @Override
    public void drawOnSurfaceForVideoSnapshot(Canvas surfaceCanvas) {
        surfaceCanvas.save();
        // scale factor between canvas width and this View's width
        float widthScale = surfaceCanvas.getWidth() / (float) getWidth();
        // scale factor between canvas height and this View's height
        float heightScale = surfaceCanvas.getHeight() / (float) getHeight();
        surfaceCanvas.scale(widthScale, heightScale);
        for (Map.Entry<OverlayType, OverlayLayout> entry : mLayouts.entrySet()) {
            if (entry.getKey().videoSnapshot) {
                entry.getValue().drawOverlay(surfaceCanvas);
            }
        }
        surfaceCanvas.restore();
    }

    private class OverlayType {
        boolean preview = false;
        boolean pictureSnapshot = false;
        boolean videoSnapshot = false;

        OverlayType(CameraView.OverlayLayoutParams params) {
            this.preview = params.isDrawInPreview();
            this.pictureSnapshot = params.isDrawInPictureSnapshot();
            this.videoSnapshot = params.isDrawInVideoSnapshot();
        }

        @Override
        public boolean equals(Object o) {
            if (this == o) return true;
            if (o == null || getClass() != o.getClass()) return false;
            OverlayType that = (OverlayType) o;
            return preview == that.preview &&
                    pictureSnapshot == that.pictureSnapshot &&
                    videoSnapshot == that.videoSnapshot;
        }

        @Override
        public int hashCode() {
            int result = 0;
            result = 31 * result + (preview ? 1 : 0);
            result = 31 * result + (pictureSnapshot ? 1 : 0);
            result = 31 * result + (videoSnapshot ? 1 : 0);
            return result;
        }
    }
}

@@ -0,0 +1,8 @@
package com.otaliastudios.cameraview;

import android.graphics.Canvas;

public interface SurfaceDrawer {
    void drawOnSurfaceForPictureSnapshot(Canvas surfaceCanvas);
    void drawOnSurfaceForVideoSnapshot(Canvas surfaceCanvas);
}
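
Since SurfaceDrawer is the seam between the overlay system and the recorders, here is a minimal sketch of a custom implementation that stamps a text watermark onto the snapshot canvases. In this diff the drawers are registered internally by CameraView through the CameraEngine#addPictureSurfaceDrawer / #addVideoSurfaceDrawer methods added further below, so wiring a custom drawer from app code is an assumption, not something the changeset exposes.

import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;

import com.otaliastudios.cameraview.SurfaceDrawer;

// Hypothetical drawer that writes a text watermark near the bottom-left corner
// of both picture and video snapshots.
class TextWatermarkDrawer implements SurfaceDrawer {

    private final Paint paint = new Paint(Paint.ANTI_ALIAS_FLAG);

    TextWatermarkDrawer() {
        paint.setColor(Color.WHITE);
        paint.setTextSize(48f);
    }

    @Override
    public void drawOnSurfaceForPictureSnapshot(Canvas surfaceCanvas) {
        surfaceCanvas.drawText("CameraView", 32f, surfaceCanvas.getHeight() - 32f, paint);
    }

    @Override
    public void drawOnSurfaceForVideoSnapshot(Canvas surfaceCanvas) {
        surfaceCanvas.drawText("CameraView", 32f, surfaceCanvas.getHeight() - 32f, paint);
    }
}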

@@ -312,7 +312,7 @@ public class Camera1Engine extends CameraEngine implements
         AspectRatio outputRatio = getAngles().flip(Reference.OUTPUT, Reference.VIEW) ? viewAspectRatio.flip() : viewAspectRatio;
         if (mPreview instanceof GlCameraPreview && Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT) {
-            mPictureRecorder = new SnapshotGlPictureRecorder(stub, this, (GlCameraPreview) mPreview, outputRatio);
+            mPictureRecorder = new SnapshotGlPictureRecorder(stub, this, (GlCameraPreview) mPreview, outputRatio, getPictureSurfaceDrawers());
         } else {
             mPictureRecorder = new Snapshot1PictureRecorder(stub, this, mCamera, outputRatio);
         }

@@ -362,7 +362,7 @@ public class Camera1Engine extends CameraEngine implements
         stub.rotation = getAngles().offset(Reference.VIEW, Reference.OUTPUT, Axis.ABSOLUTE);

         // Start.
-        mVideoRecorder = new SnapshotVideoRecorder(Camera1Engine.this, glPreview);
+        mVideoRecorder = new SnapshotVideoRecorder(Camera1Engine.this, glPreview, getVideoSurfaceDrawers());
         mVideoRecorder.start(stub);
     }

@@ -615,7 +615,7 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
         stub.rotation = getAngles().offset(Reference.SENSOR, Reference.OUTPUT, Axis.RELATIVE_TO_SENSOR); // Actually it will be rotated and set to 0.
         AspectRatio outputRatio = getAngles().flip(Reference.OUTPUT, Reference.VIEW) ? viewAspectRatio.flip() : viewAspectRatio;
         if (mPreview instanceof GlCameraPreview) {
-            mPictureRecorder = new SnapshotGlPictureRecorder(stub, this, (GlCameraPreview) mPreview, outputRatio);
+            mPictureRecorder = new SnapshotGlPictureRecorder(stub, this, (GlCameraPreview) mPreview, outputRatio, getPictureSurfaceDrawers());
         } else {
             throw new RuntimeException("takePictureSnapshot with Camera2 is only supported with Preview.GL_SURFACE");
         }

@@ -709,7 +709,7 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
         stub.rotation = getAngles().offset(Reference.VIEW, Reference.OUTPUT, Axis.ABSOLUTE);

         // Start.
-        mVideoRecorder = new SnapshotVideoRecorder(this, glPreview);
+        mVideoRecorder = new SnapshotVideoRecorder(this, glPreview, getVideoSurfaceDrawers());
         mVideoRecorder.start(stub);
     }

@@ -18,6 +18,7 @@ import com.otaliastudios.cameraview.CameraException;
 import com.otaliastudios.cameraview.CameraLogger;
 import com.otaliastudios.cameraview.CameraOptions;
 import com.otaliastudios.cameraview.PictureResult;
+import com.otaliastudios.cameraview.SurfaceDrawer;
 import com.otaliastudios.cameraview.VideoResult;
 import com.otaliastudios.cameraview.engine.offset.Angles;
 import com.otaliastudios.cameraview.engine.offset.Reference;

@@ -184,6 +185,38 @@ public abstract class CameraEngine implements
     private long mAutoFocusResetDelayMillis;
     private int mSnapshotMaxWidth = Integer.MAX_VALUE; // in REF_VIEW for consistency with SizeSelectors
     private int mSnapshotMaxHeight = Integer.MAX_VALUE; // in REF_VIEW for consistency with SizeSelectors

+    private final List<SurfaceDrawer> pictureSurfaceDrawers = new ArrayList<>();
+    private final List<SurfaceDrawer> videoSurfaceDrawers = new ArrayList<>();
+
+    public void addPictureSurfaceDrawer(@NonNull SurfaceDrawer surfaceDrawer) {
+        if (!pictureSurfaceDrawers.contains(surfaceDrawer)) {
+            pictureSurfaceDrawers.add(surfaceDrawer);
+        }
+    }
+
+    public void removePictureSurfaceDrawer(@NonNull SurfaceDrawer surfaceDrawer) {
+        pictureSurfaceDrawers.remove(surfaceDrawer);
+    }
+
+    @NonNull
+    protected List<SurfaceDrawer> getPictureSurfaceDrawers() {
+        return pictureSurfaceDrawers;
+    }
+
+    public void addVideoSurfaceDrawer(@NonNull SurfaceDrawer surfaceDrawer) {
+        if (!videoSurfaceDrawers.contains(surfaceDrawer)) {
+            videoSurfaceDrawers.add(surfaceDrawer);
+        }
+    }
+
+    public void removeVideoSurfaceDrawer(@NonNull SurfaceDrawer surfaceDrawer) {
+        videoSurfaceDrawers.remove(surfaceDrawer);
+    }
+
+    @NonNull
+    protected List<SurfaceDrawer> getVideoSurfaceDrawers() {
+        return videoSurfaceDrawers;
+    }
+
     // Steps
     private final Step.Callback mStepCallback = new Step.Callback() {

@@ -145,6 +145,12 @@ public class EglViewport extends EglElement {
         GLES20.glUseProgram(mProgramHandle);
         check("glUseProgram");

+        // enable blending, from: http://www.learnopengles.com/android-lesson-five-an-introduction-to-blending/
+        GLES20.glDisable(GLES20.GL_CULL_FACE);
+        GLES20.glDisable(GLES20.GL_DEPTH_TEST);
+        GLES20.glEnable(GLES20.GL_BLEND);
+        GLES20.glBlendFunc(GLES20.GL_SRC_ALPHA, GLES20.GL_ONE_MINUS_SRC_ALPHA);
+
         // Set the texture.
         GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
         GLES20.glBindTexture(mTextureTarget, textureId);

@@ -2,6 +2,9 @@ package com.otaliastudios.cameraview.picture;
 import android.annotation.TargetApi;
 import android.graphics.Bitmap;
+import android.graphics.Canvas;
+import android.graphics.Color;
+import android.graphics.PorterDuff;
 import android.graphics.Rect;
 import android.graphics.SurfaceTexture;
 import android.opengl.EGL14;

@@ -11,8 +14,10 @@ import android.os.Build;
 import com.otaliastudios.cameraview.CameraLogger;
 import com.otaliastudios.cameraview.PictureResult;
+import com.otaliastudios.cameraview.SurfaceDrawer;
 import com.otaliastudios.cameraview.controls.Facing;
 import com.otaliastudios.cameraview.engine.CameraEngine;
+import com.otaliastudios.cameraview.engine.offset.Axis;
 import com.otaliastudios.cameraview.engine.offset.Reference;
 import com.otaliastudios.cameraview.internal.egl.EglCore;
 import com.otaliastudios.cameraview.internal.egl.EglViewport;

@@ -27,6 +32,9 @@ import com.otaliastudios.cameraview.size.Size;
 import androidx.annotation.NonNull;

+import android.view.Surface;
+
+import java.util.List;

 public class SnapshotGlPictureRecorder extends PictureRecorder {

@@ -37,15 +45,21 @@ public class SnapshotGlPictureRecorder extends PictureRecorder {
     private GlCameraPreview mPreview;
     private AspectRatio mOutputRatio;
+    private List<SurfaceDrawer> mSurfaceDrawerList;
+    private boolean mWithOverlay;

     public SnapshotGlPictureRecorder(
             @NonNull PictureResult.Stub stub,
             @NonNull CameraEngine engine,
             @NonNull GlCameraPreview preview,
-            @NonNull AspectRatio outputRatio) {
+            @NonNull AspectRatio outputRatio,
+            @NonNull List<SurfaceDrawer> surfaceDrawerList) {
         super(stub, engine);
         mEngine = engine;
         mPreview = preview;
         mOutputRatio = outputRatio;
+        mWithOverlay = true;
+        mSurfaceDrawerList = surfaceDrawerList;
     }

     @TargetApi(Build.VERSION_CODES.KITKAT)

@@ -54,18 +68,35 @@ public class SnapshotGlPictureRecorder extends PictureRecorder {
         mPreview.addRendererFrameCallback(new RendererFrameCallback() {

             int mTextureId;
+            int mOverlayTextureId = 0;
             SurfaceTexture mSurfaceTexture;
+            SurfaceTexture mOverlaySurfaceTexture;
             float[] mTransform;
+            float[] mOverlayTransform;
+            EglViewport viewport;

             @RendererThread
             public void onRendererTextureCreated(int textureId) {
                 mTextureId = textureId;
+                viewport = new EglViewport();
+                if (mWithOverlay) {
+                    mOverlayTextureId = viewport.createTexture();
+                }
                 mSurfaceTexture = new SurfaceTexture(mTextureId, true);
+                if (mWithOverlay) {
+                    mOverlaySurfaceTexture = new SurfaceTexture(mOverlayTextureId, true);
+                }
                 // Need to crop the size.
                 Rect crop = CropHelper.computeCrop(mResult.size, mOutputRatio);
                 mResult.size = new Size(crop.width(), crop.height());
                 mSurfaceTexture.setDefaultBufferSize(mResult.size.getWidth(), mResult.size.getHeight());
+                if (mWithOverlay) {
+                    mOverlaySurfaceTexture.setDefaultBufferSize(mResult.size.getWidth(), mResult.size.getHeight());
+                }
                 mTransform = new float[16];
+                if (mWithOverlay) {
+                    mOverlayTransform = new float[16];
+                }
             }

             @RendererThread

@@ -99,9 +130,25 @@ public class SnapshotGlPictureRecorder extends PictureRecorder {
                     public void run() {
                         EglWindowSurface surface = new EglWindowSurface(core, mSurfaceTexture);
                         surface.makeCurrent();
-                        EglViewport viewport = new EglViewport();
+                        // EglViewport viewport = new EglViewport();
                         mSurfaceTexture.updateTexImage();
                         mSurfaceTexture.getTransformMatrix(mTransform);
+
+                        Surface drawOnto = new Surface(mOverlaySurfaceTexture);
+                        if (mWithOverlay) {
+                            try {
+                                final Canvas surfaceCanvas = drawOnto.lockCanvas(null);
+                                surfaceCanvas.drawColor(Color.TRANSPARENT, PorterDuff.Mode.CLEAR);
+                                for (SurfaceDrawer surfaceDrawer : mSurfaceDrawerList) {
+                                    surfaceDrawer.drawOnSurfaceForPictureSnapshot(surfaceCanvas);
+                                }
+                                drawOnto.unlockCanvasAndPost(surfaceCanvas);
+                            } catch (Surface.OutOfResourcesException e) {
+                                e.printStackTrace();
+                            }
+                            mOverlaySurfaceTexture.updateTexImage();
+                            mOverlaySurfaceTexture.getTransformMatrix(mOverlayTransform);
+                        }

                         // Apply scale and crop:
                         // NOTE: scaleX and scaleY are in REF_VIEW, while our input appears to be in REF_SENSOR.

@@ -117,25 +164,50 @@ public class SnapshotGlPictureRecorder extends PictureRecorder {
                         // Not sure why we need the minus here... It makes no sense to me.
                         LOG.w("Recording frame. Rotation:", mResult.rotation, "Actual:", -mResult.rotation);
                         int rotation = -mResult.rotation;
+                        int overlayRotation = mEngine.getAngles().offset(Reference.VIEW, Reference.OUTPUT, Axis.ABSOLUTE); // TODO check axis
+                        // apparently with the front facing camera we don't need the minus sign
+                        if (mResult.facing == Facing.FRONT) {
+                            overlayRotation = -overlayRotation;
+                        }
                         mResult.rotation = 0;

                         // Go back to 0,0 so that rotate and flip work well.
                         Matrix.translateM(mTransform, 0, 0.5F, 0.5F, 0);
+                        if (mOverlayTransform != null) {
+                            Matrix.translateM(mOverlayTransform, 0, 0.5F, 0.5F, 0);
+                        }

                         // Apply rotation:
                         Matrix.rotateM(mTransform, 0, rotation, 0, 0, 1);
+                        if (mOverlayTransform != null) {
+                            Matrix.rotateM(mOverlayTransform, 0, overlayRotation, 0, 0, 1);
+                        }

                         // Flip horizontally for front camera:
                         if (mResult.facing == Facing.FRONT) {
                             Matrix.scaleM(mTransform, 0, -1, 1, 1);
+                            if (mOverlayTransform != null) {
+                                // not sure why we have to mirror the y axis
+                                Matrix.scaleM(mOverlayTransform, 0, -1, -1, 1);
+                            }
+                        }
+                        if (mOverlayTransform != null) {
+                            // not sure why we have to mirror the y axis
+                            Matrix.scaleM(mOverlayTransform, 0, 1, -1, 1);
                         }

                         // Go back to old position.
                         Matrix.translateM(mTransform, 0, -0.5F, -0.5F, 0);
+                        if (mOverlayTransform != null) {
+                            Matrix.translateM(mOverlayTransform, 0, -0.5F, -0.5F, 0);
+                        }

                         // Future note: passing scale values to the viewport?
                         // They are simply realScaleX and realScaleY.
                         viewport.drawFrame(mTextureId, mTransform);
+                        if (mWithOverlay) {
+                            viewport.drawFrame(mOverlayTextureId, mOverlayTransform);
+                        }

                         // don't - surface.swapBuffers();
                         mResult.data = surface.saveFrameTo(Bitmap.CompressFormat.JPEG);
                         mResult.format = PictureResult.FORMAT_JPEG;

@@ -144,7 +216,11 @@ public class SnapshotGlPictureRecorder extends PictureRecorder {
                         // EGL14.eglMakeCurrent(oldDisplay, oldSurface, oldSurface, eglContext);
                         surface.release();
                         viewport.release();
+                        drawOnto.release();
                         mSurfaceTexture.release();
+                        if (mOverlaySurfaceTexture != null) {
+                            mOverlaySurfaceTexture.release();
+                        }
                         core.release();
                         dispatchResult();
                     }
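
The matrix handling above keeps repeating one pattern: translate the transform so the pivot sits at the texture center (0.5, 0.5), apply the rotation and mirroring there, then translate back. A standalone sketch of that pattern with plain android.opengl.Matrix calls, as an illustration only (names and values are not taken from the recorder):

import android.opengl.Matrix;

class CenterPivotTransformSketch {

    // Builds a transform that rotates by `rotation` degrees and mirrors the Y axis
    // around the center of the texture, mirroring the translate/rotate/scale/translate
    // sequence used for the overlay transform above.
    static float[] centerRotateAndFlip(int rotation) {
        float[] transform = new float[16];
        Matrix.setIdentityM(transform, 0);
        Matrix.translateM(transform, 0, 0.5f, 0.5f, 0);   // move the pivot to the center
        Matrix.rotateM(transform, 0, rotation, 0, 0, 1);  // rotate around the Z axis
        Matrix.scaleM(transform, 0, 1, -1, 1);            // mirror the Y axis
        Matrix.translateM(transform, 0, -0.5f, -0.5f, 0); // move the pivot back
        return transform;
    }
}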

@@ -91,7 +91,7 @@ public class GlCameraPreview extends CameraPreview<GLSurfaceView, SurfaceTexture
                 mDispatched = false;
             }
         });
-        parent.addView(root, 0);
+        parent.addView(root, 1);
         mRootView = root;
         return glView;
     }

@@ -34,7 +34,7 @@ public class SurfaceCameraPreview extends CameraPreview<SurfaceView, SurfaceHold
     @Override
     protected SurfaceView onCreateView(@NonNull Context context, @NonNull ViewGroup parent) {
         View root = LayoutInflater.from(context).inflate(R.layout.cameraview_surface_view, parent, false);
-        parent.addView(root, 0);
+        parent.addView(root, 1);
         SurfaceView surfaceView = root.findViewById(R.id.surface_view);
         final SurfaceHolder holder = surfaceView.getHolder();
         holder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);

@@ -38,7 +38,7 @@ public class TextureCameraPreview extends CameraPreview<TextureView, SurfaceText
     @Override
     protected TextureView onCreateView(@NonNull Context context, @NonNull ViewGroup parent) {
         View root = LayoutInflater.from(context).inflate(R.layout.cameraview_texture_view, parent, false);
-        parent.addView(root, 0);
+        parent.addView(root, 1);
         TextureView texture = root.findViewById(R.id.texture_view);
         texture.setSurfaceTextureListener(new TextureView.SurfaceTextureListener() {

@@ -1,14 +1,23 @@
 package com.otaliastudios.cameraview.video;

+import android.graphics.Canvas;
+import android.graphics.Color;
+import android.graphics.PorterDuff;
 import android.graphics.SurfaceTexture;
 import android.opengl.EGL14;
 import android.os.Build;
+import android.util.Log;
+import android.view.Surface;
+
+import java.util.List;

 import com.otaliastudios.cameraview.CameraLogger;
+import com.otaliastudios.cameraview.SurfaceDrawer;
 import com.otaliastudios.cameraview.VideoResult;
 import com.otaliastudios.cameraview.controls.Audio;
 import com.otaliastudios.cameraview.engine.CameraEngine;
 import com.otaliastudios.cameraview.engine.offset.Reference;
+import com.otaliastudios.cameraview.internal.egl.EglViewport;
 import com.otaliastudios.cameraview.preview.GlCameraPreview;
 import com.otaliastudios.cameraview.preview.RendererFrameCallback;
 import com.otaliastudios.cameraview.preview.RendererThread;

@@ -48,11 +57,21 @@ public class SnapshotVideoRecorder extends VideoRecorder implements RendererFram
     private int mDesiredState = STATE_NOT_RECORDING;
     private int mTextureId = 0;
+    private int mOverlayTextureId = 0;
+    private SurfaceTexture mOverlaySurfaceTexture;
+    private Surface mOverlaySurface;
+    private List<SurfaceDrawer> mSurfaceDrawerList;
+    private boolean mWithOverlay;

     public SnapshotVideoRecorder(@NonNull CameraEngine engine,
-                                 @NonNull GlCameraPreview preview) {
+                                 @NonNull GlCameraPreview preview,
+                                 @NonNull List<SurfaceDrawer> surfaceDrawerList) {
         super(engine);
         mPreview = preview;
         mEngine = engine;
+        mWithOverlay = true;
+        mSurfaceDrawerList = surfaceDrawerList;
     }

     @Override

@@ -72,6 +91,14 @@ public class SnapshotVideoRecorder extends VideoRecorder implements RendererFram
     @Override
     public void onRendererTextureCreated(int textureId) {
         mTextureId = textureId;
+        if (mWithOverlay) {
+            EglViewport temp = new EglViewport();
+            mOverlayTextureId = temp.createTexture();
+            mOverlaySurfaceTexture = new SurfaceTexture(mOverlayTextureId);
+            mOverlaySurfaceTexture.setDefaultBufferSize(mResult.size.getWidth(), mResult.size.getHeight());
+            mOverlaySurface = new Surface(mOverlaySurfaceTexture);
+        }
     }

     @RendererThread

@@ -103,6 +130,7 @@ public class SnapshotVideoRecorder extends VideoRecorder implements RendererFram
                     mResult.videoFrameRate,
                     mResult.rotation,
                     type, mTextureId,
+                    mWithOverlay ? mOverlayTextureId : 0,
                     scaleX, scaleY,
                     mFlipped,
                     EGL14.eglGetCurrentContext()

@@ -129,6 +157,24 @@ public class SnapshotVideoRecorder extends VideoRecorder implements RendererFram
             TextureMediaEncoder.TextureFrame textureFrame = textureEncoder.acquireFrame();
             textureFrame.timestamp = surfaceTexture.getTimestamp();
             surfaceTexture.getTransformMatrix(textureFrame.transform);
+
+            // get overlay
+            if (mWithOverlay) {
+                try {
+                    final Canvas surfaceCanvas = mOverlaySurface.lockCanvas(null);
+                    surfaceCanvas.drawColor(Color.TRANSPARENT, PorterDuff.Mode.CLEAR);
+                    for (SurfaceDrawer surfaceDrawer : mSurfaceDrawerList) {
+                        surfaceDrawer.drawOnSurfaceForVideoSnapshot(surfaceCanvas);
+                    }
+                    mOverlaySurface.unlockCanvasAndPost(surfaceCanvas);
+                } catch (Surface.OutOfResourcesException e) {
+                    e.printStackTrace();
+                }
+                mOverlaySurfaceTexture.updateTexImage();
+                mOverlaySurfaceTexture.getTransformMatrix(textureFrame.overlayTransform);
+            }
+
             if (mEncoderEngine != null) {
                 // can happen on teardown
                 mEncoderEngine.notify(TextureMediaEncoder.FRAME_EVENT, textureFrame);

@@ -142,6 +188,14 @@ public class SnapshotVideoRecorder extends VideoRecorder implements RendererFram
             mEncoderEngine = null;
             mPreview.removeRendererFrameCallback(SnapshotVideoRecorder.this);
             mPreview = null;
+            if (mOverlaySurfaceTexture != null) {
+                mOverlaySurfaceTexture.release();
+                mOverlaySurfaceTexture = null;
+            }
+            if (mOverlaySurface != null) {
+                mOverlaySurface.release();
+                mOverlaySurface = null;
+            }
         }
     }

@@ -27,6 +27,7 @@ public class TextureMediaEncoder extends VideoMediaEncoder<TextureMediaEncoder.C
     public static class Config extends VideoMediaEncoder.Config {
         int textureId;
+        int overlayTextureId;
         float scaleX;
         float scaleY;
         boolean scaleFlipped;

@@ -35,11 +36,17 @@ public class TextureMediaEncoder extends VideoMediaEncoder<TextureMediaEncoder.C
         public Config(int width, int height, int bitRate, int frameRate, int rotation, String mimeType,
                       int textureId, float scaleX, float scaleY, boolean scaleFlipped, EGLContext eglContext) {
+            this(width, height, bitRate, frameRate, rotation, mimeType, textureId, 0, scaleX, scaleY, scaleFlipped, eglContext);
+        }
+
+        public Config(int width, int height, int bitRate, int frameRate, int rotation, String mimeType,
+                      int textureId, int overlayTextureId, float scaleX, float scaleY, boolean scaleFlipped, EGLContext eglContext) {
             // We rotate the texture using transformRotation. Pass rotation=0 to super so that
             // no rotation metadata is written into the output file.
             super(width, height, bitRate, frameRate, 0, mimeType);
             this.transformRotation = rotation;
             this.textureId = textureId;
+            this.overlayTextureId = overlayTextureId;
             this.scaleX = scaleX;
             this.scaleY = scaleY;
             this.scaleFlipped = scaleFlipped;

@@ -67,6 +74,7 @@ public class TextureMediaEncoder extends VideoMediaEncoder<TextureMediaEncoder.C
         // Typically coming from SurfaceTexture.getTimestamp().
         public long timestamp;
         public float[] transform = new float[16];
+        public float[] overlayTransform = new float[16];
     }

     @NonNull

@@ -117,6 +125,7 @@ public class TextureMediaEncoder extends VideoMediaEncoder<TextureMediaEncoder.C
         // We must scale this matrix like GlCameraPreview does, because it might have some cropping.
         // Scaling takes place with respect to the (0, 0, 0) point, so we must apply a Translation to compensate.
         float[] transform = frame.transform;
+        float[] overlayTransform = frame.overlayTransform;
         float scaleX = mConfig.scaleX;
         float scaleY = mConfig.scaleY;
         float scaleTranslX = (1F - scaleX) / 2F;

@@ -132,6 +141,11 @@ public class TextureMediaEncoder extends VideoMediaEncoder<TextureMediaEncoder.C
         Matrix.translateM(transform, 0, 0.5F, 0.5F, 0);
         Matrix.rotateM(transform, 0, mConfig.transformRotation, 0, 0, 1);
         Matrix.translateM(transform, 0, -0.5F, -0.5F, 0);
+        if (overlayTransform != null) {
+            Matrix.translateM(overlayTransform, 0, 0.5F, 0.5F, 0);
+            Matrix.rotateM(overlayTransform, 0, mConfig.transformRotation, 0, 0, 1);
+            Matrix.translateM(overlayTransform, 0, -0.5F, -0.5F, 0);
+        }

         LOG.v("onEvent", "frameNum:", thisFrameNum, "realFrameNum:", mFrameNum, "calling drainOutput.");
         drainOutput(false);

@@ -139,6 +153,7 @@ public class TextureMediaEncoder extends VideoMediaEncoder<TextureMediaEncoder.C
         // but flipped based on the mConfig.scaleFlipped boolean.
         LOG.v("onEvent", "frameNum:", thisFrameNum, "realFrameNum:", mFrameNum, "calling drawFrame.");
         mViewport.drawFrame(mConfig.textureId, transform);
+        mViewport.drawFrame(mConfig.overlayTextureId, overlayTransform);
         mWindow.setPresentationTime(frame.timestamp);
         mWindow.swapBuffers();
         mFramePool.recycle(frame);

@@ -134,4 +134,12 @@
         <attr name="cameraUseDeviceOrientation" format="boolean"/>
     </declare-styleable>

+    <declare-styleable name="CameraView_Layout">
+        <attr name="layout_drawInPreview" format="boolean"/>
+        <attr name="layout_drawInPictureSnapshot" format="boolean"/>
+        <attr name="layout_drawInVideoSnapshot" format="boolean"/>
+    </declare-styleable>
+
 </resources>

@@ -11,6 +11,7 @@ import android.util.Log;
 import android.view.View;
 import android.view.ViewGroup;
 import android.view.ViewTreeObserver;
+import android.widget.TextView;
 import android.widget.Toast;

 import com.otaliastudios.cameraview.CameraException;

@@ -206,12 +207,15 @@ public class CameraActivity extends AppCompatActivity implements View.OnClickLis
     private void toggleCamera() {
         if (camera.isTakingPicture() || camera.isTakingVideo()) return;
+        TextView watermarkTitle = findViewById(R.id.watermark_title);
         switch (camera.toggleFacing()) {
             case BACK:
+                watermarkTitle.setText("Back facing");
                 message("Switched to back camera!", false);
                 break;

             case FRONT:
+                watermarkTitle.setText("Front facing");
                 message("Switched to front camera!", false);
                 break;
         }

@@ -29,8 +29,32 @@
         app:cameraGestureScrollHorizontal="exposureCorrection"
         app:cameraGestureScrollVertical="none"
         app:cameraMode="picture"
-        app:cameraAutoFocusMarker="@string/cameraview_default_autofocus_marker"/>
+        app:cameraAutoFocusMarker="@string/cameraview_default_autofocus_marker">
+
+        <!-- Watermark -->
+        <LinearLayout
+            android:layout_width="wrap_content"
+            android:layout_height="wrap_content"
+            android:orientation="vertical"
+            android:layout_gravity="bottom|end"
+            app:layout_drawInPreview="false"
+            app:layout_drawInVideoSnapshot="true"
+            app:layout_drawInPictureSnapshot="true"
+            android:gravity="center"
+            android:padding="8dp">
+
+            <ImageView
+                android:layout_width="wrap_content"
+                android:layout_height="wrap_content"
+                android:src="@mipmap/cameraview" />
+
+            <TextView
+                android:id="@+id/watermark_title"
+                android:layout_width="wrap_content"
+                android:layout_height="wrap_content"
+                android:text="CameraView"
+                android:textColor="@android:color/white" />
+        </LinearLayout>
+    </com.otaliastudios.cameraview.CameraView>

     <ImageButton
         android:id="@+id/toggleCamera"
