Use device orientation flag (#497)

* Add cameraUseDeviceOrientation flag

* Add tests for useDeviceOrientation

* Improve demo app

* Add docs

* Fix tests
Author: Mattia Iavarone (committed via GitHub)
Commit: dc31c40dd9 · Parent: 71f006c6d9
17 changed files (lines changed in parentheses):

  1. cameraview/src/androidTest/java/com/otaliastudios/cameraview/CameraViewTest.java (13)
  2. cameraview/src/androidTest/java/com/otaliastudios/cameraview/preview/MockCameraPreview.java (7)
  3. cameraview/src/main/java/com/otaliastudios/cameraview/CameraOptions.java (3)
  4. cameraview/src/main/java/com/otaliastudios/cameraview/CameraView.java (64)
  5. cameraview/src/main/java/com/otaliastudios/cameraview/engine/Camera1Engine.java (6)
  6. cameraview/src/main/java/com/otaliastudios/cameraview/engine/Camera2Engine.java (31)
  7. cameraview/src/main/java/com/otaliastudios/cameraview/engine/CameraEngine.java (38)
  8. cameraview/src/main/java/com/otaliastudios/cameraview/engine/Step.java (6)
  9. cameraview/src/main/java/com/otaliastudios/cameraview/picture/SnapshotGlPictureRecorder.java (2)
 10. cameraview/src/main/java/com/otaliastudios/cameraview/preview/CameraPreview.java (15)
 11. cameraview/src/main/java/com/otaliastudios/cameraview/video/Full2VideoRecorder.java (31)
 12. cameraview/src/main/res/values/attrs.xml (2)
 13. demo/src/main/java/com/otaliastudios/cameraview/demo/CameraActivity.java (18)
 14. demo/src/main/java/com/otaliastudios/cameraview/demo/Control.java (87)
 15. docs/_posts/2018-12-20-changelog.md (5)
 16. docs/_posts/2018-12-20-more-features.md (15)
 17. docs/_posts/2018-12-20-previews.md (2)
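
For quick orientation, the flag added here can be set from XML (`app:cameraUseDeviceOrientation`, see attrs.xml and the docs below) or at runtime. A minimal, illustrative sketch of the runtime API introduced by this commit (the view id and Activity context are placeholders, not part of the diff):

```java
// Inside an Activity/Fragment that inflates a CameraView (R.id.camera is a placeholder id).
CameraView cameraView = findViewById(R.id.camera);

// Default is true: outputs are rotated to match the physical device orientation.
// Setting false keeps outputs aligned with the view/display instead.
cameraView.setUseDeviceOrientation(false);

boolean usesDeviceOrientation = cameraView.getUseDeviceOrientation(); // new getter
```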

cameraview/src/androidTest/java/com/otaliastudios/cameraview/CameraViewTest.java

@@ -164,6 +164,7 @@ public class CameraViewTest extends BaseTest {
         // Self managed
         GestureParser gestures = new GestureParser(empty);
         assertEquals(cameraView.getPlaySounds(), CameraView.DEFAULT_PLAY_SOUNDS);
+        assertEquals(cameraView.getUseDeviceOrientation(), CameraView.DEFAULT_USE_DEVICE_ORIENTATION);
         assertEquals(cameraView.getGestureAction(Gesture.TAP), gestures.getTapAction());
         assertEquals(cameraView.getGestureAction(Gesture.LONG_TAP), gestures.getLongTapAction());
         assertEquals(cameraView.getGestureAction(Gesture.PINCH), gestures.getPinchAction());
@@ -544,9 +545,17 @@ public class CameraViewTest extends BaseTest {
     @Test
     public void testSetPlaySounds() {
         cameraView.setPlaySounds(true);
-        assertEquals(cameraView.getPlaySounds(), true);
+        assertTrue(cameraView.getPlaySounds());
         cameraView.setPlaySounds(false);
-        assertEquals(cameraView.getPlaySounds(), false);
+        assertFalse(cameraView.getPlaySounds());
+    }
+
+    @Test
+    public void testSetUseDeviceOrientation() {
+        cameraView.setUseDeviceOrientation(true);
+        assertTrue(cameraView.getUseDeviceOrientation());
+        cameraView.setUseDeviceOrientation(false);
+        assertFalse(cameraView.getUseDeviceOrientation());
     }
 
     @Test

cameraview/src/androidTest/java/com/otaliastudios/cameraview/preview/MockCameraPreview.java

@@ -15,6 +15,8 @@ public class MockCameraPreview extends CameraPreview<View, Void> {
         super(context, parent);
     }
 
+    private View rootView;
+
     @Override
     public boolean supportsCropping() {
         return true;
@@ -23,7 +25,8 @@ public class MockCameraPreview extends CameraPreview<View, Void> {
     @NonNull
     @Override
     protected View onCreateView(@NonNull Context context, @NonNull ViewGroup parent) {
-        return new View(context);
+        rootView = new View(context);
+        return rootView;
     }
 
@@ -42,6 +45,6 @@ public class MockCameraPreview extends CameraPreview<View, Void> {
     @NonNull
     @Override
     public View getRootView() {
-        return null;
+        return rootView;
     }
 }

cameraview/src/main/java/com/otaliastudios/cameraview/CameraOptions.java

@@ -20,6 +20,7 @@ import com.otaliastudios.cameraview.controls.Control;
 import com.otaliastudios.cameraview.controls.Engine;
 import com.otaliastudios.cameraview.controls.Facing;
 import com.otaliastudios.cameraview.controls.Flash;
+import com.otaliastudios.cameraview.controls.Preview;
 import com.otaliastudios.cameraview.engine.Mapper;
 import com.otaliastudios.cameraview.gesture.GestureAction;
 import com.otaliastudios.cameraview.controls.Grid;
@@ -309,6 +310,8 @@ public class CameraOptions {
             return (Collection<T>) getSupportedWhiteBalance();
         } else if (controlClass.equals(Engine.class)) {
             return (Collection<T>) Arrays.asList(Engine.values());
+        } else if (controlClass.equals(Preview.class)) {
+            return (Collection<T>) Arrays.asList(Preview.values());
         }
         // Unrecognized control.
         return Collections.emptyList();
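
With the new `Preview` branch above, `CameraOptions.getSupportedControls()` can enumerate preview implementations just like engines. A hedged usage sketch (the listener wiring is illustrative; `getSupportedControls` and `onCameraOpened(CameraOptions)` are the existing APIs also used by the demo):

```java
// Illustrative: inspect supported controls once the camera is open.
camera.addCameraListener(new CameraListener() {
    @Override
    public void onCameraOpened(@NonNull CameraOptions options) {
        Collection<Preview> previews = options.getSupportedControls(Preview.class);
        Collection<Engine> engines = options.getSupportedControls(Engine.class);
        // Per the change above, both simply list every enum value,
        // since any preview or engine can be selected.
    }
});
```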

cameraview/src/main/java/com/otaliastudios/cameraview/CameraView.java

@@ -6,6 +6,7 @@ import android.annotation.TargetApi;
 import android.app.Activity;
 
 import androidx.annotation.VisibleForTesting;
+import androidx.core.view.ViewCompat;
 import androidx.lifecycle.Lifecycle;
 import androidx.lifecycle.LifecycleObserver;
 import androidx.lifecycle.LifecycleOwner;
@@ -98,9 +99,11 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
     final static long DEFAULT_AUTOFOCUS_RESET_DELAY_MILLIS = 3000;
     final static boolean DEFAULT_PLAY_SOUNDS = true;
+    final static boolean DEFAULT_USE_DEVICE_ORIENTATION = true;
 
     // Self managed parameters
     private boolean mPlaySounds;
+    private boolean mUseDeviceOrientation;
     private HashMap<Gesture, GestureAction> mGestureMap = new HashMap<>(4);
     private Preview mPreview;
     private Engine mEngine;
@@ -152,6 +155,7 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
         // Self managed
         boolean playSounds = a.getBoolean(R.styleable.CameraView_cameraPlaySounds, DEFAULT_PLAY_SOUNDS);
+        boolean useDeviceOrientation = a.getBoolean(R.styleable.CameraView_cameraUseDeviceOrientation, DEFAULT_USE_DEVICE_ORIENTATION);
         mExperimental = a.getBoolean(R.styleable.CameraView_cameraExperimental, false);
         mPreview = controls.getPreview();
         mEngine = controls.getEngine();
@@ -192,6 +196,7 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
         // Apply self managed
         setPlaySounds(playSounds);
+        setUseDeviceOrientation(useDeviceOrientation);
         setGrid(controls.getGrid());
         setGridColor(gridColor);
@@ -640,7 +645,7 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
         if (checkPermissions(getAudio())) {
             // Update display orientation for current CameraEngine
             mOrientationHelper.enable(getContext());
-            mCameraEngine.setDisplayOffset(mOrientationHelper.getDisplayOffset());
+            mCameraEngine.getAngles().setDisplayOffset(mOrientationHelper.getDisplayOffset());
             mCameraEngine.start();
         }
     }
@@ -778,9 +783,26 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
      * @param preview desired preview engine
      */
     public void setPreview(@NonNull Preview preview) {
+        boolean isNew = preview != mPreview;
+        if (!isNew) return;
         mPreview = preview;
+        if (!ViewCompat.isAttachedToWindow(this) && mCameraPreview != null) {
+            // Null the preview: will create another when re-attaching.
+            mCameraPreview.onDestroy();
+            mCameraPreview = null;
+        }
     }
 
+    /**
+     * Returns the current preview control.
+     *
+     * @see #setPreview(Preview)
+     * @return the current preview control
+     */
+    @NonNull
+    public Preview getPreview() {
+        return mPreview;
+    }
+
     /**
      * Controls the core engine. Should only be called
@@ -816,6 +838,16 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
         setAutoFocusResetDelay(oldEngine.getAutoFocusResetDelay());
     }
 
+    /**
+     * Returns the current engine control.
+     *
+     * @see #setEngine(Engine)
+     * @return the current engine control
+     */
+    @NonNull
+    public Engine getEngine() {
+        return mEngine;
+    }
+
     /**
      * Returns a {@link CameraOptions} instance holding supported options for this camera
@@ -1648,6 +1680,27 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
         return mPlaySounds;
     }
 
+    /**
+     * Controls whether picture and video output should consider the current device orientation.
+     * For example, when true, if the user rotates the device before taking a picture, the picture
+     * will be rotated as well.
+     *
+     * @param useDeviceOrientation true to consider device orientation for outputs
+     */
+    public void setUseDeviceOrientation(boolean useDeviceOrientation) {
+        mUseDeviceOrientation = useDeviceOrientation;
+    }
+
+    /**
+     * Gets the current behavior for considering the device orientation when returning picture
+     * or video outputs.
+     *
+     * @see #setUseDeviceOrientation(boolean)
+     * @return whether we are using the device orientation for outputs
+     */
+    public boolean getUseDeviceOrientation() {
+        return mUseDeviceOrientation;
+    }
+
     /**
      * Sets the encoder for video recordings.
@@ -1890,8 +1943,15 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
     @Override
     public void onDeviceOrientationChanged(int deviceOrientation) {
         mLogger.i("onDeviceOrientationChanged", deviceOrientation);
-        mCameraEngine.setDeviceOrientation(deviceOrientation);
         int displayOffset = mOrientationHelper.getDisplayOffset();
+        if (!mUseDeviceOrientation) {
+            // To fool the engine to return outputs in the VIEW reference system,
+            // the device orientation should be set to -displayOffset.
+            int fakeDeviceOrientation = (360 - displayOffset) % 360;
+            mCameraEngine.getAngles().setDeviceOrientation(fakeDeviceOrientation);
+        } else {
+            mCameraEngine.getAngles().setDeviceOrientation(deviceOrientation);
+        }
         final int value = (deviceOrientation + displayOffset) % 360;
         mUiHandler.post(new Runnable() {
             @Override
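
The key piece above is the "fake" device orientation passed to the engine when the flag is disabled: instead of the sensor-reported rotation, the engine receives the value that cancels out the display offset, so outputs stay in the VIEW reference system. A small illustrative sketch of that math (it mirrors `onDeviceOrientationChanged` above and is not library API):

```java
// Illustrative helper: what the engine's Angles receive as "device orientation".
// displayOffset: rotation of the display/Activity (0, 90, 180, 270).
// deviceOrientation: physical rotation reported by the orientation sensor.
static int engineDeviceOrientation(boolean useDeviceOrientation,
                                   int deviceOrientation,
                                   int displayOffset) {
    if (useDeviceOrientation) {
        // Default behavior: outputs follow how the user is holding the device.
        return deviceOrientation;
    } else {
        // Flag disabled: cancel the display offset so outputs follow the view.
        return (360 - displayOffset) % 360;
    }
}
```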

cameraview/src/main/java/com/otaliastudios/cameraview/engine/Camera1Engine.java

@@ -123,7 +123,7 @@ public class Camera1Engine extends CameraEngine implements
     for (int i = 0, count = Camera.getNumberOfCameras(); i < count; i++) {
         Camera.getCameraInfo(i, cameraInfo);
         if (cameraInfo.facing == internalFacing) {
-            setSensorOffset(facing, cameraInfo.orientation);
+            getAngles().setSensorOffset(facing, cameraInfo.orientation);
             mCameraId = i;
             return true;
         }
@@ -351,13 +351,11 @@ public class Camera1Engine extends CameraEngine implements
         throw new IllegalStateException("Video snapshots are only supported starting from API 18.");
     }
     GlCameraPreview glPreview = (GlCameraPreview) mPreview;
-    // Output size is easy:
     Size outputSize = getUncroppedSnapshotSize(Reference.OUTPUT);
     if (outputSize == null) {
         throw new IllegalStateException("outputSize should not be null.");
     }
-    AspectRatio outputRatio = getAngles().flip(Reference.OUTPUT, Reference.VIEW) ? viewAspectRatio.flip() : viewAspectRatio;
+    AspectRatio outputRatio = getAngles().flip(Reference.VIEW, Reference.OUTPUT) ? viewAspectRatio.flip() : viewAspectRatio;
     Rect outputCrop = CropHelper.computeCrop(outputSize, outputRatio);
     outputSize = new Size(outputCrop.width(), outputCrop.height());
     stub.size = outputSize;

cameraview/src/main/java/com/otaliastudios/cameraview/engine/Camera2Engine.java

@@ -19,7 +19,6 @@ import android.hardware.camera2.params.StreamConfigurationMap;
 import android.location.Location;
 import android.media.Image;
 import android.media.ImageReader;
-import android.media.MediaCodec;
 import android.os.Build;
 import android.util.Rational;
 import android.view.Surface;
@@ -96,8 +95,7 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
     private Surface mPreviewStreamSurface;
 
     // Video recording
-    private Surface mFullVideoPersistentSurface; // API 23+. The surface is created before.
-    private VideoResult.Stub mFullVideoPendingStub; // API 21-22. When takeVideo is called, we have to reset the session.
+    private VideoResult.Stub mFullVideoPendingStub; // When takeVideo is called, we have to reset the session.
 
     // Picture capturing
     private ImageReader mPictureReader;
@@ -302,7 +300,7 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
     if (internalFacing == readCharacteristic(characteristics, CameraCharacteristics.LENS_FACING, -99)) {
         mCameraId = cameraId;
         int sensorOffset = readCharacteristic(characteristics, CameraCharacteristics.SENSOR_ORIENTATION, 0);
-        setSensorOffset(facing, sensorOffset);
+        getAngles().setSensorOffset(facing, sensorOffset);
         return true;
     }
 } catch (CameraAccessException ignore) {
@@ -411,11 +409,8 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
     // 2. VIDEO RECORDING
     if (getMode() == Mode.VIDEO) {
-        if (Full2VideoRecorder.SUPPORTS_PERSISTENT_SURFACE) {
-            mFullVideoPersistentSurface = MediaCodec.createPersistentInputSurface();
-            outputSurfaces.add(mFullVideoPersistentSurface);
-        } else if (mFullVideoPendingStub != null) {
-            Full2VideoRecorder recorder = new Full2VideoRecorder(this, mCameraId, null);
+        if (mFullVideoPendingStub != null) {
+            Full2VideoRecorder recorder = new Full2VideoRecorder(this, mCameraId);
             try {
                 outputSurfaces.add(recorder.createInputSurface(mFullVideoPendingStub));
             } catch (Full2VideoRecorder.PrepareException e) {
@@ -567,10 +562,6 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
     @Override
     protected Task<Void> onStopBind() {
         LOG.i("onStopBind:", "About to clean up.");
-        if (mFullVideoPersistentSurface != null) {
-            mFullVideoPersistentSurface.release();
-            mFullVideoPersistentSurface = null;
-        }
         mFrameProcessingSurface = null;
         mPreviewStreamSurface = null;
         mPreviewStreamSize = null;
@@ -667,19 +658,17 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
     LOG.i("onTakeVideo", "called.");
     stub.rotation = getAngles().offset(Reference.SENSOR, Reference.OUTPUT, Axis.RELATIVE_TO_SENSOR);
     stub.size = getAngles().flip(Reference.SENSOR, Reference.OUTPUT) ? mCaptureSize.flip() : mCaptureSize;
-    if (!Full2VideoRecorder.SUPPORTS_PERSISTENT_SURFACE) {
-        // On API 21 and 22, we must restart the session at each time.
-        // Save the pending data and restart the session.
-        LOG.w("onTakeVideo", "calling restartBind.");
-        mFullVideoPendingStub = stub;
-        restartBind();
-    } else {
-        doTakeVideo(stub);
-    }
+    // We must restart the session at each time.
+    // Save the pending data and restart the session.
+    LOG.w("onTakeVideo", "calling restartBind.");
+    mFullVideoPendingStub = stub;
+    restartBind();
 }
 
 private void doTakeVideo(@NonNull final VideoResult.Stub stub) {
-    mVideoRecorder = new Full2VideoRecorder(this, mCameraId, mFullVideoPersistentSurface);
+    if (!(mVideoRecorder instanceof Full2VideoRecorder)) {
+        mVideoRecorder = new Full2VideoRecorder(this, mCameraId);
+    }
     Full2VideoRecorder recorder = (Full2VideoRecorder) mVideoRecorder;
     try {
         createRepeatingRequestBuilder(CameraDevice.TEMPLATE_RECORD);
@@ -706,13 +695,11 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
         throw new IllegalStateException("Video snapshots are only supported with GlCameraPreview.");
     }
     GlCameraPreview glPreview = (GlCameraPreview) mPreview;
-    // Output size is easy:
     Size outputSize = getUncroppedSnapshotSize(Reference.OUTPUT);
     if (outputSize == null) {
         throw new IllegalStateException("outputSize should not be null.");
     }
-    AspectRatio outputRatio = getAngles().flip(Reference.OUTPUT, Reference.VIEW) ? viewAspectRatio.flip() : viewAspectRatio;
+    AspectRatio outputRatio = getAngles().flip(Reference.VIEW, Reference.OUTPUT) ? viewAspectRatio.flip() : viewAspectRatio;
     Rect outputCrop = CropHelper.computeCrop(outputSize, outputRatio);
     outputSize = new Size(outputCrop.width(), outputCrop.height());
     stub.size = outputSize;

cameraview/src/main/java/com/otaliastudios/cameraview/engine/CameraEngine.java

@@ -70,7 +70,7 @@ import java.util.concurrent.TimeUnit;
  * So at the end of both step 1 and 2, the engine should check if both have
  * been performed and trigger the steps 3 and 4.
  *
- * We use an abstraction for each step called {@link CameraEngineStep} that manages the state of
+ * We use an abstraction for each step called {@link Step} that manages the state of
  * each step and ensures that start and stop operations, for each step, are never called if the
  * previous one has not ended.
  *
@@ -140,11 +140,11 @@ public abstract class CameraEngine implements
     private static final CameraLogger LOG = CameraLogger.create(TAG);
 
     @SuppressWarnings({"WeakerAccess", "unused"})
-    public static final int STATE_STOPPING = CameraEngineStep.STATE_STOPPING;
-    public static final int STATE_STOPPED = CameraEngineStep.STATE_STOPPED;
+    public static final int STATE_STOPPING = Step.STATE_STOPPING;
+    public static final int STATE_STOPPED = Step.STATE_STOPPED;
     @SuppressWarnings({"WeakerAccess", "unused"})
-    public static final int STATE_STARTING = CameraEngineStep.STATE_STARTING;
-    public static final int STATE_STARTED = CameraEngineStep.STATE_STARTED;
+    public static final int STATE_STARTING = Step.STATE_STARTING;
+    public static final int STATE_STARTED = Step.STATE_STARTED;
 
     // Need to be protected
     @SuppressWarnings("WeakerAccess") protected WorkerHandler mHandler;
@@ -185,16 +185,17 @@ public abstract class CameraEngine implements
     private int mSnapshotMaxHeight = Integer.MAX_VALUE; // in REF_VIEW for consistency with SizeSelectors
 
     // Steps
-    private final CameraEngineStep.Callback mStepCallback = new CameraEngineStep.Callback() {
+    private final Step.Callback mStepCallback = new Step.Callback() {
         @Override @NonNull public Executor getExecutor() { return mHandler.getExecutor(); }
         @Override public void handleException(@NonNull Exception exception) {
             CameraEngine.this.handleException(Thread.currentThread(), exception, false);
         }
     };
-    @VisibleForTesting CameraEngineStep mEngineStep = new CameraEngineStep("engine", mStepCallback);
-    private CameraEngineStep mBindStep = new CameraEngineStep("bind", mStepCallback);
-    private CameraEngineStep mPreviewStep = new CameraEngineStep("preview", mStepCallback);
-    private CameraEngineStep mAllStep = new CameraEngineStep("all", mStepCallback);
+    @VisibleForTesting
+    Step mEngineStep = new Step("engine", mStepCallback);
+    private Step mBindStep = new Step("bind", mStepCallback);
+    private Step mPreviewStep = new Step("preview", mStepCallback);
+    private Step mAllStep = new Step("all", mStepCallback);
 
     // Ops used for testing.
     @VisibleForTesting Op<Void> mStartVideoOp = new Op<>();
@@ -785,21 +786,6 @@ public abstract class CameraEngine implements
         return mAngles;
     }
 
-    @SuppressWarnings("WeakerAccess")
-    protected final void setSensorOffset(@NonNull Facing facing, int sensorOffset) {
-        mAngles.setSensorOffset(facing, sensorOffset);
-    }
-
-    // This is called before start() and never again.
-    public final void setDisplayOffset(int displayOffset) {
-        mAngles.setDisplayOffset(displayOffset);
-    }
-
-    // This can be called multiple times.
-    public final void setDeviceOrientation(int deviceOrientation) {
-        mAngles.setDeviceOrientation(deviceOrientation);
-    }
-
     public final void setPreviewStreamSizeSelector(@Nullable SizeSelector selector) {
         mPreviewStreamSizeSelector = selector;
     }
@@ -1018,7 +1004,7 @@ public abstract class CameraEngine implements
      * Camera is about to be opened. Implementors should look into available cameras
      * and see if anyone matches the given {@link Facing value}.
      *
-     * If so, implementors should set {@link #setSensorOffset(Facing, int)} and any other information
+     * If so, implementors should set {@link Angles#setSensorOffset(Facing, int)} and any other information
      * (like camera ID) needed to start the engine.
     *
     * @param facing the facing value

cameraview/src/main/java/com/otaliastudios/cameraview/engine/Step.java

@@ -34,9 +34,9 @@ import androidx.annotation.VisibleForTesting;
  *
  * <strong>This class is NOT thread safe!</string>
  */
-class CameraEngineStep {
+class Step {
 
-    private static final String TAG = CameraEngineStep.class.getSimpleName();
+    private static final String TAG = Step.class.getSimpleName();
     private static final CameraLogger LOG = CameraLogger.create(TAG);
 
     interface Callback {
@@ -59,7 +59,7 @@ class CameraEngineStep {
     private final String name;
     private final Callback callback;
 
-    CameraEngineStep(@NonNull String name, @NonNull Callback callback) {
+    Step(@NonNull String name, @NonNull Callback callback) {
         this.name = name.toUpperCase();
         this.callback = callback;
     }

cameraview/src/main/java/com/otaliastudios/cameraview/picture/SnapshotGlPictureRecorder.java

@@ -114,7 +114,7 @@ public class SnapshotGlPictureRecorder extends PictureRecorder {
     Matrix.scaleM(mTransform, 0, realScaleX, realScaleY, 1);
 
     // Fix rotation:
-    // TODO Not sure why we need the minus here... It makes no sense to me.
+    // Not sure why we need the minus here... It makes no sense to me.
     LOG.w("Recording frame. Rotation:", mResult.rotation, "Actual:", -mResult.rotation);
     int rotation = -mResult.rotation;
     mResult.rotation = 0;

cameraview/src/main/java/com/otaliastudios/cameraview/preview/CameraPreview.java

@@ -7,6 +7,7 @@ import androidx.annotation.VisibleForTesting;
 import android.view.View;
 import android.view.ViewGroup;
+import android.view.ViewParent;
 
 import com.otaliastudios.cameraview.CameraLogger;
 import com.otaliastudios.cameraview.engine.CameraEngine;
@@ -186,8 +187,10 @@ public abstract class CameraPreview<T extends View, Output> {
         if (mOutputSurfaceWidth > 0 && mOutputSurfaceHeight > 0) {
             crop(mCropOp);
         }
-        mSurfaceCallback.onSurfaceAvailable();
+        if (mSurfaceCallback != null) {
+            mSurfaceCallback.onSurfaceAvailable();
+        }
     }
 
     /**
      * Subclasses can call this to notify that the surface has changed.
@@ -203,9 +206,11 @@ public abstract class CameraPreview<T extends View, Output> {
         if (width > 0 && height > 0) {
             crop(mCropOp);
         }
-        mSurfaceCallback.onSurfaceChanged();
+        if (mSurfaceCallback != null) {
+            mSurfaceCallback.onSurfaceChanged();
+        }
         }
     }
 
     /**
      * Subclasses can call this to notify that the surface has been destroyed.
@@ -214,8 +219,10 @@ public abstract class CameraPreview<T extends View, Output> {
     protected final void dispatchOnSurfaceDestroyed() {
         mOutputSurfaceWidth = 0;
         mOutputSurfaceHeight = 0;
-        mSurfaceCallback.onSurfaceDestroyed();
+        if (mSurfaceCallback != null) {
+            mSurfaceCallback.onSurfaceDestroyed();
+        }
     }
 
     /**
     * Called by the hosting {@link com.otaliastudios.cameraview.CameraView},
@@ -233,7 +240,13 @@ public abstract class CameraPreview<T extends View, Output> {
      * Called by the hosting {@link com.otaliastudios.cameraview.CameraView},
      * this is a lifecycle event.
      */
-    public void onDestroy() {}
+    public void onDestroy() {
+        View root = getRootView();
+        ViewParent parent = root.getParent();
+        if (parent instanceof ViewGroup) {
+            ((ViewGroup) parent).removeView(root);
+        }
+    }
 
     /**
      * Here we must crop the visible part by applying a scale greater than 1 to one of our

cameraview/src/main/java/com/otaliastudios/cameraview/video/Full2VideoRecorder.java

@@ -1,14 +1,6 @@
 package com.otaliastudios.cameraview.video;
 
 import android.annotation.SuppressLint;
-import android.graphics.SurfaceTexture;
-import android.hardware.camera2.CameraAccessException;
-import android.hardware.camera2.CameraCaptureSession;
-import android.hardware.camera2.CameraDevice;
-import android.hardware.camera2.CaptureFailure;
-import android.hardware.camera2.CaptureRequest;
-import android.media.Image;
-import android.media.MediaActionSound;
 import android.media.MediaRecorder;
 import android.os.Build;
 import android.view.Surface;
@@ -33,21 +25,13 @@ public class Full2VideoRecorder extends FullVideoRecorder {
     private static final String TAG = Full2VideoRecorder.class.getSimpleName();
     private static final CameraLogger LOG = CameraLogger.create(TAG);
 
-    // This actually didn't work as I expected, we're never using this. Should remove.
-    @SuppressWarnings("PointlessBooleanExpression") // TODO
-    public static final boolean SUPPORTS_PERSISTENT_SURFACE = false && Build.VERSION.SDK_INT >= 23;
-
     private final String mCameraId;
     private Surface mInputSurface;
-    private final boolean mUseInputSurface;
 
     public Full2VideoRecorder(@NonNull Camera2Engine engine,
-                              @NonNull String cameraId,
-                              @Nullable Surface surface) {
+                              @NonNull String cameraId) {
         super(engine);
         mCameraId = cameraId;
-        mUseInputSurface = surface != null && SUPPORTS_PERSISTENT_SURFACE;
-        mInputSurface = mUseInputSurface ? surface : null;
     }
 
 @SuppressLint("NewApi")
@@ -56,10 +40,10 @@ public class Full2VideoRecorder extends FullVideoRecorder {
     mediaRecorder.setVideoSource(MediaRecorder.VideoSource.SURFACE);
     Size size = stub.rotation % 180 != 0 ? stub.size.flip() : stub.size;
     mProfile = CamcorderProfiles.get(mCameraId, size);
-    if (mUseInputSurface) {
-        //noinspection ConstantConditions
-        mediaRecorder.setInputSurface(mInputSurface);
-    }
+    // This was an option: get the surface from outside this class, using MediaCodec.createPersistentInputSurface()
+    // But it doesn't really help since the Camera2 engine refuses a surface that has not been configured,
+    // so even with that trick we would have to attach the surface to this recorder before creating the CameraSession.
+    // mediaRecorder.setInputSurface(mInputSurface);
     return super.onPrepareMediaRecorder(stub, mediaRecorder);
 }
@@ -72,9 +56,6 @@ public class Full2VideoRecorder extends FullVideoRecorder {
  */
 @NonNull
 public Surface createInputSurface(@NonNull VideoResult.Stub stub) throws PrepareException {
-    if (mUseInputSurface) {
-        throw new IllegalStateException("We are using the input surface (API23+), can't createInputSurface here.");
-    }
     if (!prepareMediaRecorder(stub)) {
         throw new PrepareException(mError);
     }
@@ -87,8 +68,6 @@ public class Full2VideoRecorder extends FullVideoRecorder {
     return mInputSurface;
 }
 
-@SuppressWarnings("WeakerAccess")
 public class PrepareException extends Exception {
     private PrepareException(Throwable cause) {
         super(cause);

cameraview/src/main/res/values/attrs.xml

@@ -131,5 +131,7 @@
     <attr name="cameraAutoFocusMarker" format="string|reference"/>
 
+    <attr name="cameraUseDeviceOrientation" format="boolean"/>
+
 </declare-styleable>
 
 </resources>

demo/src/main/java/com/otaliastudios/cameraview/demo/CameraActivity.java

@@ -21,6 +21,7 @@ import com.otaliastudios.cameraview.CameraView;
 import com.otaliastudios.cameraview.PictureResult;
 import com.otaliastudios.cameraview.controls.Mode;
 import com.otaliastudios.cameraview.VideoResult;
+import com.otaliastudios.cameraview.controls.Preview;
 import com.otaliastudios.cameraview.frame.Frame;
 import com.otaliastudios.cameraview.frame.FrameProcessor;
 import com.otaliastudios.cameraview.size.SizeSelectors;
@@ -173,6 +174,10 @@ public class CameraActivity extends AppCompatActivity implements View.OnClickLis
     private void capturePictureSnapshot() {
         if (camera.isTakingPicture()) return;
+        if (camera.getPreview() != Preview.GL_SURFACE) {
+            message("Picture snapshots are only allowed with the GL_SURFACE preview.", true);
+            return;
+        }
         mCaptureTime = System.currentTimeMillis();
         message("Capturing picture snapshot...", false);
         camera.takePictureSnapshot();
@@ -193,6 +198,10 @@ public class CameraActivity extends AppCompatActivity implements View.OnClickLis
             message("Already taking video.", false);
             return;
         }
+        if (camera.getPreview() != Preview.GL_SURFACE) {
+            message("Video snapshots are only allowed with the GL_SURFACE preview.", true);
+            return;
+        }
         message("Recording snapshot for 5 seconds...", true);
         camera.takeVideoSnapshot(new File(getFilesDir(), "video.mp4"), 5000);
     }
@@ -212,10 +221,11 @@ public class CameraActivity extends AppCompatActivity implements View.OnClickLis
     @Override
     public boolean onValueChanged(Control control, Object value, String name) {
-        if (!camera.isHardwareAccelerated() && (control == Control.WIDTH || control == Control.HEIGHT)) {
-            if ((Integer) value > 0) {
-                message("This device does not support hardware acceleration. " +
-                        "In this case you can not change width or height. " +
+        if ((control == Control.WIDTH || control == Control.HEIGHT)) {
+            Preview preview = camera.getPreview();
+            boolean wrapContent = (Integer) value == ViewGroup.LayoutParams.WRAP_CONTENT;
+            if (preview == Preview.SURFACE && !wrapContent) {
+                message("The SurfaceView preview does not support width or height changes. " +
                         "The view will act as WRAP_CONTENT by default.", true);
                 return false;
             }

demo/src/main/java/com/otaliastudios/cameraview/demo/Control.java

@@ -5,10 +5,13 @@ import androidx.annotation.NonNull;
 import android.view.View;
 import android.view.ViewGroup;
 
+import com.otaliastudios.cameraview.CameraListener;
 import com.otaliastudios.cameraview.controls.Audio;
 import com.otaliastudios.cameraview.CameraOptions;
 import com.otaliastudios.cameraview.CameraView;
+import com.otaliastudios.cameraview.controls.Engine;
 import com.otaliastudios.cameraview.controls.Flash;
+import com.otaliastudios.cameraview.controls.Preview;
 import com.otaliastudios.cameraview.gesture.Gesture;
 import com.otaliastudios.cameraview.gesture.GestureAction;
 import com.otaliastudios.cameraview.controls.Grid;
@@ -18,6 +21,7 @@ import com.otaliastudios.cameraview.controls.VideoCodec;
 import com.otaliastudios.cameraview.controls.WhiteBalance;
 
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.Collection;
 import java.util.List;
@@ -26,28 +30,38 @@
  */
 public enum Control {
 
+    // Layout
     WIDTH("Width", false),
     HEIGHT("Height", true),
 
+    // Some controls
     MODE("Mode", false),
     FLASH("Flash", false),
     WHITE_BALANCE("White balance", false),
     HDR("Hdr", true),
 
-    GRID("Grid lines", false),
-    GRID_COLOR("Grid color", true),
+    // Engine and preview
+    ENGINE("Engine", false),
+    PREVIEW("Preview Surface", true),
 
-    // TODO audio bitRate
-    // TODO video bitRate
-    // THey are a bit annoying because it's not clear what the default should be.
+    // Video recording
     VIDEO_CODEC("Video codec", false),
     AUDIO("Audio", true),
+    // TODO audio bitRate
+    // TODO video bitRate
+    // They are a bit annoying because it's not clear what the default should be.
 
+    // Gestures
     PINCH("Pinch", false),
     HSCROLL("Horizontal scroll", false),
     VSCROLL("Vertical scroll", false),
     TAP("Single tap", false),
-    LONG_TAP("Long tap", true);
+    LONG_TAP("Long tap", true),
+
+    // Others
+    GRID("Grid lines", false),
+    GRID_COLOR("Grid color", false),
+    USE_DEVICE_ORIENTATION("Use device orientation", true);
 
     private String name;
     private boolean last;
@@ -88,6 +102,8 @@
             case GRID: return options.getSupportedControls(Grid.class);
             case AUDIO: return options.getSupportedControls(Audio.class);
             case VIDEO_CODEC: return options.getSupportedControls(VideoCodec.class);
+            case ENGINE: return options.getSupportedControls(Engine.class);
+            case PREVIEW: return options.getSupportedControls(Preview.class);
             case PINCH:
             case HSCROLL:
             case VSCROLL:
@@ -110,6 +126,8 @@
                 list3.add(new GridColor(Color.BLACK, "black"));
                 list3.add(new GridColor(Color.YELLOW, "yellow"));
                 return list3;
+            case USE_DEVICE_ORIENTATION:
+                return Arrays.asList(true, false);
         }
         return null;
     }
@@ -135,11 +153,14 @@
             case VSCROLL: return view.getGestureAction(Gesture.SCROLL_VERTICAL);
             case TAP: return view.getGestureAction(Gesture.TAP);
             case LONG_TAP: return view.getGestureAction(Gesture.LONG_TAP);
+            case USE_DEVICE_ORIENTATION: return view.getUseDeviceOrientation();
+            case ENGINE: return view.getEngine();
+            case PREVIEW: return view.getPreview();
         }
         return null;
     }
 
-    public void applyValue(CameraView camera, Object value) {
+    public void applyValue(final CameraView camera, final Object value) {
         switch (this) {
             case WIDTH:
                 camera.getLayoutParams().width = (int) value;
@@ -175,9 +196,61 @@
                 break;
             case GRID_COLOR:
                 camera.setGridColor(((GridColor) value).color);
+                break;
+            case USE_DEVICE_ORIENTATION:
+                camera.setUseDeviceOrientation((Boolean) value);
+                break;
+            case ENGINE:
+                boolean started = camera.isOpened();
+                if (started) {
+                    camera.addCameraListener(new CameraListener() {
+                        @Override
+                        public void onCameraClosed() {
+                            super.onCameraClosed();
+                            camera.removeCameraListener(this);
+                            camera.setEngine((Engine) value);
+                            camera.open();
+                        }
+                    });
+                    camera.close();
+                } else {
+                    camera.setEngine((Engine) value);
+                }
+                break;
+            case PREVIEW:
+                boolean opened = camera.isOpened();
+                if (opened) {
+                    camera.addCameraListener(new CameraListener() {
+                        @Override
+                        public void onCameraClosed() {
+                            super.onCameraClosed();
+                            camera.removeCameraListener(this);
+                            applyPreview(camera, (Preview) value, true);
+                        }
+                    });
+                    camera.close();
+                } else {
+                    applyPreview(camera, (Preview) value, false);
+                }
         }
     }
 
+    // This is really tricky since the preview can only be changed when not attached to window.
+    private void applyPreview(@NonNull CameraView cameraView, @NonNull Preview newPreview, boolean openWhenDone) {
+        ViewGroup.LayoutParams params = cameraView.getLayoutParams();
+        ViewGroup parent = (ViewGroup) cameraView.getParent();
+        int index = 0;
+        for (int i = 0; i < parent.getChildCount(); i++) {
+            if (parent.getChildAt(i) == cameraView) {
+                index = i;
+                break;
+            }
+        }
+        parent.removeView(cameraView);
+        cameraView.setPreview(newPreview);
+        parent.addView(cameraView, index, params);
+        if (openWhenDone) cameraView.open();
+    }
+
     static class GridColor {
         int color;

docs/_posts/2018-12-20-changelog.md

@@ -8,6 +8,10 @@ order: 3
 New versions are released through GitHub, so the reference page is the [GitHub Releases](https://github.com/natario1/CameraView/releases) page.
 
+### v2.0.0-*** (to be released)
+
+- New: `cameraUseDeviceOrientation` XML attribute and `setUseDeviceOrientation()` method to disable considering the device orientation for outputs. ([#497][497])
+
 ### v2.0.0-beta06
 
 - New: Full featured Camera2 integration! Use `cameraExperimental="true"` and `cameraEngine="camera2"` to test this out. ([#490][490])
@@ -65,3 +69,4 @@ This is the first beta release. For changes with respect to v1, please take a lo
 [482]: https://github.com/natario1/CameraView/pull/482
 [484]: https://github.com/natario1/CameraView/pull/484
 [490]: https://github.com/natario1/CameraView/pull/490
+[497]: https://github.com/natario1/CameraView/pull/497

docs/_posts/2018-12-20-more-features.md

@@ -16,7 +16,8 @@ disqus: 1
     app:cameraPlaySounds="true|false"
     app:cameraGrid="off|draw3x3|draw4x4|drawPhi"
     app:cameraGridColor="@color/black"
-    app:cameraAutoFocusResetDelay="0"/>
+    app:cameraAutoFocusResetDelay="0"
+    app:cameraUseDeviceOrientation="true"/>
 ```
 
 ##### cameraPlaySounds
@@ -89,6 +90,18 @@ cameraView.setCameraAutoFocusResetDelay(-1); // NO reset
 cameraView.setCameraAutoFocusResetDelay(Long.MAX_VALUE); // NO reset
 ```
 
+##### cameraUseDeviceOrientation
+
+Controls whether we should consider the device orientation for picture and video outputs.
+This defaults to true, but can be set to false for specific usages, where you don't want the
+output to be rotated based on the device rotation at the moment of capturing.
+Defaults to true.
+
+```java
+cameraView.setUseDeviceOrientation(true); // rotate media
+cameraView.setUseDeviceOrientation(false); // don't
+```
+
 ### UI Orientation
 
 Within a Camera app, it's common to rotate buttons and other UI elements as the device is tilted around.

docs/_posts/2018-12-20-previews.md

@@ -64,4 +64,6 @@ The engine method should only be called when the `CameraView` is closed. Otherwi
 |Method|Description|
 |------|-----------|
 |`setPreview(Preview)`|Sets the preview implementation.|
+|`getPreview()`|Gets the current preview implementation.|
 |`setEngine(Engine)`|Sets the engine implementation.|
+|`getEngine()`|Gets the current engine implementation.|
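
As a usage note (a sketch, not part of the docs diff), the new getters make it easy to guard features that depend on the current configuration, which is exactly what the updated demo does for snapshots:

```java
// Illustrative: snapshots require the GL preview, as enforced in the demo changes above.
if (cameraView.getPreview() == Preview.GL_SURFACE) {
    cameraView.takePictureSnapshot();
}
Engine engine = cameraView.getEngine(); // e.g. Engine.CAMERA1 or Engine.CAMERA2
```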
