Use device orientation flag (#497)

* Add cameraUseDeviceOrientation flag

* Add tests for useDeviceOrientation

* Improve demo app

* Add docs

* Fix tests
Branch: pull/501/head
Mattia Iavarone committed 5 years ago (via GitHub)
parent 71f006c6d9
commit dc31c40dd9
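
In short, the commit adds an `app:cameraUseDeviceOrientation` XML attribute plus a matching getter and setter on `CameraView`. A minimal usage sketch based on the API added below (the `R.id.camera` view id is just an assumption for illustration):

```java
CameraView camera = findViewById(R.id.camera); // hypothetical view id
// When false, outputs are no longer rotated to follow the physical device orientation;
// they stay aligned with the view instead.
camera.setUseDeviceOrientation(false);
boolean usesDeviceOrientation = camera.getUseDeviceOrientation(); // false
```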
17 changed files:
  1. cameraview/src/androidTest/java/com/otaliastudios/cameraview/CameraViewTest.java (13 changes)
  2. cameraview/src/androidTest/java/com/otaliastudios/cameraview/preview/MockCameraPreview.java (7 changes)
  3. cameraview/src/main/java/com/otaliastudios/cameraview/CameraOptions.java (3 changes)
  4. cameraview/src/main/java/com/otaliastudios/cameraview/CameraView.java (64 changes)
  5. cameraview/src/main/java/com/otaliastudios/cameraview/engine/Camera1Engine.java (6 changes)
  6. cameraview/src/main/java/com/otaliastudios/cameraview/engine/Camera2Engine.java (31 changes)
  7. cameraview/src/main/java/com/otaliastudios/cameraview/engine/CameraEngine.java (38 changes)
  8. cameraview/src/main/java/com/otaliastudios/cameraview/engine/Step.java (6 changes)
  9. cameraview/src/main/java/com/otaliastudios/cameraview/picture/SnapshotGlPictureRecorder.java (2 changes)
  10. cameraview/src/main/java/com/otaliastudios/cameraview/preview/CameraPreview.java (15 changes)
  11. cameraview/src/main/java/com/otaliastudios/cameraview/video/Full2VideoRecorder.java (31 changes)
  12. cameraview/src/main/res/values/attrs.xml (2 changes)
  13. demo/src/main/java/com/otaliastudios/cameraview/demo/CameraActivity.java (18 changes)
  14. demo/src/main/java/com/otaliastudios/cameraview/demo/Control.java (87 changes)
  15. docs/_posts/2018-12-20-changelog.md (5 changes)
  16. docs/_posts/2018-12-20-more-features.md (15 changes)
  17. docs/_posts/2018-12-20-previews.md (2 changes)

@ -164,6 +164,7 @@ public class CameraViewTest extends BaseTest {
// Self managed
GestureParser gestures = new GestureParser(empty);
assertEquals(cameraView.getPlaySounds(), CameraView.DEFAULT_PLAY_SOUNDS);
assertEquals(cameraView.getUseDeviceOrientation(), CameraView.DEFAULT_USE_DEVICE_ORIENTATION);
assertEquals(cameraView.getGestureAction(Gesture.TAP), gestures.getTapAction());
assertEquals(cameraView.getGestureAction(Gesture.LONG_TAP), gestures.getLongTapAction());
assertEquals(cameraView.getGestureAction(Gesture.PINCH), gestures.getPinchAction());
@ -544,9 +545,17 @@ public class CameraViewTest extends BaseTest {
@Test
public void testSetPlaySounds() {
cameraView.setPlaySounds(true);
assertEquals(cameraView.getPlaySounds(), true);
assertTrue(cameraView.getPlaySounds());
cameraView.setPlaySounds(false);
assertEquals(cameraView.getPlaySounds(), false);
assertFalse(cameraView.getPlaySounds());
}
@Test
public void testSetUseDeviceOrientation() {
cameraView.setUseDeviceOrientation(true);
assertTrue(cameraView.getUseDeviceOrientation());
cameraView.setUseDeviceOrientation(false);
assertFalse(cameraView.getUseDeviceOrientation());
}
@Test

@ -15,6 +15,8 @@ public class MockCameraPreview extends CameraPreview<View, Void> {
super(context, parent);
}
private View rootView;
@Override
public boolean supportsCropping() {
return true;
@ -23,7 +25,8 @@ public class MockCameraPreview extends CameraPreview<View, Void> {
@NonNull
@Override
protected View onCreateView(@NonNull Context context, @NonNull ViewGroup parent) {
return new View(context);
rootView = new View(context);
return rootView;
}
@NonNull
@ -42,6 +45,6 @@ public class MockCameraPreview extends CameraPreview<View, Void> {
@NonNull
@Override
public View getRootView() {
return null;
return rootView;
}
}

@ -20,6 +20,7 @@ import com.otaliastudios.cameraview.controls.Control;
import com.otaliastudios.cameraview.controls.Engine;
import com.otaliastudios.cameraview.controls.Facing;
import com.otaliastudios.cameraview.controls.Flash;
import com.otaliastudios.cameraview.controls.Preview;
import com.otaliastudios.cameraview.engine.Mapper;
import com.otaliastudios.cameraview.gesture.GestureAction;
import com.otaliastudios.cameraview.controls.Grid;
@ -309,6 +310,8 @@ public class CameraOptions {
return (Collection<T>) getSupportedWhiteBalance();
} else if (controlClass.equals(Engine.class)) {
return (Collection<T>) Arrays.asList(Engine.values());
} else if (controlClass.equals(Preview.class)) {
return (Collection<T>) Arrays.asList(Preview.values());
}
// Unrecognized control.
return Collections.emptyList();
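
With `Preview` (and `Engine`) handled here, their supported values can be enumerated like any other control. A hedged sketch, assuming the `CameraOptions` instance delivered to `CameraListener.onCameraOpened()`:

```java
camera.addCameraListener(new CameraListener() {
    @Override
    public void onCameraOpened(@NonNull CameraOptions options) {
        // Both calls route through the branches added above.
        Collection<Preview> previews = options.getSupportedControls(Preview.class);
        Collection<Engine> engines = options.getSupportedControls(Engine.class);
        // Typically previews = [SURFACE, TEXTURE, GL_SURFACE] and engines = [CAMERA1, CAMERA2].
    }
});
```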

@ -6,6 +6,7 @@ import android.annotation.TargetApi;
import android.app.Activity;
import androidx.annotation.VisibleForTesting;
import androidx.core.view.ViewCompat;
import androidx.lifecycle.Lifecycle;
import androidx.lifecycle.LifecycleObserver;
import androidx.lifecycle.LifecycleOwner;
@ -98,9 +99,11 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
final static long DEFAULT_AUTOFOCUS_RESET_DELAY_MILLIS = 3000;
final static boolean DEFAULT_PLAY_SOUNDS = true;
final static boolean DEFAULT_USE_DEVICE_ORIENTATION = true;
// Self managed parameters
private boolean mPlaySounds;
private boolean mUseDeviceOrientation;
private HashMap<Gesture, GestureAction> mGestureMap = new HashMap<>(4);
private Preview mPreview;
private Engine mEngine;
@ -152,6 +155,7 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
// Self managed
boolean playSounds = a.getBoolean(R.styleable.CameraView_cameraPlaySounds, DEFAULT_PLAY_SOUNDS);
boolean useDeviceOrientation = a.getBoolean(R.styleable.CameraView_cameraUseDeviceOrientation, DEFAULT_USE_DEVICE_ORIENTATION);
mExperimental = a.getBoolean(R.styleable.CameraView_cameraExperimental, false);
mPreview = controls.getPreview();
mEngine = controls.getEngine();
@ -192,6 +196,7 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
// Apply self managed
setPlaySounds(playSounds);
setUseDeviceOrientation(useDeviceOrientation);
setGrid(controls.getGrid());
setGridColor(gridColor);
@ -640,7 +645,7 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
if (checkPermissions(getAudio())) {
// Update display orientation for current CameraEngine
mOrientationHelper.enable(getContext());
mCameraEngine.setDisplayOffset(mOrientationHelper.getDisplayOffset());
mCameraEngine.getAngles().setDisplayOffset(mOrientationHelper.getDisplayOffset());
mCameraEngine.start();
}
}
@ -778,9 +783,26 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
* @param preview desired preview engine
*/
public void setPreview(@NonNull Preview preview) {
boolean isNew = preview != mPreview;
if (!isNew) return;
mPreview = preview;
if (!ViewCompat.isAttachedToWindow(this) && mCameraPreview != null) {
// Null the preview: will create another when re-attaching.
mCameraPreview.onDestroy();
mCameraPreview = null;
}
}
/**
* Returns the current preview control.
*
* @see #setPreview(Preview)
* @return the current preview control
*/
@NonNull
public Preview getPreview() {
return mPreview;
}
/**
* Controls the core engine. Should only be called
@ -816,6 +838,16 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
setAutoFocusResetDelay(oldEngine.getAutoFocusResetDelay());
}
/**
* Returns the current engine control.
*
* @see #setEngine(Engine)
* @return the current engine control
*/
@NonNull
public Engine getEngine() {
return mEngine;
}
/**
* Returns a {@link CameraOptions} instance holding supported options for this camera
@ -1648,6 +1680,27 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
return mPlaySounds;
}
/**
* Controls whether picture and video output should consider the current device orientation.
* For example, when true, if the user rotates the device before taking a picture, the picture
* will be rotated as well.
*
* @param useDeviceOrientation true to consider device orientation for outputs
*/
public void setUseDeviceOrientation(boolean useDeviceOrientation) {
mUseDeviceOrientation = useDeviceOrientation;
}
/**
* Gets the current behavior for considering the device orientation when returning picture
* or video outputs.
*
* @see #setUseDeviceOrientation(boolean)
* @return whether we are using the device orientation for outputs
*/
public boolean getUseDeviceOrientation() {
return mUseDeviceOrientation;
}
/**
* Sets the encoder for video recordings.
@ -1890,8 +1943,15 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
@Override
public void onDeviceOrientationChanged(int deviceOrientation) {
mLogger.i("onDeviceOrientationChanged", deviceOrientation);
mCameraEngine.setDeviceOrientation(deviceOrientation);
int displayOffset = mOrientationHelper.getDisplayOffset();
if (!mUseDeviceOrientation) {
// To fool the engine into returning outputs in the VIEW reference system,
// the device orientation should be set to -displayOffset.
int fakeDeviceOrientation = (360 - displayOffset) % 360;
mCameraEngine.getAngles().setDeviceOrientation(fakeDeviceOrientation);
} else {
mCameraEngine.getAngles().setDeviceOrientation(deviceOrientation);
}
final int value = (deviceOrientation + displayOffset) % 360;
mUiHandler.post(new Runnable() {
@Override

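A worked example of the branch above, as a hedged sketch: with a display offset of 90 degrees, disabling `useDeviceOrientation` feeds the engine (360 - 90) % 360 = 270, i.e. -displayOffset modulo 360, so the physical device rotation no longer influences the output rotation.

```java
// Hedged sketch of the math above, not library code.
int displayOffset = 90;                                  // e.g. a landscape display
int fakeDeviceOrientation = (360 - displayOffset) % 360; // 270, i.e. -90 mod 360
// Reporting -displayOffset as the device orientation makes the engine return
// outputs in the VIEW reference system, regardless of how the device is held.
```
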
@ -123,7 +123,7 @@ public class Camera1Engine extends CameraEngine implements
for (int i = 0, count = Camera.getNumberOfCameras(); i < count; i++) {
Camera.getCameraInfo(i, cameraInfo);
if (cameraInfo.facing == internalFacing) {
setSensorOffset(facing, cameraInfo.orientation);
getAngles().setSensorOffset(facing, cameraInfo.orientation);
mCameraId = i;
return true;
}
@ -351,13 +351,11 @@ public class Camera1Engine extends CameraEngine implements
throw new IllegalStateException("Video snapshots are only supported starting from API 18.");
}
GlCameraPreview glPreview = (GlCameraPreview) mPreview;
// Output size is easy:
Size outputSize = getUncroppedSnapshotSize(Reference.OUTPUT);
if (outputSize == null) {
throw new IllegalStateException("outputSize should not be null.");
}
AspectRatio outputRatio = getAngles().flip(Reference.OUTPUT, Reference.VIEW) ? viewAspectRatio.flip() : viewAspectRatio;
AspectRatio outputRatio = getAngles().flip(Reference.VIEW, Reference.OUTPUT) ? viewAspectRatio.flip() : viewAspectRatio;
Rect outputCrop = CropHelper.computeCrop(outputSize, outputRatio);
outputSize = new Size(outputCrop.width(), outputCrop.height());
stub.size = outputSize;
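
As a hedged illustration of the sizing logic above (the concrete numbers are assumptions): when the VIEW to OUTPUT transformation involves a 90 or 270 degree rotation, the view aspect ratio is flipped before computing the crop.

```java
// Hedged sketch, not library code. Assume the uncropped snapshot, in the OUTPUT
// reference, is 1920x1440 (4:3) while the view shows a 9:16 portrait ratio, and
// that VIEW -> OUTPUT involves a 90-degree rotation, so flip() applies.
Size outputSize = new Size(1920, 1440);
AspectRatio outputRatio = AspectRatio.of(9, 16).flip();      // 16:9, in the OUTPUT reference
Rect crop = CropHelper.computeCrop(outputSize, outputRatio); // 1920x1080
outputSize = new Size(crop.width(), crop.height());          // final snapshot size
```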

@ -19,7 +19,6 @@ import android.hardware.camera2.params.StreamConfigurationMap;
import android.location.Location;
import android.media.Image;
import android.media.ImageReader;
import android.media.MediaCodec;
import android.os.Build;
import android.util.Rational;
import android.view.Surface;
@ -96,8 +95,7 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
private Surface mPreviewStreamSurface;
// Video recording
private Surface mFullVideoPersistentSurface; // API 23+. The surface is created before.
private VideoResult.Stub mFullVideoPendingStub; // API 21-22. When takeVideo is called, we have to reset the session.
private VideoResult.Stub mFullVideoPendingStub; // When takeVideo is called, we have to reset the session.
// Picture capturing
private ImageReader mPictureReader;
@ -302,7 +300,7 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
if (internalFacing == readCharacteristic(characteristics, CameraCharacteristics.LENS_FACING, -99)) {
mCameraId = cameraId;
int sensorOffset = readCharacteristic(characteristics, CameraCharacteristics.SENSOR_ORIENTATION, 0);
setSensorOffset(facing, sensorOffset);
getAngles().setSensorOffset(facing, sensorOffset);
return true;
}
} catch (CameraAccessException ignore) {
@ -411,11 +409,8 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
// 2. VIDEO RECORDING
if (getMode() == Mode.VIDEO) {
if (Full2VideoRecorder.SUPPORTS_PERSISTENT_SURFACE) {
mFullVideoPersistentSurface = MediaCodec.createPersistentInputSurface();
outputSurfaces.add(mFullVideoPersistentSurface);
} else if (mFullVideoPendingStub != null) {
Full2VideoRecorder recorder = new Full2VideoRecorder(this, mCameraId, null);
if (mFullVideoPendingStub != null) {
Full2VideoRecorder recorder = new Full2VideoRecorder(this, mCameraId);
try {
outputSurfaces.add(recorder.createInputSurface(mFullVideoPendingStub));
} catch (Full2VideoRecorder.PrepareException e) {
@ -567,10 +562,6 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
@Override
protected Task<Void> onStopBind() {
LOG.i("onStopBind:", "About to clean up.");
if (mFullVideoPersistentSurface != null) {
mFullVideoPersistentSurface.release();
mFullVideoPersistentSurface = null;
}
mFrameProcessingSurface = null;
mPreviewStreamSurface = null;
mPreviewStreamSize = null;
@ -667,19 +658,17 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
LOG.i("onTakeVideo", "called.");
stub.rotation = getAngles().offset(Reference.SENSOR, Reference.OUTPUT, Axis.RELATIVE_TO_SENSOR);
stub.size = getAngles().flip(Reference.SENSOR, Reference.OUTPUT) ? mCaptureSize.flip() : mCaptureSize;
if (!Full2VideoRecorder.SUPPORTS_PERSISTENT_SURFACE) {
// On API 21 and 22, we must restart the session at each time.
// We must restart the session each time.
// Save the pending data and restart the session.
LOG.w("onTakeVideo", "calling restartBind.");
mFullVideoPendingStub = stub;
restartBind();
} else {
doTakeVideo(stub);
}
}
private void doTakeVideo(@NonNull final VideoResult.Stub stub) {
mVideoRecorder = new Full2VideoRecorder(this, mCameraId, mFullVideoPersistentSurface);
if (!(mVideoRecorder instanceof Full2VideoRecorder)) {
mVideoRecorder = new Full2VideoRecorder(this, mCameraId);
}
Full2VideoRecorder recorder = (Full2VideoRecorder) mVideoRecorder;
try {
createRepeatingRequestBuilder(CameraDevice.TEMPLATE_RECORD);
@ -706,13 +695,11 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
throw new IllegalStateException("Video snapshots are only supported with GlCameraPreview.");
}
GlCameraPreview glPreview = (GlCameraPreview) mPreview;
// Output size is easy:
Size outputSize = getUncroppedSnapshotSize(Reference.OUTPUT);
if (outputSize == null) {
throw new IllegalStateException("outputSize should not be null.");
}
AspectRatio outputRatio = getAngles().flip(Reference.OUTPUT, Reference.VIEW) ? viewAspectRatio.flip() : viewAspectRatio;
AspectRatio outputRatio = getAngles().flip(Reference.VIEW, Reference.OUTPUT) ? viewAspectRatio.flip() : viewAspectRatio;
Rect outputCrop = CropHelper.computeCrop(outputSize, outputRatio);
outputSize = new Size(outputCrop.width(), outputCrop.height());
stub.size = outputSize;

@ -70,7 +70,7 @@ import java.util.concurrent.TimeUnit;
* So at the end of both step 1 and 2, the engine should check if both have
* been performed and trigger the steps 3 and 4.
*
* We use an abstraction for each step called {@link CameraEngineStep} that manages the state of
* We use an abstraction for each step called {@link Step} that manages the state of
* each step and ensures that start and stop operations, for each step, are never called if the
* previous one has not ended.
*
@ -140,11 +140,11 @@ public abstract class CameraEngine implements
private static final CameraLogger LOG = CameraLogger.create(TAG);
@SuppressWarnings({"WeakerAccess", "unused"})
public static final int STATE_STOPPING = CameraEngineStep.STATE_STOPPING;
public static final int STATE_STOPPED = CameraEngineStep.STATE_STOPPED;
public static final int STATE_STOPPING = Step.STATE_STOPPING;
public static final int STATE_STOPPED = Step.STATE_STOPPED;
@SuppressWarnings({"WeakerAccess", "unused"})
public static final int STATE_STARTING = CameraEngineStep.STATE_STARTING;
public static final int STATE_STARTED = CameraEngineStep.STATE_STARTED;
public static final int STATE_STARTING = Step.STATE_STARTING;
public static final int STATE_STARTED = Step.STATE_STARTED;
// Need to be protected
@SuppressWarnings("WeakerAccess") protected WorkerHandler mHandler;
@ -185,16 +185,17 @@ public abstract class CameraEngine implements
private int mSnapshotMaxHeight = Integer.MAX_VALUE; // in REF_VIEW for consistency with SizeSelectors
// Steps
private final CameraEngineStep.Callback mStepCallback = new CameraEngineStep.Callback() {
private final Step.Callback mStepCallback = new Step.Callback() {
@Override @NonNull public Executor getExecutor() { return mHandler.getExecutor(); }
@Override public void handleException(@NonNull Exception exception) {
CameraEngine.this.handleException(Thread.currentThread(), exception, false);
}
};
@VisibleForTesting CameraEngineStep mEngineStep = new CameraEngineStep("engine", mStepCallback);
private CameraEngineStep mBindStep = new CameraEngineStep("bind", mStepCallback);
private CameraEngineStep mPreviewStep = new CameraEngineStep("preview", mStepCallback);
private CameraEngineStep mAllStep = new CameraEngineStep("all", mStepCallback);
@VisibleForTesting
Step mEngineStep = new Step("engine", mStepCallback);
private Step mBindStep = new Step("bind", mStepCallback);
private Step mPreviewStep = new Step("preview", mStepCallback);
private Step mAllStep = new Step("all", mStepCallback);
// Ops used for testing.
@VisibleForTesting Op<Void> mStartVideoOp = new Op<>();
@ -785,21 +786,6 @@ public abstract class CameraEngine implements
return mAngles;
}
@SuppressWarnings("WeakerAccess")
protected final void setSensorOffset(@NonNull Facing facing, int sensorOffset) {
mAngles.setSensorOffset(facing, sensorOffset);
}
// This is called before start() and never again.
public final void setDisplayOffset(int displayOffset) {
mAngles.setDisplayOffset(displayOffset);
}
// This can be called multiple times.
public final void setDeviceOrientation(int deviceOrientation) {
mAngles.setDeviceOrientation(deviceOrientation);
}
public final void setPreviewStreamSizeSelector(@Nullable SizeSelector selector) {
mPreviewStreamSizeSelector = selector;
}
@ -1018,7 +1004,7 @@ public abstract class CameraEngine implements
* Camera is about to be opened. Implementors should look into available cameras
* and see if anyone matches the given {@link Facing value}.
*
* If so, implementors should set {@link #setSensorOffset(Facing, int)} and any other information
* If so, implementors should set {@link Angles#setSensorOffset(Facing, int)} and any other information
* (like camera ID) needed to start the engine.
*
* @param facing the facing value

@ -34,9 +34,9 @@ import androidx.annotation.VisibleForTesting;
*
* <strong>This class is NOT thread safe!</string>
*/
class CameraEngineStep {
class Step {
private static final String TAG = CameraEngineStep.class.getSimpleName();
private static final String TAG = Step.class.getSimpleName();
private static final CameraLogger LOG = CameraLogger.create(TAG);
interface Callback {
@ -59,7 +59,7 @@ class CameraEngineStep {
private final String name;
private final Callback callback;
CameraEngineStep(@NonNull String name, @NonNull Callback callback) {
Step(@NonNull String name, @NonNull Callback callback) {
this.name = name.toUpperCase();
this.callback = callback;
}

@ -114,7 +114,7 @@ public class SnapshotGlPictureRecorder extends PictureRecorder {
Matrix.scaleM(mTransform, 0, realScaleX, realScaleY, 1);
// Fix rotation:
// TODO Not sure why we need the minus here... It makes no sense to me.
// Not sure why we need the minus here... It makes no sense to me.
LOG.w("Recording frame. Rotation:", mResult.rotation, "Actual:", -mResult.rotation);
int rotation = -mResult.rotation;
mResult.rotation = 0;

@ -7,6 +7,7 @@ import androidx.annotation.VisibleForTesting;
import android.view.View;
import android.view.ViewGroup;
import android.view.ViewParent;
import com.otaliastudios.cameraview.CameraLogger;
import com.otaliastudios.cameraview.engine.CameraEngine;
@ -186,8 +187,10 @@ public abstract class CameraPreview<T extends View, Output> {
if (mOutputSurfaceWidth > 0 && mOutputSurfaceHeight > 0) {
crop(mCropOp);
}
if (mSurfaceCallback != null) {
mSurfaceCallback.onSurfaceAvailable();
}
}
/**
* Subclasses can call this to notify that the surface has changed.
@ -203,9 +206,11 @@ public abstract class CameraPreview<T extends View, Output> {
if (width > 0 && height > 0) {
crop(mCropOp);
}
if (mSurfaceCallback != null) {
mSurfaceCallback.onSurfaceChanged();
}
}
}
/**
* Subclasses can call this to notify that the surface has been destroyed.
@ -214,8 +219,10 @@ public abstract class CameraPreview<T extends View, Output> {
protected final void dispatchOnSurfaceDestroyed() {
mOutputSurfaceWidth = 0;
mOutputSurfaceHeight = 0;
if (mSurfaceCallback != null) {
mSurfaceCallback.onSurfaceDestroyed();
}
}
/**
* Called by the hosting {@link com.otaliastudios.cameraview.CameraView},
@ -233,7 +240,13 @@ public abstract class CameraPreview<T extends View, Output> {
* Called by the hosting {@link com.otaliastudios.cameraview.CameraView},
* this is a lifecycle event.
*/
public void onDestroy() {}
public void onDestroy() {
View root = getRootView();
ViewParent parent = root.getParent();
if (parent instanceof ViewGroup) {
((ViewGroup) parent).removeView(root);
}
}
/**
* Here we must crop the visible part by applying a scale greater than 1 to one of our

@ -1,14 +1,6 @@
package com.otaliastudios.cameraview.video;
import android.annotation.SuppressLint;
import android.graphics.SurfaceTexture;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CaptureFailure;
import android.hardware.camera2.CaptureRequest;
import android.media.Image;
import android.media.MediaActionSound;
import android.media.MediaRecorder;
import android.os.Build;
import android.view.Surface;
@ -33,21 +25,13 @@ public class Full2VideoRecorder extends FullVideoRecorder {
private static final String TAG = Full2VideoRecorder.class.getSimpleName();
private static final CameraLogger LOG = CameraLogger.create(TAG);
// This actually didn't work as I expected, we're never using this. Should remove.
@SuppressWarnings("PointlessBooleanExpression") // TODO
public static final boolean SUPPORTS_PERSISTENT_SURFACE = false && Build.VERSION.SDK_INT >= 23;
private final String mCameraId;
private Surface mInputSurface;
private final boolean mUseInputSurface;
public Full2VideoRecorder(@NonNull Camera2Engine engine,
@NonNull String cameraId,
@Nullable Surface surface) {
@NonNull String cameraId) {
super(engine);
mCameraId = cameraId;
mUseInputSurface = surface != null && SUPPORTS_PERSISTENT_SURFACE;
mInputSurface = mUseInputSurface ? surface : null;
}
@SuppressLint("NewApi")
@ -56,10 +40,10 @@ public class Full2VideoRecorder extends FullVideoRecorder {
mediaRecorder.setVideoSource(MediaRecorder.VideoSource.SURFACE);
Size size = stub.rotation % 180 != 0 ? stub.size.flip() : stub.size;
mProfile = CamcorderProfiles.get(mCameraId, size);
if (mUseInputSurface) {
//noinspection ConstantConditions
mediaRecorder.setInputSurface(mInputSurface);
}
// This was an option: get the surface from outside this class, using MediaCodec.createPersistentInputSurface()
// But it doesn't really help since the Camera2 engine refuses a surface that has not been configured,
// so even with that trick we would have to attach the surface to this recorder before creating the CameraSession.
// mediaRecorder.setInputSurface(mInputSurface);
return super.onPrepareMediaRecorder(stub, mediaRecorder);
}
@ -72,9 +56,6 @@ public class Full2VideoRecorder extends FullVideoRecorder {
*/
@NonNull
public Surface createInputSurface(@NonNull VideoResult.Stub stub) throws PrepareException {
if (mUseInputSurface) {
throw new IllegalStateException("We are using the input surface (API23+), can't createInputSurface here.");
}
if (!prepareMediaRecorder(stub)) {
throw new PrepareException(mError);
}
@ -87,8 +68,6 @@ public class Full2VideoRecorder extends FullVideoRecorder {
return mInputSurface;
}
@SuppressWarnings("WeakerAccess")
public class PrepareException extends Exception {
private PrepareException(Throwable cause) {
super(cause);

@ -131,5 +131,7 @@
<attr name="cameraAutoFocusMarker" format="string|reference"/>
<attr name="cameraUseDeviceOrientation" format="boolean"/>
</declare-styleable>
</resources>

@ -21,6 +21,7 @@ import com.otaliastudios.cameraview.CameraView;
import com.otaliastudios.cameraview.PictureResult;
import com.otaliastudios.cameraview.controls.Mode;
import com.otaliastudios.cameraview.VideoResult;
import com.otaliastudios.cameraview.controls.Preview;
import com.otaliastudios.cameraview.frame.Frame;
import com.otaliastudios.cameraview.frame.FrameProcessor;
import com.otaliastudios.cameraview.size.SizeSelectors;
@ -173,6 +174,10 @@ public class CameraActivity extends AppCompatActivity implements View.OnClickLis
private void capturePictureSnapshot() {
if (camera.isTakingPicture()) return;
if (camera.getPreview() != Preview.GL_SURFACE) {
message("Picture snapshots are only allowed with the GL_SURFACE preview.", true);
return;
}
mCaptureTime = System.currentTimeMillis();
message("Capturing picture snapshot...", false);
camera.takePictureSnapshot();
@ -193,6 +198,10 @@ public class CameraActivity extends AppCompatActivity implements View.OnClickLis
message("Already taking video.", false);
return;
}
if (camera.getPreview() != Preview.GL_SURFACE) {
message("Video snapshots are only allowed with the GL_SURFACE preview.", true);
return;
}
message("Recording snapshot for 5 seconds...", true);
camera.takeVideoSnapshot(new File(getFilesDir(), "video.mp4"), 5000);
}
@ -212,10 +221,11 @@ public class CameraActivity extends AppCompatActivity implements View.OnClickLis
@Override
public boolean onValueChanged(Control control, Object value, String name) {
if (!camera.isHardwareAccelerated() && (control == Control.WIDTH || control == Control.HEIGHT)) {
if ((Integer) value > 0) {
message("This device does not support hardware acceleration. " +
"In this case you can not change width or height. " +
if ((control == Control.WIDTH || control == Control.HEIGHT)) {
Preview preview = camera.getPreview();
boolean wrapContent = (Integer) value == ViewGroup.LayoutParams.WRAP_CONTENT;
if (preview == Preview.SURFACE && !wrapContent) {
message("The SurfaceView preview does not support width or height changes. " +
"The view will act as WRAP_CONTENT by default.", true);
return false;
}

@ -5,10 +5,13 @@ import androidx.annotation.NonNull;
import android.view.View;
import android.view.ViewGroup;
import com.otaliastudios.cameraview.CameraListener;
import com.otaliastudios.cameraview.controls.Audio;
import com.otaliastudios.cameraview.CameraOptions;
import com.otaliastudios.cameraview.CameraView;
import com.otaliastudios.cameraview.controls.Engine;
import com.otaliastudios.cameraview.controls.Flash;
import com.otaliastudios.cameraview.controls.Preview;
import com.otaliastudios.cameraview.gesture.Gesture;
import com.otaliastudios.cameraview.gesture.GestureAction;
import com.otaliastudios.cameraview.controls.Grid;
@ -18,6 +21,7 @@ import com.otaliastudios.cameraview.controls.VideoCodec;
import com.otaliastudios.cameraview.controls.WhiteBalance;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
@ -26,28 +30,38 @@ import java.util.List;
*/
public enum Control {
// Layout
WIDTH("Width", false),
HEIGHT("Height", true),
// Some controls
MODE("Mode", false),
FLASH("Flash", false),
WHITE_BALANCE("White balance", false),
HDR("Hdr", true),
GRID("Grid lines", false),
GRID_COLOR("Grid color", true),
// Engine and preview
ENGINE("Engine", false),
PREVIEW("Preview Surface", true),
// TODO audio bitRate
// TODO video bitRate
// THey are a bit annoying because it's not clear what the default should be.
// Video recording
VIDEO_CODEC("Video codec", false),
AUDIO("Audio", true),
// TODO audio bitRate
// TODO video bitRate
// They are a bit annoying because it's not clear what the default should be.
// Gestures
PINCH("Pinch", false),
HSCROLL("Horizontal scroll", false),
VSCROLL("Vertical scroll", false),
TAP("Single tap", false),
LONG_TAP("Long tap", true);
LONG_TAP("Long tap", true),
// Others
GRID("Grid lines", false),
GRID_COLOR("Grid color", false),
USE_DEVICE_ORIENTATION("Use device orientation", true);
private String name;
private boolean last;
@ -88,6 +102,8 @@ public enum Control {
case GRID: return options.getSupportedControls(Grid.class);
case AUDIO: return options.getSupportedControls(Audio.class);
case VIDEO_CODEC: return options.getSupportedControls(VideoCodec.class);
case ENGINE: return options.getSupportedControls(Engine.class);
case PREVIEW: return options.getSupportedControls(Preview.class);
case PINCH:
case HSCROLL:
case VSCROLL:
@ -110,6 +126,8 @@ public enum Control {
list3.add(new GridColor(Color.BLACK, "black"));
list3.add(new GridColor(Color.YELLOW, "yellow"));
return list3;
case USE_DEVICE_ORIENTATION:
return Arrays.asList(true, false);
}
return null;
}
@ -135,11 +153,14 @@ public enum Control {
case VSCROLL: return view.getGestureAction(Gesture.SCROLL_VERTICAL);
case TAP: return view.getGestureAction(Gesture.TAP);
case LONG_TAP: return view.getGestureAction(Gesture.LONG_TAP);
case USE_DEVICE_ORIENTATION: return view.getUseDeviceOrientation();
case ENGINE: return view.getEngine();
case PREVIEW: return view.getPreview();
}
return null;
}
public void applyValue(CameraView camera, Object value) {
public void applyValue(final CameraView camera, final Object value) {
switch (this) {
case WIDTH:
camera.getLayoutParams().width = (int) value;
@ -175,9 +196,61 @@ public enum Control {
break;
case GRID_COLOR:
camera.setGridColor(((GridColor) value).color);
break;
case USE_DEVICE_ORIENTATION:
camera.setUseDeviceOrientation((Boolean) value);
break;
case ENGINE:
boolean started = camera.isOpened();
if (started) {
camera.addCameraListener(new CameraListener() {
@Override
public void onCameraClosed() {
super.onCameraClosed();
camera.removeCameraListener(this);
camera.setEngine((Engine) value);
camera.open();
}
});
camera.close();
} else {
camera.setEngine((Engine) value);
}
break;
case PREVIEW:
boolean opened = camera.isOpened();
if (opened) {
camera.addCameraListener(new CameraListener() {
@Override
public void onCameraClosed() {
super.onCameraClosed();
camera.removeCameraListener(this);
applyPreview(camera, (Preview) value, true);
}
});
camera.close();
} else {
applyPreview(camera, (Preview) value, false);
}
}
}
// This is really tricky since the preview can only be changed when not attached to window.
private void applyPreview(@NonNull CameraView cameraView, @NonNull Preview newPreview, boolean openWhenDone) {
ViewGroup.LayoutParams params = cameraView.getLayoutParams();
ViewGroup parent = (ViewGroup) cameraView.getParent();
int index = 0;
for (int i = 0; i < parent.getChildCount(); i++) {
if (parent.getChildAt(i) == cameraView) {
index = i;
break;
}
}
parent.removeView(cameraView);
cameraView.setPreview(newPreview);
parent.addView(cameraView, index, params);
if (openWhenDone) cameraView.open();
}
static class GridColor {
int color;

@ -8,6 +8,10 @@ order: 3
New versions are released through GitHub, so the reference page is the [GitHub Releases](https://github.com/natario1/CameraView/releases) page.
### v2.0.0-*** (to be released)
- New: `cameraUseDeviceOrientation` XML attribute and `setUseDeviceOrientation()` method to disable considering the device orientation for outputs. ([#497][497])
### v2.0.0-beta06
- New: Full featured Camera2 integration! Use `cameraExperimental="true"` and `cameraEngine="camera2"` to test this out. ([#490][490])
@ -65,3 +69,4 @@ This is the first beta release. For changes with respect to v1, please take a lo
[482]: https://github.com/natario1/CameraView/pull/482
[484]: https://github.com/natario1/CameraView/pull/484
[490]: https://github.com/natario1/CameraView/pull/490
[497]: https://github.com/natario1/CameraView/pull/497

@ -16,7 +16,8 @@ disqus: 1
app:cameraPlaySounds="true|false"
app:cameraGrid="off|draw3x3|draw4x4|drawPhi"
app:cameraGridColor="@color/black"
app:cameraAutoFocusResetDelay="0"/>
app:cameraAutoFocusResetDelay="0"
app:cameraUseDeviceOrientation="true"/>
```
##### cameraPlaySounds
@ -89,6 +90,18 @@ cameraView.setCameraAutoFocusResetDelay(-1); // NO reset
cameraView.setCameraAutoFocusResetDelay(Long.MAX_VALUE); // NO reset
```
##### cameraUseDeviceOrientation
Controls whether the device orientation should be considered for picture and video outputs.
This defaults to true, but can be set to false when you don't want the output to be rotated
based on the device rotation at the moment of capture.
```java
cameraView.setUseDeviceOrientation(true); // rotate media
cameraView.setUseDeviceOrientation(false); // don't
```
### UI Orientation
Within a Camera app, it's common to rotate buttons and other UI elements as the device is tilted around.

@ -64,4 +64,6 @@ The engine method should only be called when the `CameraView` is closed. Otherwi
|Method|Description|
|------|-----------|
|`setPreview(Preview)`|Sets the preview implementation.|
|`getPreview()`|Gets the current preview implementation.|
|`setEngine(Engine)`|Sets the engine implementation.|
|`getEngine()`|Gets the current engine implementation.|
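
Since the engine should only be changed while the camera is closed, the demo's `Control.applyValue()` above suggests a close-then-swap pattern; a hedged sketch (the `Engine.CAMERA2` constant matches the `cameraEngine="camera2"` value mentioned in the changelog):

```java
// Hedged sketch mirroring the demo: close, swap the engine, then reopen.
camera.addCameraListener(new CameraListener() {
    @Override
    public void onCameraClosed() {
        camera.removeCameraListener(this);
        camera.setEngine(Engine.CAMERA2);
        camera.open();
    }
});
camera.close();
```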
