Create PictureRecorder and two implementations

Branch: pull/360/head
Author: Mattia Iavarone (6 years ago)
Parent: 481062a08b
Commit: 419ff34736
10 changed files:
  1. MIGRATION.md (10 lines changed)
  2. cameraview/src/androidTest/java/com/otaliastudios/cameraview/PictureRecorderTest.java (32 lines changed)
  3. cameraview/src/androidTest/java/com/otaliastudios/cameraview/VideoRecorderTest.java (4 lines changed)
  4. cameraview/src/main/java/com/otaliastudios/cameraview/Camera1.java (148 lines changed)
  5. cameraview/src/main/java/com/otaliastudios/cameraview/CameraController.java (8 lines changed)
  6. cameraview/src/main/java/com/otaliastudios/cameraview/FullPictureRecorder.java (70 lines changed)
  7. cameraview/src/main/java/com/otaliastudios/cameraview/PictureRecorder.java (38 lines changed)
  8. cameraview/src/main/java/com/otaliastudios/cameraview/SnapshotPictureRecorder.java (82 lines changed)
  9. cameraview/src/main/java/com/otaliastudios/cameraview/SnapshotVideoRecorder.java (1 line changed)
  10. demo/src/main/res/layout/activity_camera.xml (1 line changed)

MIGRATION.md
@@ -39,12 +39,12 @@
NO maxSize limit.
- New cameraPreview XML attribute lets you choose the backing preview engine (surfaceView, textureView, GlSurfaceView).
The default is GlSurfaceView and it is highly recommended that you do not change this.
- New pictureRecorder interface for picture capturing.
- Created FullPictureRecorder and SnapshotPictureRecorder for capturing HQ pictures and snapshots.
TODO: cameraPreview documentation
TODO: takeVideoSnapshot documentation
TODO: add audio to the video snapshots
TODO: create PictureRecorder interface
create FullPictureRecorder implementation that just uses camera.takePicture
create SnapshotPictureRecorder implementation that, for now, uses camera.setOneShotPreviewCallback
improve SnapshotPictureRecorder so that, if preview is GL, we catch the preview through GLES drawing
this would finally remove the RotationHelper!
TODO: improve SnapshotPictureRecorder so that, if preview is GL, we catch the preview through GLES drawing
this would finally remove the RotationHelper and OOMs!
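As a side note on the GL TODO above: when the preview is GL-backed, the snapshot could be read back from the already-rendered frame instead of going through the YUV path. A minimal sketch of that idea follows; it assumes it runs on the GL thread right after the preview frame has been drawn, and the helper class name and width/height parameters are hypothetical, not part of this commit.

import android.graphics.Bitmap;
import android.graphics.Matrix;
import android.opengl.GLES20;
import java.io.ByteArrayOutputStream;
import java.nio.ByteBuffer;

class GlSnapshotSketch {
    // Hypothetical helper, not part of this commit. Assumes the preview frame has just
    // been drawn into the current EGL surface and we are still on the GL thread.
    static byte[] readCurrentFrameAsJpeg(int width, int height) {
        // Read back the rendered RGBA pixels from the current framebuffer.
        ByteBuffer buffer = ByteBuffer.allocateDirect(width * height * 4);
        GLES20.glReadPixels(0, 0, width, height,
                GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, buffer);
        buffer.rewind();

        Bitmap bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
        bitmap.copyPixelsFromBuffer(buffer);

        // GL's origin is bottom-left, so the frame comes out vertically flipped.
        Matrix flip = new Matrix();
        flip.postScale(1, -1);
        Bitmap upright = Bitmap.createBitmap(bitmap, 0, 0, width, height, flip, true);

        // Compress to JPEG, as the YUV path does, and hand the bytes to the PictureResult.
        ByteArrayOutputStream stream = new ByteArrayOutputStream();
        upright.compress(Bitmap.CompressFormat.JPEG, 90, stream);
        return stream.toByteArray();
    }
}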

cameraview/src/androidTest/java/com/otaliastudios/cameraview/PictureRecorderTest.java
@@ -0,0 +1,32 @@
package com.otaliastudios.cameraview;
import android.support.test.filters.SmallTest;
import android.support.test.runner.AndroidJUnit4;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mockito;
import static org.junit.Assert.assertNull;
@RunWith(AndroidJUnit4.class)
@SmallTest
public class PictureRecorderTest extends BaseTest {
@Test
public void testRecorder() {
PictureResult result = new PictureResult();
PictureRecorder.PictureResultListener listener = Mockito.mock(PictureRecorder.PictureResultListener.class);
PictureRecorder recorder = new PictureRecorder(result, listener) {
void take() {
dispatchResult();
}
};
recorder.take();
Mockito.verify(listener, Mockito.times(1)).onPictureResult(result);
assertNull(recorder.mListener);
assertNull(recorder.mResult);
}
}

cameraview/src/androidTest/java/com/otaliastudios/cameraview/VideoRecorderTest.java
@@ -25,7 +25,9 @@ public class VideoRecorderTest extends BaseTest {
VideoRecorder.VideoResultListener listener = Mockito.mock(VideoRecorder.VideoResultListener.class);
VideoRecorder recorder = new VideoRecorder(result, listener) {
void start() {}
void stop() { }
void stop() {
dispatchResult();
}
};
recorder.start();
recorder.stop();

cameraview/src/main/java/com/otaliastudios/cameraview/Camera1.java
@@ -25,7 +25,9 @@ import java.util.List;
@SuppressWarnings("deprecation")
class Camera1 extends CameraController implements Camera.PreviewCallback, Camera.ErrorCallback, VideoRecorder.VideoResultListener {
class Camera1 extends CameraController implements Camera.PreviewCallback, Camera.ErrorCallback,
VideoRecorder.VideoResultListener,
PictureRecorder.PictureResultListener {
private static final String TAG = Camera1.class.getSimpleName();
private static final CameraLogger LOG = CameraLogger.create(TAG);
@@ -272,8 +274,6 @@ class Camera1 extends CameraController implements Camera.PreviewCallback, Camera
mPreviewSize = null;
mCaptureSize = null;
mIsBound = false;
mIsTakingImage = false;
mIsTakingVideo = false;
LOG.w("onStop:", "Clean up.", "Returning.");
// We were saving a reference to the exception here and throwing to the user.
@@ -442,7 +442,7 @@ class Camera1 extends CameraController implements Camera.PreviewCallback, Camera
@Override
void setAudio(Audio audio) {
if (mAudio != audio) {
if (mIsTakingVideo) {
if (isTakingVideo()) {
LOG.w("Audio setting was changed while recording. " +
"Changes will take place starting from next video");
}
@@ -500,6 +500,25 @@ class Camera1 extends CameraController implements Camera.PreviewCallback, Camera
}
}
// -----------------
// Picture recording stuff.
@Override
public void onPictureShutter(boolean didPlaySound) {
mCameraCallbacks.onShutter(!didPlaySound);
}
@Override
public void onPictureResult(@Nullable PictureResult result) {
mPictureRecorder = null;
if (result != null) {
mCameraCallbacks.dispatchOnPictureTaken(result);
} else {
// Something went wrong.
LOG.e("onPictureResult", "result is null: something went wrong.");
}
}
@Override
void takePicture() {
@@ -511,47 +530,15 @@ class Camera1 extends CameraController implements Camera.PreviewCallback, Camera
throw new IllegalStateException("Can't take hq pictures while in VIDEO mode");
}
LOG.v("takePicture: performing.", mIsTakingImage);
if (mIsTakingImage) return;
mIsTakingImage = true;
final int sensorToOutput = offset(REF_SENSOR, REF_OUTPUT);
final Size outputSize = getPictureSize(REF_OUTPUT);
Camera.Parameters params = mCamera.getParameters();
params.setRotation(sensorToOutput);
mCamera.setParameters(params);
mCamera.takePicture(
new Camera.ShutterCallback() {
@Override
public void onShutter() {
mCameraCallbacks.onShutter(false);
}
},
null,
null,
new Camera.PictureCallback() {
@Override
public void onPictureTaken(byte[] data, final Camera camera) {
mIsTakingImage = false;
int exifRotation;
try {
ExifInterface exif = new ExifInterface(new ByteArrayInputStream(data));
int exifOrientation = exif.getAttributeInt(ExifInterface.TAG_ORIENTATION, ExifInterface.ORIENTATION_NORMAL);
exifRotation = CameraUtils.decodeExifOrientation(exifOrientation);
} catch (IOException e) {
exifRotation = 0;
}
PictureResult result = new PictureResult();
result.jpeg = data;
result.isSnapshot = false;
result.location = mLocation;
result.rotation = exifRotation;
result.size = outputSize;
mCameraCallbacks.dispatchOnPictureTaken(result);
camera.startPreview(); // This is needed, read somewhere in the docs.
}
}
);
LOG.v("takePicture: performing.", isTakingPicture());
if (isTakingPicture()) return;
PictureResult result = new PictureResult();
result.isSnapshot = false;
result.location = mLocation;
result.rotation = offset(REF_SENSOR, REF_OUTPUT);
result.size = getPictureSize(REF_OUTPUT);
mPictureRecorder = new FullPictureRecorder(result, Camera1.this, mCamera);
mPictureRecorder.take();
}
});
}
@@ -563,61 +550,22 @@ class Camera1 extends CameraController implements Camera.PreviewCallback, Camera
schedule(null, true, new Runnable() {
@Override
public void run() {
if (mIsTakingVideo) {
if (isTakingVideo()) {
// TODO v2: what to do here?
// This won't work while capturing a video.
// But we want it to work.
return;
}
LOG.v("takePictureSnapshot: performing.", mIsTakingImage);
if (mIsTakingImage) return;
mIsTakingImage = true;
mCamera.setOneShotPreviewCallback(new Camera.PreviewCallback() {
@Override
public void onPreviewFrame(final byte[] yuv, Camera camera) {
mCameraCallbacks.onShutter(true);
// Got to rotate the preview frame, since byte[] data here does not include
// EXIF tags automatically set by camera. So either we add EXIF, or we rotate.
// Adding EXIF to a byte array, unfortunately, is hard.
final int sensorToOutput = offset(REF_SENSOR, REF_OUTPUT);
final AspectRatio outputRatio = flip(REF_OUTPUT, REF_VIEW) ? viewAspectRatio.inverse() : viewAspectRatio;
final Size outputSize = getPreviewSize(REF_OUTPUT);
final int format = mPreviewFormat;
WorkerHandler.run(new Runnable() {
@Override
public void run() {
// Rotate the picture, because no one will write EXIF data,
// then crop if needed. In both cases, transform yuv to jpeg.
LOG.v("takePictureSnapshot:", "rotating.");
byte[] data = RotationHelper.rotate(yuv, mPreviewSize, sensorToOutput);
YuvImage yuv = new YuvImage(data, format, outputSize.getWidth(), outputSize.getHeight(), null);
LOG.v("takePictureSnapshot:", "rotated. Cropping and transforming to jpeg.");
ByteArrayOutputStream stream = new ByteArrayOutputStream();
Rect outputRect = CropHelper.computeCrop(outputSize, outputRatio);
yuv.compressToJpeg(outputRect, 90, stream);
data = stream.toByteArray();
LOG.v("takePictureSnapshot:", "cropped. Dispatching.");
PictureResult result = new PictureResult();
result.jpeg = data;
result.size = new Size(outputRect.width(), outputRect.height());
result.rotation = 0;
result.location = mLocation;
result.isSnapshot = true;
mCameraCallbacks.dispatchOnPictureTaken(result);
mIsTakingImage = false;
}
});
// It seems that the buffers are already cleared here, so we need to allocate again.
mCamera.setPreviewCallbackWithBuffer(null); // Release anything left
mCamera.setPreviewCallbackWithBuffer(Camera1.this); // Add ourselves
mFrameManager.allocate(ImageFormat.getBitsPerPixel(mPreviewFormat), mPreviewSize);
}
});
LOG.v("takePictureSnapshot: performing.", isTakingPicture());
if (isTakingPicture()) return;
PictureResult result = new PictureResult();
result.location = mLocation;
result.isSnapshot = true;
result.size = getPreviewSize(REF_OUTPUT); // Not the real size: it will be cropped to match the view ratio
result.rotation = offset(REF_SENSOR, REF_OUTPUT); // Actually it will be rotated and set to 0.
AspectRatio outputRatio = flip(REF_OUTPUT, REF_VIEW) ? viewAspectRatio.inverse() : viewAspectRatio;
mPictureRecorder = new SnapshotPictureRecorder(result, Camera1.this, mCamera, outputRatio);
mPictureRecorder.take();
}
});
}
@@ -657,6 +605,7 @@ class Camera1 extends CameraController implements Camera.PreviewCallback, Camera
@Override
public void onVideoResult(@Nullable VideoResult result) {
mVideoRecorder = null;
if (result != null) {
mCameraCallbacks.dispatchOnVideoTaken(result);
} else {
@@ -674,8 +623,7 @@ class Camera1 extends CameraController implements Camera.PreviewCallback, Camera
throw new IllegalStateException("Can't record video while in PICTURE mode");
}
if (mIsTakingVideo) return;
mIsTakingVideo = true;
if (isTakingVideo()) return;
// Create the video result stub
VideoResult videoResult = new VideoResult();
@@ -709,8 +657,7 @@ class Camera1 extends CameraController implements Camera.PreviewCallback, Camera
schedule(mStartVideoTask, true, new Runnable() {
@Override
public void run() {
if (mIsTakingVideo) return;
mIsTakingVideo = true;
if (isTakingVideo()) return;
// Create the video result stub
VideoResult videoResult = new VideoResult();
@@ -768,7 +715,6 @@ class Camera1 extends CameraController implements Camera.PreviewCallback, Camera
schedule(null, false, new Runnable() {
@Override
public void run() {
mIsTakingVideo = false;
LOG.i("stopVideo", "mVideoRecorder is null?", mVideoRecorder == null);
if (mVideoRecorder != null) {
mVideoRecorder.stop();

cameraview/src/main/java/com/otaliastudios/cameraview/CameraController.java
@@ -54,6 +54,7 @@ abstract class CameraController implements
protected Mapper mMapper;
protected FrameManager mFrameManager;
protected SizeSelector mPictureSizeSelector;
protected PictureRecorder mPictureRecorder;
protected SizeSelector mVideoSizeSelector;
protected VideoRecorder mVideoRecorder;
protected long mVideoMaxSize;
@@ -66,9 +67,6 @@ abstract class CameraController implements
private int mDisplayOffset;
private int mDeviceOrientation;
protected boolean mIsTakingImage = false;
protected boolean mIsTakingVideo = false;
protected int mState = STATE_STOPPED;
// Used for testing.
@@ -405,11 +403,11 @@
}
final boolean isTakingVideo() {
return mIsTakingVideo;
return mVideoRecorder != null;
}
final boolean isTakingPicture() {
return mIsTakingImage;
return mPictureRecorder != null;
}
//endregion

cameraview/src/main/java/com/otaliastudios/cameraview/FullPictureRecorder.java
@@ -0,0 +1,70 @@
package com.otaliastudios.cameraview;
import android.hardware.Camera;
import android.media.CamcorderProfile;
import android.media.MediaRecorder;
import android.support.media.ExifInterface;
import java.io.ByteArrayInputStream;
import java.io.IOException;
/**
* A {@link PictureRecorder} that uses the standard {@code Camera.takePicture} API.
*/
class FullPictureRecorder extends PictureRecorder {
private static final String TAG = FullPictureRecorder.class.getSimpleName();
private static final CameraLogger LOG = CameraLogger.create(TAG);
private Camera mCamera;
FullPictureRecorder(PictureResult stub, PictureResultListener listener, Camera camera) {
super(stub, listener);
mCamera = camera;
// We set the rotation in the camera parameters, but we don't know whether the result
// will come back already rotated (with EXIF 0) or unrotated (with a non-zero EXIF tag).
// We will have to read EXIF in the callback.
Camera.Parameters params = mCamera.getParameters();
params.setRotation(mResult.rotation);
mCamera.setParameters(params);
}
// Camera2 constructor here...
@Override
void take() {
mCamera.takePicture(
new Camera.ShutterCallback() {
@Override
public void onShutter() {
dispatchOnShutter(true);
}
},
null,
null,
new Camera.PictureCallback() {
@Override
public void onPictureTaken(byte[] data, final Camera camera) {
int exifRotation;
try {
ExifInterface exif = new ExifInterface(new ByteArrayInputStream(data));
int exifOrientation = exif.getAttributeInt(ExifInterface.TAG_ORIENTATION, ExifInterface.ORIENTATION_NORMAL);
exifRotation = CameraUtils.decodeExifOrientation(exifOrientation);
} catch (IOException e) {
exifRotation = 0;
}
mResult.jpeg = data;
mResult.rotation = exifRotation;
camera.startPreview(); // takePicture() stops the preview; restart it, as documented in Camera.takePicture.
dispatchResult();
}
}
);
}
@Override
protected void dispatchResult() {
mCamera = null;
super.dispatchResult();
}
}
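CameraUtils.decodeExifOrientation is used in take() above but is not part of this diff. As context for the constructor comment (the JPEG may come back pre-rotated with EXIF 0, or unrotated with a non-zero EXIF tag), here is a minimal sketch of what such a mapping typically looks like; the class name and method body are assumptions, not the library's actual code.

import android.support.media.ExifInterface;

class ExifRotationSketch {
    // Assumed shape of CameraUtils.decodeExifOrientation: map the EXIF orientation
    // constant to a clockwise rotation in degrees (mirroring is ignored here).
    static int decodeExifOrientation(int exifOrientation) {
        switch (exifOrientation) {
            case ExifInterface.ORIENTATION_NORMAL:
            case ExifInterface.ORIENTATION_FLIP_HORIZONTAL:
                return 0;
            case ExifInterface.ORIENTATION_ROTATE_90:
            case ExifInterface.ORIENTATION_TRANSPOSE:
                return 90;
            case ExifInterface.ORIENTATION_ROTATE_180:
            case ExifInterface.ORIENTATION_FLIP_VERTICAL:
                return 180;
            case ExifInterface.ORIENTATION_ROTATE_270:
            case ExifInterface.ORIENTATION_TRANSVERSE:
                return 270;
            default:
                return 0;
        }
    }
}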

cameraview/src/main/java/com/otaliastudios/cameraview/PictureRecorder.java
@@ -0,0 +1,38 @@
package com.otaliastudios.cameraview;
import android.support.annotation.Nullable;
/**
* Interface for picture capturing.
* Don't call take() more than once. Don't reuse instances.
*/
abstract class PictureRecorder {
/* tests */ PictureResult mResult;
/* tests */ PictureResultListener mListener;
PictureRecorder(PictureResult stub, PictureResultListener listener) {
mResult = stub;
mListener = listener;
}
abstract void take();
protected void dispatchOnShutter(boolean didPlaySound) {
if (mListener != null) mListener.onPictureShutter(didPlaySound);
}
protected void dispatchResult() {
if (mListener != null) {
mListener.onPictureResult(mResult);
mListener = null;
mResult = null;
}
}
interface PictureResultListener {
void onPictureShutter(boolean didPlaySound);
void onPictureResult(@Nullable PictureResult result);
}
}

cameraview/src/main/java/com/otaliastudios/cameraview/SnapshotPictureRecorder.java
@@ -0,0 +1,82 @@
package com.otaliastudios.cameraview;
import android.graphics.ImageFormat;
import android.graphics.Rect;
import android.graphics.YuvImage;
import android.hardware.Camera;
import java.io.ByteArrayOutputStream;
/**
* A {@link PictureRecorder} that takes a snapshot from a one-shot preview frame
* ({@code Camera.setOneShotPreviewCallback}).
*/
class SnapshotPictureRecorder extends PictureRecorder {
private static final String TAG = SnapshotPictureRecorder.class.getSimpleName();
private static final CameraLogger LOG = CameraLogger.create(TAG);
private Camera1 mController;
private Camera mCamera;
private AspectRatio mOutputRatio;
private Size mSensorPreviewSize;
private int mFormat;
SnapshotPictureRecorder(PictureResult stub, Camera1 controller, Camera camera, AspectRatio viewRatio) {
super(stub, controller);
mController = controller;
mCamera = camera;
mOutputRatio = viewRatio;
mFormat = mController.mPreviewFormat;
mSensorPreviewSize = mController.mPreviewSize;
}
@Override
void take() {
mCamera.setOneShotPreviewCallback(new Camera.PreviewCallback() {
@Override
public void onPreviewFrame(final byte[] yuv, Camera camera) {
dispatchOnShutter(false);
// Got to rotate the preview frame, since byte[] data here does not include
// EXIF tags automatically set by camera. So either we add EXIF, or we rotate.
// Adding EXIF to a byte array, unfortunately, is hard.
final int sensorToOutput = mResult.rotation;
final Size outputSize = mResult.size;
WorkerHandler.run(new Runnable() {
@Override
public void run() {
// Rotate the picture, because no one will write EXIF data,
// then crop if needed. In both cases, transform yuv to jpeg.
byte[] data = RotationHelper.rotate(yuv, mSensorPreviewSize, sensorToOutput);
YuvImage yuv = new YuvImage(data, mFormat, outputSize.getWidth(), outputSize.getHeight(), null);
ByteArrayOutputStream stream = new ByteArrayOutputStream();
Rect outputRect = CropHelper.computeCrop(outputSize, mOutputRatio);
yuv.compressToJpeg(outputRect, 90, stream);
data = stream.toByteArray();
mResult.jpeg = data;
mResult.size = new Size(outputRect.width(), outputRect.height());
mResult.rotation = 0;
dispatchResult();
}
});
// It seems that the buffers are already cleared here, so we need to allocate again.
camera.setPreviewCallbackWithBuffer(null); // Release anything left
camera.setPreviewCallbackWithBuffer(mController); // Add ourselves
mController.mFrameManager.allocate(ImageFormat.getBitsPerPixel(mFormat), mController.mPreviewSize);
}
});
}
@Override
protected void dispatchResult() {
mController = null;
mCamera = null;
mOutputRatio = null;
mFormat = 0;
mSensorPreviewSize = null;
super.dispatchResult();
}
}
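RotationHelper.rotate is called in take() above but is not shown in this diff. To illustrate why the MIGRATION.md TODO wants to replace it with GLES drawing, here is a rough sketch of the kind of 90° clockwise NV21 rotation it presumably performs: each snapshot allocates a second full-size buffer and copies the frame byte by byte. The class name and exact semantics are assumptions.

class Nv21RotationSketch {
    // Assumed 90° clockwise rotation of an NV21 preview frame, similar in spirit to
    // RotationHelper.rotate (whose real implementation is not part of this diff).
    static byte[] rotate90Clockwise(byte[] input, int width, int height) {
        byte[] output = new byte[input.length];
        int i = 0;
        // Y plane: width * height luma bytes.
        for (int x = 0; x < width; x++) {
            for (int y = height - 1; y >= 0; y--) {
                output[i++] = input[y * width + x];
            }
        }
        // Interleaved VU plane: half vertical resolution, one V/U pair per 2x2 block.
        int ySize = width * height;
        for (int x = 0; x < width; x += 2) {
            for (int y = height / 2 - 1; y >= 0; y--) {
                output[i++] = input[ySize + y * width + x];     // V
                output[i++] = input[ySize + y * width + x + 1]; // U
            }
        }
        // The rotated frame has dimensions height x width.
        return output;
    }
}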

cameraview/src/main/java/com/otaliastudios/cameraview/SnapshotVideoRecorder.java
@@ -90,6 +90,7 @@ class SnapshotVideoRecorder extends VideoRecorder implements GLCameraPreview.Ren
type,
EGL14.eglGetCurrentContext()
);
mResult.rotation = 0; // We will rotate the result instead.
mEncoder.startRecording(configuration);
mEncoder.setTextureId(mTextureId);
mCurrentState = STATE_RECORDING;

demo/src/main/res/layout/activity_camera.xml
@@ -16,7 +16,6 @@
android:layout_gravity="center"
android:layout_marginBottom="88dp"
android:keepScreenOn="true"
app:cameraPreview="surface"
app:cameraExperimental="true"
app:cameraPlaySounds="true"
app:cameraGrid="off"
