Move to PictureResult, rewrote takePicture and takePictureSnapshot

v2
Mattia Iavarone 7 years ago
parent 3486481494
commit a5f940517b
21 changed files (change counts in parentheses):

  1. MIGRATION.md (2)
  2. README.md (18)
  3. cameraview/src/androidTest/java/com/otaliastudios/cameraview/CameraUtilsTest.java (6)
  4. cameraview/src/androidTest/java/com/otaliastudios/cameraview/CameraViewCallbacksTest.java (2)
  5. cameraview/src/androidTest/java/com/otaliastudios/cameraview/CropHelperTest.java (76)
  6. cameraview/src/androidTest/java/com/otaliastudios/cameraview/IntegrationTest.java (6)
  7. cameraview/src/androidTest/java/com/otaliastudios/cameraview/MockCameraController.java (2)
  8. cameraview/src/androidTest/java/com/otaliastudios/cameraview/PictureResultTest.java (43)
  9. cameraview/src/androidTest/java/com/otaliastudios/cameraview/YuvHelperTest.java (64)
  10. cameraview/src/main/java/com/otaliastudios/cameraview/Camera1.java (68)
  11. cameraview/src/main/java/com/otaliastudios/cameraview/Camera2.java (2)
  12. cameraview/src/main/java/com/otaliastudios/cameraview/CameraController.java (4)
  13. cameraview/src/main/java/com/otaliastudios/cameraview/CameraListener.java (12)
  14. cameraview/src/main/java/com/otaliastudios/cameraview/CameraView.java (70)
  15. cameraview/src/main/java/com/otaliastudios/cameraview/PictureResult.java (85)
  16. cameraview/src/main/utils/com/otaliastudios/cameraview/BitmapCallback.java (19)
  17. cameraview/src/main/utils/com/otaliastudios/cameraview/CameraUtils.java (60)
  18. cameraview/src/main/utils/com/otaliastudios/cameraview/CropHelper.java (50)
  19. cameraview/src/main/utils/com/otaliastudios/cameraview/YuvHelper.java (31)
  20. demo/src/main/java/com/otaliastudios/cameraview/demo/CameraActivity.java (3)
  21. demo/src/main/java/com/otaliastudios/cameraview/demo/PicturePreviewActivity.java (4)

@@ -12,3 +12,5 @@
 - getSnapshotSize(): removed. The size of snapshots (pictures and videos) is equal to
   the preview size as returned by getPreviewSize().
 - onVideoTaken(): now passing a VideoResult. Use VideoResult.getFile() to access the video file.
+- CameraUtils.BitmapCallback: has been moved in a separate BitmapCallback class.
+- isCapturingVideo(): renamed to isTakingVideo().

@@ -137,10 +137,13 @@ to handle the image callback.
 ```java
 camera.addCameraListener(new CameraListener() {
     @Override
-    public void onPictureTaken(byte[] picture) {
-        // Create a bitmap or a file...
-        // CameraUtils will read EXIF orientation for you, in a worker thread.
-        CameraUtils.decodeBitmap(picture, ...);
+    public void onPictureTaken(PictureResult result) {
+        // If planning to save a file, just get the jpeg array.
+        byte[] jpeg = result.getJpeg();
+
+        // If planning to show a Bitmap, we will take care of
+        // EXIF rotation and background threading for you...
+        result.asBitmap(maxWidth, maxHeight, callback);
     }
 });

@@ -218,14 +221,13 @@ camera.addCameraListener(new CameraListener() {
      * to decode the byte array taking care about orientation.
      */
     @Override
-    public void onPictureTaken(byte[] picture) {}
+    public void onPictureTaken(PictureResult result) {}
     /**
-     * Notifies that a video capture has just ended. The file parameter is the one that
-     * was passed to takeVideo(File), or a fallback video file.
+     * Notifies that a video capture has just ended.
      */
     @Override
-    public void onVideoTaken(File video) {}
+    public void onVideoTaken(VideoResult result) {}
     /**
      * Notifies that the device was tilted or the window offset changed.
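
For context, a minimal sketch of the new listener in use: the README above only shows the `asBitmap()` path, so this illustrates the "save a file" path it mentions. The destination path and error handling are illustrative assumptions, not library code; only plain java.io is used on the raw jpeg.

```java
camera.addCameraListener(new CameraListener() {
    @Override
    public void onPictureTaken(PictureResult result) {
        // Saving is left to the app; the raw jpeg can be written with plain java.io.
        File file = new File(getFilesDir(), "picture.jpg"); // hypothetical destination
        try {
            FileOutputStream out = new FileOutputStream(file);
            try {
                out.write(result.getJpeg());
            } finally {
                out.close();
            }
        } catch (IOException e) {
            // Handle the write failure as appropriate for the app.
        }
    }
});
```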

@@ -1,15 +1,11 @@
 package com.otaliastudios.cameraview;

-import android.annotation.TargetApi;
-import android.app.Instrumentation;
 import android.content.Context;
 import android.content.pm.PackageManager;
 import android.graphics.Bitmap;
 import android.graphics.Color;
-import android.support.test.InstrumentationRegistry;
 import android.support.test.filters.SmallTest;
-import android.support.test.internal.runner.InstrumentationConnection;
 import android.support.test.runner.AndroidJUnit4;

 import org.junit.Test;

@@ -49,7 +45,7 @@ public class CameraUtilsTest extends BaseTest {
         final byte[] data = os.toByteArray();
         final Task<Bitmap> decode = new Task<>(true);
-        final CameraUtils.BitmapCallback callback = new CameraUtils.BitmapCallback() {
+        final BitmapCallback callback = new BitmapCallback() {
             @Override
             public void onBitmapReady(Bitmap bitmap) {
                 decode.end(bitmap);

@@ -255,7 +255,7 @@ public class CameraViewCallbacksTest extends BaseTest {
                 jpegTask.end((byte[]) invocation.getArguments()[0]);
                 return null;
             }
-        }).when(listener).onPictureTaken(any(byte[].class));
+        }).when(listener).onPictureTaken(any(PictureResult.class));

        // Fake our own dimensions.
        camera.setTop(0);

@@ -1,76 +0,0 @@
package com.otaliastudios.cameraview;
import android.content.Context;
import android.content.pm.PackageManager;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Color;
import android.graphics.Rect;
import android.graphics.YuvImage;
import android.support.test.filters.SmallTest;
import android.support.test.runner.AndroidJUnit4;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import java.io.ByteArrayOutputStream;
import java.io.OutputStream;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyInt;
import static org.mockito.Mockito.anyString;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
@RunWith(AndroidJUnit4.class)
@SmallTest
public class CropHelperTest extends BaseTest {
@Test
public void testCropFromYuv() {
testCropFromYuv(1600, 1600, AspectRatio.of(16, 9));
testCropFromYuv(1600, 1600, AspectRatio.of(9, 16));
}
@Test
public void testCropFromJpeg() {
testCropFromJpeg(1600, 1600, AspectRatio.of(16, 9));
testCropFromJpeg(1600, 1600, AspectRatio.of(9, 16));
}
private void testCropFromYuv(final int w, final int h, final AspectRatio target) {
final boolean wider = target.toFloat() > ((float) w / (float) h);
byte[] b = CropHelper.cropToJpeg(mockYuv(w, h), target, 100);
Bitmap result = BitmapFactory.decodeByteArray(b, 0, b.length);
// Assert.
AspectRatio ratio = AspectRatio.of(result.getWidth(), result.getHeight());
assertEquals(target, ratio);
if (wider) { // width must match.
assertEquals(result.getWidth(), w);
} else {
assertEquals(result.getHeight(), h);
}
}
private void testCropFromJpeg(int w, int h, AspectRatio target) {
final boolean wider = target.toFloat() > ((float) w / (float) h);
byte[] b = CropHelper.cropToJpeg(mockJpeg(w, h), target, 100);
Bitmap result = BitmapFactory.decodeByteArray(b, 0, b.length);
// Assert.
AspectRatio ratio = AspectRatio.of(result.getWidth(), result.getHeight());
assertEquals(target, ratio);
if (wider) { // width must match.
assertEquals(result.getWidth(), w);
} else {
assertEquals(result.getHeight(), h);
}
}
}

@@ -137,7 +137,7 @@ public class IntegrationTest extends BaseTest {
     private byte[] waitForPicture(boolean expectSuccess) {
         final Task<byte[]> pic = new Task<>(true);
-        doEndTask(pic, 0).when(listener).onPictureTaken(any(byte[].class));
+        doEndTask(pic, 0).when(listener).onPictureTaken(any(PictureResult.class));
         byte[] result = pic.await(5000);
         if (expectSuccess) {
             assertNotNull("Can take picture", result);

@@ -517,7 +517,7 @@ public class IntegrationTest extends BaseTest {
         waitForOpen(true);
         CountDownLatch latch = new CountDownLatch(2);
-        doCountDown(latch).when(listener).onPictureTaken(any(byte[].class));
+        doCountDown(latch).when(listener).onPictureTaken(any(PictureResult.class));
         camera.takePicture();
         camera.takePicture();

@@ -553,7 +553,7 @@ public class IntegrationTest extends BaseTest {
         waitForOpen(true);
         CountDownLatch latch = new CountDownLatch(2);
-        doCountDown(latch).when(listener).onPictureTaken(any(byte[].class));
+        doCountDown(latch).when(listener).onPictureTaken(any(PictureResult.class));
         camera.takePictureSnapshot();
         camera.takePictureSnapshot();

@@ -97,7 +97,7 @@ public class MockCameraController extends CameraController {
    }

    @Override
-   void takePictureSnapshot() {
+   void takePictureSnapshot(boolean shouldCrop, AspectRatio viewAspectRatio) {
    }

    @Override

@@ -0,0 +1,43 @@
package com.otaliastudios.cameraview;
import android.location.Location;
import android.support.test.filters.SmallTest;
import android.support.test.runner.AndroidJUnit4;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mockito;
import java.io.File;
import static org.junit.Assert.assertEquals;
@RunWith(AndroidJUnit4.class)
@SmallTest
public class PictureResultTest extends BaseTest {
private PictureResult result = new PictureResult();
@Test
public void testResult() {
int rotation = 90;
Size size = new Size(20, 120);
byte[] jpeg = new byte[]{2, 4, 1, 5, 2};
Location location = Mockito.mock(Location.class);
boolean isSnapshot = true;
result.rotation = rotation;
result.size = size;
result.jpeg = jpeg;
result.location = location;
result.isSnapshot = isSnapshot;
assertEquals(result.getRotation(), rotation);
assertEquals(result.getSize(), size);
assertEquals(result.getJpeg(), jpeg);
assertEquals(result.getLocation(), location);
assertEquals(result.isSnapshot(), isSnapshot);
}
}

@@ -0,0 +1,64 @@
package com.otaliastudios.cameraview;
import android.content.Context;
import android.content.pm.PackageManager;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Color;
import android.graphics.Rect;
import android.graphics.YuvImage;
import android.support.test.filters.SmallTest;
import android.support.test.runner.AndroidJUnit4;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import java.io.ByteArrayOutputStream;
import java.io.OutputStream;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotEquals;
import static org.junit.Assert.assertTrue;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyInt;
import static org.mockito.Mockito.anyString;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
@RunWith(AndroidJUnit4.class)
@SmallTest
public class YuvHelperTest extends BaseTest {
@Test
public void testCrop() {
testCrop(new Size(1600, 1600), AspectRatio.of(16, 16));
testCrop(new Size(1600, 1600), AspectRatio.of(16, 9));
testCrop(new Size(1600, 1600), AspectRatio.of(9, 16));
}
private void testCrop(final Size inSize, final AspectRatio outRatio) {
AspectRatio inRatio = AspectRatio.of(inSize.getWidth(), inSize.getHeight());
Rect out = YuvHelper.computeCrop(inSize, outRatio);
Size outSize = new Size(out.width(), out.height());
assertTrue(outRatio.matches(outSize));
if (outRatio.matches(inSize)) {
// They are equal.
assertEquals(outSize.getWidth(), inSize.getWidth());
assertEquals(outSize.getHeight(), inSize.getHeight());
} else if (outRatio.toFloat() > inRatio.toFloat()) {
// Width must match.
assertEquals(outSize.getWidth(), inSize.getWidth());
assertNotEquals(outSize.getHeight(), inSize.getHeight());
} else {
// Height must match.
assertEquals(outSize.getHeight(), inSize.getHeight());
assertNotEquals(outSize.getWidth(), inSize.getWidth());
}
}
}

@@ -15,8 +15,11 @@ import android.support.annotation.NonNull;
 import android.support.annotation.Nullable;
 import android.support.annotation.WorkerThread;
 import android.util.Log;
+import android.support.media.ExifInterface;
 import android.view.SurfaceHolder;

+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
 import java.io.File;
 import java.io.IOException;
 import java.util.ArrayList;

@@ -509,9 +512,12 @@ class Camera1 extends CameraController implements Camera.PreviewCallback, Camera
         mIsCapturingImage = true;
         final int sensorToOutput = computeSensorToOutputOffset();
-        final int sensorToView = computeSensorToViewOffset();
-        final boolean outputMatchesView = (sensorToOutput + sensorToView + 180) % 180 == 0;
-        final boolean outputFlip = mFacing == Facing.FRONT;
+        int outputWidth = mPictureSize.getWidth();
+        int outputHeight = mPictureSize.getHeight();
+        //noinspection SuspiciousNameCombination
+        final Size outputSize = sensorToOutput % 180 == 0 ?
+                new Size(outputWidth, outputHeight) :
+                new Size(outputHeight, outputWidth);
         Camera.Parameters params = mCamera.getParameters();
         params.setRotation(sensorToOutput);
         mCamera.setParameters(params);

@@ -528,7 +534,21 @@ class Camera1 extends CameraController implements Camera.PreviewCallback, Camera
             @Override
             public void onPictureTaken(byte[] data, final Camera camera) {
                 mIsCapturingImage = false;
-                mCameraCallbacks.processPicture(data, outputMatchesView, outputFlip);
+                int exifRotation;
+                try {
+                    ExifInterface exif = new ExifInterface(new ByteArrayInputStream(data));
+                    int exifOrientation = exif.getAttributeInt(ExifInterface.TAG_ORIENTATION, ExifInterface.ORIENTATION_NORMAL);
+                    exifRotation = CameraUtils.decodeExifOrientation(exifOrientation);
+                } catch (IOException e) {
+                    exifRotation = 0;
+                }
+                PictureResult result = new PictureResult();
+                result.jpeg = data;
+                result.isSnapshot = false;
+                result.location = mLocation;
+                result.rotation = exifRotation;
+                result.size = outputSize;
+                mCameraCallbacks.dispatchOnPictureTaken(result);
                 camera.startPreview(); // This is needed, read somewhere in the docs.
             }
         }

@@ -539,7 +559,7 @@ class Camera1 extends CameraController implements Camera.PreviewCallback, Camera
     @Override
-    void takePictureSnapshot() {
+    void takePictureSnapshot(final boolean shouldCrop, final AspectRatio viewAspectRatio) {
         LOG.v("takePictureSnapshot: scheduling");
         schedule(null, true, new Runnable() {
             @Override

@@ -548,6 +568,7 @@ class Camera1 extends CameraController implements Camera.PreviewCallback, Camera
                 if (mIsCapturingImage) return;
                 // This won't work while capturing a video.
                 // Switch to takePicture.
+                // TODO v2: what to do here?
                 if (mIsCapturingVideo) {
                     takePicture();
                     return;

@@ -555,7 +576,7 @@ class Camera1 extends CameraController implements Camera.PreviewCallback, Camera
                 mIsCapturingImage = true;
                 mCamera.setOneShotPreviewCallback(new Camera.PreviewCallback() {
                     @Override
-                    public void onPreviewFrame(final byte[] data, Camera camera) {
+                    public void onPreviewFrame(final byte[] yuv, Camera camera) {
                         mCameraCallbacks.onShutter(true);

                         // Got to rotate the preview frame, since byte[] data here does not include

@@ -564,22 +585,33 @@ class Camera1 extends CameraController implements Camera.PreviewCallback, Camera
                         final int sensorToOutput = computeSensorToOutputOffset();
                         final int sensorToView = computeSensorToViewOffset();
                         final boolean outputMatchesView = (sensorToOutput + sensorToView + 180) % 180 == 0;
-                        final boolean outputFlip = mFacing == Facing.FRONT;
-                        final boolean flip = sensorToOutput % 180 != 0;
-                        final int preWidth = mPreviewSize.getWidth();
-                        final int preHeight = mPreviewSize.getHeight();
-                        final int postWidth = flip ? preHeight : preWidth;
-                        final int postHeight = flip ? preWidth : preHeight;
+                        final Size originalSize = new Size(mPreviewSize.getWidth(), mPreviewSize.getHeight());
+                        final Size outputSize = sensorToOutput % 180 == 0 ? originalSize : originalSize.flip();
+                        final AspectRatio outputRatio = outputMatchesView ? viewAspectRatio : viewAspectRatio.inverse();
                         final int format = mPreviewFormat;
                         WorkerHandler.run(new Runnable() {
                             @Override
                             public void run() {
-                                LOG.v("takePictureSnapshot: rotating.");
-                                byte[] rotatedData = RotationHelper.rotate(data, preWidth, preHeight, sensorToOutput);
-                                LOG.v("takePictureSnapshot: rotated.");
-                                YuvImage yuv = new YuvImage(rotatedData, format, postWidth, postHeight, null);
-                                mCameraCallbacks.processSnapshot(yuv, outputMatchesView, outputFlip);
+                                // Rotate the picture, because no one will write EXIF data,
+                                // then crop if needed. In both cases, transform yuv to jpeg.
+                                LOG.v("takePictureSnapshot:", "rotating.");
+                                byte[] data = YuvHelper.rotate(yuv, originalSize, sensorToOutput);
+                                YuvImage yuv = new YuvImage(data, format, outputSize.getWidth(), outputSize.getHeight(), null);
+                                LOG.v("takePictureSnapshot:", "rotated. Cropping and transforming to jpeg.");
+                                ByteArrayOutputStream stream = new ByteArrayOutputStream();
+                                Rect outputRect = YuvHelper.computeCrop(outputSize, outputRatio);
+                                yuv.compressToJpeg(outputRect, 90, stream);
+                                data = stream.toByteArray();
+
+                                LOG.v("takePictureSnapshot:", "cropped. Dispatching.");
+                                PictureResult result = new PictureResult();
+                                result.jpeg = data;
+                                result.size = new Size(outputRect.width(), outputRect.height());
+                                result.rotation = 0;
+                                result.location = mLocation;
+                                result.isSnapshot = true;
+                                mCameraCallbacks.dispatchOnPictureTaken(result);
                                 mIsCapturingImage = false;
                             }
                         });

@@ -91,7 +91,7 @@ class Camera2 extends CameraController {
    }

    @Override
-   void takePictureSnapshot() {
+   void takePictureSnapshot(boolean shouldCrop, AspectRatio viewAspectRatio) {
    }

@@ -326,7 +326,7 @@ abstract class CameraController implements
    abstract void takePicture();
-   abstract void takePictureSnapshot();
+   abstract void takePictureSnapshot(boolean shouldCrop, AspectRatio viewAspectRatio);
    abstract void takeVideo(@NonNull File file);

@@ -409,7 +409,7 @@ abstract class CameraController implements
        return mPreviewSize;
    }

-   final boolean isCapturingVideo() {
+   final boolean isTakingVideo() {
        return mIsCapturingVideo;
    }

@@ -49,16 +49,16 @@ public abstract class CameraListener {
    /**
     * Notifies that a picture previously captured with {@link CameraView#takePicture()}
-    * or {@link CameraView#takePictureSnapshot()} is ready to be shown or saved.
+    * or {@link CameraView#takePictureSnapshot()} is ready to be shown or saved to file.
     *
-    * If planning to get a bitmap, you can use {@link CameraUtils#decodeBitmap(byte[], CameraUtils.BitmapCallback)}
-    * to decode the byte array taking care about orientation.
+    * If planning to show a bitmap, you can use {@link PictureResult#asBitmap(int, int, BitmapCallback)}
+    * to decode the byte array taking care about orientation and threading.
     *
-    * @param jpeg captured picture
+    * @param result captured picture
     */
    @UiThread
-   public void onPictureTaken(byte[] jpeg) {
-       // TODO v2: use a PictureResult.
+   public void onPictureTaken(PictureResult result) {
    }

@@ -15,8 +15,6 @@ import android.content.pm.PackageInfo;
 import android.content.pm.PackageManager;
 import android.content.res.TypedArray;
 import android.graphics.PointF;
-import android.graphics.Rect;
-import android.graphics.YuvImage;
 import android.location.Location;
 import android.media.MediaActionSound;
 import android.os.Build;

@@ -29,7 +27,6 @@ import android.view.MotionEvent;
 import android.view.ViewGroup;
 import android.widget.FrameLayout;

-import java.io.ByteArrayOutputStream;
 import java.io.File;
 import java.util.ArrayList;
 import java.util.HashMap;

@@ -1126,7 +1123,7 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
    /**
     * Asks the camera to capture an image of the current scene.
-    * This will trigger {@link CameraListener#onPictureTaken(byte[])} if a listener
+    * This will trigger {@link CameraListener#onPictureTaken(PictureResult)} if a listener
     * was registered.
     *
     * Note that if sessionType is {@link SessionType#VIDEO}, this

@@ -1141,7 +1138,7 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
    /**
     * Asks the camera to capture a snapshot of the current preview.
-    * This eventually triggers {@link CameraListener#onPictureTaken(byte[])} if a listener
+    * This eventually triggers {@link CameraListener#onPictureTaken(PictureResult)} if a listener
     * was registered.
     *
     * The difference with {@link #takePicture()} is that this capture is faster, so it might be

@@ -1150,7 +1147,9 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
     * @see #takePicture()
     */
    public void takePictureSnapshot() {
-       mCameraController.takePictureSnapshot();
+       mCameraController.takePictureSnapshot(
+               mCameraPreview.isCropping(),
+               AspectRatio.of(getWidth(), getHeight()));
    }

@@ -1374,8 +1373,8 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
     * Returns true if the camera is currently recording a video
     * @return boolean indicating if the camera is recording a video
     */
-   public boolean isCapturingVideo(){
-       return mCameraController.isCapturingVideo();
+   public boolean isTakingVideo(){
+       return mCameraController.isTakingVideo();
    }

    //endregion

@@ -1387,9 +1386,8 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
        void dispatchOnCameraClosed();
        void onCameraPreviewSizeChanged();
        void onShutter(boolean shouldPlaySound);
-       void processPicture(byte[] jpeg, boolean consistentWithView, boolean flipHorizontally);
-       void processSnapshot(YuvImage image, boolean consistentWithView, boolean flipHorizontally);
        void dispatchOnVideoTaken(VideoResult result);
+       void dispatchOnPictureTaken(PictureResult result);
        void dispatchOnFocusStart(@Nullable Gesture trigger, PointF where);
        void dispatchOnFocusEnd(@Nullable Gesture trigger, boolean success, PointF where);
        void dispatchOnZoomChanged(final float newValue, final PointF[] fingers);

@@ -1453,62 +1451,14 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
            }
        }

-       /**
-        * What would be great here is to ensure the EXIF tag in the jpeg is consistent with what we expect,
-        * and maybe add flipping when we have been using the front camera.
-        * Unfortunately this is not easy, because
-        * - You can't write EXIF data to a byte[] array, not with support library at least
-        * - You don't know what byte[] is, see {@link android.hardware.Camera.Parameters#setRotation(int)}.
-        *   Sometimes our rotation is encoded in the byte array, sometimes a rotated byte[] is returned.
-        *   Depends on the hardware.
-        *
-        * So for now we ignore flipping.
-        *
-        * @param consistentWithView is the final image (decoded respecting EXIF data) consistent with
-        *                           the view width and height? Or should we flip dimensions to have a
-        *                           consistent measure?
-        * @param flipHorizontally whether this picture should be flipped horizontally after decoding,
-        *                         because it was taken with the front camera.
-        */
-       @Override
-       public void processPicture(final byte[] jpeg, final boolean consistentWithView, final boolean flipHorizontally) {
-           mLogger.i("processPicture");
-           dispatchOnPictureTaken(jpeg);
-           // TODO: remove.
-       }
-
-       @Override
-       public void processSnapshot(final YuvImage yuv, final boolean consistentWithView, boolean flipHorizontally) {
-           mLogger.i("processSnapshot");
-           mWorkerHandler.post(new Runnable() {
-               @Override
-               public void run() {
-                   byte[] jpeg;
-                   if (mCameraPreview.isCropping()) {
-                       int w = consistentWithView ? getWidth() : getHeight();
-                       int h = consistentWithView ? getHeight() : getWidth();
-                       AspectRatio targetRatio = AspectRatio.of(w, h);
-                       mLogger.i("processSnapshot", "is consistent?", consistentWithView);
-                       mLogger.i("processSnapshot", "viewWidth?", getWidth(), "viewHeight?", getHeight());
-                       jpeg = CropHelper.cropToJpeg(yuv, targetRatio, 90);
-                   } else {
-                       ByteArrayOutputStream out = new ByteArrayOutputStream();
-                       yuv.compressToJpeg(new Rect(0, 0, yuv.getWidth(), yuv.getHeight()), 90, out);
-                       jpeg = out.toByteArray();
-                   }
-                   dispatchOnPictureTaken(jpeg);
-               }
-           });
-       }
-
-       private void dispatchOnPictureTaken(byte[] jpeg) {
+       @Override
+       public void dispatchOnPictureTaken(final PictureResult result) {
           mLogger.i("dispatchOnPictureTaken");
-          final byte[] data = jpeg;
           mUiHandler.post(new Runnable() {
               @Override
               public void run() {
                   for (CameraListener listener : mListeners) {
-                      listener.onPictureTaken(data);
+                      listener.onPictureTaken(result);
                   }
               }
           });

@@ -0,0 +1,85 @@
package com.otaliastudios.cameraview;
import android.location.Location;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
/**
* Wraps the picture captured by {@link CameraView#takePicture()} or
* {@link CameraView#takePictureSnapshot()}.
*/
public class PictureResult {
boolean isSnapshot;
Location location;
int rotation;
Size size;
byte[] jpeg;
PictureResult() {}
/**
* Returns whether this result comes from a snapshot.
*
* @return whether this is a snapshot
*/
public boolean isSnapshot() {
return isSnapshot;
}
/**
* Returns geographic information for this picture, if any.
* If it was set, it is also present in the file metadata.
*
* @return a nullable Location
*/
@Nullable
public Location getLocation() {
return location;
}
/**
* Returns the clock-wise rotation that should be applied to the
* picture before displaying. If it is non-zero, it is also present
* in the jpeg EXIF metadata.
*
* @return the clock-wise rotation
*/
public int getRotation() {
return rotation;
}
/**
* Returns the size of the picture after the rotation is applied.
*
* @return the Size of this picture
*/
@NonNull
public Size getSize() {
return size;
}
/**
* Returns the raw jpeg, ready to be saved to file.
*
* @return the jpeg stream
*/
@NonNull
public byte[] getJpeg() {
return jpeg;
}
/**
* Shorthand for {@link CameraUtils#decodeBitmap(byte[], int, int, BitmapCallback)}.
* Decodes this picture on a background thread and posts the result in the UI thread using
* the given callback.
*
* @param maxWidth the max. width of final bitmap
* @param maxHeight the max. height of final bitmap
* @param callback a callback to be notified of image decoding
*/
public void asBitmap(int maxWidth, int maxHeight, BitmapCallback callback) {
CameraUtils.decodeBitmap(getJpeg(), maxWidth, maxHeight, callback);
}
}
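
As a rough illustration of how getJpeg() and getRotation() fit together, here is a hedged sketch of a helper that is not part of the library: it decodes the jpeg with BitmapFactory and bakes the rotation in with a Matrix. asBitmap() and CameraUtils.decodeBitmap() already do the equivalent, plus downscaling and background threading, so this is only for readers who decode manually.

```java
// Hypothetical helper, not library code: decode the jpeg and apply PictureResult.getRotation().
static Bitmap decodeRotated(PictureResult result) {
    byte[] jpeg = result.getJpeg();
    Bitmap source = BitmapFactory.decodeByteArray(jpeg, 0, jpeg.length);
    if (source == null || result.getRotation() == 0) return source;
    Matrix matrix = new Matrix();
    matrix.postRotate(result.getRotation()); // clockwise degrees, as documented above
    Bitmap rotated = Bitmap.createBitmap(source, 0, 0,
            source.getWidth(), source.getHeight(), matrix, true);
    if (rotated != source) source.recycle();
    return rotated;
}
```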

@@ -0,0 +1,19 @@
package com.otaliastudios.cameraview;
import android.graphics.Bitmap;
import android.support.annotation.UiThread;
/**
* Receives callbacks about a bitmap decoding operation.
*/
public interface BitmapCallback {
/**
* Notifies that the bitmap was succesfully decoded.
* This is run on the UI thread.
*
* @param bitmap decoded bitmap
*/
@UiThread
void onBitmapReady(Bitmap bitmap);
}

@@ -172,27 +172,8 @@ public class CameraUtils {
            // http://sylvana.net/jpegcrop/exif_orientation.html
            stream = new ByteArrayInputStream(source);
            ExifInterface exif = new ExifInterface(stream);
-           Integer exifOrientation = exif.getAttributeInt(ExifInterface.TAG_ORIENTATION, ExifInterface.ORIENTATION_NORMAL);
-           switch (exifOrientation) {
-               case ExifInterface.ORIENTATION_NORMAL:
-               case ExifInterface.ORIENTATION_FLIP_HORIZONTAL:
-                   orientation = 0; break;
-               case ExifInterface.ORIENTATION_ROTATE_180:
-               case ExifInterface.ORIENTATION_FLIP_VERTICAL:
-                   orientation = 180; break;
-               case ExifInterface.ORIENTATION_ROTATE_90:
-               case ExifInterface.ORIENTATION_TRANSPOSE:
-                   orientation = 90; break;
-               case ExifInterface.ORIENTATION_ROTATE_270:
-               case ExifInterface.ORIENTATION_TRANSVERSE:
-                   orientation = 270; break;
-               default: orientation = 0;
-           }
+           int exifOrientation = exif.getAttributeInt(ExifInterface.TAG_ORIENTATION, ExifInterface.ORIENTATION_NORMAL);
+           orientation = decodeExifOrientation(exifOrientation);
            flip = exifOrientation == ExifInterface.ORIENTATION_FLIP_HORIZONTAL ||
                    exifOrientation == ExifInterface.ORIENTATION_FLIP_VERTICAL ||
                    exifOrientation == ExifInterface.ORIENTATION_TRANSPOSE ||

@@ -238,6 +219,30 @@ public class CameraUtils {
        return bitmap;
    }

+   static int decodeExifOrientation(int exifOrientation) {
+       int orientation;
+       switch (exifOrientation) {
+           case ExifInterface.ORIENTATION_NORMAL:
+           case ExifInterface.ORIENTATION_FLIP_HORIZONTAL:
+               orientation = 0; break;
+           case ExifInterface.ORIENTATION_ROTATE_180:
+           case ExifInterface.ORIENTATION_FLIP_VERTICAL:
+               orientation = 180; break;
+           case ExifInterface.ORIENTATION_ROTATE_90:
+           case ExifInterface.ORIENTATION_TRANSPOSE:
+               orientation = 90; break;
+           case ExifInterface.ORIENTATION_ROTATE_270:
+           case ExifInterface.ORIENTATION_TRANSVERSE:
+               orientation = 270; break;
+           default: orientation = 0;
+       }
+       return orientation;
+   }
+
    private static int computeSampleSize(int width, int height, int maxWidth, int maxHeight) {
        // https://developer.android.com/topic/performance/graphics/load-bitmap.html

@@ -252,17 +257,4 @@ public class CameraUtils {
    }

-   /**
-    * Receives callbacks about a bitmap decoding operation.
-    */
-   public interface BitmapCallback {
-
-       /**
-        * Notifies that the bitmap was succesfully decoded.
-        * This is run on the UI thread.
-        *
-        * @param bitmap decoded bitmap
-        */
-       @UiThread void onBitmapReady(Bitmap bitmap);
-   }
 }

@@ -1,50 +0,0 @@
package com.otaliastudios.cameraview;
import android.graphics.Bitmap;
import android.graphics.Rect;
import android.graphics.YuvImage;
import java.io.ByteArrayOutputStream;
class CropHelper {
static byte[] cropToJpeg(YuvImage yuv, AspectRatio targetRatio, int jpegCompression) {
Rect crop = computeCrop(yuv.getWidth(), yuv.getHeight(), targetRatio);
ByteArrayOutputStream out = new ByteArrayOutputStream();
yuv.compressToJpeg(crop, jpegCompression, out);
return out.toByteArray();
}
// This reads a rotated Bitmap thanks to CameraUtils. Then crops and returns a byte array.
// In doing so, EXIF data is deleted.
static byte[] cropToJpeg(byte[] jpeg, AspectRatio targetRatio, int jpegCompression) {
Bitmap image = CameraUtils.decodeBitmap(jpeg, Integer.MAX_VALUE, Integer.MAX_VALUE);
Rect cropRect = computeCrop(image.getWidth(), image.getHeight(), targetRatio);
Bitmap crop = Bitmap.createBitmap(image, cropRect.left, cropRect.top, cropRect.width(), cropRect.height());
image.recycle();
ByteArrayOutputStream out = new ByteArrayOutputStream();
crop.compress(Bitmap.CompressFormat.JPEG, jpegCompression, out);
crop.recycle();
return out.toByteArray();
}
private static Rect computeCrop(int currentWidth, int currentHeight, AspectRatio targetRatio) {
AspectRatio currentRatio = AspectRatio.of(currentWidth, currentHeight);
int x, y, width, height;
if (currentRatio.toFloat() > targetRatio.toFloat()) {
height = currentHeight;
width = (int) (height * targetRatio.toFloat());
y = 0;
x = (currentWidth - width) / 2;
} else {
width = currentWidth;
height = (int) (width / targetRatio.toFloat());
y = (currentHeight - height) / 2;
x = 0;
}
return new Rect(x, y, x + width, y + height);
}
}

@@ -1,13 +1,40 @@
 package com.otaliastudios.cameraview;

-class RotationHelper {
+import android.graphics.Rect;

-    static byte[] rotate(final byte[] yuv, final int width, final int height, final int rotation) {
+class YuvHelper {
+
+    static Rect computeCrop(Size currentSize, AspectRatio targetRatio) {
+        int currentWidth = currentSize.getWidth();
+        int currentHeight = currentSize.getHeight();
+        if (targetRatio.matches(currentSize)) {
+            return new Rect(0, 0, currentWidth, currentHeight);
+        }
+
+        // They are not equal. Compute.
+        AspectRatio currentRatio = AspectRatio.of(currentWidth, currentHeight);
+        int x, y, width, height;
+        if (currentRatio.toFloat() > targetRatio.toFloat()) {
+            height = currentHeight;
+            width = (int) (height * targetRatio.toFloat());
+            y = 0;
+            x = (currentWidth - width) / 2;
+        } else {
+            width = currentWidth;
+            height = (int) (width / targetRatio.toFloat());
+            y = (currentHeight - height) / 2;
+            x = 0;
+        }
+        return new Rect(x, y, x + width, y + height);
+    }
+
+    static byte[] rotate(final byte[] yuv, final Size size, final int rotation) {
         if (rotation == 0) return yuv;
         if (rotation % 90 != 0 || rotation < 0 || rotation > 270) {
             throw new IllegalArgumentException("0 <= rotation < 360, rotation % 90 == 0");
         }
+        final int width = size.getWidth();
+        final int height = size.getHeight();
         final byte[] output = new byte[yuv.length];
         final int frameSize = width * height;
         final boolean swap = rotation % 180 != 0;
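
Taken together, the two helpers mirror the snapshot path now implemented in Camera1 above. The following is a condensed, hedged sketch of that pipeline, not the exact library code; the method name and parameters are illustrative, and it assumes NV21 preview data as Camera1 uses:

```java
// Rotate an NV21 preview frame, crop it to the target ratio, and compress to jpeg,
// roughly as Camera1.takePictureSnapshot() does in the hunks above.
static byte[] snapshotToJpeg(byte[] nv21, Size previewSize, int rotation, AspectRatio targetRatio) {
    byte[] rotated = YuvHelper.rotate(nv21, previewSize, rotation);
    Size rotatedSize = rotation % 180 == 0 ? previewSize : previewSize.flip();
    YuvImage image = new YuvImage(rotated, ImageFormat.NV21,
            rotatedSize.getWidth(), rotatedSize.getHeight(), null);
    Rect crop = YuvHelper.computeCrop(rotatedSize, targetRatio);
    ByteArrayOutputStream stream = new ByteArrayOutputStream();
    image.compressToJpeg(crop, 90, stream); // 90 matches the quality used in Camera1
    return stream.toByteArray();
}
```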

@@ -17,6 +17,7 @@ import com.otaliastudios.cameraview.CameraListener;
 import com.otaliastudios.cameraview.CameraLogger;
 import com.otaliastudios.cameraview.CameraOptions;
 import com.otaliastudios.cameraview.CameraView;
+import com.otaliastudios.cameraview.PictureResult;
 import com.otaliastudios.cameraview.SessionType;
 import com.otaliastudios.cameraview.Size;
 import com.otaliastudios.cameraview.VideoResult;

@@ -48,7 +49,7 @@ public class CameraActivity extends AppCompatActivity implements View.OnClickLis
        camera.setLifecycleOwner(this);
        camera.addCameraListener(new CameraListener() {
            public void onCameraOpened(CameraOptions options) { onOpened(); }
-           public void onPictureTaken(byte[] jpeg) { onPicture(jpeg); }
+           public void onPictureTaken(PictureResult result) { onPicture(result); }

            @Override
            public void onVideoTaken(VideoResult result) {

@@ -5,9 +5,9 @@ import android.graphics.Bitmap;
 import android.os.Bundle;
 import android.support.annotation.Nullable;
 import android.widget.ImageView;
-import android.widget.TextView;

 import com.otaliastudios.cameraview.AspectRatio;
+import com.otaliastudios.cameraview.BitmapCallback;
 import com.otaliastudios.cameraview.CameraUtils;

 import java.lang.ref.WeakReference;

@@ -40,7 +40,7 @@ public class PicturePreviewActivity extends Activity {
            return;
        }
-       CameraUtils.decodeBitmap(b, 1000, 1000, new CameraUtils.BitmapCallback() {
+       CameraUtils.decodeBitmap(b, 1000, 1000, new BitmapCallback() {
            @Override
            public void onBitmapReady(Bitmap bitmap) {
                imageView.setImageBitmap(bitmap);
