Fix cropOutput behavior for captureImage(), fix orientation bugs

pull/1/head
Mattia Iavarone 7 years ago
parent 4481c7ad45
commit fa9fba1f90
  1. camerakit/src/main/api16/com/flurgle/camerakit/Camera1.java (40 lines changed)
  2. camerakit/src/main/api21/com/flurgle/camerakit/Camera2.java (10 lines changed)
  3. camerakit/src/main/base/com/flurgle/camerakit/CameraImpl.java (2 lines changed)
  4. camerakit/src/main/java/com/flurgle/camerakit/CameraUtils.java (34 lines changed)
  5. camerakit/src/main/java/com/flurgle/camerakit/CameraView.java (120 lines changed)
  6. camerakit/src/main/java/com/flurgle/camerakit/ExifUtils.java (28 lines changed)
  7. camerakit/src/main/utils/com/flurgle/camerakit/CenterCrop.java (67 lines changed)
  8. camerakit/src/main/utils/com/flurgle/camerakit/CommonAspectRatioFilter.java (39 lines changed)
  9. camerakit/src/main/utils/com/flurgle/camerakit/CropHelper.java (53 lines changed)
  10. demo/src/main/java/com/flurgle/camerakit/demo/MainActivity.java (5 lines changed)
  11. demo/src/main/java/com/flurgle/camerakit/demo/PicturePreviewActivity.java (15 lines changed)

camerakit/src/main/api16/com/flurgle/camerakit/Camera1.java
@@ -150,7 +150,7 @@ class Camera1 extends CameraImpl {
         if (isCameraOpened()) stop();
         if (collectCameraId()) {
             mCamera = Camera.open(mCameraId);
-            mCameraListener.onCameraOpened();
+            mCameraListener.dispatchOnCameraOpened();
             // Set parameters that might have been set before the camera was opened.
             synchronized (mLock) {
@@ -164,7 +164,7 @@ class Camera1 extends CameraImpl {
             }
             // Try starting preview.
-            mCamera.setDisplayOrientation(computeCameraToDisplayOffset()); // <- not allowed during preview
+            mCamera.setDisplayOrientation(computeSensorToDisplayOffset()); // <- not allowed during preview
             if (shouldSetup()) setup();
             collectExtraProperties();
         }
@@ -177,7 +177,7 @@ class Camera1 extends CameraImpl {
         if (isCameraOpened()) {
             mCamera.stopPreview();
             mCamera.release();
-            mCameraListener.onCameraClosed();
+            mCameraListener.dispatchOnCameraClosed();
         }
         mCamera = null;
         mPreviewSize = null;
@@ -347,7 +347,7 @@ class Camera1 extends CameraImpl {
     @Override
-    void setZoom(@ZoomMode int zoom) {
+    void setZoomMode(@ZoomMode int zoom) {
         this.mZoom = zoom;
     }
@@ -370,17 +370,25 @@ class Camera1 extends CameraImpl {
         mIsCapturingImage = true;
         synchronized (mLock) {
             Camera.Parameters parameters = mCamera.getParameters();
-            parameters.setRotation(computeExifRotation());
-            // TODO: add flipping
+            int rotation = computeExifRotation();
+            Log.e(TAG, "Setting exif rotation to "+rotation);
+            parameters.setRotation(rotation);
             mCamera.setParameters(parameters);
         }
+        final int exifRotation = computeExifRotation();
+        final boolean exifFlip = computeExifFlip();
+        final int sensorToDisplay = computeSensorToDisplayOffset();
+        // Is the final picture (decoded respecting EXIF) consistent with CameraView orientation?
+        // We must consider exifRotation to bring back the picture in the sensor world.
+        // Then use sensorToDisplay to move to the display world, where CameraView lives.
+        final boolean consistentWithView = (exifRotation + sensorToDisplay + 180) % 180 == 0;
         mCamera.takePicture(null, null, null,
                 new Camera.PictureCallback() {
                     @Override
                     public void onPictureTaken(byte[] data, Camera camera) {
-                        mCameraListener.onPictureTaken(data);
                         mIsCapturingImage = false;
                         camera.startPreview(); // This is needed, read somewhere in the docs.
+                        mCameraListener.processJpegPicture(data, consistentWithView, exifFlip);
                     }
                 });
     }
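Note: a rough worked example of the consistency check above, with assumed values rather than numbers from a real device. Suppose computeExifRotation() returns 90 and computeSensorToDisplayOffset() returns 90 (a back sensor mounted at 90 degrees, device in natural portrait):

    // (90 + 90 + 180) % 180 == 0  -> consistentWithView is true:
    // the EXIF-decoded jpeg has the same portrait/landscape orientation as the CameraView.
    // If instead exifRotation were 0 with sensorToDisplay still 90,
    // (0 + 90 + 180) % 180 == 90  -> false: width and height must be swapped before comparing ratios.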
@@ -411,7 +419,7 @@ class Camera1 extends CameraImpl {
             public void run() {
                 byte[] rotatedData = RotationHelper.rotate(data, preWidth, preHeight, rotation);
                 YuvImage yuv = new YuvImage(rotatedData, format, postWidth, postHeight, null);
-                mCameraListener.processYuvImage(yuv);
+                mCameraListener.processYuvPicture(yuv);
                 mIsCapturingImage = false;
             }
         }).start();
@@ -452,9 +460,9 @@ class Camera1 extends CameraImpl {
      * Returns how much should the sensor image be rotated before being shown.
      * It is meant to be fed to Camera.setDisplayOrientation().
      */
-    private int computeCameraToDisplayOffset() {
+    private int computeSensorToDisplayOffset() {
         if (mFacing == CameraKit.Constants.FACING_FRONT) {
-            // or: (360 - ((info.orientation + displayOrientation) % 360)) % 360;
+            // or: (360 - ((mSensorOffset + mDisplayOffset) % 360)) % 360;
             return ((mSensorOffset - mDisplayOffset) + 360 + 180) % 360;
         } else {
             return (mSensorOffset - mDisplayOffset + 360) % 360;
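Note: plugging assumed offsets into the formulas above: with mSensorOffset = 90 and mDisplayOffset = 0 (natural portrait), the back-facing branch gives (90 - 0 + 360) % 360 = 90; rotating the activity to landscape (mDisplayOffset = 90) gives (90 - 90 + 360) % 360 = 0, so no extra preview rotation is needed. For a front sensor with mSensorOffset = 270 in portrait, ((270 - 0) + 360 + 180) % 360 = 90.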
@@ -471,6 +479,14 @@ class Camera1 extends CameraImpl {
     }

+    /**
+     * Whether the exif tag should include a 'flip' operation.
+     */
+    private boolean computeExifFlip() {
+        return mFacing == CameraKit.Constants.FACING_FRONT;
+    }
+
     /**
      * This is called either on cameraView.start(), or when the underlying surface changes.
      * It is possible that in the first call the preview surface has not already computed its
@@ -540,7 +556,7 @@ class Camera1 extends CameraImpl {
         mMediaRecorder.release();
         mMediaRecorder = null;
         if (mVideoFile != null) {
-            mCameraListener.onVideoTaken(mVideoFile);
+            mCameraListener.dispatchOnVideoTaken(mVideoFile);
             mVideoFile = null;
         }
     }
@@ -557,7 +573,7 @@ class Camera1 extends CameraImpl {
         mMediaRecorder.setProfile(getCamcorderProfile(mVideoQuality));
         mMediaRecorder.setOutputFile(mVideoFile.getAbsolutePath());
-        mMediaRecorder.setOrientationHint(computeCameraToDisplayOffset()); // TODO is this correct? Should we use exif orientation? Maybe not.
+        mMediaRecorder.setOrientationHint(computeSensorToDisplayOffset()); // TODO is this correct? Should we use exif orientation? Maybe not.
         // Not needed. mMediaRecorder.setPreviewDisplay(mPreview.getSurface());
     }

camerakit/src/main/api21/com/flurgle/camerakit/Camera2.java
@@ -158,7 +158,7 @@ class Camera2 extends CameraImpl {
     }

     @Override
-    void setZoom(@ZoomMode int zoom) {
+    void setZoomMode(@ZoomMode int zoom) {
     }
@@ -203,7 +203,7 @@ class Camera2 extends CameraImpl {
         TreeSet<Size> sizes = new TreeSet<>();
         sizes.addAll(getAvailableCaptureResolutions());
-        TreeSet<AspectRatio> aspectRatios = new CommonAspectRatioFilter(
+        /* TreeSet<AspectRatio> aspectRatios = new CommonAspectRatioFilter(
                 getAvailablePreviewResolutions(),
                 getAvailableCaptureResolutions()
         ).filter();
@@ -217,7 +217,7 @@ class Camera2 extends CameraImpl {
                     mCaptureSize = size;
                     break;
                 }
-            }
+            } */
         }
         return mCaptureSize;
@@ -229,7 +229,7 @@ class Camera2 extends CameraImpl {
         TreeSet<Size> sizes = new TreeSet<>();
         sizes.addAll(getAvailablePreviewResolutions());
-        TreeSet<AspectRatio> aspectRatios = new CommonAspectRatioFilter(
+        /* TreeSet<AspectRatio> aspectRatios = new CommonAspectRatioFilter(
                 getAvailablePreviewResolutions(),
                 getAvailableCaptureResolutions()
         ).filter();
@@ -243,7 +243,7 @@ class Camera2 extends CameraImpl {
                     mPreviewSize = size;
                     break;
                 }
-            }
+            } */
        }
        return mPreviewSize;

camerakit/src/main/base/com/flurgle/camerakit/CameraImpl.java
@@ -26,7 +26,7 @@ abstract class CameraImpl implements PreviewImpl.SurfaceCallback {
     abstract void setFacing(@Facing int facing);
     abstract void setFlash(@Flash int flash);
     abstract void setFocus(@Focus int focus);
-    abstract void setZoom(@ZoomMode int zoom);
+    abstract void setZoomMode(@ZoomMode int zoom);
     abstract void setVideoQuality(@VideoQuality int videoQuality);
     abstract void setWhiteBalance(@WhiteBalance int whiteBalance);
     abstract void setSessionType(@SessionType int sessionType);

camerakit/src/main/java/com/flurgle/camerakit/CameraUtils.java
@@ -51,12 +51,26 @@ public class CameraUtils {
         new Thread(new Runnable() {
             @Override
             public void run() {
-                int orientation = 0;
-                boolean flip = false;
+                final Bitmap bitmap = decodeBitmap(source);
+                ui.post(new Runnable() {
+                    @Override
+                    public void run() {
+                        callback.onBitmapReady(bitmap);
+                    }
+                });
+            }
+        }).start();
+    }
+
+    static Bitmap decodeBitmap(byte[] source) {
+        int orientation;
+        boolean flip;
+        InputStream stream = null;
         try {
             // http://sylvana.net/jpegcrop/exif_orientation.html
-            ExifInterface exif = new ExifInterface(new ByteArrayInputStream(source));
+            stream = new ByteArrayInputStream(source);
+            ExifInterface exif = new ExifInterface(stream);
             Integer exifOrientation = exif.getAttributeInt(ExifInterface.TAG_ORIENTATION, ExifInterface.ORIENTATION_NORMAL);
             switch (exifOrientation) {
                 case ExifInterface.ORIENTATION_NORMAL:
@@ -87,6 +101,10 @@ public class CameraUtils {
             e.printStackTrace();
             orientation = 0;
             flip = false;
+        } finally {
+            if (stream != null) {
+                try { stream.close(); } catch (Exception e) {}
+            }
         }
@@ -99,15 +117,7 @@ public class CameraUtils {
             bitmap = Bitmap.createBitmap(bitmap, 0, 0, bitmap.getWidth(), bitmap.getHeight(), matrix, true);
             temp.recycle();
         }
-        final Bitmap result = bitmap;
-        ui.post(new Runnable() {
-            @Override
-            public void run() {
-                callback.onBitmapReady(result);
-            }
-        });
-            }
-        }).start();
+        return bitmap;
     }

camerakit/src/main/java/com/flurgle/camerakit/CameraView.java
@@ -65,18 +65,18 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
     private final static String TAG = CameraView.class.getSimpleName();

-    private Handler sWorkerHandler;
+    private Handler mWorkerHandler;

     private Handler getWorkerHandler() {
         synchronized (this) {
-            if (sWorkerHandler == null) {
+            if (mWorkerHandler == null) {
                 HandlerThread workerThread = new HandlerThread("CameraViewWorker");
                 workerThread.setDaemon(true);
                 workerThread.start();
-                sWorkerHandler = new Handler(workerThread.getLooper());
+                mWorkerHandler = new Handler(workerThread.getLooper());
             }
         }
-        return sWorkerHandler;
+        return mWorkerHandler;
     }

     private void run(Runnable runnable) {
@@ -94,6 +94,7 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
     @WhiteBalance private int mWhiteBalance;
     private int mJpegQuality;
     private boolean mCropOutput;
+    private int mDisplayOffset;
     private CameraCallbacks mCameraCallbacks;
     private OrientationHelper mOrientationHelper;
     private CameraImpl mCameraImpl;
@@ -138,7 +139,7 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
         setFlash(mFlash);
         setFocus(mFocus);
         setSessionType(mSessionType);
-        setZoom(mZoom);
+        setZoomMode(mZoom);
         setVideoQuality(mVideoQuality);
         setWhiteBalance(mWhiteBalance);
@@ -146,6 +147,7 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
         mOrientationHelper = new OrientationHelper(context) {
             @Override
             public void onDisplayOffsetChanged(int displayOffset) {
+                mDisplayOffset = displayOffset;
                 mCameraImpl.onDisplayOffset(displayOffset);
                 mPreviewImpl.onDisplayOffset(displayOffset);
             }
@@ -464,7 +466,7 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
     public void destroy() {
         mCameraCallbacks.clearListeners(); // Release inner listener.
         // This might be useless, but no time to think about it now.
-        sWorkerHandler = null;
+        mWorkerHandler = null;
     }
@@ -701,16 +703,36 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
     }

+    /**
+     * Sets the zoom mode for the current session.
+     *
+     * @see CameraKit.Constants#ZOOM_OFF
+     * @see CameraKit.Constants#ZOOM_PINCH
+     *
+     * @param zoom the zoom mode
+     */
+    public void setZoomMode(@ZoomMode int zoom) {
+        this.mZoom = zoom;
+        mCameraImpl.setZoomMode(mZoom);
+    }
+
+    /**
+     * Gets the current zoom mode.
+     * @return the current zoom mode
+     */
+    @ZoomMode
+    public int getZoomMode() {
+        return mZoom;
+    }
+
     public void setVideoQuality(@VideoQuality int videoQuality) {
         this.mVideoQuality = videoQuality;
         mCameraImpl.setVideoQuality(mVideoQuality);
     }

-    public void setZoom(@ZoomMode int zoom) {
-        this.mZoom = zoom;
-        mCameraImpl.setZoom(mZoom);
-    }
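Note: with this rename, client code moves from setZoom() to setZoomMode(); a minimal usage sketch, using the zoom constants referenced by the javadoc above:

    cameraView.setZoomMode(CameraKit.Constants.ZOOM_PINCH); // enable pinch-to-zoom gestures
    int current = cameraView.getZoomMode();                  // read the current mode back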
@@ -787,7 +809,11 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
      * @see #captureSnapshot()
      */
     public void captureImage() {
+        if (mSessionType == CameraKit.Constants.SESSION_TYPE_PICTURE) {
             mCameraImpl.captureImage();
+        } else {
+            captureSnapshot();
+        }
     }
@@ -898,18 +924,19 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
         }
     }

-    class CameraCallbacks extends CameraListener {
+    class CameraCallbacks {

         private ArrayList<CameraListener> mListeners;
         private Handler uiHandler;

-        public CameraCallbacks() {
+        CameraCallbacks() {
             mListeners = new ArrayList<>(2);
             uiHandler = new Handler(Looper.getMainLooper());
         }

-        @Override
-        public void onCameraOpened() {
+        public void dispatchOnCameraOpened() {
             uiHandler.post(new Runnable() {
                 @Override
                 public void run() {
@@ -920,8 +947,8 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
             });
         }

-        @Override
-        public void onCameraClosed() {
+        public void dispatchOnCameraClosed() {
             uiHandler.post(new Runnable() {
                 @Override
                 public void run() {
@@ -932,6 +959,7 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
             });
         }

         public void onCameraPreviewSizeChanged() {
             // Camera preview size, as returned by getPreviewSize(), has changed.
             // Request a layout pass for onMeasure() to do its stuff.
@@ -940,36 +968,63 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
             requestLayout();
         }

-        @Override
-        public void onPictureTaken(byte[] jpeg) {
-            if (mCropOutput) {
-                // TODO cropOutput won't work if image is rotated (e.g. byte[] contains exif orientation).
-                AspectRatio outputRatio = AspectRatio.of(getWidth(), getHeight());
-                jpeg = new CenterCrop(jpeg, outputRatio, mJpegQuality).getJpeg();
-            }
-            final byte[] data = jpeg;
-            uiHandler.post(new Runnable() {
+        /**
+         * What would be great here is to ensure the EXIF tag in the jpeg is consistent with what we expect,
+         * and maybe add flipping when we have been using the front camera.
+         * Unfortunately this is not easy, because
+         * - You can't write EXIF data to a byte[] array, not with support library at least
+         * - You don't know what byte[] is, see {@link android.hardware.Camera.Parameters#setRotation(int)}.
+         *   Sometimes our rotation is encoded in the byte array, sometimes a rotated byte[] is returned.
+         *   Depends on the hardware.
+         *
+         * So for now we ignore flipping.
+         *
+         * @param consistentWithView is the final image (decoded respecting EXIF data) consistent with
+         *                           the view width and height? Or should we flip dimensions to have a
+         *                           consistent measure?
+         * @param flipPicture whether this picture should be flipped horizontally after decoding,
+         *                    because it was taken with the front camera.
+         */
+        public void processJpegPicture(final byte[] jpeg, final boolean consistentWithView, final boolean flipPicture) {
+            getWorkerHandler().post(new Runnable() {
                 @Override
                 public void run() {
-                    for (CameraListener listener : mListeners) {
-                        listener.onPictureTaken(data);
+                    byte[] jpeg2 = jpeg;
+                    if (mCropOutput) {
+                        // If consistent, dimensions of the jpeg Bitmap and dimensions of getWidth(), getHeight()
+                        // Live in the same reference system.
+                        AspectRatio targetRatio;
+                        if (consistentWithView) {
+                            targetRatio = AspectRatio.of(getWidth(), getHeight());
+                        } else {
+                            targetRatio = AspectRatio.of(getHeight(), getWidth());
+                        }
+                        Log.e(TAG, "is Consistent? " + consistentWithView);
+                        Log.e(TAG, "viewWidth? " + getWidth() + ", viewHeight? " + getHeight());
+                        jpeg2 = CropHelper.cropToJpeg(jpeg, targetRatio, mJpegQuality);
                     }
+                    dispatchOnPictureTaken(jpeg2);
                 }
             });
         }

-        public void processYuvImage(YuvImage yuv) {
+        public void processYuvPicture(YuvImage yuv) {
             byte[] jpeg;
             if (mCropOutput) {
                 AspectRatio outputRatio = AspectRatio.of(getWidth(), getHeight());
-                jpeg = new CenterCrop(yuv, outputRatio, mJpegQuality).getJpeg();
+                jpeg = CropHelper.cropToJpeg(yuv, outputRatio, mJpegQuality);
             } else {
                 ByteArrayOutputStream out = new ByteArrayOutputStream();
                 yuv.compressToJpeg(new Rect(0, 0, yuv.getWidth(), yuv.getHeight()), mJpegQuality, out);
                 jpeg = out.toByteArray();
             }
+            dispatchOnPictureTaken(jpeg);
+        }
+
+        private void dispatchOnPictureTaken(byte[] jpeg) {
             final byte[] data = jpeg;
             uiHandler.post(new Runnable() {
                 @Override
@@ -981,8 +1036,8 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
             });
         }

-        @Override
-        public void onVideoTaken(final File video) {
+        public void dispatchOnVideoTaken(final File video) {
             uiHandler.post(new Runnable() {
                 @Override
                 public void run() {
@@ -993,14 +1048,17 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
             });
         }

         private void addListener(@NonNull CameraListener cameraListener) {
             mListeners.add(cameraListener);
         }

         private void removeListener(@NonNull CameraListener cameraListener) {
             mListeners.remove(cameraListener);
         }

         private void clearListeners() {
             mListeners.clear();
         }
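Note: a worked example of the ratio flip in processJpegPicture above, under assumed sizes: if the view measures 1080x1920 (portrait) but the decoded jpeg is 4032x3024 (landscape, so not consistent with the view), the crop ratio becomes AspectRatio.of(getHeight(), getWidth()) = 1920:1080, which keeps the centered crop aligned with what the preview showed; when the decoded jpeg is already portrait, AspectRatio.of(getWidth(), getHeight()) is used as-is.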

camerakit/src/main/java/com/flurgle/camerakit/ExifUtils.java (new file)
@@ -0,0 +1,28 @@
+package com.flurgle.camerakit;
+
+import android.support.media.ExifInterface;
+
+class ExifUtils {
+
+    public static int getOrientationTag(int rotation, boolean flip) {
+        switch (rotation) {
+            case 90:
+                return flip ? ExifInterface.ORIENTATION_TRANSPOSE :
+                        ExifInterface.ORIENTATION_ROTATE_90;
+            case 180:
+                return flip ? ExifInterface.ORIENTATION_FLIP_VERTICAL :
+                        ExifInterface.ORIENTATION_ROTATE_180;
+            case 270:
+                return flip ? ExifInterface.ORIENTATION_TRANSVERSE :
+                        ExifInterface.ORIENTATION_ROTATE_270;
+            case 0:
+            default:
+                return flip ? ExifInterface.ORIENTATION_FLIP_HORIZONTAL :
+                        ExifInterface.ORIENTATION_NORMAL;
+        }
+    }
+}
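Note: this commit does not write the tag anywhere yet (see the processJpegPicture comment above, which explains why writing EXIF into a byte[] is not straightforward). As a hypothetical usage sketch only, the support-library ExifInterface can rewrite the orientation tag once the jpeg has been saved to a file; 'file', 'rotation' and 'flip' below are assumed to exist and are not part of this commit:

    try {
        ExifInterface exif = new ExifInterface(file.getAbsolutePath());
        exif.setAttribute(ExifInterface.TAG_ORIENTATION,
                String.valueOf(ExifUtils.getOrientationTag(rotation, flip)));
        exif.saveAttributes(); // persists the edited tag back into the file
    } catch (IOException e) {
        e.printStackTrace();
    }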

camerakit/src/main/utils/com/flurgle/camerakit/CenterCrop.java (deleted)
@@ -1,67 +0,0 @@
-package com.flurgle.camerakit;
-
-import android.graphics.Bitmap;
-import android.graphics.BitmapFactory;
-import android.graphics.BitmapRegionDecoder;
-import android.graphics.Rect;
-import android.graphics.YuvImage;
-import android.util.Log;
-
-import java.io.ByteArrayOutputStream;
-import java.io.IOException;
-
-public class CenterCrop {
-
-    private byte[] croppedJpeg;
-
-    public CenterCrop(YuvImage yuv, AspectRatio targetRatio, int jpegCompression) {
-        Rect crop = getCrop(yuv.getWidth(), yuv.getHeight(), targetRatio);
-        ByteArrayOutputStream out = new ByteArrayOutputStream();
-        yuv.compressToJpeg(crop, jpegCompression, out);
-        this.croppedJpeg = out.toByteArray();
-    }
-
-    public CenterCrop(byte[] jpeg, AspectRatio targetRatio, int jpegCompression) {
-        BitmapFactory.Options options = new BitmapFactory.Options();
-        options.inJustDecodeBounds = true;
-        BitmapFactory.decodeByteArray(jpeg, 0, jpeg.length, options);
-        Rect crop = getCrop(options.outWidth, options.outHeight, targetRatio);
-        try {
-            Bitmap bitmap = BitmapRegionDecoder.newInstance(
-                    jpeg,
-                    0,
-                    jpeg.length,
-                    true
-            ).decodeRegion(crop, null);
-            ByteArrayOutputStream out = new ByteArrayOutputStream();
-            bitmap.compress(Bitmap.CompressFormat.JPEG, jpegCompression, out);
-            this.croppedJpeg = out.toByteArray();
-        } catch (IOException e) {
-            Log.e("CameraKit", e.toString());
-        }
-    }
-
-    private static Rect getCrop(int currentWidth, int currentHeight, AspectRatio targetRatio) {
-        AspectRatio currentRatio = AspectRatio.of(currentWidth, currentHeight);
-        Rect crop;
-        if (currentRatio.toFloat() > targetRatio.toFloat()) {
-            int width = (int) (currentHeight * targetRatio.toFloat());
-            int widthOffset = (currentWidth - width) / 2;
-            crop = new Rect(widthOffset, 0, currentWidth - widthOffset, currentHeight);
-        } else {
-            int height = (int) (currentWidth * targetRatio.inverse().toFloat());
-            int heightOffset = (currentHeight - height) / 2;
-            crop = new Rect(0, heightOffset, currentWidth, currentHeight - heightOffset);
-        }
-        return crop;
-    }
-
-    public byte[] getJpeg() {
-        return croppedJpeg;
-    }
-}

camerakit/src/main/utils/com/flurgle/camerakit/CommonAspectRatioFilter.java (deleted)
@@ -1,39 +0,0 @@
-package com.flurgle.camerakit;
-
-import java.util.HashSet;
-import java.util.List;
-import java.util.Set;
-import java.util.TreeSet;
-
-public class CommonAspectRatioFilter {
-
-    private List<Size> mPreviewSizes;
-    private List<Size> mCaptureSizes;
-
-    public CommonAspectRatioFilter(List<Size> previewSizes, List<Size> captureSizes) {
-        this.mPreviewSizes = previewSizes;
-        this.mCaptureSizes = captureSizes;
-    }
-
-    public TreeSet<AspectRatio> filter() {
-        Set<AspectRatio> previewAspectRatios = new HashSet<>();
-        for (Size size : mPreviewSizes) {
-            previewAspectRatios.add(AspectRatio.of(size.getWidth(), size.getHeight()));
-        }
-
-        Set<AspectRatio> captureAspectRatios = new HashSet<>();
-        for (Size size : mCaptureSizes) {
-            captureAspectRatios.add(AspectRatio.of(size.getWidth(), size.getHeight()));
-        }
-
-        TreeSet<AspectRatio> output = new TreeSet<>();
-        for (AspectRatio aspectRatio : previewAspectRatios) {
-            if (captureAspectRatios.contains(aspectRatio)) {
-                output.add(aspectRatio);
-            }
-        }
-        return output;
-    }
-}

camerakit/src/main/utils/com/flurgle/camerakit/CropHelper.java (new file)
@@ -0,0 +1,53 @@
+package com.flurgle.camerakit;
+
+import android.graphics.Bitmap;
+import android.graphics.BitmapRegionDecoder;
+import android.graphics.Rect;
+import android.graphics.YuvImage;
+import android.util.Log;
+
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+
+public class CropHelper {
+
+    // TODO test this. How is YuvImage? Does it come already well rotated?
+    public static byte[] cropToJpeg(YuvImage yuv, AspectRatio targetRatio, int jpegCompression) {
+        Rect crop = computeCrop(yuv.getWidth(), yuv.getHeight(), targetRatio);
+        ByteArrayOutputStream out = new ByteArrayOutputStream();
+        yuv.compressToJpeg(crop, jpegCompression, out);
+        return out.toByteArray();
+    }
+
+    // This reads a rotated Bitmap thanks to CameraUtils. Then crops and returns a byte array.
+    // In doing so, EXIF data is deleted.
+    public static byte[] cropToJpeg(byte[] jpeg, AspectRatio targetRatio, int jpegCompression) {
+        Bitmap image = CameraUtils.decodeBitmap(jpeg);
+        Log.e("CropHelper", "decoded image has width="+image.getWidth()+", height="+image.getHeight());
+        Rect cropRect = computeCrop(image.getWidth(), image.getHeight(), targetRatio);
+        Bitmap crop = Bitmap.createBitmap(image, cropRect.left, cropRect.top, cropRect.width(), cropRect.height());
+        image.recycle();
+        ByteArrayOutputStream out = new ByteArrayOutputStream();
+        crop.compress(Bitmap.CompressFormat.JPEG, jpegCompression, out);
+        return out.toByteArray();
+    }
+
+    private static Rect computeCrop(int currentWidth, int currentHeight, AspectRatio targetRatio) {
+        AspectRatio currentRatio = AspectRatio.of(currentWidth, currentHeight);
+        int x, y, width, height;
+        if (currentRatio.toFloat() > targetRatio.toFloat()) {
+            height = currentHeight;
+            width = (int) (height * targetRatio.toFloat());
+            y = 0;
+            x = (currentWidth - width) / 2;
+        } else {
+            width = currentWidth;
+            height = (int) (width * targetRatio.inverse().toFloat());
+            y = (currentHeight - height) / 2;
+            x = 0;
+        }
+        return new Rect(x, y, x+width, y+height);
+    }
+}
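Note: a quick check of computeCrop with assumed numbers: cropping a 4000x3000 capture to a 16:9 target gives currentRatio of about 1.33, below 1.78, so the else branch keeps width = 4000, height = 4000 * 9/16 = 2250, x = 0, y = (3000 - 2250) / 2 = 375, i.e. Rect(0, 375, 4000, 2625). Cropping the same capture to 1:1 takes the first branch: height = 3000, width = 3000, x = (4000 - 3000) / 2 = 500, giving Rect(500, 0, 3500, 3000), centered horizontally.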

demo/src/main/java/com/flurgle/camerakit/demo/MainActivity.java
@@ -18,6 +18,7 @@ import android.widget.Toast;
 import com.flurgle.camerakit.CameraKit;
 import com.flurgle.camerakit.CameraListener;
 import com.flurgle.camerakit.CameraView;
+import com.flurgle.camerakit.Size;

 import java.io.File;
@@ -114,6 +115,8 @@ public class MainActivity extends AppCompatActivity implements View.OnLayoutChan
         if (mCapturing) return;
         mCapturing = true;
         final long startTime = System.currentTimeMillis();
+        final Size nativeSize = camera.getSessionType() == CameraKit.Constants.SESSION_TYPE_PICTURE ?
+                camera.getCaptureSize() : camera.getSnapshotSize();
         camera.clearCameraListeners();
         camera.addCameraListener(new CameraListener() {
             @Override
@@ -124,6 +127,8 @@ public class MainActivity extends AppCompatActivity implements View.OnLayoutChan
                 PicturePreviewActivity.setImage(jpeg);
                 Intent intent = new Intent(MainActivity.this, PicturePreviewActivity.class);
                 intent.putExtra("delay", callbackTime-startTime);
+                intent.putExtra("nativeWidth", nativeSize.getWidth());
+                intent.putExtra("nativeHeight", nativeSize.getHeight());
                 startActivity(intent);
             }
         });

demo/src/main/java/com/flurgle/camerakit/demo/PicturePreviewActivity.java
@@ -46,6 +46,8 @@ public class PicturePreviewActivity extends Activity {
         ButterKnife.bind(this);
         final long delay = getIntent().getLongExtra("delay", 0);
+        final int nativeWidth = getIntent().getIntExtra("nativeWidth", 0);
+        final int nativeHeight = getIntent().getIntExtra("nativeHeight", 0);
         byte[] b = image == null ? null : image.get();
         if (b == null) {
             finish();
@@ -56,15 +58,14 @@ public class PicturePreviewActivity extends Activity {
             @Override
             public void onBitmapReady(Bitmap bitmap) {
                 imageView.setImageBitmap(bitmap);
-                // Native sizes are landscape, activity might now. <- not clear what this means but OK
-                // TODO: ncr and ar might be different when cropOutput is true.
-                AspectRatio aspectRatio = AspectRatio.of(bitmap.getHeight(), bitmap.getWidth());
-                nativeCaptureResolution.setText(bitmap.getHeight() + " x " + bitmap.getWidth() + " (" + aspectRatio.toString() + ")");
-                actualResolution.setText(bitmap.getWidth() + " x " + bitmap.getHeight());
                 approxUncompressedSize.setText(getApproximateFileMegabytes(bitmap) + "MB");
                 captureLatency.setText(delay + " milliseconds");
+                // ncr and ar might be different when cropOutput is true.
+                AspectRatio nativeRatio = AspectRatio.of(nativeWidth, nativeHeight);
+                AspectRatio finalRatio = AspectRatio.of(bitmap.getWidth(), bitmap.getHeight());
+                nativeCaptureResolution.setText(nativeWidth + "x" + nativeHeight + " (" + nativeRatio + ")");
+                actualResolution.setText(bitmap.getWidth() + "x" + bitmap.getHeight() + " (" + finalRatio + ")");
             }
         });
