Facing now defaults to FRONT if we have no BACK cameras. Throw a new CameraException if no camera found

v2
Mattia Iavarone 6 years ago
parent 8b2c85fe0a
commit 662641d6db
  1. MIGRATION.md (6 changed lines)
  2. cameraview/src/androidTest/java/com/otaliastudios/cameraview/CameraViewTest.java (2 changed lines)
  3. cameraview/src/androidTest/java/com/otaliastudios/cameraview/VideoResultTest.java (12 changed lines)
  4. cameraview/src/main/java/com/otaliastudios/cameraview/Camera1.java (10 changed lines)
  5. cameraview/src/main/java/com/otaliastudios/cameraview/CameraException.java (7 changed lines)
  6. cameraview/src/main/java/com/otaliastudios/cameraview/CameraView.java (2 changed lines)
  7. cameraview/src/main/java/com/otaliastudios/cameraview/FullVideoRecorder.java (7 changed lines)
  8. cameraview/src/main/java/com/otaliastudios/cameraview/SnapshotVideoRecorder.java (18 changed lines)
  9. cameraview/src/main/java/com/otaliastudios/cameraview/VideoResult.java (11 changed lines)
  10. cameraview/src/main/options/com/otaliastudios/cameraview/Facing.java (17 changed lines)
  11. demo/src/main/java/com/otaliastudios/cameraview/demo/CameraActivity.java (3 changed lines)
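Taken together, the hunks below mean the default Facing is now resolved against the cameras that actually exist, and startup on a camera-less device fails with a dedicated CameraException reason. A minimal sketch of the resulting behavior from the app side (the view id and the cast are assumptions, not part of this commit):

```java
// Sketch only. On a front-only device the default resolved at inflation time
// is now FRONT; on a device with no cameras, startup fails with
// CameraException.REASON_NO_CAMERA (see the CameraException hunk below).
CameraView cameraView = (CameraView) findViewById(R.id.camera);
Facing resolved = cameraView.getFacing();
// CameraUtils.hasCameraFacing() is the same helper Facing.DEFAULT(context) uses.
boolean available = CameraUtils.hasCameraFacing(this, resolved);
```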

@@ -47,7 +47,11 @@
TODO: document this
- VideoSnapshots now record audio according to your Audio setting
- VideoSnapshots now respect maxDuration and maxSize limits
- Added videoFrameRate to the videoResult
- TODO: cameraPreview documentation
- TODO: takeVideoSnapshot documentation
- New setVideoBitRate() and setAudioBitRate() (and XML too) options, accepting bits per second values.
TODO: document them
TODO: document them
TODO: revisit the demo app, change the controls appearance, add missing controls,
add all information from the VideoResult in the VideoPreviewActivity, same for pictures
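
The setVideoBitRate()/setAudioBitRate() entry above is the only prose description of the new options in this commit. A hedged usage sketch with example values (the note says matching XML attributes exist, but their exact names are not shown here):

```java
// Bits per second, as stated in the MIGRATION note. Values are examples.
cameraView.setVideoBitRate(2000000); // ~2 Mbps video
cameraView.setAudioBitRate(64000);   // 64 kbps audio
// Leaving them unset (0) lets the recorders pick defaults; see the
// FullVideoRecorder and SnapshotVideoRecorder hunks below.
```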

@@ -107,7 +107,7 @@ public class CameraViewTest extends BaseTest {
public void testDefaults() {
// CameraController
assertEquals(cameraView.getFlash(), Flash.DEFAULT);
assertEquals(cameraView.getFacing(), Facing.DEFAULT);
assertEquals(cameraView.getFacing(), Facing.DEFAULT(context()));
assertEquals(cameraView.getGrid(), Grid.DEFAULT);
assertEquals(cameraView.getWhiteBalance(), WhiteBalance.DEFAULT);
assertEquals(cameraView.getMode(), Mode.DEFAULT);

@@ -32,6 +32,10 @@ public class VideoResultTest extends BaseTest {
int maxDuration = 1234;
long maxFileSize = 500000;
int reason = VideoResult.REASON_MAX_DURATION_REACHED;
int videoFrameRate = 30;
int videoBitRate = 300000;
int audioBitRate = 30000;
Audio audio = Audio.ON;
result.file = file;
result.rotation = rotation;
@@ -42,6 +46,10 @@ public class VideoResultTest extends BaseTest {
result.maxDuration = maxDuration;
result.maxSize = maxFileSize;
result.endReason = reason;
result.videoFrameRate = videoFrameRate;
result.videoBitRate = videoBitRate;
result.audioBitRate = audioBitRate;
result.audio = audio;
assertEquals(result.getFile(), file);
assertEquals(result.getRotation(), rotation);
@@ -52,5 +60,9 @@ public class VideoResultTest extends BaseTest {
assertEquals(result.getMaxSize(), maxFileSize);
assertEquals(result.getMaxDuration(), maxDuration);
assertEquals(result.getTerminationReason(), reason);
assertEquals(result.getVideoFrameRate(), videoFrameRate);
assertEquals(result.getVideoBitRate(), videoBitRate);
assertEquals(result.getAudioBitRate(), audioBitRate);
assertEquals(result.getAudio(), audio);
}
}

@@ -12,6 +12,7 @@ import android.os.Build;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import android.support.annotation.WorkerThread;
import android.util.Log;
import android.view.SurfaceHolder;
import java.io.File;
@@ -248,6 +249,9 @@ class Camera1 extends CameraController implements Camera.PreviewCallback, Camera
if (shouldBindToSurface()) bindToSurface();
if (shouldStartPreview()) startPreview("onStart");
LOG.i("onStart:", "Ended");
} else {
LOG.e("onStart:", "No camera available for facing", mFacing);
throw new CameraException(CameraException.REASON_NO_CAMERA);
}
}
@@ -280,6 +284,7 @@ class Camera1 extends CameraController implements Camera.PreviewCallback, Camera
private boolean collectCameraId() {
int internalFacing = mMapper.map(mFacing);
LOG.i("collectCameraId", "Facing:", mFacing, "Internal:", internalFacing, "Cameras:", Camera.getNumberOfCameras());
Camera.CameraInfo cameraInfo = new Camera.CameraInfo();
for (int i = 0, count = Camera.getNumberOfCameras(); i < count; i++) {
Camera.getCameraInfo(i, cameraInfo);
@@ -360,13 +365,16 @@ class Camera1 extends CameraController implements Camera.PreviewCallback, Camera
@Override
void setFacing(Facing facing) {
if (facing != mFacing) {
final Facing old = mFacing;
if (facing != old) {
mFacing = facing;
schedule(null, true, new Runnable() {
@Override
public void run() {
if (collectCameraId()) {
restart();
} else {
mFacing = old;
}
}
});
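
The setFacing() hunk above keeps the previous value so the controller can roll back when collectCameraId() finds no camera for the requested Facing. From the application side this roughly behaves as follows (a sketch using the public getFacing()/setFacing() accessors; the rollback runs on the controller thread, so it is only visible once the scheduled task has executed):

```java
// Sketch of the rollback behavior introduced above.
Facing before = cameraView.getFacing();
cameraView.setFacing(Facing.FRONT);
// ... after the scheduled task has run:
// getFacing() returns FRONT if a front camera exists,
// otherwise it returns 'before' because mFacing was restored.
```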

@@ -42,6 +42,13 @@ public class CameraException extends RuntimeException {
*/
public static final int REASON_VIDEO_FAILED = 5;
/**
* Indicates that we could not find a camera for the current {@link Facing}
* value.
* This can be solved by changing the facing value and starting again.
*/
public static final int REASON_NO_CAMERA = 6;
private int reason = REASON_UNKNOWN;
CameraException(Throwable cause) {

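A sketch of how an app could react to the new reason, assuming the existing CameraListener.onCameraError() callback and the CameraException.getReason() accessor:

```java
cameraView.addCameraListener(new CameraListener() {
    @Override
    public void onCameraError(@NonNull CameraException exception) {
        if (exception.getReason() == CameraException.REASON_NO_CAMERA) {
            // No camera for any Facing value on this device:
            // hide the camera UI rather than retrying or switching facing.
        }
    }
});
```
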
@@ -99,7 +99,7 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
mPreview = Preview.fromValue(a.getInteger(R.styleable.CameraView_cameraPreview, Preview.DEFAULT.value()));
// Camera controller params
Facing facing = Facing.fromValue(a.getInteger(R.styleable.CameraView_cameraFacing, Facing.DEFAULT.value()));
Facing facing = Facing.fromValue(a.getInteger(R.styleable.CameraView_cameraFacing, Facing.DEFAULT(context).value()));
Flash flash = Flash.fromValue(a.getInteger(R.styleable.CameraView_cameraFlash, Flash.DEFAULT.value()));
Grid grid = Grid.fromValue(a.getInteger(R.styleable.CameraView_cameraGrid, Grid.DEFAULT.value()));
WhiteBalance whiteBalance = WhiteBalance.fromValue(a.getInteger(R.styleable.CameraView_cameraWhiteBalance, WhiteBalance.DEFAULT.value()));

@@ -38,7 +38,12 @@ class FullVideoRecorder extends VideoRecorder {
Size size = mResult.getRotation() % 180 != 0 ? mResult.getSize().flip() : mResult.getSize();
mMediaRecorder.setOutputFormat(mProfile.fileFormat);
mMediaRecorder.setVideoFrameRate(mProfile.videoFrameRate);
if (mResult.videoFrameRate <= 0) {
mMediaRecorder.setVideoFrameRate(mProfile.videoFrameRate);
mResult.videoFrameRate = mProfile.videoFrameRate;
} else {
mMediaRecorder.setVideoFrameRate(mResult.videoFrameRate);
}
mMediaRecorder.setVideoSize(size.getWidth(), size.getHeight());
switch (mResult.getCodec()) {
case H_263: mMediaRecorder.setVideoEncoder(MediaRecorder.VideoEncoder.H263); break;

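The frame-rate branch above follows the same resolve-or-default pattern already used for the bit rates: honor a positive user value, otherwise take the CamcorderProfile default, and in either case record the effective value so VideoResult reports what was really used. Isolated, the pattern is simply (illustrative helper, not library code):

```java
// Illustrative only: returns the value that gets handed to MediaRecorder
// and written back into the result.
static int resolveFrameRate(int requested, CamcorderProfile profile) {
    return requested > 0 ? requested : profile.videoFrameRate;
}
```
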
@@ -17,6 +17,10 @@ class SnapshotVideoRecorder extends VideoRecorder implements GlCameraPreview.Ren
private static final String TAG = SnapshotVideoRecorder.class.getSimpleName();
private static final CameraLogger LOG = CameraLogger.create(TAG);
private static final int DEFAULT_VIDEO_FRAMERATE = 30;
private static final int DEFAULT_VIDEO_BITRATE = 1000000;
private static final int DEFAULT_AUDIO_BITRATE = 64000;
private static final int STATE_RECORDING = 0;
private static final int STATE_NOT_RECORDING = 1;
@@ -65,13 +69,13 @@ class SnapshotVideoRecorder extends VideoRecorder implements GlCameraPreview.Ren
case H_264: type = "video/avc"; break; // MediaFormat.MIMETYPE_VIDEO_AVC:
case DEVICE_DEFAULT: type = "video/avc"; break;
}
if (mResult.videoBitRate <= 0) mResult.videoBitRate = 1000000;
if (mResult.audioBitRate <= 0) mResult.audioBitRate = 64000;
TextureMediaEncoder.Config config = new TextureMediaEncoder.Config(
width, height,
if (mResult.videoBitRate <= 0) mResult.videoBitRate = DEFAULT_VIDEO_BITRATE;
if (mResult.audioBitRate <= 0) mResult.audioBitRate = DEFAULT_AUDIO_BITRATE;
if (mResult.videoFrameRate <= 0) mResult.videoFrameRate = DEFAULT_VIDEO_FRAMERATE;
TextureMediaEncoder.Config config = new TextureMediaEncoder.Config(width, height,
mResult.videoBitRate,
30,
mResult.getRotation(),
mResult.videoFrameRate,
mResult.rotation,
type, mTextureId,
scaleX, scaleY,
EGL14.eglGetCurrentContext()
@@ -91,7 +95,7 @@ class SnapshotVideoRecorder extends VideoRecorder implements GlCameraPreview.Ren
if (mCurrentState == STATE_RECORDING) {
TextureMediaEncoder.Frame frame = new TextureMediaEncoder.Frame();
frame.timestamp = surfaceTexture.getTimestamp();
frame.transform = new float[16];
frame.transform = new float[16]; // TODO would be cool to avoid this at every frame. But it's not easy.
surfaceTexture.getTransformMatrix(frame.transform);
mEncoderEngine.notify(TextureMediaEncoder.FRAME_EVENT, frame);
}
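
With the constants introduced in this file above, a video snapshot recorded without any explicit configuration falls back to 30 fps, 1 Mbps video and 64 kbps audio. A sketch of triggering one (the takeVideoSnapshot() signature is assumed from the MIGRATION TODO, which only confirms the feature exists):

```java
// Values left at 0 in the pending VideoResult are filled with the defaults above.
File output = new File(getFilesDir(), "snapshot.mp4");
cameraView.takeVideoSnapshot(output);
```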

@@ -26,6 +26,7 @@ public class VideoResult {
int maxDuration;
int endReason;
int videoBitRate;
int videoFrameRate;
int audioBitRate;
VideoResult() {}
@@ -138,6 +139,16 @@ public class VideoResult {
return videoBitRate;
}
/**
* Returns the frame rate used for video encoding
* in frames per second.
*
* @return the video frame rate
*/
public int getVideoFrameRate() {
return videoFrameRate;
}
/**
* Returns the bit rate used for audio encoding.
*

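Because the recorders write the effective values back into these fields, the getters always report what was actually encoded. A sketch of reading them (assuming the v2 onVideoTaken(VideoResult) callback):

```java
cameraView.addCameraListener(new CameraListener() {
    @Override
    public void onVideoTaken(@NonNull VideoResult result) {
        // Effective values, including any recorder-side fallbacks.
        int fps = result.getVideoFrameRate();
        int videoBps = result.getVideoBitRate();
        int audioBps = result.getAudioBitRate();
        Log.i("VideoResult", fps + " fps, " + videoBps + " bps video, " + audioBps + " bps audio");
    }
});
```
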
@@ -1,6 +1,9 @@
package com.otaliastudios.cameraview;
import android.content.Context;
import android.support.annotation.Nullable;
/**
* Facing value indicates which camera sensor should be used for the current session.
*
@@ -18,7 +21,19 @@ public enum Facing implements Control {
*/
FRONT(1);
final static Facing DEFAULT = BACK;
final static Facing DEFAULT(@Nullable Context context) {
if (context == null) {
return BACK;
} else if (CameraUtils.hasCameraFacing(context, BACK)) {
return BACK;
} else if (CameraUtils.hasCameraFacing(context, FRONT)) {
return FRONT;
} else {
// The controller will throw a CameraException.
// This device has no cameras.
return BACK;
}
}
private int value;

@@ -36,9 +36,6 @@ public class CameraActivity extends AppCompatActivity implements View.OnClickLis
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
getWindow().setFlags(
WindowManager.LayoutParams.FLAG_HARDWARE_ACCELERATED,
WindowManager.LayoutParams.FLAG_HARDWARE_ACCELERATED);
setContentView(R.layout.activity_camera);
CameraLogger.setLogLevel(CameraLogger.LEVEL_VERBOSE);
