diff --git a/cameraview/src/androidTest/java/com/otaliastudios/cameraview/MapperTest.java b/cameraview/src/androidTest/java/com/otaliastudios/cameraview/MapperTest.java
index 6a61c36d..17228208 100644
--- a/cameraview/src/androidTest/java/com/otaliastudios/cameraview/MapperTest.java
+++ b/cameraview/src/androidTest/java/com/otaliastudios/cameraview/MapperTest.java
@@ -26,11 +26,4 @@ public class MapperTest extends BaseTest {
         WhiteBalance unmapWhiteBalance(T cameraConstant) { return null; }
         Hdr unmapHdr(T cameraConstant) { return null; }
     };
-
-    @Test
-    public void testMap() {
-        assertEquals(mapper.map(VideoCodec.DEVICE_DEFAULT), MediaRecorder.VideoEncoder.DEFAULT);
-        assertEquals(mapper.map(VideoCodec.H_263), MediaRecorder.VideoEncoder.H263);
-        assertEquals(mapper.map(VideoCodec.H_264), MediaRecorder.VideoEncoder.H264);
-    }
 }
diff --git a/cameraview/src/main/gles/com/otaliastudios/cameraview/VideoCoreEncoder.java b/cameraview/src/main/gles/com/otaliastudios/cameraview/VideoCoreEncoder.java
index e2c80111..bcf3286c 100644
--- a/cameraview/src/main/gles/com/otaliastudios/cameraview/VideoCoreEncoder.java
+++ b/cameraview/src/main/gles/com/otaliastudios/cameraview/VideoCoreEncoder.java
@@ -45,8 +45,6 @@ import java.nio.ByteBuffer;
 
 @RequiresApi(api = Build.VERSION_CODES.JELLY_BEAN_MR2)
 class VideoCoreEncoder {
-    private static final String MIME_TYPE = "video/avc"; // H.264 Advanced Video Coding
-
     private Surface mInputSurface;
     private MediaMuxer mMuxer;
     private MediaCodec mEncoder;
@@ -58,11 +56,11 @@ class VideoCoreEncoder {
     /**
      * Configures encoder and muxer state, and prepares the input Surface.
      */
-    public VideoCoreEncoder(int width, int height, int bitRate, int frameRate, int rotation, File outputFile)
+    public VideoCoreEncoder(int width, int height, int bitRate, int frameRate, int rotation, File outputFile, String mimeType)
             throws IOException {
         mBufferInfo = new MediaCodec.BufferInfo();
 
-        MediaFormat format = MediaFormat.createVideoFormat(MIME_TYPE, width, height);
+        MediaFormat format = MediaFormat.createVideoFormat(mimeType, width, height);
 
         // Set some properties. Failing to specify some of these can cause the MediaCodec
         // configure() call to throw an unhelpful exception.
@@ -75,7 +73,7 @@ class VideoCoreEncoder {
 
         // Create a MediaCodec encoder, and configure it with our format. Get a Surface
        // we can use for input and wrap it with a class that handles the EGL work.
-        mEncoder = MediaCodec.createEncoderByType(MIME_TYPE);
+        mEncoder = MediaCodec.createEncoderByType(mimeType);
         mEncoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
         mInputSurface = mEncoder.createInputSurface();
         mEncoder.start();
diff --git a/cameraview/src/main/gles/com/otaliastudios/cameraview/VideoTextureEncoder.java b/cameraview/src/main/gles/com/otaliastudios/cameraview/VideoTextureEncoder.java
index e7adf75c..1e205931 100644
--- a/cameraview/src/main/gles/com/otaliastudios/cameraview/VideoTextureEncoder.java
+++ b/cameraview/src/main/gles/com/otaliastudios/cameraview/VideoTextureEncoder.java
@@ -72,16 +72,17 @@ class VideoTextureEncoder implements Runnable {
     private EglCore mEglCore;
     private EglViewport mFullScreen;
     private int mTextureId;
-    private int mFrameNum;
+    private int mFrameNum = -1; // Important
     private VideoCoreEncoder mVideoEncoder;
     private float mTransformationScaleX = 1F;
     private float mTransformationScaleY = 1F;
+    private int mTransformationRotation = 0;
 
     // ----- accessed by multiple threads -----
     private volatile EncoderHandler mHandler;
-    private final Object mReadyFence = new Object(); // guards ready/running
-    private boolean mReady;
+    private final Object mLooperReadyLock = new Object(); // guards ready/running
+    private boolean mLooperReady;
     private boolean mRunning;
 
     /**
@@ -102,11 +103,13 @@ class VideoTextureEncoder implements Runnable {
         final float mScaleX;
         final float mScaleY;
         final EGLContext mEglContext;
+        final String mMimeType;
 
         Config(File outputFile,
                int width, int height,
                int bitRate, int frameRate, int rotation,
                float scaleX, float scaleY,
+               String mimeType,
                EGLContext sharedEglContext) {
             mOutputFile = outputFile;
             mWidth = width;
@@ -117,6 +120,7 @@ class VideoTextureEncoder implements Runnable {
             mScaleX = scaleX;
             mScaleY = scaleY;
             mRotation = rotation;
+            mMimeType = mimeType;
         }
 
         @Override
@@ -130,8 +134,9 @@ class VideoTextureEncoder implements Runnable {
         try {
             mVideoEncoder = new VideoCoreEncoder(config.mWidth, config.mHeight,
                     config.mBitRate, config.mFrameRate,
-                    config.mRotation,
-                    config.mOutputFile);
+                    0, // The video encoder rotation does not work, so we apply it here using Matrix.rotateM().
+                    config.mOutputFile,
+                    config.mMimeType);
         } catch (IOException ioe) {
             throw new RuntimeException(ioe);
         }
@@ -141,6 +146,7 @@
         mFullScreen = new EglViewport();
         mTransformationScaleX = config.mScaleX;
         mTransformationScaleY = config.mScaleY;
+        mTransformationRotation = config.mRotation;
     }
 
     private void releaseEncoder() {
@@ -169,16 +175,16 @@
      */
     public void startRecording(Config config) {
         Log.d(TAG, "Encoder: startRecording()");
-        synchronized (mReadyFence) {
+        synchronized (mLooperReadyLock) {
             if (mRunning) {
                 Log.w(TAG, "Encoder thread already running");
                 return;
             }
             mRunning = true;
             new Thread(this, "TextureMovieEncoder").start();
-            while (!mReady) {
+            while (!mLooperReady) {
                 try {
-                    mReadyFence.wait();
+                    mLooperReadyLock.wait();
                 } catch (InterruptedException ie) {
                     // ignore
                 }
@@ -205,7 +211,7 @@
      * Returns true if recording has been started.
      */
     public boolean isRecording() {
-        synchronized (mReadyFence) {
+        synchronized (mLooperReadyLock) {
             return mRunning;
         }
     }
@@ -224,18 +230,14 @@
      * stall the caller while this thread does work.
      */
     public void frameAvailable(SurfaceTexture st) {
-        synchronized (mReadyFence) {
-            if (!mReady) {
+        synchronized (mLooperReadyLock) {
+            if (!mLooperReady) {
                 return;
             }
         }
 
         float[] transform = new float[16]; // TODO - avoid alloc every frame. Not easy, need a pool
         st.getTransformMatrix(transform);
-        float translX = (1F - mTransformationScaleX) / 2F;
-        float translY = (1F - mTransformationScaleY) / 2F;
-        Matrix.translateM(transform, 0, translX, translY, 0);
-        Matrix.scaleM(transform, 0, mTransformationScaleX, mTransformationScaleY, 1);
         long timestamp = st.getTimestamp();
         if (timestamp == 0) {
             // Seeing this after device is toggled off/on with power button. The
@@ -257,8 +259,8 @@
      * TODO: do something less clumsy
      */
     public void setTextureId(int id) {
-        synchronized (mReadyFence) {
-            if (!mReady) return;
+        synchronized (mLooperReadyLock) {
+            if (!mLooperReady) return;
         }
         mHandler.sendMessage(mHandler.obtainMessage(MSG_SET_TEXTURE_ID, id, 0, null));
     }
@@ -272,16 +274,15 @@
     public void run() {
         // Establish a Looper for this thread, and define a Handler for it.
         Looper.prepare();
-        synchronized (mReadyFence) {
+        synchronized (mLooperReadyLock) {
             mHandler = new EncoderHandler(this);
-            mReady = true;
-            mReadyFence.notify();
+            mLooperReady = true;
+            mLooperReadyLock.notify();
         }
         Looper.loop();
 
-        Log.d(TAG, "Encoder thread exiting");
-        synchronized (mReadyFence) {
-            mReady = mRunning = false;
+        synchronized (mLooperReadyLock) {
+            mLooperReady = mRunning = false;
             mHandler = null;
         }
     }
@@ -314,14 +315,63 @@
                     encoder.prepareEncoder(config);
                     break;
                 case MSG_STOP_RECORDING:
+                    encoder.mFrameNum = -1;
                     encoder.mVideoEncoder.drainEncoder(true);
                     encoder.releaseEncoder();
                     ((Runnable) obj).run();
                     break;
                 case MSG_FRAME_AVAILABLE:
+                    if (encoder.mFrameNum < 0) break;
                     encoder.mFrameNum++;
                     long timestamp = (((long) inputMessage.arg1) << 32) | (((long) inputMessage.arg2) & 0xffffffffL);
                     float[] transform = (float[]) obj;
+
+                    // We must scale this matrix like GLCameraPreview does, because it might have some cropping.
+                    // Scaling takes place with respect to the (0, 0, 0) point, so we must apply a Translation to compensate.
+
+                    // We also must rotate this matrix. In GLCameraPreview it is not needed because it is a live
+                    // stream, but the output video must be correctly rotated based on the device rotation at the moment.
+                    // Rotation also takes place with respect to the origin (the Z axis), so we must apply another Translation to compensate.
+
+                    // The order of operations must be translate, rotate & scale.
+                    float scaleX = encoder.mTransformationScaleX;
+                    float scaleY = encoder.mTransformationScaleY;
+                    int rotation = encoder.mTransformationRotation;
+                    float W = scaleX;
+                    float H = scaleY;
+                    float scaleTranslX = (1F - scaleX) / 2F;
+                    float scaleTranslY = (1F - scaleY) / 2F;
+                    float rotationTranslX = 0F;
+                    float rotationTranslY = 0F;
+                    boolean flip = false; // rotation % 180 != 0;
+                    Log.e("VideoTextureEncoder", "Rotation is " + rotation);
+                    if (rotation == 90) {
+                        rotationTranslX = W / 2F;
+                        rotationTranslY = W / 2F;
+                    } else if (rotation == 180) {
+                        rotationTranslX = W / 2F;
+                        rotationTranslY = H / 2F;
+                    } else if (rotation == 270) {
+                        rotationTranslX = H / 2F;
+                        rotationTranslY = H / 2F;
+                        Log.e("VideoTextureEncoder", "Rotation translY is " + rotationTranslY + ", h is " + H);
+                    }
+
+                    // Matrix.translateM(transform, 0, 0, 0, 0); // I see the right and top side
+                    // Matrix.translateM(transform, 0, 0, 0.5F, 0); // same
+                    // Matrix.translateM(transform, 0, 0, -0.5F, 0); // worse
+                    // Matrix.translateM(transform, 0, 0.5F, 0, 0); // worse: I see the top-right pixel
+                    // Matrix.translateM(transform, 0, -0.5F, 0, 0); // I fixed the VERTICAL
+                    // Matrix.translateM(transform, 0, -0.5F, -1, 0); // no changes
+                    // Matrix.translateM(transform, 0, -0.5F, 1, 0); // almost there
+                    Matrix.translateM(transform, 0, -0.5F, 1.75F, 0); // great! but it is scaled badly!!
+                    Matrix.rotateM(transform, 0, rotation, 0, 0, 1);
+                    Matrix.translateM(transform, 0, rotationTranslX, rotationTranslY, 0);
+
+
+                    Matrix.translateM(transform, 0, scaleTranslX, scaleTranslY, 0);
+                    Matrix.scaleM(transform, 0, scaleX, scaleY, 1);
+
                     encoder.mVideoEncoder.drainEncoder(false);
                     encoder.mFullScreen.drawFrame(encoder.mTextureId, transform);
                     encoder.mInputWindowSurface.setPresentationTime(timestamp);
diff --git a/cameraview/src/main/java/com/otaliastudios/cameraview/Mapper.java b/cameraview/src/main/java/com/otaliastudios/cameraview/Mapper.java
index 1c964960..c6359580 100644
--- a/cameraview/src/main/java/com/otaliastudios/cameraview/Mapper.java
+++ b/cameraview/src/main/java/com/otaliastudios/cameraview/Mapper.java
@@ -16,13 +16,4 @@ abstract class Mapper {
     abstract Facing unmapFacing(T cameraConstant);
     abstract WhiteBalance unmapWhiteBalance(T cameraConstant);
     abstract Hdr unmapHdr(T cameraConstant);
-
-    int map(VideoCodec codec) {
-        switch (codec) {
-            case DEVICE_DEFAULT: return MediaRecorder.VideoEncoder.DEFAULT;
-            case H_263: return MediaRecorder.VideoEncoder.H263;
-            case H_264: return MediaRecorder.VideoEncoder.H264;
-            default: return MediaRecorder.VideoEncoder.DEFAULT;
-        }
-    }
 }
diff --git a/cameraview/src/main/java/com/otaliastudios/cameraview/MediaCodecVideoRecorder.java b/cameraview/src/main/java/com/otaliastudios/cameraview/MediaCodecVideoRecorder.java
index d8c05a3b..3bf0949f 100644
--- a/cameraview/src/main/java/com/otaliastudios/cameraview/MediaCodecVideoRecorder.java
+++ b/cameraview/src/main/java/com/otaliastudios/cameraview/MediaCodecVideoRecorder.java
@@ -2,6 +2,7 @@ package com.otaliastudios.cameraview;
 
 import android.graphics.SurfaceTexture;
 import android.media.CamcorderProfile;
+import android.media.MediaFormat;
 import android.opengl.EGL14;
 import android.os.Build;
 import android.os.Handler;
@@ -65,13 +66,20 @@ class MediaCodecVideoRecorder extends VideoRecorder implements GLCameraPreview.R
     @Override
     public void onRendererFrame(SurfaceTexture surfaceTexture, float scaleX, float scaleY) {
         if (mCurrentState == STATE_NOT_RECORDING && mDesiredState == STATE_RECORDING) {
-            // Size must not be flipped based on rotation, unlike MediaRecorderVideoRecorder
-            Size size = mResult.getSize();
+            // Size must be flipped based on rotation, because we will rotate the texture in the encoder
+            Size size = mResult.getRotation() % 180 == 0 ? mResult.getSize() : mResult.getSize().flip();
+            // size = mResult.size;
             // Ensure width and height are divisible by 2, as I have read somewhere.
             int width = size.getWidth();
             int height = size.getHeight();
             width = width % 2 == 0 ? width : width + 1;
             height = height % 2 == 0 ? height : height + 1;
+            String type = "";
+            switch (mResult.codec) {
+                case H_263: type = "video/3gpp"; break; // MediaFormat.MIMETYPE_VIDEO_H263;
+                case H_264: type = "video/avc"; break; // MediaFormat.MIMETYPE_VIDEO_AVC;
+                case DEVICE_DEFAULT: type = "video/avc"; break;
+            }
             VideoTextureEncoder.Config configuration = new VideoTextureEncoder.Config(
                     mResult.getFile(),
                     width,
@@ -81,6 +89,7 @@ class MediaCodecVideoRecorder extends VideoRecorder implements GLCameraPreview.R
                     mResult.getRotation(),
                     scaleX,
                     scaleY,
+                    type,
                     EGL14.eglGetCurrentContext()
             );
             mEncoder.startRecording(configuration);
diff --git a/cameraview/src/main/java/com/otaliastudios/cameraview/MediaRecorderVideoRecorder.java b/cameraview/src/main/java/com/otaliastudios/cameraview/MediaRecorderVideoRecorder.java
index 631308e1..1197bbe8 100644
--- a/cameraview/src/main/java/com/otaliastudios/cameraview/MediaRecorderVideoRecorder.java
+++ b/cameraview/src/main/java/com/otaliastudios/cameraview/MediaRecorderVideoRecorder.java
@@ -42,10 +42,10 @@ class MediaRecorderVideoRecorder extends VideoRecorder {
         mMediaRecorder.setOutputFormat(mProfile.fileFormat);
         mMediaRecorder.setVideoFrameRate(mProfile.videoFrameRate);
         mMediaRecorder.setVideoSize(size.getWidth(), size.getHeight());
-        if (mResult.getCodec() == VideoCodec.DEFAULT) {
-            mMediaRecorder.setVideoEncoder(mProfile.videoCodec);
-        } else {
-            mMediaRecorder.setVideoEncoder(mMapper.map(mResult.getCodec()));
+        switch (mResult.getCodec()) {
+            case H_263: mMediaRecorder.setVideoEncoder(MediaRecorder.VideoEncoder.H263); break;
+            case H_264: mMediaRecorder.setVideoEncoder(MediaRecorder.VideoEncoder.H264); break;
+            case DEVICE_DEFAULT: mMediaRecorder.setVideoEncoder(mProfile.videoCodec); break;
         }
         mMediaRecorder.setVideoEncodingBitRate(mProfile.videoBitRate);
         if (mResult.getAudio() == Audio.ON) {
diff --git a/demo/src/main/java/com/otaliastudios/cameraview/demo/Control.java b/demo/src/main/java/com/otaliastudios/cameraview/demo/Control.java
index 8003d3a4..0a455af1 100644
--- a/demo/src/main/java/com/otaliastudios/cameraview/demo/Control.java
+++ b/demo/src/main/java/com/otaliastudios/cameraview/demo/Control.java
@@ -63,7 +63,7 @@ public enum Control {
                 int boundary = this == WIDTH ? root.getWidth() : root.getHeight();
                 if (boundary == 0) boundary = 1000;
                 int step = boundary / 10;
-                // list.add(this == WIDTH ? 12 : 16);
+                list.add(this == WIDTH ? 300 : 700);
                 list.add(ViewGroup.LayoutParams.WRAP_CONTENT);
                 list.add(ViewGroup.LayoutParams.MATCH_PARENT);
                 for (int i = step; i < boundary; i += step) {
diff --git a/demo/src/main/res/layout/activity_camera.xml b/demo/src/main/res/layout/activity_camera.xml
index a159747d..8779405e 100644
--- a/demo/src/main/res/layout/activity_camera.xml
+++ b/demo/src/main/res/layout/activity_camera.xml
@@ -11,8 +11,8 @@