Working implementation of MediaCodecVideoRecorder

v2
Mattia Iavarone 6 years ago
parent 504e1fb871
commit 818a57e94c
18 changed files:
  1. cameraview/build.gradle (1 line changed)
  2. cameraview/src/androidTest/java/com/otaliastudios/cameraview/MockCameraController.java (10 lines changed)
  3. cameraview/src/androidTest/java/com/otaliastudios/cameraview/VideoRecorderTest.java (1 line changed)
  4. cameraview/src/main/gles/com/otaliastudios/cameraview/EglBaseSurface.java (200 lines changed)
  5. cameraview/src/main/gles/com/otaliastudios/cameraview/EglCore.java (377 lines changed)
  6. cameraview/src/main/gles/com/otaliastudios/cameraview/EglElement.java (5 lines changed)
  7. cameraview/src/main/gles/com/otaliastudios/cameraview/EglViewport.java (18 lines changed)
  8. cameraview/src/main/gles/com/otaliastudios/cameraview/EglWindowSurface.java (93 lines changed)
  9. cameraview/src/main/gles/com/otaliastudios/cameraview/VideoCoreEncoder.java (201 lines changed)
  10. cameraview/src/main/gles/com/otaliastudios/cameraview/VideoTextureEncoder.java (397 lines changed)
  11. cameraview/src/main/java/com/otaliastudios/cameraview/Camera1.java (33 lines changed)
  12. cameraview/src/main/java/com/otaliastudios/cameraview/CameraController.java (2 lines changed)
  13. cameraview/src/main/java/com/otaliastudios/cameraview/CameraView.java (29 lines changed)
  14. cameraview/src/main/java/com/otaliastudios/cameraview/MediaCodecVideoRecorder.java (77 lines changed)
  15. cameraview/src/main/java/com/otaliastudios/cameraview/MediaRecorderVideoRecorder.java (3 lines changed)
  16. cameraview/src/main/java/com/otaliastudios/cameraview/VideoRecorder.java (7 lines changed)
  17. cameraview/src/main/views/com/otaliastudios/cameraview/GLCameraPreview.java (27 lines changed)
  18. demo/src/main/java/com/otaliastudios/cameraview/demo/CameraActivity.java (11 lines changed)

@@ -35,6 +35,7 @@ android {
main.java.srcDirs += 'src/main/options'
main.java.srcDirs += 'src/main/views'
main.java.srcDirs += 'src/main/utils'
main.java.srcDirs += 'src/main/gles'
}
}

@@ -99,6 +99,11 @@ public class MockCameraController extends CameraController {
void takeVideo(@NonNull File file) {
}
@Override
void takeVideoSnapshot(@NonNull File file) {
}
@Override
void stopVideo() {
}
@@ -116,6 +121,11 @@ public class MockCameraController extends CameraController {
public void onSurfaceAvailable() {
}
@Override
public void onSurfaceDestroyed() {
}
@Override
public void onBufferAvailable(byte[] buffer) {
}

@@ -25,6 +25,7 @@ public class VideoRecorderTest extends BaseTest {
VideoRecorder.VideoResultListener listener = Mockito.mock(VideoRecorder.VideoResultListener.class);
VideoRecorder recorder = new VideoRecorder(result, listener) {
void start() {}
void stop() { }
};
recorder.start();
recorder.stop();

@@ -0,0 +1,200 @@
/*
* Copyright 2013 Google Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.otaliastudios.cameraview;
import android.graphics.Bitmap;
import android.opengl.EGL14;
import android.opengl.EGLSurface;
import android.opengl.GLES20;
import android.os.Build;
import android.support.annotation.RequiresApi;
import android.util.Log;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
/**
* Common base class for EGL surfaces.
* <p>
* There can be multiple surfaces associated with a single context.
*/
@RequiresApi(api = Build.VERSION_CODES.JELLY_BEAN_MR2)
class EglBaseSurface extends EglElement {
protected static final String TAG = EglBaseSurface.class.getSimpleName();
// EglCore object we're associated with. It may be associated with multiple surfaces.
protected EglCore mEglCore;
private EGLSurface mEGLSurface = EGL14.EGL_NO_SURFACE;
private int mWidth = -1;
private int mHeight = -1;
protected EglBaseSurface(EglCore eglCore) {
mEglCore = eglCore;
}
/**
* Creates a window surface.
* <p>
* @param surface May be a Surface or SurfaceTexture.
*/
public void createWindowSurface(Object surface) {
if (mEGLSurface != EGL14.EGL_NO_SURFACE) {
throw new IllegalStateException("surface already created");
}
mEGLSurface = mEglCore.createWindowSurface(surface);
// Don't cache width/height here, because the size of the underlying surface can change
// out from under us (see e.g. HardwareScalerActivity).
//mWidth = mEglCore.querySurface(mEGLSurface, EGL14.EGL_WIDTH);
//mHeight = mEglCore.querySurface(mEGLSurface, EGL14.EGL_HEIGHT);
}
/**
* Creates an off-screen surface.
*/
public void createOffscreenSurface(int width, int height) {
if (mEGLSurface != EGL14.EGL_NO_SURFACE) {
throw new IllegalStateException("surface already created");
}
mEGLSurface = mEglCore.createOffscreenSurface(width, height);
mWidth = width;
mHeight = height;
}
/**
* Returns the surface's width, in pixels.
* <p>
* If this is called on a window surface, and the underlying surface is in the process
* of changing size, we may not see the new size right away (e.g. in the "surfaceChanged"
* callback). The size should match after the next buffer swap.
*/
public int getWidth() {
if (mWidth < 0) {
return mEglCore.querySurface(mEGLSurface, EGL14.EGL_WIDTH);
} else {
return mWidth;
}
}
/**
* Returns the surface's height, in pixels.
*/
public int getHeight() {
if (mHeight < 0) {
return mEglCore.querySurface(mEGLSurface, EGL14.EGL_HEIGHT);
} else {
return mHeight;
}
}
/**
* Release the EGL surface.
*/
public void releaseEglSurface() {
mEglCore.releaseSurface(mEGLSurface);
mEGLSurface = EGL14.EGL_NO_SURFACE;
mWidth = mHeight = -1;
}
/**
* Makes our EGL context and surface current.
*/
public void makeCurrent() {
mEglCore.makeCurrent(mEGLSurface);
}
/**
* Makes our EGL context and surface current for drawing, using the supplied surface
* for reading.
*/
public void makeCurrentReadFrom(EglBaseSurface readSurface) {
mEglCore.makeCurrent(mEGLSurface, readSurface.mEGLSurface);
}
/**
* Calls eglSwapBuffers. Use this to "publish" the current frame.
*
* @return false on failure
*/
public boolean swapBuffers() {
boolean result = mEglCore.swapBuffers(mEGLSurface);
if (!result) {
Log.d(TAG, "WARNING: swapBuffers() failed");
}
return result;
}
/**
* Sends the presentation time stamp to EGL.
*
* @param nsecs Timestamp, in nanoseconds.
*/
public void setPresentationTime(long nsecs) {
mEglCore.setPresentationTime(mEGLSurface, nsecs);
}
/**
* Saves the EGL surface to a file.
* <p>
* Expects that this object's EGL surface is current.
*/
public void saveFrame(File file) throws IOException {
if (!mEglCore.isCurrent(mEGLSurface)) {
throw new RuntimeException("Expected EGL context/surface is not current");
}
// glReadPixels fills in a "direct" ByteBuffer with what is essentially big-endian RGBA
// data (i.e. a byte of red, followed by a byte of green...). While the Bitmap
// constructor that takes an int[] wants little-endian ARGB (blue/red swapped), the
// Bitmap "copy pixels" method wants the same format GL provides.
//
// Ideally we'd have some way to re-use the ByteBuffer, especially if we're calling
// here often.
//
// Making this even more interesting is the upside-down nature of GL, which means
// our output will look upside down relative to what appears on screen if the
// typical GL conventions are used.
String filename = file.toString();
int width = getWidth();
int height = getHeight();
ByteBuffer buf = ByteBuffer.allocateDirect(width * height * 4);
buf.order(ByteOrder.LITTLE_ENDIAN);
GLES20.glReadPixels(0, 0, width, height,
GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, buf);
check("glReadPixels");
buf.rewind();
BufferedOutputStream bos = null;
try {
bos = new BufferedOutputStream(new FileOutputStream(filename));
Bitmap bmp = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
bmp.copyPixelsFromBuffer(buf);
bmp.compress(Bitmap.CompressFormat.PNG, 90, bos);
bmp.recycle();
} finally {
if (bos != null) bos.close();
}
Log.d(TAG, "Saved " + width + "x" + height + " frame as '" + filename + "'");
}
}

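A minimal usage sketch of EglBaseSurface (not part of this commit; the size, the output path, and the anonymous in-package subclass are illustrative). It strings together the offscreen path defined above, on a dedicated GL thread, using the EglCore class added next:

void offscreenExample() throws IOException {
    EglCore core = new EglCore(null, 0);
    EglBaseSurface surface = new EglBaseSurface(core) {}; // package-private subclass, like EglWindowSurface below
    surface.createOffscreenSurface(640, 480);
    surface.makeCurrent(); // context and surface become current on this thread
    GLES20.glClearColor(1f, 0f, 0f, 1f); // any GLES20 drawing goes here
    GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
    surface.saveFrame(new File("/sdcard/frame.png")); // requires the surface to be current
    surface.releaseEglSurface();
    core.release();
}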
@@ -0,0 +1,377 @@
/*
* Copyright 2013 Google Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.otaliastudios.cameraview;
import android.graphics.SurfaceTexture;
import android.opengl.EGL14;
import android.opengl.EGLConfig;
import android.opengl.EGLContext;
import android.opengl.EGLDisplay;
import android.opengl.EGLExt;
import android.opengl.EGLSurface;
import android.os.Build;
import android.support.annotation.RequiresApi;
import android.util.Log;
import android.view.Surface;
/**
* -- from grafika --
*
* Core EGL state (display, context, config).
* <p>
* The EGLContext must only be attached to one thread at a time. This class is not thread-safe.
*/
@RequiresApi(api = Build.VERSION_CODES.JELLY_BEAN_MR2)
final class EglCore {
private static final String TAG = EglCore.class.getSimpleName();
/**
* Constructor flag: surface must be recordable. This discourages EGL from using a
* pixel format that cannot be converted efficiently to something usable by the video
* encoder.
*/
public static final int FLAG_RECORDABLE = 0x01;
/**
* Constructor flag: ask for GLES3, fall back to GLES2 if not available. Without this
* flag, GLES2 is used.
*/
public static final int FLAG_TRY_GLES3 = 0x02;
// Android-specific extension.
private static final int EGL_RECORDABLE_ANDROID = 0x3142;
private EGLDisplay mEGLDisplay = EGL14.EGL_NO_DISPLAY;
private EGLContext mEGLContext = EGL14.EGL_NO_CONTEXT;
private EGLConfig mEGLConfig = null;
private int mGlVersion = -1;
/**
* Prepares EGL display and context.
* <p>
* Equivalent to EglCore(null, 0).
*/
public EglCore() {
this(null, 0);
}
/**
* Prepares EGL display and context.
* <p>
* @param sharedContext The context to share, or null if sharing is not desired.
* @param flags Configuration bit flags, e.g. FLAG_RECORDABLE.
*/
public EglCore(EGLContext sharedContext, int flags) {
if (mEGLDisplay != EGL14.EGL_NO_DISPLAY) {
throw new RuntimeException("EGL already set up");
}
if (sharedContext == null) {
sharedContext = EGL14.EGL_NO_CONTEXT;
}
mEGLDisplay = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY);
if (mEGLDisplay == EGL14.EGL_NO_DISPLAY) {
throw new RuntimeException("unable to get EGL14 display");
}
int[] version = new int[2];
if (!EGL14.eglInitialize(mEGLDisplay, version, 0, version, 1)) {
mEGLDisplay = null;
throw new RuntimeException("unable to initialize EGL14");
}
// Try to get a GLES3 context, if requested.
if ((flags & FLAG_TRY_GLES3) != 0) {
//Log.d(TAG, "Trying GLES 3");
EGLConfig config = getConfig(flags, 3);
if (config != null) {
int[] attrib3_list = {
EGL14.EGL_CONTEXT_CLIENT_VERSION, 3,
EGL14.EGL_NONE
};
EGLContext context = EGL14.eglCreateContext(mEGLDisplay, config, sharedContext,
attrib3_list, 0);
if (EGL14.eglGetError() == EGL14.EGL_SUCCESS) {
//Log.d(TAG, "Got GLES 3 config");
mEGLConfig = config;
mEGLContext = context;
mGlVersion = 3;
}
}
}
if (mEGLContext == EGL14.EGL_NO_CONTEXT) { // GLES 2 only, or GLES 3 attempt failed
//Log.d(TAG, "Trying GLES 2");
EGLConfig config = getConfig(flags, 2);
if (config == null) {
throw new RuntimeException("Unable to find a suitable EGLConfig");
}
int[] attrib2_list = {
EGL14.EGL_CONTEXT_CLIENT_VERSION, 2,
EGL14.EGL_NONE
};
EGLContext context = EGL14.eglCreateContext(mEGLDisplay, config, sharedContext,
attrib2_list, 0);
checkEglError("eglCreateContext");
mEGLConfig = config;
mEGLContext = context;
mGlVersion = 2;
}
// Confirm with query.
int[] values = new int[1];
EGL14.eglQueryContext(mEGLDisplay, mEGLContext, EGL14.EGL_CONTEXT_CLIENT_VERSION,
values, 0);
Log.d(TAG, "EGLContext created, client version " + values[0]);
}
/**
* Finds a suitable EGLConfig.
*
* @param flags Bit flags from constructor.
* @param version Must be 2 or 3.
*/
private EGLConfig getConfig(int flags, int version) {
int renderableType = EGL14.EGL_OPENGL_ES2_BIT;
if (version >= 3) {
renderableType |= EGLExt.EGL_OPENGL_ES3_BIT_KHR;
}
// The actual surface is generally RGBA or RGBX, so situationally omitting alpha
// doesn't really help. It can also lead to a huge performance hit on glReadPixels()
// when reading into a GL_RGBA buffer.
int[] attribList = {
EGL14.EGL_RED_SIZE, 8,
EGL14.EGL_GREEN_SIZE, 8,
EGL14.EGL_BLUE_SIZE, 8,
EGL14.EGL_ALPHA_SIZE, 8,
//EGL14.EGL_DEPTH_SIZE, 16,
//EGL14.EGL_STENCIL_SIZE, 8,
EGL14.EGL_RENDERABLE_TYPE, renderableType,
EGL14.EGL_NONE, 0, // placeholder for recordable [@-3]
EGL14.EGL_NONE
};
if ((flags & FLAG_RECORDABLE) != 0) {
attribList[attribList.length - 3] = EGL_RECORDABLE_ANDROID;
attribList[attribList.length - 2] = 1;
}
EGLConfig[] configs = new EGLConfig[1];
int[] numConfigs = new int[1];
if (!EGL14.eglChooseConfig(mEGLDisplay, attribList, 0, configs, 0, configs.length,
numConfigs, 0)) {
Log.w(TAG, "unable to find RGB8888 / " + version + " EGLConfig");
return null;
}
return configs[0];
}
/**
* Discards all resources held by this class, notably the EGL context. This must be
* called from the thread where the context was created.
* <p>
* On completion, no context will be current.
*/
public void release() {
if (mEGLDisplay != EGL14.EGL_NO_DISPLAY) {
// Android is unusual in that it uses a reference-counted EGLDisplay. So for
// every eglInitialize() we need an eglTerminate().
EGL14.eglMakeCurrent(mEGLDisplay, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE,
EGL14.EGL_NO_CONTEXT);
EGL14.eglDestroyContext(mEGLDisplay, mEGLContext);
EGL14.eglReleaseThread();
EGL14.eglTerminate(mEGLDisplay);
}
mEGLDisplay = EGL14.EGL_NO_DISPLAY;
mEGLContext = EGL14.EGL_NO_CONTEXT;
mEGLConfig = null;
}
@Override
protected void finalize() throws Throwable {
try {
if (mEGLDisplay != EGL14.EGL_NO_DISPLAY) {
// We're limited here -- finalizers don't run on the thread that holds
// the EGL state, so if a surface or context is still current on another
// thread we can't fully release it here. Exceptions thrown from here
// are quietly discarded. Complain in the log file.
Log.w(TAG, "WARNING: EglCore was not explicitly released -- state may be leaked");
release();
}
} finally {
super.finalize();
}
}
/**
* Destroys the specified surface. Note the EGLSurface won't actually be destroyed if it's
* still current in a context.
*/
public void releaseSurface(EGLSurface eglSurface) {
EGL14.eglDestroySurface(mEGLDisplay, eglSurface);
}
/**
* Creates an EGL surface associated with a Surface.
* <p>
* If this is destined for MediaCodec, the EGLConfig should have the "recordable" attribute.
*/
public EGLSurface createWindowSurface(Object surface) {
if (!(surface instanceof Surface) && !(surface instanceof SurfaceTexture)) {
throw new RuntimeException("invalid surface: " + surface);
}
// Create a window surface, and attach it to the Surface we received.
int[] surfaceAttribs = {
EGL14.EGL_NONE
};
EGLSurface eglSurface = EGL14.eglCreateWindowSurface(mEGLDisplay, mEGLConfig, surface,
surfaceAttribs, 0);
checkEglError("eglCreateWindowSurface");
if (eglSurface == null) {
throw new RuntimeException("surface was null");
}
return eglSurface;
}
/**
* Creates an EGL surface associated with an offscreen buffer.
*/
public EGLSurface createOffscreenSurface(int width, int height) {
int[] surfaceAttribs = {
EGL14.EGL_WIDTH, width,
EGL14.EGL_HEIGHT, height,
EGL14.EGL_NONE
};
EGLSurface eglSurface = EGL14.eglCreatePbufferSurface(mEGLDisplay, mEGLConfig,
surfaceAttribs, 0);
checkEglError("eglCreatePbufferSurface");
if (eglSurface == null) {
throw new RuntimeException("surface was null");
}
return eglSurface;
}
/**
* Makes our EGL context current, using the supplied surface for both "draw" and "read".
*/
public void makeCurrent(EGLSurface eglSurface) {
if (mEGLDisplay == EGL14.EGL_NO_DISPLAY) {
// called makeCurrent() before create?
Log.d(TAG, "NOTE: makeCurrent w/o display");
}
if (!EGL14.eglMakeCurrent(mEGLDisplay, eglSurface, eglSurface, mEGLContext)) {
throw new RuntimeException("eglMakeCurrent failed");
}
}
/**
* Makes our EGL context current, using the supplied "draw" and "read" surfaces.
*/
public void makeCurrent(EGLSurface drawSurface, EGLSurface readSurface) {
if (mEGLDisplay == EGL14.EGL_NO_DISPLAY) {
// called makeCurrent() before create?
Log.d(TAG, "NOTE: makeCurrent w/o display");
}
if (!EGL14.eglMakeCurrent(mEGLDisplay, drawSurface, readSurface, mEGLContext)) {
throw new RuntimeException("eglMakeCurrent(draw,read) failed");
}
}
/**
* Makes no context current.
*/
public void makeNothingCurrent() {
if (!EGL14.eglMakeCurrent(mEGLDisplay, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE,
EGL14.EGL_NO_CONTEXT)) {
throw new RuntimeException("eglMakeCurrent failed");
}
}
/**
* Calls eglSwapBuffers. Use this to "publish" the current frame.
*
* @return false on failure
*/
public boolean swapBuffers(EGLSurface eglSurface) {
return EGL14.eglSwapBuffers(mEGLDisplay, eglSurface);
}
/**
* Sends the presentation time stamp to EGL. Time is expressed in nanoseconds.
*/
public void setPresentationTime(EGLSurface eglSurface, long nsecs) {
EGLExt.eglPresentationTimeANDROID(mEGLDisplay, eglSurface, nsecs);
}
/**
* Returns true if our context and the specified surface are current.
*/
public boolean isCurrent(EGLSurface eglSurface) {
return mEGLContext.equals(EGL14.eglGetCurrentContext()) &&
eglSurface.equals(EGL14.eglGetCurrentSurface(EGL14.EGL_DRAW));
}
/**
* Performs a simple surface query.
*/
public int querySurface(EGLSurface eglSurface, int what) {
int[] value = new int[1];
EGL14.eglQuerySurface(mEGLDisplay, eglSurface, what, value, 0);
return value[0];
}
/**
* Queries a string value.
*/
public String queryString(int what) {
return EGL14.eglQueryString(mEGLDisplay, what);
}
/**
* Returns the GLES version this context is configured for (currently 2 or 3).
*/
public int getGlVersion() {
return mGlVersion;
}
/**
* Writes the current display, context, and surface to the log.
*/
public static void logCurrent(String msg) {
EGLDisplay display;
EGLContext context;
EGLSurface surface;
display = EGL14.eglGetCurrentDisplay();
context = EGL14.eglGetCurrentContext();
surface = EGL14.eglGetCurrentSurface(EGL14.EGL_DRAW);
Log.i(TAG, "Current EGL (" + msg + "): display=" + display + ", context=" + context +
", surface=" + surface);
}
/**
* Checks for EGL errors. Throws an exception if an error has been raised.
*/
private void checkEglError(String msg) {
int error;
if ((error = EGL14.eglGetError()) != EGL14.EGL_SUCCESS) {
throw new RuntimeException(msg + ": EGL error: 0x" + Integer.toHexString(error));
}
}
}

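A hedged sketch (not in the commit) of the recordable path this class exists for: a core created with FLAG_RECORDABLE wraps a MediaCodec input Surface, and the presentation time is set before each swap. The surface argument and timestamp are illustrative:

void encodeOneFrame(Surface encoderInputSurface, long frameTimeNanos) {
    EglCore core = new EglCore(null, EglCore.FLAG_RECORDABLE);
    EGLSurface eglSurface = core.createWindowSurface(encoderInputSurface); // e.g. a MediaCodec#createInputSurface() result
    core.makeCurrent(eglSurface);
    // ... GLES draw calls for one frame go here ...
    core.setPresentationTime(eglSurface, frameTimeNanos);
    core.swapBuffers(eglSurface); // publishes the frame to the encoder
    core.releaseSurface(eglSurface);
    core.release();
}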
@@ -1,7 +1,6 @@
package com.otaliastudios.cameraview;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import android.opengl.Matrix;
@@ -9,9 +8,9 @@ import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
-class GLElement {
+class EglElement {
-private final static CameraLogger LOG = CameraLogger.create(GLElement.class.getSimpleName());
+private final static CameraLogger LOG = CameraLogger.create(EglElement.class.getSimpleName());
// Identity matrix for general use.
protected static final float[] IDENTITY_MATRIX = new float[16];

@@ -3,14 +3,15 @@ package com.otaliastudios.cameraview;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import android.opengl.Matrix;
import android.util.Log;
import java.nio.FloatBuffer;
-class GLViewport extends GLElement {
+/**
+ * This is a mix of 3 grafika classes, FullFrameRect, Texture2dProgram, Drawable2d.
+ */
+class EglViewport extends EglElement {
-private final static CameraLogger LOG = CameraLogger.create(GLViewport.class.getSimpleName());
+private final static CameraLogger LOG = CameraLogger.create(EglViewport.class.getSimpleName());
// Simple vertex shader.
private static final String SIMPLE_VERTEX_SHADER =
@@ -74,7 +75,7 @@ class GLViewport extends GLElement {
// private int muTexOffsetLoc; // Used for filtering
// private int muColorAdjustLoc; // Used for filtering
-GLViewport() {
+EglViewport() {
mTextureTarget = GLES11Ext.GL_TEXTURE_EXTERNAL_OES;
mProgramHandle = createProgram(SIMPLE_VERTEX_SHADER, SIMPLE_FRAGMENT_SHADER);
maPositionLoc = GLES20.glGetAttribLocation(mProgramHandle, "aPosition");
@@ -90,10 +91,13 @@ class GLViewport extends GLElement {
}
+void release(boolean doEglCleanup) {
+if (doEglCleanup) GLES20.glDeleteProgram(mProgramHandle);
+mProgramHandle = -1;
+}
void release() {
-GLES20.glDeleteProgram(mProgramHandle);
-mProgramHandle = -1;
+release(true);
}
int createTexture() {

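The new release(boolean doEglCleanup) overload lets callers skip the GL call when the owning EGL context is already being torn down. A small sketch of the two paths (not in the commit):

EglViewport viewport = new EglViewport();
// Normal teardown, with the context still current:
viewport.release(); // delegates to release(true), which calls glDeleteProgram
// Teardown while the whole context is going away (see handleUpdateSharedContext below):
viewport.release(false); // forgets the program handle without touching GL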
@@ -0,0 +1,93 @@
/*
* Copyright 2013 Google Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.otaliastudios.cameraview;
import android.graphics.SurfaceTexture;
import android.os.Build;
import android.support.annotation.RequiresApi;
import android.view.Surface;
/**
* Recordable EGL window surface.
* <p>
* It's good practice to explicitly release() the surface, preferably from a "finally" block.
*/
@RequiresApi(api = Build.VERSION_CODES.JELLY_BEAN_MR2)
class EglWindowSurface extends EglBaseSurface {
private Surface mSurface;
private boolean mReleaseSurface;
/**
* Associates an EGL surface with the native window surface.
* <p>
* Set releaseSurface to true if you want the Surface to be released when release() is
* called. This is convenient, but can interfere with framework classes that expect to
* manage the Surface themselves (e.g. if you release a SurfaceView's Surface, the
* surfaceDestroyed() callback won't fire).
*/
public EglWindowSurface(EglCore eglCore, Surface surface, boolean releaseSurface) {
super(eglCore);
createWindowSurface(surface);
mSurface = surface;
mReleaseSurface = releaseSurface;
}
/**
* Associates an EGL surface with the SurfaceTexture.
*/
public EglWindowSurface(EglCore eglCore, SurfaceTexture surfaceTexture) {
super(eglCore);
createWindowSurface(surfaceTexture);
}
/**
* Releases any resources associated with the EGL surface (and, if configured to do so,
* with the Surface as well).
* <p>
* Does not require that the surface's EGL context be current.
*/
public void release() {
releaseEglSurface();
if (mSurface != null) {
if (mReleaseSurface) {
mSurface.release();
}
mSurface = null;
}
}
/**
* Recreate the EGLSurface, using the new EglBase. The caller should have already
* freed the old EGLSurface with releaseEglSurface().
* <p>
* This is useful when we want to update the EGLSurface associated with a Surface.
* For example, if we want to share with a different EGLContext, which can only
* be done by tearing down and recreating the context. (That's handled by the caller;
* this just creates a new EGLSurface for the Surface we were handed earlier.)
* <p>
* If the previous EGLSurface isn't fully destroyed, e.g. it's still current on a
* context somewhere, the create call will fail with complaints from the Surface
* about already being connected.
*/
public void recreate(EglCore newEglCore) {
if (mSurface == null) {
throw new RuntimeException("not yet implemented for SurfaceTexture");
}
mEglCore = newEglCore; // switch to new context
createWindowSurface(mSurface); // create new surface
}
}

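A sketch (not in the commit) of the recreate() flow described above; VideoTextureEncoder.handleUpdateSharedContext() below performs the same dance. The EGLSurface must be fully destroyed before the Surface is re-wrapped on the new context:

void moveToSharedContext(EglWindowSurface window, EglCore oldCore, EGLContext newSharedContext) {
    window.releaseEglSurface(); // destroy the old EGLSurface first
    oldCore.release(); // then the context it belonged to
    EglCore newCore = new EglCore(newSharedContext, EglCore.FLAG_RECORDABLE);
    window.recreate(newCore); // same Surface, fresh EGLSurface
    window.makeCurrent();
}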
@@ -0,0 +1,201 @@
/*
* Copyright 2014 Google Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.otaliastudios.cameraview;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import android.media.MediaMuxer;
import android.os.Build;
import android.support.annotation.RequiresApi;
import android.util.Log;
import android.view.Surface;
import java.io.File;
import java.io.IOException;
import java.nio.ByteBuffer;
/**
* -- From grafika VideoEncoderCore.java --
*
* This class wraps up the core components used for surface-input video encoding.
* <p>
* Once created, frames are fed to the input surface. Remember to provide the presentation
* time stamp, and always call drainEncoder() before swapBuffers() to ensure that the
* producer side doesn't get backed up.
* <p>
* This class is not thread-safe, with one exception: it is valid to use the input surface
* on one thread, and drain the output on a different thread.
*/
@RequiresApi(api = Build.VERSION_CODES.JELLY_BEAN_MR2)
class VideoCoreEncoder {
// TODO: these ought to be configurable as well
private static final String MIME_TYPE = "video/avc"; // H.264 Advanced Video Coding
private static final int FRAME_RATE = 30; // 30fps
private static final int IFRAME_INTERVAL = 5; // 5 seconds between I-frames
private Surface mInputSurface;
private MediaMuxer mMuxer;
private MediaCodec mEncoder;
private MediaCodec.BufferInfo mBufferInfo;
private int mTrackIndex;
private boolean mMuxerStarted;
/**
* Configures encoder and muxer state, and prepares the input Surface.
*/
public VideoCoreEncoder(int width, int height, int bitRate, File outputFile)
throws IOException {
mBufferInfo = new MediaCodec.BufferInfo();
MediaFormat format = MediaFormat.createVideoFormat(MIME_TYPE, width, height);
// Set some properties. Failing to specify some of these can cause the MediaCodec
// configure() call to throw an unhelpful exception.
format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
format.setInteger(MediaFormat.KEY_BIT_RATE, bitRate);
format.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);
// Create a MediaCodec encoder, and configure it with our format. Get a Surface
// we can use for input and wrap it with a class that handles the EGL work.
mEncoder = MediaCodec.createEncoderByType(MIME_TYPE);
mEncoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
mInputSurface = mEncoder.createInputSurface();
mEncoder.start();
// Create a MediaMuxer. We can't add the video track and start() the muxer here,
// because our MediaFormat doesn't have the Magic Goodies. These can only be
// obtained from the encoder after it has started processing data.
//
// We're not actually interested in multiplexing audio. We just want to convert
// the raw H.264 elementary stream we get from MediaCodec into a .mp4 file.
mMuxer = new MediaMuxer(outputFile.toString(),
MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
mTrackIndex = -1;
mMuxerStarted = false;
}
/**
* Returns the encoder's input surface.
*/
public Surface getInputSurface() {
return mInputSurface;
}
/**
* Releases encoder resources.
*/
public void release() {
if (mEncoder != null) {
mEncoder.stop();
mEncoder.release();
mEncoder = null;
}
if (mMuxer != null) {
// TODO: stop() throws an exception if you haven't fed it any data. Keep track
// of frames submitted, and don't call stop() if we haven't written anything.
mMuxer.stop();
mMuxer.release();
mMuxer = null;
}
}
/**
* Extracts all pending data from the encoder and forwards it to the muxer.
* <p>
* If endOfStream is not set, this returns when there is no more data to drain. If it
* is set, we send EOS to the encoder, and then iterate until we see EOS on the output.
* Calling this with endOfStream set should be done once, right before stopping the muxer.
* <p>
* We're just using the muxer to get a .mp4 file (instead of a raw H.264 stream). We're
* not recording audio.
*/
public void drainEncoder(boolean endOfStream) {
final int TIMEOUT_USEC = 10000;
if (endOfStream) {
mEncoder.signalEndOfInputStream();
}
ByteBuffer[] encoderOutputBuffers = mEncoder.getOutputBuffers();
while (true) {
int encoderStatus = mEncoder.dequeueOutputBuffer(mBufferInfo, TIMEOUT_USEC);
if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
// no output available yet
if (!endOfStream) {
break; // out of while
}
} else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
// not expected for an encoder
encoderOutputBuffers = mEncoder.getOutputBuffers();
} else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
// should happen before receiving buffers, and should only happen once
if (mMuxerStarted) {
throw new RuntimeException("format changed twice");
}
MediaFormat newFormat = mEncoder.getOutputFormat();
// now that we have the Magic Goodies, start the muxer
mTrackIndex = mMuxer.addTrack(newFormat);
mMuxer.start();
mMuxerStarted = true;
} else if (encoderStatus < 0) {
Log.w("VideoCoreEncoder", "unexpected result from encoder.dequeueOutputBuffer: " + encoderStatus);
// let's ignore it
} else {
ByteBuffer encodedData = encoderOutputBuffers[encoderStatus];
if (encodedData == null) {
throw new RuntimeException("encoderOutputBuffer " + encoderStatus + " was null");
}
if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
// The codec config data was pulled out and fed to the muxer when we got
// the INFO_OUTPUT_FORMAT_CHANGED status. Ignore it.
mBufferInfo.size = 0;
}
if (mBufferInfo.size != 0) {
if (!mMuxerStarted) {
throw new RuntimeException("muxer hasn't started");
}
// adjust the ByteBuffer values to match BufferInfo (not needed?)
encodedData.position(mBufferInfo.offset);
encodedData.limit(mBufferInfo.offset + mBufferInfo.size);
mMuxer.writeSampleData(mTrackIndex, encodedData, mBufferInfo);
}
mEncoder.releaseOutputBuffer(encoderStatus, false);
if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
if (!endOfStream) {
Log.w("VideoCoreEncoder", "reached end of stream unexpectedly");
}
break; // out of while
}
}
}
}
}

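The javadoc above prescribes draining before swapping so the producer side never backs up; a per-frame sketch of that pattern (not in the commit, names illustrative):

void recordFrame(VideoCoreEncoder encoder, EglWindowSurface inputWindow, long timestampNanos) {
    encoder.drainEncoder(false); // forward any pending output to the muxer first
    // ... render the frame onto inputWindow's surface ...
    inputWindow.setPresentationTime(timestampNanos);
    inputWindow.swapBuffers(); // submits the frame to the encoder
}
// At the end of the recording: encoder.drainEncoder(true) sends EOS, then encoder.release().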
@@ -0,0 +1,397 @@
/*
* Copyright 2013 Google Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.otaliastudios.cameraview;
import android.graphics.SurfaceTexture;
import android.opengl.EGLContext;
import android.opengl.GLES20;
import android.os.Build;
import android.os.Handler;
import android.os.Looper;
import android.os.Message;
import android.support.annotation.RequiresApi;
import android.util.Log;
import java.io.File;
import java.io.IOException;
import java.lang.ref.WeakReference;
/**
* -- from grafika --
*
* Encode a movie from frames rendered from an external texture image.
* <p>
* The object wraps an encoder running on a dedicated thread. The various control messages
* may be sent from arbitrary threads (typically the app UI thread). The encoder thread
* manages both sides of the encoder (feeding and draining); the only external input is
* the GL texture.
* <p>
* The design is complicated slightly by the need to create an EGL context that shares state
* with a view that gets restarted if (say) the device orientation changes. When the view
* in question is a GLSurfaceView, we don't have full control over the EGL context creation
* on that side, so we have to bend a bit backwards here.
* <p>
* To use:
* <ul>
* <li>create TextureMovieEncoder object
* <li>create an EncoderConfig
* <li>call TextureMovieEncoder#startRecording() with the config
* <li>call TextureMovieEncoder#setTextureId() with the texture object that receives frames
* <li>for each frame, after latching it with SurfaceTexture#updateTexImage(),
* call TextureMovieEncoder#frameAvailable().
* </ul>
*
* TODO: tweak the API (esp. textureId) so it's less awkward for simple use cases.
*/
@RequiresApi(api = Build.VERSION_CODES.JELLY_BEAN_MR2)
class VideoTextureEncoder implements Runnable {
private static final String TAG = VideoTextureEncoder.class.getSimpleName();
private static final int MSG_START_RECORDING = 0;
private static final int MSG_STOP_RECORDING = 1;
private static final int MSG_FRAME_AVAILABLE = 2;
private static final int MSG_SET_TEXTURE_ID = 3;
private static final int MSG_UPDATE_SHARED_CONTEXT = 4;
private static final int MSG_QUIT = 5;
// ----- accessed exclusively by encoder thread -----
private EglWindowSurface mInputWindowSurface;
private EglCore mEglCore;
private EglViewport mFullScreen;
private int mTextureId;
private int mFrameNum;
private VideoCoreEncoder mVideoEncoder;
// ----- accessed by multiple threads -----
private volatile EncoderHandler mHandler;
private final Object mReadyFence = new Object(); // guards ready/running
private boolean mReady;
private boolean mRunning;
/**
* Encoder configuration.
* <p>
* Object is immutable, which means we can safely pass it between threads without
* explicit synchronization (and don't need to worry about it getting tweaked out from
* under us).
* <p>
* TODO: make frame rate and iframe interval configurable?
*/
public static class EncoderConfig {
final File mOutputFile;
final int mWidth;
final int mHeight;
final int mBitRate;
final EGLContext mEglContext;
public EncoderConfig(File outputFile, int width, int height, int bitRate,
EGLContext sharedEglContext) {
mOutputFile = outputFile;
mWidth = width;
mHeight = height;
mBitRate = bitRate;
mEglContext = sharedEglContext;
}
@Override
public String toString() {
return "EncoderConfig: " + mWidth + "x" + mHeight + " @" + mBitRate +
" to '" + mOutputFile.toString() + "' ctxt=" + mEglContext;
}
}
/**
* Tells the video recorder to start recording. (Call from non-encoder thread.)
* <p>
* Creates a new thread, which will create an encoder using the provided configuration.
* <p>
* Returns after the recorder thread has started and is ready to accept Messages. The
* encoder may not yet be fully configured.
*/
public void startRecording(EncoderConfig config) {
Log.d(TAG, "Encoder: startRecording()");
synchronized (mReadyFence) {
if (mRunning) {
Log.w(TAG, "Encoder thread already running");
return;
}
mRunning = true;
new Thread(this, "TextureMovieEncoder").start();
while (!mReady) {
try {
mReadyFence.wait();
} catch (InterruptedException ie) {
// ignore
}
}
}
mHandler.sendMessage(mHandler.obtainMessage(MSG_START_RECORDING, config));
}
/**
* Tells the video recorder to stop recording. (Call from non-encoder thread.)
* <p>
* Returns immediately; the encoder/muxer may not yet be finished creating the movie.
* <p>
* TODO: have the encoder thread invoke a callback on the UI thread just before it shuts down
* so we can provide reasonable status UI (and let the caller know that movie encoding
* has completed).
*/
public void stopRecording(Runnable onStop) {
mHandler.sendMessage(mHandler.obtainMessage(MSG_STOP_RECORDING, onStop));
mHandler.sendMessage(mHandler.obtainMessage(MSG_QUIT));
// We don't know when these will actually finish (or even start). We don't want to
// delay the UI thread though, so we return immediately.
}
/**
* Returns true if recording has been started.
*/
public boolean isRecording() {
synchronized (mReadyFence) {
return mRunning;
}
}
/**
* Tells the video recorder to refresh its EGL surface. (Call from non-encoder thread.)
*/
public void updateSharedContext(EGLContext sharedContext) {
mHandler.sendMessage(mHandler.obtainMessage(MSG_UPDATE_SHARED_CONTEXT, sharedContext));
}
/**
* Tells the video recorder that a new frame is available. (Call from non-encoder thread.)
* <p>
* This function sends a message and returns immediately. This isn't sufficient -- we
* don't want the caller to latch a new frame until we're done with this one -- but we
* can get away with it so long as the input frame rate is reasonable and the encoder
* thread doesn't stall.
* <p>
* TODO: either block here until the texture has been rendered onto the encoder surface,
* or have a separate "block if still busy" method that the caller can execute immediately
* before it calls updateTexImage(). The latter is preferred because we don't want to
* stall the caller while this thread does work.
*/
public void frameAvailable(SurfaceTexture st) {
synchronized (mReadyFence) {
if (!mReady) {
return;
}
}
float[] transform = new float[16]; // TODO - avoid alloc every frame
st.getTransformMatrix(transform);
long timestamp = st.getTimestamp();
if (timestamp == 0) {
// Seeing this after device is toggled off/on with power button. The
// first frame back has a zero timestamp.
// MPEG4Writer thinks this is cause to abort() in native code, so it's very
// important that we just ignore the frame.
Log.w(TAG, "HEY: got SurfaceTexture with timestamp of zero");
return;
}
mHandler.sendMessage(mHandler.obtainMessage(MSG_FRAME_AVAILABLE,
(int) (timestamp >> 32), (int) timestamp, transform));
}
/**
* Tells the video recorder what texture name to use. This is the external texture that
* we're receiving camera previews in. (Call from non-encoder thread.)
* <p>
* TODO: do something less clumsy
*/
public void setTextureId(int id) {
synchronized (mReadyFence) {
if (!mReady) return;
}
mHandler.sendMessage(mHandler.obtainMessage(MSG_SET_TEXTURE_ID, id, 0, null));
}
/**
* Encoder thread entry point. Establishes Looper/Handler and waits for messages.
* <p>
* @see java.lang.Thread#run()
*/
@Override
public void run() {
// Establish a Looper for this thread, and define a Handler for it.
Looper.prepare();
synchronized (mReadyFence) {
mHandler = new EncoderHandler(this);
mReady = true;
mReadyFence.notify();
}
Looper.loop();
Log.d(TAG, "Encoder thread exiting");
synchronized (mReadyFence) {
mReady = mRunning = false;
mHandler = null;
}
}
/**
* Handles encoder state change requests. The handler is created on the encoder thread.
*/
private static class EncoderHandler extends Handler {
private WeakReference<VideoTextureEncoder> mWeakEncoder;
public EncoderHandler(VideoTextureEncoder encoder) {
mWeakEncoder = new WeakReference<>(encoder);
}
@Override // runs on encoder thread
public void handleMessage(Message inputMessage) {
int what = inputMessage.what;
Object obj = inputMessage.obj;
VideoTextureEncoder encoder = mWeakEncoder.get();
if (encoder == null) {
Log.w(TAG, "EncoderHandler.handleMessage: encoder is null");
return;
}
switch (what) {
case MSG_START_RECORDING:
encoder.handleStartRecording((EncoderConfig) obj);
break;
case MSG_STOP_RECORDING:
encoder.handleStopRecording((Runnable) inputMessage.obj);
break;
case MSG_FRAME_AVAILABLE:
long timestamp = (((long) inputMessage.arg1) << 32) |
(((long) inputMessage.arg2) & 0xffffffffL);
encoder.handleFrameAvailable((float[]) obj, timestamp);
break;
case MSG_SET_TEXTURE_ID:
encoder.handleSetTexture(inputMessage.arg1);
break;
case MSG_UPDATE_SHARED_CONTEXT:
encoder.handleUpdateSharedContext((EGLContext) inputMessage.obj);
break;
case MSG_QUIT:
Looper.myLooper().quit();
break;
default:
throw new RuntimeException("Unhandled msg what=" + what);
}
}
}
/**
* Starts recording.
*/
private void handleStartRecording(EncoderConfig config) {
Log.d(TAG, "handleStartRecording " + config);
mFrameNum = 0;
prepareEncoder(config.mEglContext, config.mWidth, config.mHeight, config.mBitRate,
config.mOutputFile);
}
/**
* Handles notification of an available frame.
* <p>
* The texture is rendered onto the encoder's input surface, along with a moving
* box (just because we can).
* <p>
* @param transform The texture transform, from SurfaceTexture.
* @param timestampNanos The frame's timestamp, from SurfaceTexture.
*/
private void handleFrameAvailable(float[] transform, long timestampNanos) {
mVideoEncoder.drainEncoder(false);
mFullScreen.drawFrame(mTextureId, transform);
mInputWindowSurface.setPresentationTime(timestampNanos);
mInputWindowSurface.swapBuffers();
}
/**
* Handles a request to stop encoding.
*/
private void handleStopRecording(Runnable onStop) {
Log.d(TAG, "handleStopRecording");
mVideoEncoder.drainEncoder(true);
releaseEncoder();
onStop.run();
}
/**
* Sets the texture name that SurfaceTexture will use when frames are received.
*/
private void handleSetTexture(int id) {
mTextureId = id;
}
/**
* Tears down the EGL surface and context we've been using to feed the MediaCodec input
* surface, and replaces it with a new one that shares with the new context.
* <p>
* This is useful if the old context we were sharing with went away (maybe a GLSurfaceView
* that got torn down) and we need to hook up with the new one.
*/
private void handleUpdateSharedContext(EGLContext newSharedContext) {
Log.d(TAG, "handleUpdatedSharedContext " + newSharedContext);
// Release the EGLSurface and EGLContext.
mInputWindowSurface.releaseEglSurface();
mFullScreen.release(false);
mEglCore.release();
// Create a new EGLContext and recreate the window surface.
mEglCore = new EglCore(newSharedContext, EglCore.FLAG_RECORDABLE);
mInputWindowSurface.recreate(mEglCore);
mInputWindowSurface.makeCurrent();
// Create new programs and such for the new context.
mFullScreen = new EglViewport();
}
private void prepareEncoder(EGLContext sharedContext, int width, int height, int bitRate,
File outputFile) {
try {
mVideoEncoder = new VideoCoreEncoder(width, height, bitRate, outputFile);
} catch (IOException ioe) {
throw new RuntimeException(ioe);
}
mEglCore = new EglCore(sharedContext, EglCore.FLAG_RECORDABLE);
mInputWindowSurface = new EglWindowSurface(mEglCore, mVideoEncoder.getInputSurface(), true);
mInputWindowSurface.makeCurrent();
mFullScreen = new EglViewport();
}
private void releaseEncoder() {
mVideoEncoder.release();
if (mInputWindowSurface != null) {
mInputWindowSurface.release();
mInputWindowSurface = null;
}
if (mFullScreen != null) {
mFullScreen.release(false);
mFullScreen = null;
}
if (mEglCore != null) {
mEglCore.release();
mEglCore = null;
}
}
}

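The "To use" list in the class javadoc, spelled out as a driver sketch (not in the commit; resolution and bit rate are illustrative). Note how frameAvailable() splits the 64-bit timestamp across the two 32-bit Message args, which handleMessage() reassembles:

class SnapshotDriver {
    private final VideoTextureEncoder encoder = new VideoTextureEncoder();

    void begin(File output, int cameraTextureId) {
        encoder.startRecording(new VideoTextureEncoder.EncoderConfig(
                output, 1280, 720, 1000000,
                EGL14.eglGetCurrentContext())); // share the renderer's EGL context
        encoder.setTextureId(cameraTextureId); // the external OES texture receiving camera frames
    }

    // Renderer thread, after SurfaceTexture#updateTexImage():
    void onFrame(SurfaceTexture st) {
        encoder.frameAvailable(st);
    }

    void end() {
        encoder.stopRecording(new Runnable() {
            @Override
            public void run() {
                // Runs on the encoder thread once the muxer has closed.
            }
        });
    }
}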
@@ -696,6 +696,39 @@ class Camera1 extends CameraController implements Camera.PreviewCallback, Camera
});
}
@Override
void takeVideoSnapshot(@NonNull final File file) {
// TODO check api level and Preview instance
schedule(mStartVideoTask, true, new Runnable() {
@Override
public void run() {
if (mIsTakingVideo) return;
mIsTakingVideo = true;
// Create the video result stub
VideoResult videoResult = new VideoResult();
videoResult.file = file;
videoResult.isSnapshot = true;
videoResult.codec = mVideoCodec;
videoResult.location = mLocation;
videoResult.rotation = offset(REF_SENSOR, REF_OUTPUT);
Size preview = getPreviewSize(REF_VIEW); // The preview stream size in REF_VIEW
Size view = mPreview.getOutputSurfaceSize(); // The view size in REF_VIEW
Rect crop = CropHelper.computeCrop(preview, AspectRatio.of(view.getWidth(), view.getHeight()));
Size cropSize = new Size(crop.width(), crop.height()); // The visible size in REF_VIEW
// Move the REF_VIEW size to REF_OUTPUT
videoResult.size = flip(REF_VIEW, REF_OUTPUT) ? cropSize.flip() : cropSize;
videoResult.audio = mAudio;
videoResult.maxSize = mVideoMaxSize;
videoResult.maxDuration = mVideoMaxDuration;
GLCameraPreview cameraPreview = (GLCameraPreview) mPreview;
mVideoRecorder = new MediaCodecVideoRecorder(videoResult, Camera1.this, cameraPreview, mCameraId);
mVideoRecorder.start();
}
});
}
@Override
void stopVideo() {
schedule(null, false, new Runnable() {

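A hedged numeric walk-through of the size bookkeeping above, assuming CropHelper.computeCrop returns the largest sub-rectangle of the preview that matches the requested aspect ratio: with a preview stream of 1080x1440 in REF_VIEW and a view surface of 1080x1920, the target ratio is 9:16; the largest 9:16 rectangle inside 1080x1440 is 810x1440 (1440 * 9 / 16 = 810), so cropSize is 810x1440, and if REF_VIEW and REF_OUTPUT are flipped (a 90 or 270 degree offset) videoResult.size becomes 1440x810.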
@@ -331,6 +331,8 @@ abstract class CameraController implements
abstract void takeVideo(@NonNull File file);
abstract void takeVideoSnapshot(@NonNull File file);
abstract void stopVideo();
abstract void startAutoFocus(@Nullable Gesture gesture, PointF point);

@@ -1171,7 +1171,7 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
*
* @param file a file where the video will be saved
*/
-public void takeVideo(File file) {
+public void takeVideo(@Nullable File file) {
if (file == null) {
file = new File(getContext().getFilesDir(), "video.mp4");
}
@@ -1185,6 +1185,20 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
});
}
public void takeVideoSnapshot(@Nullable File file) {
if (file == null) {
file = new File(getContext().getFilesDir(), "video.mp4");
}
mCameraController.takeVideoSnapshot(file);
mUiHandler.post(new Runnable() {
@Override
public void run() {
mKeepScreenOn = getKeepScreenOn();
if (!mKeepScreenOn) setKeepScreenOn(true);
}
});
}
/**
* Starts recording a video. Video will be written to the given file,
@@ -1209,6 +1223,19 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
takeVideo(file);
}
public void takeVideoSnapshot(File file, int durationMillis) {
final int old = getVideoMaxDuration();
addCameraListener(new CameraListener() {
@Override
public void onVideoTaken(VideoResult result) {
setVideoMaxDuration(old);
removeCameraListener(this);
}
});
setVideoMaxDuration(durationMillis);
takeVideoSnapshot(file);
}
// TODO: pauseCapturingVideo and resumeCapturingVideo. There is mediarecorder.pause(), but API 24...

@@ -1,27 +1,98 @@
package com.otaliastudios.cameraview;
import android.graphics.SurfaceTexture;
import android.hardware.Camera;
import android.media.CamcorderProfile;
import android.media.MediaRecorder;
import android.opengl.EGL14;
import android.os.Build;
import android.os.Handler;
import android.support.annotation.RequiresApi;
/**
* A {@link VideoRecorder} that uses {@link android.media.MediaCodec} APIs.
*/
-class MediaCodecVideoRecorder extends VideoRecorder {
+@RequiresApi(api = Build.VERSION_CODES.JELLY_BEAN_MR2)
+class MediaCodecVideoRecorder extends VideoRecorder implements GLCameraPreview.RendererFrameCallback {
private static final String TAG = MediaCodecVideoRecorder.class.getSimpleName();
private static final CameraLogger LOG = CameraLogger.create(TAG);
-MediaCodecVideoRecorder(VideoResult stub, VideoResultListener listener, Camera camera, int cameraId) {
+private static final int STATE_RECORDING = 0;
+private static final int STATE_NOT_RECORDING = 1;
+private CamcorderProfile mProfile;
+private VideoTextureEncoder mEncoder;
+private GLCameraPreview mPreview;
+private int mCurrentState = STATE_NOT_RECORDING;
+private int mDesiredState = STATE_NOT_RECORDING;
+private int mTextureId = 0;
+MediaCodecVideoRecorder(VideoResult stub, VideoResultListener listener, GLCameraPreview preview, int cameraId) {
super(stub, listener);
mProfile = CamcorderProfile.get(cameraId, CamcorderProfile.QUALITY_HIGH);
mEncoder = new VideoTextureEncoder();
mPreview = preview;
mPreview.setRendererFrameCallback(this);
}
@Override
void start() {
mDesiredState = STATE_RECORDING;
if (mResult.maxDuration > 0) {
new Handler().postDelayed(new Runnable() {
@Override
public void run() {
mDesiredState = STATE_NOT_RECORDING;
}
}, (long) mResult.maxDuration);
}
}
@Override
void stop() {
mDesiredState = STATE_NOT_RECORDING;
}
@Override
public void onRendererTextureCreated(int textureId) {
mTextureId = textureId;
}
@Override
public void onRendererFrame(SurfaceTexture surfaceTexture) {
if (mCurrentState == STATE_NOT_RECORDING && mDesiredState == STATE_RECORDING) {
VideoTextureEncoder.EncoderConfig configuration = new VideoTextureEncoder.EncoderConfig(
mResult.file,
mResult.size.getWidth(),
mResult.size.getHeight(),
1000000,
EGL14.eglGetCurrentContext()
);
mEncoder.startRecording(configuration);
mEncoder.setTextureId(mTextureId);
mCurrentState = STATE_RECORDING;
}
if (mCurrentState == STATE_RECORDING) {
mEncoder.frameAvailable(surfaceTexture);
}
if (mCurrentState == STATE_RECORDING && mDesiredState == STATE_NOT_RECORDING) {
mEncoder.stopRecording(new Runnable() {
@Override
-void close() {
+public void run() {
// We are in the encoder thread.
dispatchResult();
}
});
mCurrentState = STATE_NOT_RECORDING;
mEncoder = null;
mPreview.setRendererFrameCallback(null);
mPreview = null;
}
}
}

@@ -90,7 +90,7 @@ class MediaRecorderVideoRecorder extends VideoRecorder {
}
@Override
-void close() {
+void stop() {
if (mMediaRecorder != null) {
try {
mMediaRecorder.stop();
@@ -103,5 +103,6 @@ class MediaRecorderVideoRecorder extends VideoRecorder {
mProfile = null;
mMediaRecorder = null;
mMapper = null;
dispatchResult();
}
}

@@ -21,17 +21,16 @@ abstract class VideoRecorder {
abstract void start();
-final void stop() {
+abstract void stop();
+protected void dispatchResult() {
if (mListener != null) {
-close();
mListener.onVideoResult(mResult);
mListener = null;
mResult = null;
}
}
-abstract void close();
interface VideoResultListener {
void onVideoResult(@Nullable VideoResult result);

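Under the reworked contract, stop() is abstract and a subclass calls the protected dispatchResult() exactly once when its output file is complete. A minimal subclass sketch (not in the commit):

class NoOpVideoRecorder extends VideoRecorder {
    NoOpVideoRecorder(VideoResult stub, VideoResultListener listener) {
        super(stub, listener);
    }
    @Override
    void start() {
        // start producing media into mResult.file ...
    }
    @Override
    void stop() {
        // finish writing (possibly asynchronously), then report back once:
        dispatchResult();
    }
}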
@@ -5,7 +5,7 @@ import android.graphics.SurfaceTexture;
import android.opengl.GLSurfaceView;
import android.opengl.Matrix;
import android.support.annotation.NonNull;
-import android.util.Log;
+import android.support.annotation.Nullable;
import android.view.LayoutInflater;
import android.view.SurfaceHolder;
import android.view.View;
@@ -53,7 +53,8 @@ class GLCameraPreview extends CameraPreview<GLSurfaceView, SurfaceTexture> imple
private final float[] mTransformMatrix = new float[16];
private int mOutputTextureId = -1;
private SurfaceTexture mInputSurfaceTexture;
-private GLViewport mOutputViewport;
+private EglViewport mOutputViewport;
private RendererFrameCallback mRendererFrameCallback;
GLCameraPreview(Context context, ViewGroup parent, SurfaceCallback callback) {
super(context, parent, callback);
@@ -104,6 +105,7 @@ class GLCameraPreview extends CameraPreview<GLSurfaceView, SurfaceTexture> imple
mInputSurfaceTexture.release();
mInputSurfaceTexture = null;
}
mOutputTextureId = 0;
if (mOutputViewport != null) {
mOutputViewport.release();
mOutputViewport = null;
@@ -111,7 +113,7 @@ class GLCameraPreview extends CameraPreview<GLSurfaceView, SurfaceTexture> imple
}
private void createInputSurfaceTexture() {
-mOutputViewport = new GLViewport();
+mOutputViewport = new EglViewport();
mOutputTextureId = mOutputViewport.createTexture();
mInputSurfaceTexture = new SurfaceTexture(mOutputTextureId);
@@ -169,6 +171,10 @@ class GLCameraPreview extends CameraPreview<GLSurfaceView, SurfaceTexture> imple
return;
}
if (mRendererFrameCallback != null) {
mRendererFrameCallback.onRendererFrame(mInputSurfaceTexture);
}
// Draw the video frame.
mInputSurfaceTexture.getTransformMatrix(mTransformMatrix);
if (isCropping()) {
@@ -324,4 +330,19 @@ class GLCameraPreview extends CameraPreview<GLSurfaceView, SurfaceTexture> imple
}
});
}
interface RendererFrameCallback {
// Renderer thread.
void onRendererTextureCreated(int textureId);
// Renderer thread.
void onRendererFrame(SurfaceTexture surfaceTexture);
}
void setRendererFrameCallback(@Nullable RendererFrameCallback callback) {
mRendererFrameCallback = callback;
if (mRendererFrameCallback != null && mOutputTextureId != 0) {
mRendererFrameCallback.onRendererTextureCreated(mOutputTextureId);
}
}
}

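A registration sketch for the new callback (not in the commit); MediaCodecVideoRecorder above is the real consumer. Both methods run on the renderer thread, and onRendererTextureCreated() also fires synchronously at registration time if the texture already exists:

preview.setRendererFrameCallback(new GLCameraPreview.RendererFrameCallback() {
    @Override
    public void onRendererTextureCreated(int textureId) {
        // remember the OES texture id; frames will arrive on this same thread
    }
    @Override
    public void onRendererFrame(SurfaceTexture surfaceTexture) {
        // called once per drawn frame, before the viewport renders it
    }
});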
@@ -53,6 +53,7 @@ public class CameraActivity extends AppCompatActivity implements View.OnClickLis
findViewById(R.id.capturePicture).setOnClickListener(this);
findViewById(R.id.capturePictureSnapshot).setOnClickListener(this);
findViewById(R.id.captureVideo).setOnClickListener(this);
findViewById(R.id.captureVideoSnapshot).setOnClickListener(this);
findViewById(R.id.toggleCamera).setOnClickListener(this);
controlPanel = findViewById(R.id.controls);
@@ -118,6 +119,7 @@ public class CameraActivity extends AppCompatActivity implements View.OnClickLis
case R.id.capturePicture: capturePicture(); break;
case R.id.capturePictureSnapshot: capturePictureSnapshot(); break;
case R.id.captureVideo: captureVideo(); break;
case R.id.captureVideoSnapshot: captureVideoSnapshot(); break;
case R.id.toggleCamera: toggleCamera(); break;
}
}
@@ -165,6 +167,15 @@ public class CameraActivity extends AppCompatActivity implements View.OnClickLis
camera.takeVideo(null, 5000);
}
private void captureVideoSnapshot() {
if (camera.isTakingVideo()) {
message("Already taking video.", false);
return;
}
message("Recording snapshot for 5 seconds...", true);
camera.takeVideoSnapshot(null, 5000);
}
private void toggleCamera() {
if (camera.isTakingPicture() || camera.isTakingVideo()) return;
switch (camera.toggleFacing()) {
