+ * The EGLContext must only be attached to one thread at a time. This class is not thread-safe. + */ + +public final class EglCore { + + private final static String TAG = EglCore.class.getSimpleName(); + + public final static int FLAG_RECORDABLE = 0x01; + + public final static int FLAG_TRY_GLES3 = 0x02; + + private final static int EGL_RECORDABLE_ANDROID = 0x3142; + + private int mGlVersion = -1; + private EGLConfig mEGLConfig = null; + private EGLDisplay mEGLDisplay = EGL14.EGL_NO_DISPLAY; + private EGLContext mEGLContext = EGL14.EGL_NO_CONTEXT; + + public EglCore() { + this(null, 0); + } + + public EglCore(EGLContext sharedContext, int flag) { + mEGLDisplay = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY); + int[] version = new int[2]; + if (!EGL14.eglInitialize(mEGLDisplay, version, 0, version, 1)) { + throw new RuntimeException("unable to init EGL14"); + } + + if ((flag & FLAG_TRY_GLES3) != 0) { + initEGLContext(sharedContext, flag, 3); + } + if (mEGLContext == EGL14.EGL_NO_CONTEXT) { + initEGLContext(sharedContext, flag, 2); + } + + int[] value = new int[1]; + EGL14.eglQueryContext(mEGLDisplay, mEGLContext, EGL14.EGL_CONTEXT_CLIENT_VERSION, value, 0); + Log.i(TAG, "EGLContext client version=" + value[0]); + } + + private void initEGLContext(EGLContext sharedContext, int flag, int version) { + EGLConfig config = getConfig(flag, version); + if (config == null) { + throw new RuntimeException("unable to find suitable EGLConfig"); + } + int[] attributeList = {EGL14.EGL_CONTEXT_CLIENT_VERSION, version, EGL14.EGL_NONE}; + EGLContext context = EGL14.eglCreateContext(mEGLDisplay, config, sharedContext, attributeList, 0); + if (EGL14.eglGetError() == EGL14.EGL_SUCCESS) { + mEGLConfig = config; + mEGLContext = context; + mGlVersion = version; + } + } + + private EGLConfig getConfig(int flag, int version) { + int renderType = EGL14.EGL_OPENGL_ES2_BIT; + if (version >= 3) { + renderType |= EGLExt.EGL_OPENGL_ES3_BIT_KHR; + } + + int[] attributeList = { + EGL14.EGL_RED_SIZE, 8, + EGL14.EGL_GREEN_SIZE, 8, + EGL14.EGL_BLUE_SIZE, 8, + EGL14.EGL_ALPHA_SIZE, 8, + //EGL14.EGL_DEPTH_SIZE, 16, + //EGL14.EGL_STENCIL_SIZE, 8, + EGL14.EGL_RENDERABLE_TYPE, renderType, + EGL14.EGL_NONE, 0, + EGL14.EGL_NONE + }; + + if ((flag & FLAG_RECORDABLE) != 0) { + attributeList[attributeList.length - 3] = EGL_RECORDABLE_ANDROID; + attributeList[attributeList.length - 2] = 1; + } + int[] numConfigs = new int[1]; + EGLConfig[] configs = new EGLConfig[1]; + if (!EGL14.eglChooseConfig(mEGLDisplay, attributeList, 0, configs, + 0, configs.length, numConfigs, 0)) { + Log.e(TAG, "unable to find RGB8888 / " + version + " EGLConfig"); + return null; + } + return configs[0]; + } + + public void release() { + if (mEGLDisplay != EGL14.EGL_NO_DISPLAY) { + EGL14.eglMakeCurrent(mEGLDisplay, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_CONTEXT); + EGL14.eglDestroyContext(mEGLDisplay, mEGLContext); + EGL14.eglReleaseThread(); + EGL14.eglTerminate(mEGLDisplay); + } + mEGLConfig = null; + mEGLDisplay = EGL14.EGL_NO_DISPLAY; + mEGLContext = EGL14.EGL_NO_CONTEXT; + } + + @Override + protected void finalize() throws Throwable { + try { + if (mEGLDisplay != EGL14.EGL_NO_DISPLAY) { + release(); + } + } finally { + super.finalize(); + } + } + + public void releaseSurface(EGLSurface eglSurface) { + if (mEGLDisplay != EGL14.EGL_NO_DISPLAY) { + EGL14.eglDestroySurface(mEGLDisplay, eglSurface); + } + } + + public EGLSurface createWindowSurface(Object surface) { + if (!(surface instanceof Surface) && !(surface instanceof SurfaceTexture)) { 
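+ // EGL needs a real native window; anything other than a Surface or SurfaceTexture is rejected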
+ throw new RuntimeException("invalid surface: " + surface);
+ }
+
+ int[] surfaceAttr = {EGL14.EGL_NONE};
+ EGLSurface eglSurface = EGL14.eglCreateWindowSurface(mEGLDisplay, mEGLConfig, surface, surfaceAttr, 0);
+ if (eglSurface == null) {
+ throw new RuntimeException("window surface is null");
+ }
+ return eglSurface;
+ }
+
+ public EGLSurface createOffsetScreenSurface(int width, int height) {
+ int[] surfaceAttr = {EGL14.EGL_WIDTH, width,
+ EGL14.EGL_HEIGHT, height,
+ EGL14.EGL_NONE};
+ EGLSurface eglSurface = EGL14.eglCreatePbufferSurface(mEGLDisplay, mEGLConfig, surfaceAttr, 0);
+ if (eglSurface == null) {
+ throw new RuntimeException("offscreen surface is null");
+ }
+ return eglSurface;
+ }
+
+ public void makeCurrent(EGLSurface eglSurface) {
+ if (!EGL14.eglMakeCurrent(mEGLDisplay, eglSurface, eglSurface, mEGLContext)) {
+ throw new RuntimeException("eglMakeCurrent failed!");
+ }
+ }
+
+ public void makeCurrent(EGLSurface drawSurface, EGLSurface readSurface) {
+ if (!EGL14.eglMakeCurrent(mEGLDisplay, drawSurface, readSurface, mEGLContext)) {
+ throw new RuntimeException("eglMakeCurrent failed!");
+ }
+ }
+
+ public boolean swapBuffers(EGLSurface eglSurface) {
+ return EGL14.eglSwapBuffers(mEGLDisplay, eglSurface);
+ }
+
+ public void setPresentationTime(EGLSurface eglSurface, long nsec) {
+ EGLExt.eglPresentationTimeANDROID(mEGLDisplay, eglSurface, nsec);
+ }
+
+ public boolean isCurrent(EGLSurface eglSurface) {
+ return mEGLContext.equals(EGL14.eglGetCurrentContext())
+ && eglSurface.equals(EGL14.eglGetCurrentSurface(EGL14.EGL_DRAW));
+ }
+
+ public int querySurface(EGLSurface eglSurface, int what) {
+ int[] value = new int[1];
+ EGL14.eglQuerySurface(mEGLDisplay, eglSurface, what, value, 0);
+ return value[0];
+ }
+
+ public String queryString(int what) {
+ return EGL14.eglQueryString(mEGLDisplay, what);
+ }
+
+ public int getVersion() {
+ return mGlVersion;
+ }
+
+}
diff --git a/CameraFilter/src/main/java/com/frank/camerafilter/recorder/gles/EglSurfaceBase.java b/CameraFilter/src/main/java/com/frank/camerafilter/recorder/gles/EglSurfaceBase.java
new file mode 100644
index 0000000..f05d07b
--- /dev/null
+++ b/CameraFilter/src/main/java/com/frank/camerafilter/recorder/gles/EglSurfaceBase.java
@@ -0,0 +1,106 @@
+package com.frank.camerafilter.recorder.gles;
+
+import android.graphics.Bitmap;
+import android.opengl.EGL14;
+import android.opengl.EGLSurface;
+import android.opengl.GLES20;
+
+import java.io.BufferedOutputStream;
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.nio.IntBuffer;
+
+/**
+ * @author xufulong
+ * @date 2022/6/23 8:51 AM
+ * @desc
+ */
+public class EglSurfaceBase {
+
+ protected EglCore mEglCore;
+ protected int mWidth = -1;
+ protected int mHeight = -1;
+
+ private EGLSurface mEGLSurface = EGL14.EGL_NO_SURFACE;
+
+ protected EglSurfaceBase(EglCore eglCore) {
+ mEglCore = eglCore;
+ }
+
+ public void createWindowSurface(Object surface) {
+ if (mEGLSurface != EGL14.EGL_NO_SURFACE) {
+ throw new IllegalStateException("EGL surface has already been created");
+ }
+ mEGLSurface = mEglCore.createWindowSurface(surface);
+ }
+
+ public void createOffsetScreenSurface(int width, int height) {
+ if (mEGLSurface != EGL14.EGL_NO_SURFACE) {
+ throw new IllegalStateException("EGL surface has already been created");
+ }
+ mWidth = width;
+ mHeight = height;
+ mEGLSurface = mEglCore.createOffsetScreenSurface(width, height);
+ }
+
+ public int getWidth() {
+ if (mWidth <= 0) {
+ mWidth = mEglCore.querySurface(mEGLSurface,
EGL14.EGL_WIDTH); + } + return mWidth; + } + + public int getHeight() { + if (mHeight <= 0) { + mHeight = mEglCore.querySurface(mEGLSurface, EGL14.EGL_HEIGHT); + } + return mHeight; + } + + public void releaseEglSurface() { + mEglCore.releaseSurface(mEGLSurface); + mEGLSurface = EGL14.EGL_NO_SURFACE; + mWidth = -1; + mHeight = -1; + } + + public void makeCurrent() { + mEglCore.makeCurrent(mEGLSurface); + } + + public void makeCurrentReadFrom(EglSurfaceBase readSurface) { + mEglCore.makeCurrent(mEGLSurface, readSurface.mEGLSurface); + } + + public boolean swapBuffers() { + return mEglCore.swapBuffers(mEGLSurface); + } + + public void setPresentationTime(long nsec) { + mEglCore.setPresentationTime(mEGLSurface, nsec); + } + + public void saveFrame(File file) throws IOException { + if (!mEglCore.isCurrent(mEGLSurface)) { + throw new RuntimeException("isn't current surface/context"); + } + String fileName = file.toString(); + int width = getWidth(); + int height = getHeight(); + IntBuffer buffer = IntBuffer.allocate(width * height); + GLES20.glReadPixels(0, 0, width, height, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, buffer); + BufferedOutputStream outputStream = null; + try { + outputStream = new BufferedOutputStream(new FileOutputStream(fileName)); + Bitmap bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888); + bitmap.copyPixelsFromBuffer(buffer); + bitmap.compress(Bitmap.CompressFormat.JPEG, 100, outputStream); + bitmap.recycle(); + } finally { + if (outputStream != null) + outputStream.close(); + } + } + +} diff --git a/CameraFilter/src/main/java/com/frank/camerafilter/recorder/video/TextureVideoRecorder.java b/CameraFilter/src/main/java/com/frank/camerafilter/recorder/video/TextureVideoRecorder.java new file mode 100644 index 0000000..7535e9e --- /dev/null +++ b/CameraFilter/src/main/java/com/frank/camerafilter/recorder/video/TextureVideoRecorder.java @@ -0,0 +1,325 @@ +package com.frank.camerafilter.recorder.video; + +import android.content.Context; +import android.graphics.SurfaceTexture; +import android.opengl.EGLContext; +import android.os.Handler; +import android.os.Looper; +import android.os.Message; + +import androidx.annotation.NonNull; + +import com.frank.camerafilter.filter.BeautyCameraFilter; +import com.frank.camerafilter.filter.BaseFilter; +import com.frank.camerafilter.factory.BeautyFilterFactory; +import com.frank.camerafilter.factory.BeautyFilterType; +import com.frank.camerafilter.recorder.gles.EglCore; + +import java.io.File; +import java.io.IOException; +import java.lang.ref.WeakReference; +import java.nio.FloatBuffer; + +/** + * Encode a movie from frames rendered from an external texture image. + *
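+ * Frames arrive from the camera as an external OES texture, are rendered into the
+ * encoder's input surface with GL, and the encoded stream is muxed into an MP4 file.
+ *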
+ * The object wraps an encoder running on a dedicated thread. The various control messages + * may be sent from arbitrary threads (typically the app UI thread). The encoder thread + * manages both sides of the encoder (feeding and draining); the only external input is + * the GL texture. + *
+ * The design is complicated slightly by the need to create an EGL context that shares state
+ * with a view that gets restarted if (say) the device orientation changes. When the view
+ * in question is a GLSurfaceView, we don't have full control over the EGL context creation
+ * on that side, so we have to bend over backwards a bit here.
+ *
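+ * (CameraRender below handles that restart case by passing the fresh EGLContext to
+ * updateSharedContext() when recording resumes.)
+ *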
+ * To use:
+ * <ul>
+ * <li>create a TextureVideoRecorder
+ * <li>call startRecording() with a RecorderConfig (size, bitrate, output file, shared EGLContext)
+ * <li>call setTextureId() with the texture that receives the camera frames
+ * <li>for each frame, after SurfaceTexture.updateTexImage(), call frameAvailable()
+ * <li>call stopRecording() when done
+ * </ul>
+ *
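+ * A minimal sketch of that sequence, mirroring the calls CameraRender makes further down
+ * in this diff (error handling and thread hand-off omitted):
+ * <pre>
+ *     TextureVideoRecorder recorder = new TextureVideoRecorder(context);
+ *     recorder.startRecording(new TextureVideoRecorder.RecorderConfig(
+ *             width, height, bitrate, outputFile, EGL14.eglGetCurrentContext()));
+ *     recorder.setTextureId(textureId);         // texture the camera frames land in
+ *     recorder.frameAvailable(surfaceTexture);  // once per rendered frame
+ *     recorder.stopRecording();
+ * </pre>
+ *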
+ * Once created, frames are fed to the input surface. Remember to provide the presentation + * time stamp, and always call drainEncoder() before swapBuffers() to ensure that the + * producer side doesn't get backed up. + *
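+ * (Draining first keeps MediaCodec's output buffers free, so a full encoder can never
+ * stall the GL thread inside swapBuffers().)
+ *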
+ * This class is not thread-safe, with one exception: it is valid to use the input surface + * on one thread, and drain the output on a different thread. + */ +public class VideoRecorderCore { + + private final static String TAG = VideoRecorderCore.class.getSimpleName(); + + private final static int FRAME_RATE = 30; + private final static int IFRAME_INTERVAL = 5; + private final static String MIME_TYPE = "video/avc"; + private final static int TIMEOUT_USEC = 10000; + + private int mTrackIndex; + private boolean mMuxerStarted; + private final Surface mInputSurface; + private MediaMuxer mMediaMuxer; + private MediaCodec mVideoEncoder; + private final MediaCodec.BufferInfo mBufferInfo; + + public VideoRecorderCore(int width, int height, int bitrate, File outputFile) throws IOException { + mBufferInfo = new MediaCodec.BufferInfo(); + MediaFormat mediaFormat = MediaFormat.createVideoFormat(MIME_TYPE, width, height); + mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface); + mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, bitrate); + mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE); + mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL); + + mVideoEncoder = MediaCodec.createEncoderByType(MIME_TYPE); + mVideoEncoder.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE); + mInputSurface = mVideoEncoder.createInputSurface(); + mVideoEncoder.start(); + + mMediaMuxer = new MediaMuxer(outputFile.toString(), MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4); + mTrackIndex = -1; + mMuxerStarted = false; + } + + public Surface getInputSurface() { + return mInputSurface; + } + + public void drainEncoder(boolean endOfStream) { + if (endOfStream) { + mVideoEncoder.signalEndOfInputStream(); + } + + ByteBuffer[] outputBuffers = mVideoEncoder.getOutputBuffers(); + while (true) { + int encodeStatus = mVideoEncoder.dequeueOutputBuffer(mBufferInfo, TIMEOUT_USEC); + if (encodeStatus == MediaCodec.INFO_TRY_AGAIN_LATER) { + if (!endOfStream) { + break; + } + } else if (encodeStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) { + outputBuffers = mVideoEncoder.getOutputBuffers(); + } else if (encodeStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) { + if (mMuxerStarted) { + throw new RuntimeException("format has changed!"); + } + MediaFormat newFormat = mVideoEncoder.getOutputFormat(); + mTrackIndex = mMediaMuxer.addTrack(newFormat); + mMediaMuxer.start(); + mMuxerStarted = true; + } else if (encodeStatus < 0) { + Log.e(TAG, "error encodeStatus=" + encodeStatus); + } else { + ByteBuffer data = outputBuffers[encodeStatus]; + if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) { + mBufferInfo.size = 0; + } + if (mBufferInfo.size != 0) { + if (!mMuxerStarted) { + throw new RuntimeException("muxer hasn't started"); + } + data.position(mBufferInfo.offset); + data.limit(mBufferInfo.offset + mBufferInfo.size); + mMediaMuxer.writeSampleData(mTrackIndex, data, mBufferInfo); + } + mVideoEncoder.releaseOutputBuffer(encodeStatus, false); + // end of stream + if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) { + break; + } + } + } + } + + public void release() { + if (mVideoEncoder != null) { + mVideoEncoder.stop(); + mVideoEncoder.release(); + mVideoEncoder = null; + } + if (mMediaMuxer != null) { + mMediaMuxer.stop(); + mMediaMuxer.release(); + mMediaMuxer = null; + } + } + +} diff --git a/CameraFilter/src/main/java/com/frank/camerafilter/recorder/video/WindowEglSurface.java 
b/CameraFilter/src/main/java/com/frank/camerafilter/recorder/video/WindowEglSurface.java
new file mode 100644
index 0000000..42f2b0b
--- /dev/null
+++ b/CameraFilter/src/main/java/com/frank/camerafilter/recorder/video/WindowEglSurface.java
@@ -0,0 +1,45 @@
+package com.frank.camerafilter.recorder.video;
+
+import android.view.Surface;
+
+import com.frank.camerafilter.recorder.gles.EglCore;
+import com.frank.camerafilter.recorder.gles.EglSurfaceBase;
+
+/**
+ * @author xufulong
+ * @date 2022/6/23 9:15 AM
+ * @desc
+ */
+public class WindowEglSurface extends EglSurfaceBase {
+
+ private Surface mSurface;
+ private boolean mReleaseSurface;
+
+ public WindowEglSurface(EglCore eglCore, Surface surface) {
+ this(eglCore, surface, false);
+ }
+
+ public WindowEglSurface(EglCore eglCore, Surface surface, boolean releaseSurface) {
+ super(eglCore);
+ createWindowSurface(surface);
+ mSurface = surface;
+ mReleaseSurface = releaseSurface;
+ }
+
+ public void release() {
+ releaseEglSurface();
+ if (mSurface != null && mReleaseSurface) {
+ mSurface.release();
+ }
+ mSurface = null;
+ }
+
+ public void recreate(EglCore newEglCore) {
+ if (mSurface == null) {
+ throw new RuntimeException("Surface is null");
+ }
+ mEglCore = newEglCore;
+ createWindowSurface(mSurface);
+ }
+
+}
diff --git a/CameraFilter/src/main/java/com/frank/camerafilter/util/OpenGLUtil.java b/CameraFilter/src/main/java/com/frank/camerafilter/util/OpenGLUtil.java
new file mode 100644
index 0000000..5eecd86
--- /dev/null
+++ b/CameraFilter/src/main/java/com/frank/camerafilter/util/OpenGLUtil.java
@@ -0,0 +1,128 @@
+package com.frank.camerafilter.util;
+
+import android.content.Context;
+import android.content.res.AssetManager;
+import android.graphics.Bitmap;
+import android.graphics.BitmapFactory;
+import android.opengl.GLES11Ext;
+import android.opengl.GLES30;
+import android.opengl.GLUtils;
+import android.util.Log;
+
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+
+import javax.microedition.khronos.opengles.GL10;
+
+public class OpenGLUtil {
+
+ public final static int ON_DRAWN = 1;
+ public static final int NOT_INIT = -1;
+ public static final int NO_SHADER = 0;
+ public static final int NO_TEXTURE = -1;
+
+ private static Bitmap getBitmapFromAssetFile(Context context, String name) {
+ try {
+ AssetManager assetManager = context.getResources().getAssets();
+ InputStream stream = assetManager.open(name);
+ Bitmap bitmap = BitmapFactory.decodeStream(stream);
+ stream.close();
+ return bitmap;
+ } catch (IOException e) {
+ return null;
+ }
+ }
+
+ public static int loadTexture(final Context context, final String name) {
+ if (context == null || name == null)
+ return NO_TEXTURE;
+ // decode the asset first, so a missing file cannot leak a generated texture name
+ Bitmap bitmap = getBitmapFromAssetFile(context, name);
+ if (bitmap == null)
+ return NO_TEXTURE;
+ final int[] textures = new int[1];
+ GLES30.glGenTextures(1, textures, 0);
+ if (textures[0] == 0) {
+ bitmap.recycle();
+ return NO_TEXTURE;
+ }
+ GLES30.glBindTexture(GLES30.GL_TEXTURE_2D, textures[0]);
+ GLES30.glTexParameteri(GLES30.GL_TEXTURE_2D, GLES30.GL_TEXTURE_MIN_FILTER, GLES30.GL_LINEAR);
+ GLES30.glTexParameteri(GLES30.GL_TEXTURE_2D, GLES30.GL_TEXTURE_MAG_FILTER, GLES30.GL_LINEAR);
+ GLES30.glTexParameteri(GLES30.GL_TEXTURE_2D, GLES30.GL_TEXTURE_WRAP_S, GLES30.GL_CLAMP_TO_EDGE);
+ GLES30.glTexParameteri(GLES30.GL_TEXTURE_2D, GLES30.GL_TEXTURE_WRAP_T, GLES30.GL_CLAMP_TO_EDGE);
+ GLUtils.texImage2D(GLES30.GL_TEXTURE_2D, 0, bitmap, 0);
+ bitmap.recycle();
+ return textures[0];
+ }
+
+ private static int loadShader(final String source, final int type) {
+ int shader = GLES30.glCreateShader(type);
+ GLES30.glShaderSource(shader, source);
+ GLES30.glCompileShader(shader);
+ int[] compile = new int[1];
+ GLES30.glGetShaderiv(shader, GLES30.GL_COMPILE_STATUS, compile, 0);
+ if (compile[0] <= 0) {
+ Log.e("OpenGlUtil", "Shader compile error=" + GLES30.glGetShaderInfoLog(shader));
+ // release the failed shader object before bailing out
+ GLES30.glDeleteShader(shader);
+ return NO_SHADER;
+ }
+ return shader;
+ }
+
+ public static int loadProgram(final String vertexSource, final String fragmentSource) {
+ int vertexShader = loadShader(vertexSource, GLES30.GL_VERTEX_SHADER);
+ int fragmentShader = loadShader(fragmentSource, GLES30.GL_FRAGMENT_SHADER);
+ if (vertexShader == NO_SHADER || fragmentShader == NO_SHADER) {
+ return 0;
+ }
+ int programId = GLES30.glCreateProgram();
+ GLES30.glAttachShader(programId, vertexShader);
+ GLES30.glAttachShader(programId, fragmentShader);
+ GLES30.glLinkProgram(programId);
+ int[] linked = new int[1];
+ GLES30.glGetProgramiv(programId, GLES30.GL_LINK_STATUS, linked, 0);
+ if (linked[0] <= 0) {
+ // query the log while the program id is still valid, then release the program
+ Log.e("OpenGlUtil", "program link error=" + GLES30.glGetProgramInfoLog(programId));
+ GLES30.glDeleteProgram(programId);
+ programId = 0;
+ }
+ GLES30.glDeleteShader(vertexShader);
+ GLES30.glDeleteShader(fragmentShader);
+ return programId;
+ }
+
+ public static int getExternalOESTextureId() {
+ int[] textures = new int[1];
+ GLES30.glGenTextures(1, textures, 0);
+ GLES30.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textures[0]);
+ GLES30.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GL10.GL_TEXTURE_MIN_FILTER, GL10.GL_LINEAR);
+ GLES30.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GL10.GL_TEXTURE_MAG_FILTER, GL10.GL_LINEAR);
+ GLES30.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GL10.GL_TEXTURE_WRAP_S, GL10.GL_CLAMP_TO_EDGE);
+ GLES30.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GL10.GL_TEXTURE_WRAP_T, GL10.GL_CLAMP_TO_EDGE);
+ return textures[0];
+ }
+
+ public static String readShaderFromSource(Context context, final int resourceId) {
+ String line;
+ StringBuilder builder = new StringBuilder();
+ InputStream inputStream = context.getResources().openRawResource(resourceId);
+ InputStreamReader reader = new InputStreamReader(inputStream);
+ BufferedReader bufferedReader = new BufferedReader(reader);
+ try {
+ while ((line = bufferedReader.readLine()) != null) {
+ builder.append(line).append("\n");
+ }
+ } catch (IOException e) {
+ return null;
+ } finally {
+ try {
+ // closing the outermost reader closes the whole stream chain
+ bufferedReader.close();
+ } catch (IOException e) {
+ e.printStackTrace();
+ }
+ }
+ return builder.toString();
+ }
+
+}
diff --git a/CameraFilter/src/main/java/com/frank/camerafilter/util/Rotation.java b/CameraFilter/src/main/java/com/frank/camerafilter/util/Rotation.java
new file mode 100644
index 0000000..56bec97
--- /dev/null
+++ b/CameraFilter/src/main/java/com/frank/camerafilter/util/Rotation.java
@@ -0,0 +1,36 @@
+package com.frank.camerafilter.util;
+
+public enum Rotation {
+
+ NORMAL, ROTATION_90, ROTATION_180, ROTATION_270;
+
+ public int toInt() {
+ switch (this) {
+ case NORMAL:
+ return 0;
+ case ROTATION_90:
+ return 90;
+ case ROTATION_180:
+ return 180;
+ case ROTATION_270:
+ return 270;
+ default:
+ throw new IllegalStateException("unknown rotation value");
+ }
+ }
+
+ public static Rotation fromInt(int rotation) {
+ switch (rotation) {
+ case 0:
+ return NORMAL;
+ case 90:
+ return ROTATION_90;
+ case 180:
+ return ROTATION_180;
+ case 270:
+ return ROTATION_270;
+ default:
+ throw new IllegalStateException("unknown rotation="
+rotation); + } + } +} diff --git a/CameraFilter/src/main/java/com/frank/camerafilter/util/TextureRotateUtil.java b/CameraFilter/src/main/java/com/frank/camerafilter/util/TextureRotateUtil.java new file mode 100644 index 0000000..0657433 --- /dev/null +++ b/CameraFilter/src/main/java/com/frank/camerafilter/util/TextureRotateUtil.java @@ -0,0 +1,82 @@ +package com.frank.camerafilter.util; + +public class TextureRotateUtil { + + public final static float[] TEXTURE_ROTATE_0 = { + 0.0f, 1.0f, + 1.0f, 1.0f, + 0.0f, 0.0f, + 1.0f, 0.0f + }; + + public final static float[] TEXTURE_ROTATE_90 = { + 1.0f, 1.0f, + 1.0f, 0.0f, + 0.0f, 1.0f, + 0.0f, 0.0f + }; + + public final static float[] TEXTURE_ROTATE_180 = { + 1.0f, 0.0f, + 0.0f, 0.0f, + 1.0f, 1.0f, + 0.0f, 1.0f + }; + + public final static float[] TEXTURE_ROTATE_270 = { + 0.0f, 0.0f, + 0.0f, 1.0f, + 1.0f, 0.0f, + 1.0f, 1.0f + }; + + public final static float[] VERTEX = { + -1.0f, -1.0f, + 1.0f, -1.0f, + -1.0f, 1.0f, + 1.0f, 1.0f + }; + + private TextureRotateUtil() {} + + private static float flip(float value) { + return value == 1.0f ? 0.0f : 1.0f; + } + + public static float[] getRotateTexture(Rotation rotation, boolean horizontalFlip, boolean verticalFlip) { + float[] rotateTexture; + switch (rotation) { + case ROTATION_90: + rotateTexture = TEXTURE_ROTATE_90; + break; + case ROTATION_180: + rotateTexture = TEXTURE_ROTATE_180; + break; + case ROTATION_270: + rotateTexture = TEXTURE_ROTATE_270; + break; + case NORMAL: + default: + rotateTexture = TEXTURE_ROTATE_0; + break; + } + if (horizontalFlip) { + rotateTexture = new float[] { + flip(rotateTexture[0]), rotateTexture[1], + flip(rotateTexture[2]), rotateTexture[3], + flip(rotateTexture[4]), rotateTexture[5], + flip(rotateTexture[6]), rotateTexture[7] + }; + } + if (verticalFlip) { + rotateTexture = new float[] { + rotateTexture[0], flip(rotateTexture[1]), + rotateTexture[2], flip(rotateTexture[3]), + rotateTexture[4], flip(rotateTexture[5]), + rotateTexture[6], flip(rotateTexture[7]) + }; + } + return rotateTexture; + } + +} diff --git a/CameraFilter/src/main/java/com/frank/camerafilter/widget/BeautyCameraView.java b/CameraFilter/src/main/java/com/frank/camerafilter/widget/BeautyCameraView.java new file mode 100644 index 0000000..e7d1181 --- /dev/null +++ b/CameraFilter/src/main/java/com/frank/camerafilter/widget/BeautyCameraView.java @@ -0,0 +1,50 @@ +package com.frank.camerafilter.widget; + +import android.content.Context; +import android.opengl.GLSurfaceView; +import android.util.AttributeSet; +import android.view.SurfaceHolder; + +import com.frank.camerafilter.factory.BeautyFilterType; + +public class BeautyCameraView extends GLSurfaceView { + + private final CameraRender mCameraRender; + + public BeautyCameraView(Context context) { + this(context, null); + } + + public BeautyCameraView(Context context, AttributeSet attrs) { + super(context, attrs); + getHolder().addCallback(this); + + mCameraRender = new CameraRender(this); + setEGLContextClientVersion(3); + setRenderer(mCameraRender); + setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY); + } + + @Override + public void surfaceDestroyed(SurfaceHolder holder) { + super.surfaceDestroyed(holder); + if (mCameraRender != null) { + mCameraRender.releaseCamera(); + } + } + + public void switchCamera() { + if (mCameraRender != null) { + mCameraRender.switchCamera(); + } + } + + public void setFilter(BeautyFilterType type) { + mCameraRender.setFilter(type); + } + + public void setRecording(boolean isRecording) { + 
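+ // UI-thread setter; CameraRender reads the flag on the GL thread in onDrawFrame() to start/stop the recorder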
mCameraRender.setRecording(isRecording); + } + +} diff --git a/CameraFilter/src/main/java/com/frank/camerafilter/widget/CameraRender.java b/CameraFilter/src/main/java/com/frank/camerafilter/widget/CameraRender.java new file mode 100644 index 0000000..3427403 --- /dev/null +++ b/CameraFilter/src/main/java/com/frank/camerafilter/widget/CameraRender.java @@ -0,0 +1,246 @@ +package com.frank.camerafilter.widget; + +import android.graphics.SurfaceTexture; +import android.hardware.Camera; +import android.opengl.EGL14; +import android.opengl.GLES30; +import android.opengl.GLSurfaceView; +import android.os.Environment; + +import com.frank.camerafilter.camera.CameraManager; +import com.frank.camerafilter.filter.BeautyCameraFilter; +import com.frank.camerafilter.filter.BaseFilter; +import com.frank.camerafilter.factory.BeautyFilterFactory; +import com.frank.camerafilter.factory.BeautyFilterType; +import com.frank.camerafilter.recorder.video.TextureVideoRecorder; +import com.frank.camerafilter.util.OpenGLUtil; +import com.frank.camerafilter.util.Rotation; +import com.frank.camerafilter.util.TextureRotateUtil; + +import java.io.File; +import java.nio.ByteBuffer; +import java.nio.ByteOrder; +import java.nio.FloatBuffer; + +import javax.microedition.khronos.egl.EGLConfig; +import javax.microedition.khronos.opengles.GL10; + +public class CameraRender implements GLSurfaceView.Renderer, SurfaceTexture.OnFrameAvailableListener { + + protected BaseFilter mFilter; + + private SurfaceTexture surfaceTexture; + private BeautyCameraFilter cameraFilter; + + private final CameraManager cameraManager; + + protected int mTextureId = OpenGLUtil.NO_TEXTURE; + + protected FloatBuffer mVertexBuffer; + + protected FloatBuffer mTextureBuffer; + + protected int mImageWidth, mImageHeight; + + protected int mSurfaceWidth, mSurfaceHeight; + private final float[] mMatrix = new float[16]; + + private final BeautyCameraView mCameraView; + + private final File outputFile; + private int recordStatus; + protected boolean recordEnable; + private final TextureVideoRecorder videoRecorder; + + private final static int RECORDING_OFF = 0; + private final static int RECORDING_ON = 1; + private final static int RECORDING_RESUME = 2; + + private static final int videoBitrate = 6 * 1024 * 1024; + private static final String videoName = "camera_record.mp4"; + private static final String videoPath = Environment.getExternalStorageDirectory().getPath(); + + public CameraRender(BeautyCameraView cameraView) { + mCameraView = cameraView; + + cameraManager = new CameraManager(); + mVertexBuffer = ByteBuffer.allocateDirect(TextureRotateUtil.VERTEX.length * 4) + .order(ByteOrder.nativeOrder()) + .asFloatBuffer(); + mVertexBuffer.put(TextureRotateUtil.VERTEX).position(0); + mTextureBuffer = ByteBuffer.allocateDirect(TextureRotateUtil.TEXTURE_ROTATE_0.length * 4) + .order(ByteOrder.nativeOrder()) + .asFloatBuffer(); + mTextureBuffer.put(TextureRotateUtil.TEXTURE_ROTATE_0).position(0); + + recordEnable = false; + recordStatus = RECORDING_OFF; + videoRecorder = new TextureVideoRecorder(mCameraView.getContext()); + outputFile = new File(videoPath, videoName); + } + + private void openCamera() { + if (cameraManager.getCamera() == null) + cameraManager.openCamera(); + Camera.Size size = cameraManager.getPreviewSize(); + if (cameraManager.getOrientation() == 90 || cameraManager.getOrientation() == 270) { + mImageWidth = size.height; + mImageHeight = size.width; + } else { + mImageWidth = size.width; + mImageHeight = size.height; + } + 
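+ // at 90/270 the sensor image is rotated relative to the view, hence the width/height swap above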
cameraFilter.onInputSizeChanged(mImageWidth, mImageHeight); + adjustSize(cameraManager.getOrientation(), cameraManager.isFront(), true); + } + + @Override + public void onSurfaceCreated(GL10 gl10, EGLConfig eglConfig) { + GLES30.glDisable(GL10.GL_DITHER); + GLES30.glClearColor(0.0f, 0.0f, 0.0f, 0.0f); + GLES30.glEnable(GL10.GL_CULL_FACE); + GLES30.glEnable(GL10.GL_DEPTH_TEST); + + cameraFilter = new BeautyCameraFilter(mCameraView.getContext()); + cameraFilter.init(); + mTextureId = OpenGLUtil.getExternalOESTextureId(); + if (mTextureId != OpenGLUtil.NO_TEXTURE) { + surfaceTexture = new SurfaceTexture(mTextureId); + surfaceTexture.setOnFrameAvailableListener(this); + } + + openCamera(); + } + + @Override + public void onSurfaceChanged(GL10 gl10, int width, int height) { + GLES30.glViewport(0, 0, width, height); + mSurfaceWidth = width; + mSurfaceHeight = height; + cameraManager.startPreview(surfaceTexture); + onFilterChanged(); + } + + @Override + public void onDrawFrame(GL10 gl10) { + GLES30.glClear(GLES30.GL_COLOR_BUFFER_BIT | GLES30.GL_DEPTH_BUFFER_BIT); + GLES30.glClearColor(0.0f, 0.0f, 0.0f, 0.0f); + + surfaceTexture.updateTexImage(); + + surfaceTexture.getTransformMatrix(mMatrix); + cameraFilter.setTextureTransformMatrix(mMatrix); + int id = mTextureId; + if (mFilter == null) { + cameraFilter.onDrawFrame(mTextureId, mVertexBuffer, mTextureBuffer); + } else { + id = cameraFilter.onDrawToTexture(mTextureId); + mFilter.onDrawFrame(id, mVertexBuffer, mTextureBuffer); + } + + onRecordVideo(id); + } + + @Override + public void onFrameAvailable(SurfaceTexture surfaceTexture) { + mCameraView.requestRender(); + } + + public void adjustSize(int rotation, boolean horizontalFlip, boolean verticalFlip) { + float[] vertexData = TextureRotateUtil.VERTEX; + float[] textureData = TextureRotateUtil.getRotateTexture(Rotation.fromInt(rotation), + horizontalFlip, verticalFlip); + + mVertexBuffer.clear(); + mVertexBuffer.put(vertexData).position(0); + mTextureBuffer.clear(); + mTextureBuffer.put(textureData).position(0); + + } + + public void switchCamera() { + if (cameraManager != null) { + cameraManager.switchCamera(); + } + } + + public void releaseCamera() { + if (cameraManager != null) { + cameraManager.releaseCamera(); + } + } + + private void onRecordVideo(int textureId) { + if (recordEnable) { + switch (recordStatus) { + case RECORDING_OFF: + videoRecorder.setPreviewSize(mImageWidth, mImageHeight); + videoRecorder.setTextureBuffer(mTextureBuffer); + videoRecorder.setCubeBuffer(mVertexBuffer); + videoRecorder.startRecording(new TextureVideoRecorder.RecorderConfig( + mImageWidth, + mImageHeight, + videoBitrate, + outputFile, + EGL14.eglGetCurrentContext())); + recordStatus = RECORDING_ON; + break; + case RECORDING_RESUME: + videoRecorder.updateSharedContext(EGL14.eglGetCurrentContext()); + recordStatus = RECORDING_ON; + break; + case RECORDING_ON: + break; + default: + throw new RuntimeException("unknown status " + recordStatus); + } + } else { + switch (recordStatus) { + case RECORDING_ON: + case RECORDING_RESUME: + videoRecorder.stopRecording(); + recordStatus = RECORDING_OFF; + break; + case RECORDING_OFF: + break; + default: + throw new RuntimeException("unknown status " + recordStatus); + } + } + videoRecorder.setTextureId(textureId); + videoRecorder.frameAvailable(surfaceTexture); + } + + public void setRecording(boolean isRecording) { + recordEnable = isRecording; + } + + public void setFilter(final BeautyFilterType type) { + mCameraView.queueEvent(new Runnable() { + @Override + public void 
run() {
+ if (mFilter != null)
+ mFilter.destroy();
+ mFilter = BeautyFilterFactory.getFilter(type, mCameraView.getContext());
+ if (mFilter != null)
+ mFilter.init();
+ onFilterChanged();
+ }
+ });
+ mCameraView.requestRender();
+ }
+
+ public void onFilterChanged() {
+ if (mFilter != null) {
+ mFilter.onInputSizeChanged(mImageWidth, mImageHeight);
+ mFilter.onOutputSizeChanged(mSurfaceWidth, mSurfaceHeight);
+ }
+ cameraFilter.onOutputSizeChanged(mSurfaceWidth, mSurfaceHeight);
+ if (mFilter != null)
+ cameraFilter.initFrameBuffer(mImageWidth, mImageHeight);
+ else
+ cameraFilter.destroyFrameBuffer();
+ }
+
+}
diff --git a/CameraFilter/src/main/res/raw/crayon.glsl b/CameraFilter/src/main/res/raw/crayon.glsl
new file mode 100644
index 0000000..b477a6d
--- /dev/null
+++ b/CameraFilter/src/main/res/raw/crayon.glsl
@@ -0,0 +1,53 @@
+varying highp vec2 textureCoordinate;
+precision mediump float;
+
+uniform sampler2D inputImageTexture;
+uniform vec2 singleStepOffset;
+uniform float strength;
+
+const highp vec3 W = vec3(0.299,0.587,0.114);
+
+const mat3 rgb2yiqMatrix = mat3(
+ 0.299, 0.587, 0.114,
+ 0.596,-0.275,-0.321,
+ 0.212,-0.523, 0.311);
+
+const mat3 yiq2rgbMatrix = mat3(
+ 1.0, 0.956, 0.621,
+ 1.0,-0.272,-1.703,
+ 1.0,-1.106, 0.0);
+
+
+void main()
+{
+ vec4 oralColor = texture2D(inputImageTexture, textureCoordinate);
+
+ vec3 maxValue = vec3(0.,0.,0.);
+
+ for(int i = -2; i<=2; i++)
+ {
+ for(int j = -2; j<=2; j++)
+ {
+ vec4 tempColor = texture2D(inputImageTexture, textureCoordinate+singleStepOffset*vec2(i,j));
+ maxValue.r = max(maxValue.r,tempColor.r);
+ maxValue.g = max(maxValue.g,tempColor.g);
+ maxValue.b = max(maxValue.b,tempColor.b);
+ }
+ }
+
+ vec3 textureColor = oralColor.rgb / maxValue;
+
+ float gray = dot(textureColor, W);
+ float k = 0.223529;
+ float alpha = min(gray,k)/k;
+
+ textureColor = textureColor * alpha + (1.-alpha)*oralColor.rgb;
+
+ vec3 yiqColor = textureColor * rgb2yiqMatrix;
+
+ yiqColor.r = max(0.0,min(1.0,pow(gray,strength)));
+
+ textureColor = yiqColor * yiq2rgbMatrix;
+
+ gl_FragColor = vec4(textureColor, oralColor.w);
+}
\ No newline at end of file
diff --git a/CameraFilter/src/main/res/raw/default_fragment.glsl b/CameraFilter/src/main/res/raw/default_fragment.glsl
new file mode 100644
index 0000000..677fb07
--- /dev/null
+++ b/CameraFilter/src/main/res/raw/default_fragment.glsl
@@ -0,0 +1,13 @@
+#extension GL_OES_EGL_image_external : require
+
+precision mediump float;
+
+varying mediump vec2 textureCoordinate;
+
+uniform samplerExternalOES inputImageTexture;
+
+void main(){
+
+ vec3 centralColor = texture2D(inputImageTexture, textureCoordinate).rgb;
+ gl_FragColor = vec4(centralColor.rgb,1.0);
+}
\ No newline at end of file
diff --git a/CameraFilter/src/main/res/raw/default_vertex.glsl b/CameraFilter/src/main/res/raw/default_vertex.glsl
new file mode 100644
index 0000000..6349d0f
--- /dev/null
+++ b/CameraFilter/src/main/res/raw/default_vertex.glsl
@@ -0,0 +1,11 @@
+attribute vec4 position;
+attribute vec4 inputTextureCoordinate;
+
+uniform mat4 textureTransform;
+varying vec2 textureCoordinate;
+
+void main()
+{
+ textureCoordinate = (textureTransform * inputTextureCoordinate).xy;
+ gl_Position = position;
+}
diff --git a/CameraFilter/src/main/res/raw/sketch.glsl b/CameraFilter/src/main/res/raw/sketch.glsl
new file mode 100644
index 0000000..ee14800
--- /dev/null
+++ b/CameraFilter/src/main/res/raw/sketch.glsl
@@ -0,0 +1,46 @@
+varying highp vec2 textureCoordinate;
+precision mediump float;
+
+uniform sampler2D inputImageTexture;
+uniform vec2 singleStepOffset;
+uniform float strength;
+
+const highp vec3 W = vec3(0.299,0.587,0.114);
+
+
+void main()
+{
+ float threshold = 0.0;
+ //pic1
+ vec4 oralColor = texture2D(inputImageTexture, textureCoordinate);
+
+ //pic2
+ vec3 maxValue = vec3(0.,0.,0.);
+
+ for(int i = -2; i<=2; i++)
+ {
+ for(int j = -2; j<=2; j++)
+ {
+ vec4 tempColor = texture2D(inputImageTexture, textureCoordinate+singleStepOffset*vec2(i,j));
+ maxValue.r = max(maxValue.r,tempColor.r);
+ maxValue.g = max(maxValue.g,tempColor.g);
+ maxValue.b = max(maxValue.b,tempColor.b);
+ threshold += dot(tempColor.rgb, W);
+ }
+ }
+ //pic3
+ float gray1 = dot(oralColor.rgb, W);
+
+ //pic4
+ float gray2 = dot(maxValue, W);
+
+ //pic5
+ float contour = gray1 / gray2;
+
+ threshold = threshold / 25.;
+ //alpha is 1.0 for pixels brighter than the local mean and gray1/threshold for darker ones,
+ //so dark regions blend the contour with their own gray
+ float alpha = min(1.0,gray1>threshold?1.0:(gray1/threshold));
+
+ float result = contour * alpha + (1.0-alpha)*gray1;
+
+ gl_FragColor = vec4(vec3(result,result,result), oralColor.w);
+}
\ No newline at end of file
diff --git a/app/build.gradle b/app/build.gradle
index f8d26ef..8d259e3 100644
--- a/app/build.gradle
+++ b/app/build.gradle
@@ -66,4 +66,5 @@ dependencies {
 implementation project(':AndroidMedia')
 //implementation "libmp3" if you need mp3-lite module
 // implementation project(':libmp3')
+ implementation project(':CameraFilter')
 }
diff --git a/settings.gradle b/settings.gradle
index b4f965b..abf84e4 100644
--- a/settings.gradle
+++ b/settings.gradle
@@ -2,3 +2,4 @@ include ':AndroidMedia'
 include ':app'
 include ':Live'
 include ':libmp3'
+include ':CameraFilter'