RTMP live streaming: switch preview to Camera2; temporarily remove audio streaming

pull/107/head
xufulong 5 years ago
parent 2f35000e84
commit d80cbd80e8
  1. Live/src/main/AndroidManifest.xml (2 changed lines)
  2. Live/src/main/cpp/live.c (121 changed lines)
  3. Live/src/main/java/com/frank/live/LiveUtil.java (16 changed lines)
  4. Live/src/main/java/com/frank/live/Push/LivePusher.java (11 changed lines)
  5. Live/src/main/java/com/frank/live/Push/VideoPusher.java (122 changed lines)
  6. Live/src/main/java/com/frank/live/Push/VideoPusherNew.java (155 changed lines)
  7. Live/src/main/java/com/frank/live/RtmpLiveActivity.java (27 changed lines)
  8. Live/src/main/java/com/frank/live/camera2/Camera2Helper.java (643 changed lines)
  9. Live/src/main/java/com/frank/live/camera2/Camera2Listener.java (16 changed lines)
  10. Live/src/main/res/layout/activity_rtmp_live.xml (4 changed lines)
  11. app/build.gradle (2 changed lines)
  12. app/src/main/java/com/frank/ffmpeg/activity/LiveActivity.java (26 changed lines)
  13. app/src/main/res/layout/activity_live.xml (2 changed lines)

@@ -14,7 +14,7 @@
android:roundIcon="@mipmap/ic_launcher_round"
android:supportsRtl="true"
android:theme="@style/AppTheme" >
<!--<activity android:name="com.frank.live.LiveActivity"-->
<!--<activity android:name="com.frank.live.RtmpLiveActivity"-->
<!--android:screenOrientation="landscape">-->
<!--<intent-filter>-->
<!--<action android:name="android.intent.action.MAIN" />-->

@@ -14,7 +14,6 @@
x264_picture_t picture_in;
x264_picture_t picture_out;
int y_len, uv_len;
x264_t *video_encode_handle;
faacEncHandle *audio_encode_handle;
uint32_t start_time;
@@ -44,12 +43,17 @@ const int ERROR_RTMP_CONNECT = 0x05;
const int ERROR_RTMP_CONNECT_STREAM = 0x06;
//failed to send RTMP data packet
const int ERROR_RTMP_SEND_PACKAT = 0x07;
/***************corresponds to the Java layer**************/
void add_rtmp_packet(RTMPPacket *pPacket);
void add_x264_body(uint8_t *buf, int len);
void add_x264_key_header(unsigned char sps[100], unsigned char pps[100], int len, int pps_len);
void add_aac_body(unsigned char *buf, int len);
void add_aac_header();
//called back when System.loadLibrary is invoked
@@ -152,8 +156,6 @@ Java_com_frank_live_LiveUtil_native_1start(JNIEnv *env, jobject instance, jstrin
JNIEXPORT void JNICALL
Java_com_frank_live_LiveUtil_setVideoParam(JNIEnv *env, jobject instance, jint width, jint height,
jint bitRate, jint frameRate) {
y_len = width * height;
uv_len = y_len/4;
x264_param_t param;
//default settings
@@ -188,7 +190,8 @@ Java_com_frank_live_LiveUtil_setVideoParam(JNIEnv *env, jobject instance, jint w
//FAAC audio encoder parameter configuration
JNIEXPORT void JNICALL
Java_com_frank_live_LiveUtil_setAudioParam(JNIEnv *env, jobject instance, jint sampleRate, jint numChannels) {
Java_com_frank_live_LiveUtil_setAudioParam(JNIEnv *env, jobject instance, jint sampleRate,
jint numChannels) {
inputSamples;
maxOutputBytes;
audio_encode_handle = faacEncOpen((unsigned long) sampleRate,
@@ -313,56 +316,6 @@ void add_x264_body(uint8_t *buf, int len) {
add_rtmp_packet(packet);
}
//push video stream
JNIEXPORT void JNICALL
Java_com_frank_live_LiveUtil_pushVideo(JNIEnv *env, jobject instance, jbyteArray data_) {
//convert NV21 to YUV420P
jbyte *nv21_buffer = (*env)->GetByteArrayElements(env, data_, NULL);
//Y plane is identical, copy directly
memcpy(picture_in.img.plane[0], nv21_buffer, (size_t) y_len);
jbyte *v_buffer = (jbyte *) picture_in.img.plane[2];
jbyte *u_buffer = (jbyte *) picture_in.img.plane[1];
int i;
//swap U and V (NV21 interleaves VU; I420 needs separate planes)
for(i=0; i<uv_len; i++){
*(u_buffer+i) = *(nv21_buffer + y_len + 2*i + 1);
*(v_buffer+i) = *(nv21_buffer + y_len + 2*i);
}
x264_nal_t *nal = NULL;
int nal_num = -1;//number of NAL units
//run H.264 encoding
if(x264_encoder_encode(video_encode_handle, &nal, &nal_num, &picture_in, &picture_out) < 0){
LOGE("x264_encoder_encode fail");
throw_error_to_java(ERROR_VIDEO_ENCODE);
goto end;
}
if(nal_num <= 0){
LOGE("nal_num <= 0");
goto end;
}
//push over RTMP
//prepend SPS and PPS to key frames (I-frames)
int sps_len = 0, pps_len = 0;
unsigned char sps[100];
unsigned char pps[100];
memset(sps, 0, 100);
memset(pps, 0, 100);
for (i = 0; i < nal_num; ++i) {
if(nal[i].i_type == NAL_SPS){//sps
sps_len = nal[i].i_payload - 4;
memcpy(sps, nal[i].p_payload + 4, (size_t) sps_len);
} else if(nal[i].i_type == NAL_PPS){//pps
pps_len = nal[i].i_payload - 4;
memcpy(pps, nal[i].p_payload + 4, (size_t) pps_len);
add_x264_key_header(sps, pps, sps_len, pps_len);
} else{
add_x264_body(nal[i].p_payload, nal[i].i_payload);
}
}
end:
(*env)->ReleaseByteArrayElements(env, data_, nv21_buffer, 0);
}
//add AAC header
void add_aac_header() {
unsigned char *ppBuffer;
@@ -416,7 +369,8 @@ void add_aac_body(unsigned char *buf, int len) {
//push audio stream
JNIEXPORT void JNICALL
Java_com_frank_live_LiveUtil_pushAudio(JNIEnv *env, jobject instance, jbyteArray data_, jint length) {
Java_com_frank_live_LiveUtil_pushAudio(JNIEnv *env, jobject instance, jbyteArray data_,
jint length) {
jbyte *data = (*env)->GetByteArrayElements(env, data_, NULL);
int *pcm_buf;
unsigned char *aac_buf;
@@ -439,7 +393,8 @@ Java_com_frank_live_LiveUtil_pushAudio(JNIEnv *env, jobject instance, jbyteArray
}
count += inputSamples;
//run FAAC encoding; returns the number of encoded bytes
int bytes_len = faacEncEncode(audio_encode_handle, pcm_buf, (unsigned int) audio_length, aac_buf, maxOutputBytes);
int bytes_len = faacEncEncode(audio_encode_handle, pcm_buf, (unsigned int) audio_length,
aac_buf, maxOutputBytes);
if (bytes_len <= 0) {
// throw_error_to_java(ERROR_AUDIO_ENCODE);
LOGE("audio encoding failed...");
@@ -453,6 +408,60 @@ Java_com_frank_live_LiveUtil_pushAudio(JNIEnv *env, jobject instance, jbyteArray
}
}
//push video stream
JNIEXPORT void JNICALL
Java_com_frank_live_LiveUtil_pushVideoNew(JNIEnv *env, jobject instance, jbyteArray yPlane,
jbyteArray uPlane, jbyteArray vPlane) {
jbyte *y_plane = (*env)->GetByteArrayElements(env, yPlane, NULL);
jbyte *u_plane = (*env)->GetByteArrayElements(env, uPlane, NULL);
jbyte *v_plane = (*env)->GetByteArrayElements(env, vPlane, NULL);
jsize y_length = (*env)->GetArrayLength(env, yPlane);
jsize u_length = (*env)->GetArrayLength(env, uPlane);
jsize v_length = (*env)->GetArrayLength(env, vPlane);
//copy each plane directly
memcpy(picture_in.img.plane[0], y_plane, (size_t) y_length);
memcpy(picture_in.img.plane[1], u_plane, (size_t) u_length);
memcpy(picture_in.img.plane[2], v_plane, (size_t) v_length);
int i;
x264_nal_t *nal = NULL;
int nal_num = -1;//number of NAL units
//run H.264 encoding
if (x264_encoder_encode(video_encode_handle, &nal, &nal_num, &picture_in, &picture_out) < 0) {
LOGE("x264_encoder_encode fail");
throw_error_to_java(ERROR_VIDEO_ENCODE);
goto end;
}
if (nal_num <= 0) {
LOGE("nal_num <= 0");
goto end;
}
//push over RTMP
//prepend SPS and PPS to key frames (I-frames)
int sps_len = 0, pps_len = 0;
unsigned char sps[100];
unsigned char pps[100];
memset(sps, 0, 100);
memset(pps, 0, 100);
for (i = 0; i < nal_num; ++i) {
if (nal[i].i_type == NAL_SPS) {//sps
sps_len = nal[i].i_payload - 4;
memcpy(sps, nal[i].p_payload + 4, (size_t) sps_len);
} else if (nal[i].i_type == NAL_PPS) {//pps
pps_len = nal[i].i_payload - 4;
memcpy(pps, nal[i].p_payload + 4, (size_t) pps_len);
add_x264_key_header(sps, pps, sps_len, pps_len);
} else {
add_x264_body(nal[i].p_payload, nal[i].i_payload);
}
}
end:
(*env)->ReleaseByteArrayElements(env, yPlane, y_plane, 0);
(*env)->ReleaseByteArrayElements(env, uPlane, u_plane, 0);
(*env)->ReleaseByteArrayElements(env, vPlane, v_plane, 0);
}
//stop streaming
JNIEXPORT void JNICALL
Java_com_frank_live_LiveUtil_native_1stop(JNIEnv *env, jobject instance) {

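Note on the new native path: pushVideoNew copies each plane into x264's I420 picture buffers with no length validation, so the Java side must deliver exactly width * height bytes for Y and width * height / 4 bytes each for U and V, matching the values passed to setVideoParam. A minimal defensive check on the Java side could look like this (hypothetical sketch, not part of this commit):

// Hypothetical sanity check before calling pushVideoData()/pushVideoNew():
// the native side assumes tightly packed I420 planes sized from setVideoParam.
private static boolean isValidI420(byte[] y, byte[] u, byte[] v, int width, int height) {
    int yLen = width * height;
    int uvLen = yLen / 4;   // I420: U and V planes are quarter-size
    return y != null && u != null && v != null
            && y.length == yLen && u.length == uvLen && v.length == uvLen;
}
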
@@ -14,11 +14,17 @@ public class LiveUtil {
}
private native int native_start(String url);
private native void setVideoParam(int width, int height, int bitRate, int frameRate);
private native void setAudioParam(int sampleRate, int numChannels);
private native void pushVideo(byte[] data);
private native void pushVideoNew(byte[] yPlane, byte[] uPlane, byte[] vPlane);
private native void pushAudio(byte[] data, int length);
private native void native_stop();
private native void native_release();
//failed to open video encoder
@@ -38,7 +44,8 @@ public class LiveUtil {
private LiveStateChangeListener liveStateChangeListener;
public LiveUtil(){}
public LiveUtil() {
}
public int startPush(String url) {
return native_start(url);
@@ -52,8 +59,8 @@ public class LiveUtil {
setAudioParam(sampleRate, numChannels);
}
public void pushVideoData(byte[] data){
pushVideo(data);
public void pushVideoData(byte[] yPlane, byte[] uPlane, byte[] vPlane) {
pushVideoNew(yPlane, uPlane, vPlane);
}
public void pushAudioData(byte[] data, int length) {
@@ -74,6 +81,7 @@ public class LiveUtil {
/**
* Called back when an error occurs in native code
*
* @param errCode the native error code
*/
public void errorFromNative(int errCode) {

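Putting the updated JNI surface together, a hypothetical usage sketch (setVideoParams, startPush and pushVideoData appear in this diff; setAudioParams is assumed by symmetry, and the URL is a placeholder):

// Hypothetical usage of the updated LiveUtil API (sketch, not part of this commit).
LiveUtil liveUtil = new LiveUtil();
liveUtil.setVideoParams(640, 480, 400, 25);     // width, height, bitrate in kb/s, fps
liveUtil.setAudioParams(44100, 2);              // sample rate in Hz, channels (assumed name)
int ret = liveUtil.startPush("rtmp://example.com/live/stream");  // placeholder URL
// per frame, from the Camera2 preview callback:
// liveUtil.pushVideoData(yPlane, uPlane, vPlane);  // forwards to native pushVideoNew()
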
@@ -1,7 +1,9 @@
package com.frank.live.Push;
import android.content.Context;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.TextureView;
import com.frank.live.LiveUtil;
import com.frank.live.listener.LiveStateChangeListener;
import com.frank.live.param.AudioParam;
@@ -14,13 +16,13 @@ import com.frank.live.param.VideoParam;
public class LivePusher {
private VideoPusher videoPusher;
private VideoPusherNew videoPusher;
private AudioPusher audioPusher;
private LiveUtil liveUtil;
public LivePusher(SurfaceHolder surfaceHolder, VideoParam videoParam, AudioParam audioParam){
public LivePusher(TextureView textureView, VideoParam videoParam, AudioParam audioParam, Context context) {
liveUtil = new LiveUtil();
videoPusher = new VideoPusher(surfaceHolder, videoParam, liveUtil);
videoPusher = new VideoPusherNew(textureView, videoParam, liveUtil, context);
audioPusher = new AudioPusher(audioParam, liveUtil);
}
@@ -62,6 +64,7 @@ public class LivePusher {
/**
* Set whether the audio is muted
*
* @param isMute whether to mute
*/
public void setMute(boolean isMute) {

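The constructor change above means callers now hand over a TextureView and a Context instead of a SurfaceHolder. A minimal wiring sketch, mirroring what RtmpLiveActivity does further down:

// Wiring the reworked LivePusher from an Activity (sketch based on RtmpLiveActivity below).
TextureView textureView = findViewById(R.id.surface_camera);
VideoParam videoParam = new VideoParam(640, 480,
        Integer.valueOf(Camera2Helper.CAMERA_ID_BACK), 400, 25);
AudioParam audioParam = new AudioParam(44100,
        AudioFormat.CHANNEL_IN_STEREO, AudioFormat.ENCODING_PCM_16BIT, 2);
LivePusher livePusher = new LivePusher(textureView, videoParam, audioParam, this);
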
@@ -1,122 +0,0 @@
package com.frank.live.Push;
import android.graphics.ImageFormat;
import android.hardware.Camera;
import android.view.SurfaceHolder;
import com.frank.live.LiveUtil;
import com.frank.live.param.VideoParam;
import java.io.IOException;
/**
* Video pusher
* Created by frank on 2018/1/28.
*/
public class VideoPusher extends Pusher implements SurfaceHolder.Callback, Camera.PreviewCallback {
private SurfaceHolder surfaceHolder;
private VideoParam videoParam;
private Camera camera;
private boolean isPushing;
private byte[] previewBuffer;
private LiveUtil liveUtil;
VideoPusher(SurfaceHolder surfaceHolder, VideoParam videoParam, LiveUtil liveUtil){
this.surfaceHolder = surfaceHolder;
this.videoParam = videoParam;
this.liveUtil = liveUtil;
surfaceHolder.addCallback(this);
liveUtil.setVideoParams(videoParam.getWidth(), videoParam.getHeight(),
videoParam.getBitRate(), videoParam.getFrameRate());
}
@Override
public void startPush() {
isPushing = true;
}
@Override
public void stopPush() {
isPushing = false;
}
@Override
public void release() {
stopPush();
stopPreview();
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
startPreview();
}
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
stopPreview();
}
/**
* Start preview
*/
private void startPreview() {
try {
camera = Camera.open(videoParam.getCameraId());
Camera.Parameters parameters = camera.getParameters();
parameters.setPreviewFormat(ImageFormat.NV21);
parameters.setPictureSize(videoParam.getWidth(), videoParam.getHeight());
camera.setParameters(parameters);
camera.setDisplayOrientation(0);//portrait would be 90°
camera.setPreviewDisplay(surfaceHolder);
camera.startPreview();
previewBuffer = new byte[videoParam.getWidth() * videoParam.getHeight() * 4];
camera.addCallbackBuffer(previewBuffer);
camera.setPreviewCallbackWithBuffer(this);
} catch (IOException e) {
e.printStackTrace();
}
}
/**
* Stop preview
*/
private void stopPreview() {
if(camera != null){
camera.stopPreview();
camera.setPreviewCallback(null);
camera.release();
camera = null;
}
}
/**
* Switch camera
*/
void switchCamera(){
if(videoParam.getCameraId() == Camera.CameraInfo.CAMERA_FACING_BACK){
videoParam.setCameraId(Camera.CameraInfo.CAMERA_FACING_FRONT);
}else {
videoParam.setCameraId(Camera.CameraInfo.CAMERA_FACING_BACK);
}
//restart preview with the new camera
stopPreview();
startPreview();
}
@Override
public void onPreviewFrame(byte[] data, Camera camera) {
camera.addCallbackBuffer(previewBuffer);
if(isPushing){
liveUtil.pushVideoData(data);
}
}
}

@@ -0,0 +1,155 @@
package com.frank.live.Push;
import android.app.Activity;
import android.content.Context;
import android.graphics.Point;
import android.graphics.SurfaceTexture;
import android.util.Log;
import android.util.Size;
import android.view.TextureView;
import com.frank.live.LiveUtil;
import com.frank.live.camera2.Camera2Helper;
import com.frank.live.camera2.Camera2Listener;
import com.frank.live.param.VideoParam;
/**
* Video pusher using Camera2
* Created by frank on 2019/12/18.
*/
public class VideoPusherNew extends Pusher implements TextureView.SurfaceTextureListener, Camera2Listener {
private final static String TAG = VideoPusherNew.class.getSimpleName();
private VideoParam mVideoParam;
private Camera2Helper camera2Helper;
private boolean isPushing;
private LiveUtil mLiveUtil;
private TextureView mTextureView;
private Context mContext;
VideoPusherNew(TextureView textureView, VideoParam videoParam, LiveUtil liveUtil, Context context) {
this.mTextureView = textureView;
this.mVideoParam = videoParam;
this.mLiveUtil = liveUtil;
this.mContext = context;
mTextureView.setSurfaceTextureListener(this);
liveUtil.setVideoParams(videoParam.getWidth(), videoParam.getHeight(),
videoParam.getBitRate(), videoParam.getFrameRate());
}
@Override
public void startPush() {
isPushing = true;
}
@Override
public void stopPush() {
isPushing = false;
}
@Override
public void release() {
stopPush();
releasePreview();
}
/**
* Start preview
*/
private void startPreview() {
int rotateDegree = 0;
if (mContext instanceof Activity) {
rotateDegree = ((Activity) mContext).getWindowManager().getDefaultDisplay().getRotation();
}
Log.e(TAG, "preview width=" + mTextureView.getWidth() + "--height=" + mTextureView.getHeight());
camera2Helper = new Camera2Helper.Builder()
.cameraListener(this)
.maxPreviewSize(new Point(1080, 720))
.minPreviewSize(new Point(mVideoParam.getWidth(), mVideoParam.getHeight()))
.specificCameraId(Camera2Helper.CAMERA_ID_BACK)
.context(mContext.getApplicationContext())
.previewOn(mTextureView)
// .previewViewSize(new Point(mTextureView.getWidth(), mTextureView.getHeight()))
.previewViewSize(new Point(mVideoParam.getWidth(), mVideoParam.getHeight()))
.rotation(rotateDegree)
.build();
camera2Helper.start();
}
/**
* Stop preview
*/
private void stopPreview() {
if (camera2Helper != null) {
camera2Helper.stop();
}
}
/**
* Release resources
*/
private void releasePreview() {
if (camera2Helper != null) {
camera2Helper.stop();
camera2Helper.release();
camera2Helper = null;
}
}
/**
* Switch camera
*/
void switchCamera() {
if (camera2Helper != null) {
camera2Helper.switchCamera();
}
}
@Override
public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
Log.e(TAG, "onSurfaceTextureAvailable...");
startPreview();
}
@Override
public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) {
}
@Override
public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
Log.e(TAG, "onSurfaceTextureDestroyed...");
stopPreview();
return false;
}
@Override
public void onSurfaceTextureUpdated(SurfaceTexture surface) {
}
@Override
public void onPreviewFrame(byte[] y, byte[] u, byte[] v) {
if (isPushing && mLiveUtil != null) {
mLiveUtil.pushVideoData(y, u, v);
}
}
@Override
public void onCameraOpened(Size previewSize, int displayOrientation) {
Log.e(TAG, "onCameraOpened previewSize=" + previewSize.toString());
}
@Override
public void onCameraClosed() {
Log.e(TAG, "onCameraClosed");
}
@Override
public void onCameraError(Exception e) {
Log.e(TAG, "onCameraError=" + e.toString());
}
}

@@ -3,23 +3,25 @@ package com.frank.live;
import android.Manifest;
import android.annotation.SuppressLint;
import android.annotation.TargetApi;
import android.hardware.Camera;
import android.media.AudioFormat;
import android.os.Build;
import android.os.Handler;
import android.os.Message;
import androidx.annotation.NonNull;
import androidx.appcompat.app.AppCompatActivity;
import android.os.Bundle;
import android.text.TextUtils;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.TextureView;
import android.view.View;
import android.widget.CompoundButton;
import android.widget.Toast;
import android.widget.ToggleButton;
import com.frank.live.Push.LivePusher;
import com.frank.live.camera2.Camera2Helper;
import com.frank.live.listener.LiveStateChangeListener;
import com.frank.live.param.AudioParam;
import com.frank.live.param.VideoParam;
@@ -29,14 +31,14 @@ import com.frank.live.param.VideoParam;
* Created by frank on 2018/1/28.
*/
public class LiveActivity extends AppCompatActivity implements View.OnClickListener, CompoundButton.OnCheckedChangeListener, LiveStateChangeListener {
public class RtmpLiveActivity extends AppCompatActivity implements View.OnClickListener, CompoundButton.OnCheckedChangeListener, LiveStateChangeListener {
private final static String TAG = LiveActivity.class.getSimpleName();
private final static String TAG = RtmpLiveActivity.class.getSimpleName();
private final static int CODE_CAMERA_RECORD = 0x0001;
private final static String[] permissions = new String[]{Manifest.permission.CAMERA, Manifest.permission.RECORD_AUDIO};
private final static String LIVE_URL = "rtmp://192.168.8.115/live/stream";
private final static String LIVE_URL = "rtmp://192.168.1.3/live/stream";
private final static int MSG_ERROR = 100;
private SurfaceHolder surfaceHolder;
private TextureView textureView;
private LivePusher livePusher;
@SuppressLint("HandlerLeak")
private Handler mHandler = new Handler() {
@@ -46,7 +48,7 @@ public class LiveActivity extends AppCompatActivity implements View.OnClickListe
if (msg.what == MSG_ERROR) {
String errMsg = (String) msg.obj;
if (!TextUtils.isEmpty(errMsg)) {
Toast.makeText(LiveActivity.this, errMsg, Toast.LENGTH_SHORT).show();
Toast.makeText(RtmpLiveActivity.this, errMsg, Toast.LENGTH_SHORT).show();
}
}
}
@@ -55,7 +57,7 @@ public class LiveActivity extends AppCompatActivity implements View.OnClickListe
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_live);
setContentView(R.layout.activity_rtmp_live);
initView();
requirePermission();
@@ -65,8 +67,7 @@ public class LiveActivity extends AppCompatActivity implements View.OnClickListe
private void initView() {
findViewById(R.id.btn_swap).setOnClickListener(this);
((ToggleButton) findViewById(R.id.btn_live)).setOnCheckedChangeListener(this);
SurfaceView surface_camera = (SurfaceView) findViewById(R.id.surface_camera);
surfaceHolder = surface_camera.getHolder();
textureView = findViewById(R.id.surface_camera);
}
private void initPusher() {
@@ -75,13 +76,13 @@ public class LiveActivity extends AppCompatActivity implements View.OnClickListe
int videoBitRate = 400;//kb/s (480kb suggested)
int videoFrameRate = 25;//fps
VideoParam videoParam = new VideoParam(width, height,
Camera.CameraInfo.CAMERA_FACING_BACK, videoBitRate, videoFrameRate);
Integer.valueOf(Camera2Helper.CAMERA_ID_BACK), videoBitRate, videoFrameRate);
int sampleRate = 44100;//sample rate in Hz
int channelConfig = AudioFormat.CHANNEL_IN_STEREO;//stereo
int audioFormat = AudioFormat.ENCODING_PCM_16BIT;//16-bit PCM
int numChannels = 2;//number of channels
AudioParam audioParam = new AudioParam(sampleRate, channelConfig, audioFormat, numChannels);
livePusher = new LivePusher(surfaceHolder, videoParam, audioParam);
livePusher = new LivePusher(textureView, videoParam, audioParam, this);
}
@Override

@@ -0,0 +1,643 @@
package com.frank.live.camera2;
import android.Manifest;
import android.annotation.TargetApi;
import android.content.Context;
import android.content.pm.PackageManager;
import android.graphics.ImageFormat;
import android.graphics.Matrix;
import android.graphics.Point;
import android.graphics.RectF;
import android.graphics.SurfaceTexture;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.media.Image;
import android.media.ImageReader;
import android.os.Build;
import android.os.Handler;
import android.os.HandlerThread;
import android.util.Log;
import android.util.Size;
import android.view.Surface;
import android.view.TextureView;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Comparator;
import java.util.List;
import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.locks.ReentrantLock;
import androidx.annotation.NonNull;
/**
* Camera2 operation helper
* Created by frank on 2019/12/18.
*/
@TargetApi(21)
public class Camera2Helper {
private static final String TAG = Camera2Helper.class.getSimpleName();
private Point maxPreviewSize;
private Point minPreviewSize;
public static final String CAMERA_ID_FRONT = "1";
public static final String CAMERA_ID_BACK = "0";
private String mCameraId;
private String specificCameraId;
private Camera2Listener camera2Listener;
private TextureView mTextureView;
private int rotation;
private Point previewViewSize;
private Point specificPreviewSize;
private Context context;
/**
* A {@link CameraCaptureSession } for camera preview.
*/
private CameraCaptureSession mCaptureSession;
/**
* A reference to the opened {@link CameraDevice}.
*/
private CameraDevice mCameraDevice;
private Size mPreviewSize;
private Camera2Helper(Builder builder) {
mTextureView = builder.previewDisplayView;
specificCameraId = builder.specificCameraId;
camera2Listener = builder.camera2Listener;
rotation = builder.rotation;
previewViewSize = builder.previewViewSize;
specificPreviewSize = builder.previewSize;
maxPreviewSize = builder.maxPreviewSize;
minPreviewSize = builder.minPreviewSize;
context = builder.context;
}
public void switchCamera() {
if (CAMERA_ID_BACK.equals(mCameraId)) {
specificCameraId = CAMERA_ID_FRONT;
} else if (CAMERA_ID_FRONT.equals(mCameraId)) {
specificCameraId = CAMERA_ID_BACK;
}
stop();
start();
}
private int getCameraOri(int rotation, String cameraId) {
int degrees = rotation * 90;
switch (rotation) {
case Surface.ROTATION_0:
degrees = 0;
break;
case Surface.ROTATION_90:
degrees = 90;
break;
case Surface.ROTATION_180:
degrees = 180;
break;
case Surface.ROTATION_270:
degrees = 270;
break;
default:
break;
}
int result;
if (CAMERA_ID_FRONT.equals(cameraId)) {
result = (mSensorOrientation + degrees) % 360;
result = (360 - result) % 360;
} else {
result = (mSensorOrientation - degrees + 360) % 360;
}
Log.i(TAG, "getCameraOri: " + rotation + " " + result + " " + mSensorOrientation);
return result;
}
private final TextureView.SurfaceTextureListener mSurfaceTextureListener
= new TextureView.SurfaceTextureListener() {
@Override
public void onSurfaceTextureAvailable(SurfaceTexture texture, int width, int height) {
Log.i(TAG, "onSurfaceTextureAvailable: ");
openCamera();
}
@Override
public void onSurfaceTextureSizeChanged(SurfaceTexture texture, int width, int height) {
Log.i(TAG, "onSurfaceTextureSizeChanged: ");
configureTransform(width, height);
}
@Override
public boolean onSurfaceTextureDestroyed(SurfaceTexture texture) {
Log.i(TAG, "onSurfaceTextureDestroyed: ");
return true;
}
@Override
public void onSurfaceTextureUpdated(SurfaceTexture texture) {
}
};
private CameraDevice.StateCallback mDeviceStateCallback = new CameraDevice.StateCallback() {
@Override
public void onOpened(@NonNull CameraDevice cameraDevice) {
Log.i(TAG, "onOpened: ");
// This method is called when the camera is opened. We start camera preview here.
mCameraOpenCloseLock.release();
mCameraDevice = cameraDevice;
createCameraPreviewSession();
if (camera2Listener != null) {
camera2Listener.onCameraOpened(mPreviewSize, getCameraOri(rotation, mCameraId));
}
}
@Override
public void onDisconnected(@NonNull CameraDevice cameraDevice) {
Log.i(TAG, "onDisconnected: ");
mCameraOpenCloseLock.release();
cameraDevice.close();
mCameraDevice = null;
if (camera2Listener != null) {
camera2Listener.onCameraClosed();
}
}
@Override
public void onError(@NonNull CameraDevice cameraDevice, int error) {
Log.i(TAG, "onError: ");
mCameraOpenCloseLock.release();
cameraDevice.close();
mCameraDevice = null;
if (camera2Listener != null) {
camera2Listener.onCameraError(new Exception("error occurred, code is " + error));
}
}
};
private CameraCaptureSession.StateCallback mCaptureStateCallback = new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(@NonNull CameraCaptureSession cameraCaptureSession) {
Log.i(TAG, "onConfigured: ");
// The camera is already closed
if (null == mCameraDevice) {
return;
}
// When the session is ready, we start displaying the preview.
mCaptureSession = cameraCaptureSession;
try {
mCaptureSession.setRepeatingRequest(mPreviewRequestBuilder.build(),
new CameraCaptureSession.CaptureCallback() {
}, mBackgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
@Override
public void onConfigureFailed(
@NonNull CameraCaptureSession cameraCaptureSession) {
Log.i(TAG, "onConfigureFailed: ");
if (camera2Listener != null) {
camera2Listener.onCameraError(new Exception("configureFailed"));
}
}
};
/**
* An additional thread for running tasks that shouldn't block the UI.
*/
private HandlerThread mBackgroundThread;
/**
* A {@link Handler} for running tasks in the background.
*/
private Handler mBackgroundHandler;
private ImageReader mImageReader;
/**
* {@link CaptureRequest.Builder} for the camera preview
*/
private CaptureRequest.Builder mPreviewRequestBuilder;
/**
* A {@link Semaphore} to prevent the app from exiting before closing the camera.
*/
private Semaphore mCameraOpenCloseLock = new Semaphore(1);
/**
* Orientation of the camera sensor
*/
private int mSensorOrientation;
private Size getBestSupportedSize(List<Size> sizes) {
Size defaultSize = sizes.get(0);
Size[] tempSizes = sizes.toArray(new Size[0]);
Arrays.sort(tempSizes, new Comparator<Size>() {
@Override
public int compare(Size o1, Size o2) {
if (o1.getWidth() > o2.getWidth()) {
return -1;
} else if (o1.getWidth() == o2.getWidth()) {
return o1.getHeight() > o2.getHeight() ? -1 : 1;
} else {
return 1;
}
}
});
sizes = new ArrayList<>(Arrays.asList(tempSizes));
for (int i = sizes.size() - 1; i >= 0; i--) {
if (maxPreviewSize != null) {
if (sizes.get(i).getWidth() > maxPreviewSize.x || sizes.get(i).getHeight() > maxPreviewSize.y) {
sizes.remove(i);
continue;
}
}
if (minPreviewSize != null) {
if (sizes.get(i).getWidth() < minPreviewSize.x || sizes.get(i).getHeight() < minPreviewSize.y) {
sizes.remove(i);
}
}
}
if (sizes.size() == 0) {
String msg = "can not find suitable previewSize, now using default";
if (camera2Listener != null) {
Log.e(TAG, msg);
camera2Listener.onCameraError(new Exception(msg));
}
return defaultSize;
}
Size bestSize = sizes.get(0);
float previewViewRatio;
if (previewViewSize != null) {
previewViewRatio = (float) previewViewSize.x / (float) previewViewSize.y;
} else {
previewViewRatio = (float) bestSize.getWidth() / (float) bestSize.getHeight();
}
if (previewViewRatio > 1) {
previewViewRatio = 1 / previewViewRatio;
}
for (Size s : sizes) {
if (specificPreviewSize != null && specificPreviewSize.x == s.getWidth() && specificPreviewSize.y == s.getHeight()) {
return s;
}
if (Math.abs((s.getHeight() / (float) s.getWidth()) - previewViewRatio) < Math.abs(bestSize.getHeight() / (float) bestSize.getWidth() - previewViewRatio)) {
bestSize = s;
}
}
return bestSize;
}
public synchronized void start() {
if (mCameraDevice != null) {
return;
}
startBackgroundThread();
// When the screen is turned off and turned back on, the SurfaceTexture is already
// available, and "onSurfaceTextureAvailable" will not be called. In that case, we can open
// a camera and start preview from here (otherwise, we wait until the surface is ready in
// the SurfaceTextureListener).
if (mTextureView.isAvailable()) {
openCamera();
} else {
mTextureView.setSurfaceTextureListener(mSurfaceTextureListener);
}
}
public synchronized void stop() {
if (mCameraDevice == null) {
return;
}
closeCamera();
stopBackgroundThread();
}
public void release() {
stop();
mTextureView = null;
camera2Listener = null;
context = null;
}
private void setUpCameraOutputs(CameraManager cameraManager) {
try {
if (configCameraParams(cameraManager, specificCameraId)) {
return;
}
for (String cameraId : cameraManager.getCameraIdList()) {
if (configCameraParams(cameraManager, cameraId)) {
return;
}
}
} catch (CameraAccessException e) {
e.printStackTrace();
} catch (NullPointerException e) {
// Currently an NPE is thrown when the Camera2API is used but not supported on the
// device this code runs.
if (camera2Listener != null) {
camera2Listener.onCameraError(e);
}
}
}
private boolean configCameraParams(CameraManager manager, String cameraId) throws CameraAccessException {
CameraCharacteristics characteristics
= manager.getCameraCharacteristics(cameraId);
StreamConfigurationMap map = characteristics.get(
CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
if (map == null) {
return false;
}
mPreviewSize = getBestSupportedSize(new ArrayList<>(Arrays.asList(map.getOutputSizes(SurfaceTexture.class))));
mImageReader = ImageReader.newInstance(mPreviewSize.getWidth(), mPreviewSize.getHeight(),
ImageFormat.YUV_420_888, 2);
mImageReader.setOnImageAvailableListener(
new OnImageAvailableListenerImpl(), mBackgroundHandler);
mSensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
mCameraId = cameraId;
return true;
}
private void openCamera() {
CameraManager cameraManager = (CameraManager) context.getSystemService(Context.CAMERA_SERVICE);
setUpCameraOutputs(cameraManager);
configureTransform(mTextureView.getWidth(), mTextureView.getHeight());
try {
if (!mCameraOpenCloseLock.tryAcquire(2500, TimeUnit.MILLISECONDS)) {
throw new RuntimeException("Time out waiting to lock camera opening.");
}
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M
&& context.checkSelfPermission(Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED) {
return;
}
cameraManager.openCamera(mCameraId, mDeviceStateCallback, mBackgroundHandler);
} catch (CameraAccessException e) {
if (camera2Listener != null) {
camera2Listener.onCameraError(e);
}
} catch (InterruptedException e) {
if (camera2Listener != null) {
camera2Listener.onCameraError(e);
}
}
}
/**
* Closes the current {@link CameraDevice}.
*/
private void closeCamera() {
try {
mCameraOpenCloseLock.acquire();
if (null != mCaptureSession) {
mCaptureSession.close();
mCaptureSession = null;
}
if (null != mCameraDevice) {
mCameraDevice.close();
mCameraDevice = null;
}
if (null != mImageReader) {
mImageReader.close();
mImageReader = null;
}
if (camera2Listener != null) {
camera2Listener.onCameraClosed();
}
} catch (InterruptedException e) {
if (camera2Listener != null) {
camera2Listener.onCameraError(e);
}
} finally {
mCameraOpenCloseLock.release();
}
}
/**
* Starts a background thread and its {@link Handler}.
*/
private void startBackgroundThread() {
mBackgroundThread = new HandlerThread("CameraBackground");
mBackgroundThread.start();
mBackgroundHandler = new Handler(mBackgroundThread.getLooper());
}
/**
* Stops the background thread and its {@link Handler}.
*/
private void stopBackgroundThread() {
mBackgroundThread.quitSafely();
try {
mBackgroundThread.join();
mBackgroundThread = null;
mBackgroundHandler = null;
} catch (InterruptedException e) {
e.printStackTrace();
}
}
/**
* Creates a new {@link CameraCaptureSession} for camera preview.
*/
private void createCameraPreviewSession() {
try {
SurfaceTexture texture = mTextureView.getSurfaceTexture();
assert texture != null;
// We configure the size of default buffer to be the size of camera preview we want.
texture.setDefaultBufferSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());
// This is the output Surface we need to start preview.
Surface surface = new Surface(texture);
// We set up a CaptureRequest.Builder with the output Surface.
mPreviewRequestBuilder
= mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_MODE,
CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
mPreviewRequestBuilder.addTarget(surface);
mPreviewRequestBuilder.addTarget(mImageReader.getSurface());
// Here, we create a CameraCaptureSession for camera preview.
mCameraDevice.createCaptureSession(Arrays.asList(surface, mImageReader.getSurface()),
mCaptureStateCallback, mBackgroundHandler
);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
/**
* Configures the necessary {@link Matrix} transformation to `mTextureView`.
* This method should be called after the camera preview size is determined in
* setUpCameraOutputs and also the size of `mTextureView` is fixed.
*
* @param viewWidth The width of `mTextureView`
* @param viewHeight The height of `mTextureView`
*/
private void configureTransform(int viewWidth, int viewHeight) {
if (null == mTextureView || null == mPreviewSize) {
return;
}
Matrix matrix = new Matrix();
RectF viewRect = new RectF(0, 0, viewWidth, viewHeight);
RectF bufferRect = new RectF(0, 0, mPreviewSize.getHeight(), mPreviewSize.getWidth());
float centerX = viewRect.centerX();
float centerY = viewRect.centerY();
if (Surface.ROTATION_90 == rotation || Surface.ROTATION_270 == rotation) {
bufferRect.offset(centerX - bufferRect.centerX(), centerY - bufferRect.centerY());
matrix.setRectToRect(viewRect, bufferRect, Matrix.ScaleToFit.FILL);
float scale = Math.max(
(float) viewHeight / mPreviewSize.getHeight(),
(float) viewWidth / mPreviewSize.getWidth());
matrix.postScale(scale, scale, centerX, centerY);
matrix.postRotate((90 * (rotation - 2)) % 360, centerX, centerY);
} else if (Surface.ROTATION_180 == rotation) {
matrix.postRotate(180, centerX, centerY);
}
Log.i(TAG, "configureTransform: " + getCameraOri(rotation, mCameraId) + " " + rotation * 90);
mTextureView.setTransform(matrix);
}
public static final class Builder {
private TextureView previewDisplayView;
private String specificCameraId;
private Camera2Listener camera2Listener;
private Point previewViewSize;
private int rotation;
private Point previewSize;
private Point maxPreviewSize;
private Point minPreviewSize;
private Context context;
public Builder() {
}
public Builder previewOn(TextureView val) {
previewDisplayView = val;
return this;
}
public Builder previewSize(Point val) {
previewSize = val;
return this;
}
public Builder maxPreviewSize(Point val) {
maxPreviewSize = val;
return this;
}
public Builder minPreviewSize(Point val) {
minPreviewSize = val;
return this;
}
public Builder previewViewSize(Point val) {
previewViewSize = val;
return this;
}
public Builder rotation(int val) {
rotation = val;
return this;
}
public Builder specificCameraId(String val) {
specificCameraId = val;
return this;
}
public Builder cameraListener(Camera2Listener val) {
camera2Listener = val;
return this;
}
public Builder context(Context val) {
context = val;
return this;
}
public Camera2Helper build() {
if (previewDisplayView == null) {
throw new NullPointerException("must preview on a textureView or a surfaceView");
}
if (maxPreviewSize != null && minPreviewSize != null) {
if (maxPreviewSize.x < minPreviewSize.x || maxPreviewSize.y < minPreviewSize.y) {
throw new IllegalArgumentException("maxPreviewSize must be greater than minPreviewSize");
}
}
return new Camera2Helper(this);
}
}
private class OnImageAvailableListenerImpl implements ImageReader.OnImageAvailableListener {
private byte[] yPlane;
private byte[] uPlane;
private byte[] vPlane;
private ReentrantLock lock = new ReentrantLock();
@Override
public void onImageAvailable(ImageReader reader) {
Image image = reader.acquireNextImage();
// YUV_420_888
if (camera2Listener != null && image.getFormat() == ImageFormat.YUV_420_888) {
Image.Plane[] planes = image.getPlanes();
lock.lock();
if (yPlane == null || uPlane == null || vPlane == null) {
yPlane = new byte[planes[0].getBuffer().limit() - planes[0].getBuffer().position()];
uPlane = new byte[planes[1].getBuffer().limit() - planes[1].getBuffer().position()];
vPlane = new byte[planes[2].getBuffer().limit() - planes[2].getBuffer().position()];
}
if (image.getPlanes()[0].getBuffer().remaining() == yPlane.length) {
planes[0].getBuffer().get(yPlane);
planes[1].getBuffer().get(uPlane);
planes[2].getBuffer().get(vPlane);
if (camera2Listener != null) {
camera2Listener.onPreviewFrame(yPlane, uPlane, vPlane);
}
}
lock.unlock();
}
image.close();
}
}
}

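One caveat about OnImageAvailableListenerImpl above: it reads each YUV_420_888 plane's ByteBuffer as if it were tightly packed. On devices where a plane's rowStride exceeds its width, or where the chroma planes report pixelStride == 2, the buffers carry row padding or interleaved bytes, and the arrays handed to onPreviewFrame are then not plain I420 planes. A stride-aware copy would look roughly like this (hypothetical sketch, not part of this commit):

import android.media.Image;
import java.nio.ByteBuffer;

// Hypothetical stride-aware plane copy: compacts one YUV_420_888 plane into a
// tightly packed array (use full width/height for Y, half of each for U and V).
static void copyPlane(Image.Plane plane, int width, int height, byte[] out) {
    ByteBuffer buffer = plane.getBuffer();
    int rowStride = plane.getRowStride();      // bytes per row, may exceed width
    int pixelStride = plane.getPixelStride();  // 1 if packed, 2 if chroma is interleaved
    byte[] row = new byte[rowStride];
    int outPos = 0;
    for (int r = 0; r < height; r++) {
        int toRead = Math.min(rowStride, buffer.remaining());
        buffer.get(row, 0, toRead);
        for (int c = 0; c < width; c++) {
            out[outPos++] = row[c * pixelStride];  // drop padding between pixels
        }
    }
}
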
@@ -0,0 +1,16 @@
package com.frank.live.camera2;
import android.util.Size;
public interface Camera2Listener {
void onCameraOpened(Size previewSize, int displayOrientation);
void onPreviewFrame(byte[] y, byte[] u, byte[] v);
void onCameraClosed();
void onCameraError(Exception e);
}

@@ -4,9 +4,9 @@
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent"
tools:context="com.frank.live.LiveActivity">
tools:context="com.frank.live.RtmpLiveActivity">
<SurfaceView
<TextureView
android:id="@+id/surface_camera"
android:layout_width="match_parent"
android:layout_height="match_parent" />

@@ -17,7 +17,7 @@ android {
}
}
ndk {
abiFilters "armeabi-v7a", "arm64-v8a"
abiFilters "armeabi-v7a"//, "arm64-v8a"
}
}
buildTypes {

@@ -1,21 +1,21 @@
package com.frank.ffmpeg.activity;
import android.annotation.SuppressLint;
import android.hardware.Camera;
import android.media.AudioFormat;
import android.os.Bundle;
import android.os.Handler;
import android.os.Message;
import android.text.TextUtils;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.TextureView;
import android.view.View;
import android.widget.CompoundButton;
import android.widget.Toast;
import android.widget.ToggleButton;
import com.frank.ffmpeg.R;
import com.frank.live.Push.LivePusher;
import com.frank.live.camera2.Camera2Helper;
import com.frank.live.listener.LiveStateChangeListener;
import com.frank.live.param.AudioParam;
import com.frank.live.param.VideoParam;
@@ -30,7 +30,7 @@ public class LiveActivity extends BaseActivity implements CompoundButton.OnCheck
private final static String TAG = LiveActivity.class.getSimpleName();
private final static String LIVE_URL = "rtmp://192.168.1.3/live/stream";
private final static int MSG_ERROR = 100;
private SurfaceHolder surfaceHolder;
private TextureView textureView;
private LivePusher livePusher;
@SuppressLint("HandlerLeak")
private Handler mHandler = new Handler() {
@@ -64,23 +64,25 @@ public class LiveActivity extends BaseActivity implements CompoundButton.OnCheck
initViewsWithClick(R.id.btn_swap);
((ToggleButton) findViewById(R.id.btn_live)).setOnCheckedChangeListener(this);
((ToggleButton) findViewById(R.id.btn_mute)).setOnCheckedChangeListener(this);
SurfaceView surface_camera = getView(R.id.surface_camera);
surfaceHolder = surface_camera.getHolder();
textureView = getView(R.id.surface_camera);
}
private void initPusher() {
int width = 640;//resolution
int height = 480;
int videoBitRate = 400;//kb/s
int videoFrameRate = 25;//fps
int videoFrameRate = 20;//fps
VideoParam videoParam = new VideoParam(width, height,
Camera.CameraInfo.CAMERA_FACING_BACK, videoBitRate, videoFrameRate);
Integer.valueOf(Camera2Helper.CAMERA_ID_BACK), videoBitRate, videoFrameRate);
int sampleRate = 44100;//sample rate in Hz
int channelConfig = AudioFormat.CHANNEL_IN_STEREO;//stereo
int audioFormat = AudioFormat.ENCODING_PCM_16BIT;//16-bit PCM
int numChannels = 2;//number of channels
AudioParam audioParam = new AudioParam(sampleRate, channelConfig, audioFormat, numChannels);
livePusher = new LivePusher(surfaceHolder, videoParam, audioParam);
livePusher = new LivePusher(textureView, videoParam, audioParam, this);
//TODO: audio streaming temporarily disabled
livePusher.setMute(true);
findViewById(R.id.btn_mute).setVisibility(View.INVISIBLE);
}
@Override
@@ -111,9 +113,9 @@ public class LiveActivity extends BaseActivity implements CompoundButton.OnCheck
@Override
protected void onDestroy() {
super.onDestroy();
if (livePusher != null) {
livePusher.release();
}
// if (livePusher != null) {
// livePusher.release();
// }
}
@Override

@@ -6,7 +6,7 @@
android:layout_height="match_parent"
tools:context="com.frank.ffmpeg.activity.LiveActivity">
<SurfaceView
<TextureView
android:id="@+id/surface_camera"
android:layout_width="match_parent"
android:layout_height="match_parent" />
