From 94518f18e42dbd93a692b22691ad5e8433eddca0 Mon Sep 17 00:00:00 2001
From: xufulong <839789740@qq.com>
Date: Thu, 13 Feb 2020 12:10:23 +0800
Subject: [PATCH] add VideoStreamNew with camera2

add VideoStreamNew with camera2
---
 Live/src/main/cpp/VideoStream.cpp             |  37 ++++-
 Live/src/main/cpp/VideoStream.h               |   3 +-
 .../java/com/frank/live/LivePusherNew.java    |  15 ++
 .../com/frank/live/stream/VideoStreamNew.java | 151 ++++++++++++++++++
 4 files changed, 201 insertions(+), 5 deletions(-)
 create mode 100644 Live/src/main/java/com/frank/live/stream/VideoStreamNew.java

diff --git a/Live/src/main/cpp/VideoStream.cpp b/Live/src/main/cpp/VideoStream.cpp
index a21d557..c3bc019 100644
--- a/Live/src/main/cpp/VideoStream.cpp
+++ b/Live/src/main/cpp/VideoStream.cpp
@@ -1,5 +1,6 @@
 #include
+#include
 #include "VideoStream.h"
 #include "include/rtmp/rtmp.h"
 #include "PushGeneric.h"
@@ -116,11 +117,40 @@ void VideoStream::encodeData(int8_t *data) {
     pthread_mutex_unlock(&mutex);
 }
 
+void VideoStream::encodeDataNew(int8_t *y_plane, int8_t *u_plane, int8_t *v_plane) {
+    pthread_mutex_lock(&mutex);
+
+    // copy the planes directly
+    memcpy(pic_in->img.plane[0], y_plane, (size_t) ySize);
+    memcpy(pic_in->img.plane[1], u_plane, (size_t) ySize / 4);
+    memcpy(pic_in->img.plane[2], v_plane, (size_t) ySize / 4);
+
+    x264_nal_t *pp_nal;
+    int pi_nal;
+    x264_picture_t pic_out;
+    x264_encoder_encode(videoCodec, &pp_nal, &pi_nal, pic_in, &pic_out);
+    int sps_len = 0;
+    int pps_len = 0;
+    uint8_t sps[100];
+    uint8_t pps[100];
+    for (int i = 0; i < pi_nal; ++i) {
+        if (pp_nal[i].i_type == NAL_SPS) {
+            sps_len = pp_nal[i].i_payload - 4;
+            memcpy(sps, pp_nal[i].p_payload + 4, static_cast<size_t>(sps_len));
+        } else if (pp_nal[i].i_type == NAL_PPS) {
+            pps_len = pp_nal[i].i_payload - 4;
+            memcpy(pps, pp_nal[i].p_payload + 4, static_cast<size_t>(pps_len));
+            sendSpsPps(sps, pps, sps_len, pps_len);
+        } else {
+            sendFrame(pp_nal[i].i_type, pp_nal[i].p_payload, pp_nal[i].i_payload);
+        }
+    }
+    pthread_mutex_unlock(&mutex);
+}
+
 void VideoStream::sendSpsPps(uint8_t *sps, uint8_t *pps, int sps_len, int pps_len) {
-    // see the spec table
     int bodySize = 13 + sps_len + 3 + pps_len;
     RTMPPacket *packet = new RTMPPacket;
-    //
     RTMPPacket_Alloc(packet, bodySize);
     int i = 0;
     // fixed header
@@ -139,7 +169,7 @@ void VideoStream::sendSpsPps(uint8_t *sps, uint8_t *pps, int sps_len, int pps_le
     packet->m_body[i++] = sps[3];
     packet->m_body[i++] = 0xFF;
 
-    // the whole sps
+    // sps
     packet->m_body[i++] = 0xE1;
     // sps length
     packet->m_body[i++] = (sps_len >> 8) & 0xff;
@@ -156,7 +186,6 @@ void VideoStream::sendSpsPps(uint8_t *sps, uint8_t *pps, int sps_len, int pps_le
     // video
     packet->m_packetType = RTMP_PACKET_TYPE_VIDEO;
     packet->m_nBodySize = bodySize;
-    // assign an arbitrary channel (avoid the ones used in rtmp.c)
     packet->m_nChannel = 10;
    // sps/pps carry no timestamp
     packet->m_nTimeStamp = 0;
diff --git a/Live/src/main/cpp/VideoStream.h b/Live/src/main/cpp/VideoStream.h
index 2565ea6..8d84fa6 100644
--- a/Live/src/main/cpp/VideoStream.h
+++ b/Live/src/main/cpp/VideoStream.h
@@ -19,6 +19,8 @@ public:
 
     void encodeData(int8_t *data);
 
+    void encodeDataNew(int8_t *y_plane, int8_t *u_plane, int8_t *v_plane);
+
     void setVideoCallback(VideoCallback videoCallback);
 
 private:
@@ -38,5 +40,4 @@ private:
 
     void sendFrame(int type, uint8_t *payload, int i_payload);
 };
-
 #endif
diff --git a/Live/src/main/java/com/frank/live/LivePusherNew.java b/Live/src/main/java/com/frank/live/LivePusherNew.java
index 70735bd..95721b7 100644
--- a/Live/src/main/java/com/frank/live/LivePusherNew.java
+++ b/Live/src/main/java/com/frank/live/LivePusherNew.java
@@ -2,12 +2,14 @@ package com.frank.live;
 
 import android.app.Activity;
 import android.view.SurfaceHolder;
+import android.view.TextureView;
 
 import com.frank.live.listener.LiveStateChangeListener;
 import com.frank.live.param.AudioParam;
 import com.frank.live.param.VideoParam;
 import com.frank.live.stream.AudioStream;
 import com.frank.live.stream.VideoStream;
+import com.frank.live.stream.VideoStreamNew;
 
 public class LivePusherNew {
 
@@ -32,6 +34,7 @@ public class LivePusherNew {
 
     private AudioStream audioStream;
     private VideoStream videoStream;
+//    private VideoStreamNew videoStream;
 
     private LiveStateChangeListener liveStateChangeListener;
 
@@ -42,6 +45,12 @@ public class LivePusherNew {
         audioStream = new AudioStream(this, audioParam);
     }
 
+    public LivePusherNew(Activity activity, VideoParam videoParam, AudioParam audioParam, TextureView textureView) {
+        native_init();
+//        videoStream = new VideoStreamNew(this, textureView, videoParam, activity);
+        audioStream = new AudioStream(this, audioParam);
+    }
+
     public void setPreviewDisplay(SurfaceHolder surfaceHolder) {
         videoStream.setPreviewDisplay(surfaceHolder);
     }
@@ -141,6 +150,10 @@ public class LivePusherNew {
         native_pushVideo(data);
     }
 
+    public void pushVideo(byte[] y, byte[] u, byte[] v) {
+        native_pushVideoNew(y, u, v);
+    }
+
     private native void native_init();
 
     private native void native_start(String path);
@@ -155,6 +168,8 @@ public class LivePusherNew {
 
     private native void native_pushVideo(byte[] data);
 
+    private native void native_pushVideoNew(byte[] y, byte[] u, byte[] v);
+
     private native void native_stop();
 
     private native void native_release();
diff --git a/Live/src/main/java/com/frank/live/stream/VideoStreamNew.java b/Live/src/main/java/com/frank/live/stream/VideoStreamNew.java
new file mode 100644
index 0000000..b4a52ff
--- /dev/null
+++ b/Live/src/main/java/com/frank/live/stream/VideoStreamNew.java
@@ -0,0 +1,151 @@
+package com.frank.live.stream;
+
+import android.app.Activity;
+import android.content.Context;
+import android.graphics.Point;
+import android.graphics.SurfaceTexture;
+import android.util.Log;
+import android.util.Size;
+import android.view.SurfaceHolder;
+import android.view.TextureView;
+
+import com.frank.live.LivePusherNew;
+import com.frank.live.camera2.Camera2Helper;
+import com.frank.live.camera2.Camera2Listener;
+import com.frank.live.param.VideoParam;
+
+/**
+ * Video pushing with Camera2
+ * Created by frank on 2020/02/12.
+ */
+public class VideoStreamNew implements TextureView.SurfaceTextureListener, Camera2Listener {
+
+    private static final String TAG = VideoStreamNew.class.getSimpleName();
+
+    private LivePusherNew mLivePusher;
+    private Camera2Helper camera2Helper;
+    private boolean isLiving;
+    private TextureView mTextureView;
+    private Context mContext;
+    private VideoParam mVideoParam;
+
+    public VideoStreamNew(LivePusherNew livePusher, TextureView textureView, VideoParam videoParam, Context context) {
+        this.mLivePusher = livePusher;
+        this.mTextureView = textureView;
+        this.mVideoParam = videoParam;
+        this.mContext = context;
+        mTextureView.setSurfaceTextureListener(this);
+    }
+
+    public void setPreviewDisplay(SurfaceHolder surfaceHolder) {
+//        cameraHelper.setPreviewDisplay(surfaceHolder);
+    }
+
+    /**
+     * Start the preview
+     */
+    private void startPreview() {
+        int rotateDegree = 0;
+        if (mContext instanceof Activity) {
+            rotateDegree = ((Activity) mContext).getWindowManager().getDefaultDisplay().getRotation();
+        }
+        Log.e(TAG, "preview width=" + mTextureView.getWidth() + "--height=" + mTextureView.getHeight());
+        camera2Helper = new Camera2Helper.Builder()
+                .cameraListener(this)
+                .specificCameraId(Camera2Helper.CAMERA_ID_BACK)
+                .context(mContext.getApplicationContext())
+                .previewOn(mTextureView)
+//                .previewViewSize(new Point(mTextureView.getWidth(), mTextureView.getHeight()))
+                .previewViewSize(new Point(mVideoParam.getWidth(), mVideoParam.getHeight()))
+                .rotation(rotateDegree)
+                .build();
+        camera2Helper.start();
+    }
+
+    public void switchCamera() {
+        if (camera2Helper != null) {
+            camera2Helper.switchCamera();
+        }
+    }
+
+    public void startLive() {
+        isLiving = true;
+    }
+
+    public void stopLive() {
+        isLiving = false;
+    }
+
+    public void release() {
+        if (camera2Helper != null) {
+            camera2Helper.stop();
+            camera2Helper.release();
+            camera2Helper = null;
+        }
+    }
+
+    /**
+     * Stop the preview
+     */
+    private void stopPreview() {
+        if (camera2Helper != null) {
+            camera2Helper.stop();
+        }
+    }
+
+    @Override
+    public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
+        Log.e(TAG, "onSurfaceTextureAvailable...");
+        startPreview();
+    }
+
+    @Override
+    public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) {
+
+    }
+
+    @Override
+    public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
+        Log.e(TAG, "onSurfaceTextureDestroyed...");
+        stopPreview();
+        return false;
+    }
+
+    @Override
+    public void onSurfaceTextureUpdated(SurfaceTexture surface) {
+
+    }
+
+    /**
+     * NV21 camera data
+     * @param y plane of y
+     * @param u plane of u
+     * @param v plane of v
+     */
+    @Override
+    public void onPreviewFrame(byte[] y, byte[] u, byte[] v) {
+        if (isLiving && mLivePusher != null) {
+            mLivePusher.pushVideo(y, u, v);
+        }
+    }
+
+    @Override
+    public void onCameraOpened(Size previewSize, int displayOrientation) {
+        Log.e(TAG, "onCameraOpened previewSize=" + previewSize.toString());
+        if (mLivePusher != null && mVideoParam != null) {
+            mLivePusher.setVideoCodecInfo(previewSize.getWidth(), previewSize.getHeight(),
+                    mVideoParam.getFrameRate(), mVideoParam.getBitRate());
+        }
+    }
+
+    @Override
+    public void onCameraClosed() {
+        Log.e(TAG, "onCameraClosed");
+    }
+
+    @Override
+    public void onCameraError(Exception e) {
+        Log.e(TAG, "onCameraError=" + e.toString());
+    }
+
+}
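
Usage note (not part of the patch): the new Java entry point LivePusherNew.pushVideo(byte[] y, byte[] u, byte[] v)
pairs with VideoStream::encodeDataNew(), which copies ySize bytes into plane[0] and ySize/4 bytes into plane[1]
and plane[2], i.e. it expects planar YUV 4:2:0 data. The sketch below shows how a packed I420 buffer could be
split into the three plane arrays that pushVideo() expects; the class name I420Planes and the packed-buffer
layout are illustrative assumptions, not part of this commit.

    // Hypothetical helper: splits a packed I420 frame (Y, then U, then V) into the
    // three plane arrays expected by LivePusherNew.pushVideo(y, u, v).
    // Plane sizes mirror the native side: ySize for Y, ySize / 4 for each of U and V.
    public final class I420Planes {

        private I420Planes() {
        }

        public static byte[][] split(byte[] i420, int width, int height) {
            int ySize = width * height;
            int uvSize = ySize / 4;
            if (i420.length < ySize + 2 * uvSize) {
                throw new IllegalArgumentException("buffer too small for " + width + "x" + height);
            }
            byte[] y = new byte[ySize];
            byte[] u = new byte[uvSize];
            byte[] v = new byte[uvSize];
            System.arraycopy(i420, 0, y, 0, ySize);
            System.arraycopy(i420, ySize, u, 0, uvSize);
            System.arraycopy(i420, ySize + uvSize, v, 0, uvSize);
            return new byte[][]{y, u, v};
        }
    }

    // Example call site (livePusher and frame are assumed to exist):
    //     byte[][] planes = I420Planes.split(frame, width, height);
    //     livePusher.pushVideo(planes[0], planes[1], planes[2]);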