Fix RTMP streaming crashes, missing audio, and video latency

pull/107/head
xufulong 5 years ago
parent a1002eac69
commit b096c7081e
  1. Live/CMakeLists.txt (5 changes)
  2. Live/src/main/AndroidManifest.xml (2 changes)
  3. Live/src/main/cpp/AudioStream.cpp (103 changes)
  4. Live/src/main/cpp/AudioStream.h (36 changes)
  5. Live/src/main/cpp/PushGeneric.h (12 changes)
  6. Live/src/main/cpp/RtmpPusher.cpp (239 changes)
  7. Live/src/main/cpp/VideoStream.cpp (207 changes)
  8. Live/src/main/cpp/VideoStream.h (42 changes)
  9. Live/src/main/cpp/live.c (12 changes)
  10. Live/src/main/cpp/safe_queue.h (169 changes)
  11. Live/src/main/java/com/frank/live/LivePusherNew.java (162 changes)
  12. Live/src/main/java/com/frank/live/RtmpLiveActivity.java (130 changes)
  13. Live/src/main/java/com/frank/live/stream/AudioStream.java (84 changes)
  14. Live/src/main/java/com/frank/live/stream/CameraHelper.java (217 changes)
  15. Live/src/main/java/com/frank/live/stream/VideoStream.java (66 changes)
  16. app/src/main/AndroidManifest.xml (2 changes)
  17. app/src/main/java/com/frank/ffmpeg/activity/LiveActivity.java (21 changes)
  18. app/src/main/res/layout/activity_live.xml (2 changes)

@ -34,7 +34,10 @@ include_directories(main/cpp/include)
add_library(live
SHARED
src/main/cpp/live.c
src/main/cpp/queue.c)
src/main/cpp/queue.c
src/main/cpp/AudioStream.cpp
src/main/cpp/VideoStream.cpp
src/main/cpp/RtmpPusher.cpp)
find_library( log-lib
log )

@ -14,7 +14,7 @@
android:roundIcon="@mipmap/ic_launcher_round"
android:supportsRtl="true"
android:theme="@style/AppTheme" >
<!--<activity android:name="com.frank.live.RtmpLiveActivityvity"-->
<!--<activity android:name="com.frank.live.PushActivity"-->
<!--android:screenOrientation="landscape">-->
<!--<intent-filter>-->
<!--<action android:name="android.intent.action.MAIN" />-->

@ -0,0 +1,103 @@
#include <cstring>
#include "AudioStream.h"
#include "PushGeneric.h"
AudioStream::AudioStream() {
}
AudioStream::~AudioStream() {
//buffer was allocated with new[], so it must be released with delete[]
delete [] buffer;
buffer = 0;
if (audioCodec) {
faacEncClose(audioCodec);
audioCodec = 0;
}
}
void AudioStream::setAudioCallback(AudioCallback audioCallback) {
this->audioCallback = audioCallback;
}
void AudioStream::setAudioEncInfo(int samplesInHZ, int channels) {
//open the encoder
mChannels = channels;
//inputSamples: max number of samples accepted per encode call (one sample = 16 bits / 2 bytes)
//maxOutputBytes: max size in bytes of one encoded output
audioCodec = faacEncOpen(static_cast<unsigned long>(samplesInHZ),
static_cast<unsigned int>(channels),
&inputSamples,
&maxOutputBytes);
//configure the encoder
faacEncConfigurationPtr config = faacEncGetCurrentConfiguration(audioCodec);
//MPEG-4 standard
config->mpegVersion = MPEG4;
//AAC-LC profile
config->aacObjectType = LOW;
//16-bit input
config->inputFormat = FAAC_INPUT_16BIT;
//output raw AAC data (0 = raw, no ADTS header)
config->outputFormat = 0;
faacEncSetConfiguration(audioCodec, config);
//output buffer that holds the encoded data
buffer = new u_char[maxOutputBytes];
}
int AudioStream::getInputSamples() {
return static_cast<int>(inputSamples);
}
RTMPPacket *AudioStream::getAudioTag() {
u_char *buf;
u_long len;
faacEncGetDecoderSpecificInfo(audioCodec, &buf, &len);
int bodySize = static_cast<int>(2 + len);
RTMPPacket *packet = new RTMPPacket;
RTMPPacket_Alloc(packet, bodySize);
//0xAF = AAC, 44 kHz, 16-bit, stereo
packet->m_body[0] = 0xAF;
if (mChannels == 1) {
packet->m_body[0] = 0xAE;
}
//0x00 = AAC sequence header (the AudioSpecificConfig follows)
packet->m_body[1] = 0x00;
memcpy(&packet->m_body[2], buf, len);
packet->m_hasAbsTimestamp = 0;
packet->m_nBodySize = bodySize;
packet->m_packetType = RTMP_PACKET_TYPE_AUDIO;
packet->m_nChannel = 0x11;
packet->m_headerType = RTMP_PACKET_SIZE_LARGE;
return packet;
}
void AudioStream::encodeData(int8_t *data) {
//returns the number of encoded bytes
int byteLen = faacEncEncode(audioCodec, reinterpret_cast<int32_t *>(data),
static_cast<unsigned int>(inputSamples),
buffer,
static_cast<unsigned int>(maxOutputBytes));
if (byteLen > 0) {
int bodySize = 2 + byteLen;
RTMPPacket *packet = new RTMPPacket;
RTMPPacket_Alloc(packet, bodySize);
//0xAF = AAC, 44 kHz, 16-bit, stereo
packet->m_body[0] = 0xAF;
if (mChannels == 1) {
packet->m_body[0] = 0xAE;
}
//0x01 = AAC raw frame data
packet->m_body[1] = 0x01;
memcpy(&packet->m_body[2], buffer, static_cast<size_t>(byteLen));
packet->m_hasAbsTimestamp = 0;
packet->m_nBodySize = bodySize;
packet->m_packetType = RTMP_PACKET_TYPE_AUDIO;
packet->m_nChannel = 0x11;
packet->m_headerType = RTMP_PACKET_SIZE_LARGE;
audioCallback(packet);
}
}
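For reference, the 0xAF/0xAE byte written above is not arbitrary: it packs four FLV AudioTagHeader fields into a single byte. A hypothetical helper, not part of this commit, showing how the two hard-coded values are derived:

#include <cstdint>
// Derive the FLV AudioTagHeader byte hard-coded above as 0xAF/0xAE.
uint8_t flvAudioHeader(int channels) {
    const uint8_t soundFormat = 10;                    // 10 = AAC
    const uint8_t soundRate = 3;                       // 3 = 44 kHz (AAC always signals 3 here)
    const uint8_t soundSize = 1;                       // 1 = 16-bit samples
    const uint8_t soundType = (channels == 1) ? 0 : 1; // 0 = mono, 1 = stereo
    // (10 << 4) | (3 << 2) | (1 << 1) | 1 = 0xAF; mono yields 0xAE
    return (uint8_t) ((soundFormat << 4) | (soundRate << 2) | (soundSize << 1) | soundType);
}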

@ -0,0 +1,36 @@
#ifndef AUDIOSTREAM_H
#define AUDIOSTREAM_H
#include "include/rtmp/rtmp.h"
#include "include/faac/faac.h"
#include <sys/types.h>
class AudioStream {
typedef void (*AudioCallback)(RTMPPacket *packet);
public:
AudioStream();
~AudioStream();
void setAudioEncInfo(int samplesInHZ, int channels);
void setAudioCallback(AudioCallback audioCallback);
int getInputSamples();
void encodeData(int8_t *data);
RTMPPacket* getAudioTag();
private:
AudioCallback audioCallback;
int mChannels;
faacEncHandle audioCodec = 0;
u_long inputSamples;
u_long maxOutputBytes;
u_char *buffer = 0;
};
#endif

@ -0,0 +1,12 @@
#ifndef PUSHGENERIC_H
#define PUSHGENERIC_H
#include <android/log.h>
#define LOGI(...) __android_log_print(ANDROID_LOG_INFO,"FrankLive",__VA_ARGS__)
#define LOGE(...) __android_log_print(ANDROID_LOG_ERROR,"FrankLive",__VA_ARGS__)
#define DELETE(obj) if(obj){ delete obj; obj = 0; }
#endif
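Note that DELETE expands to scalar delete, which matches the single objects it is applied to in RtmpPusher.cpp; memory obtained with new[], such as AudioStream's output buffer, strictly needs delete[]. A hypothetical array-aware companion macro, not part of this commit:

//companion macro for arrays allocated with new[] (hypothetical, same style as DELETE)
#define DELETE_ARRAY(obj) if(obj){ delete [] obj; obj = 0; }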

@ -0,0 +1,239 @@
#include <jni.h>
#include <string>
#include "include/rtmp/rtmp.h"
#include "safe_queue.h"
#include "PushGeneric.h"
#include "VideoStream.h"
#include "AudioStream.h"
SafeQueue<RTMPPacket *> packets;
VideoStream *videoChannel = 0;
int isStart = 0;
pthread_t pid;
int readyPushing = 0;
uint32_t start_time;
AudioStream *audioChannel = 0;
//JavaVM is needed to call back into Java from worker threads
JavaVM *javaVM;
//global reference to the calling Java object
jobject jobject_error;
/*************** must stay in sync with the Java layer **************/
//failed to open the video encoder
const int ERROR_VIDEO_ENCODER_OPEN = 0x01;
//failed to encode a video frame
const int ERROR_VIDEO_ENCODE = 0x02;
//failed to open the audio encoder
const int ERROR_AUDIO_ENCODER_OPEN = 0x03;
//failed to encode an audio frame
const int ERROR_AUDIO_ENCODE = 0x04;
//RTMP connection failed
const int ERROR_RTMP_CONNECT = 0x05;
//RTMP stream connection failed
const int ERROR_RTMP_CONNECT_STREAM = 0x06;
//failed to send an RTMP packet
const int ERROR_RTMP_SEND_PACKET = 0x07;
/*************** must stay in sync with the Java layer **************/
//called back when System.loadLibrary is invoked
jint JNICALL JNI_OnLoad(JavaVM *vm, void *reserved) {
javaVM = vm;
return JNI_VERSION_1_6;
}
//report errors back to Java
void throwErrToJava(int error_code) {
JNIEnv *env;
javaVM->AttachCurrentThread(&env, NULL);
jclass classErr = env->GetObjectClass(jobject_error);
jmethodID methodErr = env->GetMethodID(classErr, "errorFromNative", "(I)V");
env->CallVoidMethod(jobject_error, methodErr, error_code);
javaVM->DetachCurrentThread();
}
void releasePackets(RTMPPacket *&packet) {
if (packet) {
RTMPPacket_Free(packet);
delete packet;
packet = 0;
}
}
void callback(RTMPPacket *packet) {
if (packet) {
packet->m_nTimeStamp = RTMP_GetTime() - start_time;
packets.push(packet);
}
}
extern "C"
JNIEXPORT void JNICALL
Java_com_frank_live_LivePusherNew_native_1init(JNIEnv *env, jobject instance) {
videoChannel = new VideoStream;
videoChannel->setVideoCallback(callback);
audioChannel = new AudioStream;
audioChannel->setAudioCallback(callback);
packets.setReleaseCallback(releasePackets);
jobject_error = env->NewGlobalRef(instance);
}
extern "C"
JNIEXPORT void JNICALL
Java_com_frank_live_LivePusherNew_native_1setVideoCodecInfo(JNIEnv *env, jobject instance,
jint width, jint height, jint fps,
jint bitrate) {
if (videoChannel) {
videoChannel->setVideoEncInfo(width, height, fps, bitrate);
}
}
void *start(void *args) {
char *url = static_cast<char *>(args);
RTMP *rtmp = 0;
do {
rtmp = RTMP_Alloc();
if (!rtmp) {
LOGE("RTMP_Alloc fail");
break;
}
RTMP_Init(rtmp);
int ret = RTMP_SetupURL(rtmp, url);
if (!ret) {
LOGE("RTMP_SetupURL:%s", url);
break;
}
//timeout in seconds
rtmp->Link.timeout = 5;
RTMP_EnableWrite(rtmp);
ret = RTMP_Connect(rtmp, 0);
if (!ret) {
LOGE("RTMP_Connect:%s", url);
throwErrToJava(ERROR_RTMP_CONNECT);
break;
}
ret = RTMP_ConnectStream(rtmp, 0);
if (!ret) {
LOGE("RTMP_ConnectStream:%s", url);
throwErrToJava(ERROR_RTMP_CONNECT_STREAM);
break;
}
//record the start time
start_time = RTMP_GetTime();
//start pushing
readyPushing = 1;
packets.setWork(1);
callback(audioChannel->getAudioTag());
RTMPPacket *packet = 0;
while (readyPushing) {
packets.pop(packet);
if (!readyPushing) {
break;
}
if (!packet) {
continue;
}
packet->m_nInfoField2 = rtmp->m_stream_id;
ret = RTMP_SendPacket(rtmp, packet, 1);
releasePackets(packet);
if (!ret) {
LOGE("RTMP_SendPacket fail...");
throwErrToJava(ERROR_RTMP_SEND_PACKET);
break;
}
}
releasePackets(packet);
} while (0);
isStart = 0;
readyPushing = 0;
packets.setWork(0);
packets.clear();
if (rtmp) {
RTMP_Close(rtmp);
RTMP_Free(rtmp);
}
//url was allocated with new[], so release it with delete[]
delete [] url;
return 0;
}
extern "C"
JNIEXPORT void JNICALL
Java_com_frank_live_LivePusherNew_native_1start(JNIEnv *env, jobject instance,
jstring path_) {
if (isStart) {
return;
}
isStart = 1;
const char *path = env->GetStringUTFChars(path_, 0);
char *url = new char[strlen(path) + 1];
strcpy(url, path);
pthread_create(&pid, 0, start, url);
env->ReleaseStringUTFChars(path_, path);
}
extern "C"
JNIEXPORT void JNICALL
Java_com_frank_live_LivePusherNew_native_1pushVideo(JNIEnv *env, jobject instance,
jbyteArray data_) {
if (!videoChannel || !readyPushing) {
return;
}
jbyte *data = env->GetByteArrayElements(data_, NULL);
videoChannel->encodeData(data);
env->ReleaseByteArrayElements(data_, data, 0);
}
extern "C"
JNIEXPORT void JNICALL
Java_com_frank_live_LivePusherNew_native_1setAudioCodecInfo(JNIEnv *env, jobject instance,
jint sampleRateInHz,
jint channels) {
if (audioChannel) {
audioChannel->setAudioEncInfo(sampleRateInHz, channels);
}
}
extern "C"
JNIEXPORT jint JNICALL
Java_com_frank_live_LivePusherNew_getInputSamples(JNIEnv *env, jobject instance) {
if (audioChannel) {
return audioChannel->getInputSamples();
}
return -1;
}
extern "C"
JNIEXPORT void JNICALL
Java_com_frank_live_LivePusherNew_native_1pushAudio(JNIEnv *env, jobject instance,
jbyteArray data_) {
if (!audioChannel || !readyPushing) {
return;
}
jbyte *data = env->GetByteArrayElements(data_, NULL);
audioChannel->encodeData(data);
env->ReleaseByteArrayElements(data_, data, 0);
}
extern "C"
JNIEXPORT void JNICALL
Java_com_frank_live_LivePusherNew_native_1stop(JNIEnv *env, jobject instance) {
readyPushing = 0;
packets.setWork(0);
pthread_join(pid, 0);
}
extern "C"
JNIEXPORT void JNICALL
Java_com_frank_live_LivePusherNew_native_1release(JNIEnv *env, jobject instance) {
env->DeleteGlobalRef(jobject_error);
DELETE(videoChannel);
DELETE(audioChannel);
}
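One naming detail worth flagging: the _1 in the exported symbols above is JNI escaping, not a typo. An underscore inside a Java method name is encoded as "_1" in the native function name:

// Java declaration                    -> exported JNI symbol
// private native void native_init();  -> Java_com_frank_live_LivePusherNew_native_1init
// private native void native_start(); -> Java_com_frank_live_LivePusherNew_native_1start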

@ -0,0 +1,207 @@
#include "VideoStream.h"
#include "include/rtmp/rtmp.h"
#include "PushGeneric.h"
VideoStream::VideoStream() {
pthread_mutex_init(&mutex, 0);
}
VideoStream::~VideoStream() {
pthread_mutex_destroy(&mutex);
if (videoCodec) {
x264_encoder_close(videoCodec);
videoCodec = 0;
}
if (pic_in) {
x264_picture_clean(pic_in);
DELETE(pic_in);
}
}
void VideoStream::setVideoEncInfo(int width, int height, int fps, int bitrate) {
pthread_mutex_lock(&mutex);
mWidth = width;
mHeight = height;
mFps = fps;
mBitrate = bitrate;
ySize = width * height;
uvSize = ySize / 4;
if (videoCodec) {
x264_encoder_close(videoCodec);
videoCodec = 0;
}
if (pic_in) {
x264_picture_clean(pic_in);
DELETE(pic_in);
}
//open the x264 encoder
//x264 encoder parameters
x264_param_t param;
x264_param_default_preset(&param, "ultrafast", "zerolatency");
param.i_level_idc = 32;
//input data format
param.i_csp = X264_CSP_I420;
param.i_width = width;
param.i_height = height;
//no B-frames
param.i_bframe = 0;
//i_rc_method selects rate control: CQP (constant QP), CRF (constant rate factor) or ABR (average bitrate)
param.rc.i_rc_method = X264_RC_ABR;
//bitrate in kbps
param.rc.i_bitrate = bitrate / 1000;
//instantaneous peak bitrate
param.rc.i_vbv_max_bitrate = bitrate / 1000 * 1.2;
//required once i_vbv_max_bitrate is set: VBV buffer size in kbps
param.rc.i_vbv_buffer_size = bitrate / 1000;
//frame rate
param.i_fps_num = fps;
param.i_fps_den = 1;
param.i_timebase_den = param.i_fps_num;
param.i_timebase_num = param.i_fps_den;
//use fps rather than timestamps to compute frame spacing
param.b_vfr_input = 0;
//keyframe interval: one I-frame every 2 seconds
param.i_keyint_max = fps * 2;
//attach SPS/PPS to every keyframe (I-frame)
param.b_repeat_headers = 1;
//number of threads
param.i_threads = 1;
x264_param_apply_profile(&param, "baseline");
//open the encoder
videoCodec = x264_encoder_open(&param);
pic_in = new x264_picture_t;
x264_picture_alloc(pic_in, X264_CSP_I420, width, height);
pthread_mutex_unlock(&mutex);
}
void VideoStream::setVideoCallback(VideoCallback videoCallback) {
this->videoCallback = videoCallback;
}
void VideoStream::encodeData(int8_t *data) {
pthread_mutex_lock(&mutex);
//Y plane
memcpy(pic_in->img.plane[0], data, ySize);
for (int i = 0; i < uvSize; ++i) {
//NV21 stores V,U interleaved after the Y plane: U at odd offsets, V at even offsets
*(pic_in->img.plane[1] + i) = *(data + ySize + i * 2 + 1);
*(pic_in->img.plane[2] + i) = *(data + ySize + i * 2);
}
x264_nal_t *pp_nal;
int pi_nal;
x264_picture_t pic_out;
x264_encoder_encode(videoCodec, &pp_nal, &pi_nal, pic_in, &pic_out);
int sps_len = 0;
int pps_len = 0;
uint8_t sps[100];
uint8_t pps[100];
for (int i = 0; i < pi_nal; ++i) {
if (pp_nal[i].i_type == NAL_SPS) {
sps_len = pp_nal[i].i_payload - 4;
memcpy(sps, pp_nal[i].p_payload + 4, static_cast<size_t>(sps_len));
} else if (pp_nal[i].i_type == NAL_PPS) {
pps_len = pp_nal[i].i_payload - 4;
memcpy(pps, pp_nal[i].p_payload + 4, static_cast<size_t>(pps_len));
sendSpsPps(sps, pps, sps_len, pps_len);
} else {
sendFrame(pp_nal[i].i_type, pp_nal[i].p_payload, pp_nal[i].i_payload);
}
}
pthread_mutex_unlock(&mutex);
}
void VideoStream::sendSpsPps(uint8_t *sps, uint8_t *pps, int sps_len, int pps_len) {
//sizes per the AVC sequence header (AVCDecoderConfigurationRecord) layout
int bodySize = 13 + sps_len + 3 + pps_len;
RTMPPacket *packet = new RTMPPacket;
RTMPPacket_Alloc(packet, bodySize);
int i = 0;
//frame type (1 = keyframe) + codec id (7 = AVC)
packet->m_body[i++] = 0x17;
//AVCPacketType: 0 = sequence header
packet->m_body[i++] = 0x00;
//composition time: 0
packet->m_body[i++] = 0x00;
packet->m_body[i++] = 0x00;
packet->m_body[i++] = 0x00;
//configurationVersion
packet->m_body[i++] = 0x01;
//profile, compatibility and level, copied from the SPS
packet->m_body[i++] = sps[1];
packet->m_body[i++] = sps[2];
packet->m_body[i++] = sps[3];
//NALU length size minus one (0xFF = 4-byte lengths)
packet->m_body[i++] = 0xFF;
//number of SPS (0xE1 = 1)
packet->m_body[i++] = 0xE1;
//SPS length
packet->m_body[i++] = (sps_len >> 8) & 0xff;
packet->m_body[i++] = sps_len & 0xff;
memcpy(&packet->m_body[i], sps, sps_len);
i += sps_len;
//number of PPS
packet->m_body[i++] = 0x01;
packet->m_body[i++] = (pps_len >> 8) & 0xff;
packet->m_body[i++] = (pps_len) & 0xff;
memcpy(&packet->m_body[i], pps, pps_len);
//video packet
packet->m_packetType = RTMP_PACKET_TYPE_VIDEO;
packet->m_nBodySize = bodySize;
//pick an arbitrary channel (avoid the ones used inside rtmp.c)
packet->m_nChannel = 10;
//SPS/PPS carry no timestamp
packet->m_nTimeStamp = 0;
//no absolute timestamp
packet->m_hasAbsTimestamp = 0;
packet->m_headerType = RTMP_PACKET_SIZE_MEDIUM;
videoCallback(packet);
}
void VideoStream::sendFrame(int type, uint8_t *payload, int i_payload) {
if (payload[2] == 0x00) {
i_payload -= 4;
payload += 4;
} else {
i_payload -= 3;
payload += 3;
}
int bodySize = 9 + i_payload;
RTMPPacket *packet = new RTMPPacket;
RTMPPacket_Alloc(packet, bodySize);
//0x27 = inter frame + AVC; 0x17 = keyframe + AVC
packet->m_body[0] = 0x27;
if (type == NAL_SLICE_IDR) {
packet->m_body[0] = 0x17;
LOGE("IDR key frame");
}
//AVCPacketType: 1 = NALU
packet->m_body[1] = 0x01;
//composition time: 0
packet->m_body[2] = 0x00;
packet->m_body[3] = 0x00;
packet->m_body[4] = 0x00;
//NALU length: 4-byte big-endian int
packet->m_body[5] = (i_payload >> 24) & 0xff;
packet->m_body[6] = (i_payload >> 16) & 0xff;
packet->m_body[7] = (i_payload >> 8) & 0xff;
packet->m_body[8] = (i_payload) & 0xff;
memcpy(&packet->m_body[9], payload, static_cast<size_t>(i_payload));
packet->m_hasAbsTimestamp = 0;
packet->m_nBodySize = bodySize;
packet->m_packetType = RTMP_PACKET_TYPE_VIDEO;
packet->m_nChannel = 0x10;
packet->m_headerType = RTMP_PACKET_SIZE_LARGE;
videoCallback(packet);
}
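The payload[2] test at the top of sendFrame() distinguishes the two Annex-B start codes x264 can emit in front of a NAL unit. A small illustration, a hypothetical helper rather than code from this commit:

#include <cstdint>
// 00 00 00 01 -> payload[2] == 0x00 -> strip 4 bytes
// 00 00 01    -> payload[2] == 0x01 -> strip 3 bytes
static int startCodeLength(const uint8_t *payload) {
    return payload[2] == 0x00 ? 4 : 3;
}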

@ -0,0 +1,42 @@
#ifndef VIDEOSTREAM_H
#define VIDEOSTREAM_H
#include <inttypes.h>
#include <pthread.h>
#include "include/rtmp/rtmp.h"
#include "include/x264/x264.h"
class VideoStream {
typedef void (*VideoCallback)(RTMPPacket* packet);
public:
VideoStream();
~VideoStream();
//create the x264 encoder
void setVideoEncInfo(int width, int height, int fps, int bitrate);
void encodeData(int8_t *data);
void setVideoCallback(VideoCallback videoCallback);
private:
pthread_mutex_t mutex;
int mWidth;
int mHeight;
int mFps;
int mBitrate;
x264_t *videoCodec = 0;
x264_picture_t *pic_in = 0;
int ySize;
int uvSize;
VideoCallback videoCallback;
void sendSpsPps(uint8_t *sps, uint8_t *pps, int sps_len, int pps_len);
void sendFrame(int type, uint8_t *payload, int i_payload);
};
#endif

@ -42,7 +42,7 @@ const int ERROR_RTMP_CONNECT = 0x05;
//RTMP stream connection failed
const int ERROR_RTMP_CONNECT_STREAM = 0x06;
//failed to send an RTMP packet
const int ERROR_RTMP_SEND_PACKAT = 0x07;
const int ERROR_RTMP_SEND_PACKET = 0x07;
/*************** must stay in sync with the Java layer **************/
@ -57,10 +57,10 @@ void add_aac_body(unsigned char *buf, int len);
void add_aac_header();
//called back when System.loadLibrary is invoked
jint JNICALL JNI_OnLoad(JavaVM *vm, void *reserved) {
javaVM = vm;
return JNI_VERSION_1_6;
}
//jint JNICALL JNI_OnLoad(JavaVM *vm, void *reserved) {
// javaVM = vm;
// return JNI_VERSION_1_6;
//}
//report errors back to Java
void throw_error_to_java(int error_code) {
@ -117,7 +117,7 @@ void *push_thread(void *args) {
LOGE("RTMP_SendPacket fail...");
RTMPPacket_Free(packet);
pthread_mutex_unlock(&mutex);
throw_error_to_java(ERROR_RTMP_SEND_PACKAT);
throw_error_to_java(ERROR_RTMP_SEND_PACKET);
goto end;
}
RTMPPacket_Free(packet);

@ -0,0 +1,169 @@
#ifndef SAFE_QUEUE_H
#define SAFE_QUEUE_H
#include <queue>
#include <pthread.h>
#ifdef C11
#include <thread>
#endif
using namespace std;
template<typename T>
class SafeQueue {
typedef void (*ReleaseCallback)(T &);
typedef void (*SyncHandle)(queue<T> &);
public:
SafeQueue() {
#ifdef C11
#else
pthread_mutex_init(&mutex, NULL);
pthread_cond_init(&cond, NULL);
#endif
}
~SafeQueue() {
#ifdef C11
#else
pthread_cond_destroy(&cond);
pthread_mutex_destroy(&mutex);
#endif
}
void push(T new_value) {
#ifdef C11
//lock_guard releases automatically, much like a smart pointer
lock_guard<mutex> lk(mt);
if (work) {
q.push(new_value);
cv.notify_one();
} else {
//mirror the pthread branch: release values pushed while not working
releaseCallback(new_value);
}
#else
pthread_mutex_lock(&mutex);
if (work) {
q.push(new_value);
pthread_cond_signal(&cond);
} else {
releaseCallback(new_value);
}
pthread_mutex_unlock(&mutex);
#endif
}
int pop(T& value) {
int ret = 0;
#ifdef C11
//unique_lock costs a bit more space and time than lock_guard, but condition variables require it and it is more flexible
unique_lock<mutex> lk(mt);
//second argument is a predicate lambda: once it returns true, wait() stops blocking and execution continues
cv.wait(lk,[this]{return !work || !q.empty();});
if (!q.empty()) {
value = q.front();
q.pop();
ret = 1;
}
#else
pthread_mutex_lock(&mutex);
while (work && q.empty()) {
pthread_cond_wait(&cond, &mutex);
}
if (!q.empty()) {
value = q.front();
q.pop();
ret = 1;
}
pthread_mutex_unlock(&mutex);
#endif
return ret;
}
void setWork(int work) {
#ifdef C11
lock_guard<mutex> lk(mt);
this->work = work;
#else
pthread_mutex_lock(&mutex);
this->work = work;
pthread_cond_signal(&cond);
pthread_mutex_unlock(&mutex);
#endif
}
int empty() {
return q.empty();
}
int size() {
return q.size();
}
void clear() {
#ifdef C11
lock_guard<mutex> lk(mt);
int size = q.size();
for (int i = 0; i < size; ++i) {
T value = q.front();
releaseCallback(value);
q.pop();
}
#else
pthread_mutex_lock(&mutex);
int size = q.size();
for (int i = 0; i < size; ++i) {
T value = q.front();
releaseCallback(value);
q.pop();
}
pthread_mutex_unlock(&mutex);
#endif
}
void sync() {
#ifdef C11
lock_guard<mutex> lk(mt);
syncHandle(q);
#else
pthread_mutex_lock(&mutex);
syncHandle(q);
pthread_mutex_unlock(&mutex);
#endif
}
void setReleaseCallback(ReleaseCallback r) {
releaseCallback = r;
}
void setSyncHandle(SyncHandle s) {
syncHandle = s;
}
private:
#ifdef C11
mutex mt;
condition_variable cv;
#else
pthread_cond_t cond;
pthread_mutex_t mutex;
#endif
queue<T> q;
int work = 0;
ReleaseCallback releaseCallback;
SyncHandle syncHandle;
};
#endif
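A minimal usage sketch of this queue, mirroring how RtmpPusher.cpp wires it up (it assumes a release callback like the releasePackets function defined there):

SafeQueue<RTMPPacket *> queue;
queue.setReleaseCallback(releasePackets); // invoked for values pushed while work == 0, and by clear()
queue.setWork(1);                         // producers may enqueue; pop() may block waiting for data

// consumer thread:
RTMPPacket *packet = 0;
if (queue.pop(packet)) {
    // send the packet, then free it with releasePackets(packet)
}

// shutdown: setWork(0) signals the condition variable, so a blocked pop()
// wakes up, finds the queue empty, and returns 0, letting the consumer exit.
queue.setWork(0);
queue.clear();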

@ -0,0 +1,162 @@
package com.frank.live;
import android.app.Activity;
import android.view.SurfaceHolder;
import com.frank.live.listener.LiveStateChangeListener;
import com.frank.live.param.AudioParam;
import com.frank.live.param.VideoParam;
import com.frank.live.stream.AudioStream;
import com.frank.live.stream.VideoStream;
public class LivePusherNew {
//failed to open the video encoder
private final static int ERROR_VIDEO_ENCODER_OPEN = 0x01;
//failed to encode a video frame
private final static int ERROR_VIDEO_ENCODE = 0x02;
//failed to open the audio encoder
private final static int ERROR_AUDIO_ENCODER_OPEN = 0x03;
//failed to encode an audio frame
private final static int ERROR_AUDIO_ENCODE = 0x04;
//RTMP connection failed
private final static int ERROR_RTMP_CONNECT = 0x05;
//RTMP stream connection failed
private final static int ERROR_RTMP_CONNECT_STREAM = 0x06;
//failed to send an RTMP packet
private final static int ERROR_RTMP_SEND_PACKET = 0x07;
static {
System.loadLibrary("live");
}
private AudioStream audioStream;
private VideoStream videoStream;
private LiveStateChangeListener liveStateChangeListener;
public LivePusherNew(Activity activity, VideoParam videoParam, AudioParam audioParam) {
native_init();
videoStream = new VideoStream(this, activity, videoParam.getWidth(), videoParam.getHeight(),
videoParam.getBitRate(), videoParam.getFrameRate(), videoParam.getCameraId());
audioStream = new AudioStream(this, audioParam);
}
public void setPreviewDisplay(SurfaceHolder surfaceHolder) {
videoStream.setPreviewDisplay(surfaceHolder);
}
public void switchCamera() {
videoStream.switchCamera();
}
/**
 * Set mute on/off
 *
 * @param isMute whether to mute the microphone
 */
public void setMute(boolean isMute) {
audioStream.setMute(isMute);
}
public void startPush(String path, LiveStateChangeListener stateChangeListener) {
this.liveStateChangeListener = stateChangeListener;
native_start(path);
videoStream.startLive();
audioStream.startLive();
}
public void stopPush() {
videoStream.stopLive();
audioStream.stopLive();
native_stop();
}
public void release() {
videoStream.release();
audioStream.release();
native_release();
}
/**
 * Called when the native layer reports an error
 *
 * @param errCode the native error code
 */
public void errorFromNative(int errCode) {
//stop pushing
stopPush();
if (liveStateChangeListener != null) {
String msg = "";
switch (errCode) {
case ERROR_VIDEO_ENCODER_OPEN:
msg = "Failed to open the video encoder...";
break;
case ERROR_VIDEO_ENCODE:
msg = "Failed to encode a video frame...";
break;
case ERROR_AUDIO_ENCODER_OPEN:
msg = "Failed to open the audio encoder...";
break;
case ERROR_AUDIO_ENCODE:
msg = "Failed to encode an audio frame...";
break;
case ERROR_RTMP_CONNECT:
msg = "RTMP connection failed...";
break;
case ERROR_RTMP_CONNECT_STREAM:
msg = "RTMP stream connection failed...";
break;
case ERROR_RTMP_SEND_PACKET:
msg = "Failed to send an RTMP packet...";
break;
default:
break;
}
liveStateChangeListener.onError(msg);
}
}
public void setVideoCodecInfo(int width, int height, int fps, int bitrate) {
native_setVideoCodecInfo(width, height, fps, bitrate);
}
public void setAudioCodecInfo(int sampleRateInHz, int channels) {
native_setAudioCodecInfo(sampleRateInHz, channels);
}
public void start(String path) {
native_start(path);
}
public int getInputSample() {
return getInputSamples();
}
public void pushAudio(byte[] data) {
native_pushAudio(data);
}
public void pushVideo(byte[] data) {
native_pushVideo(data);
}
private native void native_init();
private native void native_start(String path);
private native void native_setVideoCodecInfo(int width, int height, int fps, int bitrate);
private native void native_setAudioCodecInfo(int sampleRateInHz, int channels);
private native int getInputSamples();
private native void native_pushAudio(byte[] data);
private native void native_pushVideo(byte[] data);
private native void native_stop();
private native void native_release();
}

@ -1,130 +0,0 @@
package com.frank.live;
import android.Manifest;
import android.annotation.SuppressLint;
import android.annotation.TargetApi;
import android.media.AudioFormat;
import android.os.Build;
import android.os.Handler;
import android.os.Message;
import androidx.annotation.NonNull;
import androidx.appcompat.app.AppCompatActivity;
import android.os.Bundle;
import android.text.TextUtils;
import android.util.Log;
import android.view.TextureView;
import android.view.View;
import android.widget.CompoundButton;
import android.widget.Toast;
import android.widget.ToggleButton;
import com.frank.live.Push.LivePusher;
import com.frank.live.camera2.Camera2Helper;
import com.frank.live.listener.LiveStateChangeListener;
import com.frank.live.param.AudioParam;
import com.frank.live.param.VideoParam;
/**
 * Live streaming with H.264 over RTMP
 * Created by frank on 2018/1/28.
 */
public class RtmpLiveActivity extends AppCompatActivity implements View.OnClickListener, CompoundButton.OnCheckedChangeListener, LiveStateChangeListener {
private final static String TAG = RtmpLiveActivity.class.getSimpleName();
private final static int CODE_CAMERA_RECORD = 0x0001;
private final static String[] permissions = new String[]{Manifest.permission.CAMERA, Manifest.permission.RECORD_AUDIO};
private final static String LIVE_URL = "rtmp://192.168.1.3/live/stream";
private final static int MSG_ERROR = 100;
private TextureView textureView;
private LivePusher livePusher;
@SuppressLint("HandlerLeak")
private Handler mHandler = new Handler() {
@Override
public void handleMessage(Message msg) {
super.handleMessage(msg);
if (msg.what == MSG_ERROR) {
String errMsg = (String) msg.obj;
if (!TextUtils.isEmpty(errMsg)) {
Toast.makeText(RtmpLiveActivity.this, errMsg, Toast.LENGTH_SHORT).show();
}
}
}
};
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_rtmp_live);
initView();
requirePermission();
initPusher();
}
private void initView() {
findViewById(R.id.btn_swap).setOnClickListener(this);
((ToggleButton) findViewById(R.id.btn_live)).setOnCheckedChangeListener(this);
textureView = findViewById(R.id.surface_camera);
}
private void initPusher() {
int width = 640;//the preview resolution matters a lot
int height = 480;
int videoBitRate = 400;//kb/s jason-->480kb
int videoFrameRate = 25;//fps
VideoParam videoParam = new VideoParam(width, height,
Integer.valueOf(Camera2Helper.CAMERA_ID_BACK), videoBitRate, videoFrameRate);
int sampleRate = 44100;//sample rate in Hz
int channelConfig = AudioFormat.CHANNEL_IN_STEREO;//stereo
int audioFormat = AudioFormat.ENCODING_PCM_16BIT;//16-bit PCM
int numChannels = 2;//number of channels
AudioParam audioParam = new AudioParam(sampleRate, channelConfig, audioFormat, numChannels);
livePusher = new LivePusher(textureView, videoParam, audioParam, this);
}
@Override
public void onClick(View v) {
if (v.getId() == R.id.btn_swap) {
livePusher.switchCamera();
}
}
@Override
public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) {
if (isChecked) {
livePusher.startPush(LIVE_URL, this);
} else {
livePusher.stopPush();
}
}
@Override
public void onError(String msg) {
Log.e(TAG, "errMsg=" + msg);
mHandler.obtainMessage(MSG_ERROR, msg).sendToTarget();
}
@TargetApi(Build.VERSION_CODES.M)
private void requirePermission() {
requestPermissions(permissions, CODE_CAMERA_RECORD);
}
@Override
public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults) {
super.onRequestPermissionsResult(requestCode, permissions, grantResults);
if (permissions.length > 0 && grantResults.length == permissions.length) {
for (int i = 0; i < permissions.length; i++) {
Log.i(TAG, permissions[i] + ":grantResult=" + grantResults[i]);
}
}
}
@Override
protected void onDestroy() {
super.onDestroy();
livePusher.release();
}
}

@ -0,0 +1,84 @@
package com.frank.live.stream;
import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaRecorder;
import com.frank.live.LivePusherNew;
import com.frank.live.param.AudioParam;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
public class AudioStream {
private int inputSamples;
private ExecutorService executor;
private AudioRecord audioRecord;
private LivePusherNew mLivePusher;
private boolean isLiving;
private boolean isMute;
public AudioStream(LivePusherNew livePusher, AudioParam audioParam) {
mLivePusher = livePusher;
executor = Executors.newSingleThreadExecutor();
int channelConfig;
if (audioParam.getNumChannels() == 2) {
channelConfig = AudioFormat.CHANNEL_IN_STEREO;
} else {
channelConfig = AudioFormat.CHANNEL_IN_MONO;
}
mLivePusher.setAudioCodecInfo(audioParam.getSampleRate(), audioParam.getNumChannels());
inputSamples = mLivePusher.getInputSample() * 2;//in bytes: each 16-bit sample takes 2 bytes
int minBufferSize = AudioRecord.getMinBufferSize(audioParam.getSampleRate(),
channelConfig, audioParam.getAudioFormat()) * 2;
int bufferSizeInBytes = minBufferSize > inputSamples ? minBufferSize : inputSamples;
audioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC, audioParam.getSampleRate(),
channelConfig, audioParam.getAudioFormat(), bufferSizeInBytes);
}
public void startLive() {
isLiving = true;
executor.submit(new AudioTask());
}
public void stopLive() {
isLiving = false;
}
public void release() {
audioRecord.release();
}
class AudioTask implements Runnable {
@Override
public void run() {
audioRecord.startRecording();
byte[] bytes = new byte[inputSamples];
while (isLiving) {
if (!isMute) {
int len = audioRecord.read(bytes, 0, bytes.length);
if (len > 0) {
mLivePusher.pushAudio(bytes);
}
}
}
audioRecord.stop();
}
}
/**
 * Set mute on/off
 * @param isMute whether to mute the microphone
 */
public void setMute(boolean isMute) {
this.isMute = isMute;
}
}
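A note on the * 2 in the constructor above: the native getInputSamples() returns a count of 16-bit samples per encode call, so the byte buffer must be twice that. Assuming faac's usual 1024-sample AAC frame and 2 channels, that is 1024 × 2 = 2048 samples, i.e. a 2048 × 2 = 4096-byte read buffer.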

@ -0,0 +1,217 @@
package com.frank.live.stream;
import android.app.Activity;
import android.graphics.ImageFormat;
import android.hardware.Camera;
import android.view.Surface;
import android.view.SurfaceHolder;
import java.util.List;
public class CameraHelper implements SurfaceHolder.Callback, Camera.PreviewCallback {
private Activity mActivity;
private int mHeight;
private int mWidth;
private int mCameraId;
private Camera mCamera;
private byte[] buffer;
private SurfaceHolder mSurfaceHolder;
private Camera.PreviewCallback mPreviewCallback;
private int mRotation;
private OnChangedSizeListener mOnChangedSizeListener;
private byte[] bytes;
CameraHelper(Activity activity, int cameraId, int width, int height) {
mActivity = activity;
mCameraId = cameraId;
mWidth = width;
mHeight = height;
}
void switchCamera() {
if (mCameraId == Camera.CameraInfo.CAMERA_FACING_BACK) {
mCameraId = Camera.CameraInfo.CAMERA_FACING_FRONT;
} else {
mCameraId = Camera.CameraInfo.CAMERA_FACING_BACK;
}
stopPreview();
startPreview();
}
private void stopPreview() {
if (mCamera != null) {
//clear the preview-data callback
mCamera.setPreviewCallback(null);
//stop the preview
mCamera.stopPreview();
//release the camera
mCamera.release();
mCamera = null;
}
}
private void startPreview() {
try {
mCamera = Camera.open(mCameraId);
Camera.Parameters parameters = mCamera.getParameters();
parameters.setPreviewFormat(ImageFormat.NV21);
setPreviewSize(parameters);
setPreviewOrientation(parameters);
mCamera.setParameters(parameters);
buffer = new byte[mWidth * mHeight * 3 / 2];
bytes = new byte[buffer.length];
mCamera.addCallbackBuffer(buffer);
mCamera.setPreviewCallbackWithBuffer(this);
mCamera.setPreviewDisplay(mSurfaceHolder);
mCamera.startPreview();
} catch (Exception ex) {
ex.printStackTrace();
}
}
private void setPreviewOrientation(Camera.Parameters parameters) {
Camera.CameraInfo info = new Camera.CameraInfo();
Camera.getCameraInfo(mCameraId, info);
mRotation = mActivity.getWindowManager().getDefaultDisplay().getRotation();
int degrees = 0;
switch (mRotation) {
case Surface.ROTATION_0:
degrees = 0;
mOnChangedSizeListener.onChanged(mHeight, mWidth);
break;
case Surface.ROTATION_90:
degrees = 90;
mOnChangedSizeListener.onChanged(mWidth, mHeight);
break;
case Surface.ROTATION_270:
degrees = 270;
mOnChangedSizeListener.onChanged(mWidth, mHeight);
break;
}
int result;
if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
result = (info.orientation + degrees) % 360;
result = (360 - result) % 360; // compensate the mirror
} else { // back-facing
result = (info.orientation - degrees + 360) % 360;
}
mCamera.setDisplayOrientation(result);
}
private void setPreviewSize(Camera.Parameters parameters) {
List<Camera.Size> supportedPreviewSizes = parameters.getSupportedPreviewSizes();
Camera.Size size = supportedPreviewSizes.get(0);
//pick the supported preview size closest to the requested one
int m = Math.abs(size.height * size.width - mWidth * mHeight);
supportedPreviewSizes.remove(0);
for (Camera.Size next : supportedPreviewSizes) {
int n = Math.abs(next.height * next.width - mWidth * mHeight);
if (n < m) {
m = n;
size = next;
}
}
mWidth = size.width;
mHeight = size.height;
parameters.setPreviewSize(mWidth, mHeight);
}
void setPreviewDisplay(SurfaceHolder surfaceHolder) {
mSurfaceHolder = surfaceHolder;
mSurfaceHolder.addCallback(this);
}
void setPreviewCallback(Camera.PreviewCallback previewCallback) {
mPreviewCallback = previewCallback;
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
}
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
stopPreview();
startPreview();
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
stopPreview();
}
@Override
public void onPreviewFrame(byte[] data, Camera camera) {
switch (mRotation) {
case Surface.ROTATION_0:
rotation90(data);
break;
case Surface.ROTATION_90:
break;
case Surface.ROTATION_270:
break;
}
mPreviewCallback.onPreviewFrame(bytes, camera);
camera.addCallbackBuffer(buffer);
}
private void rotation90(byte[] data) {
int index = 0;
int ySize = mWidth * mHeight;
int uvHeight = mHeight / 2;
//back camera: rotate 90 degrees clockwise
if (mCameraId == Camera.CameraInfo.CAMERA_FACING_BACK) {
for (int i = 0; i < mWidth; i++) {
for (int j = mHeight - 1; j >= 0; j--) {
bytes[index++] = data[mWidth * j + i];
}
}
for (int i = 0; i < mWidth; i += 2) {
for (int j = uvHeight - 1; j >= 0; j--) {
// v
bytes[index++] = data[ySize + mWidth * j + i];
// u
bytes[index++] = data[ySize + mWidth * j + i + 1];
}
}
} else {
//front camera: rotate 90 degrees counter-clockwise
for (int i = 0; i < mWidth; i++) {
int nPos = mWidth - 1;
for (int j = 0; j < mHeight; j++) {
bytes[index++] = data[nPos - i];
nPos += mWidth;
}
}
//U and V
for (int i = 0; i < mWidth; i += 2) {
int nPos = ySize + mWidth - 1;
for (int j = 0; j < uvHeight; j++) {
bytes[index++] = data[nPos - i - 1];
bytes[index++] = data[nPos - i];
nPos += mWidth;
}
}
}
}
void setOnChangedSizeListener(OnChangedSizeListener listener) {
mOnChangedSizeListener = listener;
}
public void release() {
mSurfaceHolder.removeCallback(this);
stopPreview();
}
public interface OnChangedSizeListener {
void onChanged(int w, int h);
}
}
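For readers following rotation90() above: NV21 stores a WxH frame as W*H luma bytes followed by H/2 rows of interleaved chroma, V first then U (the reverse of NV12), which is why the rotation loops copy the VU plane in pairs. A small C++ layout sketch with hypothetical index helpers, not part of this commit:

#include <cstdint>
// byte offsets of the Y, V and U samples for pixel (x, y) in an NV21 frame
inline int yIndex(int w, int x, int y) { return y * w + x; }
inline int vIndex(int w, int h, int x, int y) { return w * h + (y / 2) * w + (x & ~1); }
inline int uIndex(int w, int h, int x, int y) { return vIndex(w, h, x, y) + 1; }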

@ -0,0 +1,66 @@
package com.frank.live.stream;
import android.app.Activity;
import android.hardware.Camera;
import android.view.SurfaceHolder;
import com.frank.live.LivePusherNew;
public class VideoStream implements Camera.PreviewCallback, CameraHelper.OnChangedSizeListener {
private LivePusherNew mLivePusher;
private CameraHelper cameraHelper;
private int mBitrate;
private int mFps;
private boolean isLiving;
public VideoStream(LivePusherNew livePusher, Activity activity, int width, int height, int bitrate, int fps, int cameraId) {
mLivePusher = livePusher;
mBitrate = bitrate;
mFps = fps;
cameraHelper = new CameraHelper(activity, cameraId, width, height);
cameraHelper.setPreviewCallback(this);
cameraHelper.setOnChangedSizeListener(this);
}
public void setPreviewDisplay(SurfaceHolder surfaceHolder) {
cameraHelper.setPreviewDisplay(surfaceHolder);
}
/**
* nv21摄像头数据
*
* @param data data
* @param camera camera
*/
@Override
public void onPreviewFrame(byte[] data, Camera camera) {
if (isLiving) {
mLivePusher.pushVideo(data);
}
}
public void switchCamera() {
cameraHelper.switchCamera();
}
@Override
public void onChanged(int w, int h) {
mLivePusher.setVideoCodecInfo(w, h, mFps, mBitrate);
}
public void startLive() {
isLiving = true;
}
public void stopLive() {
isLiving = false;
}
public void release() {
cameraHelper.release();
}
}

@ -43,7 +43,7 @@
<!-- 实时推流直播 -->
<activity
android:name=".activity.LiveActivity"
android:screenOrientation="landscape" />
android:screenOrientation="portrait" />
<!-- 滤镜特效 -->
<activity
android:name=".activity.FilterActivity"

@ -7,18 +7,18 @@ import android.os.Handler;
import android.os.Message;
import android.text.TextUtils;
import android.util.Log;
import android.view.TextureView;
import android.view.SurfaceView;
import android.view.View;
import android.widget.CompoundButton;
import android.widget.Toast;
import android.widget.ToggleButton;
import com.frank.ffmpeg.R;
import com.frank.live.Push.LivePusher;
import com.frank.live.camera2.Camera2Helper;
import com.frank.live.listener.LiveStateChangeListener;
import com.frank.live.param.AudioParam;
import com.frank.live.param.VideoParam;
import com.frank.live.LivePusherNew;
/**
* Live streaming with H.264 over RTMP
@ -30,8 +30,8 @@ public class LiveActivity extends BaseActivity implements CompoundButton.OnCheck
private final static String TAG = LiveActivity.class.getSimpleName();
private final static String LIVE_URL = "rtmp://192.168.1.3/live/stream";
private final static int MSG_ERROR = 100;
private TextureView textureView;
private LivePusher livePusher;
private SurfaceView textureView;
private LivePusherNew livePusher;
@SuppressLint("HandlerLeak")
private Handler mHandler = new Handler() {
@Override
@ -70,7 +70,7 @@ public class LiveActivity extends BaseActivity implements CompoundButton.OnCheck
private void initPusher() {
int width = 640;//preview resolution
int height = 480;
int videoBitRate = 400;//kb/s
int videoBitRate = 800_000;//bps (the native layer divides by 1000 to get kbps)
int videoFrameRate = 10;//fps
VideoParam videoParam = new VideoParam(width, height,
Integer.valueOf(Camera2Helper.CAMERA_ID_BACK), videoBitRate, videoFrameRate);
@ -79,9 +79,8 @@ public class LiveActivity extends BaseActivity implements CompoundButton.OnCheck
int audioFormat = AudioFormat.ENCODING_PCM_16BIT;//16-bit PCM
int numChannels = 2;//number of channels
AudioParam audioParam = new AudioParam(sampleRate, channelConfig, audioFormat, numChannels);
livePusher = new LivePusher(textureView, videoParam, audioParam, this);
//TODO: temporarily disable audio push
livePusher.setMute(true);
livePusher = new LivePusherNew(this, videoParam, audioParam);
livePusher.setPreviewDisplay(textureView.getHolder());
}
@Override
@ -112,9 +111,9 @@ public class LiveActivity extends BaseActivity implements CompoundButton.OnCheck
@Override
protected void onDestroy() {
super.onDestroy();
// if (livePusher != null) {
// livePusher.release();
// }
if (livePusher != null) {
livePusher.release();
}
}
@Override

@ -6,7 +6,7 @@
android:layout_height="match_parent"
tools:context="com.frank.ffmpeg.activity.LiveActivity">
<TextureView
<SurfaceView
android:id="@+id/surface_camera"
android:layout_width="match_parent"
android:layout_height="match_parent" />
