Audio decoding and playback (AudioTrack and OpenSL)

FFmpeg audio decoding, with playback via both AudioTrack and OpenSL ES
pull/107/head
frank 7 years ago
parent 709b994c7d
commit 17b18af513
  1. app/CMakeLists.txt (11 lines changed)
  2. app/src/main/cpp/audio_player.c (154 lines changed)
  3. app/src/main/cpp/openSL_audio_player.c (319 lines changed)
  4. app/src/main/java/com/frank/ffmpeg/AudioPlayer.java (45 lines changed)
  5. app/src/main/java/com/frank/ffmpeg/FFmpegCmd.java (2 lines changed)
  6. app/src/main/java/com/frank/ffmpeg/activity/AudioHandleActivity.java (30 lines changed)
  7. app/src/main/res/layout/activity_audio_handle.xml (18 lines changed)
  8. app/src/main/res/values/strings.xml (2 lines changed)

@@ -11,7 +11,7 @@ cmake_minimum_required(VERSION 3.4.1)
# Gradle automatically packages shared libraries with your APK.
add_library( # Sets the name of the library.
audio-handle
media-handle
# Sets the library as a shared library.
SHARED
@@ -21,7 +21,9 @@ add_library( # Sets the name of the library.
src/main/cpp/cmdutils.c
src/main/cpp/ffmpeg.c
src/main/cpp/ffmpeg_filter.c
src/main/cpp/ffmpeg_opt.c)
src/main/cpp/ffmpeg_opt.c
src/main/cpp/audio_player.c
src/main/cpp/openSL_audio_player.c)
add_library( ffmpeg
SHARED
@@ -38,8 +40,11 @@ find_library( # Sets the name of the path variable.
log )
target_link_libraries( # Specifies the target library.
audio-handle
media-handle
ffmpeg
-landroid #native_window
-ljnigraphics #bitmap
-lOpenSLES #openSLES
# Links the target library to the log library
# included in the NDK.
${log-lib} )

@@ -0,0 +1,154 @@
//
// Created by frank on 2018/2/1.
//
#include <jni.h>
#include <stdlib.h>
#include <string.h>
#include <unistd.h>
//demuxing (container format)
#include "libavformat/avformat.h"
//decoding
#include "libavcodec/avcodec.h"
//scaling
#include "libswscale/swscale.h"
//resampling
#include "libswresample/swresample.h"
#include <android/log.h>
#define TAG "MediaPlayer"
#define LOGI(FORMAT,...) __android_log_print(ANDROID_LOG_INFO, TAG, FORMAT, ##__VA_ARGS__);
#define LOGE(FORMAT,...) __android_log_print(ANDROID_LOG_ERROR, TAG, FORMAT, ##__VA_ARGS__);
#define MAX_AUDIO_FRAME_SIZE 48000 * 4
JNIEXPORT void JNICALL Java_com_frank_ffmpeg_AudioPlayer_play
(JNIEnv *env, jobject jthiz, jstring input_jstr){
const char* input_cstr = (*env)->GetStringUTFChars(env,input_jstr,NULL);
LOGI("input_cstr=%s", input_cstr);
//register all FFmpeg components
av_register_all();
AVFormatContext *pFormatCtx = avformat_alloc_context();
//open the audio file
if(avformat_open_input(&pFormatCtx,input_cstr,NULL,NULL) != 0){
LOGI("%s","Couldn't open the audio file");
return;
}
//retrieve the input stream information
if(avformat_find_stream_info(pFormatCtx,NULL) < 0){
LOGI("%s","Couldn't find stream information");
return;
}
//find the index of the audio stream
int i = 0, audio_stream_idx = -1;
for(; i < pFormatCtx->nb_streams;i++){
if(pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_AUDIO){
audio_stream_idx = i;
break;
}
}
//find the audio decoder
AVCodecContext *codecCtx = pFormatCtx->streams[audio_stream_idx]->codec;
AVCodec *codec = avcodec_find_decoder(codecCtx->codec_id);
if(codec == NULL){
LOGI("%s","Couldn't find the decoder");
return;
}
//open the decoder
if(avcodec_open2(codecCtx,codec,NULL) < 0){
LOGI("%s","Couldn't open the decoder");
return;
}
//compressed (encoded) packet
AVPacket *packet = (AVPacket *)av_malloc(sizeof(AVPacket));
//decoded (raw) frame
AVFrame *frame = av_frame_alloc();
//convert frames to 16-bit PCM: unify the output sample format and sample rate
SwrContext *swrCtx = swr_alloc();
//input sample format
enum AVSampleFormat in_sample_fmt = codecCtx->sample_fmt;
//output sample format: 16-bit PCM
enum AVSampleFormat out_sample_fmt = AV_SAMPLE_FMT_S16;
//input sample rate
int in_sample_rate = codecCtx->sample_rate;
//output sample rate
int out_sample_rate = in_sample_rate;
//input channel layout (2 channels, stereo by default)
uint64_t in_ch_layout = codecCtx->channel_layout;
//output channel layout (stereo)
uint64_t out_ch_layout = AV_CH_LAYOUT_STEREO;
swr_alloc_set_opts(swrCtx,
out_ch_layout,out_sample_fmt,out_sample_rate,
in_ch_layout,in_sample_fmt,in_sample_rate,
0, NULL);
swr_init(swrCtx);
//number of output channels
int out_channel_nb = av_get_channel_layout_nb_channels(out_ch_layout);
jclass player_class = (*env)->GetObjectClass(env,jthiz);
if(!player_class){
LOGE("player_class not found...");
}
//get the createAudioTrack() method that builds the AudioTrack object
jmethodID audio_track_method = (*env)->GetMethodID(env,player_class,"createAudioTrack","(II)Landroid/media/AudioTrack;");
if(!audio_track_method){
LOGE("audio_track_method not found...");
}
jobject audio_track = (*env)->CallObjectMethod(env,jthiz,audio_track_method,out_sample_rate,out_channel_nb);
//call the play() method
jclass audio_track_class = (*env)->GetObjectClass(env,audio_track);
jmethodID audio_track_play_mid = (*env)->GetMethodID(env,audio_track_class,"play","()V");
(*env)->CallVoidMethod(env,audio_track,audio_track_play_mid);
//get the write() method
jmethodID audio_track_write_mid = (*env)->GetMethodID(env,audio_track_class,"write","([BII)I");
//output buffer for 16-bit PCM data
uint8_t *out_buffer = (uint8_t *)av_malloc(MAX_AUDIO_FRAME_SIZE);
int got_frame = 0,index = 0, ret;
//keep reading encoded packets
while(av_read_frame(pFormatCtx,packet) >= 0){
//only decode packets from the audio stream
if(packet->stream_index == audio_stream_idx){
//decode the packet
ret = avcodec_decode_audio4(codecCtx,frame,&got_frame,packet);
if(ret < 0){
break;
}
//a frame was decoded successfully
if(got_frame > 0){
LOGI("decode frame count=%d",index++);
//convert the audio format
swr_convert(swrCtx, &out_buffer, MAX_AUDIO_FRAME_SIZE,(const uint8_t **)frame->data,frame->nb_samples);
int out_buffer_size = av_samples_get_buffer_size(NULL, out_channel_nb,
frame->nb_samples, out_sample_fmt, 1);
jbyteArray audio_sample_array = (*env)->NewByteArray(env,out_buffer_size);
jbyte* sample_byte_array = (*env)->GetByteArrayElements(env,audio_sample_array,NULL);
//copy the converted PCM into the Java byte array
memcpy(sample_byte_array, out_buffer, (size_t) out_buffer_size);
//commit and release the array elements
(*env)->ReleaseByteArrayElements(env,audio_sample_array,sample_byte_array,0);
//call AudioTrack.write() to play the samples
(*env)->CallIntMethod(env,audio_track,audio_track_write_mid,
audio_sample_array,0,out_buffer_size);
//release the local reference
(*env)->DeleteLocalRef(env,audio_sample_array);
usleep(1000 * 16);
}
}
av_free_packet(packet);
}
LOGI("decode audio finish");
av_frame_free(&frame);
av_free(out_buffer);
swr_free(&swrCtx);
avcodec_close(codecCtx);
avformat_close_input(&pFormatCtx);
(*env)->ReleaseStringUTFChars(env,input_jstr,input_cstr);
}
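A note on the swr_convert() call above: its third argument is the output buffer capacity in samples per channel, while MAX_AUDIO_FRAME_SIZE is a byte count, so the two units do not match (it only works because each decoded frame is far smaller than the buffer). A minimal sketch of a size-consistent call, reusing the variables already defined in this function:

//capacity of out_buffer expressed in samples per channel
int max_out_samples = MAX_AUDIO_FRAME_SIZE / (out_channel_nb * av_get_bytes_per_sample(out_sample_fmt));
int converted = swr_convert(swrCtx, &out_buffer, max_out_samples,
(const uint8_t **) frame->data, frame->nb_samples);
//size the Java byte array from the number of samples actually converted
int out_buffer_size = converted * out_channel_nb * av_get_bytes_per_sample(out_sample_fmt);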

@@ -0,0 +1,319 @@
//
// Created by frank on 2018/2/1.
//
#include <jni.h>
#include <string.h>
#include "libavcodec/avcodec.h"
#include "libavformat/avformat.h"
#include "libswscale/swscale.h"
#include "libswresample/swresample.h"
#include "libavutil/samplefmt.h"
#include <SLES/OpenSLES.h>
#include <SLES/OpenSLES_Android.h>
#include <android/log.h>
#include <libavutil/opt.h>
#define TAG "OpenSLPlayer"
#define LOGI(FORMAT,...) __android_log_print(ANDROID_LOG_INFO, TAG, FORMAT,##__VA_ARGS__);
#define LOGE(FORMAT,...) __android_log_print(ANDROID_LOG_ERROR, TAG, FORMAT,##__VA_ARGS__);
//engine interfaces
SLObjectItf engineObject = NULL;
SLEngineItf engineEngine;
//output mix interfaces
SLObjectItf outputMixObject = NULL;
SLEnvironmentalReverbItf outputMixEnvironmentalReverb = NULL;
//buffer queue player interfaces
SLObjectItf bqPlayerObject = NULL;
SLPlayItf bqPlayerPlay;
SLAndroidSimpleBufferQueueItf bqPlayerBufferQueue;
SLEffectSendItf bqPlayerEffectSend;
SLVolumeItf bqPlayerVolume;
//reverb effect settings
const SLEnvironmentalReverbSettings reverbSettings = SL_I3DL2_ENVIRONMENT_PRESET_STONECORRIDOR;
void *buffer;
size_t bufferSize;
uint8_t *outputBuffer;
size_t outputBufferSize;
//FFmpeg state
AVPacket packet;
int audioStream;
AVFrame *aFrame;
SwrContext *swr;
AVFormatContext *aFormatCtx;
AVCodecContext *aCodecCtx;
int frame_count = 0;
int createAudioPlayer(int *rate, int *channel, const char *file_name) ;
// release resources
int releaseAudioPlayer();
// fetch PCM data; invoked from the buffer queue callback
int getPCM(void **pcm, size_t *pcmSize) ;
//buffer queue playback callback
void bqPlayerCallback(SLAndroidSimpleBufferQueueItf bufferQueueItf, void *context) {
bufferSize = 0;
getPCM(&buffer, &bufferSize);
//if the buffer is non-empty, enqueue it for playback
if (NULL != buffer && 0 != bufferSize) {
SLresult result;
result = (*bqPlayerBufferQueue)->Enqueue(bqPlayerBufferQueue, buffer, bufferSize);
//SLresult is unsigned, so compare against SL_RESULT_SUCCESS rather than < 0
if(result != SL_RESULT_SUCCESS){
LOGE("Enqueue error...");
} else{
LOGI("decode frame count=%d", frame_count++);
}
}
}
//create the OpenSL ES engine
void createEngine() {
SLresult result;
//create the engine object
result = slCreateEngine(&engineObject, 0, NULL, 0, NULL, NULL);
LOGI("slCreateEngine=%d", result);
result = (*engineObject)->Realize(engineObject, SL_BOOLEAN_FALSE);
LOGI("engineObject->Realize=%d", result);
//get the engine interface
result = (*engineObject)->GetInterface(engineObject, SL_IID_ENGINE, &engineEngine);
LOGI("engineObject->GetInterface=%d", result);
//create the output mix
result = (*engineEngine)->CreateOutputMix(engineEngine, &outputMixObject, 0, 0, 0);
LOGI("CreateOutputMix=%d", result);
//realize the output mix
result = (*outputMixObject)->Realize(outputMixObject, SL_BOOLEAN_FALSE);
LOGI("outputMixObject->Realize=%d", result);
//get the environmental reverb interface
result = (*outputMixObject)->GetInterface(outputMixObject, SL_IID_ENVIRONMENTALREVERB,
&outputMixEnvironmentalReverb);
LOGI("outputMixObject->GetInterface=%d", result);
if (SL_RESULT_SUCCESS == result) {
result = (*outputMixEnvironmentalReverb)->SetEnvironmentalReverbProperties(
outputMixEnvironmentalReverb, &reverbSettings);
}
LOGI("SetEnvironmentalReverbProperties=%d", result);
}
//create an audio player backed by a buffer queue
void createBufferQueueAudioPlayer(int rate, int channel, int bitsPerSample) {
SLresult result;
//configure the audio source
SLDataLocator_AndroidSimpleBufferQueue buffer_queue = {SL_DATALOCATOR_ANDROIDSIMPLEBUFFERQUEUE, 2};
SLDataFormat_PCM format_pcm;
format_pcm.formatType = SL_DATAFORMAT_PCM;
format_pcm.numChannels = (SLuint32) channel;
format_pcm.bitsPerSample = (SLuint32) bitsPerSample;
format_pcm.samplesPerSec = (SLuint32) (rate * 1000);
format_pcm.containerSize = 16;
if (channel == 2)
format_pcm.channelMask = SL_SPEAKER_FRONT_LEFT | SL_SPEAKER_FRONT_RIGHT;
else
format_pcm.channelMask = SL_SPEAKER_FRONT_CENTER;
format_pcm.endianness = SL_BYTEORDER_LITTLEENDIAN;
SLDataSource audioSrc = {&buffer_queue, &format_pcm};
//configure the audio sink (the output mix)
SLDataLocator_OutputMix loc_outmix = {SL_DATALOCATOR_OUTPUTMIX, outputMixObject};
SLDataSink audioSnk = {&loc_outmix, NULL};
//create the audio player
const SLInterfaceID ids[3] = {SL_IID_BUFFERQUEUE, SL_IID_EFFECTSEND, SL_IID_VOLUME};
const SLboolean req[3] = {SL_BOOLEAN_TRUE, SL_BOOLEAN_TRUE, SL_BOOLEAN_TRUE};
result = (*engineEngine)->CreateAudioPlayer(engineEngine, &bqPlayerObject, &audioSrc, &audioSnk,
3, ids, req);
LOGI("CreateAudioPlayer=%d", result);
//realize the player
result = (*bqPlayerObject)->Realize(bqPlayerObject, SL_BOOLEAN_FALSE);
LOGI("bqPlayerObject Realize=%d", result);
//get the play interface
result = (*bqPlayerObject)->GetInterface(bqPlayerObject, SL_IID_PLAY, &bqPlayerPlay);
LOGI("GetInterface bqPlayerPlay=%d", result);
//get the buffer queue interface
result = (*bqPlayerObject)->GetInterface(bqPlayerObject, SL_IID_BUFFERQUEUE,
&bqPlayerBufferQueue);
LOGI("GetInterface bqPlayerBufferQueue=%d", result);
//register the buffer queue callback
result = (*bqPlayerBufferQueue)->RegisterCallback(bqPlayerBufferQueue, bqPlayerCallback, NULL);
LOGI("RegisterCallback=%d", result);
//get the effect send interface
result = (*bqPlayerObject)->GetInterface(bqPlayerObject, SL_IID_EFFECTSEND,
&bqPlayerEffectSend);
LOGI("GetInterface effect=%d", result);
//get the volume interface
result = (*bqPlayerObject)->GetInterface(bqPlayerObject, SL_IID_VOLUME, &bqPlayerVolume);
LOGI("GetInterface volume=%d", result);
//set the player to the playing state
result = (*bqPlayerPlay)->SetPlayState(bqPlayerPlay, SL_PLAYSTATE_PLAYING);
LOGI("SetPlayState=%d", result);
}
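The setup above logs every SLresult but keeps going even after a failure. A minimal sketch of an early-return guard, assuming only the LOGE macro and the OpenSL ES types already used in this file:

//sketch: abort player setup as soon as an OpenSL ES call fails
#define CHECK_SL(result, what) \
if ((result) != SL_RESULT_SUCCESS) { LOGE("%s failed, result=%d", what, (int)(result)); return; }
//usage, for example:
//result = (*bqPlayerObject)->Realize(bqPlayerObject, SL_BOOLEAN_FALSE);
//CHECK_SL(result, "bqPlayerObject Realize");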
int createAudioPlayer(int *rate, int *channel, const char *file_name) {
//register all FFmpeg components
av_register_all();
aFormatCtx = avformat_alloc_context();
//open the audio file
if (avformat_open_input(&aFormatCtx, file_name, NULL, NULL) != 0) {
LOGE("Couldn't open file:%s\n", file_name);
return -1; // Couldn't open file
}
//retrieve stream information
if (avformat_find_stream_info(aFormatCtx, NULL) < 0) {
LOGE("Couldn't find stream information.");
return -1;
}
//find the audio stream
int i;
audioStream = -1;
for (i = 0; i < aFormatCtx->nb_streams; i++) {
if (aFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_AUDIO &&
audioStream < 0) {
audioStream = i;
}
}
if (audioStream == -1) {
LOGE("Couldn't find audio stream!");
return -1;
}
//get the decoder context
aCodecCtx = aFormatCtx->streams[audioStream]->codec;
//find the audio decoder
AVCodec *aCodec = avcodec_find_decoder(aCodecCtx->codec_id);
if (!aCodec) {
fprintf(stderr, "Unsupported codec!\n");
return -1;
}
//open the decoder
if (avcodec_open2(aCodecCtx, aCodec, NULL) < 0) {
LOGE("Could not open codec.");
return -1;
}
aFrame = av_frame_alloc();
// set up format conversion (resample to 16-bit interleaved PCM)
swr = swr_alloc();
av_opt_set_int(swr, "in_channel_layout", aCodecCtx->channel_layout, 0);
av_opt_set_int(swr, "out_channel_layout", aCodecCtx->channel_layout, 0);
av_opt_set_int(swr, "in_sample_rate", aCodecCtx->sample_rate, 0);
av_opt_set_int(swr, "out_sample_rate", aCodecCtx->sample_rate, 0);
av_opt_set_sample_fmt(swr, "in_sample_fmt", aCodecCtx->sample_fmt, 0);
av_opt_set_sample_fmt(swr, "out_sample_fmt", AV_SAMPLE_FMT_S16, 0);
swr_init(swr);
// allocate the PCM output buffer
outputBufferSize = 8196;
outputBuffer = (uint8_t *) malloc(sizeof(uint8_t) * outputBufferSize);
// return the sample rate and channel count to the caller
*rate = aCodecCtx->sample_rate;
*channel = aCodecCtx->channels;
return 0;
}
// fetch PCM data; invoked repeatedly from the buffer queue callback
int getPCM(void **pcm, size_t *pcmSize) {
while (av_read_frame(aFormatCtx, &packet) >= 0) {
int frameFinished = 0;
//only handle packets from the audio stream
if (packet.stream_index == audioStream) {
avcodec_decode_audio4(aCodecCtx, aFrame, &frameFinished, &packet);
//a full frame has been decoded
if (frameFinished) {
// data_size is the number of bytes the decoded audio occupies
int data_size = av_samples_get_buffer_size(
aFrame->linesize, aCodecCtx->channels,
aFrame->nb_samples, aCodecCtx->sample_fmt, 1);
if (data_size > outputBufferSize) {
outputBufferSize = (size_t) data_size;
outputBuffer = (uint8_t *) realloc(outputBuffer, sizeof(uint8_t) * outputBufferSize);
}
// convert the audio to 16-bit interleaved PCM
swr_convert(swr, &outputBuffer, aFrame->nb_samples,
(uint8_t const **) (aFrame->extended_data),
aFrame->nb_samples);
// return the PCM data
*pcm = outputBuffer;
*pcmSize = (size_t) data_size;
return 0;
}
}
}
return -1;
}
// release resources
int releaseAudioPlayer() {
av_packet_unref(&packet);
av_free(outputBuffer);
av_free(aFrame);
avcodec_close(aCodecCtx);
avformat_close_input(&aFormatCtx);
return 0;
}
JNIEXPORT void JNICALL Java_com_frank_ffmpeg_AudioPlayer_playAudio
(JNIEnv * env, jobject thiz, jstring filePath) {
int rate, channel;
const char *file_name = (*env)->GetStringUTFChars(env, filePath, NULL);
LOGI("file_name=%s", file_name);
// create the audio decoder
createAudioPlayer(&rate, &channel, file_name);
// create the playback engine
createEngine();
// create the buffer queue audio player
createBufferQueueAudioPlayer(rate, channel, SL_PCMSAMPLEFORMAT_FIXED_16);
// kick off playback by invoking the callback once
bqPlayerCallback(bqPlayerBufferQueue, NULL);
}
//stop playback and release resources
JNIEXPORT jint JNICALL Java_com_frank_ffmpeg_AudioPlayer_stop
(JNIEnv * env, jobject thiz) {
if (bqPlayerObject != NULL) {
(*bqPlayerObject)->Destroy(bqPlayerObject);
bqPlayerObject = NULL;
bqPlayerPlay = NULL;
bqPlayerBufferQueue = NULL;
bqPlayerEffectSend = NULL;
bqPlayerVolume = NULL;
}
if (outputMixObject != NULL) {
(*outputMixObject)->Destroy(outputMixObject);
outputMixObject = NULL;
outputMixEnvironmentalReverb = NULL;
}
if (engineObject != NULL) {
(*engineObject)->Destroy(engineObject);
engineObject = NULL;
engineEngine = NULL;
}
// release decoder resources
releaseAudioPlayer();
return 0;
}
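In getPCM() above, pcmSize is taken from data_size, which is computed with the decoder's own sample format (often planar float), while the buffer handed to OpenSL ES holds 16-bit interleaved PCM produced by swr_convert(). A minimal sketch that sizes the chunk from the converter's return value instead, assuming the realloc above has already made outputBuffer large enough:

int converted = swr_convert(swr, &outputBuffer, aFrame->nb_samples,
(const uint8_t **) aFrame->extended_data, aFrame->nb_samples);
if (converted > 0) {
//bytes actually produced: samples * channels * bytes per S16 sample
*pcm = outputBuffer;
*pcmSize = (size_t) converted * aCodecCtx->channels * av_get_bytes_per_sample(AV_SAMPLE_FMT_S16);
return 0;
}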

@@ -0,0 +1,45 @@
package com.frank.ffmpeg;
import android.media.AudioFormat;
import android.media.AudioManager;
import android.media.AudioTrack;
/**
* Audio player
* Created by frank on 2018/2/1.
*/
public class AudioPlayer {
static {
System.loadLibrary("media-handle");
}
//decode and play with AudioTrack
public native void play(String audioPath);
//decode and play with OpenSL ES
public native void playAudio(String audioPath);
//stop OpenSL ES playback and release resources
public native void stop(String audioPath);
/**
* Create an AudioTrack instance
* @param sampleRate the sample rate
* @param channels the number of channels
* @return AudioTrack
*/
public AudioTrack createAudioTrack(int sampleRate, int channels){
int audioFormat = AudioFormat.ENCODING_PCM_16BIT;
int channelConfig;
if(channels == 1){
channelConfig = AudioFormat.CHANNEL_OUT_MONO;
}else if(channels == 2){
channelConfig = AudioFormat.CHANNEL_OUT_STEREO;
}else{
channelConfig = AudioFormat.CHANNEL_OUT_STEREO;
}
int bufferSizeInBytes = AudioTrack.getMinBufferSize(sampleRate, channelConfig, audioFormat);
return new AudioTrack(AudioManager.STREAM_MUSIC, sampleRate, channelConfig, audioFormat,
bufferSizeInBytes, AudioTrack.MODE_STREAM);
}
}

@@ -8,7 +8,7 @@ public class FFmpegCmd {
}
static{
System.loadLibrary("audio-handle");
System.loadLibrary("media-handle");
}
//run the native audio processing on a worker thread

@@ -10,6 +10,8 @@ import android.util.Log;
import android.view.View;
import android.widget.ProgressBar;
import java.io.File;
import com.frank.ffmpeg.AudioPlayer;
import com.frank.ffmpeg.FFmpegCmd;
import com.frank.ffmpeg.R;
import com.frank.ffmpeg.util.FFmpegUtil;
@@ -63,6 +65,8 @@ public class AudioHandleActivity extends AppCompatActivity implements View.OnCli
findViewById(R.id.btn_cut).setOnClickListener(this);
findViewById(R.id.btn_concat).setOnClickListener(this);
findViewById(R.id.btn_mix).setOnClickListener(this);
findViewById(R.id.btn_play_audio).setOnClickListener(this);
findViewById(R.id.btn_play_opensl).setOnClickListener(this);
}
private void setVisible() {
@@ -70,6 +74,8 @@ public class AudioHandleActivity extends AppCompatActivity implements View.OnCli
findViewById(R.id.btn_cut).setVisibility(View.VISIBLE);
findViewById(R.id.btn_concat).setVisibility(View.VISIBLE);
findViewById(R.id.btn_mix).setVisibility(View.VISIBLE);
findViewById(R.id.btn_play_audio).setVisibility(View.VISIBLE);
findViewById(R.id.btn_play_opensl).setVisibility(View.VISIBLE);
}
private void setGone() {
@@ -77,6 +83,8 @@ public class AudioHandleActivity extends AppCompatActivity implements View.OnCli
findViewById(R.id.btn_cut).setVisibility(View.GONE);
findViewById(R.id.btn_concat).setVisibility(View.GONE);
findViewById(R.id.btn_mix).setVisibility(View.GONE);
findViewById(R.id.btn_play_audio).setVisibility(View.GONE);
findViewById(R.id.btn_play_opensl).setVisibility(View.GONE);
}
@Override
@@ -95,6 +103,12 @@ public class AudioHandleActivity extends AppCompatActivity implements View.OnCli
case R.id.btn_mix:
handleType = 3;
break;
case R.id.btn_play_audio:
handleType = 4;
break;
case R.id.btn_play_opensl:
handleType = 5;
break;
default:
handleType = 0;
break;
@@ -125,6 +139,22 @@ public class AudioHandleActivity extends AppCompatActivity implements View.OnCli
String mixFile = PATH + File.separator + "mix.aac";
commandLine = FFmpegUtil.mixAudio(srcFile, appendFile, mixFile);
break;
case 4://decode and play (AudioTrack)
new Thread(new Runnable() {
@Override
public void run() {
new AudioPlayer().play(srcFile);
}
}).start();
return;
case 5://decode and play (OpenSL ES)
new Thread(new Runnable() {
@Override
public void run() {
new AudioPlayer().playAudio(srcFile);
}
}).start();
return;
default:
break;
}

@@ -48,4 +48,22 @@
android:layout_centerInParent="true"
android:visibility="gone"/>
<Button
android:id="@+id/btn_play_audio"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="@string/audio_play"
android:layout_below="@+id/btn_concat"
android:layout_marginTop="10dp"
android:layout_centerHorizontal="true"/>
<Button
android:id="@+id/btn_play_opensl"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="@string/audio_opensl"
android:layout_below="@+id/btn_play_audio"
android:layout_marginTop="10dp"
android:layout_centerHorizontal="true"/>
</RelativeLayout>

@@ -4,6 +4,8 @@
<string name="audio_cut">Audio cutting</string>
<string name="audio_concat">Audio concatenation</string>
<string name="audio_mix">Audio mixing</string>
<string name="audio_play">Decode audio and play with AudioTrack</string>
<string name="audio_opensl">Decode audio and play with OpenSL</string>
<string name="audio_handle">Audio processing</string>
<string name="media_handle">Audio and video processing</string>
