start & add some translations

pull/107/head
nickzt 6 years ago
parent 513d2cf8db
commit f1f97c8947
  1. Live/src/main/AndroidManifest.xml (14)
  2. Live/src/main/cpp/AudioStream.cpp (14)
  3. Live/src/main/cpp/RtmpPusher.cpp (18)
  4. Live/src/main/cpp/VideoStream.cpp (8)
  5. Live/src/main/cpp/VideoStream.h (2)
  6. Live/src/main/java/com/frank/live/LivePusherNew.java (36)
  7. Live/src/main/java/com/frank/live/PushActivity.java (22)
  8. Live/src/main/java/com/frank/live/param/AudioParam.java (2)
  9. Live/src/main/java/com/frank/live/param/VideoParam.java (2)
  10. Live/src/main/java/com/frank/live/stream/AudioStream.java (4)
  11. Live/src/main/java/com/frank/live/stream/CameraHelper.java (2)
  12. Live/src/main/java/com/frank/live/util/YUVUtil.java (2)
  13. Live/src/main/res/layout/activity_push.xml (4)
  14. Live/src/main/res/values-en/strings.xml (7)
  15. Live/src/main/res/values-zh-rCN/strings.xml (7)
  16. Live/src/main/res/values/strings.xml (6)
  17. OnLive/src/main/java/com/frank/living/activity/MultiScreenActivity.java (12)
  18. README.md (18)
  19. app/src/main/AndroidManifest.xml (14)
  20. app/src/main/cpp/audio_player.c (46)
  21. app/src/main/cpp/ffmpeg_cmd.c (2)
  22. app/src/main/cpp/ffmpeg_pusher.cpp (6)
  23. app/src/main/cpp/media_player.c (74)
  24. app/src/main/cpp/openSL_audio_player.c (72)
  25. app/src/main/cpp/video_filter.c (84)
  26. app/src/main/cpp/video_player.c (36)
  27. app/src/main/java/com/frank/ffmpeg/AudioPlayer.java (8)
  28. app/src/main/java/com/frank/ffmpeg/FFmpegCmd.java (2)
  29. app/src/main/java/com/frank/ffmpeg/MediaPlayer.java (2)
  30. app/src/main/java/com/frank/ffmpeg/VideoPlayer.java (2)
  31. app/src/main/java/com/frank/ffmpeg/activity/AudioHandleActivity.java (22)
  32. app/src/main/java/com/frank/ffmpeg/activity/FilterActivity.java (28)
  33. app/src/main/java/com/frank/ffmpeg/activity/LiveActivity.java (4)
  34. app/src/main/java/com/frank/ffmpeg/activity/MainActivity.java (12)
  35. app/src/main/java/com/frank/ffmpeg/activity/MediaHandleActivity.java (16)
  36. app/src/main/java/com/frank/ffmpeg/activity/MediaPlayerActivity.java (2)
  37. app/src/main/java/com/frank/ffmpeg/activity/VideoHandleActivity.java (4)
  38. app/src/main/java/com/frank/ffmpeg/adapter/HorizontalAdapter.java (2)
  39. app/src/main/java/com/frank/ffmpeg/handler/FFmpegHandler.java (4)
  40. app/src/main/java/com/frank/ffmpeg/hardware/HardwareDecode.java (2)
  41. app/src/main/java/com/frank/ffmpeg/listener/OnHandleListener.java (2)
  42. app/src/main/java/com/frank/ffmpeg/listener/OnItemClickListener.java (2)
  43. app/src/main/java/com/frank/ffmpeg/util/FFmpegUtil.java (64)
  44. app/src/main/java/com/frank/ffmpeg/util/TimeUtil.java (4)
  45. app/src/main/res/drawable/btn.xml (4)
  46. app/src/main/res/drawable/btn_circle.xml (6)
  47. app/src/main/res/drawable/btn_point.xml (6)
  48. app/src/main/res/drawable/white_background.xml (4)
  49. app/src/main/res/values-en/strings.xml (75)
  50. app/src/main/res/values-zh-rCN/strings.xml (77)
  51. app/src/main/res/values/strings.xml (128)

@ -14,14 +14,14 @@
android:roundIcon="@mipmap/ic_launcher_round" android:roundIcon="@mipmap/ic_launcher_round"
android:supportsRtl="true" android:supportsRtl="true"
android:theme="@style/AppTheme" > android:theme="@style/AppTheme" >
<!--<activity android:name="com.frank.live.PushActivity"--> <activity android:name="com.frank.live.PushActivity"
<!--android:screenOrientation="landscape">--> android:screenOrientation="landscape">
<!--<intent-filter>--> <intent-filter>
<!--<action android:name="android.intent.action.MAIN" />--> <action android:name="android.intent.action.MAIN" />
<!--<category android:name="android.intent.category.LAUNCHER" />--> <category android:name="android.intent.category.LAUNCHER" />
<!--</intent-filter>--> </intent-filter>
<!--</activity>--> </activity>
</application> </application>
</manifest> </manifest>

@ -20,16 +20,16 @@ void AudioStream::setAudioCallback(AudioCallback audioCallback) {
} }
void AudioStream::setAudioEncInfo(int samplesInHZ, int channels) { void AudioStream::setAudioEncInfo(int samplesInHZ, int channels) {
//打开编码器 //Open the encoder
mChannels = channels; mChannels = channels;
//一次最大能输入编码器的样本数量 (一个样本是16位 2字节) //Maximum number of samples that can be fed to the encoder at once (one sample is 16 bits, 2 bytes)
//编码后的最大字节数 //Maximum number of bytes after encoding
audioCodec = faacEncOpen(static_cast<unsigned long>(samplesInHZ), audioCodec = faacEncOpen(static_cast<unsigned long>(samplesInHZ),
static_cast<unsigned int>(channels), static_cast<unsigned int>(channels),
&inputSamples, &inputSamples,
&maxOutputBytes); &maxOutputBytes);
//设置编码器参数 //Set encoder parameters
faacEncConfigurationPtr config = faacEncGetCurrentConfiguration(audioCodec); faacEncConfigurationPtr config = faacEncGetCurrentConfiguration(audioCodec);
//指定为 mpeg4 标准 //指定为 mpeg4 标准
config->mpegVersion = MPEG4; config->mpegVersion = MPEG4;
@ -37,11 +37,11 @@ void AudioStream::setAudioEncInfo(int samplesInHZ, int channels) {
config->aacObjectType = LOW; config->aacObjectType = LOW;
//16位 //16位
config->inputFormat = FAAC_INPUT_16BIT; config->inputFormat = FAAC_INPUT_16BIT;
// 编码出原始数据 // Output raw encoded data
config->outputFormat = 0; config->outputFormat = 0;
faacEncSetConfiguration(audioCodec, config); faacEncSetConfiguration(audioCodec, config);
//输出缓冲区 编码后的数据 用这个缓冲区来保存 //Output buffer: encoded data is stored in this buffer
buffer = new u_char[maxOutputBytes]; buffer = new u_char[maxOutputBytes];
} }
@ -74,7 +74,7 @@ RTMPPacket *AudioStream::getAudioTag() {
} }
void AudioStream::encodeData(int8_t *data) { void AudioStream::encodeData(int8_t *data) {
//返回编码后数据字节的长度 //Returns the length in bytes of the encoded data
int byteLen = faacEncEncode(audioCodec, reinterpret_cast<int32_t *>(data), int byteLen = faacEncEncode(audioCodec, reinterpret_cast<int32_t *>(data),
static_cast<unsigned int>(inputSamples), static_cast<unsigned int>(inputSamples),
buffer, buffer,
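For context, here is a compact sketch of the FAAC call sequence that setAudioEncInfo()/encodeData() above rely on (open, configure, encode). The 44100 Hz / 2-channel values, the standalone helper and the pcm buffer are illustrative placeholders, not the project's actual code.

```c
#include <faac.h>
#include <stdlib.h>

// Minimal FAAC setup/encode flow, assuming pcm holds at least inputSamples
// 16-bit samples. byteLen is the number of encoded AAC bytes produced.
static void encode_pcm_with_faac(int32_t *pcm) {
    unsigned long inputSamples, maxOutputBytes;
    faacEncHandle codec = faacEncOpen(44100, 2, &inputSamples, &maxOutputBytes);

    faacEncConfigurationPtr config = faacEncGetCurrentConfiguration(codec);
    config->mpegVersion   = MPEG4;             // MPEG-4 standard
    config->aacObjectType = LOW;               // AAC-LC
    config->inputFormat   = FAAC_INPUT_16BIT;  // 16-bit PCM input
    config->outputFormat  = 0;                 // raw AAC, no ADTS header
    faacEncSetConfiguration(codec, config);

    unsigned char *out = (unsigned char *) malloc(maxOutputBytes);
    int byteLen = faacEncEncode(codec, pcm, (unsigned int) inputSamples,
                                out, (unsigned int) maxOutputBytes);
    (void) byteLen;   // in the real code this is packed into an RTMP audio tag
    free(out);
    faacEncClose(codec);
}
```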

@ -26,23 +26,23 @@ JavaVM *javaVM;
//调用类 //调用类
jobject jobject_error; jobject jobject_error;
/***************Java层对应**************/ /*************** Corresponding to the Java layer **************/
//视频编码器打开失败 //Failed to open the video encoder
const int ERROR_VIDEO_ENCODER_OPEN = 0x01; const int ERROR_VIDEO_ENCODER_OPEN = 0x01;
//视频帧编码失败 //Video frame encoding failed
const int ERROR_VIDEO_ENCODE = 0x02; const int ERROR_VIDEO_ENCODE = 0x02;
//音频编码器打开失败 //Failed to open the audio encoder
const int ERROR_AUDIO_ENCODER_OPEN = 0x03; const int ERROR_AUDIO_ENCODER_OPEN = 0x03;
//音频帧编码失败 //Audio frame encoding failed
const int ERROR_AUDIO_ENCODE = 0x04; const int ERROR_AUDIO_ENCODE = 0x04;
//RTMP连接失败 //RTMP connection failed
const int ERROR_RTMP_CONNECT = 0x05; const int ERROR_RTMP_CONNECT = 0x05;
//RTMP连接流失败 //Failed to connect to the RTMP stream
const int ERROR_RTMP_CONNECT_STREAM = 0x06; const int ERROR_RTMP_CONNECT_STREAM = 0x06;
//RTMP发送数据包失败 //Failed to send RTMP packet
const int ERROR_RTMP_SEND_PACKET = 0x07; const int ERROR_RTMP_SEND_PACKET = 0x07;
/***************Java层对应**************/ /*************** Corresponding to the Java layer **************/
//当调用System.loadLibrary时,会回调这个方法 //当调用System.loadLibrary时,会回调这个方法
jint JNICALL JNI_OnLoad(JavaVM *vm, void *reserved) { jint JNICALL JNI_OnLoad(JavaVM *vm, void *reserved) {
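The error constants above mirror the Java side in LivePusherNew. As a hedged sketch (the full RtmpPusher.cpp is not shown here), this is how a native pusher typically reports such a code back through errorFromNative(int) over JNI; the cached javaVM/jobject_error globals match the declarations above, while the helper name report_error is made up.

```c
#include <jni.h>

extern JavaVM *javaVM;        // cached in JNI_OnLoad
extern jobject jobject_error; // global ref to the LivePusherNew instance

// Attach the calling native thread, invoke errorFromNative(int), detach.
// Real code may keep worker threads attached instead of detaching each time.
static void report_error(int errCode) {
    JNIEnv *env = NULL;
    if ((*javaVM)->AttachCurrentThread(javaVM, &env, NULL) != JNI_OK) return;
    jclass clazz = (*env)->GetObjectClass(env, jobject_error);
    jmethodID mid = (*env)->GetMethodID(env, clazz, "errorFromNative", "(I)V");
    if (mid != NULL) {
        (*env)->CallVoidMethod(env, jobject_error, mid, errCode);
    }
    (*javaVM)->DetachCurrentThread(javaVM);
}
```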

@ -38,8 +38,8 @@ void VideoStream::setVideoEncInfo(int width, int height, int fps, int bitrate) {
DELETE(pic_in); DELETE(pic_in);
} }
//打开x264编码器 //Open the x264 encoder
//x264编码器的属性 //x264 encoder properties
x264_param_t param; x264_param_t param;
x264_param_default_preset(&param, "ultrafast", "zerolatency"); x264_param_default_preset(&param, "ultrafast", "zerolatency");
param.i_level_idc = 32; param.i_level_idc = 32;
@ -73,7 +73,7 @@ void VideoStream::setVideoEncInfo(int width, int height, int fps, int bitrate) {
param.i_threads = 1; param.i_threads = 1;
x264_param_apply_profile(&param, "baseline"); x264_param_apply_profile(&param, "baseline");
//打开编码器 //Open the encoder
videoCodec = x264_encoder_open(&param); videoCodec = x264_encoder_open(&param);
pic_in = new x264_picture_t; pic_in = new x264_picture_t;
x264_picture_alloc(pic_in, X264_CSP_I420, width, height); x264_picture_alloc(pic_in, X264_CSP_I420, width, height);
@ -163,7 +163,7 @@ void VideoStream::sendSpsPps(uint8_t *sps, uint8_t *pps, int sps_len, int pps_le
//版本 //版本
packet->m_body[i++] = 0x01; packet->m_body[i++] = 0x01;
//编码规格 //Encoding profile
packet->m_body[i++] = sps[1]; packet->m_body[i++] = sps[1];
packet->m_body[i++] = sps[2]; packet->m_body[i++] = sps[2];
packet->m_body[i++] = sps[3]; packet->m_body[i++] = sps[3];
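For reference, a minimal sketch of the x264 setup that setVideoEncInfo() performs; the preset, tune and profile strings are taken from the diff above, while the standalone helper and its parameters are illustrative.

```c
#include <stdint.h>
#include <x264.h>

// Open an x264 encoder configured for low-latency RTMP streaming.
// Returns NULL on failure, which maps to ERROR_VIDEO_ENCODER_OPEN above.
static x264_t *open_x264(int width, int height, int fps, int bitrate) {
    x264_param_t param;
    x264_param_default_preset(&param, "ultrafast", "zerolatency");
    param.i_level_idc = 32;
    param.i_csp     = X264_CSP_I420;
    param.i_width   = width;
    param.i_height  = height;
    param.i_fps_num = (uint32_t) fps;
    param.i_fps_den = 1;
    param.rc.i_rc_method = X264_RC_ABR;    // average bitrate mode
    param.rc.i_bitrate   = bitrate / 1000; // x264 expects kbit/s
    param.i_threads = 1;
    x264_param_apply_profile(&param, "baseline");
    return x264_encoder_open(&param);
}
```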

@ -14,7 +14,7 @@ public:
~VideoStream(); ~VideoStream();
//创建x264编码器 //Create the x264 encoder
void setVideoEncInfo(int width, int height, int fps, int bitrate); void setVideoEncInfo(int width, int height, int fps, int bitrate);
void encodeData(int8_t *data); void encodeData(int8_t *data);

@ -13,19 +13,19 @@ import com.frank.live.stream.VideoStreamNew;
public class LivePusherNew { public class LivePusherNew {
//视频编码器打开失败 //Failed to open the video encoder
private final static int ERROR_VIDEO_ENCODER_OPEN = 0x01; private final static int ERROR_VIDEO_ENCODER_OPEN = 0x01;
//视频帧编码失败 //Video frame encoding failed
private final static int ERROR_VIDEO_ENCODE = 0x02; private final static int ERROR_VIDEO_ENCODE = 0x02;
//音频编码器打开失败 //Failed to open the audio encoder
private final static int ERROR_AUDIO_ENCODER_OPEN = 0x03; private final static int ERROR_AUDIO_ENCODER_OPEN = 0x03;
//音频帧编码失败 //Audio frame encoding failed
private final static int ERROR_AUDIO_ENCODE = 0x04; private final static int ERROR_AUDIO_ENCODE = 0x04;
//RTMP连接失败 //RTMP connection failed
private final static int ERROR_RTMP_CONNECT = 0x05; private final static int ERROR_RTMP_CONNECT = 0x05;
//RTMP连接流失败 //Failed to connect to the RTMP stream
private final static int ERROR_RTMP_CONNECT_STREAM = 0x06; private final static int ERROR_RTMP_CONNECT_STREAM = 0x06;
//RTMP发送数据包失败 //Failed to send RTMP packet
private final static int ERROR_RTMP_SEND_PACKET = 0x07; private final static int ERROR_RTMP_SEND_PACKET = 0x07;
static { static {
@ -60,9 +60,9 @@ public class LivePusherNew {
} }
/** /**
* 设置静音 * Set mute
* *
* @param isMute 是否静音 * @param isMute Whether to mute
*/ */
public void setMute(boolean isMute) { public void setMute(boolean isMute) {
audioStream.setMute(isMute); audioStream.setMute(isMute);
@ -88,36 +88,36 @@ public class LivePusherNew {
} }
/** /**
* 当native报错时回调这个方法 * Called back when the native layer reports an error
* *
* @param errCode errCode * @param errCode errCode
*/ */
public void errorFromNative(int errCode) { public void errorFromNative(int errCode) {
//停止推流 //Stop streaming
stopPush(); stopPush();
if (liveStateChangeListener != null) { if (liveStateChangeListener != null) {
String msg = ""; String msg = "";
switch (errCode) { switch (errCode) {
case ERROR_VIDEO_ENCODER_OPEN: case ERROR_VIDEO_ENCODER_OPEN:
msg = "视频编码器打开失败..."; msg = " Video encoding Device Open failed ...";
break; break;
case ERROR_VIDEO_ENCODE: case ERROR_VIDEO_ENCODE:
msg = "视频帧编码失败..."; msg = "Video frame encoding failed...";
break; break;
case ERROR_AUDIO_ENCODER_OPEN: case ERROR_AUDIO_ENCODER_OPEN:
msg = "音频编码器打开失败..."; msg = " Audio coding Device Open failed ...";
break; break;
case ERROR_AUDIO_ENCODE: case ERROR_AUDIO_ENCODE:
msg = "音频帧编码失败..."; msg = " Audio帧 coding 失败...";
break; break;
case ERROR_RTMP_CONNECT: case ERROR_RTMP_CONNECT:
msg = "RTMP连接失败..."; msg = "RTMP Connection failed...";
break; break;
case ERROR_RTMP_CONNECT_STREAM: case ERROR_RTMP_CONNECT_STREAM:
msg = "RTMP连接流失败..."; msg = "RTMP Connection flow failed ...";
break; break;
case ERROR_RTMP_SEND_PACKET: case ERROR_RTMP_SEND_PACKET:
msg = "RTMP发送数据包失败..."; msg = "RTMP Failed to send packet...";
break; break;
default: default:
break; break;

@ -50,7 +50,7 @@ public class PushActivity extends Activity implements Callback {
private Spinner beautyTypeSelector; private Spinner beautyTypeSelector;
private ImageView img_photo; private ImageView img_photo;
//拍照 //Take a picture
private boolean takePhoto; private boolean takePhoto;
private final static int videoWidth = 640; private final static int videoWidth = 640;
@ -58,7 +58,7 @@ public class PushActivity extends Activity implements Callback {
private final static String[] permissions = new String[]{Manifest.permission.CAMERA}; private final static String[] permissions = new String[]{Manifest.permission.CAMERA};
private final static int CODE_CAMERA = 1001; private final static int CODE_CAMERA = 1001;
private final static String[] beautySelector = new String[]{"美颜", "冷酷", "日出","素描","白猫", "浪漫", "原图"}; private final static String[] beautySelector = new String[]{"Beauty", "Cold", "Sunrise", "Sketch", "White Cat", "Romantic", "Original"};
@Override @Override
public void onCreate(Bundle savedInstanceState) { public void onCreate(Bundle savedInstanceState) {
@ -83,11 +83,11 @@ public class PushActivity extends Activity implements Callback {
private void initView(){ private void initView(){
//SurfaceView //SurfaceView
mSmartCameraView = findViewById(R.id.gl_surfaceview); mSmartCameraView = findViewById(R.id.gl_surfaceview);
//美颜类型 //Beauty filter type
beautyTypeSelector = findViewById(R.id.beauty_type_selctor); beautyTypeSelector = findViewById(R.id.beauty_type_selctor);
//静音 //Mute
btnMute = findViewById(R.id.button_mute); btnMute = findViewById(R.id.button_mute);
//拍照 //Take a picture
img_photo = findViewById(R.id.img_photo); img_photo = findViewById(R.id.img_photo);
} }
@ -137,13 +137,13 @@ public class PushActivity extends Activity implements Callback {
is_mute = !is_mute; is_mute = !is_mute;
if ( is_mute ) if ( is_mute )
btnMute.setText("取消静音"); btnMute.setText("Unmute");
else else
btnMute.setText("静音"); btnMute.setText("Mute");
} }
}); });
//预览数据回调(RGBA格式) //Preview data callback (RGBA format)
mSmartCameraView.setPreviewCallback(new SmartCameraView.PreviewCallback() { mSmartCameraView.setPreviewCallback(new SmartCameraView.PreviewCallback() {
@Override @Override
public void onGetRgbaFrame(byte[] data, int width, int height) { public void onGetRgbaFrame(byte[] data, int width, int height) {
@ -171,9 +171,9 @@ public class PushActivity extends Activity implements Callback {
} }
/** /**
* 拍照 * Take a picture
* @param data 预览数据 * @param data Preview data
* @param width 图片宽度 * @param width Picture width
* @param height 图片高度 * @param height Picture height
*/ */
private void doTakePhoto(byte[] data, int width, int height){ private void doTakePhoto(byte[] data, int width, int height){

@ -2,7 +2,7 @@ package com.frank.live.param;
/** /**
* 音频相关参数 * Audio-related parameters
* Created by frank on 2018/1/28. * Created by frank on 2018/1/28.
*/ */

@ -1,7 +1,7 @@
package com.frank.live.param; package com.frank.live.param;
/** /**
* 视频相关参数 * Video-related parameters
* Created by frank on 2018/1/28. * Created by frank on 2018/1/28.
*/ */

@ -74,8 +74,8 @@ public class AudioStream {
} }
/** /**
* 设置静音 * Set mute
* @param isMute 是否静音 * @param isMute Whether to mute
*/ */
public void setMute(boolean isMute){ public void setMute(boolean isMute){
this.isMute = isMute; this.isMute = isMute;

@ -41,7 +41,7 @@ public class CameraHelper implements SurfaceHolder.Callback, Camera.PreviewCallb
private void stopPreview() { private void stopPreview() {
if (mCamera != null) { if (mCamera != null) {
//预览数据回调接口 //Preview data callback interface
mCamera.setPreviewCallback(null); mCamera.setPreviewCallback(null);
//停止预览 //停止预览
mCamera.stopPreview(); mCamera.stopPreview();

@ -1,7 +1,7 @@
package com.frank.live.util; package com.frank.live.util;
/** /**
* YUV与RGB转换工具类 * YUV and RGB conversion utility class
* Created by frank on 2018/7/1. * Created by frank on 2018/7/1.
*/ */

@ -23,7 +23,7 @@
android:layout_height="wrap_content" android:layout_height="wrap_content"
android:layout_alignParentRight="true" android:layout_alignParentRight="true"
android:src="@drawable/ic_camera_switch" android:src="@drawable/ic_camera_switch"
android:text="拍照" android:text="Take a picture"
android:clickable="true" android:clickable="true"
android:focusable="true" android:focusable="true"
android:tint="@color/colorPrimary" /> android:tint="@color/colorPrimary" />
@ -32,7 +32,7 @@
android:id="@+id/button_mute" android:id="@+id/button_mute"
android:layout_width="wrap_content" android:layout_width="wrap_content"
android:layout_height="wrap_content" android:layout_height="wrap_content"
android:text=" 静音" android:text=" Mute"
android:layout_alignParentRight="true" android:layout_alignParentRight="true"
android:layout_marginTop="10dp" android:layout_marginTop="10dp"
android:layout_below="@+id/img_photo"/> android:layout_below="@+id/img_photo"/>

@ -0,0 +1,7 @@
<?xml version="1.0" encoding="utf-8"?>
<resources>
<string name="app_name">live</string>
<string name="swap">Switch</string>
<string name="start">Start</string>
<string name="stop">stop</string>
</resources>

@ -0,0 +1,7 @@
<?xml version="1.0" encoding="utf-8"?>
<resources>
<string name="app_name">live</string>
<string name="swap">切换</string>
<string name="start">开始</string>
<string name="stop">停止</string>
</resources>

@ -1,6 +1,6 @@
<resources> <resources>
<string name="app_name">live</string> <string name="app_name">live</string>
<string name="swap">切换</string> <string name="swap">Switch</string>
<string name="start">开始</string> <string name="start">Start</string>
<string name="stop">停止</string> <string name="stop">stop</string>
</resources> </resources>

@ -48,7 +48,7 @@ public class MultiScreenActivity extends AppCompatActivity {
//四分屏模式还是全屏模式 //四分屏模式还是全屏模式
private boolean isMultiScreen; private boolean isMultiScreen;
//保存客户端ip通道数对应关系 //Mapping from client IP to channel number
private HashMap<String, Integer> clientMap = new HashMap<>(); private HashMap<String, Integer> clientMap = new HashMap<>();
//记录每个通道的投屏状态 //记录每个通道的投屏状态
private TreeMap<Integer, Boolean> channelMap = new TreeMap<>(); private TreeMap<Integer, Boolean> channelMap = new TreeMap<>();
@ -114,7 +114,7 @@ public class MultiScreenActivity extends AppCompatActivity {
mVideoView1.setIjkPlayerListener(new IjkPlayerListener() { mVideoView1.setIjkPlayerListener(new IjkPlayerListener() {
@Override @Override
public void onIjkPlayer(IjkMediaPlayer ijkMediaPlayer) { public void onIjkPlayer(IjkMediaPlayer ijkMediaPlayer) {
//设置播放器option //Set player options
setOptions(ijkMediaPlayer); setOptions(ijkMediaPlayer);
} }
}); });
@ -161,7 +161,7 @@ public class MultiScreenActivity extends AppCompatActivity {
} }
/** /**
* 配置播放器参数项 * Configure player options
*/ */
private void setOptions(IjkMediaPlayer ijkPlayer) { private void setOptions(IjkMediaPlayer ijkPlayer) {
if (ijkPlayer == null) if (ijkPlayer == null)
@ -181,7 +181,7 @@ public class MultiScreenActivity extends AppCompatActivity {
ijkPlayer.setOption(IjkMediaPlayer.OPT_CATEGORY_PLAYER, "max_cached_duration", 30);//最大缓存时长 ijkPlayer.setOption(IjkMediaPlayer.OPT_CATEGORY_PLAYER, "max_cached_duration", 30);//最大缓存时长
ijkPlayer.setOption(IjkMediaPlayer.OPT_CATEGORY_PLAYER, "infbuf", 1);//是否限制输入缓存数 ijkPlayer.setOption(IjkMediaPlayer.OPT_CATEGORY_PLAYER, "infbuf", 1);//是否限制输入缓存数
ijkPlayer.setOption(IjkMediaPlayer.OPT_CATEGORY_FORMAT, "fflags", "nobuffer"); ijkPlayer.setOption(IjkMediaPlayer.OPT_CATEGORY_FORMAT, "fflags", "nobuffer");
//设置播放前的最大探测时间,分析码流时长:默认1024*1000 //Maximum probe time before playback (stream analysis duration), default 1024*1000
ijkPlayer.setOption(IjkMediaPlayer.OPT_CATEGORY_FORMAT, "analyzedmaxduration", 100); ijkPlayer.setOption(IjkMediaPlayer.OPT_CATEGORY_FORMAT, "analyzedmaxduration", 100);
//ijkPlayer.setOption(IjkMediaPlayer.OPT_CATEGORY_FORMAT, "rtsp_transport", "tcp");//tcp传输数据 //ijkPlayer.setOption(IjkMediaPlayer.OPT_CATEGORY_FORMAT, "rtsp_transport", "tcp");//tcp传输数据
} }
@ -208,7 +208,7 @@ public class MultiScreenActivity extends AppCompatActivity {
} }
/** /**
* 自定义广播接收 * Custom broadcast receiver
*/ */
private class CustomReceiver extends BroadcastReceiver { private class CustomReceiver extends BroadcastReceiver {
@Override @Override
@ -271,7 +271,7 @@ public class MultiScreenActivity extends AppCompatActivity {
} }
/** /**
* 获取当前投屏通道 * Get the current screen-casting channel
* *
* @return idleChannel * @return idleChannel
*/ */

@ -3,13 +3,13 @@ android端基于FFmpeg库的使用。<br>
添加编译ffmpeg、shine、mp3lame、x264源码的参考脚本<br> 添加编译ffmpeg、shine、mp3lame、x264源码的参考脚本<br>
目前音视频相关处理:<br> 目前音视频相关处理:<br>
- #### 音频剪切、拼接 - #### Audio cutting and splicing
- #### 音频混音 - #### Audio mixing
- #### 音频转码 - #### Audio transcoding
- #### 音视频合成 - #### Audio and video synthesis
- #### 音频抽取 - #### Audio extraction
- #### 音频解码播放 - #### Audio decoding and playback
- #### 音频编码 - #### Audio encoding
- #### 视频抽取 - #### 视频抽取
- #### 视频剪切 - #### 视频剪切
- #### 视频转码 - #### 视频转码
@ -22,10 +22,10 @@ android端基于FFmpeg库的使用。<br>
- #### 视频反序倒播 - #### 视频反序倒播
- #### 视频画中画 - #### 视频画中画
- #### 图片合成视频 - #### 图片合成视频
- #### 视频解码播放 - #### Video decoding and playback
- #### 本地直播推流 - #### 本地直播推流
- #### 实时直播推流 - #### 实时直播推流
- #### 音视频解码播放 - #### Audio and video decoding and playback
- #### OpenGL+GPUImage滤镜 - #### OpenGL+GPUImage滤镜
- #### FFmpeg的AVFilter滤镜 - #### FFmpeg的AVFilter滤镜
- #### 使用mp3lame库进行mp3转码 - #### 使用mp3lame库进行mp3转码

@ -27,20 +27,20 @@
<category android:name="android.intent.category.LAUNCHER" /> <category android:name="android.intent.category.LAUNCHER" />
</intent-filter> </intent-filter>
</activity> </activity>
<!-- 音频处理 --> <!-- Audio processing -->
<activity android:name=".activity.AudioHandleActivity" /> <activity android:name=".activity.AudioHandleActivity" />
<!-- 音视频处理 --> <!-- Audio and video processing -->
<activity android:name=".activity.MediaHandleActivity" /> <activity android:name=".activity.MediaHandleActivity" />
<!-- 视频处理 --> <!-- Video processing -->
<activity android:name=".activity.VideoHandleActivity" /> <activity android:name=".activity.VideoHandleActivity" />
<!-- 音视频解码播放 --> <!-- Audio and video decoding and playback -->
<activity <activity
android:name=".activity.MediaPlayerActivity" android:name=".activity.MediaPlayerActivity"
android:screenOrientation="landscape" /> android:screenOrientation="landscape" />
<!-- 本地推流直播 --> <!-- Local live streaming -->
<activity <activity
android:name=".activity.PushActivity" /> android:name=".activity.PushActivity" />
<!-- 实时推流直播 --> <!-- Real-time live streaming -->
<activity <activity
android:name=".activity.LiveActivity" android:name=".activity.LiveActivity"
android:screenOrientation="portrait" /> android:screenOrientation="portrait" />
@ -48,7 +48,7 @@
<activity <activity
android:name=".activity.FilterActivity" android:name=".activity.FilterActivity"
android:screenOrientation="landscape" /> android:screenOrientation="landscape" />
<!--视频播放拖动预览--> <!--Video playback drag-to-seek preview-->
<activity android:name=".activity.VideoPreviewActivity" <activity android:name=".activity.VideoPreviewActivity"
android:screenOrientation="portrait" /> android:screenOrientation="portrait" />
<!--检测多媒体格式数据--> <!--检测多媒体格式数据-->

@ -6,7 +6,7 @@
#include <unistd.h> #include <unistd.h>
//封装格式 //封装格式
#include "libavformat/avformat.h" #include "libavformat/avformat.h"
//解码 //Decoding
#include "libavcodec/avcodec.h" #include "libavcodec/avcodec.h"
//缩放 //缩放
#include "libswscale/swscale.h" #include "libswscale/swscale.h"
@ -25,17 +25,17 @@ AUDIO_PLAYER_FUNC(void, play, jstring input_jstr) {
//注册组件 //注册组件
av_register_all(); av_register_all();
AVFormatContext *pFormatCtx = avformat_alloc_context(); AVFormatContext *pFormatCtx = avformat_alloc_context();
//打开音频文件 //Open the audio file
if(avformat_open_input(&pFormatCtx,input_cstr,NULL,NULL) != 0){ if(avformat_open_input(&pFormatCtx,input_cstr,NULL,NULL) != 0){
LOGE(TAG, "无法打开音频文件"); LOGE(TAG, "无法打开 Audio文件");
return; return;
} }
//获取输入文件信息 //Get input file info
if(avformat_find_stream_info(pFormatCtx,NULL) < 0){ if(avformat_find_stream_info(pFormatCtx,NULL) < 0){
LOGE(TAG, "无法获取输入文件信息"); LOGE(TAG, "无法Obtain输入文件信息");
return; return;
} }
//获取音频流索引位置 //Get the index of the audio stream
int i = 0, audio_stream_idx = -1; int i = 0, audio_stream_idx = -1;
for(; i < pFormatCtx->nb_streams;i++){ for(; i < pFormatCtx->nb_streams;i++){
if(pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_AUDIO){ if(pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_AUDIO){
@ -44,32 +44,32 @@ AUDIO_PLAYER_FUNC(void, play, jstring input_jstr) {
} }
} }
//获取音频解码器 //Get the audio decoder
AVCodecContext *codecCtx = pFormatCtx->streams[audio_stream_idx]->codec; AVCodecContext *codecCtx = pFormatCtx->streams[audio_stream_idx]->codec;
AVCodec *codec = avcodec_find_decoder(codecCtx->codec_id); AVCodec *codec = avcodec_find_decoder(codecCtx->codec_id);
if(codec == NULL){ if(codec == NULL){
LOGE(TAG, "无法获取解码器"); LOGE(TAG, "无法Obtain decoding Device");
return; return;
} }
//打开解码器 //Open the decoder
if(avcodec_open2(codecCtx,codec,NULL) < 0){ if(avcodec_open2(codecCtx,codec,NULL) < 0){
LOGE(TAG, "无法打开解码器"); LOGE(TAG, "无法打开 decoding Device");
return; return;
} }
//压缩数据 //压缩数据
AVPacket *packet = (AVPacket *)av_malloc(sizeof(AVPacket)); AVPacket *packet = (AVPacket *)av_malloc(sizeof(AVPacket));
//解压缩数据 //解压缩数据
AVFrame *frame = av_frame_alloc(); AVFrame *frame = av_frame_alloc();
//frame->16bit 44100 PCM 统一音频采样格式与采样率 //frame->16bit 44100 PCM Unified audio sampling format and sampling rate
SwrContext *swrCtx = swr_alloc(); SwrContext *swrCtx = swr_alloc();
//输入的采样格式 // Input sampling format
enum AVSampleFormat in_sample_fmt = codecCtx->sample_fmt; enum AVSampleFormat in_sample_fmt = codecCtx->sample_fmt;
//输出采样格式16bit PCM //输出采样格式16bit PCM
enum AVSampleFormat out_sample_fmt = AV_SAMPLE_FMT_S16; enum AVSampleFormat out_sample_fmt = AV_SAMPLE_FMT_S16;
//输入采样率 //输入采样率
int in_sample_rate = codecCtx->sample_rate; int in_sample_rate = codecCtx->sample_rate;
//输出采样率 //Output sampling rate
int out_sample_rate = in_sample_rate; int out_sample_rate = in_sample_rate;
//声道布局(2个声道,默认立体声stereo) //声道布局(2个声道,默认立体声stereo)
uint64_t in_ch_layout = codecCtx->channel_layout; uint64_t in_ch_layout = codecCtx->channel_layout;
@ -82,7 +82,7 @@ AUDIO_PLAYER_FUNC(void, play, jstring input_jstr) {
0, NULL); 0, NULL);
swr_init(swrCtx); swr_init(swrCtx);
//输出的声道个数 //Number of output channels
int out_channel_nb = av_get_channel_layout_nb_channels(out_ch_layout); int out_channel_nb = av_get_channel_layout_nb_channels(out_ch_layout);
jclass player_class = (*env)->GetObjectClass(env,thiz); jclass player_class = (*env)->GetObjectClass(env,thiz);
@ -101,40 +101,40 @@ AUDIO_PLAYER_FUNC(void, play, jstring input_jstr) {
jmethodID audio_track_play_mid = (*env)->GetMethodID(env,audio_track_class,"play","()V"); jmethodID audio_track_play_mid = (*env)->GetMethodID(env,audio_track_class,"play","()V");
(*env)->CallVoidMethod(env,audio_track,audio_track_play_mid); (*env)->CallVoidMethod(env,audio_track,audio_track_play_mid);
//获取write()方法 //Get the write() method
jmethodID audio_track_write_mid = (*env)->GetMethodID(env,audio_track_class,"write","([BII)I"); jmethodID audio_track_write_mid = (*env)->GetMethodID(env,audio_track_class,"write","([BII)I");
//16bit 44100 PCM 数据 //16bit 44100 PCM 数据
uint8_t *out_buffer = (uint8_t *)av_malloc(MAX_AUDIO_FRAME_SIZE); uint8_t *out_buffer = (uint8_t *)av_malloc(MAX_AUDIO_FRAME_SIZE);
int got_frame = 0,index = 0, ret; int got_frame = 0,index = 0, ret;
//不断读取编码数据 //Keep reading encoded data
while(av_read_frame(pFormatCtx,packet) >= 0){ while(av_read_frame(pFormatCtx,packet) >= 0){
//解码音频类型的Packet //Decode packets of the audio stream
if(packet->stream_index == audio_stream_idx){ if(packet->stream_index == audio_stream_idx){
//解码 //Decode
ret = avcodec_decode_audio4(codecCtx,frame,&got_frame,packet); ret = avcodec_decode_audio4(codecCtx,frame,&got_frame,packet);
if(ret < 0){ if(ret < 0){
break; break;
} }
//解码一帧成功 //Successfully decoded a frame
if(got_frame > 0){ if(got_frame > 0){
LOGI(TAG, "decode frame count=%d", index++); LOGI(TAG, "decode frame count=%d", index++);
//音频格式转换 //Audio format conversion
swr_convert(swrCtx, &out_buffer, MAX_AUDIO_FRAME_SIZE,(const uint8_t **)frame->data,frame->nb_samples); swr_convert(swrCtx, &out_buffer, MAX_AUDIO_FRAME_SIZE,(const uint8_t **)frame->data,frame->nb_samples);
int out_buffer_size = av_samples_get_buffer_size(NULL, out_channel_nb, int out_buffer_size = av_samples_get_buffer_size(NULL, out_channel_nb,
frame->nb_samples, out_sample_fmt, 1); frame->nb_samples, out_sample_fmt, 1);
jbyteArray audio_sample_array = (*env)->NewByteArray(env,out_buffer_size); jbyteArray audio_sample_array = (*env)->NewByteArray(env,out_buffer_size);
jbyte* sample_byte_array = (*env)->GetByteArrayElements(env,audio_sample_array,NULL); jbyte* sample_byte_array = (*env)->GetByteArrayElements(env,audio_sample_array,NULL);
//拷贝缓冲数据 //Copy buffered data
memcpy(sample_byte_array, out_buffer, (size_t) out_buffer_size); memcpy(sample_byte_array, out_buffer, (size_t) out_buffer_size);
//释放数组 //释放数组
(*env)->ReleaseByteArrayElements(env,audio_sample_array,sample_byte_array,0); (*env)->ReleaseByteArrayElements(env,audio_sample_array,sample_byte_array,0);
//调用AudioTrack的write方法进行播放 //Call AudioTrack's write method to play
(*env)->CallIntMethod(env,audio_track,audio_track_write_mid, (*env)->CallIntMethod(env,audio_track,audio_track_write_mid,
audio_sample_array,0,out_buffer_size); audio_sample_array,0,out_buffer_size);
//释放局部引用 //Release local references
(*env)->DeleteLocalRef(env,audio_sample_array); (*env)->DeleteLocalRef(env,audio_sample_array);
usleep(1000 * 16); usleep(1000 * 16);
} }
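As a side note, the swr_alloc_set_opts() call is only partially visible in the hunk above; below is a self-contained sketch of the resampler setup it implies (decoder output converted to 16-bit stereo PCM at the source sample rate, which is what the AudioTrack created on the Java side expects). The helper name and the stereo output layout are assumptions based on the surrounding comments.

```c
#include <libavcodec/avcodec.h>
#include <libswresample/swresample.h>
#include <libavutil/channel_layout.h>

// Build a SwrContext that converts whatever the decoder produces into
// interleaved 16-bit stereo PCM, keeping the source sample rate.
static SwrContext *make_resampler(AVCodecContext *codecCtx) {
    SwrContext *swr = swr_alloc_set_opts(NULL,
            AV_CH_LAYOUT_STEREO, AV_SAMPLE_FMT_S16, codecCtx->sample_rate,  // output
            (int64_t) codecCtx->channel_layout, codecCtx->sample_fmt,       // input
            codecCtx->sample_rate,
            0, NULL);
    if (swr != NULL)
        swr_init(swr);
    return swr;   // used by the swr_convert() call shown in the hunk above
}
```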

@ -14,7 +14,7 @@ FFMPEG_FUNC(jint, handle, jobjectArray commands) {
strcpy(argv[i], temp); strcpy(argv[i], temp);
(*env)->ReleaseStringUTFChars(env, jstr, temp); (*env)->ReleaseStringUTFChars(env, jstr, temp);
} }
//执行ffmpeg命令 //Execute the ffmpeg command
result = run(argc, argv); result = run(argc, argv);
//释放内存 //释放内存
for (i = 0; i < argc; i++) { for (i = 0; i < argc; i++) {

@ -34,11 +34,11 @@ PUSHER_FUNC(jint, pushStream, jstring filePath, jstring liveUrl) {
LOGE(TAG, "file_path=%s", file_path); LOGE(TAG, "file_path=%s", file_path);
LOGE(TAG, "live_url=%s", live_url); LOGE(TAG, "live_url=%s", live_url);
//注册所有组件 //Register all components
av_register_all(); av_register_all();
//初始化网络 //初始化网络
avformat_network_init(); avformat_network_init();
//打开输入文件 //Open input file
if((ret = avformat_open_input(&in_format, file_path, 0, 0)) < 0){ if((ret = avformat_open_input(&in_format, file_path, 0, 0)) < 0){
LOGE(TAG, "could not open input file..."); LOGE(TAG, "could not open input file...");
goto end; goto end;
@ -100,7 +100,7 @@ PUSHER_FUNC(jint, pushStream, jstring filePath, jstring liveUrl) {
LOGE(TAG, "could not write header..."); LOGE(TAG, "could not write header...");
goto end; goto end;
} }
//获取开始时间 //Get the start time
start_time = av_gettime(); start_time = av_gettime();
//开始循环读一帧数据 //开始循环读一帧数据
while (1){ while (1){
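The hunk above stops right at the read/push loop. As a hedged sketch of what such a loop usually does after start_time = av_gettime(): each packet is delayed until its timestamp catches up with wall-clock time, so that a local file is pushed in real time rather than as fast as the network allows. The helper name and its exact placement in the loop are assumptions.

```c
#include <libavformat/avformat.h>
#include <libavutil/time.h>

// Sleep until pkt's timestamp (converted to microseconds) has been reached,
// measured from start_time. in_format and start_time come from the code above.
static void pace_packet(AVFormatContext *in_format, AVPacket *pkt, int64_t start_time) {
    if (pkt->dts == AV_NOPTS_VALUE) return;
    AVRational time_base = in_format->streams[pkt->stream_index]->time_base;
    int64_t pts_us = av_rescale_q(pkt->dts, time_base, AV_TIME_BASE_Q);
    int64_t now_us = av_gettime() - start_time;
    if (pts_us > now_us)
        av_usleep((unsigned int) (pts_us - now_us));
}
```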

@ -74,7 +74,7 @@ jint JNICALL JNI_OnLoad(JavaVM* vm, void* reserved){
//初始化输入格式上下文 //初始化输入格式上下文
int init_input_format_context(MediaPlayer* player, const char* file_name){ int init_input_format_context(MediaPlayer* player, const char* file_name){
//注册所有组件 //Register all components
av_register_all(); av_register_all();
//分配上下文 //分配上下文
player->format_context = avformat_alloc_context(); player->format_context = avformat_alloc_context();
@ -114,11 +114,11 @@ int init_input_format_context(MediaPlayer* player, const char* file_name){
return 0; return 0;
} }
//打开音视频解码器 //Open the audio and video decoders
int init_condec_context(MediaPlayer* player){ int init_condec_context(MediaPlayer* player){
//获取codec上下文指针 //Get the codec context pointer
player->video_codec_context = player->format_context->streams[player->video_stream_index]->codec; player->video_codec_context = player->format_context->streams[player->video_stream_index]->codec;
//寻找视频流的解码器 //Find the decoder for the video stream
player->video_codec = avcodec_find_decoder(player->video_codec_context->codec_id); player->video_codec = avcodec_find_decoder(player->video_codec_context->codec_id);
if(player->video_codec == NULL) { if(player->video_codec == NULL) {
LOGE(TAG, "couldn't find video Codec."); LOGE(TAG, "couldn't find video Codec.");
@ -138,25 +138,25 @@ int init_condec_context(MediaPlayer* player){
LOGE(TAG, "Couldn't open audio codec."); LOGE(TAG, "Couldn't open audio codec.");
return -1; return -1;
} }
// 获取视频宽高 // Get the video width and height
player->video_width = player->video_codec_context->width; player->video_width = player->video_codec_context->width;
player->video_height = player->video_codec_context->height; player->video_height = player->video_codec_context->height;
return 0; return 0;
} }
//视频解码 //Video decoding
void video_player_prepare(MediaPlayer* player, JNIEnv* env, jobject surface){ void video_player_prepare(MediaPlayer* player, JNIEnv* env, jobject surface){
// 获取native window // Get the native window
player->native_window = ANativeWindow_fromSurface(env, surface); player->native_window = ANativeWindow_fromSurface(env, surface);
} }
//获取当前播放时间 //Get the current playback time
int64_t get_play_time(MediaPlayer* player){ int64_t get_play_time(MediaPlayer* player){
return (int64_t)(av_gettime() - player->start_time); return (int64_t)(av_gettime() - player->start_time);
} }
/** /**
* * Delayed wait
*/ */
void player_wait_for_frame(MediaPlayer *player, int64_t stream_time) { void player_wait_for_frame(MediaPlayer *player, int64_t stream_time) {
pthread_mutex_lock(&player->mutex); pthread_mutex_lock(&player->mutex);
@ -191,7 +191,7 @@ void player_wait_for_frame(MediaPlayer *player, int64_t stream_time) {
pthread_mutex_unlock(&player->mutex); pthread_mutex_unlock(&player->mutex);
} }
//视频解码 //Video decoding
int decode_video(MediaPlayer* player, AVPacket* packet){ int decode_video(MediaPlayer* player, AVPacket* packet){
// 设置native window的buffer大小,可自动拉伸 // 设置native window的buffer大小,可自动拉伸
ANativeWindow_setBuffersGeometry(player->native_window, player->video_width, ANativeWindow_setBuffersGeometry(player->native_window, player->video_width,
@ -213,7 +213,7 @@ int decode_video(MediaPlayer* player, AVPacket* packet){
av_image_fill_arrays(player->rgba_frame->data, player->rgba_frame->linesize, player->buffer, AV_PIX_FMT_RGBA, av_image_fill_arrays(player->rgba_frame->data, player->rgba_frame->linesize, player->buffer, AV_PIX_FMT_RGBA,
player->video_width, player->video_height, 1); player->video_width, player->video_height, 1);
// 由于解码出来的帧格式不是RGBA的,在渲染之前需要进行格式转换 // The decoded frame is not in RGBA format, so it must be converted before rendering
struct SwsContext *sws_ctx = sws_getContext( struct SwsContext *sws_ctx = sws_getContext(
player->video_width, player->video_width,
player->video_height, player->video_height,
@ -227,7 +227,7 @@ int decode_video(MediaPlayer* player, AVPacket* packet){
NULL); NULL);
int frameFinished; int frameFinished;
//对该帧进行解码 //Decode this frame
int ret = avcodec_decode_video2(player->video_codec_context, player->yuv_frame, &frameFinished, packet); int ret = avcodec_decode_video2(player->video_codec_context, player->yuv_frame, &frameFinished, packet);
if(ret < 0){ if(ret < 0){
LOGE(TAG, "avcodec_decode_video2 error..."); LOGE(TAG, "avcodec_decode_video2 error...");
@ -236,16 +236,16 @@ int decode_video(MediaPlayer* player, AVPacket* packet){
if (frameFinished) { if (frameFinished) {
// lock native window // lock native window
ANativeWindow_lock(player->native_window, &windowBuffer, 0); ANativeWindow_lock(player->native_window, &windowBuffer, 0);
// 格式转换 // Format conversion
sws_scale(sws_ctx, (uint8_t const * const *)player->yuv_frame->data, sws_scale(sws_ctx, (uint8_t const * const *)player->yuv_frame->data,
player->yuv_frame->linesize, 0, player->video_height, player->yuv_frame->linesize, 0, player->video_height,
player->rgba_frame->data, player->rgba_frame->linesize); player->rgba_frame->data, player->rgba_frame->linesize);
// 获取stride // Get the stride
uint8_t * dst = windowBuffer.bits; uint8_t * dst = windowBuffer.bits;
int dstStride = windowBuffer.stride * 4; int dstStride = windowBuffer.stride * 4;
uint8_t * src = player->rgba_frame->data[0]; uint8_t * src = player->rgba_frame->data[0];
int srcStride = player->rgba_frame->linesize[0]; int srcStride = player->rgba_frame->linesize[0];
// 由于window的stride和帧的stride不同,因此需要逐行复制 // The window stride differs from the frame stride, so copy line by line
int h; int h;
for (h = 0; h < player->video_height; h++) { for (h = 0; h < player->video_height; h++) {
memcpy(dst + h * dstStride, src + h * srcStride, (size_t) srcStride); memcpy(dst + h * dstStride, src + h * srcStride, (size_t) srcStride);
@ -254,7 +254,7 @@ int decode_video(MediaPlayer* player, AVPacket* packet){
//计算延迟 //计算延迟
int64_t pts = av_frame_get_best_effort_timestamp(player->yuv_frame); int64_t pts = av_frame_get_best_effort_timestamp(player->yuv_frame);
AVStream *stream = player->format_context->streams[player->video_stream_index]; AVStream *stream = player->format_context->streams[player->video_stream_index];
//转换(不同时间基时间转换) //Convert (between different time bases)
int64_t time = av_rescale_q(pts, stream->time_base, AV_TIME_BASE_Q); int64_t time = av_rescale_q(pts, stream->time_base, AV_TIME_BASE_Q);
//音视频帧同步 //音视频帧同步
player_wait_for_frame(player, time); player_wait_for_frame(player, time);
@ -264,17 +264,17 @@ int decode_video(MediaPlayer* player, AVPacket* packet){
return 0; return 0;
} }
//音频解码初始化 //Audio decoder initialization
void audio_decoder_prepare(MediaPlayer* player) { void audio_decoder_prepare(MediaPlayer* player) {
player->swrContext = swr_alloc(); player->swrContext = swr_alloc();
//输入的采样格式 // Input sampling format
enum AVSampleFormat in_sample_fmt = player->audio_codec_context->sample_fmt; enum AVSampleFormat in_sample_fmt = player->audio_codec_context->sample_fmt;
//输出采样格式16bit PCM //输出采样格式16bit PCM
player->out_sample_fmt = AV_SAMPLE_FMT_S16; player->out_sample_fmt = AV_SAMPLE_FMT_S16;
//输入采样率 //输入采样率
int in_sample_rate = player->audio_codec_context->sample_rate; int in_sample_rate = player->audio_codec_context->sample_rate;
//输出采样率 //Output sampling rate
player->out_sample_rate = in_sample_rate; player->out_sample_rate = in_sample_rate;
//声道布局(2个声道,默认立体声stereo) //声道布局(2个声道,默认立体声stereo)
uint64_t in_ch_layout = player->audio_codec_context->channel_layout; uint64_t in_ch_layout = player->audio_codec_context->channel_layout;
@ -286,11 +286,11 @@ void audio_decoder_prepare(MediaPlayer* player) {
in_ch_layout, in_sample_fmt, in_sample_rate, in_ch_layout, in_sample_fmt, in_sample_rate,
0, NULL); 0, NULL);
swr_init(player->swrContext); swr_init(player->swrContext);
//输出的声道个数 //Number of output channels
player->out_channel_nb = av_get_channel_layout_nb_channels(out_ch_layout); player->out_channel_nb = av_get_channel_layout_nb_channels(out_ch_layout);
} }
//音频播放器 //Audio player
void audio_player_prepare(MediaPlayer* player, JNIEnv* env, jclass jthiz){ void audio_player_prepare(MediaPlayer* player, JNIEnv* env, jclass jthiz){
jclass player_class = (*env)->GetObjectClass(env,jthiz); jclass player_class = (*env)->GetObjectClass(env,jthiz);
if(!player_class){ if(!player_class){
@ -311,7 +311,7 @@ void audio_player_prepare(MediaPlayer* player, JNIEnv* env, jclass jthiz){
(*env)->CallVoidMethod(env, audio_track, audio_track_play_mid); (*env)->CallVoidMethod(env, audio_track, audio_track_play_mid);
player->audio_track = (*env)->NewGlobalRef(env, audio_track); player->audio_track = (*env)->NewGlobalRef(env, audio_track);
//获取write()方法 //Get the write() method
player->audio_track_write_mid = (*env)->GetMethodID(env,audio_track_class,"write","([BII)I"); player->audio_track_write_mid = (*env)->GetMethodID(env,audio_track_class,"write","([BII)I");
//16bit 44100 PCM 数据 //16bit 44100 PCM 数据
@ -320,18 +320,18 @@ void audio_player_prepare(MediaPlayer* player, JNIEnv* env, jclass jthiz){
player->audio_frame = av_frame_alloc(); player->audio_frame = av_frame_alloc();
} }
//音频解码 //Audio decoding
int decode_audio(MediaPlayer* player, AVPacket* packet){ int decode_audio(MediaPlayer* player, AVPacket* packet){
int got_frame = 0, ret; int got_frame = 0, ret;
//解码 //Decode
ret = avcodec_decode_audio4(player->audio_codec_context, player->audio_frame, &got_frame, packet); ret = avcodec_decode_audio4(player->audio_codec_context, player->audio_frame, &got_frame, packet);
if(ret < 0){ if(ret < 0){
LOGE(TAG, "avcodec_decode_audio4 error..."); LOGE(TAG, "avcodec_decode_audio4 error...");
return -1; return -1;
} }
//解码一帧成功 //Successfully decoded a frame
if(got_frame > 0){ if(got_frame > 0){
//音频格式转换 //Audio format conversion
swr_convert(player->swrContext, &player->audio_buffer, MAX_AUDIO_FRAME_SIZE, (const uint8_t **)player->audio_frame->data, player->audio_frame->nb_samples); swr_convert(player->swrContext, &player->audio_buffer, MAX_AUDIO_FRAME_SIZE, (const uint8_t **)player->audio_frame->data, player->audio_frame->nb_samples);
int out_buffer_size = av_samples_get_buffer_size(NULL, player->out_channel_nb, int out_buffer_size = av_samples_get_buffer_size(NULL, player->out_channel_nb,
player->audio_frame->nb_samples, player->out_sample_fmt, 1); player->audio_frame->nb_samples, player->out_sample_fmt, 1);
@ -349,14 +349,14 @@ int decode_audio(MediaPlayer* player, AVPacket* packet){
(*javaVM)->AttachCurrentThread(javaVM, &env, NULL); (*javaVM)->AttachCurrentThread(javaVM, &env, NULL);
jbyteArray audio_sample_array = (*env)->NewByteArray(env,out_buffer_size); jbyteArray audio_sample_array = (*env)->NewByteArray(env,out_buffer_size);
jbyte* sample_byte_array = (*env)->GetByteArrayElements(env,audio_sample_array,NULL); jbyte* sample_byte_array = (*env)->GetByteArrayElements(env,audio_sample_array,NULL);
//拷贝缓冲数据 //Copy buffered data
memcpy(sample_byte_array, player->audio_buffer, (size_t) out_buffer_size); memcpy(sample_byte_array, player->audio_buffer, (size_t) out_buffer_size);
//释放数组 //释放数组
(*env)->ReleaseByteArrayElements(env,audio_sample_array,sample_byte_array,0); (*env)->ReleaseByteArrayElements(env,audio_sample_array,sample_byte_array,0);
//调用AudioTrack的write方法进行播放 //Call AudioTrack's write method to play
(*env)->CallIntMethod(env, player->audio_track, player->audio_track_write_mid, (*env)->CallIntMethod(env, player->audio_track, player->audio_track_write_mid,
audio_sample_array,0,out_buffer_size); audio_sample_array,0,out_buffer_size);
//释放局部引用 //Release local references
(*env)->DeleteLocalRef(env,audio_sample_array); (*env)->DeleteLocalRef(env,audio_sample_array);
} }
} }
@ -394,7 +394,7 @@ void* write_packet_to_queue(void* arg){
break; break;
} }
if(pkt->stream_index == player->video_stream_index || pkt->stream_index == player->audio_stream_index){ if(pkt->stream_index == player->video_stream_index || pkt->stream_index == player->audio_stream_index){
//根据AVPacket->stream_index获取对应的队列 //Get the corresponding queue according to AVPacket->stream_index
AVPacketQueue *queue = player->packets[pkt->stream_index]; AVPacketQueue *queue = player->packets[pkt->stream_index];
pthread_mutex_lock(&player->mutex); pthread_mutex_lock(&player->mutex);
AVPacket* data = queue_push(queue, &player->mutex, &player->cond); AVPacket* data = queue_push(queue, &player->mutex, &player->cond);
@ -405,12 +405,12 @@ void* write_packet_to_queue(void* arg){
} }
} }
//音视频解码线程(消费者) //Audio/video decoding thread (consumer)
void* decode_func(void* arg){ void* decode_func(void* arg){
Decoder *decoder_data = (Decoder*)arg; Decoder *decoder_data = (Decoder*)arg;
MediaPlayer *player = decoder_data->player; MediaPlayer *player = decoder_data->player;
int stream_index = decoder_data->stream_index; int stream_index = decoder_data->stream_index;
//根据stream_index获取对应的AVPacket队列 //Get the corresponding AVPacket queue according to stream_index
AVPacketQueue *queue = player->packets[stream_index]; AVPacketQueue *queue = player->packets[stream_index];
int ret = 0; int ret = 0;
@ -421,7 +421,7 @@ void* decode_func(void* arg){
if(stream_index == player->video_stream_index) {//视频流 if(stream_index == player->video_stream_index) {//视频流
ret = decode_video(player, packet); ret = decode_video(player, packet);
} else if(stream_index == player->audio_stream_index) {//音频 } else if(stream_index == player->audio_stream_index) {// Audio
ret = decode_audio(player, packet); ret = decode_audio(player, packet);
} }
av_packet_unref(packet); av_packet_unref(packet);
@ -444,16 +444,16 @@ MEDIA_PLAYER_FUNC(jint, setup, jstring filePath, jobject surface){
if(ret < 0){ if(ret < 0){
return ret; return ret;
} }
//初始化音视频解码器 //Initialize the audio and video decoders
ret = init_condec_context(player); ret = init_condec_context(player);
if(ret < 0){ if(ret < 0){
return ret; return ret;
} }
//初始化视频surface //初始化视频surface
video_player_prepare( player, env, surface); video_player_prepare( player, env, surface);
//初始化音频相关参数 //Initialize audio-related parameters
audio_decoder_prepare(player); audio_decoder_prepare(player);
//初始化音频播放器 //Initialize the audio player
audio_player_prepare(player, env, thiz); audio_player_prepare(player, env, thiz);
//初始化音视频packet队列 //初始化音视频packet队列
init_queue(player, PACKET_SIZE); init_queue(player, PACKET_SIZE);
@ -486,7 +486,7 @@ MEDIA_PLAYER_FUNC(jint, play){
} }
MEDIA_PLAYER_FUNC(void, release){ MEDIA_PLAYER_FUNC(void, release){
//释放内存以及关闭文件 //Free up memory and close files
free(player->audio_track); free(player->audio_track);
free(player->audio_track_write_mid); free(player->audio_track_write_mid);
av_free(player->buffer); av_free(player->buffer);
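To make the row-by-row copy in decode_video() above easier to follow, here is a standalone sketch of the ANativeWindow blit it performs; the function name and parameter list are illustrative, and the arguments correspond to rgba_frame->data[0], linesize[0] and video_width/height used above.

```c
#include <android/native_window.h>
#include <stdint.h>
#include <string.h>

// Copy an RGBA frame into the window buffer. The window's stride is in
// pixels (4 bytes each) and usually differs from the frame's linesize in
// bytes, so each row must be copied separately.
static void blit_rgba(ANativeWindow *window, const uint8_t *src, int srcStride,
                      int width, int height) {
    ANativeWindow_setBuffersGeometry(window, width, height, WINDOW_FORMAT_RGBA_8888);
    ANativeWindow_Buffer buffer;
    if (ANativeWindow_lock(window, &buffer, NULL) != 0) return;
    uint8_t *dst = (uint8_t *) buffer.bits;
    int dstStride = buffer.stride * 4;
    for (int h = 0; h < height; h++)
        memcpy(dst + h * dstStride, src + h * srcStride, (size_t) srcStride);
    ANativeWindow_unlockAndPost(window);
}
```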

@ -21,11 +21,11 @@
SLObjectItf engineObject = NULL; SLObjectItf engineObject = NULL;
SLEngineItf engineEngine; SLEngineItf engineEngine;
//输出混音接口 //Output mixer interface
SLObjectItf outputMixObject = NULL; SLObjectItf outputMixObject = NULL;
SLEnvironmentalReverbItf outputMixEnvironmentalReverb = NULL; SLEnvironmentalReverbItf outputMixEnvironmentalReverb = NULL;
//缓冲播放器接口 //Buffer queue player interface
SLObjectItf bqPlayerObject = NULL; SLObjectItf bqPlayerObject = NULL;
SLPlayItf bqPlayerPlay; SLPlayItf bqPlayerPlay;
SLAndroidSimpleBufferQueueItf bqPlayerBufferQueue; SLAndroidSimpleBufferQueueItf bqPlayerBufferQueue;
@ -53,14 +53,14 @@ int createAudioPlayer(int *rate, int *channel, const char *file_name) ;
// 释放相关资源 // 释放相关资源
int releaseAudioPlayer(); int releaseAudioPlayer();
// 获取PCM数据, 自动回调获取 // Get PCM data, fetched automatically via callback
int getPCM(void **pcm, size_t *pcmSize) ; int getPCM(void **pcm, size_t *pcmSize) ;
//播放回调方法 //Playback callback method
void bqPlayerCallback(SLAndroidSimpleBufferQueueItf bufferQueueItf, void *context) { void bqPlayerCallback(SLAndroidSimpleBufferQueueItf bufferQueueItf, void *context) {
bufferSize = 0; bufferSize = 0;
getPCM(&buffer, &bufferSize); getPCM(&buffer, &bufferSize);
//如果buffer不为空,入待播放队列 //If the buffer is not empty, enqueue it for playback
if (NULL != buffer && 0 != bufferSize) { if (NULL != buffer && 0 != bufferSize) {
SLresult result; SLresult result;
result = (*bqPlayerBufferQueue)->Enqueue(bqPlayerBufferQueue, buffer, bufferSize); result = (*bqPlayerBufferQueue)->Enqueue(bqPlayerBufferQueue, buffer, bufferSize);
@ -80,16 +80,16 @@ void createEngine() {
LOGI(TAG, "slCreateEngine=%d", result); LOGI(TAG, "slCreateEngine=%d", result);
result = (*engineObject)->Realize(engineObject, SL_BOOLEAN_FALSE); result = (*engineObject)->Realize(engineObject, SL_BOOLEAN_FALSE);
LOGI(TAG, "engineObject->Realize=%d", result); LOGI(TAG, "engineObject->Realize=%d", result);
//获取引擎接口 //Get the engine interface
result = (*engineObject)->GetInterface(engineObject, SL_IID_ENGINE, &engineEngine); result = (*engineObject)->GetInterface(engineObject, SL_IID_ENGINE, &engineEngine);
LOGI(TAG, "engineObject->GetInterface=%d", result); LOGI(TAG, "engineObject->GetInterface=%d", result);
//创建输出混音 //Create the output mix
result = (*engineEngine)->CreateOutputMix(engineEngine, &outputMixObject, 0, 0, 0); result = (*engineEngine)->CreateOutputMix(engineEngine, &outputMixObject, 0, 0, 0);
LOGI(TAG, "CreateOutputMix=%d", result); LOGI(TAG, "CreateOutputMix=%d", result);
//关联输出混音 //Realize the output mix
result = (*outputMixObject)->Realize(outputMixObject, SL_BOOLEAN_FALSE); result = (*outputMixObject)->Realize(outputMixObject, SL_BOOLEAN_FALSE);
LOGI(TAG, "outputMixObject->Realize=%d", result); LOGI(TAG, "outputMixObject->Realize=%d", result);
//获取reverb接口 //Get the reverb interface
result = (*outputMixObject)->GetInterface(outputMixObject, SL_IID_ENVIRONMENTALREVERB, result = (*outputMixObject)->GetInterface(outputMixObject, SL_IID_ENVIRONMENTALREVERB,
&outputMixEnvironmentalReverb); &outputMixEnvironmentalReverb);
LOGI(TAG, "outputMixObject->GetInterface=%d", result); LOGI(TAG, "outputMixObject->GetInterface=%d", result);
@ -101,11 +101,11 @@ void createEngine() {
} }
//创建带有缓冲队列的音频播放器 //Create an audio player with a buffer queue
void createBufferQueueAudioPlayer(int rate, int channel, int bitsPerSample) { void createBufferQueueAudioPlayer(int rate, int channel, int bitsPerSample) {
SLresult result; SLresult result;
//配置音频 //Configure the audio source
SLDataLocator_AndroidSimpleBufferQueue buffer_queue = {SL_DATALOCATOR_ANDROIDSIMPLEBUFFERQUEUE, 2}; SLDataLocator_AndroidSimpleBufferQueue buffer_queue = {SL_DATALOCATOR_ANDROIDSIMPLEBUFFERQUEUE, 2};
SLDataFormat_PCM format_pcm; SLDataFormat_PCM format_pcm;
format_pcm.formatType = SL_DATAFORMAT_PCM; format_pcm.formatType = SL_DATAFORMAT_PCM;
@ -120,26 +120,26 @@ void createBufferQueueAudioPlayer(int rate, int channel, int bitsPerSample) {
format_pcm.endianness = SL_BYTEORDER_LITTLEENDIAN; format_pcm.endianness = SL_BYTEORDER_LITTLEENDIAN;
SLDataSource audioSrc = {&buffer_queue, &format_pcm}; SLDataSource audioSrc = {&buffer_queue, &format_pcm};
//配置音频 //Configure the audio sink
SLDataLocator_OutputMix loc_outmix = {SL_DATALOCATOR_OUTPUTMIX, outputMixObject}; SLDataLocator_OutputMix loc_outmix = {SL_DATALOCATOR_OUTPUTMIX, outputMixObject};
SLDataSink audioSnk = {&loc_outmix, NULL}; SLDataSink audioSnk = {&loc_outmix, NULL};
//创建音频播放器 //Create the audio player
const SLInterfaceID ids[3] = {SL_IID_BUFFERQUEUE, SL_IID_EFFECTSEND, SL_IID_VOLUME}; const SLInterfaceID ids[3] = {SL_IID_BUFFERQUEUE, SL_IID_EFFECTSEND, SL_IID_VOLUME};
const SLboolean req[3] = {SL_BOOLEAN_TRUE, SL_BOOLEAN_TRUE, SL_BOOLEAN_TRUE}; const SLboolean req[3] = {SL_BOOLEAN_TRUE, SL_BOOLEAN_TRUE, SL_BOOLEAN_TRUE};
result = (*engineEngine)->CreateAudioPlayer(engineEngine, &bqPlayerObject, &audioSrc, &audioSnk, result = (*engineEngine)->CreateAudioPlayer(engineEngine, &bqPlayerObject, &audioSrc, &audioSnk,
3, ids, req); 3, ids, req);
LOGI(TAG, "CreateAudioPlayer=%d", result); LOGI(TAG, "CreateAudioPlayer=%d", result);
//关联播放器 //Realize the player
result = (*bqPlayerObject)->Realize(bqPlayerObject, SL_BOOLEAN_FALSE); result = (*bqPlayerObject)->Realize(bqPlayerObject, SL_BOOLEAN_FALSE);
LOGI(TAG, "bqPlayerObject Realize=%d", result); LOGI(TAG, "bqPlayerObject Realize=%d", result);
//获取播放接口 //Get the play interface
result = (*bqPlayerObject)->GetInterface(bqPlayerObject, SL_IID_PLAY, &bqPlayerPlay); result = (*bqPlayerObject)->GetInterface(bqPlayerObject, SL_IID_PLAY, &bqPlayerPlay);
LOGI(TAG, "GetInterface bqPlayerPlay=%d", result); LOGI(TAG, "GetInterface bqPlayerPlay=%d", result);
//获取缓冲队列接口 //Get the buffer queue interface
result = (*bqPlayerObject)->GetInterface(bqPlayerObject, SL_IID_BUFFERQUEUE, result = (*bqPlayerObject)->GetInterface(bqPlayerObject, SL_IID_BUFFERQUEUE,
&bqPlayerBufferQueue); &bqPlayerBufferQueue);
LOGI(TAG, "GetInterface bqPlayerBufferQueue=%d", result); LOGI(TAG, "GetInterface bqPlayerBufferQueue=%d", result);
@ -148,16 +148,16 @@ void createBufferQueueAudioPlayer(int rate, int channel, int bitsPerSample) {
result = (*bqPlayerBufferQueue)->RegisterCallback(bqPlayerBufferQueue, bqPlayerCallback, NULL); result = (*bqPlayerBufferQueue)->RegisterCallback(bqPlayerBufferQueue, bqPlayerCallback, NULL);
LOGI(TAG, "RegisterCallback=%d", result); LOGI(TAG, "RegisterCallback=%d", result);
//获取音效接口 //Get the effect send interface
result = (*bqPlayerObject)->GetInterface(bqPlayerObject, SL_IID_EFFECTSEND, result = (*bqPlayerObject)->GetInterface(bqPlayerObject, SL_IID_EFFECTSEND,
&bqPlayerEffectSend); &bqPlayerEffectSend);
LOGI(TAG, "GetInterface effect=%d", result); LOGI(TAG, "GetInterface effect=%d", result);
//获取音量接口 //Get the volume interface
result = (*bqPlayerObject)->GetInterface(bqPlayerObject, SL_IID_VOLUME, &bqPlayerVolume); result = (*bqPlayerObject)->GetInterface(bqPlayerObject, SL_IID_VOLUME, &bqPlayerVolume);
LOGI(TAG, "GetInterface volume=%d", result); LOGI(TAG, "GetInterface volume=%d", result);
//开始播放音乐 //Start playing the audio
result = (*bqPlayerPlay)->SetPlayState(bqPlayerPlay, SL_PLAYSTATE_PLAYING); result = (*bqPlayerPlay)->SetPlayState(bqPlayerPlay, SL_PLAYSTATE_PLAYING);
LOGI(TAG, "SetPlayState=%d", result); LOGI(TAG, "SetPlayState=%d", result);
} }
@ -167,7 +167,7 @@ int createAudioPlayer(int *rate, int *channel, const char *file_name) {
av_register_all(); av_register_all();
aFormatCtx = avformat_alloc_context(); aFormatCtx = avformat_alloc_context();
//打开音频文件 //Open the audio file
if (avformat_open_input(&aFormatCtx, file_name, NULL, NULL) != 0) { if (avformat_open_input(&aFormatCtx, file_name, NULL, NULL) != 0) {
LOGE(TAG, "Couldn't open file:%s\n", file_name); LOGE(TAG, "Couldn't open file:%s\n", file_name);
return -1; // Couldn't open file return -1; // Couldn't open file
@ -179,7 +179,7 @@ int createAudioPlayer(int *rate, int *channel, const char *file_name) {
return -1; return -1;
} }
//寻找音频stream //Find the audio stream
int i; int i;
audioStream = -1; audioStream = -1;
for (i = 0; i < aFormatCtx->nb_streams; i++) { for (i = 0; i < aFormatCtx->nb_streams; i++) {
@ -192,21 +192,21 @@ int createAudioPlayer(int *rate, int *channel, const char *file_name) {
LOGE(TAG, "Couldn't find audio stream!"); LOGE(TAG, "Couldn't find audio stream!");
return -1; return -1;
} }
//获取解码器context //Get the decoder context
aCodecCtx = aFormatCtx->streams[audioStream]->codec; aCodecCtx = aFormatCtx->streams[audioStream]->codec;
//寻找音频解码器 //Find the audio decoder
AVCodec *aCodec = avcodec_find_decoder(aCodecCtx->codec_id); AVCodec *aCodec = avcodec_find_decoder(aCodecCtx->codec_id);
if (!aCodec) { if (!aCodec) {
fprintf(stderr, "Unsupported codec!\n"); fprintf(stderr, "Unsupported codec!\n");
return -1; return -1;
} }
//打开解码器 //Open the decoder
if (avcodec_open2(aCodecCtx, aCodec, NULL) < 0) { if (avcodec_open2(aCodecCtx, aCodec, NULL) < 0) {
LOGE(TAG, "Could not open codec."); LOGE(TAG, "Could not open codec.");
return -1; return -1;
} }
aFrame = av_frame_alloc(); aFrame = av_frame_alloc();
// 设置格式转换 // Set up format conversion
swr = swr_alloc(); swr = swr_alloc();
av_opt_set_int(swr, "in_channel_layout", aCodecCtx->channel_layout, 0); av_opt_set_int(swr, "in_channel_layout", aCodecCtx->channel_layout, 0);
av_opt_set_int(swr, "out_channel_layout", aCodecCtx->channel_layout, 0); av_opt_set_int(swr, "out_channel_layout", aCodecCtx->channel_layout, 0);
@ -225,16 +225,16 @@ int createAudioPlayer(int *rate, int *channel, const char *file_name) {
return 0; return 0;
} }
// 获取PCM数据, 自动回调获取 // Get PCM data, fetched automatically via callback
int getPCM(void **pcm, size_t *pcmSize) { int getPCM(void **pcm, size_t *pcmSize) {
while (av_read_frame(aFormatCtx, &packet) >= 0) { while (av_read_frame(aFormatCtx, &packet) >= 0) {
int frameFinished = 0; int frameFinished = 0;
//音频 // Audio
if (packet.stream_index == audioStream) { if (packet.stream_index == audioStream) {
avcodec_decode_audio4(aCodecCtx, aFrame, &frameFinished, &packet); avcodec_decode_audio4(aCodecCtx, aFrame, &frameFinished, &packet);
//解码完一帧数据 //A frame has been decoded
if (frameFinished) { if (frameFinished) {
// data_size为音频数据所占的字节数 // data_size is the number of bytes of audio data
int data_size = av_samples_get_buffer_size( int data_size = av_samples_get_buffer_size(
aFrame->linesize, aCodecCtx->channels, aFrame->linesize, aCodecCtx->channels,
aFrame->nb_samples, aCodecCtx->sample_fmt, 1); aFrame->nb_samples, aCodecCtx->sample_fmt, 1);
@ -244,7 +244,7 @@ int getPCM(void **pcm, size_t *pcmSize) {
outputBuffer = (uint8_t *) realloc(outputBuffer, sizeof(uint8_t) * outputBufferSize); outputBuffer = (uint8_t *) realloc(outputBuffer, sizeof(uint8_t) * outputBufferSize);
} }
// 音频格式转换 // Audio format conversion
swr_convert(swr, &outputBuffer, aFrame->nb_samples, swr_convert(swr, &outputBuffer, aFrame->nb_samples,
(uint8_t const **) (aFrame->extended_data), (uint8_t const **) (aFrame->extended_data),
aFrame->nb_samples); aFrame->nb_samples);
@ -275,20 +275,20 @@ AUDIO_PLAYER_FUNC(void, playAudio, jstring filePath) {
const char *file_name = (*env)->GetStringUTFChars(env, filePath, NULL); const char *file_name = (*env)->GetStringUTFChars(env, filePath, NULL);
LOGI(TAG, "file_name=%s", file_name); LOGI(TAG, "file_name=%s", file_name);
// 创建音频解码器 // Create the audio decoder
createAudioPlayer(&rate, &channel, file_name); createAudioPlayer(&rate, &channel, file_name);
// 创建播放引擎 // Create the playback engine
createEngine(); createEngine();
// 创建缓冲队列音频播放器 // Create the buffer-queue audio player
createBufferQueueAudioPlayer(rate, channel, SL_PCMSAMPLEFORMAT_FIXED_16); createBufferQueueAudioPlayer(rate, channel, SL_PCMSAMPLEFORMAT_FIXED_16);
// 启动音频播放 // Start audio playback
bqPlayerCallback(bqPlayerBufferQueue, NULL); bqPlayerCallback(bqPlayerBufferQueue, NULL);
} }
//停止播放,释放相关资源 //Stop playback and release resources
AUDIO_PLAYER_FUNC(void, stop) { AUDIO_PLAYER_FUNC(void, stop) {
if (bqPlayerObject != NULL) { if (bqPlayerObject != NULL) {
(*bqPlayerObject)->Destroy(bqPlayerObject); (*bqPlayerObject)->Destroy(bqPlayerObject);
@ -311,6 +311,6 @@ AUDIO_PLAYER_FUNC(void, stop) {
engineEngine = NULL; engineEngine = NULL;
} }
// 释放解码器相关资源 // Release decoder resources
releaseAudioPlayer(); releaseAudioPlayer();
} }
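For readers unfamiliar with the OpenSL ES object/interface pattern, here is a condensed, self-contained version of the createEngine() flow shown above: create the engine object, realize it, fetch the engine interface, then create and realize the output mix that the buffer-queue player is later wired to. The helper signature is made up and error handling is reduced to early returns.

```c
#include <SLES/OpenSLES.h>

// Create and realize the OpenSL ES engine and output mix.
static SLresult create_engine(SLObjectItf *engineObject, SLEngineItf *engineEngine,
                              SLObjectItf *outputMixObject) {
    SLresult result = slCreateEngine(engineObject, 0, NULL, 0, NULL, NULL);
    if (result != SL_RESULT_SUCCESS) return result;
    result = (**engineObject)->Realize(*engineObject, SL_BOOLEAN_FALSE);
    if (result != SL_RESULT_SUCCESS) return result;
    result = (**engineObject)->GetInterface(*engineObject, SL_IID_ENGINE, engineEngine);
    if (result != SL_RESULT_SUCCESS) return result;
    result = (**engineEngine)->CreateOutputMix(*engineEngine, outputMixObject, 0, NULL, NULL);
    if (result != SL_RESULT_SUCCESS) return result;
    return (**outputMixObject)->Realize(*outputMixObject, SL_BOOLEAN_FALSE);
}
```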

@ -69,9 +69,9 @@ jboolean playAudio = JNI_TRUE;
//const char *filter_descr = "hflip";//左右反序 //const char *filter_descr = "hflip";//左右反序
//const char *filter_descr = "rotate=90";//旋转90° //const char *filter_descr = "rotate=90";//旋转90°
//const char *filter_descr = "colorbalance=bs=0.3";//添加蓝色背景 //const char *filter_descr = "colorbalance=bs=0.3";//添加蓝色背景
//const char *filter_descr = "drawbox=x=100:y=100:w=100:h=100:color=pink@0.5'";//绘制矩形 //const char *filter_descr = "drawbox=x=100:y=100:w=100:h=100:color=pink@0.5'";//Draw a rectangle
//const char *filter_descr = "drawgrid=w=iw/3:h=ih/3:t=2:c=white@0.5";//九宫格分割 //const char *filter_descr = "drawgrid=w=iw/3:h=ih/3:t=2:c=white@0.5";//九宫格分割
//const char *filter_descr = "edgedetect=low=0.1:high=0.4";//边缘检测 //const char *filter_descr = "edgedetect=low=0.1:high=0.4";//Edge detection
//const char *filter_descr = "lutrgb='r=0:g=0'";//去掉红色、绿色分量,只保留蓝色 //const char *filter_descr = "lutrgb='r=0:g=0'";//去掉红色、绿色分量,只保留蓝色
//const char *filter_descr = "noise=alls=20:allf=t+u";//添加噪声 //const char *filter_descr = "noise=alls=20:allf=t+u";//添加噪声
//const char *filter_descr = "vignette='PI/4+random(1)*PI/50':eval=frame";//闪烁装饰 //const char *filter_descr = "vignette='PI/4+random(1)*PI/50':eval=frame";//闪烁装饰
@ -79,7 +79,7 @@ jboolean playAudio = JNI_TRUE;
//const char *filter_descr = "drawtext=fontfile='arial.ttf':fontcolor=green:fontsize=30:text='Hello world'";//绘制文字 //const char *filter_descr = "drawtext=fontfile='arial.ttf':fontcolor=green:fontsize=30:text='Hello world'";//绘制文字
//const char *filter_descr = "movie=my_logo.png[wm];[in][wm]overlay=5:5[out]";//添加图片水印 //const char *filter_descr = "movie=my_logo.png[wm];[in][wm]overlay=5:5[out]";//添加图片水印
//初始化滤波器 //Initialize the filter
int init_filters(const char *filters_descr) { int init_filters(const char *filters_descr) {
char args[512]; char args[512];
int ret = 0; int ret = 0;
@ -149,10 +149,10 @@ int init_filters(const char *filters_descr) {
return ret; return ret;
} }
//初始化视频解码器与播放器 //Initialize the video decoder and player
int open_input(JNIEnv * env, const char* file_name, jobject surface){ int open_input(JNIEnv * env, const char* file_name, jobject surface){
LOGI(TAG, "open file:%s\n", file_name); LOGI(TAG, "open file:%s\n", file_name);
//注册所有组件 //Register all components
av_register_all(); av_register_all();
//分配上下文 //分配上下文
pFormatCtx = avformat_alloc_context(); pFormatCtx = avformat_alloc_context();
@ -166,7 +166,7 @@ int open_input(JNIEnv * env, const char* file_name, jobject surface){
LOGE(TAG, "Couldn't find stream information."); LOGE(TAG, "Couldn't find stream information.");
return -1; return -1;
} }
//寻找视频流的第一帧 //Find the video stream
int i; int i;
for (i = 0; i < pFormatCtx->nb_streams; i++) { for (i = 0; i < pFormatCtx->nb_streams; i++) {
if (pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO if (pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO
@ -179,9 +179,9 @@ int open_input(JNIEnv * env, const char* file_name, jobject surface){
return -1; return -1;
} }
//获取codec上下文指针 //Get the codec context pointer
pCodecCtx = pFormatCtx->streams[video_stream_index]->codec; pCodecCtx = pFormatCtx->streams[video_stream_index]->codec;
//寻找视频流的解码器 //Find the decoder for the video stream
AVCodec * pCodec = avcodec_find_decoder(pCodecCtx->codec_id); AVCodec * pCodec = avcodec_find_decoder(pCodecCtx->codec_id);
if(pCodec==NULL) { if(pCodec==NULL) {
LOGE(TAG, "couldn't find Codec."); LOGE(TAG, "couldn't find Codec.");
@ -191,7 +191,7 @@ int open_input(JNIEnv * env, const char* file_name, jobject surface){
LOGE(TAG, "Couldn't open codec."); LOGE(TAG, "Couldn't open codec.");
return -1; return -1;
} }
// 获取native window // Get the native window
nativeWindow = ANativeWindow_fromSurface(env, surface); nativeWindow = ANativeWindow_fromSurface(env, surface);
// 设置native window的buffer大小,可自动拉伸 // 设置native window的buffer大小,可自动拉伸
@ -209,7 +209,7 @@ int open_input(JNIEnv * env, const char* file_name, jobject surface){
buffer=(uint8_t *)av_malloc(numBytes*sizeof(uint8_t)); buffer=(uint8_t *)av_malloc(numBytes*sizeof(uint8_t));
av_image_fill_arrays(pFrameRGBA->data, pFrameRGBA->linesize, buffer, AV_PIX_FMT_RGBA, av_image_fill_arrays(pFrameRGBA->data, pFrameRGBA->linesize, buffer, AV_PIX_FMT_RGBA,
pCodecCtx->width, pCodecCtx->height, 1); pCodecCtx->width, pCodecCtx->height, 1);
// 由于解码出来的帧格式不是RGBA的,在渲染之前需要进行格式转换 // The decoded frame is not in RGBA format, so it must be converted before rendering
sws_ctx = sws_getContext(pCodecCtx->width, sws_ctx = sws_getContext(pCodecCtx->width,
pCodecCtx->height, pCodecCtx->height,
pCodecCtx->pix_fmt, pCodecCtx->pix_fmt,
@ -224,9 +224,9 @@ int open_input(JNIEnv * env, const char* file_name, jobject surface){
return 0; return 0;
} }
//初始化音频解码器与播放器 //Initialize the audio decoder and player
int init_audio(JNIEnv * env, jclass jthiz){ int init_audio(JNIEnv * env, jclass jthiz){
//获取音频流索引位置 //Get the index of the audio stream
int i; int i;
for(i=0; i < pFormatCtx->nb_streams;i++){ for(i=0; i < pFormatCtx->nb_streams;i++){
if(pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_AUDIO){ if(pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_AUDIO){
@ -235,28 +235,28 @@ int init_audio(JNIEnv * env, jclass jthiz){
} }
} }
//获取音频解码器 //Get the audio decoder
audioCodecCtx = pFormatCtx->streams[audio_stream_index]->codec; audioCodecCtx = pFormatCtx->streams[audio_stream_index]->codec;
AVCodec *codec = avcodec_find_decoder(audioCodecCtx->codec_id); AVCodec *codec = avcodec_find_decoder(audioCodecCtx->codec_id);
if(codec == NULL){ if(codec == NULL){
LOGE(TAG, "无法获取音频解码器"); LOGE(TAG, "Could not find the audio decoder");
return -1; return -1;
} }
//打开音频解码器 //Open the audio decoder
if(avcodec_open2(audioCodecCtx,codec,NULL) < 0){ if(avcodec_open2(audioCodecCtx,codec,NULL) < 0){
LOGE(TAG, "无法打开音频解码器"); LOGE(TAG, "Could not open the audio decoder");
return -1; return -1;
} }
//frame->16bit 44100 PCM 统一音频采样格式与采样率 //frame->16bit 44100 PCM Unified audio sampling format and sampling rate
audio_swr_ctx = swr_alloc(); audio_swr_ctx = swr_alloc();
//输入的采样格式 // Input sampling format
enum AVSampleFormat in_sample_fmt = audioCodecCtx->sample_fmt; enum AVSampleFormat in_sample_fmt = audioCodecCtx->sample_fmt;
//输出采样格式16bit PCM //输出采样格式16bit PCM
out_sample_fmt = AV_SAMPLE_FMT_S16; out_sample_fmt = AV_SAMPLE_FMT_S16;
//输入采样率 //输入采样率
int in_sample_rate = audioCodecCtx->sample_rate; int in_sample_rate = audioCodecCtx->sample_rate;
//输出采样率 //Output sampling rate
int out_sample_rate = in_sample_rate; int out_sample_rate = in_sample_rate;
//声道布局(2个声道,默认立体声stereo) //声道布局(2个声道,默认立体声stereo)
uint64_t in_ch_layout = audioCodecCtx->channel_layout; uint64_t in_ch_layout = audioCodecCtx->channel_layout;
@ -269,7 +269,7 @@ int init_audio(JNIEnv * env, jclass jthiz){
0, NULL); 0, NULL);
swr_init(audio_swr_ctx); swr_init(audio_swr_ctx);
//输出的声道个数 //Number of output channels
out_channel_nb = av_get_channel_layout_nb_channels(out_ch_layout); out_channel_nb = av_get_channel_layout_nb_channels(out_ch_layout);
jclass player_class = (*env)->GetObjectClass(env,jthiz); jclass player_class = (*env)->GetObjectClass(env,jthiz);
@ -290,7 +290,7 @@ int init_audio(JNIEnv * env, jclass jthiz){
jmethodID audio_track_play_mid = (*env)->GetMethodID(env,audio_track_class,"play","()V"); jmethodID audio_track_play_mid = (*env)->GetMethodID(env,audio_track_class,"play","()V");
(*env)->CallVoidMethod(env,audio_track,audio_track_play_mid); (*env)->CallVoidMethod(env,audio_track,audio_track_play_mid);
//获取write()方法 //Get the write() method
audio_track_write_mid = (*env)->GetMethodID(env,audio_track_class,"write","([BII)I"); audio_track_write_mid = (*env)->GetMethodID(env,audio_track_class,"write","([BII)I");
//16bit 44100 PCM 数据 //16bit 44100 PCM 数据
@ -299,28 +299,28 @@ int init_audio(JNIEnv * env, jclass jthiz){
} }
int play_audio(JNIEnv * env, AVPacket* packet, AVFrame* frame){ int play_audio(JNIEnv * env, AVPacket* packet, AVFrame* frame){
//解码 // Decode
int ret = avcodec_decode_audio4(audioCodecCtx, frame, &got_frame, packet); int ret = avcodec_decode_audio4(audioCodecCtx, frame, &got_frame, packet);
if(ret < 0){ if(ret < 0){
return ret; return ret;
} }
//解码一帧成功 //Successfully decoded a frame
if(got_frame > 0){ if(got_frame > 0){
//音频格式转换 // Audio format conversion
swr_convert(audio_swr_ctx, &out_buffer, MAX_AUDIO_FRAME_SIZE,(const uint8_t **)frame->data,frame->nb_samples); swr_convert(audio_swr_ctx, &out_buffer, MAX_AUDIO_FRAME_SIZE,(const uint8_t **)frame->data,frame->nb_samples);
int out_buffer_size = av_samples_get_buffer_size(NULL, out_channel_nb, int out_buffer_size = av_samples_get_buffer_size(NULL, out_channel_nb,
frame->nb_samples, out_sample_fmt, 1); frame->nb_samples, out_sample_fmt, 1);
jbyteArray audio_sample_array = (*env)->NewByteArray(env,out_buffer_size); jbyteArray audio_sample_array = (*env)->NewByteArray(env,out_buffer_size);
jbyte* sample_byte_array = (*env)->GetByteArrayElements(env,audio_sample_array,NULL); jbyte* sample_byte_array = (*env)->GetByteArrayElements(env,audio_sample_array,NULL);
//拷贝缓冲数据 //Copy buffered data
memcpy(sample_byte_array, out_buffer, (size_t) out_buffer_size); memcpy(sample_byte_array, out_buffer, (size_t) out_buffer_size);
//释放数组 //释放数组
(*env)->ReleaseByteArrayElements(env,audio_sample_array,sample_byte_array,0); (*env)->ReleaseByteArrayElements(env,audio_sample_array,sample_byte_array,0);
//调用AudioTrack的write方法进行播放 //Call AudioTrack's write() method to play the audio
(*env)->CallIntMethod(env,audio_track,audio_track_write_mid, (*env)->CallIntMethod(env,audio_track,audio_track_write_mid,
audio_sample_array,0,out_buffer_size); audio_sample_array,0,out_buffer_size);
//释放局部引用 //Release local references
(*env)->DeleteLocalRef(env,audio_sample_array); (*env)->DeleteLocalRef(env,audio_sample_array);
usleep(1000);//1000 * 16 usleep(1000);//1000 * 16
} }
@ -332,14 +332,14 @@ VIDEO_PLAYER_FUNC(jint, filter, jstring filePath, jobject surface, jstring filte
int ret; int ret;
const char * file_name = (*env)->GetStringUTFChars(env, filePath, JNI_FALSE); const char * file_name = (*env)->GetStringUTFChars(env, filePath, JNI_FALSE);
const char *filter_descr = (*env)->GetStringUTFChars(env, filterDescr, JNI_FALSE); const char *filter_descr = (*env)->GetStringUTFChars(env, filterDescr, JNI_FALSE);
//打开输入文件 //Open input file
if(!is_playing){ if(!is_playing){
LOGI(TAG, "open_input..."); LOGI(TAG, "open_input...");
if((ret = open_input(env, file_name, surface)) < 0){ if((ret = open_input(env, file_name, surface)) < 0){
LOGE(TAG, "Couldn't allocate video frame."); LOGE(TAG, "Couldn't allocate video frame.");
goto end; goto end;
} }
//注册滤波器 //Register all filters
avfilter_register_all(); avfilter_register_all();
filter_frame = av_frame_alloc(); filter_frame = av_frame_alloc();
if(filter_frame == NULL) { if(filter_frame == NULL) {
@ -347,7 +347,7 @@ VIDEO_PLAYER_FUNC(jint, filter, jstring filePath, jobject surface, jstring filte
ret = -1; ret = -1;
goto end; goto end;
} }
//初始化音频解码器 //Initialize the audio decoder
if ((ret = init_audio(env, thiz)) < 0){ if ((ret = init_audio(env, thiz)) < 0){
LOGE(TAG, "Couldn't init_audio."); LOGE(TAG, "Couldn't init_audio.");
goto end; goto end;
@ -355,7 +355,7 @@ VIDEO_PLAYER_FUNC(jint, filter, jstring filePath, jobject surface, jstring filte
} }
//初始化滤波器 //Initialize the filter
if ((ret = init_filters(filter_descr)) < 0){ if ((ret = init_filters(filter_descr)) < 0){
LOGE(TAG, "init_filter error, ret=%d\n", ret); LOGE(TAG, "init_filter error, ret=%d\n", ret);
goto end; goto end;
@ -366,36 +366,36 @@ VIDEO_PLAYER_FUNC(jint, filter, jstring filePath, jobject surface, jstring filte
AVPacket packet; AVPacket packet;
while(av_read_frame(pFormatCtx, &packet)>=0 && !release) { while(av_read_frame(pFormatCtx, &packet)>=0 && !release) {
//切换滤波器,退出当初播放 //When switching filters, break out of the current playback loop
if(again){ if(again){
goto again; goto again;
} }
//判断是否为视频流 //Determine if it is a video stream
if(packet.stream_index == video_stream_index) { if(packet.stream_index == video_stream_index) {
//对该帧进行解码 //Decode this frame
avcodec_decode_video2(pCodecCtx, pFrame, &frameFinished, &packet); avcodec_decode_video2(pCodecCtx, pFrame, &frameFinished, &packet);
if (frameFinished) { if (frameFinished) {
//把解码后视频帧添加到filter_graph //Add decoded video frames to filter_graph
if (av_buffersrc_add_frame_flags(buffersrc_ctx, pFrame, AV_BUFFERSRC_FLAG_KEEP_REF) < 0) { if (av_buffersrc_add_frame_flags(buffersrc_ctx, pFrame, AV_BUFFERSRC_FLAG_KEEP_REF) < 0) {
LOGE(TAG, "Error while feeding the filter_graph\n"); LOGE(TAG, "Error while feeding the filter_graph\n");
break; break;
} }
//把滤波后的视频帧从filter graph取出来 //Take the filtered video frame from the filter graph
ret = av_buffersink_get_frame(buffersink_ctx, filter_frame); ret = av_buffersink_get_frame(buffersink_ctx, filter_frame);
if (ret >= 0){ if (ret >= 0){
// lock native window // lock native window
ANativeWindow_lock(nativeWindow, &windowBuffer, 0); ANativeWindow_lock(nativeWindow, &windowBuffer, 0);
// 格式转换 // Format conversion
sws_scale(sws_ctx, (uint8_t const * const *)filter_frame->data, sws_scale(sws_ctx, (uint8_t const * const *)filter_frame->data,
filter_frame->linesize, 0, pCodecCtx->height, filter_frame->linesize, 0, pCodecCtx->height,
pFrameRGBA->data, pFrameRGBA->linesize); pFrameRGBA->data, pFrameRGBA->linesize);
// 获取stride // Get the stride
uint8_t * dst = windowBuffer.bits; uint8_t * dst = windowBuffer.bits;
int dstStride = windowBuffer.stride * 4; int dstStride = windowBuffer.stride * 4;
uint8_t * src = pFrameRGBA->data[0]; uint8_t * src = pFrameRGBA->data[0];
int srcStride = pFrameRGBA->linesize[0]; int srcStride = pFrameRGBA->linesize[0];
// 由于window的stride和帧的stride不同,因此需要逐行复制 // The window stride differs from the frame stride, so copy row by row
int h; int h;
for (h = 0; h < pCodecCtx->height; h++) { for (h = 0; h < pCodecCtx->height; h++) {
memcpy(dst + h * dstStride, src + h * srcStride, (size_t) srcStride); memcpy(dst + h * dstStride, src + h * srcStride, (size_t) srcStride);
@ -404,11 +404,11 @@ VIDEO_PLAYER_FUNC(jint, filter, jstring filePath, jobject surface, jstring filte
} }
av_frame_unref(filter_frame); av_frame_unref(filter_frame);
} }
//延迟等待 //Delayed wait
if (!playAudio){ if (!playAudio){
usleep((unsigned long) (1000 * 40));//1000 * 40 usleep((unsigned long) (1000 * 40));//1000 * 40
} }
} else if(packet.stream_index == audio_stream_index){//音频 } else if(packet.stream_index == audio_stream_index){// Audio
if (playAudio){ if (playAudio){
play_audio(env, &packet, pFrame); play_audio(env, &packet, pFrame);
} }
@ -417,7 +417,7 @@ VIDEO_PLAYER_FUNC(jint, filter, jstring filePath, jobject surface, jstring filte
} }
end: end:
is_playing = 0; is_playing = 0;
//释放内存以及关闭文件 //Free up memory and close files
av_free(buffer); av_free(buffer);
av_free(pFrameRGBA); av_free(pFrameRGBA);
av_free(filter_frame); av_free(filter_frame);
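
The loop above feeds each decoded frame into the filter graph and pulls the filtered result back out. A minimal sketch of that feed/pull pattern, with assumed helper and variable names rather than the exact ones in video_filter.c, looks like this:

#include <libavfilter/buffersrc.h>
#include <libavfilter/buffersink.h>
#include <libavutil/frame.h>

// Push one decoded frame into the graph's source, then drain the sink.
// Returns 1 when a filtered frame was produced, 0 when none is ready, <0 on error.
static int filter_one_frame(AVFilterContext *src_ctx, AVFilterContext *sink_ctx,
                            AVFrame *decoded, AVFrame *filtered) {
    int ret = av_buffersrc_add_frame_flags(src_ctx, decoded, AV_BUFFERSRC_FLAG_KEEP_REF);
    if (ret < 0)
        return ret;                        // feeding the filter graph failed
    ret = av_buffersink_get_frame(sink_ctx, filtered);
    if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF)
        return 0;                          // no filtered frame available yet
    if (ret < 0)
        return ret;
    // ... sws_scale() the filtered frame to RGBA and copy it into the window here ...
    av_frame_unref(filtered);              // release the frame so it can be reused
    return 1;
}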

@ -14,7 +14,7 @@
#define TAG "VideoPlayer" #define TAG "VideoPlayer"
//播放倍率 //Playback rate
float play_rate = 1; float play_rate = 1;
//视频总时长 //视频总时长
long duration = 0; long duration = 0;
@ -23,7 +23,7 @@ VIDEO_PLAYER_FUNC(jint, play, jstring filePath, jobject surface){
const char * file_name = (*env)->GetStringUTFChars(env, filePath, JNI_FALSE); const char * file_name = (*env)->GetStringUTFChars(env, filePath, JNI_FALSE);
LOGE(TAG, "open file:%s\n", file_name); LOGE(TAG, "open file:%s\n", file_name);
//注册所有组件 //Register all components
av_register_all(); av_register_all();
//分配上下文 //分配上下文
AVFormatContext * pFormatCtx = avformat_alloc_context(); AVFormatContext * pFormatCtx = avformat_alloc_context();
@ -37,7 +37,7 @@ VIDEO_PLAYER_FUNC(jint, play, jstring filePath, jobject surface){
LOGE(TAG, "Couldn't find stream information."); LOGE(TAG, "Couldn't find stream information.");
return -1; return -1;
} }
//寻找视频流的第一帧 //Find the video stream
int videoStream = -1, i; int videoStream = -1, i;
for (i = 0; i < pFormatCtx->nb_streams; i++) { for (i = 0; i < pFormatCtx->nb_streams; i++) {
if (pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO if (pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO
@ -50,15 +50,15 @@ VIDEO_PLAYER_FUNC(jint, play, jstring filePath, jobject surface){
return -1; return -1;
} }
//获取视频总时长 //Get the total video duration
if (pFormatCtx->duration != AV_NOPTS_VALUE) { if (pFormatCtx->duration != AV_NOPTS_VALUE) {
duration = (long) (pFormatCtx->duration / AV_TIME_BASE); duration = (long) (pFormatCtx->duration / AV_TIME_BASE);
LOGE(TAG, "duration==%ld", duration); LOGE(TAG, "duration==%ld", duration);
} }
//获取codec上下文指针 //Get the codec context pointer
AVCodecContext * pCodecCtx = pFormatCtx->streams[videoStream]->codec; AVCodecContext * pCodecCtx = pFormatCtx->streams[videoStream]->codec;
//寻找视频流的解码器 //Find the decoder for the video stream
AVCodec * pCodec = avcodec_find_decoder(pCodecCtx->codec_id); AVCodec * pCodec = avcodec_find_decoder(pCodecCtx->codec_id);
if(pCodec==NULL) { if(pCodec==NULL) {
LOGE(TAG, "couldn't find Codec."); LOGE(TAG, "couldn't find Codec.");
@ -68,9 +68,9 @@ VIDEO_PLAYER_FUNC(jint, play, jstring filePath, jobject surface){
LOGE(TAG, "Couldn't open codec."); LOGE(TAG, "Couldn't open codec.");
return -1; return -1;
} }
// 获取native window // Get the native window
ANativeWindow* nativeWindow = ANativeWindow_fromSurface(env, surface); ANativeWindow* nativeWindow = ANativeWindow_fromSurface(env, surface);
// 获取视频宽高 // Get the video width and height
int videoWidth = pCodecCtx->width; int videoWidth = pCodecCtx->width;
int videoHeight = pCodecCtx->height; int videoHeight = pCodecCtx->height;
// 设置native window的buffer大小,可自动拉伸 // 设置native window的buffer大小,可自动拉伸
@ -94,7 +94,7 @@ VIDEO_PLAYER_FUNC(jint, play, jstring filePath, jobject surface){
av_image_fill_arrays(pFrameRGBA->data, pFrameRGBA->linesize, buffer, AV_PIX_FMT_RGBA, av_image_fill_arrays(pFrameRGBA->data, pFrameRGBA->linesize, buffer, AV_PIX_FMT_RGBA,
pCodecCtx->width, pCodecCtx->height, 1); pCodecCtx->width, pCodecCtx->height, 1);
// 由于解码出来的帧格式不是RGBA的,在渲染之前需要进行格式转换 // The decoded frame is not in RGBA format, so it must be converted before rendering
struct SwsContext *sws_ctx = sws_getContext(pCodecCtx->width, struct SwsContext *sws_ctx = sws_getContext(pCodecCtx->width,
pCodecCtx->height, pCodecCtx->height,
pCodecCtx->pix_fmt, pCodecCtx->pix_fmt,
@ -110,35 +110,35 @@ VIDEO_PLAYER_FUNC(jint, play, jstring filePath, jobject surface){
AVPacket packet; AVPacket packet;
while(av_read_frame(pFormatCtx, &packet)>=0) { while(av_read_frame(pFormatCtx, &packet)>=0) {
//判断是否为视频流 //Determine if it is a video stream
if(packet.stream_index==videoStream) { if(packet.stream_index==videoStream) {
//对该帧进行解码 //Decode this frame
avcodec_decode_video2(pCodecCtx, pFrame, &frameFinished, &packet); avcodec_decode_video2(pCodecCtx, pFrame, &frameFinished, &packet);
if (frameFinished) { if (frameFinished) {
// lock native window // lock native window
ANativeWindow_lock(nativeWindow, &windowBuffer, 0); ANativeWindow_lock(nativeWindow, &windowBuffer, 0);
// 格式转换 // Format conversion
sws_scale(sws_ctx, (uint8_t const * const *)pFrame->data, sws_scale(sws_ctx, (uint8_t const * const *)pFrame->data,
pFrame->linesize, 0, pCodecCtx->height, pFrame->linesize, 0, pCodecCtx->height,
pFrameRGBA->data, pFrameRGBA->linesize); pFrameRGBA->data, pFrameRGBA->linesize);
// 获取stride // Get the stride
uint8_t * dst = windowBuffer.bits; uint8_t * dst = windowBuffer.bits;
int dstStride = windowBuffer.stride * 4; int dstStride = windowBuffer.stride * 4;
uint8_t * src = pFrameRGBA->data[0]; uint8_t * src = pFrameRGBA->data[0];
int srcStride = pFrameRGBA->linesize[0]; int srcStride = pFrameRGBA->linesize[0];
// 由于window的stride和帧的stride不同,因此需要逐行复制 // The window stride differs from the frame stride, so copy row by row
int h; int h;
for (h = 0; h < videoHeight; h++) { for (h = 0; h < videoHeight; h++) {
memcpy(dst + h * dstStride, src + h * srcStride, (size_t) srcStride); memcpy(dst + h * dstStride, src + h * srcStride, (size_t) srcStride);
} }
ANativeWindow_unlockAndPost(nativeWindow); ANativeWindow_unlockAndPost(nativeWindow);
} }
//延迟等待 //Delayed wait
usleep((unsigned long) (1000 * 40 * play_rate)); usleep((unsigned long) (1000 * 40 * play_rate));
} }
av_packet_unref(&packet); av_packet_unref(&packet);
} }
//释放内存以及关闭文件 //Free up memory and close files
av_free(buffer); av_free(buffer);
av_free(pFrameRGBA); av_free(pFrameRGBA);
av_free(pFrame); av_free(pFrame);
@ -147,12 +147,12 @@ VIDEO_PLAYER_FUNC(jint, play, jstring filePath, jobject surface){
return 0; return 0;
} }
//设置播放速率 //Set the playback rate
VIDEO_PLAYER_FUNC(void, setPlayRate, jfloat playRate){ VIDEO_PLAYER_FUNC(void, setPlayRate, jfloat playRate){
play_rate = playRate; play_rate = playRate;
} }
//获取视频总时长 //Get the total video duration
VIDEO_PLAYER_FUNC(jint, getDuration){ VIDEO_PLAYER_FUNC(jint, getDuration){
return duration; return duration;
} }
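
Both players end up doing the same stride-aware copy into the ANativeWindow buffer: the window stride is given in pixels while the RGBA frame's linesize is in bytes, which is why the copy is done row by row. A minimal sketch with illustrative names, not the exact code above:

#include <android/native_window.h>
#include <stdint.h>
#include <string.h>

// Copy an RGBA frame into the window buffer one row at a time, because the
// destination stride (pixels * 4 bytes) rarely matches the source linesize.
static void render_rgba(ANativeWindow *window, const uint8_t *rgba,
                        int linesize, int height) {
    ANativeWindow_Buffer buffer;
    if (ANativeWindow_lock(window, &buffer, NULL) < 0)
        return;
    uint8_t *dst = (uint8_t *) buffer.bits;
    int dst_stride = buffer.stride * 4;      // stride is in pixels, RGBA is 4 bytes each
    for (int h = 0; h < height; h++) {
        memcpy(dst + h * dst_stride, rgba + h * linesize, (size_t) linesize);
    }
    ANativeWindow_unlockAndPost(window);
}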

@ -5,7 +5,7 @@ import android.media.AudioManager;
import android.media.AudioTrack; import android.media.AudioTrack;
/** /**
* 音频播放器 * Audio player
* Created by frank on 2018/2/1. * Created by frank on 2018/2/1.
*/ */
@ -13,11 +13,11 @@ public class AudioPlayer {
static { static {
System.loadLibrary("media-handle"); System.loadLibrary("media-handle");
} }
//调用AudioTrack播放 //Play using AudioTrack
public native void play(String audioPath); public native void play(String audioPath);
//调用OpenSL ES播放 //Play using OpenSL ES
public native void playAudio(String audioPath); public native void playAudio(String audioPath);
//调用OpenSL ES播放 //Stop OpenSL ES playback
public native void stop(); public native void stop();
public native static void lameInitDefault(); public native static void lameInitDefault();
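
The play/playAudio/stop methods above are bound to the native side through an AUDIO_PLAYER_FUNC macro in the C sources. The macro's real definition is not part of this diff; the following is only an educated guess at its shape, included to make the Java-to-JNI mapping easier to follow.

#include <jni.h>

// Assumed expansion: prefix the JNI-mangled name of com.frank.ffmpeg.AudioPlayer.
// The actual macro lives in the project's native headers and may differ.
#define AUDIO_PLAYER_FUNC(RETURN_TYPE, FUNC_NAME, ...) \
    JNIEXPORT RETURN_TYPE JNICALL Java_com_frank_ffmpeg_AudioPlayer_##FUNC_NAME \
        (JNIEnv *env, jobject thiz, ##__VA_ARGS__)

// With that definition, `public native void stop();` is implemented by the
// AUDIO_PLAYER_FUNC(void, stop) { ... } block shown in audio_player.c above.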

@ -14,7 +14,7 @@ public class FFmpegCmd {
private final static int RESULT_ERROR = 0; private final static int RESULT_ERROR = 0;
//开子线程调用native方法进行音视频处理 //Start a worker thread that calls the native method to process audio and video
public static void execute(final String[] commands, final OnHandleListener onHandleListener){ public static void execute(final String[] commands, final OnHandleListener onHandleListener){
new Thread(new Runnable() { new Thread(new Runnable() {
@Override @Override

@ -5,7 +5,7 @@ import android.media.AudioManager;
import android.media.AudioTrack; import android.media.AudioTrack;
/** /**
* 音视频播放器 * Audio and video player
* Created by frank on 2018/2/12. * Created by frank on 2018/2/12.
*/ */

@ -5,7 +5,7 @@ import android.media.AudioManager;
import android.media.AudioTrack; import android.media.AudioTrack;
/** /**
* 视频播放器 * Video player
* Created by frank on 2018/2/1 * Created by frank on 2018/2/1
*/ */
public class VideoPlayer { public class VideoPlayer {

@ -24,7 +24,7 @@ import static com.frank.ffmpeg.handler.FFmpegHandler.MSG_BEGIN;
import static com.frank.ffmpeg.handler.FFmpegHandler.MSG_FINISH; import static com.frank.ffmpeg.handler.FFmpegHandler.MSG_FINISH;
/** /**
* 使用ffmpeg处理音频 * Process audio with FFmpeg
* Created by frank on 2018/1/23. * Created by frank on 2018/1/23.
*/ */
@ -101,7 +101,7 @@ public class AudioHandleActivity extends BaseActivity {
} }
/** /**
* 调用ffmpeg处理音频 * Call FFmpeg to process audio
* *
* @param srcFile srcFile * @param srcFile srcFile
*/ */
@ -119,7 +119,7 @@ public class AudioHandleActivity extends BaseActivity {
if (useFFmpeg) { //使用FFmpeg转码 if (useFFmpeg) { //使用FFmpeg转码
String transformFile = PATH + File.separator + "transformAudio.mp3"; String transformFile = PATH + File.separator + "transformAudio.mp3";
commandLine = FFmpegUtil.transformAudio(srcFile, transformFile); commandLine = FFmpegUtil.transformAudio(srcFile, transformFile);
} else { //使用MediaCodec与mp3lame转mp3 } else { //Convert to MP3 with MediaCodec and mp3lame
new Thread(new Runnable() { new Thread(new Runnable() {
@Override @Override
public void run() { public void run() {
@ -130,7 +130,7 @@ public class AudioHandleActivity extends BaseActivity {
}).start(); }).start();
} }
break; break;
case R.id.btn_cut://剪切(注意原文件与剪切文件格式一致,文件绝对路径最好不包含中文、特殊字符) case R.id.btn_cut://Cut (the source and output files must have the same format; the absolute paths should avoid Chinese or special characters)
String suffix = FileUtil.getFileSuffix(srcFile); String suffix = FileUtil.getFileSuffix(srcFile);
if (suffix == null || suffix.isEmpty()) { if (suffix == null || suffix.isEmpty()) {
return; return;
@ -138,7 +138,7 @@ public class AudioHandleActivity extends BaseActivity {
String cutFile = PATH + File.separator + "cutAudio" + suffix; String cutFile = PATH + File.separator + "cutAudio" + suffix;
commandLine = FFmpegUtil.cutAudio(srcFile, 10, 15, cutFile); commandLine = FFmpegUtil.cutAudio(srcFile, 10, 15, cutFile);
break; break;
case R.id.btn_concat://合并,支持MP3、AAC、AMR等,不支持PCM裸流,不支持WAV(PCM裸流加音频头) case R.id.btn_concat://Concatenate: supports MP3, AAC, AMR, etc.; raw PCM streams and WAV (raw PCM plus an audio header) are not supported
if (!FileUtil.checkFileExist(appendFile)) { if (!FileUtil.checkFileExist(appendFile)) {
return; return;
} }
@ -159,7 +159,7 @@ public class AudioHandleActivity extends BaseActivity {
String mixFile = PATH + File.separator + "mix" + mixSuffix; String mixFile = PATH + File.separator + "mix" + mixSuffix;
commandLine = FFmpegUtil.mixAudio(srcFile, appendFile, mixFile); commandLine = FFmpegUtil.mixAudio(srcFile, appendFile, mixFile);
break; break;
case R.id.btn_play_audio://解码播放(AudioTrack) case R.id.btn_play_audio://Decode and play (AudioTrack)
new Thread(new Runnable() { new Thread(new Runnable() {
@Override @Override
public void run() { public void run() {
@ -167,7 +167,7 @@ public class AudioHandleActivity extends BaseActivity {
} }
}).start(); }).start();
return; return;
case R.id.btn_play_opensl://解码播放(OpenSL ES) case R.id.btn_play_opensl://Decode and play (OpenSL ES)
new Thread(new Runnable() { new Thread(new Runnable() {
@Override @Override
public void run() { public void run() {
@ -175,8 +175,8 @@ public class AudioHandleActivity extends BaseActivity {
} }
}).start(); }).start();
return; return;
case R.id.btn_audio_encode://音频编码 case R.id.btn_audio_encode://Audio encoding
//可编码成WAV、AAC。如果需要编码成MP3,ffmpeg需要重新编译,把MP3库enable //WAV and AAC are supported; to encode MP3, rebuild FFmpeg with the MP3 library enabled
String pcmFile = PATH + File.separator + "concat.pcm"; String pcmFile = PATH + File.separator + "concat.pcm";
String wavFile = PATH + File.separator + "new.wav"; String wavFile = PATH + File.separator + "new.wav";
//pcm数据的采样率,一般采样率为8000、16000、44100 //pcm数据的采样率,一般采样率为8000、16000、44100
@ -185,10 +185,10 @@ public class AudioHandleActivity extends BaseActivity {
int channel = 1; int channel = 1;
commandLine = FFmpegUtil.encodeAudio(pcmFile, wavFile, sampleRate, channel); commandLine = FFmpegUtil.encodeAudio(pcmFile, wavFile, sampleRate, channel);
break; break;
case R.id.btn_pcm_concat://PCM裸流音频文件合并 case R.id.btn_pcm_concat://Concatenate raw PCM audio files
String srcPCM = PATH + File.separator + "audio.pcm";//第一个pcm文件 String srcPCM = PATH + File.separator + "audio.pcm";//第一个pcm文件
String appendPCM = PATH + File.separator + "audio.pcm";//第二个pcm文件 String appendPCM = PATH + File.separator + "audio.pcm";//第二个pcm文件
String concatPCM = PATH + File.separator + "concat.pcm";//合并后的文件 String concatPCM = PATH + File.separator + "concat.pcm";//The concatenated file
if (!FileUtil.checkFileExist(srcPCM) || !FileUtil.checkFileExist(appendPCM)) { if (!FileUtil.checkFileExist(srcPCM) || !FileUtil.checkFileExist(appendPCM)) {
return; return;
} }
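
The audio-encode branch above passes a raw PCM file plus its sample rate and channel count to FFmpegUtil.encodeAudio(). The exact template that method uses is not shown in this hunk; the sketch below is an assumed example of the kind of command it builds for 16-bit little-endian PCM.

#include <stdio.h>

// Hypothetical helper: wrap raw s16le PCM into a WAV/AAC container by telling
// ffmpeg the input format, sample rate and channel count explicitly.
static void build_encode_cmd(char *cmd, size_t size,
                             const char *pcm_file, const char *out_file,
                             int sample_rate, int channels) {
    snprintf(cmd, size, "ffmpeg -f s16le -ar %d -ac %d -i %s %s",
             sample_rate, channels, pcm_file, out_file);
}

// For the values used above (sampleRate = 8000/16000/44100, channel = 1) this yields,
// for example: ffmpeg -f s16le -ar 44100 -ac 1 -i concat.pcm new.wav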

@ -38,7 +38,7 @@ public class FilterActivity extends BaseActivity implements SurfaceHolder.Callba
private SurfaceHolder surfaceHolder; private SurfaceHolder surfaceHolder;
//surface是否已经创建 //surface是否已经创建
private boolean surfaceCreated; private boolean surfaceCreated;
//是否正在播放 //Whether playback is in progress
private boolean isPlaying; private boolean isPlaying;
//滤镜数组 //滤镜数组
private String[] filters = new String[]{ private String[] filters = new String[]{
@ -53,19 +53,19 @@ public class FilterActivity extends BaseActivity implements SurfaceHolder.Callba
"unsharp" "unsharp"
}; };
private String[] txtArray = new String[]{ private String[] txtArray = new String[]{
"素描", "Sketch",
"鲜明",//hue "Sharp",//hue
"暖蓝", "Warm blue",
"边缘", "Edge",
"九宫格", "Grid",
"均衡", "Balanced",
"矩形", "Rectangle",
"翻转",//vflip上下翻转,hflip是左右翻转 "Flip",//vflip flips vertically, hflip flips horizontally
"锐化" "Sharpen"
}; };
private HorizontalAdapter horizontalAdapter; private HorizontalAdapter horizontalAdapter;
private RecyclerView recyclerView; private RecyclerView recyclerView;
//是否播放音频 //Whether to play audio
private boolean playAudio = true; private boolean playAudio = true;
private ToggleButton btnSound; private ToggleButton btnSound;
private Button btnSelect; private Button btnSelect;
@ -129,7 +129,7 @@ public class FilterActivity extends BaseActivity implements SurfaceHolder.Callba
initViewsWithClick(R.id.btn_select_file); initViewsWithClick(R.id.btn_select_file);
} }
//注册监听 //Register listeners
private void registerLister(){ private void registerLister(){
horizontalAdapter.setOnItemClickListener(new OnItemClickListener() { horizontalAdapter.setOnItemClickListener(new OnItemClickListener() {
@Override @Override
@ -166,7 +166,7 @@ public class FilterActivity extends BaseActivity implements SurfaceHolder.Callba
new Thread(new Runnable() { new Thread(new Runnable() {
@Override @Override
public void run() { public void run() {
//切换播放 //Switch playback
if(isPlaying){ if(isPlaying){
videoPlayer.again(); videoPlayer.again();
} }
@ -176,7 +176,7 @@ public class FilterActivity extends BaseActivity implements SurfaceHolder.Callba
}).start(); }).start();
} }
//设置是否静音 //Set whether to mute
private void setPlayAudio(){ private void setPlayAudio(){
playAudio = !playAudio; playAudio = !playAudio;
videoPlayer.playAudio(playAudio); videoPlayer.playAudio(playAudio);

@ -21,7 +21,7 @@ import com.frank.live.param.VideoParam;
import com.frank.live.LivePusherNew; import com.frank.live.LivePusherNew;
/** /**
* h264与rtmp实时推流直播 * Real-time live streaming with H.264 and RTMP
* Created by frank on 2018/1/28. * Created by frank on 2018/1/28.
*/ */
@ -93,7 +93,7 @@ public class LiveActivity extends BaseActivity implements CompoundButton.OnCheck
livePusher.stopPush(); livePusher.stopPush();
} }
break; break;
case R.id.btn_mute://设置静音 case R.id.btn_mute:// Set mute
Log.i(TAG, "isChecked=" + isChecked); Log.i(TAG, "isChecked=" + isChecked);
livePusher.setMute(isChecked); livePusher.setMute(isChecked);
break; break;

@ -7,7 +7,7 @@ import android.view.View;
import com.frank.ffmpeg.R; import com.frank.ffmpeg.R;
/** /**
* 使用ffmpeg进行音视频处理入口 * Entry point for audio and video processing with FFmpeg
* Created by frank on 2018/1/23. * Created by frank on 2018/1/23.
*/ */
public class MainActivity extends BaseActivity { public class MainActivity extends BaseActivity {
@ -38,22 +38,22 @@ public class MainActivity extends BaseActivity {
public void onViewClick(View v) { public void onViewClick(View v) {
Intent intent = new Intent(); Intent intent = new Intent();
switch (v.getId()){ switch (v.getId()){
case R.id.btn_audio://音频处理 case R.id.btn_audio://Audio processing
intent.setClass(MainActivity.this, AudioHandleActivity.class); intent.setClass(MainActivity.this, AudioHandleActivity.class);
break; break;
case R.id.btn_video://视频处理 case R.id.btn_video://Video processing
intent.setClass(MainActivity.this, VideoHandleActivity.class); intent.setClass(MainActivity.this, VideoHandleActivity.class);
break; break;
case R.id.btn_media://音视频处理 case R.id.btn_media://Audio and video processing
intent.setClass(MainActivity.this, MediaHandleActivity.class); intent.setClass(MainActivity.this, MediaHandleActivity.class);
break; break;
case R.id.btn_play://音视频播放 case R.id.btn_play://Audio and video playback
intent.setClass(MainActivity.this, MediaPlayerActivity.class); intent.setClass(MainActivity.this, MediaPlayerActivity.class);
break; break;
case R.id.btn_push://FFmpeg推流 case R.id.btn_push://FFmpeg推流
intent.setClass(MainActivity.this, PushActivity.class); intent.setClass(MainActivity.this, PushActivity.class);
break; break;
case R.id.btn_live://实时推流直播:AAC音频编码、H264视频编码、RTMP推流 case R.id.btn_live://Real-time live streaming: AAC audio encoding, H.264 video encoding, RTMP push
intent.setClass(MainActivity.this, LiveActivity.class); intent.setClass(MainActivity.this, LiveActivity.class);
break; break;
case R.id.btn_filter://滤镜特效 case R.id.btn_filter://滤镜特效

@ -23,7 +23,7 @@ import static com.frank.ffmpeg.handler.FFmpegHandler.MSG_CONTINUE;
import static com.frank.ffmpeg.handler.FFmpegHandler.MSG_FINISH; import static com.frank.ffmpeg.handler.FFmpegHandler.MSG_FINISH;
/** /**
* 使用ffmpeg进行音视频合成与分离 * Mux and demux audio and video with FFmpeg
* Created by frank on 2018/1/23. * Created by frank on 2018/1/23.
*/ */
public class MediaHandleActivity extends BaseActivity { public class MediaHandleActivity extends BaseActivity {
@ -49,7 +49,7 @@ public class MediaHandleActivity extends BaseActivity {
String muxFile = PATH + File.separator + "media-mux.mp4"; String muxFile = PATH + File.separator + "media-mux.mp4";
try { try {
//使用MediaPlayer获取视频时长 //Use MediaPlayer to get the video duration
MediaPlayer mediaPlayer = new MediaPlayer(); MediaPlayer mediaPlayer = new MediaPlayer();
mediaPlayer.setDataSource(videoFile); mediaPlayer.setDataSource(videoFile);
mediaPlayer.prepare(); mediaPlayer.prepare();
@ -57,7 +57,7 @@ public class MediaHandleActivity extends BaseActivity {
int videoDuration = mediaPlayer.getDuration()/1000; int videoDuration = mediaPlayer.getDuration()/1000;
Log.i(TAG, "videoDuration=" + videoDuration); Log.i(TAG, "videoDuration=" + videoDuration);
mediaPlayer.release(); mediaPlayer.release();
//使用MediaMetadataRetriever获取音频时长 //Use MediaMetadataRetriever to get the audio duration
MediaMetadataRetriever mediaRetriever = new MediaMetadataRetriever(); MediaMetadataRetriever mediaRetriever = new MediaMetadataRetriever();
mediaRetriever.setDataSource(audioFile); mediaRetriever.setDataSource(audioFile);
//单位为ms //单位为ms
@ -65,9 +65,9 @@ public class MediaHandleActivity extends BaseActivity {
int audioDuration = (int)(Long.parseLong(duration)/1000); int audioDuration = (int)(Long.parseLong(duration)/1000);
Log.i(TAG, "audioDuration=" + audioDuration); Log.i(TAG, "audioDuration=" + audioDuration);
mediaRetriever.release(); mediaRetriever.release();
//如果视频时长比音频长,采用音频时长,否则用视频时长 //If the video is longer than the audio, use the audio duration; otherwise use the video duration
int mDuration = Math.min(audioDuration, videoDuration); int mDuration = Math.min(audioDuration, videoDuration);
//使用纯视频与音频进行合成 //Mux the pure video stream with the audio
String[] commandLine = FFmpegUtil.mediaMux(temp, audioFile, mDuration, muxFile); String[] commandLine = FFmpegUtil.mediaMux(temp, audioFile, mDuration, muxFile);
if (ffmpegHandler != null) { if (ffmpegHandler != null) {
ffmpegHandler.isContinue(false); ffmpegHandler.isContinue(false);
@ -141,9 +141,9 @@ public class MediaHandleActivity extends BaseActivity {
} }
switch (viewId){ switch (viewId){
case R.id.btn_mux://音视频合成 case R.id.btn_mux://Audio and video muxing
try { try {
//视频文件有音频,先把纯视频文件抽取出来 //The video file contains audio, so extract the pure video stream first
videoFile = srcFile; videoFile = srcFile;
commandLine = FFmpegUtil.extractVideo(srcFile, temp); commandLine = FFmpegUtil.extractVideo(srcFile, temp);
if (ffmpegHandler != null) { if (ffmpegHandler != null) {
@ -153,7 +153,7 @@ public class MediaHandleActivity extends BaseActivity {
e.printStackTrace(); e.printStackTrace();
} }
break; break;
case R.id.btn_extract_audio://提取音频 case R.id.btn_extract_audio://Extract audio
String extractAudio = PATH + File.separator + "extractAudio.aac"; String extractAudio = PATH + File.separator + "extractAudio.aac";
commandLine = FFmpegUtil.extractAudio(srcFile, extractAudio); commandLine = FFmpegUtil.extractAudio(srcFile, extractAudio);
break; break;

@ -12,7 +12,7 @@ import com.frank.ffmpeg.R;
import com.frank.ffmpeg.util.FileUtil; import com.frank.ffmpeg.util.FileUtil;
/** /**
* 音视频解码播放 * Audio and video decoding and playback
* Created by frank on 2018/2/12. * Created by frank on 2018/2/12.
*/ */

@ -132,7 +132,7 @@ public class VideoHandleActivity extends BaseActivity {
int duration = 20; int duration = 20;
commandLine = FFmpegUtil.cutVideo(srcFile, startTime, duration, cutVideo); commandLine = FFmpegUtil.cutVideo(srcFile, startTime, duration, cutVideo);
break; break;
case R.id.btn_video_concat://视频合并 case R.id.btn_video_concat://Video concatenation
// commandLine = FFmpegUtil.toTs(srcFile, ts1); // commandLine = FFmpegUtil.toTs(srcFile, ts1);
// concatStep ++; // concatStep ++;
// String concatVideo = PATH + File.separator + "concatVideo.mp4"; // String concatVideo = PATH + File.separator + "concatVideo.mp4";
@ -222,7 +222,7 @@ public class VideoHandleActivity extends BaseActivity {
if (!FileUtil.checkFileExist(inputFile1) && !FileUtil.checkFileExist(inputFile2)) { if (!FileUtil.checkFileExist(inputFile1) && !FileUtil.checkFileExist(inputFile2)) {
return; return;
} }
//x、y坐标点需要根据全屏视频与小视频大小,进行计算 //The x/y coordinates are computed from the sizes of the full-screen video and the small video
//比如:全屏视频为320x240,小视频为120x90,那么x=200 y=150 //比如:全屏视频为320x240,小视频为120x90,那么x=200 y=150
int x = 200; int x = 200;
int y = 150; int y = 150;

@ -13,7 +13,7 @@ import java.util.ArrayList;
import java.util.List; import java.util.List;
/** /**
* RecyclerView适配 * RecyclerView adapter
* Created by frank on 2018/6/6. * Created by frank on 2018/6/6.
*/ */

@ -9,7 +9,7 @@ import com.frank.ffmpeg.model.MediaBean;
import com.frank.ffmpeg.tool.JsonParseTool; import com.frank.ffmpeg.tool.JsonParseTool;
/** /**
* Handler消息处理器 * Handler-based message processor
* Created by frank on 2019/11/11. * Created by frank on 2019/11/11.
*/ */
public class FFmpegHandler { public class FFmpegHandler {
@ -37,7 +37,7 @@ public class FFmpegHandler {
} }
/** /**
* 执行ffmpeg命令行 * Execute the FFmpeg command line
* @param commandLine commandLine * @param commandLine commandLine
*/ */
public void executeFFmpegCmd(final String[] commandLine) { public void executeFFmpegCmd(final String[] commandLine) {

@ -10,7 +10,7 @@ import android.view.Surface;
import java.nio.ByteBuffer; import java.nio.ByteBuffer;
/** /**
* 使用MediaExtractor抽帧,MediaCodec解码,然后渲染到Surface * Use MediaExtractor to extract frames, MediaCodec to decode, and render to a Surface
* Created by frank on 2019/11/16. * Created by frank on 2019/11/16.
*/ */

@ -1,7 +1,7 @@
package com.frank.ffmpeg.listener; package com.frank.ffmpeg.listener;
/** /**
* 流程执行监听器 * Listener for execution progress
* Created by frank on 2019/11/11. * Created by frank on 2019/11/11.
*/ */
public interface OnHandleListener { public interface OnHandleListener {

@ -1,7 +1,7 @@
package com.frank.ffmpeg.listener; package com.frank.ffmpeg.listener;
/** /**
* RecyclerView item点击监听 * RecyclerView item click listener
* Created by frank on 2018/6/6. * Created by frank on 2018/6/6.
*/ */

@ -8,14 +8,14 @@ import java.util.List;
import java.util.Locale; import java.util.Locale;
/** /**
* ffmpeg工具拼接命令行处理音视频 * FFmpeg utility for building command lines to process audio and video
* Created by frank on 2018/1/23. * Created by frank on 2018/1/23.
*/ */
public class FFmpegUtil { public class FFmpegUtil {
/** /**
* 使用ffmpeg命令行进行音频转码 * Transcode audio using the FFmpeg command line
* *
* @param srcFile 源文件 * @param srcFile 源文件
* @param targetFile 目标文件后缀指定转码格式 * @param targetFile 目标文件后缀指定转码格式
@ -28,7 +28,7 @@ public class FFmpegUtil {
} }
/** /**
* 使用ffmpeg命令行进行音频剪切 * Cut audio using the FFmpeg command line
* *
* @param srcFile 源文件 * @param srcFile 源文件
* @param startTime 剪切的开始时间(单位为秒) * @param startTime 剪切的开始时间(单位为秒)
@ -44,11 +44,11 @@ public class FFmpegUtil {
} }
/** /**
* 使用ffmpeg命令行进行音频合并 * Concatenate audio using the FFmpeg command line
* *
* @param fileList 合并列表 * @param fileList list of files to concatenate
* @param targetFile 目标文件 * @param targetFile 目标文件
* @return 合并后的文件 * @return the concatenated file
*/ */
public static String[] concatAudio(List<String> fileList, String targetFile) { public static String[] concatAudio(List<String> fileList, String targetFile) {
// ffmpeg -i concat:%s|%s -acodec copy %s // ffmpeg -i concat:%s|%s -acodec copy %s
@ -67,7 +67,7 @@ public class FFmpegUtil {
} }
/** /**
* 使用ffmpeg命令行进行音频混合 * Mix audio using the FFmpeg command line
* *
* @param srcFile 源文件 * @param srcFile 源文件
* @param mixFile 待混合文件 * @param mixFile 待混合文件
@ -84,10 +84,10 @@ public class FFmpegUtil {
/** /**
* 使用ffmpeg命令行进行音视频合成 * Mux audio and video using the FFmpeg command line
* *
* @param videoFile 视频文件 * @param videoFile 视频文件
* @param audioFile 音频文件 * @param audioFile audio file
* @param duration 视频时长 * @param duration 视频时长
* @param muxFile 目标文件 * @param muxFile 目标文件
* @return 合成后的文件 * @return 合成后的文件
@ -101,11 +101,11 @@ public class FFmpegUtil {
} }
/** /**
* 使用ffmpeg命令行进行抽取音频 * Extract audio using the FFmpeg command line
* *
* @param srcFile 原文件 * @param srcFile 原文件
* @param targetFile 目标文件 * @param targetFile 目标文件
* @return 抽取后的音频文件 * @return the extracted audio file
*/ */
public static String[] extractAudio(String srcFile, String targetFile) { public static String[] extractAudio(String srcFile, String targetFile) {
//-vn:video not //-vn:video not
@ -115,7 +115,7 @@ public class FFmpegUtil {
} }
/** /**
* 使用ffmpeg命令行进行抽取视频 * Extract video using the FFmpeg command line
* *
* @param srcFile 原文件 * @param srcFile 原文件
* @param targetFile 目标文件 * @param targetFile 目标文件
@ -130,7 +130,7 @@ public class FFmpegUtil {
/** /**
* 使用ffmpeg命令行进行视频转码 * Transcode video using the FFmpeg command line
* *
* @param srcFile 源文件 * @param srcFile 源文件
* @param targetFile 目标文件后缀指定转码格式 * @param targetFile 目标文件后缀指定转码格式
@ -139,7 +139,7 @@ public class FFmpegUtil {
public static String[] transformVideo(String srcFile, String targetFile) { public static String[] transformVideo(String srcFile, String targetFile) {
// 指定视频的帧率、码率、分辨率 // 指定视频的帧率、码率、分辨率
// String transformVideoCmd = "ffmpeg -i %s -r 25 -b 200 -s 1080x720 %s"; // String transformVideoCmd = "ffmpeg -i %s -r 25 -b 200 -s 1080x720 %s";
// 指定视频编码器:解决有旋转角度的视频,转码后发生旋转的问题 // Specify the video encoder: fixes rotation issues when transcoding videos that carry a rotation angle
// String transformVideoCmd = "ffmpeg -i %s -vcodec libx264 -acodec copy %s"; // String transformVideoCmd = "ffmpeg -i %s -vcodec libx264 -acodec copy %s";
String transformVideoCmd = "ffmpeg -i %s -vcodec copy -acodec copy %s"; String transformVideoCmd = "ffmpeg -i %s -vcodec copy -acodec copy %s";
transformVideoCmd = String.format(transformVideoCmd, srcFile, targetFile); transformVideoCmd = String.format(transformVideoCmd, srcFile, targetFile);
@ -147,7 +147,7 @@ public class FFmpegUtil {
} }
/** /**
* 使用ffmpeg命令行进行视频剪切 * Cut video using the FFmpeg command line
* *
* @param srcFile 源文件 * @param srcFile 源文件
* @param startTime 剪切的开始时间(单位为秒) * @param startTime 剪切的开始时间(单位为秒)
@ -157,14 +157,14 @@ public class FFmpegUtil {
*/ */
@SuppressLint("DefaultLocale") @SuppressLint("DefaultLocale")
public static String[] cutVideo(String srcFile, int startTime, int duration, String targetFile) { public static String[] cutVideo(String srcFile, int startTime, int duration, String targetFile) {
//指定音视频编码器:ffmpeg -i %s -ss %d -t %d -acodec libmp3lame -vcodec libx264 %s //Specify the audio/video encoders: ffmpeg -i %s -ss %d -t %d -acodec libmp3lame -vcodec libx264 %s
String cutVideoCmd = "ffmpeg -i %s -ss %d -t %d -acodec copy -vcodec copy %s"; String cutVideoCmd = "ffmpeg -i %s -ss %d -t %d -acodec copy -vcodec copy %s";
cutVideoCmd = String.format(cutVideoCmd, srcFile, startTime, duration, targetFile); cutVideoCmd = String.format(cutVideoCmd, srcFile, startTime, duration, targetFile);
return cutVideoCmd.split(" "); return cutVideoCmd.split(" ");
} }
/** /**
* 使用ffmpeg命令行进行视频截图 * Take a video screenshot using the FFmpeg command line
* *
* @param srcFile 源文件 * @param srcFile 源文件
* @param time 截图开始时间 * @param time 截图开始时间
@ -178,7 +178,7 @@ public class FFmpegUtil {
} }
/** /**
* 使用ffmpeg命令行给视频添加水印 * Add a watermark to a video using the FFmpeg command line
* *
* @param srcFile 源文件 * @param srcFile 源文件
* @param waterMark 水印文件路径 * @param waterMark 水印文件路径
@ -193,7 +193,7 @@ public class FFmpegUtil {
} }
/** /**
* 使用ffmpeg命令行进行视频转成Gif动图 * Convert a video to an animated GIF using the FFmpeg command line
* *
* @param srcFile 源文件 * @param srcFile 源文件
* @param startTime 开始时间 * @param startTime 开始时间
@ -213,7 +213,7 @@ public class FFmpegUtil {
} }
/** /**
* 使用ffmpeg命令行进行屏幕录制 * Record the screen using the FFmpeg command line
* *
* @param size 视频尺寸大小 * @param size 视频尺寸大小
* @param recordTime 录屏时间 * @param recordTime 录屏时间
@ -230,7 +230,7 @@ public class FFmpegUtil {
} }
/** /**
* 使用ffmpeg命令行进行图片合成视频 * Combine images into a video using the FFmpeg command line
* *
* @param srcFile 源文件 * @param srcFile 源文件
* @param frameRate 合成视频帧率 * @param frameRate 合成视频帧率
@ -252,7 +252,7 @@ public class FFmpegUtil {
* @param srcFile 源文件 * @param srcFile 源文件
* @param resolution 分辨率 * @param resolution 分辨率
* @param targetFile 目标文件 * @param targetFile 目标文件
* @return 转换后的图片命令行 * @return the command line for converting the image
*/ */
@SuppressLint("DefaultLocale") @SuppressLint("DefaultLocale")
public static String[] convertResolution(String srcFile, String resolution, String targetFile) { public static String[] convertResolution(String srcFile, String resolution, String targetFile) {
@ -262,13 +262,13 @@ public class FFmpegUtil {
} }
/** /**
* 音频编码 * Audio encoding
* *
* @param srcFile 源文件pcm裸流 * @param srcFile 源文件pcm裸流
* @param targetFile 编码后目标文件 * @param targetFile the encoded target file
* @param sampleRate 采样率 * @param sampleRate 采样率
* @param channel 声道:单声道为1/立体声道为2 * @param channel 声道:单声道为1/立体声道为2
* @return 音频编码的命令行 * @return the command line for audio encoding
*/ */
@SuppressLint("DefaultLocale") @SuppressLint("DefaultLocale")
public static String[] encodeAudio(String srcFile, String targetFile, int sampleRate, int channel) { public static String[] encodeAudio(String srcFile, String targetFile, int sampleRate, int channel) {
@ -284,7 +284,7 @@ public class FFmpegUtil {
* @param input2 输入文件2 * @param input2 输入文件2
* @param videoLayout 视频布局 * @param videoLayout 视频布局
* @param targetFile 画面拼接文件 * @param targetFile 画面拼接文件
* @return 画面拼接的命令行 * @return the command line for video tiling
*/ */
public static String[] multiVideo(String input1, String input2, String targetFile, int videoLayout) { public static String[] multiVideo(String input1, String input2, String targetFile, int videoLayout) {
// String multiVideo = "ffmpeg -i %s -i %s -i %s -i %s -filter_complex " + // String multiVideo = "ffmpeg -i %s -i %s -i %s -i %s -filter_complex " +
@ -302,10 +302,10 @@ public class FFmpegUtil {
* *
* @param inputFile 输入文件 * @param inputFile 输入文件
* @param targetFile 反序文件 * @param targetFile 反序文件
* @return 视频反序的命令行 * @return the command line for reversing the video
*/ */
public static String[] reverseVideo(String inputFile, String targetFile) { public static String[] reverseVideo(String inputFile, String targetFile) {
//FIXME 音频也反序 //FIXME reverse the audio as well
// String reverseVideo = "ffmpeg -i %s -filter_complex [0:v]reverse[v];[0:a]areverse[a] -map [v] -map [a] %s"; // String reverseVideo = "ffmpeg -i %s -filter_complex [0:v]reverse[v];[0:a]areverse[a] -map [v] -map [a] %s";
String reverseVideo = "ffmpeg -i %s -filter_complex [0:v]reverse[v] -map [v] %s";//单纯视频反序 String reverseVideo = "ffmpeg -i %s -filter_complex [0:v]reverse[v] -map [v] %s";//单纯视频反序
reverseVideo = String.format(reverseVideo, inputFile, targetFile); reverseVideo = String.format(reverseVideo, inputFile, targetFile);
@ -317,7 +317,7 @@ public class FFmpegUtil {
* *
* @param inputFile 输入文件 * @param inputFile 输入文件
* @param targetFile 输出文件 * @param targetFile 输出文件
* @return 视频降噪的命令行 * @return the command line for video denoising
*/ */
public static String[] denoiseVideo(String inputFile, String targetFile) { public static String[] denoiseVideo(String inputFile, String targetFile) {
String reverseVideo = "ffmpeg -i %s -nr 500 %s"; String reverseVideo = "ffmpeg -i %s -nr 500 %s";
@ -333,7 +333,7 @@ public class FFmpegUtil {
* @param duration 持续时间 * @param duration 持续时间
* @param frameRate 帧率 * @param frameRate 帧率
* @param targetFile 输出文件 * @param targetFile 输出文件
* @return 视频抽帧的命令行 * @return the command line for extracting video frames
*/ */
public static String[] videoToImage(String inputFile, int startTime, int duration, int frameRate, String targetFile) { public static String[] videoToImage(String inputFile, int startTime, int duration, int frameRate, String targetFile) {
//-ss:开始时间,单位为秒 //-ss:开始时间,单位为秒
@ -353,7 +353,7 @@ public class FFmpegUtil {
* @param targetFile 输出文件 * @param targetFile 输出文件
* @param x 小视频起点x坐标 * @param x 小视频起点x坐标
* @param y 小视频起点y坐标 * @param y 小视频起点y坐标
* @return 视频画中画的命令行 * @return the command line for picture-in-picture video
*/ */
@SuppressLint("DefaultLocale") @SuppressLint("DefaultLocale")
public static String[] picInPicVideo(String inputFile1, String inputFile2, int x, int y, String targetFile) { public static String[] picInPicVideo(String inputFile1, String inputFile2, int x, int y, String targetFile) {
@ -367,7 +367,7 @@ public class FFmpegUtil {
* *
* @param inputFile inputFile * @param inputFile inputFile
* @param outputFile outputFile * @param outputFile outputFile
* @return 移动moov命令行 * @return the command line for moving the moov atom
*/ */
public static String[] moveMoovAhead(String inputFile, String outputFile) { public static String[] moveMoovAhead(String inputFile, String outputFile) {
String moovCmd = "ffmpeg -i %s -movflags faststart -acodec copy -vcodec copy %s"; String moovCmd = "ffmpeg -i %s -movflags faststart -acodec copy -vcodec copy %s";
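
All of these helpers follow the same pattern: fill a printf-style template, then split it on spaces into the argv array that FFmpegCmd.execute() hands to the native layer (cutVideoCmd.split(" ") above). A minimal C sketch of that pattern for the cut command, with illustrative names and the same no-spaces-in-paths caveat as the Java code:

#include <stdio.h>
#include <string.h>

// Format the cut command, then tokenize it on spaces into argv, mirroring
// String.format(...) followed by split(" ") in FFmpegUtil. Paths containing
// spaces would break this, just as they would in the Java version.
static int build_cut_argv(char *buf, size_t size, char **argv, int max_args,
                          const char *src, int start, int duration, const char *dst) {
    snprintf(buf, size, "ffmpeg -i %s -ss %d -t %d -acodec copy -vcodec copy %s",
             src, start, duration, dst);
    int argc = 0;
    for (char *tok = strtok(buf, " "); tok != NULL && argc < max_args; tok = strtok(NULL, " ")) {
        argv[argc++] = tok;
    }
    return argc;
}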

@ -6,7 +6,7 @@ import java.util.Date;
import java.util.Locale; import java.util.Locale;
/** /**
* 时间转换工具类 * Time conversion utility class
* Created by frank on 2018/11/12. * Created by frank on 2018/11/12.
*/ */
@ -52,7 +52,7 @@ public class TimeUtil {
} }
/** /**
* 获取视频时长 * Get the video duration
* @param time time * @param time time
* @return 视频时长 * @return 视频时长
*/ */
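
TimeUtil's conversion logic itself is not shown in this hunk, so the following is only a hedged illustration of the kind of arithmetic involved: splitting a duration in seconds into hours, minutes and seconds for display.

#include <stdio.h>

// Illustrative only: format a duration in seconds as HH:mm:ss, the kind of
// value getVideoDuration() is used to present.
static void format_duration(long total_seconds, char *out, size_t size) {
    long hours   = total_seconds / 3600;
    long minutes = (total_seconds % 3600) / 60;
    long seconds = total_seconds % 60;
    snprintf(out, size, "%02ld:%02ld:%02ld", hours, minutes, seconds);
}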

@ -1,9 +1,9 @@
<?xml version="1.0" encoding="utf-8"?> <?xml version="1.0" encoding="utf-8"?>
<!--空心圆角矩形 --> <!--Hollow rounded rectangle -->
<shape xmlns:android="http://schemas.android.com/apk/res/android" <shape xmlns:android="http://schemas.android.com/apk/res/android"
android:shape="rectangle" > android:shape="rectangle" >
<corners android:radius="10dp" /><!-- 圆角半径 --> <corners android:radius="10dp" /><!-- 圆角半径 -->
<gradient <gradient
android:endColor="@color/colorPrimary" android:endColor="@color/colorPrimary"
android:startColor="@color/colorAccent" /><!-- 矩形的内部颜色,这里设置成白色,即空心 --> android:startColor="@color/colorAccent" /><!-- Inner color of the rectangle; set to white here, making it hollow -->
</shape> </shape>

@ -1,12 +1,12 @@
<?xml version="1.0" encoding="utf-8"?> <?xml version="1.0" encoding="utf-8"?>
<!--空心圆角矩形 --> <!--Hollow rounded rectangle -->
<shape xmlns:android="http://schemas.android.com/apk/res/android" <shape xmlns:android="http://schemas.android.com/apk/res/android"
android:shape="rectangle" > android:shape="rectangle" >
<corners android:radius="30dp" /><!-- 圆角半径 --> <corners android:radius="30dp" /><!-- 圆角半径 -->
<stroke <stroke
android:width="1dp" android:width="1dp"
android:color="@color/colorBord" /><!--矩形的边线 --> android:color="@color/colorBord" /><!--Border of the rectangle -->
<size <size
android:height="60dp" android:height="60dp"
android:width="60dp" /><!--矩形的宽高 --> android:width="60dp" /><!--Width and height of the rectangle -->
</shape> </shape>

@ -1,12 +1,12 @@
<?xml version="1.0" encoding="utf-8"?> <?xml version="1.0" encoding="utf-8"?>
<!--空心圆角矩形 --> <!--Hollow rounded rectangle -->
<shape xmlns:android="http://schemas.android.com/apk/res/android" <shape xmlns:android="http://schemas.android.com/apk/res/android"
android:shape="rectangle" > android:shape="rectangle" >
<corners android:radius="20dp" /><!-- 圆角半径 --> <corners android:radius="20dp" /><!-- 圆角半径 -->
<size <size
android:height="40dp" android:height="40dp"
android:width="40dp" /><!--矩形的宽高 --> android:width="40dp" /><!--Width and height of the rectangle -->
<gradient <gradient
android:endColor="@color/redBtn" android:endColor="@color/redBtn"
android:startColor="@color/redBtn" /><!-- 矩形的内部颜色,这里设置成白色,即空心 --> android:startColor="@color/redBtn" /><!-- Inner color of the rectangle; set to white here, making it hollow -->
</shape> </shape>

@ -1,9 +1,9 @@
<?xml version="1.0" encoding="utf-8"?> <?xml version="1.0" encoding="utf-8"?>
<!--空心圆角矩形 --> <!--Hollow rounded rectangle -->
<shape xmlns:android="http://schemas.android.com/apk/res/android" <shape xmlns:android="http://schemas.android.com/apk/res/android"
android:shape="rectangle" > android:shape="rectangle" >
<corners android:radius="10dp" /><!-- 圆角半径 --> <corners android:radius="10dp" /><!-- 圆角半径 -->
<gradient <gradient
android:endColor="@android:color/white" android:endColor="@android:color/white"
android:startColor="@android:color/white" /><!-- 矩形的内部颜色,这里设置成白色,即空心 --> android:startColor="@android:color/white" /><!-- Inner color of the rectangle; set to white here, making it hollow -->
</shape> </shape>

@ -0,0 +1,75 @@
<?xml version="1.0" encoding="utf-8"?>
<resources>
<string name="app_name">FFmpegAndroid</string>
<string name="audio_transform">Audio transcoding</string>
<string name="audio_cut">Audio cutting</string>
<string name="audio_concat">Audio merging</string>
<string name="audio_mix">Audio mixing</string>
<string name="audio_play">Audio decoding (AudioTrack playback)</string>
<string name="audio_opensl">Audio decoding (OpenSL ES playback)</string>
<string name="audio_encode">Audio encoding</string>
<string name="pcm_concat">PCM merging</string>
<string name="audio_handle">Audio processing</string>
<string name="media_handle">Audio and video processing</string>
<string name="video_handle">Video processing</string>
<string name="video_push">Local streaming</string>
<string name="video_live">Real-time live streaming</string>
<string name="media_mux">Audio and video muxing</string>
<string name="media_extra_audio">Extract audio</string>
<string name="media_extract_video">Extract video</string>
<string name="media_play">Audio and video playback</string>
<string name="media_probe">Parse multimedia format</string>
<string name="video_cut">Video cutting</string>
<string name="video_concat">Video concatenation</string>
<string name="video_water_mark">Video watermark</string>
<string name="video_to_gif">Video to GIF</string>
<string name="video_transform">Video transcoding</string>
<string name="video_screen_shot">Video screenshot</string>
<string name="video_screen_record">Screen recording</string>
<string name="video_from_photo">Images to video</string>
<string name="video_extract_frame">Video frame extraction</string>
<string name="video_part_zoom">Partial video zoom</string>
<string name="video_play">Video playback</string>
<string name="video_filter">Filter effects</string>
<string name="video_slow">Slow motion</string>
<string name="video_fast">Fast forward</string>
<string name="video_multi">Video tiling</string>
<string name="video_reverse">Reverse video</string>
<string name="video_denoise">Video denoising</string>
<string name="video_image">Video to images</string>
<string name="video_pip">Picture in picture</string>
<string name="video_preview">Playback preview</string>
<string name="video_moov">Move MOOV ahead</string>
<string name="swap">Switch</string>
<string name="start">Start</string>
<string name="stop">Stop</string>
<string name="mute">Mute</string>
<string name="sound">Sound</string>
<string name="sound_open">Sound on</string>
<string name="sound_close">Sound off</string>
<string name="video_sketch">Sketch</string>
<string name="video_hue">Hue</string>
<string name="video_lut">LUT</string>
<string name="video_edge">Edge</string>
<string name="video_blur">Blur</string>
<string name="video_grid">Grid</string>
<string name="video_rotate">Rotate</string>
<string name="video_flip">Flip</string>
<string name="video_box">Rectangle</string>
<string name="video_flash">Flash</string>
<string name="video_balance">Color balance</string>
<string name="video_sharp">Sharpen</string>
<string name="select_file">Select a file</string>
<string name="please_select">Please select a valid file</string>
<string name="file_not_found">File does not exist</string>
<string name="please_click_select">Please tap the menu in the upper-right corner to select a file</string>
<string name="wrong_audio_format">Not an audio file</string>
<string name="wrong_video_format">Not a video file</string>
<string name="converting_video">Reversing video&#8230;</string>
<string name="tip_not_mp4_video">This video is not MP4, so the moov atom cannot be moved forward</string>
</resources>

@ -0,0 +1,77 @@
<?xml version="1.0" encoding="utf-8"?>
<resources>
<string name="app_name">FFmpegAndroid</string>
<string name="audio_transform">音频转码</string>
<string name="audio_cut">音频剪切</string>
<string name="audio_concat">音频合并</string>
<string name="audio_mix">音频混合</string>
<string name="audio_play">音频解码AudioTrack播放</string>
<string name="audio_opensl">音频解码OpenSL播放</string>
<string name="audio_encode">音频编码</string>
<string name="pcm_concat">PCM合并</string>
<string name="audio_handle">音频处理</string>
<string name="media_handle">音视频处理</string>
<string name="video_handle">视频处理</string>
<string name="video_push">本地推流直播</string>
<string name="video_live">实时推流直播</string>
<string name="media_mux">音视频合成</string>
<string name="media_extra_audio">提取音频</string>
<string name="media_extract_video">提取视频</string>
<string name="media_play">音视频播放</string>
<string name="media_probe">解析多媒体格式</string>
<string name="video_cut">视频剪切</string>
<string name="video_concat">视频拼接</string>
<string name="video_water_mark">视频水印</string>
<string name="video_to_gif">视频转Gif</string>
<string name="video_transform">视频转码</string>
<string name="video_screen_shot">视频截图</string>
<string name="video_screen_record">屏幕录制</string>
<string name="video_from_photo">图片合成视频</string>
<string name="video_extract_frame">视频抽帧</string>
<string name="video_part_zoom">视频局部放大</string>
<string name="video_play">视频播放</string>
<string name="video_filter">滤镜特效</string>
<string name="video_slow">慢放</string>
<string name="video_fast">快进</string>
<string name="video_multi">画面拼接</string>
<string name="video_reverse">视频倒播</string>
<string name="video_denoise">视频降噪</string>
<string name="video_image">视频转图片</string>
<string name="video_pip">视频画中画</string>
<string name="video_preview">播放预览</string>
<string name="video_moov">MOOV前移</string>
<string name="swap">切换</string>
<string name="start">开始</string>
<string name="stop">停止</string>
<string name="mute">Mute</string>
<string name="sound">声音</string>
<string name="sound_open">声音开</string>
<string name="sound_close">声音关</string>
<string name="video_sketch">sketch</string>
<string name="video_hue">hue</string>
<string name="video_lut">lut</string>
<string name="video_edge">边缘</string>
<string name="video_blur">模糊</string>
<string name="video_grid">九宫格</string>
<string name="video_rotate">旋转</string>
<string name="video_flip">翻转</string>
<string name="video_box">矩形</string>
<string name="video_flash">闪烁</string>
<string name="video_balance">色彩平衡</string>
<string name="video_sharp">锐化</string>
<string name="select_file">选择文件</string>
<string name="please_select">请选择正确文件</string>
<string name="file_not_found">文件不存在</string>
<string name="please_click_select">请点击右上角菜单选择文件</string>
<string name="wrong_audio_format">非 Audio文件</string>
<string name="wrong_video_format">非视频文件</string>
<string name="converting_video">正在反转视频&#8230;</string>
<string name="tip_not_mp4_video">该视频不是mp4,无法进行moov前移操作</string>
</resources>

@ -1,76 +1,74 @@
<resources> <resources>
<string name="app_name">FFmpegAndroid</string> <string name="app_name">FFmpegAndroid</string>
<string name="audio_transform">音频转码</string> <string name="audio_transform"> Audio transcoding</string>
<string name="audio_cut">音频剪切</string> <string name="audio_cut"> Audio cut</string>
<string name="audio_concat">音频合并</string> <string name="audio_concat"> Audio merge</string>
<string name="audio_mix">音频混合</string> <string name="audio_mix">Audio mix</string>
<string name="audio_play">音频解码AudioTrack播放</string> <string name="audio_play"> Audio decoding AudioTrack play</string>
<string name="audio_opensl">音频解码OpenSL播放</string> <string name="audio_opensl"> Audio decoding OpenSLPlay</string>
<string name="audio_encode">音频编码</string> <string name="audio_encode"> Audio coding </string>
<string name="pcm_concat">PCM合并</string> <string name="pcm_concat">PCMmerge</string>
<string name="audio_handle">音频处理</string> <string name="audio_handle">Audio processing</string>
<string name="media_handle">音视频处理</string> <string name="media_handle">Audio and video processing</string>
<string name="video_handle">视频处理</string> <string name="video_handle">Video processing</string>
<string name="video_push">本地推流直播</string> <string name="video_push">Local streaming live</string>
<string name="video_live">实时推流直播</string> <string name="video_live">Live streaming live</string>
<string name="media_mux">音视频合成</string> <string name="media_mux">Audio and video synthesis</string>
<string name="media_extra_audio">提取音频</string> <string name="media_extra_audio"> Extract Audio </string>
<string name="media_extract_video">提取视频</string> <string name="media_extract_video"> Extract video </string>
<string name="media_play">音视频播放</string> <string name="media_play"> Audio and Video Play </string>
<string name="media_probe">解析多媒体格式</string> <string name="media_probe"> Parsing multimedia formats </string>
<string name="video_cut"> Video cut </string>
<string name="video_concat"> Video stitching </string>
<string name="video_water_mark"> Video watermark </string>
<string name="video_to_gif"> Video to Gif </string>
<string name="video_transform"> Video transcoding </string>
<string name="video_screen_shot"> video screenshot </string>
<string name="video_screen_record"> Screen Recording </string>
<string name="video_from_photo"> Image synthesis video </string>
<string name="video_extract_frame"> Video frame </string>
<string name="video_part_zoom"> Zoom in video </string>
<string name="video_play"> Video Play </string>
<string name="video_filter"> Filter effects </string>
<string name="video_slow"> Slow playback </string>
<string name="video_fast"> fast forward </string>
<string name="video_multi"> Screen stitching </string>
<string name="video_reverse"> Reverse video </string>
<string name="video_denoise"> Video Noise Reduction </string>
<string name="video_image"> Video to Image </string>
<string name="video_pip"> Video picture-in-picture </string>
<string name="video_preview"> Play preview </string>
<string name="video_moov"> MOOV forward </string>
<string name="video_cut">视频剪切</string> <string name="swap"> Switch </string>
<string name="video_concat">视频拼接</string> <string name="start"> Start </string>
<string name="video_water_mark">视频水印</string> <string name="stop"> Stop </string>
<string name="video_to_gif">视频转Gif</string> <string name="mute"> Mute </string>
<string name="video_transform">视频转码</string> <string name="sound"> sound </string>
<string name="video_screen_shot">视频截图</string> <string name="sound_open"> Sound On </string>
<string name="video_screen_record">屏幕录制</string> <string name="sound_close"> Sound Off </string>
<string name="video_from_photo">图片合成视频</string>
<string name="video_extract_frame">视频抽帧</string>
<string name="video_part_zoom">视频局部放大</string>
<string name="video_play">视频播放</string>
<string name="video_filter">滤镜特效</string>
<string name="video_slow">慢放</string>
<string name="video_fast">快进</string>
<string name="video_multi">画面拼接</string>
<string name="video_reverse">视频倒播</string>
<string name="video_denoise">视频降噪</string>
<string name="video_image">视频转图片</string>
<string name="video_pip">视频画中画</string>
<string name="video_preview">播放预览</string>
<string name="video_moov">MOOV前移</string>
<string name="swap">切换</string> <string name="video_sketch"> sketch </string>
<string name="start">开始</string>
<string name="stop">停止</string>
<string name="mute">静音</string>
<string name="sound">声音</string>
<string name="sound_open">声音开</string>
<string name="sound_close">声音关</string>
<string name="video_sketch">素描</string>
<string name="video_hue"> hue </string> <string name="video_hue"> hue </string>
<string name="video_lut"> lut </string> <string name="video_lut"> lut </string>
<string name="video_edge">边缘</string> <string name="video_edge"> Edge </string>
<string name="video_blur">模糊</string> <string name="video_blur"> Vague </string>
<string name="video_grid"> 九宫格 </string> <string name="video_grid"> 九宫格 </string>
<string name="video_rotate">旋转</string> <string name="video_rotate"> rotate </string>
<string name="video_flip">翻转</string> <string name="video_flip"> Flip </string>
<string name="video_box">矩形</string> <string name="video_box"> Rectangle </string>
<string name="video_flash">闪烁</string> <string name="video_flash"> Flashing </string>
<string name="video_balance">色彩平衡</string> <string name="video_balance"> Color balance </string>
<string name="video_sharp">锐化</string> <string name="video_sharp"> Sharpening </string>
<string name="select_file">选择文件</string>
<string name="please_select">请选择正确文件</string>
<string name="file_not_found">文件不存在</string>
<string name="please_click_select">请点击右上角菜单选择文件</string>
<string name="wrong_audio_format">非音频文件</string>
<string name="wrong_video_format">非视频文件</string>
<string name="converting_video">正在反转视频&#8230;</string>
<string name="tip_not_mp4_video">该视频不是mp4,无法进行moov前移操作</string>
<string name="select_file"> Select a file </string>
<string name="please_select"> Please select the correct file </string>
<string name="file_not_found"> File does not exist </string>
<string name="please_click_select"> Please click the menu in the upper right corner to select a file </string>
<string name="wrong_audio_format"> Non-Audio file </string>
<string name="wrong_video_format"> non-video file </string>
<string name="converting_video">Reversing video&#8230;</string>
<string name="tip_not_mp4_video"> This video is not mp4 and cannot be moved forward with moov </string>
</resources> </resources>
