From f1f97c8947f8d1c2c91ec1d589dde9c0b751eab1 Mon Sep 17 00:00:00 2001 From: nickzt Date: Sat, 14 Mar 2020 15:57:34 +0200 Subject: [PATCH] start & add some translations --- Live/src/main/AndroidManifest.xml | 14 +- Live/src/main/cpp/AudioStream.cpp | 14 +- Live/src/main/cpp/RtmpPusher.cpp | 18 +-- Live/src/main/cpp/VideoStream.cpp | 8 +- Live/src/main/cpp/VideoStream.h | 2 +- .../java/com/frank/live/LivePusherNew.java | 36 ++--- .../java/com/frank/live/PushActivity.java | 22 +-- .../java/com/frank/live/param/AudioParam.java | 2 +- .../java/com/frank/live/param/VideoParam.java | 2 +- .../com/frank/live/stream/AudioStream.java | 4 +- .../com/frank/live/stream/CameraHelper.java | 2 +- .../java/com/frank/live/util/YUVUtil.java | 2 +- Live/src/main/res/layout/activity_push.xml | 4 +- Live/src/main/res/values-en/strings.xml | 7 + Live/src/main/res/values-zh-rCN/strings.xml | 7 + Live/src/main/res/values/strings.xml | 6 +- .../living/activity/MultiScreenActivity.java | 12 +- README.md | 18 +-- app/src/main/AndroidManifest.xml | 14 +- app/src/main/cpp/audio_player.c | 46 +++--- app/src/main/cpp/ffmpeg_cmd.c | 2 +- app/src/main/cpp/ffmpeg_pusher.cpp | 6 +- app/src/main/cpp/media_player.c | 74 +++++----- app/src/main/cpp/openSL_audio_player.c | 72 +++++----- app/src/main/cpp/video_filter.c | 84 +++++------ app/src/main/cpp/video_player.c | 36 ++--- .../java/com/frank/ffmpeg/AudioPlayer.java | 8 +- .../main/java/com/frank/ffmpeg/FFmpegCmd.java | 2 +- .../java/com/frank/ffmpeg/MediaPlayer.java | 2 +- .../java/com/frank/ffmpeg/VideoPlayer.java | 2 +- .../ffmpeg/activity/AudioHandleActivity.java | 22 +-- .../frank/ffmpeg/activity/FilterActivity.java | 28 ++-- .../frank/ffmpeg/activity/LiveActivity.java | 4 +- .../frank/ffmpeg/activity/MainActivity.java | 12 +- .../ffmpeg/activity/MediaHandleActivity.java | 16 +-- .../ffmpeg/activity/MediaPlayerActivity.java | 2 +- .../ffmpeg/activity/VideoHandleActivity.java | 4 +- .../ffmpeg/adapter/HorizontalAdapter.java | 2 +- .../frank/ffmpeg/handler/FFmpegHandler.java | 4 +- .../frank/ffmpeg/hardware/HardwareDecode.java | 2 +- .../ffmpeg/listener/OnHandleListener.java | 2 +- .../ffmpeg/listener/OnItemClickListener.java | 2 +- .../com/frank/ffmpeg/util/FFmpegUtil.java | 64 ++++----- .../java/com/frank/ffmpeg/util/TimeUtil.java | 4 +- app/src/main/res/drawable/btn.xml | 4 +- app/src/main/res/drawable/btn_circle.xml | 6 +- app/src/main/res/drawable/btn_point.xml | 6 +- .../main/res/drawable/white_background.xml | 4 +- app/src/main/res/values-en/strings.xml | 75 ++++++++++ app/src/main/res/values-zh-rCN/strings.xml | 77 ++++++++++ app/src/main/res/values/strings.xml | 134 +++++++++--------- 51 files changed, 583 insertions(+), 419 deletions(-) create mode 100644 Live/src/main/res/values-en/strings.xml create mode 100644 Live/src/main/res/values-zh-rCN/strings.xml create mode 100644 app/src/main/res/values-en/strings.xml create mode 100644 app/src/main/res/values-zh-rCN/strings.xml diff --git a/Live/src/main/AndroidManifest.xml b/Live/src/main/AndroidManifest.xml index d93668d..2483740 100644 --- a/Live/src/main/AndroidManifest.xml +++ b/Live/src/main/AndroidManifest.xml @@ -14,14 +14,14 @@ android:roundIcon="@mipmap/ic_launcher_round" android:supportsRtl="true" android:theme="@style/AppTheme" > - - - - + + + - - - + + + \ No newline at end of file diff --git a/Live/src/main/cpp/AudioStream.cpp b/Live/src/main/cpp/AudioStream.cpp index 8a7fd48..f99c598 100644 --- a/Live/src/main/cpp/AudioStream.cpp +++ b/Live/src/main/cpp/AudioStream.cpp @@ -20,16 +20,16 @@ void 
AudioStream::setAudioCallback(AudioCallback audioCallback) { } void AudioStream::setAudioEncInfo(int samplesInHZ, int channels) { - //打开编码器 + //打开 coding Device mChannels = channels; - //一次最大能输入编码器的样本数量 (一个样本是16位 2字节) - //编码后的最大字节数 + //一次最大能输入 coding Device的样本数量 (一个样本是16位 2字节) + // coding 后的最大字节数 audioCodec = faacEncOpen(static_cast(samplesInHZ), static_cast(channels), &inputSamples, &maxOutputBytes); - //设置编码器参数 + //设置 coding Device参数 faacEncConfigurationPtr config = faacEncGetCurrentConfiguration(audioCodec); //指定为 mpeg4 标准 config->mpegVersion = MPEG4; @@ -37,11 +37,11 @@ void AudioStream::setAudioEncInfo(int samplesInHZ, int channels) { config->aacObjectType = LOW; //16位 config->inputFormat = FAAC_INPUT_16BIT; - // 编码出原始数据 + // coding 出原始数据 config->outputFormat = 0; faacEncSetConfiguration(audioCodec, config); - //输出缓冲区 编码后的数据 用这个缓冲区来保存 + //输出缓冲区 coding 后的数据 用这个缓冲区来保存 buffer = new u_char[maxOutputBytes]; } @@ -74,7 +74,7 @@ RTMPPacket *AudioStream::getAudioTag() { } void AudioStream::encodeData(int8_t *data) { - //返回编码后数据字节的长度 + //返回 coding 后数据字节的长度 int byteLen = faacEncEncode(audioCodec, reinterpret_cast(data), static_cast(inputSamples), buffer, diff --git a/Live/src/main/cpp/RtmpPusher.cpp b/Live/src/main/cpp/RtmpPusher.cpp index d02600b..29c5f07 100644 --- a/Live/src/main/cpp/RtmpPusher.cpp +++ b/Live/src/main/cpp/RtmpPusher.cpp @@ -26,23 +26,23 @@ JavaVM *javaVM; //调用类 jobject jobject_error; -/***************与Java层对应**************/ -//视频编码器打开失败 +/*************** versus Java层对应**************/ +// Video encoding Device Open failed const int ERROR_VIDEO_ENCODER_OPEN = 0x01; -//视频帧编码失败 +//Video frame encoding failed const int ERROR_VIDEO_ENCODE = 0x02; -//音频编码器打开失败 +// Audio coding Device Open failed const int ERROR_AUDIO_ENCODER_OPEN = 0x03; -//音频帧编码失败 +// Audio帧 coding 失败 const int ERROR_AUDIO_ENCODE = 0x04; -//RTMP连接失败 +//RTMP Connection failed const int ERROR_RTMP_CONNECT = 0x05; -//RTMP连接流失败 +//RTMP Connection flow failed const int ERROR_RTMP_CONNECT_STREAM = 0x06; -//RTMP发送数据包失败 +//RTMP Failed to send packet const int ERROR_RTMP_SEND_PACKET = 0x07; -/***************与Java层对应**************/ +/*************** versus Java层对应**************/ //当调用System.loadLibrary时,会回调这个方法 jint JNICALL JNI_OnLoad(JavaVM *vm, void *reserved) { diff --git a/Live/src/main/cpp/VideoStream.cpp b/Live/src/main/cpp/VideoStream.cpp index a6431d1..2fa8522 100644 --- a/Live/src/main/cpp/VideoStream.cpp +++ b/Live/src/main/cpp/VideoStream.cpp @@ -38,8 +38,8 @@ void VideoStream::setVideoEncInfo(int width, int height, int fps, int bitrate) { DELETE(pic_in); } - //打开x264编码器 - //x264编码器的属性 + //打开x264 coding Device + //x264 coding Device的属性 x264_param_t param; x264_param_default_preset(¶m, "ultrafast", "zerolatency"); param.i_level_idc = 32; @@ -73,7 +73,7 @@ void VideoStream::setVideoEncInfo(int width, int height, int fps, int bitrate) { param.i_threads = 1; x264_param_apply_profile(¶m, "baseline"); - //打开编码器 + //打开 coding Device videoCodec = x264_encoder_open(¶m); pic_in = new x264_picture_t; x264_picture_alloc(pic_in, X264_CSP_I420, width, height); @@ -163,7 +163,7 @@ void VideoStream::sendSpsPps(uint8_t *sps, uint8_t *pps, int sps_len, int pps_le //版本 packet->m_body[i++] = 0x01; - //编码规格 + // coding 规格 packet->m_body[i++] = sps[1]; packet->m_body[i++] = sps[2]; packet->m_body[i++] = sps[3]; diff --git a/Live/src/main/cpp/VideoStream.h b/Live/src/main/cpp/VideoStream.h index 8d84fa6..bb99e2c 100644 --- a/Live/src/main/cpp/VideoStream.h +++ b/Live/src/main/cpp/VideoStream.h @@ -14,7 +14,7 @@ public: ~VideoStream(); 
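
For context on the AudioStream hunk above: the FAAC calls its comments describe follow a fixed open/configure sequence. The minimal C sketch below is illustrative only and is not code from this patch; it assumes the stock libfaac API and shows why the output buffer is allocated with maxOutputBytes — faacEncOpen reports inputSamples (the largest number of 16-bit samples accepted per encode call, across all channels) and maxOutputBytes (the worst-case size of one encoded AAC frame).

#include <faac.h>
#include <stddef.h>

/* Open and configure a FAAC encoder the way AudioStream::setAudioEncInfo does (sketch only). */
static faacEncHandle open_aac_encoder(int sample_rate, int channels,
                                      unsigned long *input_samples,
                                      unsigned long *max_output_bytes) {
    faacEncHandle enc = faacEncOpen((unsigned long) sample_rate,
                                    (unsigned int) channels,
                                    input_samples,      /* max 16-bit samples per call */
                                    max_output_bytes);  /* worst-case encoded frame size */
    if (enc == NULL)
        return NULL;
    faacEncConfigurationPtr cfg = faacEncGetCurrentConfiguration(enc);
    cfg->mpegVersion   = MPEG4;             /* MPEG-4 AAC */
    cfg->aacObjectType = LOW;               /* AAC-LC profile */
    cfg->inputFormat   = FAAC_INPUT_16BIT;  /* 16-bit PCM input */
    cfg->outputFormat  = 0;                 /* raw AAC without ADTS header, as in the patch */
    faacEncSetConfiguration(enc, cfg);
    return enc;
}
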
- //创建x264编码器 + //创建x264 coding Device void setVideoEncInfo(int width, int height, int fps, int bitrate); void encodeData(int8_t *data); diff --git a/Live/src/main/java/com/frank/live/LivePusherNew.java b/Live/src/main/java/com/frank/live/LivePusherNew.java index 95721b7..0ff0b8b 100644 --- a/Live/src/main/java/com/frank/live/LivePusherNew.java +++ b/Live/src/main/java/com/frank/live/LivePusherNew.java @@ -13,19 +13,19 @@ import com.frank.live.stream.VideoStreamNew; public class LivePusherNew { - //视频编码器打开失败 + // Video encoding Device Open failed private final static int ERROR_VIDEO_ENCODER_OPEN = 0x01; - //视频帧编码失败 + //Video frame encoding failed private final static int ERROR_VIDEO_ENCODE = 0x02; - //音频编码器打开失败 + // Audio coding Device Open failed private final static int ERROR_AUDIO_ENCODER_OPEN = 0x03; - //音频帧编码失败 + // Audio帧 coding 失败 private final static int ERROR_AUDIO_ENCODE = 0x04; - //RTMP连接失败 + //RTMP Connection failed private final static int ERROR_RTMP_CONNECT = 0x05; - //RTMP连接流失败 + //RTMP Connection flow failed private final static int ERROR_RTMP_CONNECT_STREAM = 0x06; - //RTMP发送数据包失败 + //RTMP Failed to send packet private final static int ERROR_RTMP_SEND_PACKET = 0x07; static { @@ -60,9 +60,9 @@ public class LivePusherNew { } /** - * 设置静音 + * Set mute * - * @param isMute 是否静音 + * @param isMute Whether to mute */ public void setMute(boolean isMute) { audioStream.setMute(isMute); @@ -88,36 +88,36 @@ public class LivePusherNew { } /** - * 当native报错时,回调这个方法 + * Call this method when native reports an error * * @param errCode errCode */ public void errorFromNative(int errCode) { - //停止推流 + //Stop streaming stopPush(); if (liveStateChangeListener != null) { String msg = ""; switch (errCode) { case ERROR_VIDEO_ENCODER_OPEN: - msg = "视频编码器打开失败..."; + msg = " Video encoding Device Open failed ..."; break; case ERROR_VIDEO_ENCODE: - msg = "视频帧编码失败..."; + msg = "Video frame encoding failed..."; break; case ERROR_AUDIO_ENCODER_OPEN: - msg = "音频编码器打开失败..."; + msg = " Audio coding Device Open failed ..."; break; case ERROR_AUDIO_ENCODE: - msg = "音频帧编码失败..."; + msg = " Audio帧 coding 失败..."; break; case ERROR_RTMP_CONNECT: - msg = "RTMP连接失败..."; + msg = "RTMP Connection failed..."; break; case ERROR_RTMP_CONNECT_STREAM: - msg = "RTMP连接流失败..."; + msg = "RTMP Connection flow failed ..."; break; case ERROR_RTMP_SEND_PACKET: - msg = "RTMP发送数据包失败..."; + msg = "RTMP Failed to send packet..."; break; default: break; diff --git a/Live/src/main/java/com/frank/live/PushActivity.java b/Live/src/main/java/com/frank/live/PushActivity.java index d79e801..7524098 100644 --- a/Live/src/main/java/com/frank/live/PushActivity.java +++ b/Live/src/main/java/com/frank/live/PushActivity.java @@ -50,7 +50,7 @@ public class PushActivity extends Activity implements Callback { private Spinner beautyTypeSelector; private ImageView img_photo; - //拍照 + //Take a picture private boolean takePhoto; private final static int videoWidth = 640; @@ -58,7 +58,7 @@ public class PushActivity extends Activity implements Callback { private final static String[] permissions = new String[]{Manifest.permission.CAMERA}; private final static int CODE_CAMERA = 1001; - private final static String[] beautySelector = new String[]{"美颜", "冷酷", "日出","素描","白猫", "浪漫", "原图"}; + private final static String[] beautySelector = new String[]{"Beauty", "Cold", "sunrise","sketch","White cat", "romantic", "Original image"}; @Override public void onCreate(Bundle savedInstanceState) { @@ -83,11 +83,11 @@ public class PushActivity extends Activity implements 
Callback { private void initView(){ //SurfaceView mSmartCameraView = findViewById(R.id.gl_surfaceview); - //美颜类型 + //Beauty类型 beautyTypeSelector = findViewById(R.id.beauty_type_selctor); - //静音 + //Mute btnMute = findViewById(R.id.button_mute); - //拍照 + //Take a picture img_photo = findViewById(R.id.img_photo); } @@ -137,13 +137,13 @@ public class PushActivity extends Activity implements Callback { is_mute = !is_mute; if ( is_mute ) - btnMute.setText("取消静音"); + btnMute.setText("Unmute"); else - btnMute.setText("静音"); + btnMute.setText("Mute"); } }); - //预览数据回调(RGBA格式) + //Preview data callback (RGBA格式) mSmartCameraView.setPreviewCallback(new SmartCameraView.PreviewCallback() { @Override public void onGetRgbaFrame(byte[] data, int width, int height) { @@ -171,9 +171,9 @@ public class PushActivity extends Activity implements Callback { } /** - * 拍照 - * @param data 预览数据 - * @param width 图片宽度 + * Take a picture + * @param data Preview data + * @param width Picture width * @param height 图片高度 */ private void doTakePhoto(byte[] data, int width, int height){ diff --git a/Live/src/main/java/com/frank/live/param/AudioParam.java b/Live/src/main/java/com/frank/live/param/AudioParam.java index 0737765..8180a75 100644 --- a/Live/src/main/java/com/frank/live/param/AudioParam.java +++ b/Live/src/main/java/com/frank/live/param/AudioParam.java @@ -2,7 +2,7 @@ package com.frank.live.param; /** - * 音频相关参数 + * Audio Related parameters * Created by frank on 2018/1/28. */ diff --git a/Live/src/main/java/com/frank/live/param/VideoParam.java b/Live/src/main/java/com/frank/live/param/VideoParam.java index 2a9f228..8b42334 100644 --- a/Live/src/main/java/com/frank/live/param/VideoParam.java +++ b/Live/src/main/java/com/frank/live/param/VideoParam.java @@ -1,7 +1,7 @@ package com.frank.live.param; /** - * 视频相关参数 + * 视频 Related parameters * Created by frank on 2018/1/28. */ diff --git a/Live/src/main/java/com/frank/live/stream/AudioStream.java b/Live/src/main/java/com/frank/live/stream/AudioStream.java index 8ad0a83..bc04320 100644 --- a/Live/src/main/java/com/frank/live/stream/AudioStream.java +++ b/Live/src/main/java/com/frank/live/stream/AudioStream.java @@ -74,8 +74,8 @@ public class AudioStream { } /** - * 设置静音 - * @param isMute 是否静音 + * Set mute + * @param isMute Whether to mute */ public void setMute(boolean isMute){ this.isMute = isMute; diff --git a/Live/src/main/java/com/frank/live/stream/CameraHelper.java b/Live/src/main/java/com/frank/live/stream/CameraHelper.java index 6de4a7d..6217af7 100644 --- a/Live/src/main/java/com/frank/live/stream/CameraHelper.java +++ b/Live/src/main/java/com/frank/live/stream/CameraHelper.java @@ -41,7 +41,7 @@ public class CameraHelper implements SurfaceHolder.Callback, Camera.PreviewCallb private void stopPreview() { if (mCamera != null) { - //预览数据回调接口 + //Preview data callback 接口 mCamera.setPreviewCallback(null); //停止预览 mCamera.stopPreview(); diff --git a/Live/src/main/java/com/frank/live/util/YUVUtil.java b/Live/src/main/java/com/frank/live/util/YUVUtil.java index 6d58fba..6cdce00 100644 --- a/Live/src/main/java/com/frank/live/util/YUVUtil.java +++ b/Live/src/main/java/com/frank/live/util/YUVUtil.java @@ -1,7 +1,7 @@ package com.frank.live.util; /** - * YUV与RGB转换工具类 + * YUV versus RGB Conversion tools * Created by frank on 2018/7/1. 
*/ diff --git a/Live/src/main/res/layout/activity_push.xml b/Live/src/main/res/layout/activity_push.xml index 2a56b4c..1bb2251 100644 --- a/Live/src/main/res/layout/activity_push.xml +++ b/Live/src/main/res/layout/activity_push.xml @@ -23,7 +23,7 @@ android:layout_height="wrap_content" android:layout_alignParentRight="true" android:src="@drawable/ic_camera_switch" - android:text="拍照" + android:text="Take a picture" android:clickable="true" android:focusable="true" android:tint="@color/colorPrimary" /> @@ -32,7 +32,7 @@ android:id="@+id/button_mute" android:layout_width="wrap_content" android:layout_height="wrap_content" - android:text=" 静音" + android:text=" Mute" android:layout_alignParentRight="true" android:layout_marginTop="10dp" android:layout_below="@+id/img_photo"/> diff --git a/Live/src/main/res/values-en/strings.xml b/Live/src/main/res/values-en/strings.xml new file mode 100644 index 0000000..8d9ffa2 --- /dev/null +++ b/Live/src/main/res/values-en/strings.xml @@ -0,0 +1,7 @@ + + + live + Switch + Start + stop + \ No newline at end of file diff --git a/Live/src/main/res/values-zh-rCN/strings.xml b/Live/src/main/res/values-zh-rCN/strings.xml new file mode 100644 index 0000000..6e7e789 --- /dev/null +++ b/Live/src/main/res/values-zh-rCN/strings.xml @@ -0,0 +1,7 @@ + + + live + 切换 + 开始 + 停止 + \ No newline at end of file diff --git a/Live/src/main/res/values/strings.xml b/Live/src/main/res/values/strings.xml index 82d655b..fd7e089 100644 --- a/Live/src/main/res/values/strings.xml +++ b/Live/src/main/res/values/strings.xml @@ -1,6 +1,6 @@ live - 切换 - 开始 - 停止 + Switch + Start + stop diff --git a/OnLive/src/main/java/com/frank/living/activity/MultiScreenActivity.java b/OnLive/src/main/java/com/frank/living/activity/MultiScreenActivity.java index 21d5af5..55bd151 100644 --- a/OnLive/src/main/java/com/frank/living/activity/MultiScreenActivity.java +++ b/OnLive/src/main/java/com/frank/living/activity/MultiScreenActivity.java @@ -48,7 +48,7 @@ public class MultiScreenActivity extends AppCompatActivity { //四分屏模式还是全屏模式 private boolean isMultiScreen; - //保存客户端ip与通道数对应关系 + //保存客户端ip versus 通道数对应关系 private HashMap clientMap = new HashMap<>(); //记录每个通道的投屏状态 private TreeMap channelMap = new TreeMap<>(); @@ -114,7 +114,7 @@ public class MultiScreenActivity extends AppCompatActivity { mVideoView1.setIjkPlayerListener(new IjkPlayerListener() { @Override public void onIjkPlayer(IjkMediaPlayer ijkMediaPlayer) { - //设置播放器option + //设置PlayDeviceoption setOptions(ijkMediaPlayer); } }); @@ -161,7 +161,7 @@ public class MultiScreenActivity extends AppCompatActivity { } /** - * 配置播放器参数项 + * 配置PlayDevice参数项 */ private void setOptions(IjkMediaPlayer ijkPlayer) { if (ijkPlayer == null) @@ -181,7 +181,7 @@ public class MultiScreenActivity extends AppCompatActivity { ijkPlayer.setOption(IjkMediaPlayer.OPT_CATEGORY_PLAYER, "max_cached_duration", 30);//最大缓存时长 ijkPlayer.setOption(IjkMediaPlayer.OPT_CATEGORY_PLAYER, "infbuf", 1);//是否限制输入缓存数 ijkPlayer.setOption(IjkMediaPlayer.OPT_CATEGORY_FORMAT, "fflags", "nobuffer"); - //设置播放前的最大探测时间,分析码流时长:默认1024*1000 + //设置Play前的最大探测时间,分析码流时长:默认1024*1000 ijkPlayer.setOption(IjkMediaPlayer.OPT_CATEGORY_FORMAT, "analyzedmaxduration", 100); //ijkPlayer.setOption(IjkMediaPlayer.OPT_CATEGORY_FORMAT, "rtsp_transport", "tcp");//tcp传输数据 } @@ -208,7 +208,7 @@ public class MultiScreenActivity extends AppCompatActivity { } /** - * 自定义广播接收器 + * 自定义广播接收Device */ private class CustomReceiver extends BroadcastReceiver { @Override @@ -271,7 +271,7 @@ public class MultiScreenActivity extends 
AppCompatActivity { } /** - * 获取当前投屏通道 + * Obtain当前投屏通道 * * @return idleChannel */ diff --git a/README.md b/README.md index 1a452df..fa158cd 100644 --- a/README.md +++ b/README.md @@ -3,13 +3,13 @@ android端基于FFmpeg库的使用。
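
As the later hunks show (FFmpegUtil, FFmpegCmd, ffmpeg_cmd.c), most of the operations listed in this README are implemented by building an argv-style ffmpeg command line in Java and handing it to a native run() entry point. The C sketch below is an illustrative, simplified restatement of that bridge; exec_ffmpeg_command is a hypothetical name and the run() signature is assumed from the ffmpeg_cmd.c hunk, so treat it as a sketch rather than the project's actual implementation.

#include <jni.h>
#include <stdlib.h>
#include <string.h>

int run(int argc, char **argv);  /* ffmpeg entry point exposed by the bundled sources (assumed signature) */

/* Convert the Java String[] command into a C argv array, run it, then free the copies. */
static jint exec_ffmpeg_command(JNIEnv *env, jobjectArray commands) {
    int argc = (*env)->GetArrayLength(env, commands);
    char **argv = (char **) calloc((size_t) argc, sizeof(char *));
    for (int i = 0; i < argc; i++) {
        jstring jstr = (jstring) (*env)->GetObjectArrayElement(env, commands, i);
        const char *temp = (*env)->GetStringUTFChars(env, jstr, NULL);
        argv[i] = strdup(temp);                          /* keep a copy we own */
        (*env)->ReleaseStringUTFChars(env, jstr, temp);  /* release the JNI string */
    }
    int result = run(argc, argv);                        /* execute the ffmpeg command */
    for (int i = 0; i < argc; i++)
        free(argv[i]);
    free(argv);
    return result;
}

Copying each string before releasing it matters because ffmpeg may keep using the argv entries for the whole duration of run().
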
Reference scripts are included for building the ffmpeg, shine, mp3lame and x264 sources.
Audio and video processing features currently covered:
-- #### 音频剪切、拼接 -- #### 音频混音 -- #### 音频转码 -- #### 音视频合成 -- #### 音频抽取 -- #### 音频解码播放 -- #### 音频编码 +- #### Audio剪切、拼接 +- #### Audio混音 +- #### Audio转码 +- #### Audio and video synthesis +- #### Audio抽取 +- #### Audio decoding Play +- #### Audio coding - #### 视频抽取 - #### 视频剪切 - #### 视频转码 @@ -22,10 +22,10 @@ android端基于FFmpeg库的使用。
- #### 视频反序倒播 - #### 视频画中画 - #### 图片合成视频 -- #### 视频解码播放 +- #### 视频 decoding Play - #### 本地直播推流 - #### 实时直播推流 -- #### 音视频解码播放 +- #### 音视频 decoding Play - #### OpenGL+GPUImage滤镜 - #### FFmpeg的AVFilter滤镜 - #### 使用mp3lame库进行mp3转码 diff --git a/app/src/main/AndroidManifest.xml b/app/src/main/AndroidManifest.xml index cc53340..fa231ea 100644 --- a/app/src/main/AndroidManifest.xml +++ b/app/src/main/AndroidManifest.xml @@ -27,20 +27,20 @@ - + - + - + - + - + - + @@ -48,7 +48,7 @@ - + diff --git a/app/src/main/cpp/audio_player.c b/app/src/main/cpp/audio_player.c index 99d22f4..ce86fe6 100644 --- a/app/src/main/cpp/audio_player.c +++ b/app/src/main/cpp/audio_player.c @@ -6,7 +6,7 @@ #include //封装格式 #include "libavformat/avformat.h" -//解码 +// decoding #include "libavcodec/avcodec.h" //缩放 #include "libswscale/swscale.h" @@ -25,17 +25,17 @@ AUDIO_PLAYER_FUNC(void, play, jstring input_jstr) { //注册组件 av_register_all(); AVFormatContext *pFormatCtx = avformat_alloc_context(); - //打开音频文件 + //打开 Audio文件 if(avformat_open_input(&pFormatCtx,input_cstr,NULL,NULL) != 0){ - LOGE(TAG, "无法打开音频文件"); + LOGE(TAG, "无法打开 Audio文件"); return; } - //获取输入文件信息 + //Obtain输入文件信息 if(avformat_find_stream_info(pFormatCtx,NULL) < 0){ - LOGE(TAG, "无法获取输入文件信息"); + LOGE(TAG, "无法Obtain输入文件信息"); return; } - //获取音频流索引位置 + //Obtain Audio流索引位置 int i = 0, audio_stream_idx = -1; for(; i < pFormatCtx->nb_streams;i++){ if(pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_AUDIO){ @@ -44,32 +44,32 @@ AUDIO_PLAYER_FUNC(void, play, jstring input_jstr) { } } - //获取音频解码器 + //Obtain Audio decoding Device AVCodecContext *codecCtx = pFormatCtx->streams[audio_stream_idx]->codec; AVCodec *codec = avcodec_find_decoder(codecCtx->codec_id); if(codec == NULL){ - LOGE(TAG, "无法获取解码器"); + LOGE(TAG, "无法Obtain decoding Device"); return; } - //打开解码器 + //打开 decoding Device if(avcodec_open2(codecCtx,codec,NULL) < 0){ - LOGE(TAG, "无法打开解码器"); + LOGE(TAG, "无法打开 decoding Device"); return; } //压缩数据 AVPacket *packet = (AVPacket *)av_malloc(sizeof(AVPacket)); //解压缩数据 AVFrame *frame = av_frame_alloc(); - //frame->16bit 44100 PCM 统一音频采样格式与采样率 + //frame->16bit 44100 PCM Unified audio sampling format and sampling rate SwrContext *swrCtx = swr_alloc(); - //输入的采样格式 + // Input sampling format enum AVSampleFormat in_sample_fmt = codecCtx->sample_fmt; //输出采样格式16bit PCM enum AVSampleFormat out_sample_fmt = AV_SAMPLE_FMT_S16; //输入采样率 int in_sample_rate = codecCtx->sample_rate; - //输出采样率 + //Output sampling rate int out_sample_rate = in_sample_rate; //声道布局(2个声道,默认立体声stereo) uint64_t in_ch_layout = codecCtx->channel_layout; @@ -82,7 +82,7 @@ AUDIO_PLAYER_FUNC(void, play, jstring input_jstr) { 0, NULL); swr_init(swrCtx); - //输出的声道个数 + //Number of output channels int out_channel_nb = av_get_channel_layout_nb_channels(out_ch_layout); jclass player_class = (*env)->GetObjectClass(env,thiz); @@ -101,40 +101,40 @@ AUDIO_PLAYER_FUNC(void, play, jstring input_jstr) { jmethodID audio_track_play_mid = (*env)->GetMethodID(env,audio_track_class,"play","()V"); (*env)->CallVoidMethod(env,audio_track,audio_track_play_mid); - //获取write()方法 + //Obtainwrite()方法 jmethodID audio_track_write_mid = (*env)->GetMethodID(env,audio_track_class,"write","([BII)I"); //16bit 44100 PCM 数据 uint8_t *out_buffer = (uint8_t *)av_malloc(MAX_AUDIO_FRAME_SIZE); int got_frame = 0,index = 0, ret; - //不断读取编码数据 + //不断读取 coding 数据 while(av_read_frame(pFormatCtx,packet) >= 0){ - //解码音频类型的Packet + // decoding Audio类型的Packet if(packet->stream_index == audio_stream_idx){ - //解码 + // decoding ret = 
avcodec_decode_audio4(codecCtx,frame,&got_frame,packet); if(ret < 0){ break; } - //解码一帧成功 + //Successfully decoded a frame if(got_frame > 0){ LOGI(TAG, "decode frame count=%d", index++); - //音频格式转换 + // AudioFormat conversion swr_convert(swrCtx, &out_buffer, MAX_AUDIO_FRAME_SIZE,(const uint8_t **)frame->data,frame->nb_samples); int out_buffer_size = av_samples_get_buffer_size(NULL, out_channel_nb, frame->nb_samples, out_sample_fmt, 1); jbyteArray audio_sample_array = (*env)->NewByteArray(env,out_buffer_size); jbyte* sample_byte_array = (*env)->GetByteArrayElements(env,audio_sample_array,NULL); - //拷贝缓冲数据 + //Copy buffered data memcpy(sample_byte_array, out_buffer, (size_t) out_buffer_size); //释放数组 (*env)->ReleaseByteArrayElements(env,audio_sample_array,sample_byte_array,0); - //调用AudioTrack的write方法进行播放 + //调用AudioTrack的write方法进行Play (*env)->CallIntMethod(env,audio_track,audio_track_write_mid, audio_sample_array,0,out_buffer_size); - //释放局部引用 + //Release local references (*env)->DeleteLocalRef(env,audio_sample_array); usleep(1000 * 16); } diff --git a/app/src/main/cpp/ffmpeg_cmd.c b/app/src/main/cpp/ffmpeg_cmd.c index 6ec89dc..a658259 100644 --- a/app/src/main/cpp/ffmpeg_cmd.c +++ b/app/src/main/cpp/ffmpeg_cmd.c @@ -14,7 +14,7 @@ FFMPEG_FUNC(jint, handle, jobjectArray commands) { strcpy(argv[i], temp); (*env)->ReleaseStringUTFChars(env, jstr, temp); } - //执行ffmpeg命令 + // carried out ffmpeg命令 result = run(argc, argv); //释放内存 for (i = 0; i < argc; i++) { diff --git a/app/src/main/cpp/ffmpeg_pusher.cpp b/app/src/main/cpp/ffmpeg_pusher.cpp index 55e20df..2d87995 100644 --- a/app/src/main/cpp/ffmpeg_pusher.cpp +++ b/app/src/main/cpp/ffmpeg_pusher.cpp @@ -34,11 +34,11 @@ PUSHER_FUNC(jint, pushStream, jstring filePath, jstring liveUrl) { LOGE(TAG, "file_path=%s", file_path); LOGE(TAG, "live_url=%s", live_url); - //注册所有组件 + //Register all components av_register_all(); //初始化网络 avformat_network_init(); - //打开输入文件 + //Open input file if((ret = avformat_open_input(&in_format, file_path, 0, 0)) < 0){ LOGE(TAG, "could not open input file..."); goto end; @@ -100,7 +100,7 @@ PUSHER_FUNC(jint, pushStream, jstring filePath, jstring liveUrl) { LOGE(TAG, "could not write header..."); goto end; } - //获取开始时间 + //Obtain开始时间 start_time = av_gettime(); //开始循环读一帧数据 while (1){ diff --git a/app/src/main/cpp/media_player.c b/app/src/main/cpp/media_player.c index 4ad6d98..3d7a394 100644 --- a/app/src/main/cpp/media_player.c +++ b/app/src/main/cpp/media_player.c @@ -74,7 +74,7 @@ jint JNICALL JNI_OnLoad(JavaVM* vm, void* reserved){ //初始化输入格式上下文 int init_input_format_context(MediaPlayer* player, const char* file_name){ - //注册所有组件 + //Register all components av_register_all(); //分配上下文 player->format_context = avformat_alloc_context(); @@ -114,11 +114,11 @@ int init_input_format_context(MediaPlayer* player, const char* file_name){ return 0; } -//打开音视频解码器 +//打开音视频 decoding Device int init_condec_context(MediaPlayer* player){ - //获取codec上下文指针 + //Obtaincodec上下文指针 player->video_codec_context = player->format_context->streams[player->video_stream_index]->codec; - //寻找视频流的解码器 + //Looking for video streaming decoding Device player->video_codec = avcodec_find_decoder(player->video_codec_context->codec_id); if(player->video_codec == NULL) { LOGE(TAG, "couldn't find video Codec."); @@ -138,25 +138,25 @@ int init_condec_context(MediaPlayer* player){ LOGE(TAG, "Couldn't open audio codec."); return -1; } - // 获取视频宽高 + // Obtain视频宽高 player->video_width = player->video_codec_context->width; player->video_height = 
player->video_codec_context->height; return 0; } -//视频解码 +//视频 decoding void video_player_prepare(MediaPlayer* player, JNIEnv* env, jobject surface){ - // 获取native window + // Obtainnative window player->native_window = ANativeWindow_fromSurface(env, surface); } -//获取当前播放时间 +//Obtain当前Play时间 int64_t get_play_time(MediaPlayer* player){ return (int64_t)(av_gettime() - player->start_time); } /** - * 延迟等待,音视频同步 + * Delayed wait,音视频同步 */ void player_wait_for_frame(MediaPlayer *player, int64_t stream_time) { pthread_mutex_lock(&player->mutex); @@ -191,7 +191,7 @@ void player_wait_for_frame(MediaPlayer *player, int64_t stream_time) { pthread_mutex_unlock(&player->mutex); } -//视频解码 +//视频 decoding int decode_video(MediaPlayer* player, AVPacket* packet){ // 设置native window的buffer大小,可自动拉伸 ANativeWindow_setBuffersGeometry(player->native_window, player->video_width, @@ -213,7 +213,7 @@ int decode_video(MediaPlayer* player, AVPacket* packet){ av_image_fill_arrays(player->rgba_frame->data, player->rgba_frame->linesize, player->buffer, AV_PIX_FMT_RGBA, player->video_width, player->video_height, 1); - // 由于解码出来的帧格式不是RGBA的,在渲染之前需要进行格式转换 + // 由于 decoding 出来的帧格式不是RGBA的,在渲染之前需要进行Format conversion struct SwsContext *sws_ctx = sws_getContext( player->video_width, player->video_height, @@ -227,7 +227,7 @@ int decode_video(MediaPlayer* player, AVPacket* packet){ NULL); int frameFinished; - //对该帧进行解码 + //对该帧进行 decoding int ret = avcodec_decode_video2(player->video_codec_context, player->yuv_frame, &frameFinished, packet); if(ret < 0){ LOGE(TAG, "avcodec_decode_video2 error..."); @@ -236,16 +236,16 @@ int decode_video(MediaPlayer* player, AVPacket* packet){ if (frameFinished) { // lock native window ANativeWindow_lock(player->native_window, &windowBuffer, 0); - // 格式转换 + // Format conversion sws_scale(sws_ctx, (uint8_t const * const *)player->yuv_frame->data, player->yuv_frame->linesize, 0, player->video_height, player->rgba_frame->data, player->rgba_frame->linesize); - // 获取stride + // Obtainstride uint8_t * dst = windowBuffer.bits; int dstStride = windowBuffer.stride * 4; uint8_t * src = player->rgba_frame->data[0]; int srcStride = player->rgba_frame->linesize[0]; - // 由于window的stride和帧的stride不同,因此需要逐行复制 + // 由于window的stride And framed stride different , So you need to copy line by line int h; for (h = 0; h < player->video_height; h++) { memcpy(dst + h * dstStride, src + h * srcStride, (size_t) srcStride); @@ -254,7 +254,7 @@ int decode_video(MediaPlayer* player, AVPacket* packet){ //计算延迟 int64_t pts = av_frame_get_best_effort_timestamp(player->yuv_frame); AVStream *stream = player->format_context->streams[player->video_stream_index]; - //转换(不同时间基时间转换) + //转换( different 时间基时间转换) int64_t time = av_rescale_q(pts, stream->time_base, AV_TIME_BASE_Q); //音视频帧同步 player_wait_for_frame(player, time); @@ -264,17 +264,17 @@ int decode_video(MediaPlayer* player, AVPacket* packet){ return 0; } -//音频解码初始化 +// Audio decoding 初始化 void audio_decoder_prepare(MediaPlayer* player) { player->swrContext = swr_alloc(); - //输入的采样格式 + // Input sampling format enum AVSampleFormat in_sample_fmt = player->audio_codec_context->sample_fmt; //输出采样格式16bit PCM player->out_sample_fmt = AV_SAMPLE_FMT_S16; //输入采样率 int in_sample_rate = player->audio_codec_context->sample_rate; - //输出采样率 + //Output sampling rate player->out_sample_rate = in_sample_rate; //声道布局(2个声道,默认立体声stereo) uint64_t in_ch_layout = player->audio_codec_context->channel_layout; @@ -286,11 +286,11 @@ void audio_decoder_prepare(MediaPlayer* player) { in_ch_layout, in_sample_fmt, 
in_sample_rate, 0, NULL); swr_init(player->swrContext); - //输出的声道个数 + //Number of output channels player->out_channel_nb = av_get_channel_layout_nb_channels(out_ch_layout); } -//音频播放器 +// AudioPlayDevice void audio_player_prepare(MediaPlayer* player, JNIEnv* env, jclass jthiz){ jclass player_class = (*env)->GetObjectClass(env,jthiz); if(!player_class){ @@ -311,7 +311,7 @@ void audio_player_prepare(MediaPlayer* player, JNIEnv* env, jclass jthiz){ (*env)->CallVoidMethod(env, audio_track, audio_track_play_mid); player->audio_track = (*env)->NewGlobalRef(env, audio_track); - //获取write()方法 + //Obtainwrite()方法 player->audio_track_write_mid = (*env)->GetMethodID(env,audio_track_class,"write","([BII)I"); //16bit 44100 PCM 数据 @@ -320,18 +320,18 @@ void audio_player_prepare(MediaPlayer* player, JNIEnv* env, jclass jthiz){ player->audio_frame = av_frame_alloc(); } -//音频解码 +// Audio decoding int decode_audio(MediaPlayer* player, AVPacket* packet){ int got_frame = 0, ret; - //解码 + // decoding ret = avcodec_decode_audio4(player->audio_codec_context, player->audio_frame, &got_frame, packet); if(ret < 0){ LOGE(TAG, "avcodec_decode_audio4 error..."); return -1; } - //解码一帧成功 + //Successfully decoded a frame if(got_frame > 0){ - //音频格式转换 + // AudioFormat conversion swr_convert(player->swrContext, &player->audio_buffer, MAX_AUDIO_FRAME_SIZE, (const uint8_t **)player->audio_frame->data, player->audio_frame->nb_samples); int out_buffer_size = av_samples_get_buffer_size(NULL, player->out_channel_nb, player->audio_frame->nb_samples, player->out_sample_fmt, 1); @@ -349,14 +349,14 @@ int decode_audio(MediaPlayer* player, AVPacket* packet){ (*javaVM)->AttachCurrentThread(javaVM, &env, NULL); jbyteArray audio_sample_array = (*env)->NewByteArray(env,out_buffer_size); jbyte* sample_byte_array = (*env)->GetByteArrayElements(env,audio_sample_array,NULL); - //拷贝缓冲数据 + //Copy buffered data memcpy(sample_byte_array, player->audio_buffer, (size_t) out_buffer_size); //释放数组 (*env)->ReleaseByteArrayElements(env,audio_sample_array,sample_byte_array,0); - //调用AudioTrack的write方法进行播放 + //调用AudioTrack的write方法进行Play (*env)->CallIntMethod(env, player->audio_track, player->audio_track_write_mid, audio_sample_array,0,out_buffer_size); - //释放局部引用 + //Release local references (*env)->DeleteLocalRef(env,audio_sample_array); } } @@ -394,7 +394,7 @@ void* write_packet_to_queue(void* arg){ break; } if(pkt->stream_index == player->video_stream_index || pkt->stream_index == player->audio_stream_index){ - //根据AVPacket->stream_index获取对应的队列 + //根据AVPacket->stream_indexObtain对应的队列 AVPacketQueue *queue = player->packets[pkt->stream_index]; pthread_mutex_lock(&player->mutex); AVPacket* data = queue_push(queue, &player->mutex, &player->cond); @@ -405,12 +405,12 @@ void* write_packet_to_queue(void* arg){ } } -//音视频解码线程(消费者) +//音视频 decoding 线程(消费者) void* decode_func(void* arg){ Decoder *decoder_data = (Decoder*)arg; MediaPlayer *player = decoder_data->player; int stream_index = decoder_data->stream_index; - //根据stream_index获取对应的AVPacket队列 + //根据stream_indexObtain对应的AVPacket队列 AVPacketQueue *queue = player->packets[stream_index]; int ret = 0; @@ -421,7 +421,7 @@ void* decode_func(void* arg){ if(stream_index == player->video_stream_index) {//视频流 ret = decode_video(player, packet); - } else if(stream_index == player->audio_stream_index) {//音频流 + } else if(stream_index == player->audio_stream_index) {// Audio流 ret = decode_audio(player, packet); } av_packet_unref(packet); @@ -444,16 +444,16 @@ MEDIA_PLAYER_FUNC(jint, setup, jstring filePath, jobject 
surface){ if(ret < 0){ return ret; } - //初始化音视频解码器 + //初始化音视频 decoding Device ret = init_condec_context(player); if(ret < 0){ return ret; } //初始化视频surface video_player_prepare( player, env, surface); - //初始化音频相关参数 + //初始化 Audio Related parameters audio_decoder_prepare(player); - //初始化音频播放器 + //初始化 AudioPlayDevice audio_player_prepare(player, env, thiz); //初始化音视频packet队列 init_queue(player, PACKET_SIZE); @@ -486,7 +486,7 @@ MEDIA_PLAYER_FUNC(jint, play){ } MEDIA_PLAYER_FUNC(void, release){ - //释放内存以及关闭文件 + //Free up memory and close files free(player->audio_track); free(player->audio_track_write_mid); av_free(player->buffer); diff --git a/app/src/main/cpp/openSL_audio_player.c b/app/src/main/cpp/openSL_audio_player.c index 2f2abe5..c34d011 100644 --- a/app/src/main/cpp/openSL_audio_player.c +++ b/app/src/main/cpp/openSL_audio_player.c @@ -21,11 +21,11 @@ SLObjectItf engineObject = NULL; SLEngineItf engineEngine; -//输出混音器接口 +//输出混音Device接口 SLObjectItf outputMixObject = NULL; SLEnvironmentalReverbItf outputMixEnvironmentalReverb = NULL; -//缓冲播放器接口 +//缓冲PlayDevice接口 SLObjectItf bqPlayerObject = NULL; SLPlayItf bqPlayerPlay; SLAndroidSimpleBufferQueueItf bqPlayerBufferQueue; @@ -53,14 +53,14 @@ int createAudioPlayer(int *rate, int *channel, const char *file_name) ; // 释放相关资源 int releaseAudioPlayer(); -// 获取PCM数据, 自动回调获取 +// ObtainPCM数据, 自动回调Obtain int getPCM(void **pcm, size_t *pcmSize) ; -//播放回调方法 +//Play回调方法 void bqPlayerCallback(SLAndroidSimpleBufferQueueItf bufferQueueItf, void *context) { bufferSize = 0; getPCM(&buffer, &bufferSize); - //如果buffer不为空,入待播放队列 + //如果buffer不为空,入待Play队列 if (NULL != buffer && 0 != bufferSize) { SLresult result; result = (*bqPlayerBufferQueue)->Enqueue(bqPlayerBufferQueue, buffer, bufferSize); @@ -80,16 +80,16 @@ void createEngine() { LOGI(TAG, "slCreateEngine=%d", result); result = (*engineObject)->Realize(engineObject, SL_BOOLEAN_FALSE); LOGI(TAG, "engineObject->Realize=%d", result); - //获取引擎接口 + //Obtain引擎接口 result = (*engineObject)->GetInterface(engineObject, SL_IID_ENGINE, &engineEngine); LOGI(TAG, "engineObject->GetInterface=%d", result); - //创建输出混音器 + //创建输出混音Device result = (*engineEngine)->CreateOutputMix(engineEngine, &outputMixObject, 0, 0, 0); LOGI(TAG, "CreateOutputMix=%d", result); - //关联输出混音器 + //关联输出混音Device result = (*outputMixObject)->Realize(outputMixObject, SL_BOOLEAN_FALSE); LOGI(TAG, "outputMixObject->Realize=%d", result); - //获取reverb接口 + //Obtainreverb接口 result = (*outputMixObject)->GetInterface(outputMixObject, SL_IID_ENVIRONMENTALREVERB, &outputMixEnvironmentalReverb); LOGI(TAG, "outputMixObject->GetInterface=%d", result); @@ -101,11 +101,11 @@ void createEngine() { } -//创建带有缓冲队列的音频播放器 +//创建带有缓冲队列的 AudioPlayDevice void createBufferQueueAudioPlayer(int rate, int channel, int bitsPerSample) { SLresult result; - //配置音频源 + //配置 Audio源 SLDataLocator_AndroidSimpleBufferQueue buffer_queue = {SL_DATALOCATOR_ANDROIDSIMPLEBUFFERQUEUE, 2}; SLDataFormat_PCM format_pcm; format_pcm.formatType = SL_DATAFORMAT_PCM; @@ -120,26 +120,26 @@ void createBufferQueueAudioPlayer(int rate, int channel, int bitsPerSample) { format_pcm.endianness = SL_BYTEORDER_LITTLEENDIAN; SLDataSource audioSrc = {&buffer_queue, &format_pcm}; - //配置音频池 + //配置 Audio池 SLDataLocator_OutputMix loc_outmix = {SL_DATALOCATOR_OUTPUTMIX, outputMixObject}; SLDataSink audioSnk = {&loc_outmix, NULL}; - //创建音频播放器 + //创建 AudioPlayDevice const SLInterfaceID ids[3] = {SL_IID_BUFFERQUEUE, SL_IID_EFFECTSEND, SL_IID_VOLUME}; const SLboolean req[3] = {SL_BOOLEAN_TRUE, SL_BOOLEAN_TRUE, 
SL_BOOLEAN_TRUE}; result = (*engineEngine)->CreateAudioPlayer(engineEngine, &bqPlayerObject, &audioSrc, &audioSnk, 3, ids, req); LOGI(TAG, "CreateAudioPlayer=%d", result); - //关联播放器 + //关联PlayDevice result = (*bqPlayerObject)->Realize(bqPlayerObject, SL_BOOLEAN_FALSE); LOGI(TAG, "bqPlayerObject Realize=%d", result); - //获取播放接口 + //ObtainPlay接口 result = (*bqPlayerObject)->GetInterface(bqPlayerObject, SL_IID_PLAY, &bqPlayerPlay); LOGI(TAG, "GetInterface bqPlayerPlay=%d", result); - //获取缓冲队列接口 + //Obtain缓冲队列接口 result = (*bqPlayerObject)->GetInterface(bqPlayerObject, SL_IID_BUFFERQUEUE, &bqPlayerBufferQueue); LOGI(TAG, "GetInterface bqPlayerBufferQueue=%d", result); @@ -148,16 +148,16 @@ void createBufferQueueAudioPlayer(int rate, int channel, int bitsPerSample) { result = (*bqPlayerBufferQueue)->RegisterCallback(bqPlayerBufferQueue, bqPlayerCallback, NULL); LOGI(TAG, "RegisterCallback=%d", result); - //获取音效接口 + //Obtain音效接口 result = (*bqPlayerObject)->GetInterface(bqPlayerObject, SL_IID_EFFECTSEND, &bqPlayerEffectSend); LOGI(TAG, "GetInterface effect=%d", result); - //获取音量接口 + //Obtain音量接口 result = (*bqPlayerObject)->GetInterface(bqPlayerObject, SL_IID_VOLUME, &bqPlayerVolume); LOGI(TAG, "GetInterface volume=%d", result); - //开始播放音乐 + //开始Play音乐 result = (*bqPlayerPlay)->SetPlayState(bqPlayerPlay, SL_PLAYSTATE_PLAYING); LOGI(TAG, "SetPlayState=%d", result); } @@ -167,7 +167,7 @@ int createAudioPlayer(int *rate, int *channel, const char *file_name) { av_register_all(); aFormatCtx = avformat_alloc_context(); - //打开音频文件 + //打开 Audio文件 if (avformat_open_input(&aFormatCtx, file_name, NULL, NULL) != 0) { LOGE(TAG, "Couldn't open file:%s\n", file_name); return -1; // Couldn't open file @@ -179,7 +179,7 @@ int createAudioPlayer(int *rate, int *channel, const char *file_name) { return -1; } - //寻找音频stream + //寻找 Audiostream int i; audioStream = -1; for (i = 0; i < aFormatCtx->nb_streams; i++) { @@ -192,21 +192,21 @@ int createAudioPlayer(int *rate, int *channel, const char *file_name) { LOGE(TAG, "Couldn't find audio stream!"); return -1; } - //获取解码器context + //Obtain decoding Devicecontext aCodecCtx = aFormatCtx->streams[audioStream]->codec; - //寻找音频解码器 + //寻找 Audio decoding Device AVCodec *aCodec = avcodec_find_decoder(aCodecCtx->codec_id); if (!aCodec) { fprintf(stderr, "Unsupported codec!\n"); return -1; } - //打开解码器 + //打开 decoding Device if (avcodec_open2(aCodecCtx, aCodec, NULL) < 0) { LOGE(TAG, "Could not open codec."); return -1; } aFrame = av_frame_alloc(); - // 设置格式转换 + // 设置Format conversion swr = swr_alloc(); av_opt_set_int(swr, "in_channel_layout", aCodecCtx->channel_layout, 0); av_opt_set_int(swr, "out_channel_layout", aCodecCtx->channel_layout, 0); @@ -225,16 +225,16 @@ int createAudioPlayer(int *rate, int *channel, const char *file_name) { return 0; } -// 获取PCM数据, 自动回调获取 +// ObtainPCM数据, 自动回调Obtain int getPCM(void **pcm, size_t *pcmSize) { while (av_read_frame(aFormatCtx, &packet) >= 0) { int frameFinished = 0; - //音频流 + // Audio流 if (packet.stream_index == audioStream) { avcodec_decode_audio4(aCodecCtx, aFrame, &frameFinished, &packet); - //解码完一帧数据 + // decoding 完一帧数据 if (frameFinished) { - // data_size为音频数据所占的字节数 + // data_size为 Audio数据所占的字节数 int data_size = av_samples_get_buffer_size( aFrame->linesize, aCodecCtx->channels, aFrame->nb_samples, aCodecCtx->sample_fmt, 1); @@ -244,7 +244,7 @@ int getPCM(void **pcm, size_t *pcmSize) { outputBuffer = (uint8_t *) realloc(outputBuffer, sizeof(uint8_t) * outputBufferSize); } - // 音频格式转换 + // AudioFormat conversion swr_convert(swr, 
&outputBuffer, aFrame->nb_samples, (uint8_t const **) (aFrame->extended_data), aFrame->nb_samples); @@ -275,20 +275,20 @@ AUDIO_PLAYER_FUNC(void, playAudio, jstring filePath) { const char *file_name = (*env)->GetStringUTFChars(env, filePath, NULL); LOGI(TAG, "file_name=%s", file_name); - // 创建音频解码器 + // 创建 Audio decoding Device createAudioPlayer(&rate, &channel, file_name); - // 创建播放引擎 + // 创建Play引擎 createEngine(); - // 创建缓冲队列音频播放器 + // 创建缓冲队列 AudioPlayDevice createBufferQueueAudioPlayer(rate, channel, SL_PCMSAMPLEFORMAT_FIXED_16); - // 启动音频播放 + // 启动 AudioPlay bqPlayerCallback(bqPlayerBufferQueue, NULL); } -//停止播放,释放相关资源 +//停止Play,释放相关资源 AUDIO_PLAYER_FUNC(void, stop) { if (bqPlayerObject != NULL) { (*bqPlayerObject)->Destroy(bqPlayerObject); @@ -311,6 +311,6 @@ AUDIO_PLAYER_FUNC(void, stop) { engineEngine = NULL; } - // 释放解码器相关资源 + // 释放 decoding Device相关资源 releaseAudioPlayer(); } diff --git a/app/src/main/cpp/video_filter.c b/app/src/main/cpp/video_filter.c index 0111f50..915fef9 100644 --- a/app/src/main/cpp/video_filter.c +++ b/app/src/main/cpp/video_filter.c @@ -69,9 +69,9 @@ jboolean playAudio = JNI_TRUE; //const char *filter_descr = "hflip";//左右反序 //const char *filter_descr = "rotate=90";//旋转90° //const char *filter_descr = "colorbalance=bs=0.3";//添加蓝色背景 -//const char *filter_descr = "drawbox=x=100:y=100:w=100:h=100:color=pink@0.5'";//绘制矩形 +//const char *filter_descr = "drawbox=x=100:y=100:w=100:h=100:color=pink@0.5'";//绘制rectangle //const char *filter_descr = "drawgrid=w=iw/3:h=ih/3:t=2:c=white@0.5";//九宫格分割 -//const char *filter_descr = "edgedetect=low=0.1:high=0.4";//边缘检测 +//const char *filter_descr = "edgedetect=low=0.1:high=0.4";//edge检测 //const char *filter_descr = "lutrgb='r=0:g=0'";//去掉红色、绿色分量,只保留蓝色 //const char *filter_descr = "noise=alls=20:allf=t+u";//添加噪声 //const char *filter_descr = "vignette='PI/4+random(1)*PI/50':eval=frame";//闪烁装饰 @@ -79,7 +79,7 @@ jboolean playAudio = JNI_TRUE; //const char *filter_descr = "drawtext=fontfile='arial.ttf':fontcolor=green:fontsize=30:text='Hello world'";//绘制文字 //const char *filter_descr = "movie=my_logo.png[wm];[in][wm]overlay=5:5[out]";//添加图片水印 -//初始化滤波器 +//Initialize the filter int init_filters(const char *filters_descr) { char args[512]; int ret = 0; @@ -149,10 +149,10 @@ int init_filters(const char *filters_descr) { return ret; } -//初始化视频解码器与播放器 +//初始化视频 decoding Device versus PlayDevice int open_input(JNIEnv * env, const char* file_name, jobject surface){ LOGI(TAG, "open file:%s\n", file_name); - //注册所有组件 + //Register all components av_register_all(); //分配上下文 pFormatCtx = avformat_alloc_context(); @@ -166,7 +166,7 @@ int open_input(JNIEnv * env, const char* file_name, jobject surface){ LOGE(TAG, "Couldn't find stream information."); return -1; } - //寻找视频流的第一帧 + //Looking for video streaming First frame int i; for (i = 0; i < pFormatCtx->nb_streams; i++) { if (pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO @@ -179,9 +179,9 @@ int open_input(JNIEnv * env, const char* file_name, jobject surface){ return -1; } - //获取codec上下文指针 + //Obtaincodec上下文指针 pCodecCtx = pFormatCtx->streams[video_stream_index]->codec; - //寻找视频流的解码器 + //Looking for video streaming decoding Device AVCodec * pCodec = avcodec_find_decoder(pCodecCtx->codec_id); if(pCodec==NULL) { LOGE(TAG, "couldn't find Codec."); @@ -191,7 +191,7 @@ int open_input(JNIEnv * env, const char* file_name, jobject surface){ LOGE(TAG, "Couldn't open codec."); return -1; } - // 获取native window + // Obtainnative window nativeWindow = ANativeWindow_fromSurface(env, surface); // 
设置native window的buffer大小,可自动拉伸 @@ -209,7 +209,7 @@ int open_input(JNIEnv * env, const char* file_name, jobject surface){ buffer=(uint8_t *)av_malloc(numBytes*sizeof(uint8_t)); av_image_fill_arrays(pFrameRGBA->data, pFrameRGBA->linesize, buffer, AV_PIX_FMT_RGBA, pCodecCtx->width, pCodecCtx->height, 1); - // 由于解码出来的帧格式不是RGBA的,在渲染之前需要进行格式转换 + // 由于 decoding 出来的帧格式不是RGBA的,在渲染之前需要进行Format conversion sws_ctx = sws_getContext(pCodecCtx->width, pCodecCtx->height, pCodecCtx->pix_fmt, @@ -224,9 +224,9 @@ int open_input(JNIEnv * env, const char* file_name, jobject surface){ return 0; } -//初始化音频解码器与播放器 +//Initialize the audio decoder versus PlayDevice int init_audio(JNIEnv * env, jclass jthiz){ - //获取音频流索引位置 + //Obtain Audio流索引位置 int i; for(i=0; i < pFormatCtx->nb_streams;i++){ if(pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_AUDIO){ @@ -235,28 +235,28 @@ int init_audio(JNIEnv * env, jclass jthiz){ } } - //获取音频解码器 + //Obtain Audio decoding Device audioCodecCtx = pFormatCtx->streams[audio_stream_index]->codec; AVCodec *codec = avcodec_find_decoder(audioCodecCtx->codec_id); if(codec == NULL){ - LOGE(TAG, "无法获取音频解码器"); + LOGE(TAG, "无法Obtain Audio decoding Device"); return -1; } - //打开音频解码器 + //打开 Audio decoding Device if(avcodec_open2(audioCodecCtx,codec,NULL) < 0){ - LOGE(TAG, "无法打开音频解码器"); + LOGE(TAG, "无法打开 Audio decoding Device"); return -1; } - //frame->16bit 44100 PCM 统一音频采样格式与采样率 + //frame->16bit 44100 PCM Unified audio sampling format and sampling rate audio_swr_ctx = swr_alloc(); - //输入的采样格式 + // Input sampling format enum AVSampleFormat in_sample_fmt = audioCodecCtx->sample_fmt; //输出采样格式16bit PCM out_sample_fmt = AV_SAMPLE_FMT_S16; //输入采样率 int in_sample_rate = audioCodecCtx->sample_rate; - //输出采样率 + //Output sampling rate int out_sample_rate = in_sample_rate; //声道布局(2个声道,默认立体声stereo) uint64_t in_ch_layout = audioCodecCtx->channel_layout; @@ -269,7 +269,7 @@ int init_audio(JNIEnv * env, jclass jthiz){ 0, NULL); swr_init(audio_swr_ctx); - //输出的声道个数 + //Number of output channels out_channel_nb = av_get_channel_layout_nb_channels(out_ch_layout); jclass player_class = (*env)->GetObjectClass(env,jthiz); @@ -290,7 +290,7 @@ int init_audio(JNIEnv * env, jclass jthiz){ jmethodID audio_track_play_mid = (*env)->GetMethodID(env,audio_track_class,"play","()V"); (*env)->CallVoidMethod(env,audio_track,audio_track_play_mid); - //获取write()方法 + //Obtainwrite()方法 audio_track_write_mid = (*env)->GetMethodID(env,audio_track_class,"write","([BII)I"); //16bit 44100 PCM 数据 @@ -299,28 +299,28 @@ int init_audio(JNIEnv * env, jclass jthiz){ } int play_audio(JNIEnv * env, AVPacket* packet, AVFrame* frame){ - //解码 + // decoding int ret = avcodec_decode_audio4(audioCodecCtx, frame, &got_frame, packet); if(ret < 0){ return ret; } - //解码一帧成功 + //Successfully decoded a frame if(got_frame > 0){ - //音频格式转换 + // AudioFormat conversion swr_convert(audio_swr_ctx, &out_buffer, MAX_AUDIO_FRAME_SIZE,(const uint8_t **)frame->data,frame->nb_samples); int out_buffer_size = av_samples_get_buffer_size(NULL, out_channel_nb, frame->nb_samples, out_sample_fmt, 1); jbyteArray audio_sample_array = (*env)->NewByteArray(env,out_buffer_size); jbyte* sample_byte_array = (*env)->GetByteArrayElements(env,audio_sample_array,NULL); - //拷贝缓冲数据 + //Copy buffered data memcpy(sample_byte_array, out_buffer, (size_t) out_buffer_size); //释放数组 (*env)->ReleaseByteArrayElements(env,audio_sample_array,sample_byte_array,0); - //调用AudioTrack的write方法进行播放 + //调用AudioTrack的write方法进行Play (*env)->CallIntMethod(env,audio_track,audio_track_write_mid, 
audio_sample_array,0,out_buffer_size); - //释放局部引用 + //Release local references (*env)->DeleteLocalRef(env,audio_sample_array); usleep(1000);//1000 * 16 } @@ -332,14 +332,14 @@ VIDEO_PLAYER_FUNC(jint, filter, jstring filePath, jobject surface, jstring filte int ret; const char * file_name = (*env)->GetStringUTFChars(env, filePath, JNI_FALSE); const char *filter_descr = (*env)->GetStringUTFChars(env, filterDescr, JNI_FALSE); - //打开输入文件 + //Open input file if(!is_playing){ LOGI(TAG, "open_input..."); if((ret = open_input(env, file_name, surface)) < 0){ LOGE(TAG, "Couldn't allocate video frame."); goto end; } - //注册滤波器 + //Registration filter avfilter_register_all(); filter_frame = av_frame_alloc(); if(filter_frame == NULL) { @@ -347,7 +347,7 @@ VIDEO_PLAYER_FUNC(jint, filter, jstring filePath, jobject surface, jstring filte ret = -1; goto end; } - //初始化音频解码器 + //Initialize the audio decoder if ((ret = init_audio(env, thiz)) < 0){ LOGE(TAG, "Couldn't init_audio."); goto end; @@ -355,7 +355,7 @@ VIDEO_PLAYER_FUNC(jint, filter, jstring filePath, jobject surface, jstring filte } - //初始化滤波器 + //Initialize the filter if ((ret = init_filters(filter_descr)) < 0){ LOGE(TAG, "init_filter error, ret=%d\n", ret); goto end; @@ -366,36 +366,36 @@ VIDEO_PLAYER_FUNC(jint, filter, jstring filePath, jobject surface, jstring filte AVPacket packet; while(av_read_frame(pFormatCtx, &packet)>=0 && !release) { - //切换滤波器,退出当初播放 + //Switch the filter and exit the original playback if(again){ goto again; } - //判断是否为视频流 + //Determine if it is a video stream if(packet.stream_index == video_stream_index) { - //对该帧进行解码 + //对该帧进行 decoding avcodec_decode_video2(pCodecCtx, pFrame, &frameFinished, &packet); if (frameFinished) { - //把解码后视频帧添加到filter_graph + //Add decoded video frames to filter_graph if (av_buffersrc_add_frame_flags(buffersrc_ctx, pFrame, AV_BUFFERSRC_FLAG_KEEP_REF) < 0) { LOGE(TAG, "Error while feeding the filter_graph\n"); break; } - //把滤波后的视频帧从filter graph取出来 + //Take the filtered video frame from the filter graph ret = av_buffersink_get_frame(buffersink_ctx, filter_frame); if (ret >= 0){ // lock native window ANativeWindow_lock(nativeWindow, &windowBuffer, 0); - // 格式转换 + // Format conversion sws_scale(sws_ctx, (uint8_t const * const *)filter_frame->data, filter_frame->linesize, 0, pCodecCtx->height, pFrameRGBA->data, pFrameRGBA->linesize); - // 获取stride + // Obtain stride uint8_t * dst = windowBuffer.bits; int dstStride = windowBuffer.stride * 4; uint8_t * src = pFrameRGBA->data[0]; int srcStride = pFrameRGBA->linesize[0]; - // 由于window的stride和帧的stride不同,因此需要逐行复制 + // because of window of stride And framed stride different , So you need to copy line by line int h; for (h = 0; h < pCodecCtx->height; h++) { memcpy(dst + h * dstStride, src + h * srcStride, (size_t) srcStride); @@ -404,11 +404,11 @@ VIDEO_PLAYER_FUNC(jint, filter, jstring filePath, jobject surface, jstring filte } av_frame_unref(filter_frame); } - //延迟等待 + //Delayed wait if (!playAudio){ usleep((unsigned long) (1000 * 40));//1000 * 40 } - } else if(packet.stream_index == audio_stream_index){//音频帧 + } else if(packet.stream_index == audio_stream_index){// Audio帧 if (playAudio){ play_audio(env, &packet, pFrame); } @@ -417,7 +417,7 @@ VIDEO_PLAYER_FUNC(jint, filter, jstring filePath, jobject surface, jstring filte } end: is_playing = 0; - //释放内存以及关闭文件 + //Free up memory and close files av_free(buffer); av_free(pFrameRGBA); av_free(filter_frame); diff --git a/app/src/main/cpp/video_player.c b/app/src/main/cpp/video_player.c index 4447742..f1663e4 
100644 --- a/app/src/main/cpp/video_player.c +++ b/app/src/main/cpp/video_player.c @@ -14,7 +14,7 @@ #define TAG "VideoPlayer" -//播放倍率 +//Play倍率 float play_rate = 1; //视频总时长 long duration = 0; @@ -23,7 +23,7 @@ VIDEO_PLAYER_FUNC(jint, play, jstring filePath, jobject surface){ const char * file_name = (*env)->GetStringUTFChars(env, filePath, JNI_FALSE); LOGE(TAG, "open file:%s\n", file_name); - //注册所有组件 + //Register all components av_register_all(); //分配上下文 AVFormatContext * pFormatCtx = avformat_alloc_context(); @@ -37,7 +37,7 @@ VIDEO_PLAYER_FUNC(jint, play, jstring filePath, jobject surface){ LOGE(TAG, "Couldn't find stream information."); return -1; } - //寻找视频流的第一帧 + //Looking for video streaming First frame int videoStream = -1, i; for (i = 0; i < pFormatCtx->nb_streams; i++) { if (pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO @@ -50,15 +50,15 @@ VIDEO_PLAYER_FUNC(jint, play, jstring filePath, jobject surface){ return -1; } - //获取视频总时长 + //Obtain视频总时长 if (pFormatCtx->duration != AV_NOPTS_VALUE) { duration = (long) (pFormatCtx->duration / AV_TIME_BASE); LOGE(TAG, "duration==%ld", duration); } - //获取codec上下文指针 + //Obtaincodec上下文指针 AVCodecContext * pCodecCtx = pFormatCtx->streams[videoStream]->codec; - //寻找视频流的解码器 + //Looking for video streaming decoding Device AVCodec * pCodec = avcodec_find_decoder(pCodecCtx->codec_id); if(pCodec==NULL) { LOGE(TAG, "couldn't find Codec."); @@ -68,9 +68,9 @@ VIDEO_PLAYER_FUNC(jint, play, jstring filePath, jobject surface){ LOGE(TAG, "Couldn't open codec."); return -1; } - // 获取native window + // Obtainnative window ANativeWindow* nativeWindow = ANativeWindow_fromSurface(env, surface); - // 获取视频宽高 + // Obtain视频宽高 int videoWidth = pCodecCtx->width; int videoHeight = pCodecCtx->height; // 设置native window的buffer大小,可自动拉伸 @@ -94,7 +94,7 @@ VIDEO_PLAYER_FUNC(jint, play, jstring filePath, jobject surface){ av_image_fill_arrays(pFrameRGBA->data, pFrameRGBA->linesize, buffer, AV_PIX_FMT_RGBA, pCodecCtx->width, pCodecCtx->height, 1); - // 由于解码出来的帧格式不是RGBA的,在渲染之前需要进行格式转换 + // 由于 decoding 出来的帧格式不是RGBA的,在渲染之前需要进行Format conversion struct SwsContext *sws_ctx = sws_getContext(pCodecCtx->width, pCodecCtx->height, pCodecCtx->pix_fmt, @@ -110,35 +110,35 @@ VIDEO_PLAYER_FUNC(jint, play, jstring filePath, jobject surface){ AVPacket packet; while(av_read_frame(pFormatCtx, &packet)>=0) { - //判断是否为视频流 + //Determine if it is a video stream if(packet.stream_index==videoStream) { - //对该帧进行解码 + //对该帧进行 decoding avcodec_decode_video2(pCodecCtx, pFrame, &frameFinished, &packet); if (frameFinished) { // lock native window ANativeWindow_lock(nativeWindow, &windowBuffer, 0); - // 格式转换 + // Format conversion sws_scale(sws_ctx, (uint8_t const * const *)pFrame->data, pFrame->linesize, 0, pCodecCtx->height, pFrameRGBA->data, pFrameRGBA->linesize); - // 获取stride + // Obtainstride uint8_t * dst = windowBuffer.bits; int dstStride = windowBuffer.stride * 4; uint8_t * src = pFrameRGBA->data[0]; int srcStride = pFrameRGBA->linesize[0]; - // 由于window的stride和帧的stride不同,因此需要逐行复制 + // 由于window的stride And framed stride different , So you need to copy line by line int h; for (h = 0; h < videoHeight; h++) { memcpy(dst + h * dstStride, src + h * srcStride, (size_t) srcStride); } ANativeWindow_unlockAndPost(nativeWindow); } - //延迟等待 + //Delayed wait usleep((unsigned long) (1000 * 40 * play_rate)); } av_packet_unref(&packet); } - //释放内存以及关闭文件 + //Free up memory and close files av_free(buffer); av_free(pFrameRGBA); av_free(pFrame); @@ -147,12 +147,12 @@ VIDEO_PLAYER_FUNC(jint, play, jstring 
filePath, jobject surface){ return 0; } -//设置播放速率 +//设置Play速率 VIDEO_PLAYER_FUNC(void, setPlayRate, jfloat playRate){ play_rate = playRate; } -//获取视频总时长 +//Obtain视频总时长 VIDEO_PLAYER_FUNC(jint, getDuration){ return duration; } \ No newline at end of file diff --git a/app/src/main/java/com/frank/ffmpeg/AudioPlayer.java b/app/src/main/java/com/frank/ffmpeg/AudioPlayer.java index d6553f5..6f721ad 100644 --- a/app/src/main/java/com/frank/ffmpeg/AudioPlayer.java +++ b/app/src/main/java/com/frank/ffmpeg/AudioPlayer.java @@ -5,7 +5,7 @@ import android.media.AudioManager; import android.media.AudioTrack; /** - * 音频播放器 + * AudioPlayDevice * Created by frank on 2018/2/1. */ @@ -13,11 +13,11 @@ public class AudioPlayer { static { System.loadLibrary("media-handle"); } - //调用AudioTrack播放 + //调用AudioTrackPlay public native void play(String audioPath); - //调用OpenSL ES播放 + //调用OpenSL ESPlay public native void playAudio(String audioPath); - //调用OpenSL ES播放 + //调用OpenSL ESPlay public native void stop(); public native static void lameInitDefault(); diff --git a/app/src/main/java/com/frank/ffmpeg/FFmpegCmd.java b/app/src/main/java/com/frank/ffmpeg/FFmpegCmd.java index eedc569..dab4990 100644 --- a/app/src/main/java/com/frank/ffmpeg/FFmpegCmd.java +++ b/app/src/main/java/com/frank/ffmpeg/FFmpegCmd.java @@ -14,7 +14,7 @@ public class FFmpegCmd { private final static int RESULT_ERROR = 0; - //开子线程调用native方法进行音视频处理 + //开子线程调用native方法进行Audio and video processing public static void execute(final String[] commands, final OnHandleListener onHandleListener){ new Thread(new Runnable() { @Override diff --git a/app/src/main/java/com/frank/ffmpeg/MediaPlayer.java b/app/src/main/java/com/frank/ffmpeg/MediaPlayer.java index 5380df5..2194c78 100644 --- a/app/src/main/java/com/frank/ffmpeg/MediaPlayer.java +++ b/app/src/main/java/com/frank/ffmpeg/MediaPlayer.java @@ -5,7 +5,7 @@ import android.media.AudioManager; import android.media.AudioTrack; /** - * 音视频播放器 + * 音视频PlayDevice * Created by frank on 2018/2/12. */ diff --git a/app/src/main/java/com/frank/ffmpeg/VideoPlayer.java b/app/src/main/java/com/frank/ffmpeg/VideoPlayer.java index 20fc676..38c61f7 100644 --- a/app/src/main/java/com/frank/ffmpeg/VideoPlayer.java +++ b/app/src/main/java/com/frank/ffmpeg/VideoPlayer.java @@ -5,7 +5,7 @@ import android.media.AudioManager; import android.media.AudioTrack; /** - * 视频播放器 + * 视频PlayDevice * Created by frank on 2018/2/1 */ public class VideoPlayer { diff --git a/app/src/main/java/com/frank/ffmpeg/activity/AudioHandleActivity.java b/app/src/main/java/com/frank/ffmpeg/activity/AudioHandleActivity.java index fd03370..2592464 100644 --- a/app/src/main/java/com/frank/ffmpeg/activity/AudioHandleActivity.java +++ b/app/src/main/java/com/frank/ffmpeg/activity/AudioHandleActivity.java @@ -24,7 +24,7 @@ import static com.frank.ffmpeg.handler.FFmpegHandler.MSG_BEGIN; import static com.frank.ffmpeg.handler.FFmpegHandler.MSG_FINISH; /** - * 使用ffmpeg处理音频 + * 使用ffmpeg处理 Audio * Created by frank on 2018/1/23. 
*/ @@ -101,7 +101,7 @@ public class AudioHandleActivity extends BaseActivity { } /** - * 调用ffmpeg处理音频 + * 调用ffmpeg处理 Audio * * @param srcFile srcFile */ @@ -119,7 +119,7 @@ public class AudioHandleActivity extends BaseActivity { if (useFFmpeg) { //使用FFmpeg转码 String transformFile = PATH + File.separator + "transformAudio.mp3"; commandLine = FFmpegUtil.transformAudio(srcFile, transformFile); - } else { //使用MediaCodec与mp3lame转mp3 + } else { //使用MediaCodec versus mp3lame转mp3 new Thread(new Runnable() { @Override public void run() { @@ -130,7 +130,7 @@ public class AudioHandleActivity extends BaseActivity { }).start(); } break; - case R.id.btn_cut://剪切(注意原文件与剪切文件格式一致,文件绝对路径最好不包含中文、特殊字符) + case R.id.btn_cut://剪切(注意原文件 versus 剪切文件格式一致,文件绝对路径最好不包含中文、特殊字符) String suffix = FileUtil.getFileSuffix(srcFile); if (suffix == null || suffix.isEmpty()) { return; @@ -138,7 +138,7 @@ public class AudioHandleActivity extends BaseActivity { String cutFile = PATH + File.separator + "cutAudio" + suffix; commandLine = FFmpegUtil.cutAudio(srcFile, 10, 15, cutFile); break; - case R.id.btn_concat://合并,支持MP3、AAC、AMR等,不支持PCM裸流,不支持WAV(PCM裸流加音频头) + case R.id.btn_concat://merge,支持MP3、AAC、AMR等,不支持PCM裸流,不支持WAV(PCM裸流加 Audio头) if (!FileUtil.checkFileExist(appendFile)) { return; } @@ -159,7 +159,7 @@ public class AudioHandleActivity extends BaseActivity { String mixFile = PATH + File.separator + "mix" + mixSuffix; commandLine = FFmpegUtil.mixAudio(srcFile, appendFile, mixFile); break; - case R.id.btn_play_audio://解码播放(AudioTrack) + case R.id.btn_play_audio:// decoding Play(AudioTrack) new Thread(new Runnable() { @Override public void run() { @@ -167,7 +167,7 @@ public class AudioHandleActivity extends BaseActivity { } }).start(); return; - case R.id.btn_play_opensl://解码播放(OpenSL ES) + case R.id.btn_play_opensl:// decoding Play(OpenSL ES) new Thread(new Runnable() { @Override public void run() { @@ -175,8 +175,8 @@ public class AudioHandleActivity extends BaseActivity { } }).start(); return; - case R.id.btn_audio_encode://音频编码 - //可编码成WAV、AAC。如果需要编码成MP3,ffmpeg需要重新编译,把MP3库enable + case R.id.btn_audio_encode:// Audio coding + //可 coding 成WAV、AAC。如果需要 coding 成MP3,ffmpeg需要重新编译,把MP3库enable String pcmFile = PATH + File.separator + "concat.pcm"; String wavFile = PATH + File.separator + "new.wav"; //pcm数据的采样率,一般采样率为8000、16000、44100 @@ -185,10 +185,10 @@ public class AudioHandleActivity extends BaseActivity { int channel = 1; commandLine = FFmpegUtil.encodeAudio(pcmFile, wavFile, sampleRate, channel); break; - case R.id.btn_pcm_concat://PCM裸流音频文件合并 + case R.id.btn_pcm_concat://PCM裸流 Audio文件merge String srcPCM = PATH + File.separator + "audio.pcm";//第一个pcm文件 String appendPCM = PATH + File.separator + "audio.pcm";//第二个pcm文件 - String concatPCM = PATH + File.separator + "concat.pcm";//合并后的文件 + String concatPCM = PATH + File.separator + "concat.pcm";//merge后的文件 if (!FileUtil.checkFileExist(srcPCM) || !FileUtil.checkFileExist(appendPCM)) { return; } diff --git a/app/src/main/java/com/frank/ffmpeg/activity/FilterActivity.java b/app/src/main/java/com/frank/ffmpeg/activity/FilterActivity.java index 338d847..25c7602 100644 --- a/app/src/main/java/com/frank/ffmpeg/activity/FilterActivity.java +++ b/app/src/main/java/com/frank/ffmpeg/activity/FilterActivity.java @@ -38,7 +38,7 @@ public class FilterActivity extends BaseActivity implements SurfaceHolder.Callba private SurfaceHolder surfaceHolder; //surface是否已经创建 private boolean surfaceCreated; - //是否正在播放 + //是否正在Play private boolean isPlaying; //滤镜数组 private String[] filters = new String[]{ @@ 
-53,19 +53,19 @@ public class FilterActivity extends BaseActivity implements SurfaceHolder.Callba "unsharp" }; private String[] txtArray = new String[]{ - "素描", - "鲜明",//hue - "暖蓝", - "边缘", - "九宫格", - "均衡", - "矩形", - "翻转",//vflip上下翻转,hflip是左右翻转 - "锐化" + "Sketch", + "Vivid",//hue + "Warm blue", + "Edge", + "Grid", + "Balance", + "Rectangle", + "Flip",//vflip is vertical flip, hflip is horizontal flip + "Sharpen" }; private HorizontalAdapter horizontalAdapter; private RecyclerView recyclerView; - //是否播放音频 + //Whether to play audio private boolean playAudio = true; private ToggleButton btnSound; private Button btnSelect; @@ -129,7 +129,7 @@ public class FilterActivity extends BaseActivity implements SurfaceHolder.Callba initViewsWithClick(R.id.btn_select_file); } - //注册监听器 + //Register listeners private void registerLister(){ horizontalAdapter.setOnItemClickListener(new OnItemClickListener() { @Override @@ -166,7 +166,7 @@ public class FilterActivity extends BaseActivity implements SurfaceHolder.Callba new Thread(new Runnable() { @Override public void run() { - //切换播放 + //Switch playback if(isPlaying){ videoPlayer.again(); } @@ -176,7 +176,7 @@ public class FilterActivity extends BaseActivity implements SurfaceHolder.Callba }).start(); } - //设置是否静音 + //Set whether to mute private void setPlayAudio(){ playAudio = !playAudio; videoPlayer.playAudio(playAudio); diff --git a/app/src/main/java/com/frank/ffmpeg/activity/LiveActivity.java b/app/src/main/java/com/frank/ffmpeg/activity/LiveActivity.java index 368bcb5..0ec85a9 100644 --- a/app/src/main/java/com/frank/ffmpeg/activity/LiveActivity.java +++ b/app/src/main/java/com/frank/ffmpeg/activity/LiveActivity.java @@ -21,7 +21,7 @@ import com.frank.live.param.VideoParam; import com.frank.live.LivePusherNew; /** - * h264与rtmp实时推流直播 + * Real-time live streaming with h264 and rtmp * Created by frank on 2018/1/28. */ @@ -93,7 +93,7 @@ public class LiveActivity extends BaseActivity implements CompoundButton.OnCheck livePusher.stopPush(); } break; - case R.id.btn_mute://设置静音 + case R.id.btn_mute://Set mute Log.i(TAG, "isChecked=" + isChecked); livePusher.setMute(isChecked); break; diff --git a/app/src/main/java/com/frank/ffmpeg/activity/MainActivity.java b/app/src/main/java/com/frank/ffmpeg/activity/MainActivity.java index 821c0a2..fa39778 100644 --- a/app/src/main/java/com/frank/ffmpeg/activity/MainActivity.java +++ b/app/src/main/java/com/frank/ffmpeg/activity/MainActivity.java @@ -7,7 +7,7 @@ import android.view.View; import com.frank.ffmpeg.R; /** - * 使用ffmpeg进行音视频处理入口 + * Entry point for audio and video processing with ffmpeg * Created by frank on 2018/1/23.
*/ public class MainActivity extends BaseActivity { @@ -38,22 +38,22 @@ public class MainActivity extends BaseActivity { public void onViewClick(View v) { Intent intent = new Intent(); switch (v.getId()){ - case R.id.btn_audio://音频处理 + case R.id.btn_audio://Audio processing intent.setClass(MainActivity.this, AudioHandleActivity.class); break; - case R.id.btn_video://视频处理 + case R.id.btn_video://Video processing intent.setClass(MainActivity.this, VideoHandleActivity.class); break; - case R.id.btn_media://音视频处理 + case R.id.btn_media://Audio and video processing intent.setClass(MainActivity.this, MediaHandleActivity.class); break; - case R.id.btn_play://音视频播放 + case R.id.btn_play://Audio and video playback intent.setClass(MainActivity.this, MediaPlayerActivity.class); break; case R.id.btn_push://FFmpeg推流 intent.setClass(MainActivity.this, PushActivity.class); break; - case R.id.btn_live://实时推流直播:AAC音频编码、H264视频编码、RTMP推流 + case R.id.btn_live://Real-time live streaming: AAC audio encoding, H264 video encoding, RTMP streaming intent.setClass(MainActivity.this, LiveActivity.class); break; case R.id.btn_filter://滤镜特效 diff --git a/app/src/main/java/com/frank/ffmpeg/activity/MediaHandleActivity.java b/app/src/main/java/com/frank/ffmpeg/activity/MediaHandleActivity.java index efdaa1f..7e9f9fa 100644 --- a/app/src/main/java/com/frank/ffmpeg/activity/MediaHandleActivity.java +++ b/app/src/main/java/com/frank/ffmpeg/activity/MediaHandleActivity.java @@ -23,7 +23,7 @@ import static com.frank.ffmpeg.handler.FFmpegHandler.MSG_CONTINUE; import static com.frank.ffmpeg.handler.FFmpegHandler.MSG_FINISH; /** - * 使用ffmpeg进行音视频合成与分离 + * Use ffmpeg to mux and demux audio and video * Created by frank on 2018/1/23. */ public class MediaHandleActivity extends BaseActivity { @@ -49,7 +49,7 @@ public class MediaHandleActivity extends BaseActivity { String muxFile = PATH + File.separator + "media-mux.mp4"; try { - //使用MediaPlayer获取视频时长 + //Use MediaPlayer to get the video duration MediaPlayer mediaPlayer = new MediaPlayer(); mediaPlayer.setDataSource(videoFile); mediaPlayer.prepare(); @@ -57,7 +57,7 @@ public class MediaHandleActivity extends BaseActivity { int videoDuration = mediaPlayer.getDuration()/1000; Log.i(TAG, "videoDuration=" + videoDuration); mediaPlayer.release(); - //使用MediaMetadataRetriever获取音频时长 + //Use MediaMetadataRetriever to get the audio duration MediaMetadataRetriever mediaRetriever = new MediaMetadataRetriever(); mediaRetriever.setDataSource(audioFile); //单位为ms @@ -65,9 +65,9 @@ public class MediaHandleActivity extends BaseActivity { int audioDuration = (int)(Long.parseLong(duration)/1000); Log.i(TAG, "audioDuration=" + audioDuration); mediaRetriever.release(); - //如果视频时长比音频长,采用音频时长,否则用视频时长 + //If the video is longer than the audio, use the audio duration; otherwise use the video duration int mDuration = Math.min(audioDuration, videoDuration); - //使用纯视频与音频进行合成 + //Mux the video-only stream with the audio String[] commandLine = FFmpegUtil.mediaMux(temp, audioFile, mDuration, muxFile); if (ffmpegHandler != null) { ffmpegHandler.isContinue(false); @@ -141,9 +141,9 @@ public class MediaHandleActivity extends BaseActivity { } switch (viewId){ - case R.id.btn_mux://音视频合成 + case R.id.btn_mux://Audio and video muxing try { - //视频文件有音频,先把纯视频文件抽取出来 + //The video file contains audio, so extract the video-only stream first videoFile = srcFile; commandLine = FFmpegUtil.extractVideo(srcFile, temp); if (ffmpegHandler != null) { @@ -153,7 +153,7 @@ public class MediaHandleActivity extends BaseActivity { e.printStackTrace(); } break; - case R.id.btn_extract_audio://提取音频 + case R.id.btn_extract_audio://Extract audio String extractAudio = PATH + File.separator + "extractAudio.aac"; commandLine =
FFmpegUtil.extractAudio(srcFile, extractAudio); break; diff --git a/app/src/main/java/com/frank/ffmpeg/activity/MediaPlayerActivity.java b/app/src/main/java/com/frank/ffmpeg/activity/MediaPlayerActivity.java index 69a1204..4b1662a 100644 --- a/app/src/main/java/com/frank/ffmpeg/activity/MediaPlayerActivity.java +++ b/app/src/main/java/com/frank/ffmpeg/activity/MediaPlayerActivity.java @@ -12,7 +12,7 @@ import com.frank.ffmpeg.R; import com.frank.ffmpeg.util.FileUtil; /** - * 音视频解码播放 + * Audio and video decoding and playback * Created by frank on 2018/2/12. */ diff --git a/app/src/main/java/com/frank/ffmpeg/activity/VideoHandleActivity.java b/app/src/main/java/com/frank/ffmpeg/activity/VideoHandleActivity.java index 468113d..8b7630a 100644 --- a/app/src/main/java/com/frank/ffmpeg/activity/VideoHandleActivity.java +++ b/app/src/main/java/com/frank/ffmpeg/activity/VideoHandleActivity.java @@ -132,7 +132,7 @@ public class VideoHandleActivity extends BaseActivity { int duration = 20; commandLine = FFmpegUtil.cutVideo(srcFile, startTime, duration, cutVideo); break; - case R.id.btn_video_concat://视频合并 + case R.id.btn_video_concat://Video merge // commandLine = FFmpegUtil.toTs(srcFile, ts1); // concatStep ++; // String concatVideo = PATH + File.separator + "concatVideo.mp4"; @@ -222,7 +222,7 @@ public class VideoHandleActivity extends BaseActivity { if (!FileUtil.checkFileExist(inputFile1) && !FileUtil.checkFileExist(inputFile2)) { return; } - //x、y坐标点需要根据全屏视频与小视频大小,进行计算 + //The x and y coordinates must be calculated from the sizes of the full-screen video and the small video //比如:全屏视频为320x240,小视频为120x90,那么x=200 y=150 int x = 200; int y = 150; diff --git a/app/src/main/java/com/frank/ffmpeg/adapter/HorizontalAdapter.java b/app/src/main/java/com/frank/ffmpeg/adapter/HorizontalAdapter.java index c0ed3fe..c12eac0 100644 --- a/app/src/main/java/com/frank/ffmpeg/adapter/HorizontalAdapter.java +++ b/app/src/main/java/com/frank/ffmpeg/adapter/HorizontalAdapter.java @@ -13,7 +13,7 @@ import java.util.ArrayList; import java.util.List; /** - * RecyclerView适配器 + * RecyclerView adapter * Created by frank on 2018/6/6. */ diff --git a/app/src/main/java/com/frank/ffmpeg/handler/FFmpegHandler.java b/app/src/main/java/com/frank/ffmpeg/handler/FFmpegHandler.java index a4086da..1137af3 100644 --- a/app/src/main/java/com/frank/ffmpeg/handler/FFmpegHandler.java +++ b/app/src/main/java/com/frank/ffmpeg/handler/FFmpegHandler.java @@ -9,7 +9,7 @@ import com.frank.ffmpeg.model.MediaBean; import com.frank.ffmpeg.tool.JsonParseTool; /** - * Handler消息处理器 + * Handler-based message processor * Created by frank on 2019/11/11. */ public class FFmpegHandler { @@ -37,7 +37,7 @@ public class FFmpegHandler { } /** - * 执行ffmpeg命令行 + * Execute the ffmpeg command line * @param commandLine commandLine */ public void executeFFmpegCmd(final String[] commandLine) { diff --git a/app/src/main/java/com/frank/ffmpeg/hardware/HardwareDecode.java b/app/src/main/java/com/frank/ffmpeg/hardware/HardwareDecode.java index 55ba7b8..fbec59b 100644 --- a/app/src/main/java/com/frank/ffmpeg/hardware/HardwareDecode.java +++ b/app/src/main/java/com/frank/ffmpeg/hardware/HardwareDecode.java @@ -10,7 +10,7 @@ import android.view.Surface; import java.nio.ByteBuffer; /** - * 使用MediaExtractor抽帧,MediaCodec解码,然后渲染到Surface + * Use MediaExtractor to extract frames, decode with MediaCodec, then render to a Surface * Created by frank on 2019/11/16.
*/ diff --git a/app/src/main/java/com/frank/ffmpeg/listener/OnHandleListener.java b/app/src/main/java/com/frank/ffmpeg/listener/OnHandleListener.java index 4b8156c..102d5a5 100644 --- a/app/src/main/java/com/frank/ffmpeg/listener/OnHandleListener.java +++ b/app/src/main/java/com/frank/ffmpeg/listener/OnHandleListener.java @@ -1,7 +1,7 @@ package com.frank.ffmpeg.listener; /** - * 流程执行监听器 + * Listener for process execution * Created by frank on 2019/11/11. */ public interface OnHandleListener { diff --git a/app/src/main/java/com/frank/ffmpeg/listener/OnItemClickListener.java b/app/src/main/java/com/frank/ffmpeg/listener/OnItemClickListener.java index 6353d8f..640b8a5 100644 --- a/app/src/main/java/com/frank/ffmpeg/listener/OnItemClickListener.java +++ b/app/src/main/java/com/frank/ffmpeg/listener/OnItemClickListener.java @@ -1,7 +1,7 @@ package com.frank.ffmpeg.listener; /** - * RecyclerView item点击监听器 + * RecyclerView item click listener * Created by frank on 2018/6/6. */ diff --git a/app/src/main/java/com/frank/ffmpeg/util/FFmpegUtil.java b/app/src/main/java/com/frank/ffmpeg/util/FFmpegUtil.java index c1ad06e..e0c50cc 100644 --- a/app/src/main/java/com/frank/ffmpeg/util/FFmpegUtil.java +++ b/app/src/main/java/com/frank/ffmpeg/util/FFmpegUtil.java @@ -8,14 +8,14 @@ import java.util.List; import java.util.Locale; /** - * ffmpeg工具:拼接命令行处理音视频 + * ffmpeg utility: builds command lines for audio and video processing * Created by frank on 2018/1/23. */ public class FFmpegUtil { /** - * 使用ffmpeg命令行进行音频转码 + * Use the ffmpeg command line to transcode audio * * @param srcFile 源文件 * @param targetFile 目标文件(后缀指定转码格式) @@ -28,7 +28,7 @@ public class FFmpegUtil { } /** - * 使用ffmpeg命令行进行音频剪切 + * Use the ffmpeg command line to cut audio * * @param srcFile 源文件 * @param startTime 剪切的开始时间(单位为秒) @@ -44,11 +44,11 @@ public class FFmpegUtil { } /** - * 使用ffmpeg命令行进行音频合并 + * Use the ffmpeg command line to merge audio * - * @param fileList 合并列表 + * @param fileList the list of files to merge * @param targetFile 目标文件 - * @return 合并后的文件 + * @return the merged file */ public static String[] concatAudio(List fileList, String targetFile) { // ffmpeg -i concat:%s|%s -acodec copy %s @@ -67,7 +67,7 @@ public class FFmpegUtil { } /** - * 使用ffmpeg命令行进行音频混合 + * Use the ffmpeg command line to mix audio * * @param srcFile 源文件 * @param mixFile 待混合文件 @@ -84,10 +84,10 @@ public class FFmpegUtil { /** - * 使用ffmpeg命令行进行音视频合成 + * Use the ffmpeg command line to mux audio and video * * @param videoFile 视频文件 - * @param audioFile 音频文件 + * @param audioFile the audio file * @param duration 视频时长 * @param muxFile 目标文件 * @return 合成后的文件 @@ -101,11 +101,11 @@ public class FFmpegUtil { } /** - * 使用ffmpeg命令行进行抽取音频 + * Use the ffmpeg command line to extract audio * * @param srcFile 原文件 * @param targetFile 目标文件 - * @return 抽取后的音频文件 + * @return the extracted audio file */ public static String[] extractAudio(String srcFile, String targetFile) { //-vn:video not @@ -115,7 +115,7 @@ public class FFmpegUtil { } /** - * 使用ffmpeg命令行进行抽取视频 + * Use the ffmpeg command line to extract video * * @param srcFile 原文件 * @param targetFile 目标文件 @@ -130,7 +130,7 @@ public class FFmpegUtil { /** - * 使用ffmpeg命令行进行视频转码 + * Use the ffmpeg command line to transcode video * * @param srcFile 源文件 * @param targetFile 目标文件(后缀指定转码格式) @@ -139,7 +139,7 @@ public class FFmpegUtil { public static String[] transformVideo(String srcFile, String targetFile) { // 指定视频的帧率、码率、分辨率 // String transformVideoCmd = "ffmpeg -i %s -r 25 -b 200 -s 1080x720 %s"; - // 指定视频编码器:解决有旋转角度的视频,转码后发生旋转的问题 + // Specify the video encoder: fixes videos with a rotation angle being rotated after transcoding // String transformVideoCmd = "ffmpeg -i %s -vcodec libx264 -acodec copy %s"; String transformVideoCmd = "ffmpeg -i %s -vcodec
copy -acodec copy %s"; transformVideoCmd = String.format(transformVideoCmd, srcFile, targetFile); @@ -147,7 +147,7 @@ public class FFmpegUtil { } /** - * 使用ffmpeg命令行进行视频剪切 + * Use the ffmpeg command line to cut video * * @param srcFile 源文件 * @param startTime 剪切的开始时间(单位为秒) @@ -157,14 +157,14 @@ public class FFmpegUtil { */ @SuppressLint("DefaultLocale") public static String[] cutVideo(String srcFile, int startTime, int duration, String targetFile) { - //指定音视频编码器:ffmpeg -i %s -ss %d -t %d -acodec libmp3lame -vcodec libx264 %s + //Specify the audio and video encoders: ffmpeg -i %s -ss %d -t %d -acodec libmp3lame -vcodec libx264 %s String cutVideoCmd = "ffmpeg -i %s -ss %d -t %d -acodec copy -vcodec copy %s"; cutVideoCmd = String.format(cutVideoCmd, srcFile, startTime, duration, targetFile); return cutVideoCmd.split(" "); } /** - * 使用ffmpeg命令行进行视频截图 + * Use the ffmpeg command line to take a video screenshot * * @param srcFile 源文件 * @param time 截图开始时间 @@ -178,7 +178,7 @@ public class FFmpegUtil { } /** - * 使用ffmpeg命令行给视频添加水印 + * Use the ffmpeg command line to add a watermark to a video * * @param srcFile 源文件 * @param waterMark 水印文件路径 @@ -193,7 +193,7 @@ public class FFmpegUtil { } /** - * 使用ffmpeg命令行进行视频转成Gif动图 + * Use the ffmpeg command line to convert a video into an animated GIF * * @param srcFile 源文件 * @param startTime 开始时间 @@ -213,7 +213,7 @@ public class FFmpegUtil { } /** - * 使用ffmpeg命令行进行屏幕录制 + * Use the ffmpeg command line to record the screen * * @param size 视频尺寸大小 * @param recordTime 录屏时间 @@ -230,7 +230,7 @@ public class FFmpegUtil { } /** - * 使用ffmpeg命令行进行图片合成视频 + * Use the ffmpeg command line to combine images into a video * * @param srcFile 源文件 * @param frameRate 合成视频帧率 @@ -252,7 +252,7 @@ public class FFmpegUtil { * @param srcFile 源文件 * @param resolution 分辨率 * @param targetFile 目标文件 - * @return 转换后的图片命令行 + * @return the command line for converting the image */ @SuppressLint("DefaultLocale") public static String[] convertResolution(String srcFile, String resolution, String targetFile) { @@ -262,13 +262,13 @@ public class FFmpegUtil { } /** - * 音频编码 + * Audio encoding * * @param srcFile 源文件pcm裸流 - * @param targetFile 编码后目标文件 + * @param targetFile the encoded target file * @param sampleRate 采样率 * @param channel 声道:单声道为1/立体声道为2 - * @return 音频编码的命令行 + * @return the command line for audio encoding */ @SuppressLint("DefaultLocale") public static String[] encodeAudio(String srcFile, String targetFile, int sampleRate, int channel) { @@ -284,7 +284,7 @@ public class FFmpegUtil { * @param input2 输入文件2 * @param videoLayout 视频布局 * @param targetFile 画面拼接文件 - * @return 画面拼接的命令行 + * @return the command line for picture stitching */ public static String[] multiVideo(String input1, String input2, String targetFile, int videoLayout) { // String multiVideo = "ffmpeg -i %s -i %s -i %s -i %s -filter_complex " + @@ -302,10 +302,10 @@ public class FFmpegUtil { * * @param inputFile 输入文件 * @param targetFile 反序文件 - * @return 视频反序的命令行 + * @return the command line for reversing the video */ public static String[] reverseVideo(String inputFile, String targetFile) { - //FIXME 音频也反序 + //FIXME also reverse the audio // String reverseVideo = "ffmpeg -i %s -filter_complex [0:v]reverse[v];[0:a]areverse[a] -map [v] -map [a] %s"; String reverseVideo = "ffmpeg -i %s -filter_complex [0:v]reverse[v] -map [v] %s";//单纯视频反序 reverseVideo = String.format(reverseVideo, inputFile, targetFile); @@ -317,7 +317,7 @@ public class FFmpegUtil { * * @param inputFile 输入文件 * @param targetFile 输出文件 - * @return 视频降噪的命令行 + * @return the command line for video noise reduction */ public static String[] denoiseVideo(String inputFile, String targetFile) { String reverseVideo = "ffmpeg -i %s -nr 500 %s"; @@ -333,7 +333,7 @@ public class FFmpegUtil { * @param duration 持续时间 * @param frameRate 帧率 * @param targetFile 输出文件 -
* @return 视频抽帧的命令行 + * @return the command line for extracting video frames */ public static String[] videoToImage(String inputFile, int startTime, int duration, int frameRate, String targetFile) { //-ss:开始时间,单位为秒 @@ -353,7 +353,7 @@ public class FFmpegUtil { * @param targetFile 输出文件 * @param x 小视频起点x坐标 * @param y 小视频起点y坐标 - * @return 视频画中画的命令行 + * @return the command line for video picture-in-picture */ @SuppressLint("DefaultLocale") public static String[] picInPicVideo(String inputFile1, String inputFile2, int x, int y, String targetFile) { @@ -367,7 +367,7 @@ public class FFmpegUtil { * * @param inputFile inputFile * @param outputFile outputFile - * @return 移动moov命令行 + * @return the command line for moving the moov atom ahead */ public static String[] moveMoovAhead(String inputFile, String outputFile) { String moovCmd = "ffmpeg -i %s -movflags faststart -acodec copy -vcodec copy %s"; diff --git a/app/src/main/java/com/frank/ffmpeg/util/TimeUtil.java b/app/src/main/java/com/frank/ffmpeg/util/TimeUtil.java index b0f9243..3840597 100644 --- a/app/src/main/java/com/frank/ffmpeg/util/TimeUtil.java +++ b/app/src/main/java/com/frank/ffmpeg/util/TimeUtil.java @@ -6,7 +6,7 @@ import java.util.Date; import java.util.Locale; /** - * 时间转换工具类 + * Time conversion utility * Created by frank on 2018/11/12. */ @@ -52,7 +52,7 @@ public class TimeUtil { } /** - * 获取视频时长 + * Get the video duration * @param time time * @return 视频时长 */ diff --git a/app/src/main/res/drawable/btn.xml b/app/src/main/res/drawable/btn.xml index d520f55..c7c874e 100644 --- a/app/src/main/res/drawable/btn.xml +++ b/app/src/main/res/drawable/btn.xml @@ -1,9 +1,9 @@ - + + android:startColor="@color/colorAccent" /> \ No newline at end of file diff --git a/app/src/main/res/drawable/btn_circle.xml b/app/src/main/res/drawable/btn_circle.xml index 455fe51..ed6ae0c 100644 --- a/app/src/main/res/drawable/btn_circle.xml +++ b/app/src/main/res/drawable/btn_circle.xml @@ -1,12 +1,12 @@ - + + android:color="@color/colorBord" /> + android:width="60dp" /> \ No newline at end of file diff --git a/app/src/main/res/drawable/btn_point.xml b/app/src/main/res/drawable/btn_point.xml index 3f3e022..5ca1ddc 100644 --- a/app/src/main/res/drawable/btn_point.xml +++ b/app/src/main/res/drawable/btn_point.xml @@ -1,12 +1,12 @@ - + + android:width="40dp" /> + android:startColor="@color/redBtn" /> \ No newline at end of file diff --git a/app/src/main/res/drawable/white_background.xml b/app/src/main/res/drawable/white_background.xml index d79bedc..7a7ac52 100644 --- a/app/src/main/res/drawable/white_background.xml +++ b/app/src/main/res/drawable/white_background.xml @@ -1,9 +1,9 @@ - + + android:startColor="@android:color/white" /> \ No newline at end of file diff --git a/app/src/main/res/values-en/strings.xml b/app/src/main/res/values-en/strings.xml new file mode 100644 index 0000000..216f222 --- /dev/null +++ b/app/src/main/res/values-en/strings.xml @@ -0,0 +1,75 @@ + + FFmpegAndroid + Audio transcoding + Audio cut + Audio merge + Audio mix + Audio decoding and AudioTrack playback + Audio decoding and OpenSL playback + Audio encoding + PCM merge + + Audio processing + Audio and video processing + Video processing + Local push streaming + Real-time live streaming + + Audio and video muxing + Extract audio + Extract video + Audio and video playback + Parse multimedia formats + Video cut + Video stitching + Video watermark + Video to GIF + Video transcoding + Video screenshot + Screen recording + Images to video + Video frame extraction + Video partial zoom-in + Video playback + Filter effects + Slow playback + Fast forward + Picture stitching + Reverse video + Video noise reduction + Video to
images + Video picture-in-picture + Playback preview + Move MOOV ahead + + Switch + Start + Stop + Mute + Sound + Sound On + Sound Off + + Sketch + hue + lut + Edge + Blur + Grid + Rotate + Flip + Rectangle + Flashing + Color balance + Sharpen + + Select a file + Please select the correct file + File does not exist + Please click the menu in the upper right corner to select a file + Not an audio file + Not a video file + Reversing video… + This video is not MP4, so the moov atom cannot be moved to the front + \ No newline at end of file diff --git a/app/src/main/res/values-zh-rCN/strings.xml b/app/src/main/res/values-zh-rCN/strings.xml new file mode 100644 index 0000000..847ce25 --- /dev/null +++ b/app/src/main/res/values-zh-rCN/strings.xml @@ -0,0 +1,77 @@ + + + FFmpegAndroid + 音频转码 + 音频剪切 + 音频合并 + 音频混合 + 音频解码AudioTrack播放 + 音频解码OpenSL播放 + 音频编码 + PCM合并 + + 音频处理 + 音视频处理 + 视频处理 + 本地推流直播 + 实时推流直播 + + 音视频合成 + 提取音频 + 提取视频 + 音视频播放 + 解析多媒体格式 + + 视频剪切 + 视频拼接 + 视频水印 + 视频转Gif + 视频转码 + 视频截图 + 屏幕录制 + 图片合成视频 + 视频抽帧 + 视频局部放大 + 视频播放 + 滤镜特效 + 慢放 + 快进 + 画面拼接 + 视频倒播 + 视频降噪 + 视频转图片 + 视频画中画 + 播放预览 + MOOV前移 + + 切换 + 开始 + 停止 + 静音 + 声音 + 声音开 + 声音关 + + 素描 + hue + lut + 边缘 + 模糊 + 九宫格 + 旋转 + 翻转 + 矩形 + 闪烁 + 色彩平衡 + 锐化 + + 选择文件 + 请选择正确文件 + 文件不存在 + 请点击右上角菜单选择文件 + 非音频文件 + 非视频文件 + 正在反转视频… + 该视频不是mp4,无法进行moov前移操作 + + \ No newline at end of file diff --git a/app/src/main/res/values/strings.xml b/app/src/main/res/values/strings.xml index 6be162d..2f17b4a 100644 --- a/app/src/main/res/values/strings.xml +++ b/app/src/main/res/values/strings.xml @@ -1,76 +1,74 @@ FFmpegAndroid - 音频转码 - 音频剪切 - 音频合并 - 音频混合 - 音频解码AudioTrack播放 - 音频解码OpenSL播放 - 音频编码 - PCM合并 + Audio transcoding + Audio cut + Audio merge + Audio mix + Audio decoding and AudioTrack playback + Audio decoding and OpenSL playback + Audio encoding + PCM merge - 音频处理 - 音视频处理 - 视频处理 - 本地推流直播 - 实时推流直播 + Audio processing + Audio and video processing + Video processing + Local push streaming + Real-time live streaming - 音视频合成 - 提取音频 - 提取视频 - 音视频播放 - 解析多媒体格式 + Audio and video muxing + Extract audio + Extract video + Audio and video playback + Parse multimedia formats + Video cut + Video stitching + Video watermark + Video to GIF + Video transcoding + Video screenshot + Screen recording + Images to video + Video frame extraction + Video partial zoom-in + Video playback + Filter effects + Slow playback + Fast forward + Picture stitching + Reverse video + Video noise reduction + Video to images + Video picture-in-picture + Playback preview + Move MOOV ahead - 视频剪切 - 视频拼接 - 视频水印 - 视频转Gif - 视频转码 - 视频截图 - 屏幕录制 - 图片合成视频 - 视频抽帧 - 视频局部放大 - 视频播放 - 滤镜特效 - 慢放 - 快进 - 画面拼接 - 视频倒播 - 视频降噪 - 视频转图片 - 视频画中画 - 播放预览 - MOOV前移 + Switch + Start + Stop + Mute + Sound + Sound On + Sound Off - 切换 - 开始 - 停止 - 静音 - 声音 - 声音开 - 声音关 - - 素描 - hue - lut - 边缘 - 模糊 - 九宫格 - 旋转 - 翻转 - 矩形 - 闪烁 - 色彩平衡 - 锐化 - - 选择文件 - 请选择正确文件 - 文件不存在 - 请点击右上角菜单选择文件 - 非音频文件 - 非视频文件 - 正在反转视频… - 该视频不是mp4,无法进行moov前移操作 + Sketch + hue + lut + Edge + Blur + Grid + Rotate + Flip + Rectangle + Flashing + Color balance + Sharpen + Select a file + Please select the correct file + File does not exist + Please click the menu in the upper right corner to select a file + Not an audio file + Not a video file + Reversing video… + This video is not MP4, so the moov atom cannot be moved to the front