change Activity to kotlin

pull/166/head
xufulong 4 years ago
parent 8bd6f85f7c
commit 89073b92c8
  1. 246  app/src/main/java/com/frank/ffmpeg/activity/AudioHandleActivity.java
  2. 249  app/src/main/java/com/frank/ffmpeg/activity/AudioHandleActivity.kt
  3. 135  app/src/main/java/com/frank/ffmpeg/activity/BaseActivity.java
  4. 125  app/src/main/java/com/frank/ffmpeg/activity/BaseActivity.kt
  5. 239  app/src/main/java/com/frank/ffmpeg/activity/FilterActivity.java
  6. 194  app/src/main/java/com/frank/ffmpeg/activity/FilterActivity.kt
  7. 130  app/src/main/java/com/frank/ffmpeg/activity/LiveActivity.java
  8. 124  app/src/main/java/com/frank/ffmpeg/activity/LiveActivity.kt
  9. 83   app/src/main/java/com/frank/ffmpeg/activity/MainActivity.java
  10. 68  app/src/main/java/com/frank/ffmpeg/activity/MainActivity.kt
  11. 189 app/src/main/java/com/frank/ffmpeg/activity/MediaHandleActivity.java
  12. 189 app/src/main/java/com/frank/ffmpeg/activity/MediaHandleActivity.kt
  13. 112 app/src/main/java/com/frank/ffmpeg/activity/MediaPlayerActivity.java
  14. 103 app/src/main/java/com/frank/ffmpeg/activity/MediaPlayerActivity.kt
  15. 114 app/src/main/java/com/frank/ffmpeg/activity/ProbeFormatActivity.java
  16. 105 app/src/main/java/com/frank/ffmpeg/activity/ProbeFormatActivity.kt
  17. 86  app/src/main/java/com/frank/ffmpeg/activity/PushActivity.java
  18. 81  app/src/main/java/com/frank/ffmpeg/activity/PushActivity.kt
  19. 442 app/src/main/java/com/frank/ffmpeg/activity/VideoHandleActivity.java
  20. 452 app/src/main/java/com/frank/ffmpeg/activity/VideoHandleActivity.kt
  21. 156 app/src/main/java/com/frank/ffmpeg/activity/VideoPreviewActivity.java
  22. 146 app/src/main/java/com/frank/ffmpeg/activity/VideoPreviewActivity.kt

@@ -1,246 +0,0 @@
package com.frank.ffmpeg.activity;
import android.annotation.SuppressLint;
import android.os.Environment;
import android.os.Handler;
import android.os.Message;
import android.os.Bundle;
import android.view.View;
import android.widget.LinearLayout;
import android.widget.TextView;
import java.io.File;
import java.util.ArrayList;
import java.util.List;
import java.util.Locale;
import com.frank.ffmpeg.AudioPlayer;
import com.frank.ffmpeg.R;
import com.frank.ffmpeg.handler.FFmpegHandler;
import com.frank.ffmpeg.mp3.Mp3Converter;
import com.frank.ffmpeg.util.FFmpegUtil;
import com.frank.ffmpeg.util.FileUtil;
import static com.frank.ffmpeg.handler.FFmpegHandler.MSG_BEGIN;
import static com.frank.ffmpeg.handler.FFmpegHandler.MSG_FINISH;
import static com.frank.ffmpeg.handler.FFmpegHandler.MSG_PROGRESS;
/**
* Using ffmpeg command to handle audio
* Created by frank on 2018/1/23.
*/
public class AudioHandleActivity extends BaseActivity {
private final static String PATH = Environment.getExternalStorageDirectory().getPath();
private String appendFile = PATH + File.separator + "heart.m4a";
private LinearLayout layoutAudioHandle;
private LinearLayout layoutProgress;
private TextView txtProgress;
private int viewId;
private FFmpegHandler ffmpegHandler;
private String outputPath1 = PATH + File.separator + "output1.mp3";
private String outputPath2 = PATH + File.separator + "output2.mp3";
private boolean isJointing = false;
private final static boolean useFFmpeg = true;
@SuppressLint("HandlerLeak")
private Handler mHandler = new Handler() {
@Override
public void handleMessage(Message msg) {
super.handleMessage(msg);
switch (msg.what) {
case MSG_BEGIN:
layoutProgress.setVisibility(View.VISIBLE);
layoutAudioHandle.setVisibility(View.GONE);
break;
case MSG_FINISH:
layoutProgress.setVisibility(View.GONE);
layoutAudioHandle.setVisibility(View.VISIBLE);
if (isJointing) {
isJointing = false;
FileUtil.deleteFile(outputPath1);
FileUtil.deleteFile(outputPath2);
}
break;
case MSG_PROGRESS:
int progress = msg.arg1;
int duration = msg.arg2;
if (progress > 0) {
txtProgress.setVisibility(View.VISIBLE);
txtProgress.setText(String.format(Locale.getDefault(), "%d%%", progress));
} else {
txtProgress.setVisibility(View.INVISIBLE);
}
break;
default:
break;
}
}
};
@Override
int getLayoutId() {
return R.layout.activity_audio_handle;
}
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
hideActionBar();
initView();
ffmpegHandler = new FFmpegHandler(mHandler);
}
private void initView() {
layoutProgress = getView(R.id.layout_progress);
txtProgress = getView(R.id.txt_progress);
layoutAudioHandle = getView(R.id.layout_audio_handle);
initViewsWithClick(
R.id.btn_transform,
R.id.btn_cut,
R.id.btn_concat,
R.id.btn_mix,
R.id.btn_play_audio,
R.id.btn_play_opensl,
R.id.btn_audio_encode,
R.id.btn_pcm_concat,
R.id.btn_audio_speed
);
}
@Override
public void onViewClick(View view) {
viewId = view.getId();
selectFile();
}
@Override
void onSelectedFile(String filePath) {
doHandleAudio(filePath);
}
/**
* Using ffmpeg cmd to handle audio
*
* @param srcFile srcFile
*/
private void doHandleAudio(final String srcFile) {
String[] commandLine = null;
if (!FileUtil.checkFileExist(srcFile)) {
return;
}
if (!FileUtil.isAudio(srcFile)) {
showToast(getString(R.string.wrong_audio_format));
return;
}
switch (viewId) {
case R.id.btn_transform:
if (useFFmpeg) { //use FFmpeg to transform
String transformFile = PATH + File.separator + "transformAudio.mp3";
commandLine = FFmpegUtil.transformAudio(srcFile, transformFile);
} else { //use MediaCodec and libmp3lame to transform
new Thread(() -> {
String transformInput = PATH + File.separator + "transformAudio.mp3";
Mp3Converter mp3Converter = new Mp3Converter();
mp3Converter.convertToMp3(srcFile, transformInput);
}).start();
}
break;
case R.id.btn_cut://cut audio, it's best not to include special characters
String suffix = FileUtil.getFileSuffix(srcFile);
if (suffix == null || suffix.isEmpty()) {
return;
}
String cutFile = PATH + File.separator + "cutAudio" + suffix;
commandLine = FFmpegUtil.cutAudio(srcFile, 10, 15, cutFile);
break;
case R.id.btn_concat://concat audio
if (!FileUtil.checkFileExist(appendFile)) {
return;
}
concatAudio(srcFile);
return;
case R.id.btn_mix://mix audio
if (!FileUtil.checkFileExist(appendFile)) {
return;
}
String mixSuffix = FileUtil.getFileSuffix(srcFile);
if (mixSuffix == null || mixSuffix.isEmpty()) {
return;
}
String mixFile = PATH + File.separator + "mix" + mixSuffix;
commandLine = FFmpegUtil.mixAudio(srcFile, appendFile, mixFile);
break;
case R.id.btn_play_audio://use AudioTrack to play audio
new Thread(() -> new AudioPlayer().play(srcFile)).start();
return;
case R.id.btn_play_opensl://use OpenSL ES to play audio
new Thread(() -> new AudioPlayer().playAudio(srcFile)).start();
return;
case R.id.btn_audio_encode://audio encode
String pcmFile = PATH + File.separator + "raw.pcm";
String wavFile = PATH + File.separator + "convert.mp3";
//sample rate, commonly 8000/16000/44100
int sampleRate = 44100;
//channel num of pcm
int channel = 2;
commandLine = FFmpegUtil.encodeAudio(pcmFile, wavFile, sampleRate, channel);
break;
case R.id.btn_pcm_concat://concat PCM streams
String srcPCM = PATH + File.separator + "audio.pcm";
String appendPCM = PATH + File.separator + "audio.pcm";
String concatPCM = PATH + File.separator + "concat.pcm";
if (!FileUtil.checkFileExist(srcPCM) || !FileUtil.checkFileExist(appendPCM)) {
return;
}
mHandler.obtainMessage(MSG_BEGIN).sendToTarget();
FileUtil.concatFile(srcPCM, appendPCM, concatPCM);
mHandler.obtainMessage(MSG_FINISH).sendToTarget();
return;
case R.id.btn_audio_speed://change audio speed
float speed = 2.0f;//from 0.5 to 2.0
String speedPath = PATH + File.separator + "speed.mp3";
commandLine = FFmpegUtil.changeAudioSpeed(srcFile, speedPath, speed);
break;
default:
break;
}
if (ffmpegHandler != null && commandLine != null) {
ffmpegHandler.executeFFmpegCmd(commandLine);
}
}
private void concatAudio(String selectedPath) {
if (ffmpegHandler == null || selectedPath.isEmpty() || appendFile.isEmpty()) {
return;
}
isJointing = true;
String targetPath = PATH + File.separator + "concatAudio.mp3";
String[] transformCmd1 = FFmpegUtil.transformAudio(selectedPath, "libmp3lame", outputPath1);
String[] transformCmd2 = FFmpegUtil.transformAudio(appendFile, "libmp3lame", outputPath2);
List<String> fileList = new ArrayList<>();
fileList.add(outputPath1);
fileList.add(outputPath2);
String[] jointVideoCmd = FFmpegUtil.concatAudio(fileList, targetPath);
List<String[]> commandList = new ArrayList<>();
commandList.add(transformCmd1);
commandList.add(transformCmd2);
commandList.add(jointVideoCmd);
ffmpegHandler.executeFFmpegCmds(commandList);
}
@Override
protected void onDestroy() {
super.onDestroy();
if (mHandler != null) {
mHandler.removeCallbacksAndMessages(null);
}
}
}

@@ -0,0 +1,249 @@
package com.frank.ffmpeg.activity
import android.annotation.SuppressLint
import android.os.Environment
import android.os.Handler
import android.os.Message
import android.os.Bundle
import android.view.View
import android.widget.LinearLayout
import android.widget.TextView
import java.io.File
import java.util.ArrayList
import java.util.Locale
import com.frank.ffmpeg.AudioPlayer
import com.frank.ffmpeg.R
import com.frank.ffmpeg.handler.FFmpegHandler
import com.frank.ffmpeg.mp3.Mp3Converter
import com.frank.ffmpeg.util.FFmpegUtil
import com.frank.ffmpeg.util.FileUtil
import com.frank.ffmpeg.handler.FFmpegHandler.MSG_BEGIN
import com.frank.ffmpeg.handler.FFmpegHandler.MSG_FINISH
import com.frank.ffmpeg.handler.FFmpegHandler.MSG_PROGRESS
/**
* Using ffmpeg command to handle audio
* Created by frank on 2018/1/23.
*/
class AudioHandleActivity : BaseActivity() {
private val appendFile = PATH + File.separator + "heart.m4a"
private var layoutAudioHandle: LinearLayout? = null
private var layoutProgress: LinearLayout? = null
private var txtProgress: TextView? = null
private var viewId: Int = 0
private var ffmpegHandler: FFmpegHandler? = null
private val outputPath1 = PATH + File.separator + "output1.mp3"
private val outputPath2 = PATH + File.separator + "output2.mp3"
private var isJointing = false
@SuppressLint("HandlerLeak")
private val mHandler = object : Handler() {
override fun handleMessage(msg: Message) {
super.handleMessage(msg)
when (msg.what) {
MSG_BEGIN -> {
layoutProgress!!.visibility = View.VISIBLE
layoutAudioHandle!!.visibility = View.GONE
}
MSG_FINISH -> {
layoutProgress!!.visibility = View.GONE
layoutAudioHandle!!.visibility = View.VISIBLE
if (isJointing) {
isJointing = false
FileUtil.deleteFile(outputPath1)
FileUtil.deleteFile(outputPath2)
}
}
MSG_PROGRESS -> {
val progress = msg.arg1
val duration = msg.arg2
if (progress > 0) {
txtProgress!!.visibility = View.VISIBLE
txtProgress!!.text = String.format(Locale.getDefault(), "%d%%", progress)
} else {
txtProgress!!.visibility = View.INVISIBLE
}
}
else -> {
}
}
}
}
override val layoutId: Int
get() = R.layout.activity_audio_handle
override fun onCreate(savedInstanceState: Bundle?) {
super.onCreate(savedInstanceState)
hideActionBar()
initView()
ffmpegHandler = FFmpegHandler(mHandler)
}
private fun initView() {
layoutProgress = getView(R.id.layout_progress)
txtProgress = getView(R.id.txt_progress)
layoutAudioHandle = getView(R.id.layout_audio_handle)
initViewsWithClick(
R.id.btn_transform,
R.id.btn_cut,
R.id.btn_concat,
R.id.btn_mix,
R.id.btn_play_audio,
R.id.btn_play_opensl,
R.id.btn_audio_encode,
R.id.btn_pcm_concat,
R.id.btn_audio_speed
)
}
override fun onViewClick(view: View) {
viewId = view.id
selectFile()
}
override fun onSelectedFile(filePath: String) {
doHandleAudio(filePath)
}
/**
* Using ffmpeg cmd to handle audio
*
* @param srcFile srcFile
*/
private fun doHandleAudio(srcFile: String) {
var commandLine: Array<String>? = null
if (!FileUtil.checkFileExist(srcFile)) {
return
}
if (!FileUtil.isAudio(srcFile)) {
showToast(getString(R.string.wrong_audio_format))
return
}
when (viewId) {
R.id.btn_transform -> if (useFFmpeg) { //use FFmpeg to transform
val transformFile = PATH + File.separator + "transformAudio.mp3"
commandLine = FFmpegUtil.transformAudio(srcFile, transformFile)
} else { //use MediaCodec and libmp3lame to transform
Thread {
val transformInput = PATH + File.separator + "transformAudio.mp3"
val mp3Converter = Mp3Converter()
mp3Converter.convertToMp3(srcFile, transformInput)
}.start()
}
R.id.btn_cut//cut audio, it's best not to include special characters
-> {
val suffix = FileUtil.getFileSuffix(srcFile)
if (suffix == null || suffix.isEmpty()) {
return
}
val cutFile = PATH + File.separator + "cutAudio" + suffix
commandLine = FFmpegUtil.cutAudio(srcFile, 10, 15, cutFile)
}
R.id.btn_concat//concat audio
-> {
if (!FileUtil.checkFileExist(appendFile)) {
return
}
concatAudio(srcFile)
return
}
R.id.btn_mix//mix audio
-> {
if (!FileUtil.checkFileExist(appendFile)) {
return
}
val mixSuffix = FileUtil.getFileSuffix(srcFile)
if (mixSuffix == null || mixSuffix.isEmpty()) {
return
}
val mixFile = PATH + File.separator + "mix" + mixSuffix
commandLine = FFmpegUtil.mixAudio(srcFile, appendFile, mixFile)
}
R.id.btn_play_audio//use AudioTrack to play audio
-> {
Thread { AudioPlayer().play(srcFile) }.start()
return
}
R.id.btn_play_opensl//use OpenSL ES to play audio
-> {
Thread { AudioPlayer().playAudio(srcFile) }.start()
return
}
R.id.btn_audio_encode//audio encode
-> {
val pcmFile = PATH + File.separator + "raw.pcm"
val wavFile = PATH + File.separator + "convert.mp3"
//sample rate, commonly 8000/16000/44100
val sampleRate = 44100
//channel num of pcm
val channel = 2
commandLine = FFmpegUtil.encodeAudio(pcmFile, wavFile, sampleRate, channel)
}
R.id.btn_pcm_concat//concat PCM streams
-> {
val srcPCM = PATH + File.separator + "audio.pcm"
val appendPCM = PATH + File.separator + "audio.pcm"
val concatPCM = PATH + File.separator + "concat.pcm"
if (!FileUtil.checkFileExist(srcPCM) || !FileUtil.checkFileExist(appendPCM)) {
return
}
mHandler.obtainMessage(MSG_BEGIN).sendToTarget()
FileUtil.concatFile(srcPCM, appendPCM, concatPCM)
mHandler.obtainMessage(MSG_FINISH).sendToTarget()
return
}
R.id.btn_audio_speed//change audio speed
-> {
val speed = 2.0f//from 0.5 to 2.0
val speedPath = PATH + File.separator + "speed.mp3"
commandLine = FFmpegUtil.changeAudioSpeed(srcFile, speedPath, speed)
}
else -> {
}
}
if (ffmpegHandler != null && commandLine != null) {
ffmpegHandler!!.executeFFmpegCmd(commandLine)
}
}
private fun concatAudio(selectedPath: String) {
if (ffmpegHandler == null || selectedPath.isEmpty() || appendFile.isEmpty()) {
return
}
isJointing = true
val targetPath = PATH + File.separator + "concatAudio.mp3"
val transformCmd1 = FFmpegUtil.transformAudio(selectedPath, "libmp3lame", outputPath1)
val transformCmd2 = FFmpegUtil.transformAudio(appendFile, "libmp3lame", outputPath2)
val fileList = ArrayList<String>()
fileList.add(outputPath1)
fileList.add(outputPath2)
val jointVideoCmd = FFmpegUtil.concatAudio(fileList, targetPath)
val commandList = ArrayList<Array<String>>()
commandList.add(transformCmd1)
commandList.add(transformCmd2)
commandList.add(jointVideoCmd)
ffmpegHandler!!.executeFFmpegCmds(commandList)
}
override fun onDestroy() {
super.onDestroy()
mHandler?.removeCallbacksAndMessages(null)
}
companion object {
private val PATH = Environment.getExternalStorageDirectory().path
private const val useFFmpeg = true
}
}
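Each branch above only builds an argument array; FFmpegUtil itself is not part of this commit. As a rough sketch of what cutAudio() and changeAudioSpeed() might assemble — assuming the two numeric arguments are start time and duration in seconds, and using standard ffmpeg options — the arrays could look like this (helper names and argument order are assumptions, not the project's actual code):

// Hypothetical sketch of the command arrays handed to FFmpegHandler.executeFFmpegCmd().
fun cutAudioSketch(srcFile: String, start: Int, duration: Int, outFile: String): Array<String> =
    arrayOf("ffmpeg", "-ss", start.toString(), "-t", duration.toString(),
            "-i", srcFile, "-acodec", "copy", "-y", outFile)

fun changeAudioSpeedSketch(srcFile: String, outFile: String, speed: Float): Array<String> =
    // a single atempo filter instance accepts speeds of 0.5–2.0, matching the range noted above
    arrayOf("ffmpeg", "-i", srcFile, "-filter:a", "atempo=$speed", "-y", outFile)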

@@ -1,135 +0,0 @@
package com.frank.ffmpeg.activity;
import android.Manifest;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.os.Build;
import android.os.Bundle;
import androidx.annotation.Nullable;
import androidx.appcompat.app.AppCompatActivity;
import android.text.TextUtils;
import android.util.Log;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.widget.Toast;
import com.frank.ffmpeg.R;
import com.frank.ffmpeg.util.ContentUtil;
/**
* base Activity
* Created by frank on 2019/11/2.
*/
public abstract class BaseActivity extends AppCompatActivity implements View.OnClickListener {
private final static String TAG = BaseActivity.class.getSimpleName();
private final static int REQUEST_CODE = 1234;
private final static String[] permissions = new String[]{
Manifest.permission.WRITE_EXTERNAL_STORAGE,
Manifest.permission.READ_EXTERNAL_STORAGE,
Manifest.permission.CAMERA,
Manifest.permission.RECORD_AUDIO};
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M
&& checkSelfPermission(permissions[0]) != PackageManager.PERMISSION_GRANTED
&& checkSelfPermission(permissions[1]) != PackageManager.PERMISSION_GRANTED) {
requestPermission();
}
setContentView(getLayoutId());
}
protected void hideActionBar() {
if (getSupportActionBar() != null) {
getSupportActionBar().hide();
}
}
private void requestPermission() {
requestPermission(permissions);
}
protected void requestPermission(String[] permissions) {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
requestPermissions(permissions, REQUEST_CODE);
}
}
protected void initViewsWithClick(int... viewIds) {
for (int viewId : viewIds) {
getView(viewId).setOnClickListener(this);
}
}
@Override
public void onClick(View v) {
onViewClick(v);
}
protected void selectFile() {
Intent intent = new Intent(Intent.ACTION_GET_CONTENT);
intent.addCategory(Intent.CATEGORY_OPENABLE);
intent.setType("*/*");
intent.addFlags(Intent.FLAG_GRANT_READ_URI_PERMISSION);
this.startActivityForResult(intent, 123);
}
@Override
protected void onActivityResult(int requestCode, int resultCode, @Nullable Intent data) {
super.onActivityResult(requestCode, resultCode, data);
if (data != null && data.getData() != null) {
String filePath = ContentUtil.getPath(this, data.getData());
Log.i(TAG, "filePath=" + filePath);
onSelectedFile(filePath);
}
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
getMenuInflater().inflate(R.menu.menu_setting, menu);
return true;
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
switch (item.getItemId()) {
case R.id.menu_select:
selectFile();
break;
default:
break;
}
return super.onOptionsItemSelected(item);
}
protected void showToast(String msg) {
if (TextUtils.isEmpty(msg)) {
return;
}
Toast.makeText(this, msg, Toast.LENGTH_SHORT).show();
}
protected void showSelectFile() {
showToast(getString(R.string.please_select));
}
protected <T extends View> T getView(int viewId) {
return (T) findViewById(viewId);
}
abstract int getLayoutId();
abstract void onViewClick(View view);
abstract void onSelectedFile(String filePath);
}

@@ -0,0 +1,125 @@
package com.frank.ffmpeg.activity
import android.Manifest
import android.content.Intent
import android.content.pm.PackageManager
import android.os.Build
import android.os.Bundle
import androidx.appcompat.app.AppCompatActivity
import android.text.TextUtils
import android.util.Log
import android.view.Menu
import android.view.MenuItem
import android.view.View
import android.widget.Toast
import com.frank.ffmpeg.R
import com.frank.ffmpeg.util.ContentUtil
/**
* base Activity
* Created by frank on 2019/11/2.
*/
abstract class BaseActivity : AppCompatActivity(), View.OnClickListener {
abstract val layoutId: Int
override fun onCreate(savedInstanceState: Bundle?) {
super.onCreate(savedInstanceState)
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M
&& checkSelfPermission(permissions[0]) != PackageManager.PERMISSION_GRANTED
&& checkSelfPermission(permissions[1]) != PackageManager.PERMISSION_GRANTED) {
requestPermission()
}
setContentView(layoutId)
}
protected fun hideActionBar() {
if (supportActionBar != null) {
supportActionBar!!.hide()
}
}
private fun requestPermission() {
requestPermission(permissions)
}
protected fun requestPermission(permissions: Array<String>) {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
requestPermissions(permissions, REQUEST_CODE)
}
}
protected fun initViewsWithClick(vararg viewIds: Int) {
for (viewId in viewIds) {
getView<View>(viewId).setOnClickListener(this)
}
}
override fun onClick(v: View) {
onViewClick(v)
}
protected fun selectFile() {
val intent = Intent(Intent.ACTION_GET_CONTENT)
intent.addCategory(Intent.CATEGORY_OPENABLE)
intent.type = "*/*"
intent.addFlags(Intent.FLAG_GRANT_READ_URI_PERMISSION)
this.startActivityForResult(intent, 123)
}
override fun onActivityResult(requestCode: Int, resultCode: Int, data: Intent?) {
super.onActivityResult(requestCode, resultCode, data)
if (data != null && data.data != null) {
val filePath = ContentUtil.getPath(this, data.data)
Log.i(TAG, "filePath=" + filePath!!)
onSelectedFile(filePath)
}
}
override fun onCreateOptionsMenu(menu: Menu): Boolean {
menuInflater.inflate(R.menu.menu_setting, menu)
return true
}
override fun onOptionsItemSelected(item: MenuItem): Boolean {
when (item.itemId) {
R.id.menu_select -> selectFile()
else -> {
}
}
return super.onOptionsItemSelected(item)
}
protected fun showToast(msg: String) {
if (TextUtils.isEmpty(msg)) {
return
}
Toast.makeText(this, msg, Toast.LENGTH_SHORT).show()
}
protected fun showSelectFile() {
showToast(getString(R.string.please_select))
}
protected fun <T : View> getView(viewId: Int): T {
return findViewById<View>(viewId) as T
}
internal abstract fun onViewClick(view: View)
internal abstract fun onSelectedFile(filePath: String)
companion object {
private val TAG = BaseActivity::class.java.simpleName
private const val REQUEST_CODE = 1234
private val permissions = arrayOf(Manifest.permission.WRITE_EXTERNAL_STORAGE, Manifest.permission.READ_EXTERNAL_STORAGE, Manifest.permission.CAMERA, Manifest.permission.RECORD_AUDIO)
}
}
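The core pattern of this conversion is visible here: the Java abstract method getLayoutId() becomes an abstract val layoutId, the remaining abstract methods become internal, and the static TAG/REQUEST_CODE/permissions fields move into a companion object. A minimal, hypothetical subclass (DemoActivity and R.layout.activity_demo are invented for illustration; imports match the activities below) implements the contract like this:

class DemoActivity : BaseActivity() {
    override val layoutId: Int
        get() = R.layout.activity_demo // hypothetical layout resource

    // delegate a click straight to the base class file picker
    override fun onViewClick(view: View) = selectFile()

    override fun onSelectedFile(filePath: String) = showToast(filePath)
}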

@@ -1,239 +0,0 @@
package com.frank.ffmpeg.activity;
import android.annotation.SuppressLint;
import android.os.Environment;
import android.os.Handler;
import android.os.Message;
import android.os.Bundle;
import androidx.recyclerview.widget.LinearLayoutManager;
import androidx.recyclerview.widget.RecyclerView;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.View;
import android.widget.Button;
import android.widget.CompoundButton;
import android.widget.Toast;
import android.widget.ToggleButton;
import com.frank.ffmpeg.FFmpegApplication;
import com.frank.ffmpeg.R;
import com.frank.ffmpeg.VideoPlayer;
import com.frank.ffmpeg.adapter.HorizontalAdapter;
import com.frank.ffmpeg.listener.OnItemClickListener;
import com.frank.ffmpeg.util.FileUtil;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
/**
* Using ffmpeg to filter
* Created by frank on 2018/6/5.
*/
public class FilterActivity extends BaseActivity implements SurfaceHolder.Callback {
private String videoPath = Environment.getExternalStorageDirectory().getPath() + "/beyond.mp4";
private VideoPlayer videoPlayer;
private SurfaceView surfaceView;
private SurfaceHolder surfaceHolder;
private boolean surfaceCreated;
//is playing or not
private boolean isPlaying;
//the array of filter
private String[] filters = new String[]{
"lutyuv='u=128:v=128'",
"hue='h=60:s=-3'",
"edgedetect=low=0.1:high=0.4",
"drawgrid=w=iw/3:h=ih/3:t=2:c=white@0.5",
"colorbalance=bs=0.3",
"drawbox=x=100:y=100:w=100:h=100:color=red@0.5'",
"hflip",
//adjust the coefficient of sigma to control the blur
"gblur=sigma=2:steps=1:planes=1:sigmaV=1",
"rotate=180*PI/180",
"unsharp"
};
//vflip flips vertically, hflip flips horizontally
private String[] txtArray = new String[]{
FFmpegApplication.getInstance().getString(R.string.filter_sketch),
FFmpegApplication.getInstance().getString(R.string.filter_distinct),
FFmpegApplication.getInstance().getString(R.string.filter_edge),
FFmpegApplication.getInstance().getString(R.string.filter_division),
FFmpegApplication.getInstance().getString(R.string.filter_equalize),
FFmpegApplication.getInstance().getString(R.string.filter_rectangle),
FFmpegApplication.getInstance().getString(R.string.filter_flip),
FFmpegApplication.getInstance().getString(R.string.filter_blur),
FFmpegApplication.getInstance().getString(R.string.filter_rotate),
FFmpegApplication.getInstance().getString(R.string.filter_sharpening)
};
private HorizontalAdapter horizontalAdapter;
private RecyclerView recyclerView;
private boolean playAudio = true;
private ToggleButton btnSound;
private Button btnSelect;
private final static int MSG_HIDE = 222;
private final static int DELAY_TIME = 5000;
@SuppressLint("HandlerLeak")
private Handler mHandler = new Handler() {
@Override
public void handleMessage(Message msg) {
super.handleMessage(msg);
if (msg.what == MSG_HIDE) { //after idle 5s, hide the controller view
recyclerView.setVisibility(View.GONE);
btnSound.setVisibility(View.GONE);
btnSelect.setVisibility(View.GONE);
}
}
};
private class HideRunnable implements Runnable {
@Override
public void run() {
mHandler.obtainMessage(MSG_HIDE).sendToTarget();
}
}
private HideRunnable hideRunnable;
@Override
int getLayoutId() {
return R.layout.activity_filter;
}
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
hideActionBar();
initView();
registerLister();
hideRunnable = new HideRunnable();
mHandler.postDelayed(hideRunnable, DELAY_TIME);
}
private void initView() {
surfaceView = getView(R.id.surface_filter);
surfaceHolder = surfaceView.getHolder();
surfaceHolder.addCallback(this);
videoPlayer = new VideoPlayer();
btnSound = getView(R.id.btn_sound);
recyclerView = getView(R.id.recycler_view);
LinearLayoutManager linearLayoutManager = new LinearLayoutManager(this);
linearLayoutManager.setOrientation(LinearLayoutManager.HORIZONTAL);
recyclerView.setLayoutManager(linearLayoutManager);
List<String> itemList = new ArrayList<>(Arrays.asList(txtArray));
horizontalAdapter = new HorizontalAdapter(itemList);
recyclerView.setAdapter(horizontalAdapter);
btnSelect = getView(R.id.btn_select_file);
initViewsWithClick(R.id.btn_select_file);
}
private void registerLister() {
horizontalAdapter.setOnItemClickListener(new OnItemClickListener() {
@Override
public void onItemClick(int position) {
if (!surfaceCreated)
return;
if (!FileUtil.checkFileExist(videoPath)) {
showSelectFile();
return;
}
doFilterPlay(position);
}
});
surfaceView.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
btnSelect.setVisibility(View.VISIBLE);
btnSound.setVisibility(View.VISIBLE);
recyclerView.setVisibility(View.VISIBLE);
mHandler.postDelayed(hideRunnable, DELAY_TIME);
}
});
btnSound.setOnCheckedChangeListener(new CompoundButton.OnCheckedChangeListener() {
@Override
public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) {
setPlayAudio();
}
});
}
/**
* switch filter
* @param position position in the array of filters
*/
private void doFilterPlay(final int position) {
new Thread(new Runnable() {
@Override
public void run() {
if (isPlaying) {
videoPlayer.again();
}
isPlaying = true;
videoPlayer.filter(videoPath, surfaceHolder.getSurface(), filters[position]);
}
}).start();
}
private void setPlayAudio() {
playAudio = !playAudio;
videoPlayer.playAudio(playAudio);
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
surfaceCreated = true;
if (FileUtil.checkFileExist(videoPath)) {
doFilterPlay(4);
btnSound.setChecked(true);
} else {
Toast.makeText(FilterActivity.this, getString(R.string.file_not_found), Toast.LENGTH_SHORT).show();
}
}
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
surfaceCreated = false;
}
@Override
protected void onDestroy() {
super.onDestroy();
isPlaying = false;
//FIXME
// videoPlayer.release();
videoPlayer = null;
horizontalAdapter = null;
}
@Override
void onViewClick(View view) {
if (view.getId() == R.id.btn_select_file) {
selectFile();
}
}
@Override
void onSelectedFile(String filePath) {
videoPath = filePath;
doFilterPlay(4);
//sound off by default
btnSound.setChecked(true);
}
}

@@ -0,0 +1,194 @@
package com.frank.ffmpeg.activity
import android.annotation.SuppressLint
import android.os.Environment
import android.os.Handler
import android.os.Message
import android.os.Bundle
import androidx.recyclerview.widget.LinearLayoutManager
import androidx.recyclerview.widget.RecyclerView
import android.view.SurfaceHolder
import android.view.SurfaceView
import android.view.View
import android.widget.Button
import android.widget.Toast
import android.widget.ToggleButton
import com.frank.ffmpeg.FFmpegApplication
import com.frank.ffmpeg.R
import com.frank.ffmpeg.VideoPlayer
import com.frank.ffmpeg.adapter.HorizontalAdapter
import com.frank.ffmpeg.listener.OnItemClickListener
import com.frank.ffmpeg.util.FileUtil
import java.util.ArrayList
import java.util.Arrays
/**
* Using ffmpeg to filter
* Created by frank on 2018/6/5.
*/
class FilterActivity : BaseActivity(), SurfaceHolder.Callback {
private var videoPath = Environment.getExternalStorageDirectory().path + "/beyond.mp4"
private var videoPlayer: VideoPlayer? = null
private var surfaceView: SurfaceView? = null
private var surfaceHolder: SurfaceHolder? = null
private var surfaceCreated: Boolean = false
//is playing or not
private var isPlaying: Boolean = false
//the array of filter
private val filters = arrayOf(
"lutyuv='u=128:v=128'",
"hue='h=60:s=-3'",
"edgedetect=low=0.1:high=0.4",
"drawgrid=w=iw/3:h=ih/3:t=2:c=white@0.5",
"colorbalance=bs=0.3",
"drawbox=x=100:y=100:w=100:h=100:color=red@0.5",
"hflip",
//adjust the coefficient of sigma to control the blur
"gblur=sigma=2:steps=1:planes=1:sigmaV=1",
"rotate=180*PI/180",
"unsharp")
//vflip flips vertically, hflip flips horizontally
private val txtArray = arrayOf(
FFmpegApplication.getInstance().getString(R.string.filter_sketch),
FFmpegApplication.getInstance().getString(R.string.filter_distinct),
FFmpegApplication.getInstance().getString(R.string.filter_edge),
FFmpegApplication.getInstance().getString(R.string.filter_division),
FFmpegApplication.getInstance().getString(R.string.filter_equalize),
FFmpegApplication.getInstance().getString(R.string.filter_rectangle),
FFmpegApplication.getInstance().getString(R.string.filter_flip),
FFmpegApplication.getInstance().getString(R.string.filter_blur),
FFmpegApplication.getInstance().getString(R.string.filter_rotate),
FFmpegApplication.getInstance().getString(R.string.filter_sharpening))
private var horizontalAdapter: HorizontalAdapter? = null
private var recyclerView: RecyclerView? = null
private var playAudio = true
private var btnSound: ToggleButton? = null
private var btnSelect: Button? = null
@SuppressLint("HandlerLeak")
private val mHandler = object : Handler() {
override fun handleMessage(msg: Message) {
super.handleMessage(msg)
if (msg.what == MSG_HIDE) { //after idle 5s, hide the controller view
recyclerView!!.visibility = View.GONE
btnSound!!.visibility = View.GONE
btnSelect!!.visibility = View.GONE
}
}
}
private var hideRunnable: HideRunnable? = null
override val layoutId: Int
get() = R.layout.activity_filter
private inner class HideRunnable : Runnable {
override fun run() {
mHandler.obtainMessage(MSG_HIDE).sendToTarget()
}
}
override fun onCreate(savedInstanceState: Bundle?) {
super.onCreate(savedInstanceState)
hideActionBar()
initView()
registerLister()
hideRunnable = HideRunnable()
mHandler.postDelayed(hideRunnable, DELAY_TIME.toLong())
}
private fun initView() {
surfaceView = getView(R.id.surface_filter)
surfaceHolder = surfaceView!!.holder
surfaceHolder!!.addCallback(this)
videoPlayer = VideoPlayer()
btnSound = getView(R.id.btn_sound)
recyclerView = getView(R.id.recycler_view)
val linearLayoutManager = LinearLayoutManager(this)
linearLayoutManager.orientation = LinearLayoutManager.HORIZONTAL
recyclerView!!.layoutManager = linearLayoutManager
val itemList = ArrayList(Arrays.asList(*txtArray))
horizontalAdapter = HorizontalAdapter(itemList)
recyclerView!!.adapter = horizontalAdapter
btnSelect = getView(R.id.btn_select_file)
initViewsWithClick(R.id.btn_select_file)
}
private fun registerLister() {
horizontalAdapter!!.setOnItemClickListener(OnItemClickListener { position ->
if (!surfaceCreated)
return@OnItemClickListener
if (!FileUtil.checkFileExist(videoPath)) {
showSelectFile()
return@OnItemClickListener
}
doFilterPlay(position)
})
surfaceView!!.setOnClickListener {
btnSelect!!.visibility = View.VISIBLE
btnSound!!.visibility = View.VISIBLE
recyclerView!!.visibility = View.VISIBLE
mHandler.postDelayed(hideRunnable, DELAY_TIME.toLong())
}
btnSound!!.setOnCheckedChangeListener { buttonView, isChecked -> setPlayAudio() }
}
/**
* switch filter
* @param position position in the array of filters
*/
private fun doFilterPlay(position: Int) {
Thread(Runnable {
if (isPlaying) {
videoPlayer!!.again()
}
isPlaying = true
videoPlayer!!.filter(videoPath, surfaceHolder!!.surface, filters[position])
}).start()
}
private fun setPlayAudio() {
playAudio = !playAudio
videoPlayer!!.playAudio(playAudio)
}
override fun surfaceCreated(holder: SurfaceHolder) {
surfaceCreated = true
if (FileUtil.checkFileExist(videoPath)) {
doFilterPlay(4)
btnSound!!.isChecked = true
} else {
Toast.makeText(this@FilterActivity, getString(R.string.file_not_found), Toast.LENGTH_SHORT).show()
}
}
override fun surfaceChanged(holder: SurfaceHolder, format: Int, width: Int, height: Int) {
}
override fun surfaceDestroyed(holder: SurfaceHolder) {
surfaceCreated = false
}
override fun onDestroy() {
super.onDestroy()
isPlaying = false
//FIXME
// videoPlayer.release();
videoPlayer = null
horizontalAdapter = null
}
override fun onViewClick(view: View) {
if (view.id == R.id.btn_select_file) {
selectFile()
}
}
override fun onSelectedFile(filePath: String) {
videoPath = filePath
doFilterPlay(4)
//sound off by default
btnSound!!.isChecked = true
}
companion object {
private val MSG_HIDE = 222
private val DELAY_TIME = 5000
}
}
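The entries in the filters array are ordinary FFmpeg filtergraph expressions that VideoPlayer presumably forwards to libavfilter; the native player is not part of this commit. As an illustration only, the gblur entry corresponds to a command line like the one assembled below (paths and the function name are examples, not project code):

// Example: the same blur filter expressed as an ffmpeg CLI argument array.
fun gblurCommandSketch(input: String, output: String): Array<String> =
    arrayOf("ffmpeg", "-i", input,
            "-vf", "gblur=sigma=2:steps=1:planes=1:sigmaV=1",
            "-y", output)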

@@ -1,130 +0,0 @@
package com.frank.ffmpeg.activity;
import android.annotation.SuppressLint;
import android.media.AudioFormat;
import android.os.Bundle;
import android.os.Handler;
import android.os.Message;
import android.text.TextUtils;
import android.util.Log;
import android.view.SurfaceView;
import android.view.View;
import android.widget.CompoundButton;
import android.widget.Toast;
import android.widget.ToggleButton;
import com.frank.ffmpeg.R;
import com.frank.live.camera2.Camera2Helper;
import com.frank.live.listener.LiveStateChangeListener;
import com.frank.live.param.AudioParam;
import com.frank.live.param.VideoParam;
import com.frank.live.LivePusherNew;
/**
* Realtime live streaming over RTMP
* Created by frank on 2018/1/28.
*/
public class LiveActivity extends BaseActivity implements CompoundButton.OnCheckedChangeListener, LiveStateChangeListener {
private final static String TAG = LiveActivity.class.getSimpleName();
private final static String LIVE_URL = "rtmp://192.168.1.3/live/stream";
private final static int MSG_ERROR = 100;
private SurfaceView textureView;
private LivePusherNew livePusher;
@SuppressLint("HandlerLeak")
private Handler mHandler = new Handler() {
@Override
public void handleMessage(Message msg) {
super.handleMessage(msg);
if (msg.what == MSG_ERROR) {
String errMsg = (String) msg.obj;
if (!TextUtils.isEmpty(errMsg)) {
Toast.makeText(LiveActivity.this, errMsg, Toast.LENGTH_SHORT).show();
}
}
}
};
@Override
int getLayoutId() {
return R.layout.activity_live;
}
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
hideActionBar();
initView();
initPusher();
}
private void initView() {
initViewsWithClick(R.id.btn_swap);
((ToggleButton) findViewById(R.id.btn_live)).setOnCheckedChangeListener(this);
((ToggleButton) findViewById(R.id.btn_mute)).setOnCheckedChangeListener(this);
textureView = getView(R.id.surface_camera);
}
private void initPusher() {
int width = 640;//resolution
int height = 480;
int videoBitRate = 800_000;//bps (800 kb/s)
int videoFrameRate = 10;//fps
VideoParam videoParam = new VideoParam(width, height,
Integer.valueOf(Camera2Helper.CAMERA_ID_BACK), videoBitRate, videoFrameRate);
int sampleRate = 44100;//sample rate: Hz
int channelConfig = AudioFormat.CHANNEL_IN_STEREO;
int audioFormat = AudioFormat.ENCODING_PCM_16BIT;
int numChannels = 2;//channel number
AudioParam audioParam = new AudioParam(sampleRate, channelConfig, audioFormat, numChannels);
livePusher = new LivePusherNew(this, videoParam, audioParam);
livePusher.setPreviewDisplay(textureView.getHolder());
}
@Override
public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) {
switch (buttonView.getId()) {
case R.id.btn_live://start or stop live streaming
if (isChecked) {
livePusher.startPush(LIVE_URL, this);
} else {
livePusher.stopPush();
}
break;
case R.id.btn_mute://mute or not
Log.i(TAG, "isChecked=" + isChecked);
livePusher.setMute(isChecked);
break;
default:
break;
}
}
@Override
public void onError(String msg) {
Log.e(TAG, "errMsg=" + msg);
mHandler.obtainMessage(MSG_ERROR, msg).sendToTarget();
}
@Override
protected void onDestroy() {
super.onDestroy();
if (livePusher != null) {
livePusher.release();
}
}
@Override
void onViewClick(View view) {
if (view.getId() == R.id.btn_swap) {//switch camera
livePusher.switchCamera();
}
}
@Override
void onSelectedFile(String filePath) {
}
}

@@ -0,0 +1,124 @@
package com.frank.ffmpeg.activity
import android.annotation.SuppressLint
import android.media.AudioFormat
import android.os.Bundle
import android.os.Handler
import android.os.Message
import android.text.TextUtils
import android.util.Log
import android.view.SurfaceView
import android.view.View
import android.widget.CompoundButton
import android.widget.Toast
import android.widget.ToggleButton
import com.frank.ffmpeg.R
import com.frank.live.camera2.Camera2Helper
import com.frank.live.listener.LiveStateChangeListener
import com.frank.live.param.AudioParam
import com.frank.live.param.VideoParam
import com.frank.live.LivePusherNew
/**
* Realtime live streaming over RTMP
* Created by frank on 2018/1/28.
*/
class LiveActivity : BaseActivity(), CompoundButton.OnCheckedChangeListener, LiveStateChangeListener {
private var textureView: SurfaceView? = null
private var livePusher: LivePusherNew? = null
@SuppressLint("HandlerLeak")
private val mHandler = object : Handler() {
override fun handleMessage(msg: Message) {
super.handleMessage(msg)
if (msg.what == MSG_ERROR) {
val errMsg = msg.obj as String
if (!TextUtils.isEmpty(errMsg)) {
Toast.makeText(this@LiveActivity, errMsg, Toast.LENGTH_SHORT).show()
}
}
}
}
override val layoutId: Int
get() = R.layout.activity_live
override fun onCreate(savedInstanceState: Bundle?) {
super.onCreate(savedInstanceState)
hideActionBar()
initView()
initPusher()
}
private fun initView() {
initViewsWithClick(R.id.btn_swap)
(findViewById<View>(R.id.btn_live) as ToggleButton).setOnCheckedChangeListener(this)
(findViewById<View>(R.id.btn_mute) as ToggleButton).setOnCheckedChangeListener(this)
textureView = getView(R.id.surface_camera)
}
private fun initPusher() {
val width = 640//resolution
val height = 480
val videoBitRate = 800000//bps (800 kb/s)
val videoFrameRate = 10//fps
val videoParam = VideoParam(width, height,
Integer.valueOf(Camera2Helper.CAMERA_ID_BACK), videoBitRate, videoFrameRate)
val sampleRate = 44100//sample rate: Hz
val channelConfig = AudioFormat.CHANNEL_IN_STEREO
val audioFormat = AudioFormat.ENCODING_PCM_16BIT
val numChannels = 2//channel number
val audioParam = AudioParam(sampleRate, channelConfig, audioFormat, numChannels)
livePusher = LivePusherNew(this, videoParam, audioParam)
livePusher!!.setPreviewDisplay(textureView!!.holder)
}
override fun onCheckedChanged(buttonView: CompoundButton, isChecked: Boolean) {
when (buttonView.id) {
R.id.btn_live//start or stop live streaming
-> if (isChecked) {
livePusher!!.startPush(LIVE_URL, this)
} else {
livePusher!!.stopPush()
}
R.id.btn_mute//mute or not
-> {
Log.i(TAG, "isChecked=$isChecked")
livePusher!!.setMute(isChecked)
}
else -> {
}
}
}
override fun onError(msg: String) {
Log.e(TAG, "errMsg=$msg")
mHandler.obtainMessage(MSG_ERROR, msg).sendToTarget()
}
override fun onDestroy() {
super.onDestroy()
if (livePusher != null) {
livePusher!!.release()
}
}
override fun onViewClick(view: View) {
if (view.id == R.id.btn_swap) {//switch camera
livePusher!!.switchCamera()
}
}
override fun onSelectedFile(filePath: String) {
}
companion object {
private val TAG = LiveActivity::class.java.simpleName
private const val LIVE_URL = "rtmp://192.168.1.3/live/stream"
private const val MSG_ERROR = 100
}
}
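AudioParam carries the same triple that android.media.AudioRecord works with. LivePusherNew's internals are outside this commit, but a capture implementation would typically size its buffer from those values roughly as sketched here (an assumption about the library, not its actual code):

import android.media.AudioRecord

// Sketch: minimum capture buffer size for the parameters configured in initPusher().
fun minCaptureBufferSize(sampleRate: Int, channelConfig: Int, audioFormat: Int): Int =
    AudioRecord.getMinBufferSize(sampleRate, channelConfig, audioFormat)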

@@ -1,83 +0,0 @@
package com.frank.ffmpeg.activity;
import android.content.Intent;
import android.os.Bundle;
import android.view.View;
import com.frank.ffmpeg.R;
/**
* The main entrance to all Activities
* Created by frank on 2018/1/23.
*/
public class MainActivity extends BaseActivity {
@Override
int getLayoutId() {
return R.layout.activity_main;
}
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
initViewsWithClick(
R.id.btn_audio,
R.id.btn_video,
R.id.btn_media,
R.id.btn_play,
R.id.btn_push,
R.id.btn_live,
R.id.btn_filter,
R.id.btn_preview,
R.id.btn_probe,
R.id.btn_audio_effect
);
}
@Override
public void onViewClick(View v) {
Intent intent = new Intent();
switch (v.getId()) {
case R.id.btn_audio://handle audio
intent.setClass(MainActivity.this, AudioHandleActivity.class);
break;
case R.id.btn_video://handle video
intent.setClass(MainActivity.this, VideoHandleActivity.class);
break;
case R.id.btn_media://handle media
intent.setClass(MainActivity.this, MediaHandleActivity.class);
break;
case R.id.btn_play://media play
intent.setClass(MainActivity.this, MediaPlayerActivity.class);
break;
case R.id.btn_push://pushing
intent.setClass(MainActivity.this, PushActivity.class);
break;
case R.id.btn_live://realtime live streaming with RTMP
intent.setClass(MainActivity.this, LiveActivity.class);
break;
case R.id.btn_filter://filter effect
intent.setClass(MainActivity.this, FilterActivity.class);
break;
case R.id.btn_preview://preview thumbnail
intent.setClass(MainActivity.this, VideoPreviewActivity.class);
break;
case R.id.btn_probe://probe media format
intent.setClass(MainActivity.this, ProbeFormatActivity.class);
break;
case R.id.btn_audio_effect://audio effect
intent.setClass(MainActivity.this, AudioEffectActivity.class);
break;
default:
break;
}
startActivity(intent);
}
@Override
void onSelectedFile(String filePath) {
}
}

@@ -0,0 +1,68 @@
package com.frank.ffmpeg.activity
import android.content.Intent
import android.os.Bundle
import android.view.View
import com.frank.ffmpeg.R
/**
* The main entrance to all Activities
* Created by frank on 2018/1/23.
*/
class MainActivity : BaseActivity() {
override val layoutId: Int
get() = R.layout.activity_main
override fun onCreate(savedInstanceState: Bundle?) {
super.onCreate(savedInstanceState)
initViewsWithClick(
R.id.btn_audio,
R.id.btn_video,
R.id.btn_media,
R.id.btn_play,
R.id.btn_push,
R.id.btn_live,
R.id.btn_filter,
R.id.btn_preview,
R.id.btn_probe,
R.id.btn_audio_effect
)
}
override fun onViewClick(view: View) {
val intent = Intent()
when (view.id) {
R.id.btn_audio//handle audio
-> intent.setClass(this@MainActivity, AudioHandleActivity::class.java)
R.id.btn_video//handle video
-> intent.setClass(this@MainActivity, VideoHandleActivity::class.java)
R.id.btn_media//handle media
-> intent.setClass(this@MainActivity, MediaHandleActivity::class.java)
R.id.btn_play//media play
-> intent.setClass(this@MainActivity, MediaPlayerActivity::class.java)
R.id.btn_push//pushing
-> intent.setClass(this@MainActivity, PushActivity::class.java)
R.id.btn_live//realtime live streaming with RTMP
-> intent.setClass(this@MainActivity, LiveActivity::class.java)
R.id.btn_filter//filter effect
-> intent.setClass(this@MainActivity, FilterActivity::class.java)
R.id.btn_preview//preview thumbnail
-> intent.setClass(this@MainActivity, VideoPreviewActivity::class.java)
R.id.btn_probe//probe media format
-> intent.setClass(this@MainActivity, ProbeFormatActivity::class.java)
R.id.btn_audio_effect//audio effect
-> intent.setClass(this@MainActivity, AudioEffectActivity::class.java)
else -> {
}
}
startActivity(intent)
}
override fun onSelectedFile(filePath: String) {
}
}
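Because when is an expression in Kotlin, the same dispatch could also resolve the destination class directly and skip unknown ids; the following is only an alternative sketch, not part of the commit:

// Alternative sketch: compute the target Activity as a when expression.
val target: Class<*>? = when (view.id) {
    R.id.btn_audio -> AudioHandleActivity::class.java
    R.id.btn_video -> VideoHandleActivity::class.java
    else -> null
}
target?.let { startActivity(Intent(this, it)) }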

@@ -1,189 +0,0 @@
package com.frank.ffmpeg.activity;
import android.annotation.SuppressLint;
import android.os.Bundle;
import android.os.Environment;
import android.os.Handler;
import android.os.Message;
import android.util.Log;
import android.view.View;
import android.widget.LinearLayout;
import android.widget.TextView;
import com.frank.ffmpeg.FFmpegCmd;
import com.frank.ffmpeg.R;
import com.frank.ffmpeg.handler.FFmpegHandler;
import com.frank.ffmpeg.util.FFmpegUtil;
import com.frank.ffmpeg.util.FileUtil;
import com.frank.ffmpeg.util.ThreadPoolUtil;
import java.io.File;
import java.util.Locale;
import static com.frank.ffmpeg.handler.FFmpegHandler.MSG_BEGIN;
import static com.frank.ffmpeg.handler.FFmpegHandler.MSG_FINISH;
import static com.frank.ffmpeg.handler.FFmpegHandler.MSG_PROGRESS;
/**
* using ffmpeg to handle media
* Created by frank on 2018/1/23.
*/
public class MediaHandleActivity extends BaseActivity {
private final static String TAG = MediaHandleActivity.class.getSimpleName();
private static final String PATH = Environment.getExternalStorageDirectory().getPath();
private String audioFile = PATH + File.separator + "tiger.mp3";
private LinearLayout layoutProgress;
private TextView txtProgress;
private int viewId;
private LinearLayout layoutMediaHandle;
private FFmpegHandler ffmpegHandler;
@SuppressLint("HandlerLeak")
private Handler mHandler = new Handler() {
@Override
public void handleMessage(Message msg) {
super.handleMessage(msg);
switch (msg.what) {
case MSG_BEGIN:
layoutProgress.setVisibility(View.VISIBLE);
layoutMediaHandle.setVisibility(View.GONE);
break;
case MSG_FINISH:
layoutProgress.setVisibility(View.GONE);
layoutMediaHandle.setVisibility(View.VISIBLE);
break;
case MSG_PROGRESS:
int progress = msg.arg1;
if (progress > 0) {
txtProgress.setVisibility(View.VISIBLE);
txtProgress.setText(String.format(Locale.getDefault(), "%d%%", progress));
} else {
txtProgress.setVisibility(View.INVISIBLE);
}
break;
default:
break;
}
}
};
@Override
int getLayoutId() {
return R.layout.activity_media_handle;
}
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
hideActionBar();
initView();
ffmpegHandler = new FFmpegHandler(mHandler);
}
private void initView() {
layoutProgress = getView(R.id.layout_progress);
txtProgress = getView(R.id.txt_progress);
layoutMediaHandle = getView(R.id.layout_media_handle);
initViewsWithClick(
R.id.btn_mux,
R.id.btn_extract_audio,
R.id.btn_extract_video,
R.id.btn_dubbing
);
}
@Override
public void onViewClick(View view) {
viewId = view.getId();
selectFile();
}
@Override
void onSelectedFile(String filePath) {
doHandleMedia(filePath);
}
/**
* Using ffmpeg cmd to handle media
*
* @param srcFile srcFile
*/
private void doHandleMedia(String srcFile) {
String[] commandLine = null;
if (!FileUtil.checkFileExist(srcFile)) {
return;
}
if (!FileUtil.isVideo(srcFile)) {
showToast(getString(R.string.wrong_video_format));
return;
}
switch (viewId) {
case R.id.btn_mux://mux:pure video and pure audio
ThreadPoolUtil.executeSingleThreadPool(() -> mediaMux(srcFile));
return;
case R.id.btn_extract_audio://extract audio
String extractAudio = PATH + File.separator + "extractAudio.aac";
commandLine = FFmpegUtil.extractAudio(srcFile, extractAudio);
break;
case R.id.btn_extract_video://extract video
String extractVideo = PATH + File.separator + "extractVideo.mp4";
commandLine = FFmpegUtil.extractVideo(srcFile, extractVideo);
break;
case R.id.btn_dubbing://dubbing
ThreadPoolUtil.executeSingleThreadPool(() -> mediaDubbing(srcFile));
return;
default:
break;
}
if (ffmpegHandler != null) {
ffmpegHandler.executeFFmpegCmd(commandLine);
}
}
private void muxVideoAndAudio(String videoPath, String outputPath) {
String[] commandLine = FFmpegUtil.mediaMux(videoPath, audioFile, true, outputPath);
int result = FFmpegCmd.executeSync(commandLine);
if (result != 0) {
commandLine = FFmpegUtil.mediaMux(videoPath, audioFile, false, outputPath);
result = FFmpegCmd.executeSync(commandLine);
Log.e(TAG, "mux audio and video result=" + result);
}
}
private void mediaMux(String srcFile) {
mHandler.sendEmptyMessage(MSG_BEGIN);
String suffix = FileUtil.getFileSuffix(srcFile);
String muxPath = PATH + File.separator + "mux" + suffix;
Log.e(TAG, "muxPath=" + muxPath);
muxVideoAndAudio(srcFile, muxPath);
mHandler.sendEmptyMessage(MSG_FINISH);
}
private void mediaDubbing(String srcFile) {
mHandler.sendEmptyMessage(MSG_BEGIN);
String dubbingSuffix = FileUtil.getFileSuffix(srcFile);
String dubbingPath = PATH + File.separator + "dubbing" + dubbingSuffix;
String temp = PATH + File.separator + "temp" + dubbingSuffix;
String[] commandLine1 = FFmpegUtil.extractVideo(srcFile, temp);
int dubbingResult = FFmpegCmd.executeSync(commandLine1);
if (dubbingResult != 0) {
Log.e(TAG, "extract video fail, result=" + dubbingResult);
return;
}
muxVideoAndAudio(temp, dubbingPath);
FileUtil.deleteFile(temp);
mHandler.sendEmptyMessage(MSG_FINISH);
}
@Override
protected void onDestroy() {
super.onDestroy();
if (mHandler != null) {
mHandler.removeCallbacksAndMessages(null);
}
}
}

@@ -0,0 +1,189 @@
package com.frank.ffmpeg.activity
import android.annotation.SuppressLint
import android.os.Bundle
import android.os.Environment
import android.os.Handler
import android.os.Message
import android.util.Log
import android.view.View
import android.widget.LinearLayout
import android.widget.TextView
import com.frank.ffmpeg.FFmpegCmd
import com.frank.ffmpeg.R
import com.frank.ffmpeg.handler.FFmpegHandler
import com.frank.ffmpeg.util.FFmpegUtil
import com.frank.ffmpeg.util.FileUtil
import com.frank.ffmpeg.util.ThreadPoolUtil
import java.io.File
import java.util.Locale
import com.frank.ffmpeg.handler.FFmpegHandler.MSG_BEGIN
import com.frank.ffmpeg.handler.FFmpegHandler.MSG_FINISH
import com.frank.ffmpeg.handler.FFmpegHandler.MSG_PROGRESS
/**
* using ffmpeg to handle media
* Created by frank on 2018/1/23.
*/
class MediaHandleActivity : BaseActivity() {
private val audioFile = PATH + File.separator + "tiger.mp3"
private var layoutProgress: LinearLayout? = null
private var txtProgress: TextView? = null
private var viewId: Int = 0
private var layoutMediaHandle: LinearLayout? = null
private var ffmpegHandler: FFmpegHandler? = null
@SuppressLint("HandlerLeak")
private val mHandler = object : Handler() {
override fun handleMessage(msg: Message) {
super.handleMessage(msg)
when (msg.what) {
MSG_BEGIN -> {
layoutProgress!!.visibility = View.VISIBLE
layoutMediaHandle!!.visibility = View.GONE
}
MSG_FINISH -> {
layoutProgress!!.visibility = View.GONE
layoutMediaHandle!!.visibility = View.VISIBLE
}
MSG_PROGRESS -> {
val progress = msg.arg1
if (progress > 0) {
txtProgress!!.visibility = View.VISIBLE
txtProgress!!.text = String.format(Locale.getDefault(), "%d%%", progress)
} else {
txtProgress!!.visibility = View.INVISIBLE
}
}
else -> {
}
}
}
}
override val layoutId: Int
get() = R.layout.activity_media_handle
override fun onCreate(savedInstanceState: Bundle?) {
super.onCreate(savedInstanceState)
hideActionBar()
initView()
ffmpegHandler = FFmpegHandler(mHandler)
}
private fun initView() {
layoutProgress = getView(R.id.layout_progress)
txtProgress = getView(R.id.txt_progress)
layoutMediaHandle = getView(R.id.layout_media_handle)
initViewsWithClick(
R.id.btn_mux,
R.id.btn_extract_audio,
R.id.btn_extract_video,
R.id.btn_dubbing
)
}
override fun onViewClick(view: View) {
viewId = view.id
selectFile()
}
override fun onSelectedFile(filePath: String) {
doHandleMedia(filePath)
}
/**
* Using ffmpeg cmd to handle media
*
* @param srcFile srcFile
*/
private fun doHandleMedia(srcFile: String) {
var commandLine: Array<String>? = null
if (!FileUtil.checkFileExist(srcFile)) {
return
}
if (!FileUtil.isVideo(srcFile)) {
showToast(getString(R.string.wrong_video_format))
return
}
when (viewId) {
R.id.btn_mux//mux:pure video and pure audio
-> {
ThreadPoolUtil.executeSingleThreadPool { mediaMux(srcFile) }
return
}
R.id.btn_extract_audio//extract audio
-> {
val extractAudio = PATH + File.separator + "extractAudio.aac"
commandLine = FFmpegUtil.extractAudio(srcFile, extractAudio)
}
R.id.btn_extract_video//extract video
-> {
val extractVideo = PATH + File.separator + "extractVideo.mp4"
commandLine = FFmpegUtil.extractVideo(srcFile, extractVideo)
}
R.id.btn_dubbing//dubbing
-> {
ThreadPoolUtil.executeSingleThreadPool { mediaDubbing(srcFile) }
return
}
else -> {
}
}
if (ffmpegHandler != null) {
ffmpegHandler!!.executeFFmpegCmd(commandLine)
}
}
private fun muxVideoAndAudio(videoPath: String, outputPath: String) {
var commandLine = FFmpegUtil.mediaMux(videoPath, audioFile, true, outputPath)
var result = FFmpegCmd.executeSync(commandLine)
if (result != 0) {
commandLine = FFmpegUtil.mediaMux(videoPath, audioFile, false, outputPath)
result = FFmpegCmd.executeSync(commandLine)
Log.e(TAG, "mux audio and video result=$result")
}
}
private fun mediaMux(srcFile: String) {
mHandler.sendEmptyMessage(MSG_BEGIN)
val suffix = FileUtil.getFileSuffix(srcFile)
val muxPath = PATH + File.separator + "mux" + suffix
Log.e(TAG, "muxPath=$muxPath")
muxVideoAndAudio(srcFile, muxPath)
mHandler.sendEmptyMessage(MSG_FINISH)
}
private fun mediaDubbing(srcFile: String) {
mHandler.sendEmptyMessage(MSG_BEGIN)
val dubbingSuffix = FileUtil.getFileSuffix(srcFile)
val dubbingPath = PATH + File.separator + "dubbing" + dubbingSuffix
val temp = PATH + File.separator + "temp" + dubbingSuffix
val commandLine1 = FFmpegUtil.extractVideo(srcFile, temp)
val dubbingResult = FFmpegCmd.executeSync(commandLine1)
if (dubbingResult != 0) {
Log.e(TAG, "extract video fail, result=$dubbingResult")
return
}
muxVideoAndAudio(temp, dubbingPath)
FileUtil.deleteFile(temp)
mHandler.sendEmptyMessage(MSG_FINISH)
}
override fun onDestroy() {
super.onDestroy()
mHandler.removeCallbacksAndMessages(null)
}
companion object {
private val TAG = MediaHandleActivity::class.java.simpleName
private val PATH = Environment.getExternalStorageDirectory().path
}
}
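The dubbing path chains two helpers: extractVideo() drops the original audio track, then muxVideoAndAudio() remuxes the silent video with tiger.mp3. FFmpegUtil is not included in this commit; assuming the helpers wrap standard ffmpeg options, the two steps would correspond roughly to the commands below (function names and paths are placeholders):

// Hypothetical equivalents of extractVideo() and mediaMux(); not the actual helper code.
fun extractVideoSketch(src: String, dst: String): Array<String> =
    arrayOf("ffmpeg", "-i", src, "-an", "-c:v", "copy", "-y", dst)

fun mediaMuxSketch(video: String, audio: String, dst: String): Array<String> =
    arrayOf("ffmpeg", "-i", video, "-i", audio,
            "-map", "0:v:0", "-map", "1:a:0", "-c:v", "copy", "-y", dst)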

@@ -1,112 +0,0 @@
package com.frank.ffmpeg.activity;
import android.os.Bundle;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.View;
import android.widget.Button;
import com.frank.ffmpeg.MediaPlayer;
import com.frank.ffmpeg.R;
import com.frank.ffmpeg.util.FileUtil;
/**
* mediaPlayer, which decodes in software
* Created by frank on 2018/2/12.
*/
public class MediaPlayerActivity extends BaseActivity implements SurfaceHolder.Callback {
private static final String TAG = MediaPlayerActivity.class.getSimpleName();
private SurfaceHolder surfaceHolder;
private MediaPlayer mediaPlayer;
private boolean surfaceCreated;
private Button btnSelectFile;
@Override
int getLayoutId() {
return R.layout.activity_media_player;
}
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
hideActionBar();
initView();
initPlayer();
}
private void initView(){
SurfaceView surfaceView = getView(R.id.surface_media);
surfaceHolder = surfaceView.getHolder();
surfaceHolder.addCallback(this);
btnSelectFile = getView(R.id.btn_select_file);
initViewsWithClick(R.id.btn_select_file);
}
private void initPlayer(){
mediaPlayer = new MediaPlayer();
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
surfaceCreated = true;
}
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
Log.i(TAG, "surfaceChanged");
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
Log.i(TAG, "surfaceDestroyed");
}
@Override
protected void onDestroy() {
super.onDestroy();
if(mediaPlayer != null){
mediaPlayer.release();
mediaPlayer = null;
}
}
private void startPlay(final String filePath) {
new Thread(new Runnable() {
@Override
public void run() {
int result = mediaPlayer.setup(filePath, surfaceHolder.getSurface());
if(result < 0){
Log.e(TAG, "mediaPlayer setup error!");
return;
}
mediaPlayer.play();
}
}).start();
}
@Override
void onSelectedFile(String filePath) {
if (!FileUtil.checkFileExist(filePath)){
return;
}
if (surfaceCreated) {
btnSelectFile.setVisibility(View.GONE);
startPlay(filePath);
}
}
@Override
void onViewClick(View view) {
if (view.getId() == R.id.btn_select_file) {
selectFile();
}
}
}

@@ -0,0 +1,103 @@
package com.frank.ffmpeg.activity
import android.os.Bundle
import android.util.Log
import android.view.SurfaceHolder
import android.view.SurfaceView
import android.view.View
import android.widget.Button
import com.frank.ffmpeg.MediaPlayer
import com.frank.ffmpeg.R
import com.frank.ffmpeg.util.FileUtil
/**
* mediaPlayer, which decodes in software
* Created by frank on 2018/2/12.
*/
class MediaPlayerActivity : BaseActivity(), SurfaceHolder.Callback {
private var surfaceHolder: SurfaceHolder? = null
private var mediaPlayer: MediaPlayer? = null
private var surfaceCreated: Boolean = false
private var btnSelectFile: Button? = null
override val layoutId: Int
get() = R.layout.activity_media_player
override fun onCreate(savedInstanceState: Bundle?) {
super.onCreate(savedInstanceState)
hideActionBar()
initView()
initPlayer()
}
private fun initView() {
val surfaceView = getView<SurfaceView>(R.id.surface_media)
surfaceHolder = surfaceView.holder
surfaceHolder!!.addCallback(this)
btnSelectFile = getView(R.id.btn_select_file)
initViewsWithClick(R.id.btn_select_file)
}
private fun initPlayer() {
mediaPlayer = MediaPlayer()
}
override fun surfaceCreated(holder: SurfaceHolder) {
surfaceCreated = true
}
override fun surfaceChanged(holder: SurfaceHolder, format: Int, width: Int, height: Int) {
Log.i(TAG, "surfaceChanged")
}
override fun surfaceDestroyed(holder: SurfaceHolder) {
Log.i(TAG, "surfaceDestroyed")
}
override fun onDestroy() {
super.onDestroy()
if (mediaPlayer != null) {
mediaPlayer!!.release()
mediaPlayer = null
}
}
private fun startPlay(filePath: String) {
Thread(Runnable {
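// set up the native player with the file path and surface; a negative result means initialization failed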
val result = mediaPlayer!!.setup(filePath, surfaceHolder!!.surface)
if (result < 0) {
Log.e(TAG, "mediaPlayer setup error!")
return@Runnable
}
mediaPlayer!!.play()
}).start()
}
override fun onSelectedFile(filePath: String) {
if (!FileUtil.checkFileExist(filePath)) {
return
}
if (surfaceCreated) {
btnSelectFile!!.visibility = View.GONE
startPlay(filePath)
}
}
override fun onViewClick(view: View) {
if (view.id == R.id.btn_select_file) {
selectFile()
}
}
companion object {
private val TAG = MediaPlayerActivity::class.java.simpleName
}
}

@ -1,114 +0,0 @@
package com.frank.ffmpeg.activity;
import android.annotation.SuppressLint;
import android.os.Bundle;
import android.os.Handler;
import android.os.Message;
import android.text.TextUtils;
import android.view.View;
import android.widget.ProgressBar;
import android.widget.RelativeLayout;
import android.widget.TextView;
import com.frank.ffmpeg.R;
import com.frank.ffmpeg.handler.FFmpegHandler;
import com.frank.ffmpeg.model.MediaBean;
import com.frank.ffmpeg.tool.JsonParseTool;
import com.frank.ffmpeg.util.FFmpegUtil;
import com.frank.ffmpeg.util.FileUtil;
import static com.frank.ffmpeg.handler.FFmpegHandler.MSG_BEGIN;
import static com.frank.ffmpeg.handler.FFmpegHandler.MSG_FINISH;
/**
* Using ffprobe to parse media format data
* Created by frank on 2020/1/7.
*/
public class ProbeFormatActivity extends BaseActivity {
private TextView txtProbeFormat;
private ProgressBar progressProbe;
private RelativeLayout layoutProbe;
private FFmpegHandler ffmpegHandler;
@SuppressLint("HandlerLeak")
private Handler mHandler = new Handler() {
@Override
public void handleMessage(Message msg) {
super.handleMessage(msg);
switch (msg.what) {
case MSG_BEGIN:
progressProbe.setVisibility(View.VISIBLE);
layoutProbe.setVisibility(View.GONE);
break;
case MSG_FINISH:
progressProbe.setVisibility(View.GONE);
layoutProbe.setVisibility(View.VISIBLE);
MediaBean result = (MediaBean) msg.obj;
String resultFormat = JsonParseTool.stringFormat(result);
if (!TextUtils.isEmpty(resultFormat) && txtProbeFormat != null) {
txtProbeFormat.setText(resultFormat);
}
break;
default:
break;
}
}
};
@Override
int getLayoutId() {
return R.layout.activity_probe;
}
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
initView();
ffmpegHandler = new FFmpegHandler(mHandler);
}
private void initView() {
progressProbe = getView(R.id.progress_probe);
layoutProbe = getView(R.id.layout_probe);
initViewsWithClick(R.id.btn_probe_format);
txtProbeFormat = getView(R.id.txt_probe_format);
}
@Override
public void onViewClick(View view) {
selectFile();
}
@Override
void onSelectedFile(String filePath) {
doHandleProbe(filePath);
}
/**
* use ffprobe to parse video/audio format metadata
*
* @param srcFile srcFile
*/
private void doHandleProbe(final String srcFile) {
if (!FileUtil.checkFileExist(srcFile)) {
return;
}
String[] commandLine = FFmpegUtil.probeFormat(srcFile);
if (ffmpegHandler != null) {
ffmpegHandler.executeFFprobeCmd(commandLine);
}
}
@Override
protected void onDestroy() {
super.onDestroy();
if (mHandler != null) {
mHandler.removeCallbacksAndMessages(null);
}
}
}

@ -0,0 +1,105 @@
package com.frank.ffmpeg.activity
import android.annotation.SuppressLint
import android.os.Bundle
import android.os.Handler
import android.os.Message
import android.text.TextUtils
import android.view.View
import android.widget.ProgressBar
import android.widget.RelativeLayout
import android.widget.TextView
import com.frank.ffmpeg.R
import com.frank.ffmpeg.handler.FFmpegHandler
import com.frank.ffmpeg.model.MediaBean
import com.frank.ffmpeg.tool.JsonParseTool
import com.frank.ffmpeg.util.FFmpegUtil
import com.frank.ffmpeg.util.FileUtil
import com.frank.ffmpeg.handler.FFmpegHandler.MSG_BEGIN
import com.frank.ffmpeg.handler.FFmpegHandler.MSG_FINISH
/**
* Using ffprobe to parse media format data
* Created by frank on 2020/1/7.
*/
class ProbeFormatActivity : BaseActivity() {
private var txtProbeFormat: TextView? = null
private var progressProbe: ProgressBar? = null
private var layoutProbe: RelativeLayout? = null
private var ffmpegHandler: FFmpegHandler? = null
@SuppressLint("HandlerLeak")
private val mHandler = object : Handler() {
override fun handleMessage(msg: Message) {
super.handleMessage(msg)
when (msg.what) {
MSG_BEGIN -> {
progressProbe!!.visibility = View.VISIBLE
layoutProbe!!.visibility = View.GONE
}
MSG_FINISH -> {
progressProbe!!.visibility = View.GONE
layoutProbe!!.visibility = View.VISIBLE
val result = msg.obj as MediaBean
val resultFormat = JsonParseTool.stringFormat(result)
if (!TextUtils.isEmpty(resultFormat) && txtProbeFormat != null) {
txtProbeFormat!!.text = resultFormat
}
}
else -> {
}
}
}
}
override val layoutId: Int
get() = R.layout.activity_probe
override fun onCreate(savedInstanceState: Bundle?) {
super.onCreate(savedInstanceState)
initView()
ffmpegHandler = FFmpegHandler(mHandler)
}
private fun initView() {
progressProbe = getView(R.id.progress_probe)
layoutProbe = getView(R.id.layout_probe)
initViewsWithClick(R.id.btn_probe_format)
txtProbeFormat = getView(R.id.txt_probe_format)
}
override fun onViewClick(view: View) {
selectFile()
}
override fun onSelectedFile(filePath: String) {
doHandleProbe(filePath)
}
/**
* use ffprobe to parse video/audio format metadata
*
* @param srcFile srcFile
*/
private fun doHandleProbe(srcFile: String) {
if (!FileUtil.checkFileExist(srcFile)) {
return
}
val commandLine = FFmpegUtil.probeFormat(srcFile)
if (ffmpegHandler != null) {
ffmpegHandler!!.executeFFprobeCmd(commandLine)
}
}
override fun onDestroy() {
super.onDestroy()
mHandler.removeCallbacksAndMessages(null)
}
}

@ -1,86 +0,0 @@
package com.frank.ffmpeg.activity;
import android.os.Bundle;
import android.text.TextUtils;
import android.util.Log;
import android.view.View;
import android.widget.EditText;
import com.frank.ffmpeg.Pusher;
import com.frank.ffmpeg.R;
import java.io.File;
/**
* Use ffmpeg to push a live stream
* Created by frank on 2018/2/2.
*/
public class PushActivity extends BaseActivity {
private static final String TAG = PushActivity.class.getSimpleName();
private static final String FILE_PATH = "storage/emulated/0/hello.flv";
private static final String LIVE_URL = "rtmp://192.168.1.104/live/stream";
private EditText edit_file_path;
private EditText edit_live_url;
@Override
int getLayoutId() {
return R.layout.activity_push;
}
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
hideActionBar();
initView();
}
private void initView() {
edit_file_path = getView(R.id.edit_file_path);
edit_live_url = getView(R.id.edit_live_url);
edit_file_path.setText(FILE_PATH);
edit_live_url.setText(LIVE_URL);
initViewsWithClick(R.id.btn_push_stream);
}
private void startPushStreaming() {
//TODO the video file format should be flv
final String filePath = edit_file_path.getText().toString();
final String liveUrl = edit_live_url.getText().toString();
Log.i(TAG, "filePath=" + filePath);
Log.i(TAG, "liveUrl=" + liveUrl);
if(!TextUtils.isEmpty(filePath) && !TextUtils.isEmpty(liveUrl)){
File file = new File(filePath);
//check whether the file exists
if(file.exists()){
//start a worker thread
new Thread(new Runnable() {
@Override
public void run() {
//start pushing the stream
new Pusher().pushStream(filePath, liveUrl);
}
}).start();
}else {
showToast(getString(R.string.file_not_found));
}
}
}
@Override
void onViewClick(View view) {
if (view.getId() == R.id.btn_push_stream) {
startPushStreaming();
}
}
@Override
void onSelectedFile(String filePath) {
}
}

@ -0,0 +1,81 @@
package com.frank.ffmpeg.activity
import android.os.Bundle
import android.text.TextUtils
import android.util.Log
import android.view.View
import android.widget.EditText
import com.frank.ffmpeg.Pusher
import com.frank.ffmpeg.R
import java.io.File
/**
* Use ffmpeg to push a live stream
* Created by frank on 2018/2/2.
*/
class PushActivity : BaseActivity() {
private var editFilePath: EditText? = null
private var editLiveURL: EditText? = null
override val layoutId: Int
get() = R.layout.activity_push
override fun onCreate(savedInstanceState: Bundle?) {
super.onCreate(savedInstanceState)
hideActionBar()
initView()
}
private fun initView() {
editFilePath = getView(R.id.edit_file_path)
editLiveURL = getView(R.id.edit_live_url)
editFilePath!!.setText(FILE_PATH)
editLiveURL!!.setText(LIVE_URL)
initViewsWithClick(R.id.btn_push_stream)
}
private fun startPushStreaming() {
//TODO the video file format should be flv
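// RTMP live streaming carries FLV-muxed data, hence the flv requirement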
val filePath = editFilePath!!.text.toString()
val liveUrl = editLiveURL!!.text.toString()
Log.i(TAG, "filePath=$filePath")
Log.i(TAG, "liveUrl=$liveUrl")
if (!TextUtils.isEmpty(filePath) && !TextUtils.isEmpty(liveUrl)) {
val file = File(filePath)
//check whether the file exists
if (file.exists()) {
//start a worker thread
Thread(Runnable {
//start pushing the stream
Pusher().pushStream(filePath, liveUrl)
}).start()
} else {
showToast(getString(R.string.file_not_found))
}
}
}
override fun onViewClick(view: View) {
if (view.id == R.id.btn_push_stream) {
startPushStreaming()
}
}
override fun onSelectedFile(filePath: String) {
}
companion object {
private val TAG = PushActivity::class.java.simpleName
private const val FILE_PATH = "storage/emulated/0/hello.flv"
private const val LIVE_URL = "rtmp://192.168.1.104/live/stream"
}
}

@ -1,442 +0,0 @@
package com.frank.ffmpeg.activity;
import android.annotation.SuppressLint;
import android.graphics.Color;
import android.media.MediaMetadataRetriever;
import android.os.Bundle;
import android.os.Environment;
import android.os.Handler;
import android.os.Message;
import android.util.Log;
import android.view.View;
import android.widget.LinearLayout;
import android.widget.TextView;
import com.frank.ffmpeg.FFmpegCmd;
import com.frank.ffmpeg.R;
import com.frank.ffmpeg.format.VideoLayout;
import com.frank.ffmpeg.gif.HighQualityGif;
import com.frank.ffmpeg.handler.FFmpegHandler;
import com.frank.ffmpeg.model.MediaBean;
import com.frank.ffmpeg.tool.JsonParseTool;
import com.frank.ffmpeg.util.BitmapUtil;
import com.frank.ffmpeg.util.FFmpegUtil;
import com.frank.ffmpeg.util.FileUtil;
import java.io.File;
import java.util.ArrayList;
import java.util.List;
import static com.frank.ffmpeg.handler.FFmpegHandler.MSG_BEGIN;
import static com.frank.ffmpeg.handler.FFmpegHandler.MSG_FINISH;
import static com.frank.ffmpeg.handler.FFmpegHandler.MSG_PROGRESS;
/**
* Video processing by FFmpeg command
* Created by frank on 2018/1/25.
*/
public class VideoHandleActivity extends BaseActivity {
private final static String TAG = VideoHandleActivity.class.getSimpleName();
private static final String PATH = Environment.getExternalStorageDirectory().getPath();
private LinearLayout layoutVideoHandle;
private LinearLayout layoutProgress;
private TextView txtProgress;
private int viewId;
private FFmpegHandler ffmpegHandler;
private final static boolean useFFmpegCmd = true;
private final static int TYPE_IMAGE = 1;
private final static int TYPE_GIF = 2;
private final static int TYPE_TEXT = 3;
private final static int waterMarkType = TYPE_IMAGE;
private String appendPath = PATH + File.separator + "snow.mp4";
private String outputPath1 = PATH + File.separator + "output1.ts";
private String outputPath2 = PATH + File.separator + "output2.ts";
private String listPath = PATH + File.separator + "listFile.txt";
private boolean isJointing = false;
private final static boolean convertGifWithFFmpeg = false;
@SuppressLint("HandlerLeak")
private Handler mHandler = new Handler() {
@Override
public void handleMessage(Message msg) {
super.handleMessage(msg);
switch (msg.what) {
case MSG_BEGIN:
layoutProgress.setVisibility(View.VISIBLE);
layoutVideoHandle.setVisibility(View.GONE);
break;
case MSG_FINISH:
layoutProgress.setVisibility(View.GONE);
layoutVideoHandle.setVisibility(View.VISIBLE);
if (isJointing) {
isJointing = false;
FileUtil.deleteFile(outputPath1);
FileUtil.deleteFile(outputPath2);
FileUtil.deleteFile(listPath);
}
break;
case MSG_PROGRESS:
int progress = msg.arg1;
int duration = msg.arg2;
if (progress > 0) {
txtProgress.setVisibility(View.VISIBLE);
String percent = duration > 0 ? "%" : "";
String strProgress = progress + percent;
txtProgress.setText(strProgress);
} else {
txtProgress.setVisibility(View.INVISIBLE);
}
break;
default:
break;
}
}
};
@Override
int getLayoutId() {
return R.layout.activity_video_handle;
}
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
hideActionBar();
intView();
ffmpegHandler = new FFmpegHandler(mHandler);
}
private void intView() {
layoutProgress = getView(R.id.layout_progress);
txtProgress = getView(R.id.txt_progress);
layoutVideoHandle = getView(R.id.layout_video_handle);
initViewsWithClick(
R.id.btn_video_transform,
R.id.btn_video_cut,
R.id.btn_video_concat,
R.id.btn_screen_shot,
R.id.btn_water_mark,
R.id.btn_generate_gif,
R.id.btn_screen_record,
R.id.btn_combine_video,
R.id.btn_multi_video,
R.id.btn_reverse_video,
R.id.btn_denoise_video,
R.id.btn_to_image,
R.id.btn_pip,
R.id.btn_moov,
R.id.btn_speed,
R.id.btn_flv,
R.id.btn_thumbnail
);
}
@Override
public void onViewClick(View view) {
viewId = view.getId();
if (viewId == R.id.btn_combine_video) {
handlePhoto();
return;
}
selectFile();
}
@Override
void onSelectedFile(String filePath) {
doHandleVideo(filePath);
}
/**
* Using FFmpeg cmd to handle video
*
* @param srcFile srcFile
*/
private void doHandleVideo(String srcFile) {
String[] commandLine = null;
if (!FileUtil.checkFileExist(srcFile)) {
return;
}
if (!FileUtil.isVideo(srcFile)) {
showToast(getString(R.string.wrong_video_format));
return;
}
switch (viewId) {
case R.id.btn_video_transform://transform format
String transformVideo = PATH + File.separator + "transformVideo.mp4";
commandLine = FFmpegUtil.transformVideo(srcFile, transformVideo);
break;
case R.id.btn_video_cut://cut video
String suffix = FileUtil.getFileSuffix(srcFile);
if (suffix == null || suffix.isEmpty()) {
return;
}
String cutVideo = PATH + File.separator + "cutVideo" + suffix;
int startTime = 0;
int duration = 20;
commandLine = FFmpegUtil.cutVideo(srcFile, startTime, duration, cutVideo);
break;
case R.id.btn_video_concat://concat video together
concatVideo(srcFile);
break;
case R.id.btn_screen_shot://video snapshot
String screenShot = PATH + File.separator + "screenShot.jpg";
int time = 18;
commandLine = FFmpegUtil.screenShot(srcFile, time, screenShot);
break;
case R.id.btn_water_mark://add watermark to video
// the unit of bitRate is kb
int bitRate = 500;
MediaMetadataRetriever retriever = new MediaMetadataRetriever();
retriever.setDataSource(srcFile);
String mBitRate = retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_BITRATE);
if (mBitRate != null && !mBitRate.isEmpty()) {
int probeBitrate = Integer.valueOf(mBitRate);
bitRate = (probeBitrate/1000/100) * 100;
}
//1:top left 2:top right 3:bottom left 4:bottom right
int location = 2;
int offsetXY = 5;
switch (waterMarkType) {
case TYPE_IMAGE:// image
String photo = PATH + File.separator + "hello.png";
String photoMark = PATH + File.separator + "photoMark.mp4";
commandLine = FFmpegUtil.addWaterMarkImg(srcFile, photo, location, bitRate, offsetXY, photoMark);
break;
case TYPE_GIF:// gif
String gifPath = PATH + File.separator + "ok.gif";
String gifWaterMark = PATH + File.separator + "gifWaterMark.mp4";
commandLine = FFmpegUtil.addWaterMarkGif(srcFile, gifPath, location, bitRate, offsetXY, gifWaterMark);
break;
case TYPE_TEXT:// text
String text = "Hello,FFmpeg";
String textPath = PATH + File.separator + "text.png";
boolean result = BitmapUtil.textToPicture(textPath, text, Color.BLUE, 20);
Log.i(TAG, "text to picture result=" + result);
String textMark = PATH + File.separator + "textMark.mp4";
commandLine = FFmpegUtil.addWaterMarkImg(srcFile, textPath, location, bitRate, offsetXY, textMark);
break;
default:
break;
}
break;
case R.id.btn_generate_gif://convert video into gif
String video2Gif = PATH + File.separator + "video2Gif.gif";
int gifStart = 10;
int gifDuration = 3;
int width = 320;
int frameRate = 10;
if (convertGifWithFFmpeg) {
String palettePath = PATH + "/palette.png";
FileUtil.deleteFile(palettePath);
String[] paletteCmd = FFmpegUtil.generatePalette(srcFile, gifStart, gifDuration,
frameRate, width, palettePath);
String[] gifCmd = FFmpegUtil.generateGifByPalette(srcFile, palettePath, gifStart, gifDuration,
frameRate, width, video2Gif);
List<String[]> cmdList = new ArrayList<>();
cmdList.add(paletteCmd);
cmdList.add(gifCmd);
ffmpegHandler.executeFFmpegCmds(cmdList);
} else {
convertGifInHighQuality(video2Gif, srcFile, gifStart, gifDuration, frameRate);
}
break;
case R.id.btn_multi_video://combine videos whose layout could be horizontal or vertical
String input1 = PATH + File.separator + "input1.mp4";
String input2 = PATH + File.separator + "input2.mp4";
String outputFile = PATH + File.separator + "multi.mp4";
if (!FileUtil.checkFileExist(input1) || !FileUtil.checkFileExist(input2)) {
return;
}
commandLine = FFmpegUtil.multiVideo(input1, input2, outputFile, VideoLayout.LAYOUT_HORIZONTAL);
break;
case R.id.btn_reverse_video://video reverse
String output = PATH + File.separator + "reverse.mp4";
commandLine = FFmpegUtil.reverseVideo(srcFile, output);
break;
case R.id.btn_denoise_video://noise reduction of video
String denoise = PATH + File.separator + "denoise.mp4";
commandLine = FFmpegUtil.denoiseVideo(srcFile, denoise);
break;
case R.id.btn_to_image://convert video to picture
String imagePath = PATH + File.separator + "Video2Image/";
File imageFile = new File(imagePath);
if (!imageFile.exists()) {
if (!imageFile.mkdir()) {
return;
}
}
int mStartTime = 10;//start time
int mDuration = 5;//duration
int mFrameRate = 10;//frameRate
commandLine = FFmpegUtil.videoToImage(srcFile, mStartTime, mDuration, mFrameRate, imagePath);
break;
case R.id.btn_pip://combine into picture-in-picture video
String inputFile1 = PATH + File.separator + "beyond.mp4";
String inputFile2 = PATH + File.separator + "small_girl.mp4";
if (!FileUtil.checkFileExist(inputFile1) && !FileUtil.checkFileExist(inputFile2)) {
return;
}
//x and y coordinate points need to be calculated according to the size of full video and small video
//For example: full video is 320x240, small video is 120x90, so x=200 y=150
int x = 200;
int y = 150;
String picInPic = PATH + File.separator + "PicInPic.mp4";
commandLine = FFmpegUtil.picInPicVideo(inputFile1, inputFile2, x, y, picInPic);
break;
case R.id.btn_moov://move the moov box, which sits behind the mdat box in an mp4 file, to the front
if (!srcFile.endsWith(FileUtil.TYPE_MP4)) {
showToast(getString(R.string.tip_not_mp4_video));
return;
}
String filePath = FileUtil.getFilePath(srcFile);
String fileName = FileUtil.getFileName(srcFile);
Log.e(TAG, "moov filePath=" + filePath + "--fileName=" + fileName);
fileName = "moov_" + fileName;
String moovPath = filePath + File.separator + fileName;
if (useFFmpegCmd) {
commandLine = FFmpegUtil.moveMoovAhead(srcFile, moovPath);
} else {
long start = System.currentTimeMillis();
FFmpegCmd ffmpegCmd = new FFmpegCmd();
int result = ffmpegCmd.moveMoovAhead(srcFile, moovPath);
Log.e(TAG, "result=" + (result == 0));
Log.e(TAG, "move moov use time=" + (System.currentTimeMillis() - start));
}
break;
case R.id.btn_speed://playing speed of video
String speed = PATH + File.separator + "speed.mp4";
commandLine = FFmpegUtil.changeSpeed(srcFile, speed, 2f, false);
break;
case R.id.btn_flv://rebuild the keyframe index of flv
if (!".flv".equalsIgnoreCase(FileUtil.getFileSuffix(srcFile))) {
Log.e(TAG, "It's not flv file, suffix=" + FileUtil.getFileSuffix(srcFile));
return;
}
String outputPath = PATH + File.separator + "frame_index.flv";
commandLine = FFmpegUtil.buildFlvIndex(srcFile, outputPath);
break;
case R.id.btn_thumbnail:// insert thumbnail into video
String thumbSuffix = FileUtil.getFileSuffix(srcFile);
if (thumbSuffix == null || thumbSuffix.isEmpty()) {
return;
}
String thumbnailPath = PATH + File.separator + "thumb.jpg";
String thumbVideoPath = PATH + File.separator + "thumbnailVideo" + thumbSuffix;
commandLine = FFmpegUtil.insertPicIntoVideo(srcFile, thumbnailPath, thumbVideoPath);
break;
default:
break;
}
if (ffmpegHandler != null && commandLine != null) {
ffmpegHandler.executeFFmpegCmd(commandLine);
}
}
/**
* concat/join two videos together
* It's recommended to convert to the same resolution and encoding
* @param selectedPath the path which is selected
*/
private void concatVideo(String selectedPath) {
if (ffmpegHandler == null || selectedPath.isEmpty()) {
return;
}
isJointing = true;
String targetPath = PATH + File.separator + "jointVideo.mp4";
String[] transformCmd1 = FFmpegUtil.transformVideoWithEncode(selectedPath, outputPath1);
int width = 0;
int height = 0;
//probe width and height of the selected video
String probeResult = FFmpegCmd.executeProbeSynchronize(FFmpegUtil.probeFormat(selectedPath));
MediaBean mediaBean = JsonParseTool.parseMediaFormat(probeResult);
if (mediaBean != null && mediaBean.getVideoBean() != null) {
width = mediaBean.getVideoBean().getWidth();
height = mediaBean.getVideoBean().getHeight();
Log.e(TAG, "width=" + width + "--height=" + height);
}
String[] transformCmd2 = FFmpegUtil.transformVideoWithEncode(appendPath, width, height, outputPath2);
List<String> fileList = new ArrayList<>();
fileList.add(outputPath1);
fileList.add(outputPath2);
FileUtil.createListFile(listPath, fileList);
String[] jointVideoCmd = FFmpegUtil.jointVideo(listPath, targetPath);
List<String[]> commandList = new ArrayList<>();
commandList.add(transformCmd1);
commandList.add(transformCmd2);
commandList.add(jointVideoCmd);
ffmpegHandler.executeFFmpegCmds(commandList);
}
/**
* Combine pictures into video
*/
private void handlePhoto() {
// The path of pictures, naming format: img+number.jpg
String picturePath = PATH + "/img/";
if (!FileUtil.checkFileExist(picturePath)) {
return;
}
String tempPath = PATH + "/temp/";
FileUtil.deleteFolder(tempPath);
File photoFile = new File(picturePath);
File[] files = photoFile.listFiles();
List<String[]> cmdList = new ArrayList<>();
//the resolution of photo which you want to convert
String resolution = "640x320";
for (File file : files) {
String inputPath = file.getAbsolutePath();
String outputPath = tempPath + file.getName();
String[] convertCmd = FFmpegUtil.convertResolution(inputPath, resolution, outputPath);
cmdList.add(convertCmd);
}
String combineVideo = PATH + File.separator + "combineVideo.mp4";
int frameRate = 2;// suggested synthetic frameRate:1-10
String[] commandLine = FFmpegUtil.pictureToVideo(tempPath, frameRate, combineVideo);
cmdList.add(commandLine);
if (ffmpegHandler != null) {
ffmpegHandler.executeFFmpegCmds(cmdList);
}
}
private void convertGifInHighQuality(String gifPath, String videoPath, int startTime, int duration, int frameRate) {
new Thread(() -> {
mHandler.sendEmptyMessage(MSG_BEGIN);
int width=0, height=0;
int rotateDegree = 0;
try {
MediaMetadataRetriever retriever = new MediaMetadataRetriever();
retriever.setDataSource(videoPath);
String mWidth = retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_VIDEO_WIDTH);
String mHeight = retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_VIDEO_HEIGHT);
width = Integer.valueOf(mWidth);
height = Integer.valueOf(mHeight);
String rotate = retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_VIDEO_ROTATION);
rotateDegree = Integer.valueOf(rotate);
retriever.release();
Log.e(TAG, "retrieve width=" + width + "--height=" + height + "--rotate=" + rotate);
} catch (Exception e) {
Log.e(TAG, "retrieve error=" + e.toString());
}
long start = System.currentTimeMillis();
HighQualityGif highQualityGif = new HighQualityGif(width, height, rotateDegree);
boolean result = highQualityGif.convertGIF(gifPath, videoPath, startTime, duration, frameRate);
Log.e(TAG, "convert gif result=" + result + "--time=" + (System.currentTimeMillis()-start));
mHandler.sendEmptyMessage(MSG_FINISH);
}).start();
}
@Override
protected void onDestroy() {
super.onDestroy();
if (mHandler != null) {
mHandler.removeCallbacksAndMessages(null);
}
}
}

@ -0,0 +1,452 @@
package com.frank.ffmpeg.activity
import android.annotation.SuppressLint
import android.graphics.Color
import android.media.MediaMetadataRetriever
import android.os.Bundle
import android.os.Environment
import android.os.Handler
import android.os.Message
import android.util.Log
import android.view.View
import android.widget.LinearLayout
import android.widget.TextView
import com.frank.ffmpeg.FFmpegCmd
import com.frank.ffmpeg.R
import com.frank.ffmpeg.format.VideoLayout
import com.frank.ffmpeg.gif.HighQualityGif
import com.frank.ffmpeg.handler.FFmpegHandler
import com.frank.ffmpeg.tool.JsonParseTool
import com.frank.ffmpeg.util.BitmapUtil
import com.frank.ffmpeg.util.FFmpegUtil
import com.frank.ffmpeg.util.FileUtil
import java.io.File
import java.util.ArrayList
import com.frank.ffmpeg.handler.FFmpegHandler.MSG_BEGIN
import com.frank.ffmpeg.handler.FFmpegHandler.MSG_FINISH
import com.frank.ffmpeg.handler.FFmpegHandler.MSG_PROGRESS
/**
* Video processing by FFmpeg command
* Created by frank on 2018/1/25.
*/
class VideoHandleActivity : BaseActivity() {
private var layoutVideoHandle: LinearLayout? = null
private var layoutProgress: LinearLayout? = null
private var txtProgress: TextView? = null
private var viewId: Int = 0
private var ffmpegHandler: FFmpegHandler? = null
private val appendPath = PATH + File.separator + "snow.mp4"
private val outputPath1 = PATH + File.separator + "output1.ts"
private val outputPath2 = PATH + File.separator + "output2.ts"
private val listPath = PATH + File.separator + "listFile.txt"
private var isJointing = false
@SuppressLint("HandlerLeak")
private val mHandler = object : Handler() {
override fun handleMessage(msg: Message) {
super.handleMessage(msg)
when (msg.what) {
MSG_BEGIN -> {
layoutProgress!!.visibility = View.VISIBLE
layoutVideoHandle!!.visibility = View.GONE
}
MSG_FINISH -> {
layoutProgress!!.visibility = View.GONE
layoutVideoHandle!!.visibility = View.VISIBLE
if (isJointing) {
isJointing = false
FileUtil.deleteFile(outputPath1)
FileUtil.deleteFile(outputPath2)
FileUtil.deleteFile(listPath)
}
}
MSG_PROGRESS -> {
val progress = msg.arg1
val duration = msg.arg2
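// a positive duration means the progress value is a percentage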
if (progress > 0) {
txtProgress!!.visibility = View.VISIBLE
val percent = if (duration > 0) "%" else ""
val strProgress = progress.toString() + percent
txtProgress!!.text = strProgress
} else {
txtProgress!!.visibility = View.INVISIBLE
}
}
else -> {
}
}
}
}
override val layoutId: Int
get() = R.layout.activity_video_handle
override fun onCreate(savedInstanceState: Bundle?) {
super.onCreate(savedInstanceState)
hideActionBar()
initView()
ffmpegHandler = FFmpegHandler(mHandler)
}
private fun initView() {
layoutProgress = getView(R.id.layout_progress)
txtProgress = getView(R.id.txt_progress)
layoutVideoHandle = getView(R.id.layout_video_handle)
initViewsWithClick(
R.id.btn_video_transform,
R.id.btn_video_cut,
R.id.btn_video_concat,
R.id.btn_screen_shot,
R.id.btn_water_mark,
R.id.btn_generate_gif,
R.id.btn_screen_record,
R.id.btn_combine_video,
R.id.btn_multi_video,
R.id.btn_reverse_video,
R.id.btn_denoise_video,
R.id.btn_to_image,
R.id.btn_pip,
R.id.btn_moov,
R.id.btn_speed,
R.id.btn_flv,
R.id.btn_thumbnail
)
}
override fun onViewClick(view: View) {
viewId = view.id
if (viewId == R.id.btn_combine_video) {
handlePhoto()
return
}
selectFile()
}
override fun onSelectedFile(filePath: String) {
doHandleVideo(filePath)
}
/**
* Using FFmpeg cmd to handle video
*
* @param srcFile srcFile
*/
private fun doHandleVideo(srcFile: String) {
var commandLine: Array<String>? = null
if (!FileUtil.checkFileExist(srcFile)) {
return
}
if (!FileUtil.isVideo(srcFile)) {
showToast(getString(R.string.wrong_video_format))
return
}
when (viewId) {
R.id.btn_video_transform//transform format
-> {
val transformVideo = PATH + File.separator + "transformVideo.mp4"
commandLine = FFmpegUtil.transformVideo(srcFile, transformVideo)
}
R.id.btn_video_cut//cut video
-> {
val suffix = FileUtil.getFileSuffix(srcFile)
if (suffix == null || suffix.isEmpty()) {
return
}
val cutVideo = PATH + File.separator + "cutVideo" + suffix
val startTime = 0
val duration = 20
commandLine = FFmpegUtil.cutVideo(srcFile, startTime, duration, cutVideo)
}
R.id.btn_video_concat//concat video together
-> concatVideo(srcFile)
R.id.btn_screen_shot//video snapshot
-> {
val screenShot = PATH + File.separator + "screenShot.jpg"
val time = 18
commandLine = FFmpegUtil.screenShot(srcFile, time, screenShot)
}
R.id.btn_water_mark//add watermark to video
-> {
// the unit of bitRate is kb
var bitRate = 500
val retriever = MediaMetadataRetriever()
retriever.setDataSource(srcFile)
val mBitRate = retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_BITRATE)
if (mBitRate != null && !mBitRate.isEmpty()) {
val probeBitrate = Integer.valueOf(mBitRate)
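// METADATA_KEY_BITRATE is reported in bits/s: convert to kb/s and round down to a multiple of 100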
bitRate = probeBitrate / 1000 / 100 * 100
}
//1:top left 2:top right 3:bottom left 4:bottom right
val location = 2
val offsetXY = 5
when (waterMarkType) {
TYPE_IMAGE// image
-> {
val photo = PATH + File.separator + "hello.png"
val photoMark = PATH + File.separator + "photoMark.mp4"
commandLine = FFmpegUtil.addWaterMarkImg(srcFile, photo, location, bitRate, offsetXY, photoMark)
}
TYPE_GIF// gif
-> {
val gifPath = PATH + File.separator + "ok.gif"
val gifWaterMark = PATH + File.separator + "gifWaterMark.mp4"
commandLine = FFmpegUtil.addWaterMarkGif(srcFile, gifPath, location, bitRate, offsetXY, gifWaterMark)
}
TYPE_TEXT// text
-> {
val text = "Hello,FFmpeg"
val textPath = PATH + File.separator + "text.png"
val result = BitmapUtil.textToPicture(textPath, text, Color.BLUE, 20)
Log.i(TAG, "text to picture result=$result")
val textMark = PATH + File.separator + "textMark.mp4"
commandLine = FFmpegUtil.addWaterMarkImg(srcFile, textPath, location, bitRate, offsetXY, textMark)
}
else -> {
}
}
}
R.id.btn_generate_gif//convert video into gif
-> {
val video2Gif = PATH + File.separator + "video2Gif.gif"
val gifStart = 10
val gifDuration = 3
val width = 320
val frameRate = 10
if (convertGifWithFFmpeg) {
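// two-pass conversion: generate a color palette first, then map the frames through it for a higher-quality GIF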
val palettePath = "$PATH/palette.png"
FileUtil.deleteFile(palettePath)
val paletteCmd = FFmpegUtil.generatePalette(srcFile, gifStart, gifDuration,
frameRate, width, palettePath)
val gifCmd = FFmpegUtil.generateGifByPalette(srcFile, palettePath, gifStart, gifDuration,
frameRate, width, video2Gif)
val cmdList = ArrayList<Array<String>>()
cmdList.add(paletteCmd)
cmdList.add(gifCmd)
ffmpegHandler!!.executeFFmpegCmds(cmdList)
} else {
convertGifInHighQuality(video2Gif, srcFile, gifStart, gifDuration, frameRate)
}
}
R.id.btn_multi_video//combine videos whose layout could be horizontal or vertical
-> {
val input1 = PATH + File.separator + "input1.mp4"
val input2 = PATH + File.separator + "input2.mp4"
val outputFile = PATH + File.separator + "multi.mp4"
if (!FileUtil.checkFileExist(input1) || !FileUtil.checkFileExist(input2)) {
return
}
commandLine = FFmpegUtil.multiVideo(input1, input2, outputFile, VideoLayout.LAYOUT_HORIZONTAL)
}
R.id.btn_reverse_video//video reverse
-> {
val output = PATH + File.separator + "reverse.mp4"
commandLine = FFmpegUtil.reverseVideo(srcFile, output)
}
R.id.btn_denoise_video//noise reduction of video
-> {
val denoise = PATH + File.separator + "denoise.mp4"
commandLine = FFmpegUtil.denoiseVideo(srcFile, denoise)
}
R.id.btn_to_image//convert video to picture
-> {
val imagePath = PATH + File.separator + "Video2Image/"
val imageFile = File(imagePath)
if (!imageFile.exists()) {
if (!imageFile.mkdir()) {
return
}
}
val mStartTime = 10//start time
val mDuration = 5//duration
val mFrameRate = 10//frameRate
commandLine = FFmpegUtil.videoToImage(srcFile, mStartTime, mDuration, mFrameRate, imagePath)
}
R.id.btn_pip//combine into picture-in-picture video
-> {
val inputFile1 = PATH + File.separator + "beyond.mp4"
val inputFile2 = PATH + File.separator + "small_girl.mp4"
if (!FileUtil.checkFileExist(inputFile1) || !FileUtil.checkFileExist(inputFile2)) {
return
}
//x and y coordinate points need to be calculated according to the size of full video and small video
//For example: full video is 320x240, small video is 120x90, so x=200 y=150
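//That is, x = fullWidth - smallWidth and y = fullHeight - smallHeight places the small video at the bottom-right corner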
val x = 200
val y = 150
val picInPic = PATH + File.separator + "PicInPic.mp4"
commandLine = FFmpegUtil.picInPicVideo(inputFile1, inputFile2, x, y, picInPic)
}
R.id.btn_moov//move the moov box, which sits behind the mdat box in an mp4 file, to the front
-> {
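// moving moov ahead of mdat (faststart) lets playback begin before the whole file is downloaded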
if (!srcFile.endsWith(FileUtil.TYPE_MP4)) {
showToast(getString(R.string.tip_not_mp4_video))
return
}
val filePath = FileUtil.getFilePath(srcFile)
var fileName = FileUtil.getFileName(srcFile)
Log.e(TAG, "moov filePath=$filePath--fileName=$fileName")
fileName = "moov_" + fileName!!
val moovPath = filePath + File.separator + fileName
if (useFFmpegCmd) {
commandLine = FFmpegUtil.moveMoovAhead(srcFile, moovPath)
} else {
val start = System.currentTimeMillis()
val ffmpegCmd = FFmpegCmd()
val result = ffmpegCmd.moveMoovAhead(srcFile, moovPath)
Log.e(TAG, "result=" + (result == 0))
Log.e(TAG, "move moov use time=" + (System.currentTimeMillis() - start))
}
}
R.id.btn_speed//playing speed of video
-> {
val speed = PATH + File.separator + "speed.mp4"
commandLine = FFmpegUtil.changeSpeed(srcFile, speed, 2f, false)
}
R.id.btn_flv//rebuild the keyframe index of flv
-> {
if (!".flv".equals(FileUtil.getFileSuffix(srcFile)!!, ignoreCase = true)) {
Log.e(TAG, "It's not flv file, suffix=" + FileUtil.getFileSuffix(srcFile)!!)
return
}
val outputPath = PATH + File.separator + "frame_index.flv"
commandLine = FFmpegUtil.buildFlvIndex(srcFile, outputPath)
}
R.id.btn_thumbnail// insert thumbnail into video
-> {
val thumbSuffix = FileUtil.getFileSuffix(srcFile)
if (thumbSuffix == null || thumbSuffix.isEmpty()) {
return
}
val thumbnailPath = PATH + File.separator + "thumb.jpg"
val thumbVideoPath = PATH + File.separator + "thumbnailVideo" + thumbSuffix
commandLine = FFmpegUtil.insertPicIntoVideo(srcFile, thumbnailPath, thumbVideoPath)
}
else -> {
}
}
if (ffmpegHandler != null && commandLine != null) {
ffmpegHandler!!.executeFFmpegCmd(commandLine)
}
}
/**
* concat/join two videos together
* It's recommended to convert to the same resolution and encoding
* @param selectedPath the path which is selected
*/
private fun concatVideo(selectedPath: String) {
if (ffmpegHandler == null || selectedPath.isEmpty()) {
return
}
isJointing = true
val targetPath = PATH + File.separator + "jointVideo.mp4"
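// re-encode both inputs to .ts at the same resolution, list them in a text file, then run the concat command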
val transformCmd1 = FFmpegUtil.transformVideoWithEncode(selectedPath, outputPath1)
var width = 0
var height = 0
//probe width and height of the selected video
val probeResult = FFmpegCmd.executeProbeSynchronize(FFmpegUtil.probeFormat(selectedPath))
val mediaBean = JsonParseTool.parseMediaFormat(probeResult)
if (mediaBean != null && mediaBean.videoBean != null) {
width = mediaBean.videoBean.width
height = mediaBean.videoBean.height
Log.e(TAG, "width=$width--height=$height")
}
val transformCmd2 = FFmpegUtil.transformVideoWithEncode(appendPath, width, height, outputPath2)
val fileList = ArrayList<String>()
fileList.add(outputPath1)
fileList.add(outputPath2)
FileUtil.createListFile(listPath, fileList)
val jointVideoCmd = FFmpegUtil.jointVideo(listPath, targetPath)
val commandList = ArrayList<Array<String>>()
commandList.add(transformCmd1)
commandList.add(transformCmd2)
commandList.add(jointVideoCmd)
ffmpegHandler!!.executeFFmpegCmds(commandList)
}
/**
* Combine pictures into video
*/
private fun handlePhoto() {
// The path of pictures, naming format: img+number.jpg
val picturePath = "$PATH/img/"
if (!FileUtil.checkFileExist(picturePath)) {
return
}
val tempPath = "$PATH/temp/"
FileUtil.deleteFolder(tempPath)
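// scale every picture to the same resolution into the temp folder, then encode the numbered sequence into a video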
val photoFile = File(picturePath)
val files = photoFile.listFiles()
val cmdList = ArrayList<Array<String>>()
//the resolution of photo which you want to convert
val resolution = "640x320"
for (file in files) {
val inputPath = file.absolutePath
val outputPath = tempPath + file.name
val convertCmd = FFmpegUtil.convertResolution(inputPath, resolution, outputPath)
cmdList.add(convertCmd)
}
val combineVideo = PATH + File.separator + "combineVideo.mp4"
val frameRate = 2// suggested synthetic frameRate:1-10
val commandLine = FFmpegUtil.pictureToVideo(tempPath, frameRate, combineVideo)
cmdList.add(commandLine)
if (ffmpegHandler != null) {
ffmpegHandler!!.executeFFmpegCmds(cmdList)
}
}
private fun convertGifInHighQuality(gifPath: String, videoPath: String, startTime: Int, duration: Int, frameRate: Int) {
Thread {
mHandler.sendEmptyMessage(MSG_BEGIN)
var width = 0
var height = 0
var rotateDegree = 0
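// probe the source size and rotation so the generated GIF keeps the correct orientation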
try {
val retriever = MediaMetadataRetriever()
retriever.setDataSource(videoPath)
val mWidth = retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_VIDEO_WIDTH)
val mHeight = retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_VIDEO_HEIGHT)
width = Integer.valueOf(mWidth)
height = Integer.valueOf(mHeight)
val rotate = retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_VIDEO_ROTATION)
rotateDegree = Integer.valueOf(rotate)
retriever.release()
Log.e(TAG, "retrieve width=$width--height=$height--rotate=$rotate")
} catch (e: Exception) {
Log.e(TAG, "retrieve error=$e")
}
val start = System.currentTimeMillis()
val highQualityGif = HighQualityGif(width, height, rotateDegree)
val result = highQualityGif.convertGIF(gifPath, videoPath, startTime, duration, frameRate)
Log.e(TAG, "convert gif result=" + result + "--time=" + (System.currentTimeMillis() - start))
mHandler.sendEmptyMessage(MSG_FINISH)
}.start()
}
override fun onDestroy() {
super.onDestroy()
mHandler.removeCallbacksAndMessages(null)
}
companion object {
private val TAG = VideoHandleActivity::class.java.simpleName
private val PATH = Environment.getExternalStorageDirectory().path
private const val useFFmpegCmd = true
private const val TYPE_IMAGE = 1
private const val TYPE_GIF = 2
private const val TYPE_TEXT = 3
private const val waterMarkType = TYPE_IMAGE
private const val convertGifWithFFmpeg = false
}
}

@ -1,156 +0,0 @@
package com.frank.ffmpeg.activity;
import android.annotation.SuppressLint;
import android.media.MediaPlayer;
import android.os.Bundle;
import android.os.Handler;
import android.os.Message;
import android.text.TextUtils;
import android.util.Log;
import android.view.Surface;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.View;
import com.frank.ffmpeg.R;
import com.frank.ffmpeg.view.VideoPreviewBar;
import java.io.IOException;
import static com.frank.ffmpeg.handler.FFmpegHandler.MSG_TOAST;
/**
* Preview the thumbnail of video when seeking
* Created by frank on 2019/11/16.
*/
public class VideoPreviewActivity extends BaseActivity implements VideoPreviewBar.PreviewBarCallback {
private final static String TAG = VideoPreviewActivity.class.getSimpleName();
private MediaPlayer mediaPlayer;
private SurfaceView surfaceVideo;
private VideoPreviewBar videoPreviewBar;
private final static int TIME_UPDATE = 1000;
private final static int MSG_UPDATE = 1234;
@SuppressLint("HandlerLeak")
private Handler mHandler = new Handler() {
@Override
public void handleMessage(Message msg) {
super.handleMessage(msg);
if (msg.what == MSG_UPDATE) {
if (videoPreviewBar != null && mediaPlayer != null) {
videoPreviewBar.updateProgress(mediaPlayer.getCurrentPosition());
}
mHandler.sendEmptyMessageDelayed(MSG_UPDATE, TIME_UPDATE);
} else if (msg.what == MSG_TOAST) {
showToast(getString(R.string.please_click_select));
}
}
};
@Override
int getLayoutId() {
return R.layout.activity_preview;
}
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
initView();
mHandler.sendEmptyMessageDelayed(MSG_TOAST, 500);
}
private void initView() {
surfaceVideo = getView(R.id.surface_view);
videoPreviewBar = getView(R.id.preview_video);
}
private void setPlayCallback(final String filePath, SurfaceView surfaceView) {
surfaceView.getHolder().addCallback(new SurfaceHolder.Callback() {
@Override
public void surfaceCreated(SurfaceHolder holder) {
doPlay(filePath, holder.getSurface());
}
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
}
});
}
private void setPrepareListener() {
if (mediaPlayer == null) {
return;
}
mediaPlayer.setOnPreparedListener(new MediaPlayer.OnPreparedListener() {
@Override
public void onPrepared(MediaPlayer mp) {
Log.i(TAG, "onPrepared...");
mediaPlayer.start();
mHandler.sendEmptyMessage(MSG_UPDATE);
}
});
}
private void doPlay(String filePath, Surface surface) {
if (surface == null || TextUtils.isEmpty(filePath)) {
return;
}
releasePlayer();
try {
mediaPlayer = new MediaPlayer();
setPrepareListener();
mediaPlayer.setDataSource(filePath);
mediaPlayer.setSurface(surface);
mediaPlayer.prepareAsync();
} catch (IOException e) {
e.printStackTrace();
}
}
@Override
void onViewClick(View view) {
}
@Override
void onSelectedFile(String filePath) {
setPlayCallback(filePath, surfaceVideo);
videoPreviewBar.init(filePath, this);
}
@Override
public void onStopTracking(long progress) {
if (mediaPlayer != null) {
Log.i(TAG, "onStopTracking progress=" + progress);
mediaPlayer.seekTo((int) progress);
}
}
private void releasePlayer() {
if (mediaPlayer != null) {
mediaPlayer.stop();
mediaPlayer.release();
mediaPlayer = null;
}
}
@Override
protected void onDestroy() {
super.onDestroy();
releasePlayer();
if (videoPreviewBar != null) {
videoPreviewBar.release();
}
}
}

@ -0,0 +1,146 @@
package com.frank.ffmpeg.activity
import android.annotation.SuppressLint
import android.media.MediaPlayer
import android.os.Bundle
import android.os.Handler
import android.os.Message
import android.text.TextUtils
import android.util.Log
import android.view.Surface
import android.view.SurfaceHolder
import android.view.SurfaceView
import android.view.View
import com.frank.ffmpeg.R
import com.frank.ffmpeg.view.VideoPreviewBar
import java.io.IOException
import com.frank.ffmpeg.handler.FFmpegHandler.MSG_TOAST
/**
* Preview the thumbnail of video when seeking
* Created by frank on 2019/11/16.
*/
class VideoPreviewActivity : BaseActivity(), VideoPreviewBar.PreviewBarCallback {
private var mediaPlayer: MediaPlayer? = null
private var surfaceVideo: SurfaceView? = null
private var videoPreviewBar: VideoPreviewBar? = null
@SuppressLint("HandlerLeak")
private val mHandler = object : Handler() {
override fun handleMessage(msg: Message) {
super.handleMessage(msg)
if (msg.what == MSG_UPDATE) {
if (videoPreviewBar != null && mediaPlayer != null) {
videoPreviewBar!!.updateProgress(mediaPlayer!!.currentPosition)
}
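// re-post MSG_UPDATE so the preview bar keeps polling the playback position every TIME_UPDATE ms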
this.sendEmptyMessageDelayed(MSG_UPDATE, TIME_UPDATE.toLong())
} else if (msg.what == MSG_TOAST) {
showToast(getString(R.string.please_click_select))
}
}
}
override val layoutId: Int
get() = R.layout.activity_preview
override fun onCreate(savedInstanceState: Bundle?) {
super.onCreate(savedInstanceState)
initView()
mHandler.sendEmptyMessageDelayed(MSG_TOAST, 500)
}
private fun initView() {
surfaceVideo = getView(R.id.surface_view)
videoPreviewBar = getView(R.id.preview_video)
}
private fun setPlayCallback(filePath: String, surfaceView: SurfaceView) {
surfaceView.holder.addCallback(object : SurfaceHolder.Callback {
override fun surfaceCreated(holder: SurfaceHolder) {
doPlay(filePath, holder.surface)
}
override fun surfaceChanged(holder: SurfaceHolder, format: Int, width: Int, height: Int) {
}
override fun surfaceDestroyed(holder: SurfaceHolder) {
}
})
}
private fun setPrepareListener() {
if (mediaPlayer == null) {
return
}
mediaPlayer!!.setOnPreparedListener {
Log.i(TAG, "onPrepared...")
mediaPlayer!!.start()
mHandler.sendEmptyMessage(MSG_UPDATE)
}
}
private fun doPlay(filePath: String, surface: Surface?) {
if (surface == null || TextUtils.isEmpty(filePath)) {
return
}
releasePlayer()
try {
mediaPlayer = MediaPlayer()
setPrepareListener()
mediaPlayer!!.setDataSource(filePath)
mediaPlayer!!.setSurface(surface)
mediaPlayer!!.prepareAsync()
} catch (e: IOException) {
e.printStackTrace()
}
}
override fun onViewClick(view: View) {
}
override fun onSelectedFile(filePath: String) {
setPlayCallback(filePath, surfaceVideo!!)
videoPreviewBar!!.init(filePath, this)
}
override fun onStopTracking(progress: Long) {
if (mediaPlayer != null) {
Log.i(TAG, "onStopTracking progress=$progress")
mediaPlayer!!.seekTo(progress.toInt())
}
}
private fun releasePlayer() {
if (mediaPlayer != null) {
mediaPlayer!!.stop()
mediaPlayer!!.release()
mediaPlayer = null
}
}
override fun onDestroy() {
super.onDestroy()
releasePlayer()
if (videoPreviewBar != null) {
videoPreviewBar!!.release()
}
}
companion object {
private val TAG = VideoPreviewActivity::class.java.simpleName
private const val TIME_UPDATE = 1000
private const val MSG_UPDATE = 1234
}
}