Video decoding and playback

Use FFmpeg to decode and play video
pull/107/head
frank 7 years ago
parent 17b18af513
commit 39808aca1c
Changed files:
  1. README.md (5)
  2. app/CMakeLists.txt (3)
  3. app/src/main/AndroidManifest.xml (4)
  4. app/src/main/cpp/video_player.c (161)
  5. app/src/main/java/com/frank/ffmpeg/VideoPlayer.java (16)
  6. app/src/main/java/com/frank/ffmpeg/activity/VideoHandleActivity.java (8)
  7. app/src/main/java/com/frank/ffmpeg/activity/VideoPlayerActivity.java (101)
  8. app/src/main/res/layout/activity_video_handle.xml (9)
  9. app/src/main/res/layout/activity_video_player.xml (31)
  10. app/src/main/res/values/strings.xml (3)

README.md
@@ -8,6 +8,7 @@ Use of the FFmpeg library on the Android side.<br>
- #### Audio transcoding
- #### Audio/video merging
- #### Audio extraction
- #### Audio decoding and playback
- #### Video extraction
- #### Video cutting
- #### Video transcoding
@@ -15,8 +16,10 @@ Use of the FFmpeg library on the Android side.<br>
- #### Video to GIF
- #### Add watermark to video
- #### Images to video
- #### Video decoding and playback
***
Synchronized audio/video decoding playback and live streaming (push) will be added later.
<br><br>

app/CMakeLists.txt
@@ -23,7 +23,8 @@ add_library( # Sets the name of the library.
src/main/cpp/ffmpeg_filter.c
src/main/cpp/ffmpeg_opt.c
src/main/cpp/audio_player.c
src/main/cpp/openSL_audio_player.c
src/main/cpp/video_player.c)
add_library( ffmpeg
SHARED

app/src/main/AndroidManifest.xml
@@ -34,6 +34,10 @@
<activity android:name=".activity.VideoHandleActivity"/>
<activity android:name=".activity.VideoPlayerActivity"
android:screenOrientation="landscape">
</activity>
</application>
</manifest>

app/src/main/cpp/video_player.c
@@ -0,0 +1,161 @@
//
// Created by frank on 2018/2/1.
//
#include "libavcodec/avcodec.h"
#include "libavformat/avformat.h"
#include "libswscale/swscale.h"
#include <android/native_window.h>
#include <android/native_window_jni.h>
#include <stdio.h>
#include <string.h>
#include <unistd.h>
#include <libavutil/imgutils.h>
#include <android/log.h>
#define TAG "MediaPlayer"
#define LOGI(FORMAT,...) __android_log_print(ANDROID_LOG_INFO, TAG, FORMAT, ##__VA_ARGS__);
#define LOGE(FORMAT,...) __android_log_print(ANDROID_LOG_ERROR, TAG, FORMAT, ##__VA_ARGS__);
// playback rate factor (scales the per-frame delay)
float play_rate = 1;
// total video duration, in seconds
long duration = 0;
JNIEXPORT jint JNICALL Java_com_frank_ffmpeg_VideoPlayer_play
(JNIEnv * env, jclass clazz, jstring filePath, jobject surface){
const char * file_name = (*env)->GetStringUTFChars(env, filePath, JNI_FALSE);
// register all muxers, demuxers and codecs
av_register_all();
// allocate the format context
AVFormatContext * pFormatCtx = avformat_alloc_context();
// open the input video file
if(avformat_open_input(&pFormatCtx, file_name, NULL, NULL)!=0) {
LOGE("Couldn't open file:%s\n", file_name);
return -1;
}
// read stream information
if(avformat_find_stream_info(pFormatCtx, NULL)<0) {
LOGE("Couldn't find stream information.");
return -1;
}
// find the index of the first video stream
int videoStream = -1, i;
for (i = 0; i < pFormatCtx->nb_streams; i++) {
if (pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO
&& videoStream < 0) {
videoStream = i;
}
}
if(videoStream==-1) {
LOGE("couldn't find a video stream.");
return -1;
}
// get the total duration, in seconds
if (pFormatCtx->duration != AV_NOPTS_VALUE) {
duration = (long) (pFormatCtx->duration / AV_TIME_BASE);
LOGE("duration=%d", duration);
}
// get the codec context of the video stream
AVCodecContext * pCodecCtx = pFormatCtx->streams[videoStream]->codec;
// find a decoder for the stream's codec
AVCodec * pCodec = avcodec_find_decoder(pCodecCtx->codec_id);
if(pCodec==NULL) {
LOGE("couldn't find Codec.");
return -1;
}
if(avcodec_open2(pCodecCtx, pCodec, NULL) < 0) {
LOGE("Couldn't open codec.");
return -1;
}
// get the native window from the Java Surface
ANativeWindow* nativeWindow = ANativeWindow_fromSurface(env, surface);
// video width and height
int videoWidth = pCodecCtx->width;
int videoHeight = pCodecCtx->height;
// set the window buffer size and format; the buffer is scaled to the window automatically
ANativeWindow_setBuffersGeometry(nativeWindow, videoWidth, videoHeight, WINDOW_FORMAT_RGBA_8888);
ANativeWindow_Buffer windowBuffer;
// allocate frames for the decoded data and the RGBA copy
AVFrame * pFrame = av_frame_alloc();
AVFrame * pFrameRGBA = av_frame_alloc();
if(pFrameRGBA == NULL || pFrame == NULL) {
LOGE("Couldn't allocate video frame.");
return -1;
}
// allocate the RGBA buffer whose contents will be rendered
int numBytes=av_image_get_buffer_size(AV_PIX_FMT_RGBA, pCodecCtx->width, pCodecCtx->height, 1);
uint8_t * buffer=(uint8_t *)av_malloc(numBytes*sizeof(uint8_t));
av_image_fill_arrays(pFrameRGBA->data, pFrameRGBA->linesize, buffer, AV_PIX_FMT_RGBA,
pCodecCtx->width, pCodecCtx->height, 1);
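// av_image_fill_arrays points pFrameRGBA->data/linesize at the RGBA buffer, so sws_scale below writes straight into it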
// decoded frames are not in RGBA, so prepare a conversion context for rendering
struct SwsContext *sws_ctx = sws_getContext(pCodecCtx->width,
pCodecCtx->height,
pCodecCtx->pix_fmt,
pCodecCtx->width,
pCodecCtx->height,
AV_PIX_FMT_RGBA,
SWS_BILINEAR,
NULL,
NULL,
NULL);
int frameFinished;
AVPacket packet;
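// main playback loop: read packets, decode video packets into frames, convert each frame to RGBA and blit it into the window buffer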
while(av_read_frame(pFormatCtx, &packet)>=0) {
// only handle packets that belong to the video stream
if(packet.stream_index==videoStream) {
// decode the packet
avcodec_decode_video2(pCodecCtx, pFrame, &frameFinished, &packet);
if (frameFinished) {
// lock native window
ANativeWindow_lock(nativeWindow, &windowBuffer, 0);
// convert the decoded frame to RGBA
sws_scale(sws_ctx, (uint8_t const * const *)pFrame->data,
pFrame->linesize, 0, pCodecCtx->height,
pFrameRGBA->data, pFrameRGBA->linesize);
// source and destination strides
uint8_t * dst = windowBuffer.bits;
int dstStride = windowBuffer.stride * 4;
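// ANativeWindow_Buffer.stride is measured in pixels, hence x4 bytes per RGBA pixel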
uint8_t * src = pFrameRGBA->data[0];
int srcStride = pFrameRGBA->linesize[0];
// the window stride and the frame stride usually differ, so copy line by line
int h;
for (h = 0; h < videoHeight; h++) {
memcpy(dst + h * dstStride, src + h * srcStride, (size_t) srcStride);
}
ANativeWindow_unlockAndPost(nativeWindow);
}
// crude frame pacing: ~40 ms per frame (~25 fps), scaled by play_rate
usleep((unsigned long) (1000 * 40 * play_rate));
}
av_packet_unref(&packet);
}
// free resources and close the input file
sws_freeContext(sws_ctx);
ANativeWindow_release(nativeWindow);
av_free(buffer);
av_free(pFrameRGBA);
av_free(pFrame);
avcodec_close(pCodecCtx);
avformat_close_input(&pFormatCtx);
(*env)->ReleaseStringUTFChars(env, filePath, file_name);
return 0;
}
// set the playback rate
JNIEXPORT jint JNICALL Java_com_frank_ffmpeg_VideoPlayer_setPlayRate
(JNIEnv * env, jclass clazz, jfloat playRate){
play_rate = playRate;
return 0;
}
// get the total duration, in seconds
JNIEXPORT jint JNICALL Java_com_frank_ffmpeg_VideoPlayer_getDuration
(JNIEnv * env, jclass clazz){
return (jint) duration;
}

app/src/main/java/com/frank/ffmpeg/VideoPlayer.java
@@ -0,0 +1,16 @@
package com.frank.ffmpeg;
/**
* Video player (JNI wrapper around FFmpeg)
* Created by frank on 2018/2/1
*/
public class VideoPlayer {
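// load the native library; app/CMakeLists.txt adds video_player.c to the native build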
static {
System.loadLibrary("media-handle");
}
public native int play(String filePath, Object surface);
public native int setPlayRate(float playRate);
}
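Note: video_player.c above also exports Java_com_frank_ffmpeg_VideoPlayer_getDuration. By JNI naming convention the full 16-line class presumably declares a matching native method (not visible in this hunk), roughly:
public native int getDuration();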

app/src/main/java/com/frank/ffmpeg/activity/VideoHandleActivity.java
@@ -1,6 +1,7 @@
package com.frank.ffmpeg.activity;
import android.annotation.SuppressLint;
import android.content.Intent;
import android.os.Bundle;
import android.os.Environment;
import android.os.Handler;
@@ -65,6 +66,7 @@ public class VideoHandleActivity extends AppCompatActivity implements View.OnCli
findViewById(R.id.btn_generate_gif).setOnClickListener(this);
findViewById(R.id.btn_screen_record).setOnClickListener(this);
findViewById(R.id.btn_combine_video).setOnClickListener(this);
findViewById(R.id.btn_play_video).setOnClickListener(this);
}
private void setVisible() {
@@ -117,6 +119,9 @@ public class VideoHandleActivity extends AppCompatActivity implements View.OnCli
case R.id.btn_combine_video:
handleType = 7;
break;
case R.id.btn_play_video:
handleType = 8;
break;
default:
handleType = 0;
break;
@@ -196,6 +201,9 @@ public class VideoHandleActivity extends AppCompatActivity implements View.OnCli
String combineVideo = PATH + File.separator + "combineVideo.mp4";
commandLine = FFmpegUtil.pictureToVideo(picturePath, combineVideo);
break;
case 8:
startActivity(new Intent(VideoHandleActivity.this, VideoPlayerActivity.class));
return;
default:
break;
}

app/src/main/java/com/frank/ffmpeg/activity/VideoPlayerActivity.java
@@ -0,0 +1,101 @@
package com.frank.ffmpeg.activity;
import android.os.Bundle;
import android.os.Environment;
import android.support.v7.app.AppCompatActivity;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.View;
import android.widget.Button;
import com.frank.ffmpeg.R;
import com.frank.ffmpeg.VideoPlayer;
import java.io.File;
/**
* Play video with FFmpeg
* Created by frank on 2018/2/1.
*/
public class VideoPlayerActivity extends AppCompatActivity implements SurfaceHolder.Callback {
private static final String TAG = VideoPlayerActivity.class.getSimpleName();
SurfaceHolder surfaceHolder;
private final static String PATH = Environment.getExternalStorageDirectory().getPath() + File.separator;
private String filePath = PATH + "hello.mp4";
private VideoPlayer videoPlayer;
// playback rate factor
private float playRate = 1;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_video_player);
initView();
initPlayer();
}
private void initView(){
SurfaceView surfaceView = (SurfaceView) findViewById(R.id.surface_view);
surfaceHolder = surfaceView.getHolder();
surfaceHolder.addCallback(this);
Button btn_slow = (Button) findViewById(R.id.btn_slow);
btn_slow.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
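// a larger playRate means a longer per-frame delay on the native side, i.e. slower playback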
if(playRate <= 32){
playRate *= 2;
}
Log.i(TAG, "playRate=" + playRate);
videoPlayer.setPlayRate(playRate);
}
});
Button btn_fast = (Button) findViewById(R.id.btn_fast);
btn_fast.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
// use a float literal: the integer expression 1/32 evaluates to 0
if(playRate >= 1f / 32){
playRate *= 0.5;
}
Log.i(TAG, "playRate=" + playRate);
videoPlayer.setPlayRate(playRate);
}
});
}
private void initPlayer(){
videoPlayer = new VideoPlayer();
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
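// play() blocks while decoding the whole file, so run it on a background thread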
new Thread(new Runnable() {
@Override
public void run() {
videoPlayer.play(filePath, surfaceHolder.getSurface());
}
}).start();
}
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
}
@Override
protected void onDestroy() {
super.onDestroy();
if(videoPlayer != null){
videoPlayer = null;
}
}
}

app/src/main/res/layout/activity_video_handle.xml
@@ -85,4 +85,13 @@
android:layout_centerInParent="true"
android:visibility="gone"/>
<Button
android:id="@+id/btn_play_video"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_centerHorizontal="true"
android:layout_below="@id/btn_combine_video"
android:layout_marginTop="10dp"
android:text="@string/video_play"/>
</RelativeLayout>

app/src/main/res/layout/activity_video_player.xml
@@ -0,0 +1,31 @@
<?xml version="1.0" encoding="utf-8"?>
<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent"
tools:context="com.frank.ffmpeg.activity.VideoPlayerActivity">
<SurfaceView
android:id="@+id/surface_view"
android:layout_width="match_parent"
android:layout_height="match_parent" />
<Button
android:id="@+id/btn_slow"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_alignParentBottom="true"
android:layout_alignParentStart="true"
android:layout_margin="16dp"
android:text="@string/video_slow"/>
<Button
android:id="@+id/btn_fast"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_alignParentBottom="true"
android:layout_alignParentEnd="true"
android:layout_margin="16dp"
android:text="@string/video_fast"/>
</RelativeLayout>

app/src/main/res/values/strings.xml
@@ -25,5 +25,8 @@
<string name="video_from_photo">图片合成视频</string> <string name="video_from_photo">图片合成视频</string>
<string name="video_extract_frame">视频抽帧</string> <string name="video_extract_frame">视频抽帧</string>
<string name="video_part_zoom">视频局部放大</string> <string name="video_part_zoom">视频局部放大</string>
<string name="video_play">视频播放</string>
<string name="video_slow">慢放</string>
<string name="video_fast">快进</string>
</resources>
