<!-- Full-screen custom SurfaceView used as the native video render target.
     NOTE: the original snippet used typographic quotes (“ ”), which are not
     valid XML attribute delimiters; replaced with straight quotes. -->
<com.haohao.ffmpeg.MySurfaceView
    android:id="@+id/my_surface_view"
    android:layout_width="match_parent"
    android:layout_height="match_parent" />
MainActivity.java
public class MainActivity extends AppCompatActivity implements View.OnClickListener, AVUtils.AVCallback {
private static final String TAG = “MainActivity”;
private static final String BASE_PATH = Environment.getExternalStorageDirectory().getAbsolutePath() + File.separatorChar;
private String input_video_file_path = BASE_PATH
- “input.mp4”;
private String output_video_file_path = BASE_PATH - “output.yuv”;
private String input_audio_file_path = BASE_PATH - “hello.mp3”;
private String output_audio_file_path = BASE_PATH - “hello.pcm”;
private String video_src = BASE_PATH - “ffmpeg.mp4”;
private Button mDecodeVideoBtn;
private Button mVideoRenderBtn;
private Button mAudioPlayBtn, mAudioDecodeBtn;
private ProgressDialog mProgressDialog;
private ExecutorService mExecutorService;
private MySurfaceView mySurfaceView;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
requestPermissions(new String[]{Manifest.permission.READ_EXTERNAL_STORAGE, Manifest.permission.WRITE_EXTERNAL_STORAGE, Manifest.permission.MOUNT_UNMOUNT_FILESYSTEMS}, 0);
}
mDecodeVideoBtn = (Button)findViewById(R.id.video_decode_btn);
mVideoRenderBtn = (Button)findViewById(R.id.video_render_btn);
mAudioDecodeBtn = (Button) findViewById(R.id.audio_decode_btn);
mAudioPlayBtn = (Button)findViewById(R.id.audio_play_btn);
mySurfaceView = (MySurfaceView) findViewById(R.id.my_surface_view);
mDecodeVideoBtn.setOnClickListener(this);
mVideoRenderBtn.setOnClickListener(this);
mAudioDecodeBtn.setOnClickListener(this);
mAudioPlayBtn.setOnClickListener(this);
AVUtils.registerCallback(this);
mProgressDialog = new ProgressDialog(this);
mProgressDialog.setCanceledOnTouchOutside(false);
mExecutorService = Executors.newFixedThreadPool(2);
}
@Override
public void onClick(View view) {
int id = view.getId();
switch (id) {
case R.id.video_decode_btn:
mProgressDialog.setMessage(“正在解码…”);
mProgressDialog.show();
mExecutorService.submit(new Runnable() {
@Override
public void run() {
AVUtils.videoDecode(input_video_file_path, output_video_file_path);
}
});
break;
case R.id.video_render_btn:
mExecutorService.submit(new Runnable() {
@Override
public void run() {
AVUtils.videoRender(input_video_file_path, mySurfaceView.getHolder().getSurface());
}
});
break;
case R.id.audio_decode_btn:
mProgressDialog.setMessage(“正在解码…”);
mProgressDialog.show();
mExecutorService.submit(new Runnable() {
@Override
public void run() {
AVUtils.audioDecode(input_audio_file_path, output_audio_file_path);
}
});
break;
case R.id.audio_play_btn:
mExecutorService.submit(new Runnable() {
@Override
public void run() {
AVUtils.audioPlay(input_video_file_path);
}
});
break;
}
}
@Override
public void onFinish() {
runOnUiThread(new Runnable() {
@Override
public void run() {
if (mProgressDialog.isShowing()) {
mProgressDialog.dismiss();
}
Toast.makeText(MainActivity.this, “解码完成”, Toast.LENGTH_SHORT).show();
}
});
}
@Override
protected void onDestroy() {
super.onDestroy();
mExecutorService.shutdown();
}
}
nativelib.c
#include <jni.h>
#include <string.h>
#include <android/log.h>
#include <stdio.h>
#include <libavutil/time.h>
// Codec layer (encode/decode)
#include "include/libavcodec/avcodec.h"
// Container/demuxing layer
#include "include/libavformat/avformat.h"
// Pixel format conversion / scaling
#include "include/libswscale/swscale.h"

// Variadic logging macros. The GNU extension ##__VA_ARGS__ deletes the
// trailing comma when the macro is invoked with a format string only.
// (The original text had "…" and "##VA_ARGS", mangled from "..." and
// "##__VA_ARGS__"; trailing semicolons inside the bodies are dropped so a
// call site's own ';' does not produce an empty statement.)
#define LOGI(FORMAT, ...) __android_log_print(ANDROID_LOG_INFO, "haohao", FORMAT, ##__VA_ARGS__)
#define LOGE(FORMAT, ...) __android_log_print(ANDROID_LOG_ERROR, "haohao", FORMAT, ##__VA_ARGS__)
// Converts a NUL-terminated C string holding UTF-8 bytes (e.g. Chinese text)
// into a java.lang.String by invoking the String(byte[], String) constructor
// with an explicit "UTF-8" charset. This is used instead of NewStringUTF,
// which expects *modified* UTF-8 and can mishandle arbitrary UTF-8 input.
jstring charsToUTF8String(JNIEnv *env, char *s) {
    jclass string_cls = (*env)->FindClass(env, "java/lang/String");
    // JNI names every constructor "<init>"; the signature selects
    // String(byte[] bytes, String charsetName).
    jmethodID mid = (*env)->GetMethodID(env, string_cls, "<init>", "([BLjava/lang/String;)V");
    size_t len = strlen(s);
    jbyteArray jb_arr = (*env)->NewByteArray(env, (jsize) len);
    // SetByteArrayRegion takes const jbyte*, so cast the char buffer.
    (*env)->SetByteArrayRegion(env, jb_arr, 0, (jsize) len, (const jbyte *) s);
    jstring charset = (*env)->NewStringUTF(env, "UTF-8");
    return (jstring) (*env)->NewObject(env, string_cls, mid, jb_arr, charset);
}
JNIEXPORT void JNICALL
Java_com_haohao_ffmpeg_AVUtils_videoDecode(JNIEnv *env, jclass type, jstring input_,
jstring output_) {
//访问静态方法
jmethodID mid = (*env)->GetStaticMethodID(env, type, “onNativeCallback”, “()V”);
//需要转码的视频文件(输入的视频文件)
const char *input = (*env)->GetStringUTFChars(env, input_, 0);
const char *output = (*env)->GetStringUTFChars(env, output_, 0);
//注册所有组件
av_register_all();
//封装格式上下文,统领全局的结构体,保存了视频文件封装格式的相关信息
AVFormatContext *pFormatCtx = avformat_alloc_context();
//打开输入视频文件
if (avformat_open_input(&pFormatCtx, input, NULL, NULL) != 0) {
LOGE(“%s”, “无法打开输入视频文件”);
return;
}
//获取视频文件信息,例如得到视频的宽高
if (avformat_find_stream_info(pFormatCtx, NULL) < 0) {
LOGE(“%s”, “无法获取视频文件信息”);
return;
}
//获取视频流的索引位置
//遍历所有类型的流(音频流、视频流、字幕流),找到视频流
int v_stream_idx = -1;
int i = 0;
for (; i < pFormatCtx->nb_streams; i++) {
//判断视频流
if (pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO) {
v_stream_idx = i;
break;
}
}
if (v_stream_idx == -1) {
LOGE(“%s”, “找不到视频流\n”);
return;
}
//根据视频的编码方式,获取对应的解码器
AVCodecContext *pCodecCtx = pFormatCtx->streams[v_stream_idx]->codec;
//根据编解码上下文中的编码 id 查找对应的解码器
AVCodec *pCodec = avcodec_find_decoder(pCodecCtx->codec_id);
if (pCodec == NULL) {
LOGE(“%s”, “找不到解码器,或者视频已加密\n”);
return;
}
//打开解码器,解码器有问题(比如说我们编译FFmpeg的时候没有编译对应类型的解码器)
if (avcodec_open2(pCodecCtx, pCodec, NULL) < 0) {
LOGE(“%s”, “解码器无法打开\n”);
return;
}
//输出视频信息
LOGI(“视频的文件格式:%s”, pFormatCtx->iformat->name);
LOGI(“视频时长:%lld”, (pFormatCtx->duration) / (1000 * 1000));
LOGI(“视频的宽高:%d,%d”, pCodecCtx->width, pCodecCtx->height);
LOGI(“解码器的名称:%s”, pCodec->name);
//准备读取
//AVPacket用于存储一帧一帧的压缩数据(H264)
//缓冲区,开辟空间
AVPacket *packet = (AVPacket *) av_malloc(sizeof(AVPacket));
//A