1.下載FFmpeg-3.3.1源碼 解壓
2.配置環境變量,在電腦/Users/zhangyipeng/目錄下創建.bash_profile文件(已有此文件無需創建),打開文件加入如下配置:
//大家替換成自己的NDK目錄就好了,我這里使用的是android studio上下載的ndk目錄
export ANDROID_HOME=/Users/zhangyipeng/Library/Android/sdk
export ANDROID_NDK_HOME=$ANDROID_HOME/ndk-bundle
export PATH=${PATH}:$ANDROID_HOME/platform-tools
export PATH=$PATH:$ANDROID_NDK_HOME
3.執行 source ~/.bash_profile 命令,使上面配置的環境變量生效
4.編譯前需要修改ffmpeg-3.3.1目錄下的configure文件,修改如下所示:
注釋掉前四行,然后換成下面沒有注釋的四行
#SLIBNAME_WITH_MAJOR='$(SLIBNAME).$(LIBMAJOR)'
#LIB_INSTALL_EXTRA_CMD='$$(RANLIB) "$(LIBDIR)/$(LIBNAME)"'
#SLIB_INSTALL_NAME='$(SLIBNAME_WITH_VERSION)'
#SLIB_INSTALL_LINKS='$(SLIBNAME_WITH_MAJOR) $(SLIBNAME)'
SLIBNAME_WITH_MAJOR='$(SLIBPREF)$(FULLNAME)-$(LIBMAJOR)$(SLIBSUF)'
LIB_INSTALL_EXTRA_CMD='$$(RANLIB) "$(LIBDIR)/$(LIBNAME)"'
SLIB_INSTALL_NAME='$(SLIBNAME_WITH_MAJOR)'
SLIB_INSTALL_LINKS='$(SLIBNAME)'
5.在ffmpeg-3.3.1目錄下創建文件build_android.sh,打開終端,進入ffmpeg-3.3.1目錄,執行如下命令,使此文件可執行
chmod +x build_android.sh
文件內容如下:
#!/bin/bash
# build_android.sh - cross-compile FFmpeg 3.3.x into six shared libraries
# (libavcodec, libavformat, ...) for Android on a macOS host.
# Run from the FFmpeg source root:
#   chmod +x build_android.sh && ./build_android.sh
# Output lands in ./android/<cpu>-vfp/lib and ./android/<cpu>-vfp/include.
set -e

# Path to the Android NDK; adjust to your own installation.
NDK=/Users/zhangyipeng/Library/Android/sdk/ndk-bundle
# Target sysroot: minimum API level android-14, ARM architecture.
# For an x86 build use arch-x86 instead of arch-arm.
PLATFORM=$NDK/platforms/android-14/arch-arm
# Cross toolchain matching PLATFORM above. "4.9" is the GCC version bundled
# with the NDK (use the newest you have); "darwin-x86_64" is the macOS host.
TOOLCHAIN=$NDK/toolchains/arm-linux-androideabi-4.9/prebuilt/darwin-x86_64

build_one() {
  # Fix: the original never passed $OPTIMIZE_CFLAGS to configure, so the
  # per-CPU tuning flags set below had no effect; they are now appended
  # to --extra-cflags. $ADDITIONAL_CONFIGURE_FLAG is intentionally
  # unquoted (may be empty or expand to a flag).
  ./configure \
    --prefix="$PREFIX" \
    --target-os=linux \
    --cross-prefix="$TOOLCHAIN/bin/arm-linux-androideabi-" \
    --arch=arm \
    --sysroot="$PLATFORM" \
    --extra-cflags="-I$PLATFORM/usr/include $OPTIMIZE_CFLAGS" \
    --cc="$TOOLCHAIN/bin/arm-linux-androideabi-gcc" \
    --nm="$TOOLCHAIN/bin/arm-linux-androideabi-nm" \
    --enable-shared \
    --enable-runtime-cpudetect \
    --enable-gpl \
    --enable-small \
    --enable-cross-compile \
    --disable-debug \
    --disable-static \
    --disable-doc \
    --disable-asm \
    --disable-ffmpeg \
    --disable-ffplay \
    --disable-ffprobe \
    --disable-ffserver \
    --disable-postproc \
    --disable-avdevice \
    --disable-symver \
    --disable-stripping \
    $ADDITIONAL_CONFIGURE_FLAG
  # Force-enable libm functions the cross-compile probe misses (they do
  # exist in bionic). "sed -i ''" is the BSD/macOS form; on Linux drop ''.
  sed -i '' 's/HAVE_LRINT 0/HAVE_LRINT 1/g' config.h
  sed -i '' 's/HAVE_LRINTF 0/HAVE_LRINTF 1/g' config.h
  sed -i '' 's/HAVE_ROUND 0/HAVE_ROUND 1/g' config.h
  sed -i '' 's/HAVE_ROUNDF 0/HAVE_ROUNDF 1/g' config.h
  sed -i '' 's/HAVE_TRUNC 0/HAVE_TRUNC 1/g' config.h
  sed -i '' 's/HAVE_TRUNCF 0/HAVE_TRUNCF 1/g' config.h
  sed -i '' 's/HAVE_CBRT 0/HAVE_CBRT 1/g' config.h
  sed -i '' 's/HAVE_RINT 0/HAVE_RINT 1/g' config.h
  make clean
  make -j4
  make install
}

# arm v7 + vfp (the configuration actually built)
CPU=armv7-a
OPTIMIZE_CFLAGS="-mfloat-abi=softfp -mfpu=vfp -marm -march=$CPU "
PREFIX=./android/$CPU-vfp
ADDITIONAL_CONFIGURE_FLAG=
build_one

# Alternative configurations, kept for reference:
# CPU=armv
# PREFIX=$(pwd)/android/$CPU
# ADDI_CFLAGS="-marm"
# build_one
#arm v6
#CPU=armv6
#OPTIMIZE_CFLAGS="-marm -march=$CPU"
#PREFIX=./android/$CPU
#ADDITIONAL_CONFIGURE_FLAG=
#build_one
#arm v7vfpv3
# CPU=armv7-a
# OPTIMIZE_CFLAGS="-mfloat-abi=softfp -mfpu=vfpv3-d16 -marm -march=$CPU "
# PREFIX=./android/$CPU
# ADDITIONAL_CONFIGURE_FLAG=
# build_one
#arm v7n
#CPU=armv7-a
#OPTIMIZE_CFLAGS="-mfloat-abi=softfp -mfpu=neon -marm -march=$CPU -mtune=cortex-a8"
#PREFIX=./android/$CPU
#ADDITIONAL_CONFIGURE_FLAG=--enable-neon
#build_one
#arm v6+vfp
#CPU=armv6
#OPTIMIZE_CFLAGS="-DCMP_HAVE_VFP -mfloat-abi=softfp -mfpu=vfp -marm -march=$CPU"
#PREFIX=./android/${CPU}_vfp
#ADDITIONAL_CONFIGURE_FLAG=
#build_one
6.執行build_android.sh,開始編譯ffmpeg成.so動態庫
命令如下:
./build_android.sh
7.大概等10~20分鐘,編譯完成,在ffmpeg-3.3.1目錄下會生成一個名為android的文件夾,動態庫就在這個目錄之中,如圖所示
8.如上圖所示,會生成6個動態庫,當然我們如果覺得動態庫太多,使用麻煩,也可以只生成一個動態庫,方法如下,我們可以再ffmpeg-3.3.1下再創建一個build_android_all.sh文件,重新執行上述步驟,就可以生成1個名為libffmpeg.so的庫
build_android_all.sh文件文件內容如下:
#!/bin/bash
# build_android_all.sh - cross-compile FFmpeg 3.3.x as six static libraries
# and then link them into one merged shared library, libffmpeg.so.
# Run from the FFmpeg source root on a macOS host.
set -e

# Path to the Android NDK; adjust to your own installation.
NDK=/Users/zhangyipeng/Library/Android/sdk/ndk-bundle
# Target sysroot: minimum API level android-14, ARM architecture.
# For an x86 build use arch-x86 instead of arch-arm.
PLATFORM=$NDK/platforms/android-14/arch-arm
# Cross toolchain matching PLATFORM; 4.9 = GCC version bundled with the NDK,
# darwin-x86_64 = macOS host tag.
TOOLCHAIN=$NDK/toolchains/arm-linux-androideabi-4.9/prebuilt/darwin-x86_64

build_one() {
  # Static build this time (--enable-static / --disable-shared): the .a
  # archives are merged into a single .so by the ld step below.
  # Fix: the original defined $OPTIMIZE_CFLAGS but never passed it to
  # configure; it is now appended to --extra-cflags.
  ./configure \
    --prefix="$PREFIX" \
    --target-os=linux \
    --cross-prefix="$TOOLCHAIN/bin/arm-linux-androideabi-" \
    --arch=arm \
    --sysroot="$PLATFORM" \
    --extra-cflags="-I$PLATFORM/usr/include $OPTIMIZE_CFLAGS" \
    --cc="$TOOLCHAIN/bin/arm-linux-androideabi-gcc" \
    --nm="$TOOLCHAIN/bin/arm-linux-androideabi-nm" \
    --disable-shared \
    --enable-runtime-cpudetect \
    --enable-gpl \
    --enable-small \
    --enable-cross-compile \
    --disable-debug \
    --enable-static \
    --disable-doc \
    --disable-asm \
    --disable-ffmpeg \
    --disable-ffplay \
    --disable-ffprobe \
    --disable-ffserver \
    --disable-postproc \
    --disable-avdevice \
    --disable-symver \
    --disable-stripping \
    $ADDITIONAL_CONFIGURE_FLAG
  # Force-enable libm functions the cross-compile probe misses (they do
  # exist in bionic). "sed -i ''" is the BSD/macOS form; on Linux drop ''.
  sed -i '' 's/HAVE_LRINT 0/HAVE_LRINT 1/g' config.h
  sed -i '' 's/HAVE_LRINTF 0/HAVE_LRINTF 1/g' config.h
  sed -i '' 's/HAVE_ROUND 0/HAVE_ROUND 1/g' config.h
  sed -i '' 's/HAVE_ROUNDF 0/HAVE_ROUNDF 1/g' config.h
  sed -i '' 's/HAVE_TRUNC 0/HAVE_TRUNC 1/g' config.h
  sed -i '' 's/HAVE_TRUNCF 0/HAVE_TRUNCF 1/g' config.h
  sed -i '' 's/HAVE_CBRT 0/HAVE_CBRT 1/g' config.h
  sed -i '' 's/HAVE_RINT 0/HAVE_RINT 1/g' config.h
  make clean
  make -j4
  make install
  # Merge the six static archives into one shared libffmpeg.so.
  # --whole-archive forces every object from the .a files into the .so so
  # no FFmpeg symbol is dropped; -Bsymbolic binds internal references
  # locally; bionic's dynamic linker is set explicitly.
  "$TOOLCHAIN/bin/arm-linux-androideabi-ld" \
    -rpath-link="$PLATFORM/usr/lib" \
    -L"$PLATFORM/usr/lib" \
    -L"$PREFIX/lib" \
    -soname libffmpeg.so -shared -nostdlib -Bsymbolic --whole-archive --no-undefined -o \
    "$PREFIX/libffmpeg.so" \
    libavcodec/libavcodec.a \
    libavfilter/libavfilter.a \
    libswresample/libswresample.a \
    libavformat/libavformat.a \
    libavutil/libavutil.a \
    libswscale/libswscale.a \
    -lc -lm -lz -ldl -llog --dynamic-linker=/system/bin/linker \
    "$TOOLCHAIN/lib/gcc/arm-linux-androideabi/4.9/libgcc.a"
}

# arm v7 + vfp (the configuration actually built)
CPU=armv7-a
OPTIMIZE_CFLAGS="-mfloat-abi=softfp -mfpu=vfp -marm -march=$CPU "
PREFIX=./android/$CPU-vfp-all
ADDITIONAL_CONFIGURE_FLAG=
build_one

# Alternative configurations, kept for reference:
# CPU=armv
# PREFIX=$(pwd)/android/$CPU
# ADDI_CFLAGS="-marm"
# build_one
#arm v6
#CPU=armv6
#OPTIMIZE_CFLAGS="-marm -march=$CPU"
#PREFIX=./android/$CPU
#ADDITIONAL_CONFIGURE_FLAG=
#build_one
#arm v7vfpv3
# CPU=armv7-a
# OPTIMIZE_CFLAGS="-mfloat-abi=softfp -mfpu=vfpv3-d16 -marm -march=$CPU "
# PREFIX=./android/$CPU
# ADDITIONAL_CONFIGURE_FLAG=
# build_one
#arm v7n
#CPU=armv7-a
#OPTIMIZE_CFLAGS="-mfloat-abi=softfp -mfpu=neon -marm -march=$CPU -mtune=cortex-a8"
#PREFIX=./android/$CPU
#ADDITIONAL_CONFIGURE_FLAG=--enable-neon
#build_one
#arm v6+vfp
#CPU=armv6
#OPTIMIZE_CFLAGS="-DCMP_HAVE_VFP -mfloat-abi=softfp -mfpu=vfp -marm -march=$CPU"
#PREFIX=./android/${CPU}_vfp
#ADDITIONAL_CONFIGURE_FLAG=
#build_one
9.在ffmpeg-3.3.1目錄下執行如下命令:
chmod +x build_android_all.sh
./build_android_all.sh
10.執行完成后,生成了一個名為libffmpeg.so的動態庫,如下圖所示:
11.這個動態庫其實是相當于把上面六個動態庫合并在一起了,所以使用效果一致,使用起來也更為便捷。
從上圖可以看出build_android_all.sh腳本的作用其實是先生成6個靜態庫,然后再把這幾個靜態庫(就是那6個.a文件)合并成一個動態庫libffmpeg.so,而build_android.sh其實是直接生成了6個動態庫。我們下面對比下這兩個腳本,看下區別,如下圖所示:
a.腳本區別:
b.編譯生成文件的區別:
12.下面開始使用Android Studio創建App工程,并使用以上我們編譯生成的動態庫,編寫一個簡單的jni調用ffmpeg,播放網絡視頻的Demo
13.創建Android工程FFmpegAndroidDemo,然后在main目錄下創建jni文件夾,然后復制上圖中的include文件夾以及libffmpeg.so(或者include文件夾以及6個so文件)到jni目錄下,結構如下圖所示:
14.工程根目錄的gradle.properties中添加如下代碼:
android.useDeprecatedNdk=true
14.工程根目錄的local.properties中添加如下代碼:
ndk.dir=/Users/zhangyipeng/Library/Android/sdk/ndk-bundle
15.創建FFmpegNdk.java文件,代碼如下:
/**
 * JNI bridge to the native FFmpeg wrapper (libmyffmpeg.so).
 * Loads the merged FFmpeg library plus the wrapper at class-load time.
 */
public class FFmpegNdk {
static {
// When shipping the six individual FFmpeg .so files instead of the merged
// libffmpeg.so, load them here instead (each before libmyffmpeg):
// System.loadLibrary("avcodec-57");
// System.loadLibrary("avfilter-6");
// System.loadLibrary("avformat-57");
// System.loadLibrary("avutil-55");
// System.loadLibrary("swresample-2");
// System.loadLibrary("swscale-4");
// The merged FFmpeg library must be loaded before the wrapper that uses it.
System.loadLibrary("ffmpeg");
System.loadLibrary("myffmpeg");
}
// Returns a human-readable list of all codecs compiled into FFmpeg.
public static native String avcodecinfo();
// Decodes the video at 'url' and renders frames onto 'surface'
// (an android.view.Surface). Blocks until the stream ends;
// returns 0 on success, -1 on failure (see ffmpeg_ndk.c).
public static native int playVideo(String url, Object surface);
}
16.MainActivity.java代碼如下:
/**
 * Demo activity: shows FFmpeg codec info in a TextView on button click and
 * plays a network video onto a SurfaceView via the native playVideo() call.
 */
public class MainActivity extends AppCompatActivity implements SurfaceHolder.Callback {
    private SurfaceView mSurfaceView;
    private SurfaceHolder mSurfaceHolder;
    // HLS test stream (unused here; kept for experimentation).
    public static final String url2 = "http://58.135.196.138:8090/live/db3bd108e3364bf3888ccaf8377af077/index.m3u8";
    // MP4 test stream that is actually played.
    public static final String url = "http://tx2.a.yximgs.com/upic/2017/06/06/12/BMjAxNzA2MDYxMjA3MDJfOTg5MDkwODRfMjMzMzY5NjI3OV8xXzM=_hd.mp4?tag=1-1496888787-h-0-2gpzxdvetp-f9da4113e6f3de74";

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        final TextView tv = (TextView) findViewById(R.id.tv);
        // Fix: the original declared a *local* SurfaceView here, shadowing the
        // mSurfaceView field and leaving the field permanently null.
        mSurfaceView = (SurfaceView) findViewById(R.id.surface_view);
        tv.setMovementMethod(ScrollingMovementMethod.getInstance());
        findViewById(R.id.button).setOnClickListener(new View.OnClickListener() {
            @RequiresApi(api = Build.VERSION_CODES.JELLY_BEAN)
            @Override
            public void onClick(View v) {
                tv.setText(FFmpegNdk.avcodecinfo());
                startActivity(new Intent(MainActivity.this, VideoActivity.class));
            }
        });
        mSurfaceHolder = mSurfaceView.getHolder();
        mSurfaceHolder.addCallback(this);
    }

    @Override
    public void surfaceCreated(SurfaceHolder holder) {
        // The native decode loop blocks until the stream ends,
        // so it must run off the UI thread.
        new Thread(new Runnable() {
            @Override
            public void run() {
                FFmpegNdk.playVideo(url, mSurfaceHolder.getSurface());
            }
        }).start();
    }

    @Override
    public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
    }

    @Override
    public void surfaceDestroyed(SurfaceHolder holder) {
        // NOTE(review): the native loop keeps running after the surface is
        // destroyed - playVideo exposes no stop mechanism to call here.
    }
}
17.使用javah命令生成.h頭文件,在FFmpegAndroidDemo/app/src/main/java目錄下使用如下命令,在jni目錄下生成com_zyp_ffmpegandroiddemo_FFmpegNdk.h文件
javah -d ../jni com.zyp.ffmpegandroiddemo.FFmpegNdk
18.jni目錄下創建ffmpeg_ndk.c文件,代碼如下所示:
/* DO NOT EDIT THIS FILE - it is machine generated */
#include <jni.h>
#include <stdio.h>
#include "include/libavcodec/avcodec.h"
#include "include/libavformat/avformat.h"
#include "include/libavfilter/avfilter.h"
//#include "com_zyp_ffmpegandroiddemo_FFmpegNdk.h"
#include <android/native_window.h>
#include <android/native_window_jni.h>
#include <android/log.h>
#include "util.h"
#define TAG "ffmpeg_android_tag"
#define LOGD(...) __android_log_print(ANDROID_LOG_DEBUG,TAG ,__VA_ARGS__) // 定義LOGD類型
#define LOGI(...) __android_log_print(ANDROID_LOG_INFO,TAG ,__VA_ARGS__) // 定義LOGI類型
#define LOGW(...) __android_log_print(ANDROID_LOG_WARN,TAG ,__VA_ARGS__) // 定義LOGW類型
#define LOGE(...) __android_log_print(ANDROID_LOG_ERROR,TAG ,__VA_ARGS__) // 定義LOGE類型
#define LOGF(...) __android_log_print(ANDROID_LOG_FATAL,TAG ,__VA_ARGS__) // 定義LOGF類型
/* Header for class com_zyp_ffmpegandroiddemo_FFmpegNdk */
#ifndef _Included_com_zyp_ffmpegandroiddemo_FFmpegNdk
#define _Included_com_zyp_ffmpegandroiddemo_FFmpegNdk
#ifdef __cplusplus
extern "C" {
#endif
/*
 * Class:     com_zyp_ffmpegandroiddemo_FFmpegNdk
 * Method:    avcodecinfo
 * Signature: ()Ljava/lang/String;   (the original comment said ()V - wrong)
 *
 * Builds a human-readable list of every codec registered with libavcodec,
 * one "[Dec|Enc][Video|Audio|Other][      name]" entry per line.
 */
JNIEXPORT jstring JNICALL Java_com_zyp_ffmpegandroiddemo_FFmpegNdk_avcodecinfo(JNIEnv *env, jobject obj) {
    char info[40000] = {0};
    size_t off = 0; /* current write position; always < sizeof(info) */
    av_register_all();
    AVCodec *c_temp = av_codec_next(NULL);
    /* The original appended with sprintf(info, "%s...", info): overlapping
     * source and destination buffers are undefined behavior, and rescanning
     * the whole buffer each iteration is O(n^2). Append at an advancing
     * offset with snprintf instead. */
    while (c_temp != NULL && off < sizeof(info) - 1) {
        const char *side = (c_temp->decode != NULL) ? "[Dec]" : "[Enc]";
        const char *kind;
        switch (c_temp->type) {
        case AVMEDIA_TYPE_VIDEO:
            kind = "[Video]";
            break;
        case AVMEDIA_TYPE_AUDIO:
            kind = "[Audio]";
            break;
        default:
            kind = "[Other]";
            break;
        }
        int n = snprintf(info + off, sizeof(info) - off, "%s%s[%10s]\n",
                         side, kind, c_temp->name);
        if (n < 0)
            break;
        if ((size_t)n >= sizeof(info) - off) {
            /* truncated: buffer full, stop appending */
            off = sizeof(info) - 1;
            break;
        }
        off += (size_t)n;
        c_temp = c_temp->next;
    }
    return (*env)->NewStringUTF(env, info);
}
/*
 * Class:     com_zyp_ffmpegandroiddemo_FFmpegNdk
 * Method:    playVideo
 * Signature: (Ljava/lang/String;Ljava/lang/Object;)I
 *
 * Decodes the video stream at `url` and renders every frame as RGBA onto
 * the given android.view.Surface. Blocks until the stream ends or an error
 * occurs. Returns 0 on success, -1 on any failure.
 */
JNIEXPORT jint JNICALL Java_com_zyp_ffmpegandroiddemo_FFmpegNdk_playVideo(JNIEnv *env, jclass clazz, jstring url, jobject surface) {
    /* Convert the Java string before logging: the original passed the raw
     * jstring to a %s specifier, which reads object memory as a C string. */
    char *url2 = jstringTostring(env, url);
    /* NOTE(review): jstringTostring comes from util.h; confirm who owns the
     * returned buffer and free url2 here if it is malloc'd. */
    LOGD("start playvideo... url : %s", url2);
    av_register_all();
    AVFormatContext *pFormatCtx = avformat_alloc_context();
    // Open video file
    if (avformat_open_input(&pFormatCtx, url2, NULL, NULL) != 0) {
        LOGE("Couldn't open file:%s\n", url2);
        return -1; // Couldn't open file
    }
    // Retrieve stream information
    if (avformat_find_stream_info(pFormatCtx, NULL) < 0) {
        LOGE("Couldn't find stream information.");
        avformat_close_input(&pFormatCtx);
        return -1;
    }
    // Find the first video stream
    int videoStream = -1, i;
    for (i = 0; i < pFormatCtx->nb_streams; i++) {
        if (pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO
                && videoStream < 0) {
            videoStream = i;
        }
    }
    if (videoStream == -1) {
        LOGE("Didn't find a video stream.");
        avformat_close_input(&pFormatCtx);
        return -1; // Didn't find a video stream
    }
    // Get a pointer to the codec context for the video stream
    AVCodecContext *pCodecCtx = pFormatCtx->streams[videoStream]->codec;
    // Find the decoder for the video stream
    AVCodec *pCodec = avcodec_find_decoder(pCodecCtx->codec_id);
    if (pCodec == NULL) {
        LOGE("Codec not found.");
        avformat_close_input(&pFormatCtx);
        return -1; // Codec not found
    }
    /* Open the codec exactly once (the original called avcodec_open2 a
     * second time after setting up the native window - redundant). */
    if (avcodec_open2(pCodecCtx, pCodec, NULL) < 0) {
        LOGE("Could not open codec.");
        avformat_close_input(&pFormatCtx);
        return -1; // Could not open codec
    }
    // Get the native window backing the Java Surface
    ANativeWindow *nativeWindow = ANativeWindow_fromSurface(env, surface);
    int videoWidth = pCodecCtx->width;
    int videoHeight = pCodecCtx->height;
    // Size the window buffer to the video; the compositor scales as needed
    ANativeWindow_setBuffersGeometry(nativeWindow, videoWidth, videoHeight, WINDOW_FORMAT_RGBA_8888);
    ANativeWindow_Buffer windowBuffer;
    // Allocate the decode frame and the RGBA frame used for rendering
    AVFrame *pFrame = av_frame_alloc();
    AVFrame *pFrameRGBA = av_frame_alloc();
    if (pFrameRGBA == NULL || pFrame == NULL) {
        LOGE("Could not allocate video frame.");
        ANativeWindow_release(nativeWindow);
        avcodec_close(pCodecCtx);
        avformat_close_input(&pFormatCtx);
        return -1;
    }
    // Buffer backing pFrameRGBA: one RGBA frame's worth of pixels
    int numBytes = av_image_get_buffer_size(AV_PIX_FMT_RGBA, pCodecCtx->width, pCodecCtx->height, 1);
    uint8_t *buffer = (uint8_t *)av_malloc(numBytes * sizeof(uint8_t));
    av_image_fill_arrays(pFrameRGBA->data, pFrameRGBA->linesize, buffer, AV_PIX_FMT_RGBA,
                         pCodecCtx->width, pCodecCtx->height, 1);
    /* Decoded frames are not RGBA; convert before rendering. Fix: the
     * original commented out SWS_BILINEAR, leaving sws_getContext one
     * argument short (it takes srcW,srcH,srcFmt,dstW,dstH,dstFmt,flags,
     * srcFilter,dstFilter,param). */
    struct SwsContext *sws_ctx = sws_getContext(pCodecCtx->width,
                                                pCodecCtx->height,
                                                pCodecCtx->pix_fmt,
                                                pCodecCtx->width,
                                                pCodecCtx->height,
                                                AV_PIX_FMT_RGBA,
                                                SWS_BILINEAR,
                                                NULL,
                                                NULL,
                                                NULL);
    int frameFinished;
    AVPacket packet;
    while (av_read_frame(pFormatCtx, &packet) >= 0) {
        // Only decode packets belonging to the selected video stream
        if (packet.stream_index == videoStream) {
            avcodec_decode_video2(pCodecCtx, pFrame, &frameFinished, &packet);
            // One packet does not always yield a complete frame
            if (frameFinished) {
                // Lock the native window buffer for direct pixel writes
                ANativeWindow_lock(nativeWindow, &windowBuffer, 0);
                // Convert the decoded frame to RGBA
                sws_scale(sws_ctx, (uint8_t const *const *)pFrame->data,
                          pFrame->linesize, 0, pCodecCtx->height,
                          pFrameRGBA->data, pFrameRGBA->linesize);
                // Window stride and frame stride differ; copy row by row
                uint8_t *dst = windowBuffer.bits;
                int dstStride = windowBuffer.stride * 4; // RGBA_8888 = 4 bytes/pixel
                uint8_t *src = (uint8_t *)(pFrameRGBA->data[0]);
                int srcStride = pFrameRGBA->linesize[0];
                int h;
                for (h = 0; h < videoHeight; h++) {
                    memcpy(dst + h * dstStride, src + h * srcStride, srcStride);
                }
                ANativeWindow_unlockAndPost(nativeWindow);
            }
        }
        av_packet_unref(&packet);
    }
    /* Release everything. Fixes vs the original: sws_ctx and the native
     * window were leaked, and frames were freed with av_free instead of
     * av_frame_free (which also releases the frame's internal buffers). */
    sws_freeContext(sws_ctx);
    av_free(buffer);
    av_frame_free(&pFrameRGBA);
    av_frame_free(&pFrame);
    // Close the codec
    avcodec_close(pCodecCtx);
    // Close the video file
    avformat_close_input(&pFormatCtx);
    ANativeWindow_release(nativeWindow);
    return 0;
}
#ifdef __cplusplus
}
#endif
#endif
19.創建Android.mk文件,代碼如下圖所示:
# Generate the JNI header first (run from app/src/main/java):
#   javah -d ../jni com.zyp.ffmpegandroiddemo.FFmpegNdk
LOCAL_PATH := $(call my-dir)
# Prebuilt FFmpeg library modules. Uncomment the six blocks below (and the
# matching LOCAL_SHARED_LIBRARIES line at the bottom) when shipping the
# individual FFmpeg .so files instead of the merged libffmpeg.so.
#include $(CLEAR_VARS)
#LOCAL_MODULE := avcodec
#LOCAL_SRC_FILES := libavcodec-57.so
#include $(PREBUILT_SHARED_LIBRARY)
#
#include $(CLEAR_VARS)
#LOCAL_MODULE := avfilter
#LOCAL_SRC_FILES := libavfilter-6.so
#include $(PREBUILT_SHARED_LIBRARY)
#
#include $(CLEAR_VARS)
#LOCAL_MODULE := avformat
#LOCAL_SRC_FILES := libavformat-57.so
#include $(PREBUILT_SHARED_LIBRARY)
#
#include $(CLEAR_VARS)
#LOCAL_MODULE := avutil
#LOCAL_SRC_FILES := libavutil-55.so
#include $(PREBUILT_SHARED_LIBRARY)
#
#include $(CLEAR_VARS)
#LOCAL_MODULE := swresample
#LOCAL_SRC_FILES := libswresample-2.so
#include $(PREBUILT_SHARED_LIBRARY)
#
#include $(CLEAR_VARS)
#LOCAL_MODULE := swscale
#LOCAL_SRC_FILES := libswscale-4.so
#include $(PREBUILT_SHARED_LIBRARY)
# Merged single-library setup: wrap the prebuilt libffmpeg.so as a module.
include $(CLEAR_VARS)
LOCAL_MODULE := libffmpeg
LOCAL_SRC_FILES := libffmpeg.so
include $(PREBUILT_SHARED_LIBRARY)
# The JNI wrapper compiled from ffmpeg_ndk.c; links against libffmpeg above
# plus the NDK's log/zlib/android (native window) system libraries.
include $(CLEAR_VARS)
LOCAL_MODULE := myffmpeg
LOCAL_SRC_FILES := ffmpeg_ndk.c
LOCAL_C_INCLUDES += $(LOCAL_PATH)/include/
LOCAL_LDLIBS := -llog -lz -landroid
#LOCAL_SHARED_LIBRARIES := avcodec avdevice avfilter avformat avutil postproc swresample swscale
LOCAL_SHARED_LIBRARIES := ffmpeg
include $(BUILD_SHARED_LIBRARY)
19.創建Application.mk文件,代碼如下圖所示:
# Build only the myffmpeg wrapper module defined in Android.mk.
APP_MODULES := myffmpeg
# Target ABIs; must match the architecture the FFmpeg libraries were
# built for (armv7 in the build scripts above).
APP_ABI := armeabi armeabi-v7a
# Minimum Android platform level for the generated libraries.
APP_PLATFORM := android-10
20.在jni目錄下執行ndk-build,會在jni/lib目錄下生成so動態庫
21.打開app目錄下的build.gradle文件,加入如下腳本:
android{
...
// Hand JNI builds off to ndk-build: an empty jni.srcDirs stops Gradle's
// deprecated NDK integration from compiling the C sources itself, and
// jniLibs points at the src/main/libs output directory of ndk-build.
sourceSets.main {
jni.srcDirs = []
res.srcDirs = ['src/main/res']
jniLibs.srcDirs = ['src/main/libs']
}
...
}
22.現在一切配置ok,代碼和腳本也寫完了,可以點擊run,運行app了,效果如下圖:
視頻地址是抓的快手上的_
23.這篇文章主要是根據以下兩篇技術文章,然后與自己的實踐綜合起來寫的,感謝。
手把手圖文并茂教你用Android Studio編譯FFmpeg庫并移植
Android最簡單的基于FFmpeg的例子(三)---編譯FFmpeg成一個SO庫
24.Demo下載地址