git clone https://git.ffmpeg.org/ffmpeg.git
這一步可能會花比較長的時間
因爲FFMPEG默認編譯出來的動態庫文件名的版本號在.so以後(例如「libavcodec.so.5.100.1」),可是android平臺不能識別這樣文件名,因此咱們須要修改FFMPEG生成的動態庫的文件名。
打開 configure 文件,找到:
SLIBNAME_WITH_MAJOR='$(SLIBNAME).$(LIBMAJOR)' LIB_INSTALL_EXTRA_CMD='$$(RANLIB)"$(LIBDIR)/$(LIBNAME)"' SLIB_INSTALL_NAME='$(SLIBNAME_WITH_VERSION)' SLIB_INSTALL_LINKS='$(SLIBNAME_WITH_MAJOR)$(SLIBNAME)'
修改成:
SLIBNAME_WITH_MAJOR='$(SLIBPREF)$(FULLNAME)-$(LIBMAJOR)$(SLIBSUF)' LIB_INSTALL_EXTRA_CMD='$$(RANLIB)"$(LIBDIR)/$(LIBNAME)"' SLIB_INSTALL_NAME='$(SLIBNAME_WITH_MAJOR)' SLIB_INSTALL_LINKS='$(SLIBNAME)'
#!/bin/sh
# Cross-compile FFmpeg as shared libraries for Android (armeabi, API 19).
# Paths to the NDK, the target sysroot and the prebuilt GCC toolchain.
NDK=/home/cent/Android/Sdk/ndk-bundle
SYSROOT=$NDK/platforms/android-19/arch-arm
TOOLCHAIN=$NDK/toolchains/arm-linux-androideabi-4.9/prebuilt/linux-x86_64

# Configure, then rebuild FFmpeg from a clean tree and install into $PREFIX.
build_android() {
    ./configure \
        --prefix=$PREFIX \
        --enable-shared \
        --disable-static \
        --disable-doc \
        --disable-ffmpeg \
        --disable-ffplay \
        --disable-ffprobe \
        --disable-ffserver \
        --disable-avdevice \
        --disable-symver \
        --cross-prefix=$TOOLCHAIN/bin/arm-linux-androideabi- \
        --target-os=linux \
        --arch=arm \
        --enable-cross-compile \
        --sysroot=$SYSROOT \
        --extra-cflags="-Os -fpic $ADDI_CFLAGS" \
        --extra-ldflags="$ADDI_LDFLAGS" \
        $ADDITIONAL_CONFIGURE_FLAG
    make clean
    make
    make install
}

CPU=arm
# Install the build products into ./android/arm
PREFIX=$(pwd)/android/$CPU
ADDI_CFLAGS="-marm"
build_android
# NOTE(review): the original passed --disable-doc twice; the duplicate has
# been removed (harmless, but noise).
執行上面的腳本編譯出咱們須要的動態庫
./build_android.sh
進入android/$CPU目錄能夠看到生成的動態庫和咱們須要的頭文件
. └── arm ├── include │ ├── libavcodec │ ├── libavfilter │ ├── libavformat │ ├── libavutil │ ├── libswresample │ └── libswscale └── lib ├── libavcodec-57.so ├── libavcodec.so -> libavcodec-57.so ├── libavfilter-6.so ├── libavfilter.so -> libavfilter-6.so ├── libavformat-57.so ├── libavformat.so -> libavformat-57.so ├── libavutil-55.so ├── libavutil.so -> libavutil-55.so ├── libswresample-2.so ├── libswresample.so -> libswresample-2.so ├── libswscale-4.so ├── libswscale.so -> libswscale-4.so └── pkgconfig
首先新建一個工程,而且勾選 Include C++ Support 便可獲得一個基於CMake的模板工程。目錄結構以下所示
. ├── app │ ├── app.iml │ ├── build │ │ ├── generated │ │ │ ├── res │ │ │ └── source │ │ ├── intermediates │ │ │ ├── blame │ │ │ ├── incremental │ │ │ ├── manifest │ │ │ ├── manifests │ │ │ ├── res │ │ │ ├── rs │ │ │ └── symbols │ │ └── outputs │ │ └── logs │ ├── build.gradle │ ├── CMakeLists.txt │ ├── CMakeLists.txt~ │ ├── libs │ │ ├── armeabi │ │ │ ├── libavcodec-57.so │ │ │ ├── libavfilter-6.so │ │ │ ├── libavformat-57.so │ │ │ ├── libavutil-55.so │ │ │ ├── libswresample-2.so │ │ │ └── libswscale-4.so │ │ └── include │ │ ├── libavcodec │ │ ├── libavfilter │ │ ├── libavformat │ │ ├── libavutil │ │ ├── libswresample │ │ └── libswscale │ ├── proguard-rules.pro │ └── src │ ├── androidTest │ │ └── java │ ├── main │ │ ├── AndroidManifest.xml │ │ ├── cpp │ │ ├── java │ │ └── res │ └── test │ └── java ├── build │ ├── android-profile │ │ └── profile-2017-03-31-23-04-31-347.rawproto │ └── generated │ └── mockable-android-25.jar ├── build.gradle ├── FFMPEGTest.iml ├── gradle │ └── wrapper │ ├── gradle-wrapper.jar │ └── gradle-wrapper.properties ├── gradle.properties ├── gradlew ├── gradlew.bat ├── local.properties └── settings.gradle
而後將上面編譯FFMPEG生成的頭文件和動態庫拷貝到app/libs目錄下,拷貝完後的目錄結構以下所示
├── app │ ├── libs │ │ ├── armeabi │ │ │ ├── libavcodec-57.so │ │ │ ├── libavfilter-6.so │ │ │ ├── libavformat-57.so │ │ │ ├── libavutil-55.so │ │ │ ├── libswresample-2.so │ │ │ └── libswscale-4.so │ │ └── include │ │ ├── libavcodec │ │ ├── libavfilter │ │ ├── libavformat │ │ ├── libavutil │ │ ├── libswresample │ │ └── libswscale │ ├── proguard-rules.pro │ └── src
這樣還沒完,我當時就是這樣直接去編譯,而後就踩了一個大坑,APP啓動以後一直crash,緣由就是沒有找到咱們在java文件裏load的動態庫。爲何呢?緣由是在編譯的時候,咱們根本沒有將咱們的動態庫打包到APP中,咱們還須要修改app/build.gradle將咱們放在libs目錄下的動態庫打包到APP中去
apply plugin: 'com.android.application'

android {
    compileSdkVersion 25
    buildToolsVersion "25.0.2"

    defaultConfig {
        applicationId "com.example.cent.ffmpegtest"
        minSdkVersion 15
        targetSdkVersion 25
        versionCode 1
        versionName "1.0"
        testInstrumentationRunner "android.support.test.runner.AndroidJUnitRunner"

        externalNativeBuild {
            cmake {
                cppFlags "-frtti -fexceptions"
            }
        }

        // Only armeabi .so files were copied into libs/, so restrict the
        // native build to that ABI; without this, Gradle tries to build and
        // link every ABI and fails for the ones with no FFmpeg libraries.
        // NOTE(review): the original nested this ndk{} block inside
        // externalNativeBuild{}; abiFilters belongs directly under
        // defaultConfig.
        ndk {
            abiFilters "armeabi"
        }
    }

    // Package the prebuilt FFmpeg .so files under libs/ into the APK as
    // jniLibs; without this the app crashes at startup with
    // UnsatisfiedLinkError. (Moved out of defaultConfig: sourceSets is an
    // android-block property.)
    sourceSets {
        main {
            jniLibs.srcDirs = ['libs']
        }
    }

    buildTypes {
        release {
            minifyEnabled false
            proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro'
        }
    }

    externalNativeBuild {
        cmake {
            path "CMakeLists.txt"
        }
    }
}

dependencies {
    compile fileTree(dir: 'libs', include: ['*.jar'])
    androidTestCompile('com.android.support.test.espresso:espresso-core:2.2.2', {
        exclude group: 'com.android.support', module: 'support-annotations'
    })
    compile 'com.android.support.constraint:constraint-layout:1.0.1'
    testCompile 'junit:junit:4.12'
}
緊接着咱們還要指定abiFilters,由於AndroidStudio默認會編譯全部架構的動態庫,可是在本次例子中,咱們實際上只拷貝了
├── libs
│ │ ├── armeabi
架構(目錄名)的動態庫,因此咱們須要指定一個abiFilters來過濾一下,不然會出現編譯錯誤。
externalNativeBuild { cmake { cppFlags "-frtti -fexceptions" } ndk{ abiFilters "armeabi" } } }
緊接着就是來編寫咱們的CMakeLists.txt文件來編譯咱們的動態庫和native源文件了
cmake_minimum_required(VERSION 3.4.1)

# Directory holding the prebuilt FFmpeg libraries and headers.
# Anchored on CMAKE_SOURCE_DIR (app/) instead of the original
# build-dir-relative "../../../../libs", which silently breaks whenever the
# build-tree layout changes.
set(distribution_DIR ${CMAKE_SOURCE_DIR}/libs)

# Android log library for __android_log_print.
find_library(log-lib log)

# Our JNI glue library built from native-lib.cpp.
add_library(native-lib SHARED src/main/cpp/native-lib.cpp)

# Declare one prebuilt FFmpeg .so as an IMPORTED shared library target.
#   name: target name, matching the file lib<name>.so under libs/armeabi.
# Replaces six identical add_library/set_target_properties pairs.
function(add_ffmpeg_lib name)
  add_library(${name} SHARED IMPORTED)
  set_target_properties(${name} PROPERTIES
    IMPORTED_LOCATION "${distribution_DIR}/armeabi/lib${name}.so")
endfunction()

add_ffmpeg_lib(avcodec-57)
add_ffmpeg_lib(avfilter-6)
add_ffmpeg_lib(avformat-57)
add_ffmpeg_lib(avutil-55)
add_ffmpeg_lib(swresample-2)
add_ffmpeg_lib(swscale-4)

# FFmpeg headers; scoped to native-lib rather than the directory-wide
# include_directories() of the original.
target_include_directories(native-lib PRIVATE ${distribution_DIR}/include)

target_link_libraries(native-lib
  PRIVATE
    avcodec-57
    avfilter-6
    avformat-57
    avutil-55
    swresample-2
    swscale-4
    ${log-lib})
這樣基本上就大功告成了。
下面咱們將經過一個小例子來看一下怎樣使用FFMPEG。使用FFMPEG進行視頻解碼(音頻和視頻很類似)的通常流程以下圖所示
首先須要在JAVA文件中加載咱們須要的動態庫
//MainActivity.java public class MainActivity extends Activity { // Used to load the 'native-lib' library on application startup. static { System.loadLibrary("native-lib"); System.loadLibrary("avcodec-57"); System.loadLibrary("avfilter-6"); System.loadLibrary("avformat-57"); System.loadLibrary("avutil-55"); System.loadLibrary("swresample-2"); System.loadLibrary("swscale-4"); } @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_main); // Example of a call to a native method TextView tv = (TextView) findViewById(R.id.sample_text); tv.setText(stringFromJNI()); String input = new File(Environment.getExternalStorageDirectory(),"input.mp4").getAbsolutePath(); String output = new File(Environment.getExternalStorageDirectory(),"output_yuv420p.yuv").getAbsolutePath(); decode(input, output); } /** * A native method that is implemented by the 'native-lib' native library, * which is packaged with this application. */ public native String stringFromJNI(); public native static void decode(String input,String output); }
而後在native代碼中實現主要邏輯
//native-lib.cpp #include <jni.h> #include <string> #include <android/log.h> extern "C" { //編碼 #include "libavcodec/avcodec.h" //封裝格式處理 #include "libavformat/avformat.h" //像素處理 #include "libswscale/swscale.h" } #define FFLOGI(FORMAT,...) __android_log_print(ANDROID_LOG_INFO,"ffmpeg",FORMAT,##__VA_ARGS__); #define FFLOGE(FORMAT,...) __android_log_print(ANDROID_LOG_ERROR,"ffmpeg",FORMAT,##__VA_ARGS__); extern "C" JNIEXPORT jstring JNICALL Java_com_example_cent_ffmpegtest_MainActivity_stringFromJNI( JNIEnv *env, jobject /* this */) { std::string hello = "Hello from C++"; return env->NewStringUTF(hello.c_str()); } extern "C" JNIEXPORT void JNICALL Java_com_example_cent_ffmpegtest_MainActivity_decode(JNIEnv *env, jclass type, jstring input_, jstring output_) { //獲取輸入輸出文件名 const char *input = env->GetStringUTFChars(input_, 0); const char *output = env->GetStringUTFChars(output_, 0); //1.註冊全部組件 av_register_all(); //封裝格式上下文,統領全局的結構體,保存了視頻文件封裝格式的相關信息 AVFormatContext *pFormatCtx = avformat_alloc_context(); //2.打開輸入視頻文件 if (avformat_open_input(&pFormatCtx, input, NULL, NULL) != 0) { FFLOGE("%s","沒法打開輸入視頻文件"); return; } //3.獲取視頻文件信息 if (avformat_find_stream_info(pFormatCtx,NULL) < 0) { FFLOGE("%s","沒法獲取視頻文件信息"); return; } //獲取視頻流的索引位置 //遍歷全部類型的流(音頻流、視頻流、字幕流),找到視頻流 int v_stream_idx = -1; int i = 0; //number of streams for (; i < pFormatCtx->nb_streams; i++) { //流的類型 if (pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO) { v_stream_idx = i; break; } } if (v_stream_idx == -1) { FFLOGE("%s","找不到視頻流\n"); return; } //只有知道視頻的編碼方式,纔可以根據編碼方式去找到解碼器 //獲取視頻流中的編解碼上下文 AVCodecContext *pCodecCtx = pFormatCtx->streams[v_stream_idx]->codec; //4.根據編解碼上下文中的編碼id查找對應的解碼 AVCodec *pCodec = avcodec_find_decoder(pCodecCtx->codec_id); if (pCodec == NULL) { FFLOGE("%s","找不到解碼器\n"); return; } //5.打開解碼器 if (avcodec_open2(pCodecCtx,pCodec,NULL)<0) { FFLOGE("%s","解碼器沒法打開\n"); return; } //輸出視頻信息 FFLOGI("視頻的文件格式:%s",pFormatCtx->iformat->name); FFLOGI("視頻時長:%d", (pFormatCtx->duration)/1000000); 
FFLOGI("視頻的寬高:%d,%d",pCodecCtx->width,pCodecCtx->height); FFLOGI("解碼器的名稱:%s",pCodec->name); //準備讀取 //AVPacket用於存儲一幀一幀的壓縮數據(H264) //緩衝區,開闢空間 AVPacket *packet = (AVPacket*)av_malloc(sizeof(AVPacket)); //AVFrame用於存儲解碼後的像素數據(YUV) //內存分配 AVFrame *pFrame = av_frame_alloc(); //YUV420 AVFrame *pFrameYUV = av_frame_alloc(); //只有指定了AVFrame的像素格式、畫面大小才能真正分配內存 //緩衝區分配內存 uint8_t *out_buffer = (uint8_t *)av_malloc(avpicture_get_size(AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height)); //初始化緩衝區 avpicture_fill((AVPicture *)pFrameYUV, out_buffer, AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height); //用於轉碼(縮放)的參數,轉以前的寬高,轉以後的寬高,格式等 struct SwsContext *sws_ctx = sws_getContext(pCodecCtx->width,pCodecCtx->height,pCodecCtx->pix_fmt, pCodecCtx->width, pCodecCtx->height, AV_PIX_FMT_YUV420P, SWS_BICUBIC, NULL, NULL, NULL); int got_picture, ret; FILE *fp_yuv = fopen(output, "wb+"); int frame_count = 0; //6.一幀一幀的讀取壓縮數據 while (av_read_frame(pFormatCtx, packet) >= 0) { //只要視頻壓縮數據(根據流的索引位置判斷) if (packet->stream_index == v_stream_idx) { //7.解碼一幀視頻壓縮數據,獲得視頻像素數據 ret = avcodec_decode_video2(pCodecCtx, pFrame, &got_picture, packet); if (ret < 0) { FFLOGE("%s","解碼錯誤"); return; } //爲0說明解碼完成,非0正在解碼 if (got_picture) { //AVFrame轉爲像素格式YUV420,寬高 //2 6輸入、輸出數據 //3 7輸入、輸出畫面一行的數據的大小 AVFrame 轉換是一行一行轉換的 //4 輸入數據第一列要轉碼的位置 從0開始 //5 輸入畫面的高度 sws_scale(sws_ctx, pFrame->data, pFrame->linesize, 0, pCodecCtx->height, pFrameYUV->data, pFrameYUV->linesize); //輸出到YUV文件 //AVFrame像素幀寫入文件 //data解碼後的圖像像素數據(音頻採樣數據) //Y 亮度 UV 色度(壓縮了) 人對亮度更加敏感 //U V 個數是Y的1/4 int y_size = pCodecCtx->width * pCodecCtx->height; fwrite(pFrameYUV->data[0], 1, y_size, fp_yuv); fwrite(pFrameYUV->data[1], 1, y_size / 4, fp_yuv); fwrite(pFrameYUV->data[2], 1, y_size / 4, fp_yuv); frame_count++; FFLOGI("解碼第%d幀",frame_count); } } //釋放資源 av_free_packet(packet); } fclose(fp_yuv); av_frame_free(&pFrame); avcodec_close(pCodecCtx); avformat_free_context(pFormatCtx); env->ReleaseStringUTFChars(input_, input); env->ReleaseStringUTFChars(output_, output); }
記得在Manifest文件中添加須要的權限
<uses-permission android:name="android.permission.READ_EXTERNAL_STORAGE"/> <uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE"/>
最後簡單瞭解一下FFMPEG中使用的幾個主要數據結構的做用