Part 4: Porting FFmpeg to Android, with a demo (source code included)


1. Create a new Android project

(1) Create a new project and choose the Native C++ template.

2. Configure the Android project

 

(1) Put the compiled .so files and the include headers into the libs directory.

For how to build FFmpeg for Android, please see the previous post: Part 3, Building FFmpeg for Android on Mac.

(2) Edit CMakeLists.txt; the full file is listed below.

# For more information about using CMake with Android Studio, read the
# documentation: https://d.android.com/studio/projects/add-native-code.html

# Sets the minimum version of CMake required to build the native library.

cmake_minimum_required(VERSION 3.4.1)

# Define a variable pointing at the prebuilt FFmpeg libraries.
set(distribution_DIR ../../../../libs)

# Add our own library.
# Name: native-lib
# Type: SHARED builds a dynamic library (.so); STATIC would build a static library (.a).
# Source file: src/main/cpp/native-lib.cpp
add_library( native-lib
        SHARED
        native-lib.cpp )

# Add an external (prebuilt) library.
# Name: avcodec (the lib prefix is omitted).
# Type: SHARED dynamic library (.so).
# IMPORTED marks it as a prebuilt library brought in from outside this build.
add_library( avcodec
        SHARED
        IMPORTED)
# Set target properties.
# IMPORTED_LOCATION tells CMake where the prebuilt avcodec library is located.
# Other properties can be set the same way: PROPERTIES key value
set_target_properties( avcodec
        PROPERTIES IMPORTED_LOCATION
        ${distribution_DIR}/${ANDROID_ABI}/libavcodec.so)


# Locate the NDK log library; its full path is stored in the variable log-lib.
find_library(
        log-lib
        log)
add_library( avfilter
        SHARED
        IMPORTED)
set_target_properties( avfilter
        PROPERTIES IMPORTED_LOCATION
        ${distribution_DIR}/${ANDROID_ABI}/libavfilter.so)

add_library( avformat
        SHARED
        IMPORTED)
set_target_properties( avformat
        PROPERTIES IMPORTED_LOCATION
        ${distribution_DIR}/${ANDROID_ABI}/libavformat.so)

add_library( avutil
        SHARED
        IMPORTED)
set_target_properties( avutil
        PROPERTIES IMPORTED_LOCATION
        ${distribution_DIR}/${ANDROID_ABI}/libavutil.so)

add_library( swresample
        SHARED
        IMPORTED)
set_target_properties( swresample
        PROPERTIES IMPORTED_LOCATION
        ${distribution_DIR}/${ANDROID_ABI}/libswresample.so)

add_library( swscale
        SHARED
        IMPORTED)
set_target_properties( swscale
        PROPERTIES IMPORTED_LOCATION
        ${distribution_DIR}/${ANDROID_ABI}/libswscale.so)

# Add the FFmpeg header directory to the include path.
include_directories(../../../libs/include)

# Tell the linker which libraries native-lib depends on:
# the FFmpeg libraries (avcodec, avfilter, ...), libandroid and the Android log library.
target_link_libraries( native-lib
        avcodec
        avfilter
        avformat
        avutil
        swresample
        swscale
        -landroid
        ${log-lib} )

Note: why does set(distribution_DIR ../../../../libs) climb four directory levels? Because the path is resolved relative to the CMake build directory, such as ffmpegDemo/app/.externalNativeBuild/cmake/debug/armeabi-v7a, so ../../../../libs ends up pointing at ffmpegDemo/app/libs.

include_directories(../../../libs/include) is resolved relative to the directory containing CMakeLists.txt, ffmpegDemo/app/src/main/cpp, so it points at ffmpegDemo/app/libs/include.

 
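Putting these two paths together, the project layout assumed in this post is roughly the following (the ABI subdirectory matches the abiFilters setting in build.gradle below):

ffmpegDemo/app/
├── libs/
│   ├── include/            # FFmpeg headers (libavcodec/, libavformat/, ...)
│   └── armeabi-v7a/        # libavcodec.so, libavformat.so, libavutil.so, ...
└── src/main/cpp/
    ├── CMakeLists.txt
    └── native-lib.cpp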

(3) Edit build.gradle; the full file is listed below.

apply plugin: 'com.android.application'

android {
    compileSdkVersion 29
    buildToolsVersion "29.0.3"

    defaultConfig {
        applicationId "com.gui.ffmpegdemo"
        minSdkVersion 19
        targetSdkVersion 29
        versionCode 1
        versionName "1.0"
        multiDexEnabled true
        testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner"

        externalNativeBuild {
            cmake {
//                cppFlags ""
                cppFlags "-frtti -fexceptions -Wno-deprecated-declarations"
//                abiFilters 'armeabi'//, 'x86', 'armeabi-v7a', 'x86_64', 'arm64-v8a'

            }
            ndk{
                abiFilters "armeabi-v7a"
            }
        }
        sourceSets {
            main {
                jniLibs.srcDirs = ['libs']
            }
        }
    }

    buildTypes {
        release {
            minifyEnabled false
            proguardFiles getDefaultProguardFile('proguard-android-optimize.txt'), 'proguard-rules.pro'
        }
    }

    externalNativeBuild {
        cmake {
            path "src/main/cpp/CMakeLists.txt"
//            version "3.10.2"
//            path "CMakeLists.txt"

        }
    }
}

dependencies {
    implementation fileTree(dir: 'libs', include: ['*.jar'])
    implementation 'com.tencent.bugly:crashreport:latest.release'
    implementation 'pub.devrel:easypermissions:0.2.1'
//    implementation 'androidx.appcompat:appcompat:1.1.0'
    implementation 'androidx.constraintlayout:constraintlayout:1.1.3'
    testImplementation 'junit:junit:4.12'
    androidTestImplementation 'androidx.test.ext:junit:1.1.1'
    androidTestImplementation 'androidx.test.espresso:espresso-core:3.2.0'
}

(4) Edit native-lib.cpp; the full file is listed below.

#include <jni.h>
#include <string>

extern "C"
{

#include <android/native_window_jni.h>
#include <libavfilter/avfilter.h>
#include <libavcodec/avcodec.h>
// container (demuxing/muxing) handling
#include <libavformat/avformat.h>
// pixel format conversion and scaling
#include <libswscale/swscale.h>
#include <unistd.h>

JNIEXPORT void JNICALL
Java_com_gui_ffmpegdemo_FFVideoPlayer_render(JNIEnv *env, jobject instance, jstring url_,
                                                 jobject surface) {
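    // Decode the video stream at `url` and draw each frame into the given Surface.
    // This call blocks until the whole stream has been rendered.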
    const char *url = env->GetStringUTFChars(url_, 0);

    // Register all muxers, demuxers and codecs.
    av_register_all();
    // Open the url and read its header; avFormatContext is the demuxing context.
    AVFormatContext *avFormatContext = avformat_alloc_context();
    avformat_open_input(&avFormatContext, url, NULL, NULL);
    avformat_find_stream_info(avFormatContext, NULL);

    // Find the index of the video stream.
    int video_index = -1;
    for (int i = 0; i < avFormatContext->nb_streams; ++i) {
        if (avFormatContext->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO) {
            video_index = i;
        }
    }
    // Decode, convert and draw.
    // Get the decoder context of the video stream.
    AVCodecContext *avCodecContext = avFormatContext->streams[video_index]->codec;
    // Find the matching decoder.
    AVCodec *avCodec = avcodec_find_decoder(avCodecContext->codec_id);
    // Open the decoder.
    if (avcodec_open2(avCodecContext, avCodec, NULL) < 0) {
        // Failed to open the decoder.
        return;
    }
    // Allocate an AVPacket and two AVFrames.
    // AVPacket holds the compressed data read from the stream plus metadata such as
    // pts, dts, duration and the stream index; AVFrame holds decoded (raw) data.
    AVPacket *avPacket = (AVPacket *) av_malloc(sizeof(AVPacket));
    av_init_packet(avPacket);
    // Frame that receives the decoded picture.
    AVFrame *avFrame = av_frame_alloc();
    // Frame that will point to the RGBA-converted picture.
    AVFrame *rgb_frame = av_frame_alloc();
    // rgb_frame needs its own pixel buffer, allocated here.
    uint8_t *out_buffer = (uint8_t *) av_malloc(
            avpicture_get_size(AV_PIX_FMT_RGBA, avCodecContext->width, avCodecContext->height));
    // Attach the buffer to rgb_frame's data pointers.
    avpicture_fill((AVPicture *) rgb_frame, out_buffer, AV_PIX_FMT_RGBA, avCodecContext->width,
                   avCodecContext->height);
    // Native drawing needs an ANativeWindow obtained from the Java Surface.
    ANativeWindow *pANativeWindow = ANativeWindow_fromSurface(env, surface);
    if (pANativeWindow == 0) {
        // Failed to get the native window.
        return;
    }
    SwsContext *swsContext = sws_getContext(
            avCodecContext->width,
            avCodecContext->height,
            avCodecContext->pix_fmt,
            avCodecContext->width,
            avCodecContext->height,
            AV_PIX_FMT_RGBA,
            SWS_BICUBIC,
            NULL,
            NULL,
            NULL);
    // Window buffer that receives the RGBA pixels of each frame.
    ANativeWindow_Buffer native_outBuffer;
    // Start reading and decoding packets.
    int frameCount;
    while (av_read_frame(avFormatContext, avPacket) >= 0) {
        if (avPacket->stream_index == video_index) {
            avcodec_decode_video2(avCodecContext, avFrame, &frameCount, avPacket);
            // Once a frame has been decoded, convert it to RGBA and draw it.
            if (frameCount) {
                ANativeWindow_setBuffersGeometry(pANativeWindow, avCodecContext->width,
                                                 avCodecContext->height, WINDOW_FORMAT_RGBA_8888);
                // Lock the window buffer.
                ANativeWindow_lock(pANativeWindow, &native_outBuffer, NULL);
                // Convert the decoded frame to RGBA.
                sws_scale(swsContext, (const uint8_t *const *) avFrame->data, avFrame->linesize, 0,
                          avFrame->height, rgb_frame->data, rgb_frame->linesize);
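                // Copy the RGBA frame into the window buffer line by line, because the
                // window stride may differ from the frame's linesize.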
                uint8_t *dst = (uint8_t *) native_outBuffer.bits;
                int destStride = native_outBuffer.stride * 4;
                uint8_t *src = rgb_frame->data[0];
                int srcStride = rgb_frame->linesize[0];
                for (int i = 0; i < avCodecContext->height; ++i) {
                    memcpy(dst + i * destStride, src + i * srcStride, srcStride);
                }
                ANativeWindow_unlockAndPost(pANativeWindow);
//                usleep(1000 * 16);
            }
        }
        av_free_packet(avPacket);

    }

    ANativeWindow_release(pANativeWindow);
    av_frame_free(&avFrame);
    av_frame_free(&rgb_frame);
    // Also free the buffers and contexts allocated above.
    av_free(out_buffer);
    av_free(avPacket);
    sws_freeContext(swsContext);
    avcodec_close(avCodecContext);
    avformat_close_input(&avFormatContext);


    env->ReleaseStringUTFChars(url_, url);
}


JNIEXPORT jstring JNICALL
Java_com_gui_ffmpegdemo_MainActivity_stringFromJNI(
        JNIEnv *env,
        jobject /* this */) {
    std::string hello = "Hello from C++";
    return env->NewStringUTF(hello.c_str());
}

JNIEXPORT jstring JNICALL
Java_com_gui_ffmpegdemo_MainActivity_urlprotocolinfo(JNIEnv *env, jobject instance) {
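    // Enumerate the input and output protocols compiled into this FFmpeg build.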
    char info[40000] = {0};
    av_register_all();
    struct URLProtocol *pup = NULL;
    struct URLProtocol **p_temp = &pup;
    avio_enum_protocols((void **) p_temp, 0);
    while ((*p_temp) != NULL) {
        sprintf(info, "%sInput: %s\n", info, avio_enum_protocols((void **) p_temp, 0));
    }
    pup = NULL;
    avio_enum_protocols((void **) p_temp, 1);
    while ((*p_temp) != NULL) {
        sprintf(info, "%sOutput: %s\n", info, avio_enum_protocols((void **) p_temp, 1));
    }

    return env->NewStringUTF(info);
}

JNIEXPORT jstring JNICALL
Java_com_gui_ffmpegdemo_MainActivity_avformatinfo(JNIEnv *env, jobject instance) {
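    // List the registered demuxers (input formats) and muxers (output formats).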

    char info[40000] = {0};

    av_register_all();

    AVInputFormat *if_temp = av_iformat_next(NULL);
    AVOutputFormat *of_temp = av_oformat_next(NULL);
    while (if_temp != NULL) {
        sprintf(info, "%sInput: %s\n", info, if_temp->name);
        if_temp = if_temp->next;
    }
    while (of_temp != NULL) {
        sprintf(info, "%sOutput: %s\n", info, of_temp->name);
        of_temp = of_temp->next;
    }
    return env->NewStringUTF(info);
}

JNIEXPORT jstring JNICALL
Java_com_gui_ffmpegdemo_MainActivity_avcodecinfo(JNIEnv *env, jobject instance) {
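    // List every registered codec together with its media type and whether it decodes or encodes.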
    char info[40000] = {0};

    av_register_all();

    AVCodec *c_temp = av_codec_next(NULL);

    while (c_temp != NULL) {
        if (c_temp->decode != NULL) {
            sprintf(info, "%sdecode:", info);
        } else {
            sprintf(info, "%sencode:", info);
        }
        switch (c_temp->type) {
            case AVMEDIA_TYPE_VIDEO:
                sprintf(info, "%s(video):", info);
                break;
            case AVMEDIA_TYPE_AUDIO:
                sprintf(info, "%s(audio):", info);
                break;
            default:
                sprintf(info, "%s(other):", info);
                break;
        }
        sprintf(info, "%s[%10s]\n", info, c_temp->name);
        c_temp = c_temp->next;
    }

    return env->NewStringUTF(info);
}

JNIEXPORT jstring JNICALL
Java_com_gui_ffmpegdemo_MainActivity_avfilterinfo(JNIEnv *env, jobject instance) {
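    // List all registered filters.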
    char info[40000] = {0};
    avfilter_register_all();

    AVFilter *f_temp = (AVFilter *) avfilter_next(NULL);
    while (f_temp != NULL) {
        sprintf(info, "%s%s\n", info, f_temp->name);
        f_temp = f_temp->next;
    }
    return env->NewStringUTF(info);
}
}
 

3. Implement video playback

MainActivity loads the native library, wires up the demo buttons and starts playback; its code is listed below.

package com.gui.ffmpegdemo;


import android.Manifest;
import android.app.Activity;
import android.os.Bundle;
import android.util.Log;
import android.widget.TextView;

import android.view.View;

import java.util.List;

import pub.devrel.easypermissions.EasyPermissions;

public class MainActivity extends Activity implements View.OnClickListener, EasyPermissions.PermissionCallbacks {
    // Used to load the 'native-lib' library on application startup.
    static {
        System.loadLibrary("native-lib");
    }
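    // NOTE: on some older Android versions the system linker may not resolve
    // native-lib's FFmpeg dependencies on its own and throws an UnsatisfiedLinkError
    // mentioning libavcodec.so or similar. If that happens, load the FFmpeg
    // libraries explicitly before "native-lib", lowest-level first:
    //   System.loadLibrary("avutil");
    //   System.loadLibrary("swresample");
    //   System.loadLibrary("avcodec");
    //   System.loadLibrary("avformat");
    //   System.loadLibrary("swscale");
    //   System.loadLibrary("avfilter");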


    TextView tvInfo;
    private FFVideoPlayer mPlayer;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        findViewById(R.id.btn_protocol).setOnClickListener(this);
        findViewById(R.id.btn_codec).setOnClickListener(this);
        findViewById(R.id.btn_filter).setOnClickListener(this);
        findViewById(R.id.btn_format).setOnClickListener(this);
        findViewById(R.id.btn_player).setOnClickListener(this);
        mPlayer = findViewById(R.id.ffVideoPlayer);

        tvInfo = findViewById(R.id.tv_info);

        // Request the runtime permissions the demo needs.
        initEasypermissions();
    }


    @Override
    public void onClick(View view) {
        switch (view.getId()) {
            case R.id.btn_protocol:
                tvInfo.setText(urlprotocolinfo());
                break;
            case R.id.btn_format:
                tvInfo.setText(avformatinfo());
                break;
            case R.id.btn_codec:
                tvInfo.setText(avcodecinfo());
                break;
            case R.id.btn_filter:
                tvInfo.setText(avfilterinfo());
                break;
            case R.id.btn_player:
                mPlayer.play("http://clips.vorwaerts-gmbh.de/big_buck_bunny.mp4");
                break;
            default:
                break;
        }
    }

    public native String stringFromJNI();

    public native String urlprotocolinfo();

    public native String avformatinfo();

    public native String avcodecinfo();

    public native String avfilterinfo();




    private void initEasypermissions() {
        String[] permissions = {
                Manifest.permission.SYSTEM_ALERT_WINDOW,
                Manifest.permission.CAMERA,
                Manifest.permission.WRITE_SETTINGS,
                Manifest.permission.READ_PHONE_STATE,
                Manifest.permission.WRITE_EXTERNAL_STORAGE,
                Manifest.permission.READ_EXTERNAL_STORAGE,
                Manifest.permission.VIBRATE,
                Manifest.permission.INTERNET,
                Manifest.permission.ACCESS_NETWORK_STATE,
                Manifest.permission.ACCESS_WIFI_STATE,
                Manifest.permission.ACCESS_COARSE_LOCATION,
                Manifest.permission.ACCESS_FINE_LOCATION
        };

        if (!EasyPermissions.hasPermissions(this, permissions)) {
            Log.e("Lyz", "..........requestPermissions................");
            EasyPermissions.requestPermissions(this, "Tap OK to grant the permissions the app needs", 101, permissions);
        }
    }

    // Granted
    @Override
    public void onPermissionsGranted(int requestCode, List<String> perms) {
        if (perms != null && perms.size() > 0) {
            for (int i = 0; i < perms.size(); i++) {
                Log.e("Lyz", "...ok...perms......." + i + "....." + perms.get(i));
            }
            Log.e("Lyz", "..........onPermissionsGranted................");
            if (EasyPermissions.hasPermissions(MainActivity.this, Manifest.permission.READ_EXTERNAL_STORAGE)) {

            }
        }
    }

    // Denied
    @Override
    public void onPermissionsDenied(int requestCode, List<String> perms) {
        if (perms != null && perms.size() > 0) {
            for (int i = 0; i < perms.size(); i++) {
                Log.e("Lyz", "..failed....perms......." + i + "....." + perms.get(i));
            }
            Log.e("Lyz", "..........onPermissionsDenied................");
        }
    }
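
    // Forward the result of the system permission dialog to EasyPermissions so that
    // onPermissionsGranted / onPermissionsDenied above are actually invoked.
    @Override
    public void onRequestPermissionsResult(int requestCode, String[] permissions, int[] grantResults) {
        super.onRequestPermissionsResult(requestCode, permissions, grantResults);
        EasyPermissions.onRequestPermissionsResult(requestCode, permissions, grantResults, this);
    }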

}
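
MainActivity references an FFVideoPlayer view (R.id.ffVideoPlayer) whose source is not listed in this post. Below is a minimal sketch of what such a class could look like, assuming it is a SurfaceView that hands its Surface to the native render() function implemented in native-lib.cpp; since render() blocks until the whole stream has been decoded and drawn, the sketch calls it on a background thread. Two further setup assumptions: the manifest needs android.permission.INTERNET, and with targetSdkVersion 29 a cleartext http:// URL such as the sample video is blocked by default unless cleartext traffic is explicitly allowed (for example with android:usesCleartextTraffic="true").

package com.gui.ffmpegdemo;

import android.content.Context;
import android.util.AttributeSet;
import android.view.Surface;
import android.view.SurfaceHolder;
import android.view.SurfaceView;

public class FFVideoPlayer extends SurfaceView implements SurfaceHolder.Callback {

    public FFVideoPlayer(Context context, AttributeSet attrs) {
        super(context, attrs);
        // Receive callbacks for this view's Surface.
        getHolder().addCallback(this);
    }

    // Start playing the given url (local file path or network stream).
    public void play(final String url) {
        final Surface surface = getHolder().getSurface();
        // render() decodes and draws the whole stream before returning,
        // so it must not run on the UI thread.
        new Thread(new Runnable() {
            @Override
            public void run() {
                render(url, surface);
            }
        }).start();
    }

    @Override
    public void surfaceCreated(SurfaceHolder holder) {
    }

    @Override
    public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
    }

    @Override
    public void surfaceDestroyed(SurfaceHolder holder) {
    }

    // Implemented in native-lib.cpp as Java_com_gui_ffmpegdemo_FFVideoPlayer_render.
    public native void render(String url, Surface surface);
}

In activity_main.xml the view would then be declared as a com.gui.ffmpegdemo.FFVideoPlayer element with android:id="@+id/ffVideoPlayer", next to the buttons and the tv_info TextView.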

Demo download

 

4. Recommended projects

FFmpeg In Android

Here are a few good GitHub projects:

1. bravobit/FFmpeg-Android

This project is a continued fork of FFmpeg Android Java by WritingMinds. This fork fixes the CANNOT LINK EXECUTABLE ffmpeg: has text relocations issue on x86 devices along with some other bugfixes, new features and the newest FFmpeg builds.

2. xufuji456/FFmpegAndroid

An Android audio/video toolbox built on FFmpeg: audio cutting, splicing, transcoding, mixing and encoding/decoding; video cutting, watermarking, snapshots, transcoding, encoding/decoding, GIF conversion, picture stitching and reverse playback; muxing and demuxing of audio and video; decoding, A/V synchronization and playback; FFmpeg local streaming and live streaming over H264/RTMP; real-time filters with OpenGL + GPUImage; FFmpeg filters such as sketch, color balance, hue, LUT, blur and nine-grid; plus an IjkPlayer-based player supporting ultra-low-latency RTSP live streaming (about 130 ms for 1080p on a LAN), pause, mute and multi-screen casting.

3. wlanjie/AndroidFFmpeg

Captures the camera and microphone on Android and pushes the stream over RTMP.

 

If you run into problems during the build, feel free to join QQ group 937088785 to discuss!

 

Source: https://blog.csdn.net/huangpeigui/article/details/113106855