A simple FFmpeg-based video player on Android: playing video

2024-05-11 06:32

This article walks through a simple FFmpeg-based video player on Android that decodes a video file and renders the frames to a SurfaceView.

Rendering is done with OpenGL ES. FFmpeg itself is written in C, but I write the OpenGL code in C++, since I am not comfortable writing it in plain C.

Copy the generated .so files and the include folder into the project's app\libs folder.

CMakeLists.txt:

cmake_minimum_required(VERSION 3.4.1)

add_library( native-lib SHARED
             src/main/cpp/native-lib.cpp
             src/main/cpp/OpenGLUtils.cpp
             src/main/cpp/ShaderUtils.cpp
             src/main/cpp/EGLUtils.cpp )

find_library( log-lib log )

set(distribution_DIR ../../../../libs)
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=gnu++11")
set(CMAKE_VERBOSE_MAKEFILE on)

include_directories(libs/include)

add_library( avcodec-57 SHARED IMPORTED )
add_library( avdevice-57 SHARED IMPORTED )
add_library( avfilter-6 SHARED IMPORTED )
add_library( avformat-57 SHARED IMPORTED )
add_library( avutil-55 SHARED IMPORTED )
add_library( swresample-2 SHARED IMPORTED )
add_library( swscale-4 SHARED IMPORTED )

set_target_properties( avcodec-57 PROPERTIES IMPORTED_LOCATION ${distribution_DIR}/${ANDROID_ABI}/libavcodec-57.so )
set_target_properties( avdevice-57 PROPERTIES IMPORTED_LOCATION ${distribution_DIR}/${ANDROID_ABI}/libavdevice-57.so )
set_target_properties( avfilter-6 PROPERTIES IMPORTED_LOCATION ${distribution_DIR}/${ANDROID_ABI}/libavfilter-6.so )
set_target_properties( avformat-57 PROPERTIES IMPORTED_LOCATION ${distribution_DIR}/${ANDROID_ABI}/libavformat-57.so )
set_target_properties( avutil-55 PROPERTIES IMPORTED_LOCATION ${distribution_DIR}/${ANDROID_ABI}/libavutil-55.so )
set_target_properties( swresample-2 PROPERTIES IMPORTED_LOCATION ${distribution_DIR}/${ANDROID_ABI}/libswresample-2.so )
set_target_properties( swscale-4 PROPERTIES IMPORTED_LOCATION ${distribution_DIR}/${ANDROID_ABI}/libswscale-4.so )

target_link_libraries( native-lib
                       avcodec-57 avdevice-57 avfilter-6 avformat-57
                       avutil-55 swresample-2 swscale-4
                       ${log-lib} android EGL GLESv2 )
The android, EGL, and GLESv2 libraries are linked in as well; all of them are needed (ANativeWindow, EGL, and OpenGL ES 2.0 respectively).

Java code:

setContentView(R.layout.activity_main);
SurfaceView surfaceView = findViewById(R.id.surface_view);

surfaceView.getHolder().addCallback(new SurfaceHolder.Callback() {
    @Override
    public void surfaceCreated(SurfaceHolder holder) {
    }

    @Override
    public void surfaceChanged(final SurfaceHolder holder, int format, int width, int height) {
        Thread thread = new Thread() {
            @Override
            public void run() {
                super.run();
                String videoPath = "/storage/emulated/0/baiduNetdisk/season09.mp4";
                videoPlay(videoPath, holder.getSurface());
            }
        };
        thread.start();
    }

    @Override
    public void surfaceDestroyed(SurfaceHolder holder) {
    }
});
It is very simple; the main thing is declaring and calling the native method

public native void videoPlay(String path, Surface surface);

which does all of the work on the native side.
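That call lands in an exported JNI function. Here is a minimal sketch of what that function looks like (the package and class name com.example.videoplay.MainActivity are my assumption; they must match the Java class that declares videoPlay):

// Minimal sketch of the JNI entry point (not shown in the original article).
// The package/class name is hypothetical and must match the Java declaration.
#include <jni.h>

extern "C"
JNIEXPORT void JNICALL
Java_com_example_videoplay_MainActivity_videoPlay(JNIEnv *env, jobject instance,
                                                  jstring path_, jobject surface) {
    // ... the decode/render code listed under "C++ code" below goes here ...
}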

C++ code:

const char *path = env->GetStringUTFChars(path_, 0);

av_register_all();
AVFormatContext *fmt_ctx = avformat_alloc_context();
if (avformat_open_input(&fmt_ctx, path, NULL, NULL) < 0) {
    return;
}
if (avformat_find_stream_info(fmt_ctx, NULL) < 0) {
    return;
}

// Find the first video stream.
AVStream *avStream = NULL;
int video_stream_index = -1;
for (int i = 0; i < fmt_ctx->nb_streams; i++) {
    if (fmt_ctx->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) {
        avStream = fmt_ctx->streams[i];
        video_stream_index = i;
        break;
    }
}
if (video_stream_index == -1) {
    return;
}

// Open a decoder for the video stream.
AVCodecContext *codec_ctx = avcodec_alloc_context3(NULL);
avcodec_parameters_to_context(codec_ctx, avStream->codecpar);
AVCodec *avCodec = avcodec_find_decoder(codec_ctx->codec_id);
if (avcodec_open2(codec_ctx, avCodec, NULL) < 0) {
    return;
}

ANativeWindow *nativeWindow = ANativeWindow_fromSurface(env, surface);

AVFrame *yuvFrame = av_frame_alloc();

// Set up EGL on the SurfaceView's window and prepare the GL program/textures.
EGLUtils *eglUtils = new EGLUtils();
eglUtils->initEGL(nativeWindow);

OpenGLUtils *openGLUtils = new OpenGLUtils();
openGLUtils->surfaceCreated();
openGLUtils->surfaceChanged(eglUtils->getWidth(), eglUtils->getHeight());
openGLUtils->initTexture(codec_ctx->width, codec_ctx->height);

int y_size = codec_ctx->width * codec_ctx->height;
AVPacket *pkt = (AVPacket *) malloc(sizeof(AVPacket));
av_new_packet(pkt, y_size);
int ret;
// Read packets, decode them, and upload each decoded YUV frame to the GL textures.
while (1) {
    if (av_read_frame(fmt_ctx, pkt) < 0) {
        av_packet_unref(pkt);
        break;
    }
    if (pkt->stream_index == video_stream_index) {
        ret = avcodec_send_packet(codec_ctx, pkt);
        if (ret < 0 && ret != AVERROR(EAGAIN) && ret != AVERROR_EOF) {
            av_packet_unref(pkt);
            continue;
        }
        ret = avcodec_receive_frame(codec_ctx, yuvFrame);
        if (ret < 0 && ret != AVERROR_EOF) {
            av_packet_unref(pkt);
            continue;
        }
        openGLUtils->updateTexture(yuvFrame->width, yuvFrame->height,
                                   yuvFrame->data[0], yuvFrame->data[1], yuvFrame->data[2]);
        openGLUtils->surfaceDraw();
        eglUtils->drawEGL();

        av_packet_unref(pkt);
    }
    av_packet_unref(pkt);
}
av_frame_free(&yuvFrame);
avcodec_close(codec_ctx);
avformat_close_input(&fmt_ctx);

env->ReleaseStringUTFChars(path_, path);
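One thing the listing above skips is releasing the remaining resources once playback ends. A rough sketch of the extra cleanup, following the names used above (not part of the original code):

// Extra cleanup the article omits (sketch).
free(pkt);                            // the packet struct was malloc'ed manually
delete openGLUtils;                   // destructor calls release(): deletes program and textures
delete eglUtils;                      // destructor calls releaseEGL() and tears down the EGL context
ANativeWindow_release(nativeWindow);  // drop the reference taken by ANativeWindow_fromSurface()
avcodec_free_context(&codec_ctx);     // avcodec_close() alone does not free the context

Deleting the GL helper before the EGL helper matters: the textures and program have to be deleted while the EGL context is still current.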
The headers included:

#include <android/native_window.h>
#include <android/native_window_jni.h>
#include "EGLUtils.h"
#include "OpenGLUtils.h"
extern "C" {
#include "libavformat/avformat.h"
#include "libavfilter/avfiltergraph.h"
}

Because FFmpeg is written in C, its headers have to be wrapped in extern "C" when included from C++; otherwise the symbols won't match and you get link errors.

EGLUtils and OpenGLUtils are the OpenGL rendering helpers I wrote myself.

EGLUtils.h

#ifndef VIDEOPLAY_EGLUTILS_H
#define VIDEOPLAY_EGLUTILS_H

#include <EGL/egl.h>
class EGLUtils {
public:
    EGLUtils();
    ~EGLUtils();

    void initEGL(ANativeWindow *nativeWindow);

    void drawEGL();
    int getWidth();
    int getHeight();

private:
    EGLConfig eglConf;
    EGLSurface eglWindow;
    EGLContext eglCtx;
    EGLDisplay eglDisp;

    int windowWidth;
    int windowHeight;

    void releaseEGL();
};


#endif //VIDEOPLAY_EGLUTILS_H
EGLUtils.cpp

#include "EGLUtils.h"
EGLUtils::EGLUtils() {}
EGLUtils::~EGLUtils() {releaseEGL();
}void EGLUtils::initEGL(ANativeWindow *nativeWindow) {EGLint configSpec[] = { EGL_RED_SIZE, 8,
                            EGL_GREEN_SIZE, 8,
                            EGL_BLUE_SIZE, 8,
                            EGL_SURFACE_TYPE, EGL_WINDOW_BIT, EGL_NONE };

    eglDisp = eglGetDisplay(EGL_DEFAULT_DISPLAY);
    EGLint eglMajVers, eglMinVers;
    EGLint numConfigs;
    eglInitialize(eglDisp, &eglMajVers, &eglMinVers);
    eglChooseConfig(eglDisp, configSpec, &eglConf, 1, &numConfigs);

    eglWindow = eglCreateWindowSurface(eglDisp, eglConf,nativeWindow, NULL);

    eglQuerySurface(eglDisp,eglWindow,EGL_WIDTH,&windowWidth);
    eglQuerySurface(eglDisp,eglWindow,EGL_HEIGHT,&windowHeight);
    const EGLint ctxAttr[] = {EGL_CONTEXT_CLIENT_VERSION, 2,
            EGL_NONE
    };
    eglCtx = eglCreateContext(eglDisp, eglConf,EGL_NO_CONTEXT, ctxAttr);
    eglMakeCurrent(eglDisp, eglWindow, eglWindow, eglCtx);
}
int EGLUtils::getWidth() {return windowWidth;
}
int EGLUtils::getHeight(){return windowHeight;
}
void EGLUtils::releaseEGL() {eglMakeCurrent(eglDisp, EGL_NO_SURFACE, EGL_NO_SURFACE, EGL_NO_CONTEXT);
    eglDestroyContext(eglDisp, eglCtx);

    eglDestroySurface(eglDisp, eglWindow);
    eglTerminate(eglDisp);

    eglDisp = EGL_NO_DISPLAY;

    eglWindow = EGL_NO_SURFACE;
    eglCtx = EGL_NO_CONTEXT;
}
void EGLUtils::drawEGL() {eglSwapBuffers(eglDisp, eglWindow);
}
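The initialization above assumes every EGL call succeeds. If the screen stays black, it helps to check the return values; for example (a sketch, not part of the original class):

// Sketch: check an EGL call and log the failure code.
// Requires #include <android/log.h>; liblog is already linked in CMakeLists.txt.
if (eglMakeCurrent(eglDisp, eglWindow, eglWindow, eglCtx) != EGL_TRUE) {
    __android_log_print(ANDROID_LOG_ERROR, "EGLUtils",
                        "eglMakeCurrent failed: 0x%x", eglGetError());
}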
OpenGLUtils.h

#ifndef VIDEOPLAY_OPENGLUTILS_H
#define VIDEOPLAY_OPENGLUTILS_H


#include <GLES2/gl2.h>

class OpenGLUtils {
public:
    OpenGLUtils();
    ~OpenGLUtils();

    void surfaceCreated();

    void surfaceChanged(int width, int height);

    void initTexture(int width,int height);

    void updateTexture(int width,int height,void *bufY,void *bufU,void *bufV);

    void surfaceDraw();
    void release();
private:
    GLuint programId;

    GLuint aPositionHandle;
    GLuint aTextureCoordHandle;
    GLuint textureSamplerHandle[3];

    GLuint textureId[3];

    float *vertexData;
    float *textureVertexData;

    int viewWidth,viewHeight;
    int videoWidth,videoHeight;
    int screenWidth, screenHeight;

    void viewport();

};


#endif
OpenGLUtils.cpp

#include "OpenGLUtils.h"
#include "ShaderUtils.h"

OpenGLUtils::OpenGLUtils() {
    vertexData = new float[12]{
            1.0f, -1.0f, 0.0f,
            -1.0f, -1.0f, 0.0f,
            1.0f, 1.0f, 0.0f,
            -1.0f, 1.0f, 0.0f
    };

    textureVertexData = new float[8]{
            1.0f, 0.0f, // bottom right
            0.0f, 0.0f, // bottom left
            1.0f, 1.0f, // top right
            0.0f, 1.0f  // top left
    };
}

OpenGLUtils::~OpenGLUtils() {
    release();
}

void OpenGLUtils::surfaceCreated() {
    ShaderUtils *shaderUtils = new ShaderUtils();
    programId = shaderUtils->getYUVShader();

    aPositionHandle = (GLuint) glGetAttribLocation(programId, "aPosition");
    aTextureCoordHandle = (GLuint) glGetAttribLocation(programId, "aTexCoord");

    textureSamplerHandle[0] = (GLuint) glGetUniformLocation(programId, "yTexture");
    textureSamplerHandle[1] = (GLuint) glGetUniformLocation(programId, "uTexture");
    textureSamplerHandle[2] = (GLuint) glGetUniformLocation(programId, "vTexture");
    delete shaderUtils;

    glUseProgram(programId);
    glEnableVertexAttribArray(aPositionHandle);
    glVertexAttribPointer(aPositionHandle, 3, GL_FLOAT, GL_FALSE,
                          12, vertexData);

    glEnableVertexAttribArray(aTextureCoordHandle);
    glVertexAttribPointer(aTextureCoordHandle, 2, GL_FLOAT, GL_FALSE, 8, textureVertexData);
}

void OpenGLUtils::surfaceChanged(int width, int height) {
    screenWidth = width;
    screenHeight = height;
}

void OpenGLUtils::initTexture(int width, int height) {
    videoWidth = width;
    videoHeight = height;

    // Y plane: full resolution.
    glGenTextures(1, &textureId[0]);
    glActiveTexture(GL_TEXTURE0);
    glBindTexture(GL_TEXTURE_2D, textureId[0]);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, width, height, 0,
                 GL_LUMINANCE, GL_UNSIGNED_BYTE, NULL);
    glUniform1i(textureSamplerHandle[0], 0);

    // U plane: half resolution in both dimensions.
    glGenTextures(1, &textureId[1]);
    glActiveTexture(GL_TEXTURE1);
    glBindTexture(GL_TEXTURE_2D, textureId[1]);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, width / 2, height / 2, 0,
                 GL_LUMINANCE, GL_UNSIGNED_BYTE, NULL);
    glUniform1i(textureSamplerHandle[1], 1);

    // V plane: half resolution in both dimensions.
    glGenTextures(1, &textureId[2]);
    glActiveTexture(GL_TEXTURE2);
    glBindTexture(GL_TEXTURE_2D, textureId[2]);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, width / 2, height / 2, 0,
                 GL_LUMINANCE, GL_UNSIGNED_BYTE, NULL);
    glUniform1i(textureSamplerHandle[2], 2);

    viewport();
}

void OpenGLUtils::updateTexture(int width, int height, void *bufY, void *bufU, void *bufV) {
    glActiveTexture(GL_TEXTURE0);
    glBindTexture(GL_TEXTURE_2D, textureId[0]);
    glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, width, height, GL_LUMINANCE, GL_UNSIGNED_BYTE, bufY);

    glActiveTexture(GL_TEXTURE1);
    glBindTexture(GL_TEXTURE_2D, textureId[1]);
    glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, width / 2, height / 2, GL_LUMINANCE, GL_UNSIGNED_BYTE, bufU);

    glActiveTexture(GL_TEXTURE2);
    glBindTexture(GL_TEXTURE_2D, textureId[2]);
    glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, width / 2, height / 2, GL_LUMINANCE, GL_UNSIGNED_BYTE, bufV);
}

void OpenGLUtils::surfaceDraw() {
    glClear(GL_DEPTH_BUFFER_BIT | GL_COLOR_BUFFER_BIT);
    glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
}

void OpenGLUtils::viewport() {
    // Letterbox the video inside the window while keeping its aspect ratio.
    int left, top;
    if (screenHeight > screenWidth) {
        left = 0;
        viewWidth = screenWidth;
        viewHeight = (int) (videoHeight * 1.0f / videoWidth * viewWidth);
        top = (screenHeight - viewHeight) / 2;
    } else {
        top = 0;
        viewHeight = screenHeight;
        viewWidth = (int) (videoWidth * 1.0f / videoHeight * viewHeight);
        left = (screenWidth - viewWidth) / 2;
    }
    glViewport(left, top, viewWidth, viewHeight);
}

void OpenGLUtils::release() {
    glDeleteProgram(programId);
    glDeleteTextures(3, textureId);
}
Finally, the shader code, which is what actually displays the YUV-format image: the Y plane is sampled at full resolution and the U and V planes at half resolution, matching the three GL_LUMINANCE textures created in initTexture().

ShaderUtils.h

#ifndef VIDEOPLAY_SHADERUTILS_H
#define VIDEOPLAY_SHADERUTILS_H

#include <GLES2/gl2.h>
class ShaderUtils {
public:
    GLuint createProgram(const char *vertexSource, const char *fragmentSource);

    GLuint loadShader(GLenum shaderType, const char *source);

    GLuint getYUVShader();
};


#endif
ShaderUtils.cpp

#include <malloc.h>
#include "ShaderUtils.h"
#define GET_STR(x) #x
const char *vertexYUVShaderString = GET_STR(
        attribute vec4 aPosition;
        attribute vec2 aTexCoord;
        varying vec2 vTexCoord;
        void main() {
            vTexCoord = vec2(aTexCoord.x, 1.0 - aTexCoord.y);
            gl_Position = aPosition;
        }
);

const char *fragmentYUVSShaderString = GET_STR(
        precision mediump float;
        varying vec2 vTexCoord;
        uniform sampler2D yTexture;
        uniform sampler2D uTexture;
        uniform sampler2D vTexture;
        void main() {
            vec3 yuv;
            vec3 rgb;
            yuv.r = texture2D(yTexture, vTexCoord).r;
            yuv.g = texture2D(uTexture, vTexCoord).r - 0.5;
            yuv.b = texture2D(vTexture, vTexCoord).r - 0.5;
            rgb = mat3(1.0,     1.0,      1.0,
                       0.0,     -0.39465, 2.03211,
                       1.13983, -0.58060, 0.0) * yuv;
            gl_FragColor = vec4(rgb, 1.0);
        }
);

GLuint ShaderUtils::getYUVShader() {
    return createProgram(vertexYUVShaderString, fragmentYUVSShaderString);
}

GLuint ShaderUtils::createProgram(const char *vertexSource, const char *fragmentSource) {
    GLuint vertexShader = loadShader(GL_VERTEX_SHADER, vertexSource);
    if (!vertexShader) {
        return 0;
    }
    GLuint pixelShader = loadShader(GL_FRAGMENT_SHADER, fragmentSource);
    if (!pixelShader) {
        return 0;
    }
    GLuint program = glCreateProgram();
    if (program != 0) {
        glAttachShader(program, vertexShader);
        glAttachShader(program, pixelShader);
        glLinkProgram(program);
        GLint linkStatus = 0;
        glGetProgramiv(program, GL_LINK_STATUS, &linkStatus);
        if (!linkStatus) {
            GLint info_length = 0;
            glGetProgramiv(program, GL_INFO_LOG_LENGTH, &info_length);
            if (info_length) {
                char *buf = (char *) malloc(info_length * sizeof(char));
                glGetProgramInfoLog(program, info_length, NULL, buf);
                free(buf);
            }
            glDeleteProgram(program);
            program = 0;
        }
    }
    return program;
}

GLuint ShaderUtils::loadShader(GLenum shaderType, const char *source) {
    GLuint shader = glCreateShader(shaderType);
    if (shader != 0) {
        glShaderSource(shader, 1, &source, NULL);
        glCompileShader(shader);
        GLint compiled = 0;
        glGetShaderiv(shader, GL_COMPILE_STATUS, &compiled);
        if (!compiled) {
            GLint info_length = 0;
            glGetShaderiv(shader, GL_INFO_LOG_LENGTH, &info_length);
            if (info_length) {
                char *buf = (char *) malloc(info_length * sizeof(char));
                if (buf) {
                    glGetShaderInfoLog(shader, info_length, NULL, buf);
                }
                free(buf);
            }
            glDeleteShader(shader);
            shader = 0;
        }
    }
    return shader;
}
The shader code is taken from 雷神's blog posts.
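For reference, the mat3 in the fragment shader (GLSL matrix constructors are column-major, and yuv already has 0.5 subtracted from U and V) expands to the usual YUV-to-RGB conversion:

R = Y + 1.13983 * (V - 0.5)
G = Y - 0.39465 * (U - 0.5) - 0.58060 * (V - 0.5)
B = Y + 2.03211 * (U - 0.5)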

This code only displays the decoded frames; there is no timestamp correction, so the result looks like fast-forward: the playback speed is simply the decoding speed.
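If you wanted roughly real-time playback, one simple (and crude) approach is to sleep between frames according to their presentation timestamps. A sketch of the idea, using the variables from the decode loop above (not in the original code):

// Sketch: pace frames by presentation timestamp (ignores decode time and audio sync).
#include <unistd.h>

double timeBase = av_q2d(avStream->time_base);   // seconds per timestamp unit
double lastPts = -1.0;

// ... inside the decode loop, after avcodec_receive_frame() succeeds:
double pts = yuvFrame->best_effort_timestamp * timeBase;
if (lastPts >= 0 && pts > lastPts) {
    usleep((useconds_t) ((pts - lastPts) * 1000000)); // wait until this frame's display time
}
lastPts = pts;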








