This article walks through a simple Android video player based on FFmpeg, and I hope it is a useful reference for developers tackling the same problem.
The video is rendered with OpenGL ES. FFmpeg itself is written in C, but I wrote the OpenGL code in C++, since I am not comfortable writing it in plain C.
Copy the generated .so files and the include folder into the project's app\libs directory.
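Depending on the project setup, Gradle may also need to be told to package these .so files into the APK, typically by pointing the jniLibs source set at the libs directory in build.gradle. That is an assumption about a standard project layout; the Gradle files are not shown in this article.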
CMakeLists.txt:
cmake_minimum_required(VERSION 3.4.1)

add_library( native-lib
             SHARED
             src/main/cpp/native-lib.cpp
             src/main/cpp/OpenGLUtils.cpp
             src/main/cpp/ShaderUtils.cpp
             src/main/cpp/EGLUtils.cpp )

find_library( log-lib
              log )

set(distribution_DIR ../../../../libs)
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=gnu++11")
set(CMAKE_VERBOSE_MAKEFILE on)

include_directories(libs/include)

add_library( avcodec-57 SHARED IMPORTED )
add_library( avdevice-57 SHARED IMPORTED )
add_library( avfilter-6 SHARED IMPORTED )
add_library( avformat-57 SHARED IMPORTED )
add_library( avutil-55 SHARED IMPORTED )
add_library( swresample-2 SHARED IMPORTED )
add_library( swscale-4 SHARED IMPORTED )

set_target_properties( avcodec-57 PROPERTIES IMPORTED_LOCATION
                       ${distribution_DIR}/${ANDROID_ABI}/libavcodec-57.so )
set_target_properties( avdevice-57 PROPERTIES IMPORTED_LOCATION
                       ${distribution_DIR}/${ANDROID_ABI}/libavdevice-57.so )
set_target_properties( avfilter-6 PROPERTIES IMPORTED_LOCATION
                       ${distribution_DIR}/${ANDROID_ABI}/libavfilter-6.so )
set_target_properties( avformat-57 PROPERTIES IMPORTED_LOCATION
                       ${distribution_DIR}/${ANDROID_ABI}/libavformat-57.so )
set_target_properties( avutil-55 PROPERTIES IMPORTED_LOCATION
                       ${distribution_DIR}/${ANDROID_ABI}/libavutil-55.so )
set_target_properties( swresample-2 PROPERTIES IMPORTED_LOCATION
                       ${distribution_DIR}/${ANDROID_ABI}/libswresample-2.so )
set_target_properties( swscale-4 PROPERTIES IMPORTED_LOCATION
                       ${distribution_DIR}/${ANDROID_ABI}/libswscale-4.so )

target_link_libraries( native-lib
                       avcodec-57
                       avdevice-57
                       avfilter-6
                       avformat-57
                       avutil-55
                       swresample-2
                       swscale-4
                       ${log-lib}
                       android
                       EGL
                       GLESv2 )

Note the android, EGL, and GLESv2 entries at the end of target_link_libraries: the Android native-window, EGL, and OpenGL ES 2.0 libraries are all needed for rendering.
Java code:
setContentView(R.layout.activity_main);
SurfaceView surfaceView = findViewById(R.id.surface_view);
surfaceView.getHolder().addCallback(new SurfaceHolder.Callback() {
    @Override
    public void surfaceCreated(SurfaceHolder holder) {
    }

    @Override
    public void surfaceChanged(final SurfaceHolder holder, int format, int width, int height) {
        // Decode on a background thread: videoPlay() blocks until the file has played through.
        Thread thread = new Thread() {
            @Override
            public void run() {
                super.run();
                String videoPath = "/storage/emulated/0/baiduNetdisk/season09.mp4";
                videoPlay(videoPath, holder.getSurface());
            }
        };
        thread.start();
    }

    @Override
    public void surfaceDestroyed(SurfaceHolder holder) {
    }
});

This is very simple; the main thing is the call to
public native void videoPlay(String path, Surface surface);

which hands the file path and the Surface to the native side for processing.
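Remember that the native library has to be loaded before this method can be called, e.g. with System.loadLibrary("native-lib") in a static initializer; the name matches the native-lib target defined in CMakeLists.txt above.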
C++ code:
const char *path = env->GetStringUTFChars(path_, 0);
// TODO
av_register_all();
AVFormatContext *fmt_ctx = avformat_alloc_context();
if (avformat_open_input(&fmt_ctx, path, NULL, NULL) < 0) {
    return;
}
if (avformat_find_stream_info(fmt_ctx, NULL) < 0) {
    return;
}
// Find the first video stream.
AVStream *avStream = NULL;
int video_stream_index = -1;
for (int i = 0; i < fmt_ctx->nb_streams; i++) {
    if (fmt_ctx->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) {
        avStream = fmt_ctx->streams[i];
        video_stream_index = i;
        break;
    }
}
if (video_stream_index == -1) {
    return;
}
// Set up the decoder from the stream's codec parameters.
AVCodecContext *codec_ctx = avcodec_alloc_context3(NULL);
avcodec_parameters_to_context(codec_ctx, avStream->codecpar);
AVCodec *avCodec = avcodec_find_decoder(codec_ctx->codec_id);
if (avcodec_open2(codec_ctx, avCodec, NULL) < 0) {
    return;
}
// Set up EGL and OpenGL on this thread, rendering into the Java Surface.
ANativeWindow *nativeWindow = ANativeWindow_fromSurface(env, surface);
AVFrame *yuvFrame = av_frame_alloc();
EGLUtils *eglUtils = new EGLUtils();
eglUtils->initEGL(nativeWindow);
OpenGLUtils *openGLUtils = new OpenGLUtils();
openGLUtils->surfaceCreated();
openGLUtils->surfaceChanged(eglUtils->getWidth(), eglUtils->getHeight());
openGLUtils->initTexture(codec_ctx->width, codec_ctx->height);
int y_size = codec_ctx->width * codec_ctx->height;
AVPacket *pkt = (AVPacket *) malloc(sizeof(AVPacket));
av_new_packet(pkt, y_size);
int ret;
// Read packets, decode video packets, and draw each decoded frame.
while (1) {
    if (av_read_frame(fmt_ctx, pkt) < 0) {
        av_packet_unref(pkt);
        break;
    }
    if (pkt->stream_index == video_stream_index) {
        ret = avcodec_send_packet(codec_ctx, pkt);
        if (ret < 0 && ret != AVERROR(EAGAIN) && ret != AVERROR_EOF) {
            av_packet_unref(pkt);
            continue;
        }
        ret = avcodec_receive_frame(codec_ctx, yuvFrame);
        if (ret < 0 && ret != AVERROR_EOF) {
            av_packet_unref(pkt);
            continue;
        }
        openGLUtils->updateTexture(yuvFrame->width, yuvFrame->height,
                                   yuvFrame->data[0], yuvFrame->data[1], yuvFrame->data[2]);
        openGLUtils->surfaceDraw();
        eglUtils->drawEGL();
        av_packet_unref(pkt);
    }
    av_packet_unref(pkt);
}
av_frame_free(&yuvFrame);
avcodec_close(codec_ctx);
avformat_close_input(&fmt_ctx);
env->ReleaseStringUTFChars(path_, path);

The headers this file pulls in:
#include <android/native_window.h>
#include <android/native_window_jni.h>
#include "EGLUtils.h"
#include "OpenGLUtils.h"

extern "C" {
#include "libavformat/avformat.h"
#include "libavfilter/avfiltergraph.h"
}
Because FFmpeg is written in C, its headers must be wrapped in extern "C" when included from C++; otherwise the build fails, since C++ name mangling prevents the linker from finding the FFmpeg symbols.
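One subtlety in the decode loop above is worth calling out: avcodec_send_packet() and avcodec_receive_frame() are not one-to-one. The decoder may buffer a few packets before the first frame comes out (avcodec_receive_frame() returns AVERROR(EAGAIN)), and a single packet can yield more than one frame. The loop above renders at most one frame per packet; here is a minimal sketch of the more robust drain pattern, reusing the variables from the code above:

ret = avcodec_send_packet(codec_ctx, pkt);
if (ret >= 0) {
    // Keep pulling frames until the decoder has nothing ready (EAGAIN) or errors out.
    while (avcodec_receive_frame(codec_ctx, yuvFrame) == 0) {
        openGLUtils->updateTexture(yuvFrame->width, yuvFrame->height,
                                   yuvFrame->data[0], yuvFrame->data[1], yuvFrame->data[2]);
        openGLUtils->surfaceDraw();
        eglUtils->drawEGL();
    }
}
av_packet_unref(pkt);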
EGLUtils and OpenGLUtils are EGL/OpenGL rendering helpers I wrote myself.
EGLUtils.h
#ifndef VIDEOPLAY_EGLUTILS_H
#define VIDEOPLAY_EGLUTILS_H

#include <EGL/egl.h>

class EGLUtils {
public:
    EGLUtils();
    ~EGLUtils();
    void initEGL(ANativeWindow *nativeWindow);
    void drawEGL();
    int getWidth();
    int getHeight();
private:
    EGLConfig eglConf;
    EGLSurface eglWindow;
    EGLContext eglCtx;
    EGLDisplay eglDisp;
    int windowWidth;
    int windowHeight;
    void releaseEGL();
};

#endif //VIDEOPLAY_EGLUTILS_H

EGLUtils.cpp
#include "EGLUtils.h"

EGLUtils::EGLUtils() {}

EGLUtils::~EGLUtils() {
    releaseEGL();
}

void EGLUtils::initEGL(ANativeWindow *nativeWindow) {
    EGLint configSpec[] = { EGL_RED_SIZE, 8,
                            EGL_GREEN_SIZE, 8,
                            EGL_BLUE_SIZE, 8,
                            EGL_SURFACE_TYPE, EGL_WINDOW_BIT,
                            EGL_NONE };
    eglDisp = eglGetDisplay(EGL_DEFAULT_DISPLAY);
    EGLint eglMajVers, eglMinVers;
    EGLint numConfigs;
    eglInitialize(eglDisp, &eglMajVers, &eglMinVers);
    eglChooseConfig(eglDisp, configSpec, &eglConf, 1, &numConfigs);
    eglWindow = eglCreateWindowSurface(eglDisp, eglConf, nativeWindow, NULL);
    eglQuerySurface(eglDisp, eglWindow, EGL_WIDTH, &windowWidth);
    eglQuerySurface(eglDisp, eglWindow, EGL_HEIGHT, &windowHeight);
    const EGLint ctxAttr[] = { EGL_CONTEXT_CLIENT_VERSION, 2, EGL_NONE };
    eglCtx = eglCreateContext(eglDisp, eglConf, EGL_NO_CONTEXT, ctxAttr);
    eglMakeCurrent(eglDisp, eglWindow, eglWindow, eglCtx);
}

int EGLUtils::getWidth() {
    return windowWidth;
}

int EGLUtils::getHeight() {
    return windowHeight;
}

void EGLUtils::releaseEGL() {
    eglMakeCurrent(eglDisp, EGL_NO_SURFACE, EGL_NO_SURFACE, EGL_NO_CONTEXT);
    eglDestroyContext(eglDisp, eglCtx);
    eglDestroySurface(eglDisp, eglWindow);
    eglTerminate(eglDisp);
    eglDisp = EGL_NO_DISPLAY;
    eglWindow = EGL_NO_SURFACE;
    eglCtx = EGL_NO_CONTEXT;
}

void EGLUtils::drawEGL() {
    eglSwapBuffers(eglDisp, eglWindow);
}
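None of the EGL calls in initEGL are error-checked; most of them return EGL_FALSE or an EGL_NO_* handle on failure. A minimal sketch of how the two most failure-prone calls could be guarded (my addition, not part of the original class; it reuses the locals from initEGL):

eglDisp = eglGetDisplay(EGL_DEFAULT_DISPLAY);
if (eglDisp == EGL_NO_DISPLAY) {
    return; // no connection to a display is available
}
if (eglInitialize(eglDisp, &eglMajVers, &eglMinVers) != EGL_TRUE) {
    // eglGetError() reports the cause, e.g. EGL_NOT_INITIALIZED
    return;
}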
OpenGLUtils.h

#ifndef VIDEOPLAY_OPENGLUTILS_H
#define VIDEOPLAY_OPENGLUTILS_H

#include <GLES2/gl2.h>

class OpenGLUtils {
public:
    OpenGLUtils();
    ~OpenGLUtils();
    void surfaceCreated();
    void surfaceChanged(int width, int height);
    void initTexture(int width, int height);
    void updateTexture(int width, int height, void *bufY, void *bufU, void *bufV);
    void surfaceDraw();
    void release();
private:
    GLuint programId;
    GLuint aPositionHandle;
    GLuint aTextureCoordHandle;
    GLuint textureSamplerHandle[3];
    GLuint textureId[3];
    float *vertexData;
    float *textureVertexData;
    int viewWidth, viewHeight;
    int videoWidth, videoHeight;
    int screenWidth, screenHeight;
    void viewport();
};

#endif

OpenGLUtils.cpp
#include "OpenGLUtils.h"
#include "ShaderUtils.h"

OpenGLUtils::OpenGLUtils() {
    vertexData = new float[12]{
            1.0f, -1.0f, 0.0f,
            -1.0f, -1.0f, 0.0f,
            1.0f, 1.0f, 0.0f,
            -1.0f, 1.0f, 0.0f
    };
    textureVertexData = new float[8]{
            1.0f, 0.0f, // bottom right
            0.0f, 0.0f, // bottom left
            1.0f, 1.0f, // top right
            0.0f, 1.0f  // top left
    };
}

OpenGLUtils::~OpenGLUtils() {
    release();
}

void OpenGLUtils::surfaceCreated() {
    ShaderUtils *shaderUtils = new ShaderUtils();
    programId = shaderUtils->getYUVShader();
    aPositionHandle = (GLuint) glGetAttribLocation(programId, "aPosition");
    aTextureCoordHandle = (GLuint) glGetAttribLocation(programId, "aTexCoord");
    textureSamplerHandle[0] = (GLuint) glGetUniformLocation(programId, "yTexture");
    textureSamplerHandle[1] = (GLuint) glGetUniformLocation(programId, "uTexture");
    textureSamplerHandle[2] = (GLuint) glGetUniformLocation(programId, "vTexture");
    delete shaderUtils;
    glUseProgram(programId);
    glEnableVertexAttribArray(aPositionHandle);
    glVertexAttribPointer(aPositionHandle, 3, GL_FLOAT, GL_FALSE, 12, vertexData);
    glEnableVertexAttribArray(aTextureCoordHandle);
    glVertexAttribPointer(aTextureCoordHandle, 2, GL_FLOAT, GL_FALSE, 8, textureVertexData);
}

void OpenGLUtils::surfaceChanged(int width, int height) {
    screenWidth = width;
    screenHeight = height;
}

void OpenGLUtils::initTexture(int width, int height) {
    videoWidth = width;
    videoHeight = height;
    // Y plane at full resolution.
    glGenTextures(1, &textureId[0]);
    glActiveTexture(GL_TEXTURE0);
    glBindTexture(GL_TEXTURE_2D, textureId[0]);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, width, height, 0,
                 GL_LUMINANCE, GL_UNSIGNED_BYTE, NULL);
    glUniform1i(textureSamplerHandle[0], 0);
    // U plane at half resolution (4:2:0).
    glGenTextures(1, &textureId[1]);
    glActiveTexture(GL_TEXTURE1);
    glBindTexture(GL_TEXTURE_2D, textureId[1]);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, width / 2, height / 2, 0,
                 GL_LUMINANCE, GL_UNSIGNED_BYTE, NULL);
    glUniform1i(textureSamplerHandle[1], 1);
    // V plane at half resolution.
    glGenTextures(1, &textureId[2]);
    glActiveTexture(GL_TEXTURE2);
    glBindTexture(GL_TEXTURE_2D, textureId[2]);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, width / 2, height / 2, 0,
                 GL_LUMINANCE, GL_UNSIGNED_BYTE, NULL);
    glUniform1i(textureSamplerHandle[2], 2);
    viewport();
}

void OpenGLUtils::updateTexture(int width, int height, void *bufY, void *bufU, void *bufV) {
    glActiveTexture(GL_TEXTURE0);
    glBindTexture(GL_TEXTURE_2D, textureId[0]);
    glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, width, height,
                    GL_LUMINANCE, GL_UNSIGNED_BYTE, bufY);
    glActiveTexture(GL_TEXTURE1);
    glBindTexture(GL_TEXTURE_2D, textureId[1]);
    glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, width / 2, height / 2,
                    GL_LUMINANCE, GL_UNSIGNED_BYTE, bufU);
    glActiveTexture(GL_TEXTURE2);
    glBindTexture(GL_TEXTURE_2D, textureId[2]);
    glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, width / 2, height / 2,
                    GL_LUMINANCE, GL_UNSIGNED_BYTE, bufV);
}

void OpenGLUtils::surfaceDraw() {
    glClear(GL_DEPTH_BUFFER_BIT | GL_COLOR_BUFFER_BIT);
    glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
}

void OpenGLUtils::viewport() {
    // Letterbox the video inside the window, preserving its aspect ratio.
    int left, top;
    if (screenHeight > screenWidth) {
        left = 0;
        viewWidth = screenWidth;
        viewHeight = (int) (videoHeight * 1.0f / videoWidth * viewWidth);
        top = (screenHeight - viewHeight) / 2;
    } else {
        top = 0;
        viewHeight = screenHeight;
        viewWidth = (int) (videoWidth * 1.0f / videoHeight * viewHeight);
        left = (screenWidth - viewWidth) / 2;
    }
    glViewport(left, top, viewWidth, viewHeight);
}

void OpenGLUtils::release() {
    glDeleteProgram(programId);
    glDeleteTextures(3, textureId);
}

There is also the shader code, used to display the YUV-format image.
ShaderUtils.h
#ifndef VIDEOPLAY_SHADERUTILS_H
#define VIDEOPLAY_SHADERUTILS_H

#include <GLES2/gl2.h>

class ShaderUtils {
public:
    GLuint createProgram(const char *vertexSource, const char *fragmentSource);
    GLuint loadShader(GLenum shaderType, const char *source);
    GLuint getYUVShader();
};

#endif

ShaderUtils.cpp
#include <malloc.h>
#include "ShaderUtils.h"

#define GET_STR(x) #x

const char *vertexYUVShaderString = GET_STR(
        attribute vec4 aPosition;
        attribute vec2 aTexCoord;
        varying vec2 vTexCoord;
        void main() {
            vTexCoord = vec2(aTexCoord.x, 1.0 - aTexCoord.y);
            gl_Position = aPosition;
        }
);

const char *fragmentYUVSShaderString = GET_STR(
        precision mediump float;
        varying vec2 vTexCoord;
        uniform sampler2D yTexture;
        uniform sampler2D uTexture;
        uniform sampler2D vTexture;
        void main() {
            vec3 yuv;
            vec3 rgb;
            yuv.r = texture2D(yTexture, vTexCoord).r;
            yuv.g = texture2D(uTexture, vTexCoord).r - 0.5;
            yuv.b = texture2D(vTexture, vTexCoord).r - 0.5;
            rgb = mat3(1.0, 1.0, 1.0,
                       0.0, -0.39465, 2.03211,
                       1.13983, -0.58060, 0.0) * yuv;
            gl_FragColor = vec4(rgb, 1.0);
        }
);

GLuint ShaderUtils::getYUVShader() {
    return createProgram(vertexYUVShaderString, fragmentYUVSShaderString);
}

GLuint ShaderUtils::createProgram(const char *vertexSource, const char *fragmentSource) {
    GLuint vertexShader = loadShader(GL_VERTEX_SHADER, vertexSource);
    if (!vertexShader) {
        return 0;
    }
    GLuint pixelShader = loadShader(GL_FRAGMENT_SHADER, fragmentSource);
    if (!pixelShader) {
        return 0;
    }
    GLuint program = glCreateProgram();
    if (program != 0) {
        glAttachShader(program, vertexShader);
        glAttachShader(program, pixelShader);
        glLinkProgram(program);
        GLint linkStatus = 0;
        glGetProgramiv(program, GL_LINK_STATUS, &linkStatus);
        if (!linkStatus) {
            GLint info_length = 0;
            glGetProgramiv(program, GL_INFO_LOG_LENGTH, &info_length);
            if (info_length) {
                char *buf = (char *) malloc(info_length * sizeof(char));
                glGetProgramInfoLog(program, info_length, NULL, buf);
                free(buf);
            }
            glDeleteProgram(program);
            program = 0;
        }
    }
    return program;
}

GLuint ShaderUtils::loadShader(GLenum shaderType, const char *source) {
    GLuint shader = glCreateShader(shaderType);
    if (shader != 0) {
        glShaderSource(shader, 1, &source, NULL);
        glCompileShader(shader);
        GLint compiled = 0;
        glGetShaderiv(shader, GL_COMPILE_STATUS, &compiled);
        if (!compiled) {
            GLint info_length = 0;
            glGetShaderiv(shader, GL_INFO_LOG_LENGTH, &info_length);
            if (info_length) {
                char *buf = (char *) malloc(info_length * sizeof(char));
                if (buf) {
                    glGetShaderInfoLog(shader, info_length, NULL, buf);
                }
                free(buf);
            }
            glDeleteShader(shader);
            shader = 0;
        }
    }
    return shader;
}

The shader code is borrowed from 雷神. The fragment shader samples the three planes and converts YUV to RGB; note that the GLSL mat3 constructor is column-major, so the matrix implements R = Y + 1.13983*V, G = Y - 0.39465*U - 0.58060*V, B = Y + 2.03211*U.
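One caveat about updateTexture in OpenGLUtils.cpp: glTexSubImage2D assumes each plane is tightly packed, while FFmpeg pads rows for alignment, so the rows of yuvFrame->data[i] are yuvFrame->linesize[i] bytes apart, which can be wider than the frame. When they differ, the picture comes out skewed. GLES2 has no GL_UNPACK_ROW_LENGTH (that arrived in ES 3.0), so the common workaround is to repack each plane first. A hedged sketch, where packPlane is a hypothetical helper that is not in the original sources:

#include <cstring>
#include <cstdint>

// Hypothetical helper: copy a padded plane into a tightly packed buffer so the
// existing glTexSubImage2D upload in updateTexture remains valid.
static void packPlane(const uint8_t *src, int linesize, int width, int height, uint8_t *dst) {
    for (int y = 0; y < height; ++y) {
        memcpy(dst + (size_t) y * width, src + (size_t) y * linesize, (size_t) width);
    }
}

The Y plane would be packed at the full width and height, and the U/V planes at width/2 and height/2, matching the texture sizes allocated in initTexture.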
This code only displays frames as they are decoded; there is no timestamp-based pacing, so the video looks fast-forwarded: the playback speed is simply the decoding speed.
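If you want roughly correct playback speed, the standard fix is to delay each frame until its presentation timestamp. A minimal sketch of the idea for the decode loop above (my addition, ignoring audio sync; start_time_us and first_pts_us are hypothetical locals captured at the first decoded frame):

// Convert the frame's PTS to microseconds using the stream time base,
// then sleep off whatever part of that delay has not yet elapsed.
int64_t pts_us = (int64_t) (yuvFrame->pts * av_q2d(avStream->time_base) * 1000000.0);
int64_t elapsed_us = av_gettime() - start_time_us;   // av_gettime() is in libavutil/time.h
int64_t delay_us = (pts_us - first_pts_us) - elapsed_us;
if (delay_us > 0) {
    av_usleep((unsigned int) delay_us);              // also from libavutil/time.h
}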
That concludes this introduction to a simple FFmpeg-based Android video player; I hope it is helpful to other developers.