您的位置:首页 > 移动开发 > Android开发

android基于ffmpeg的简单视频播放器 播放视频

2017-11-02 18:07 543 查看
视频播放用到opengl,因为ffmpeg是c写的,所以我就用c++写opengl,c不会写

把生成的so文件和include文件夹复制到项目的app\libs文件夹

CMakeLists.txt文件代码

# Minimum CMake version supported by the Android Gradle plugin toolchain.
cmake_minimum_required(VERSION 3.4.1)

# The JNI library: player entry point plus the EGL/OpenGL/shader helpers.
add_library( native-lib
SHARED
src/main/cpp/native-lib.cpp
src/main/cpp/OpenGLUtils.cpp
src/main/cpp/ShaderUtils.cpp
src/main/cpp/EGLUtils.cpp
)

# NDK logging library (__android_log_print).
find_library( log-lib
log )

# Prebuilt FFmpeg .so files live under app/libs/<ABI>/ relative to the build dir.
set(distribution_DIR ../../../../libs)
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=gnu++11")
set(CMAKE_VERBOSE_MAKEFILE on)
# FFmpeg headers copied into app/libs/include.
include_directories(libs/include)

# Declare each prebuilt FFmpeg shared library as an IMPORTED target, then
# point it at the matching .so for the ABI being built.
add_library( avcodec-57
SHARED
IMPORTED)
add_library( avdevice-57
SHARED
IMPORTED)
add_library( avfilter-6
SHARED
IMPORTED)
add_library( avformat-57
SHARED
IMPORTED)
add_library( avutil-55
SHARED
IMPORTED)
add_library( swresample-2
SHARED
IMPORTED)
add_library( swscale-4
SHARED
IMPORTED)
set_target_properties( avcodec-57
PROPERTIES IMPORTED_LOCATION
${distribution_DIR}/${ANDROID_ABI}/libavcodec-57.so)
set_target_properties( avdevice-57
PROPERTIES IMPORTED_LOCATION
${distribution_DIR}/${ANDROID_ABI}/libavdevice-57.so)
set_target_properties( avfilter-6
PROPERTIES IMPORTED_LOCATION
${distribution_DIR}/${ANDROID_ABI}/libavfilter-6.so)
set_target_properties( avformat-57
PROPERTIES IMPORTED_LOCATION
${distribution_DIR}/${ANDROID_ABI}/libavformat-57.so)
set_target_properties( avutil-55
PROPERTIES IMPORTED_LOCATION
${distribution_DIR}/${ANDROID_ABI}/libavutil-55.so)
set_target_properties( swresample-2
PROPERTIES IMPORTED_LOCATION
${distribution_DIR}/${ANDROID_ABI}/libswresample-2.so)
set_target_properties( swscale-4
PROPERTIES IMPORTED_LOCATION
${distribution_DIR}/${ANDROID_ABI}/libswscale-4.so)

# Link FFmpeg plus the NDK libraries used for rendering:
# android (ANativeWindow), EGL (context/surface), GLESv2 (OpenGL ES 2).
target_link_libraries( native-lib

avcodec-57
avdevice-57
avfilter-6
avformat-57
avutil-55
swresample-2
swscale-4

${log-lib}
android
EGL
GLESv2
)

引入opengl,egl和android包,都要用到

java代码

setContentView(R.layout.activity_main);
SurfaceView surfaceView = findViewById(R.id.surface_view);

// Kick off native playback once the Surface is ready to be rendered into.
surfaceView.getHolder().addCallback(new SurfaceHolder.Callback() {
@Override
public void surfaceCreated(SurfaceHolder holder) {

}

@Override
public void surfaceChanged(final SurfaceHolder holder, int format, int width, int height) {
// Decode + render on a background thread; videoPlay() blocks until the
// file is fully played.
// NOTE(review): surfaceChanged can fire more than once (rotation, size
// change), which would start a second playback thread — TODO confirm
// and guard if needed.
Thread thread = new Thread(){
@Override
public void run() {
super.run();
// Hard-coded test file; requires external-storage read permission.
String videoPath = "/storage/emulated/0/baiduNetdisk/season09.mp4";
videoPlay(videoPath,holder.getSurface());
}
};
thread.start();
}

@Override
public void surfaceDestroyed(SurfaceHolder holder) {

}
});

很简单,主要是调用

/** JNI entry point: decodes the video file at {@code path} and renders it onto {@code surface}. */
public native void videoPlay(String path, Surface surface);

来进行处理

c++代码

const char *path = env->GetStringUTFChars(path_, 0);
// TODO

av_register_all();
AVFormatContext *fmt_ctx = avformat_alloc_context();
if (avformat_open_input(&fmt_ctx, path, NULL, NULL) < 0) {
return;
}
if (avformat_find_stream_info(fmt_ctx, NULL) < 0) {
return;
}
AVStream *avStream = NULL;
int video_stream_index = -1;
for (int i = 0; i < fmt_ctx->nb_streams; i++) {
if (fmt_ctx->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) {
avStream = fmt_ctx->streams[i];
video_stream_index = i;
break;
}
}
if (video_stream_index == -1) {
return;
}
AVCodecContext *codec_ctx = avcodec_alloc_context3(NULL);
avcodec_parameters_to_context(codec_ctx, avStream->codecpar);

AVCodec *avCodec = avcodec_find_decoder(codec_ctx->codec_id);
if (avcodec_open2(codec_ctx, avCodec, NULL) < 0) {
return;
}

ANativeWindow* nativeWindow = ANativeWindow_fromSurface(env,surface);

AVFrame *yuvFrame = av_frame_alloc();

EGLUtils *eglUtils = new EGLUtils();
eglUtils->initEGL(nativeWindow);

OpenGLUtils *openGLUtils = new OpenGLUtils();
openGLUtils->surfaceCreated();
openGLUtils->surfaceChanged(eglUtils->getWidth(),eglUtils->getHeight());
openGLUtils->initTexture(codec_ctx->width,codec_ctx->height);

int y_size = codec_ctx->width * codec_ctx->height;
AVPacket *pkt = (AVPacket *) malloc(sizeof(AVPacket));
av_new_packet(pkt, y_size);
int ret;
while (1) {
if (av_read_frame(fmt_ctx, pkt) < 0) {
av_packet_unref(pkt);
break;
}
if (pkt->stream_index == video_stream_index) {
ret = avcodec_send_packet(codec_ctx, pkt);
if (ret < 0 && ret != AVERROR(EAGAIN) && ret != AVERROR_EOF) {
av_packet_unref(pkt);
continue;
}
ret = avcodec_receive_frame(codec_ctx, yuvFrame);
if (ret < 0 && ret != AVERROR_EOF) {
av_packet_unref(pkt);
continue;
}

openGLUtils->updateTexture(yuvFrame->width,yuvFrame->height,yuvFrame->data[0],yuvFrame->data[1],yuvFrame->data[2]);
openGLUtils->surfaceDraw();
eglUtils->drawEGL();

av_packet_unref(pkt);
}
av_packet_unref(pkt);
}
av_frame_free(&yuvFrame);
avcodec_close(codec_ctx);
avformat_close_input(&fmt_ctx);

env->ReleaseStringUTFChars(path_, path);

引入的包

#include <android/native_window.h>
#include <android/native_window_jni.h>
#include "EGLUtils.h"
#include "OpenGLUtils.h"
extern "C" {
#include "libavformat/avformat.h"
#include "libavfilter/avfiltergraph.h"
}

因为ffmpeg是用c写的,所以在c++中引入其头文件时要用extern "C"包裹,否则链接时会因C++名称修饰找不到符号而报错

EGLUtils和OpenGLUtils是我自己写的opengl渲染代码

EGLUtils.h

#ifndef VIDEOPLAY_EGLUTILS_H
#define VIDEOPLAY_EGLUTILS_H

#include <EGL/egl.h>

// Owns the EGL display, window surface and OpenGL ES 2 context used to
// render into an ANativeWindow. All EGL state is released in the destructor.
class EGLUtils {
public:

EGLUtils();
~EGLUtils();

// Creates an ES2-capable context on `nativeWindow` and makes it current on
// the calling thread.
void initEGL(ANativeWindow *nativeWindow);

// Presents the current frame (eglSwapBuffers).
void drawEGL();
// Surface dimensions in pixels; valid only after initEGL().
int getWidth();
int getHeight();

private:
// Fix: initialized to the EGL "no object" values so the destructor's
// releaseEGL() is safe even when initEGL() was never called (these were
// previously left uninitialized).
EGLConfig eglConf = nullptr;
EGLSurface eglWindow = EGL_NO_SURFACE;
EGLContext eglCtx = EGL_NO_CONTEXT;
EGLDisplay eglDisp = EGL_NO_DISPLAY;

int windowWidth = 0;
int windowHeight = 0;

// Destroys context/surface and terminates the display.
void releaseEGL();
};

#endif //VIDEOPLAY_EGLUTILS_H

EGLUtils.cpp

#include "EGLUtils.h"
// Nothing to set up until initEGL() is called.
EGLUtils::EGLUtils() = default;
// Tears down whatever EGL state initEGL() created.
EGLUtils::~EGLUtils() {
releaseEGL();
}

// Creates an RGB888 window surface plus an OpenGL ES 2 context on
// `nativeWindow` and makes them current on the calling thread. On any EGL
// failure the function returns early, leaving the untouched members at their
// EGL_NO_* defaults.
void EGLUtils::initEGL(ANativeWindow *nativeWindow) {
    // Fix: EGL_RENDERABLE_TYPE was missing; without it the chosen config is
    // not guaranteed to support an ES2 context.
    EGLint configSpec[] = {EGL_RED_SIZE, 8,
                           EGL_GREEN_SIZE, 8,
                           EGL_BLUE_SIZE, 8,
                           EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
                           EGL_SURFACE_TYPE, EGL_WINDOW_BIT, EGL_NONE};

    eglDisp = eglGetDisplay(EGL_DEFAULT_DISPLAY);
    if (eglDisp == EGL_NO_DISPLAY) {
        return;
    }
    EGLint eglMajVers, eglMinVers;
    if (eglInitialize(eglDisp, &eglMajVers, &eglMinVers) != EGL_TRUE) {
        return;
    }
    EGLint numConfigs = 0;
    if (eglChooseConfig(eglDisp, configSpec, &eglConf, 1, &numConfigs) != EGL_TRUE
        || numConfigs < 1) {
        return;
    }

    eglWindow = eglCreateWindowSurface(eglDisp, eglConf, nativeWindow, NULL);
    if (eglWindow == EGL_NO_SURFACE) {
        return;
    }

    // Cache the surface size for the renderer's viewport math.
    eglQuerySurface(eglDisp, eglWindow, EGL_WIDTH, &windowWidth);
    eglQuerySurface(eglDisp, eglWindow, EGL_HEIGHT, &windowHeight);

    const EGLint ctxAttr[] = {
            EGL_CONTEXT_CLIENT_VERSION, 2,  // request an ES2 context
            EGL_NONE
    };
    eglCtx = eglCreateContext(eglDisp, eglConf, EGL_NO_CONTEXT, ctxAttr);
    if (eglCtx == EGL_NO_CONTEXT) {
        return;
    }
    eglMakeCurrent(eglDisp, eglWindow, eglWindow, eglCtx);
}
// Surface width in pixels; valid only after initEGL() succeeded.
int EGLUtils::getWidth() {
return windowWidth;
}
// Surface height in pixels; valid only after initEGL() succeeded.
int EGLUtils::getHeight(){
return windowHeight;
}
// Unbinds and destroys the EGL context/surface and terminates the display.
// Safe to call more than once: members are reset to the EGL_NO_* values.
void EGLUtils::releaseEGL() {
    // Fix: guard against running when initEGL() never completed (or after a
    // previous release) — the original passed invalid handles to EGL.
    if (eglDisp == EGL_NO_DISPLAY) {
        return;
    }
    eglMakeCurrent(eglDisp, EGL_NO_SURFACE, EGL_NO_SURFACE, EGL_NO_CONTEXT);
    if (eglCtx != EGL_NO_CONTEXT) {
        eglDestroyContext(eglDisp, eglCtx);
    }
    if (eglWindow != EGL_NO_SURFACE) {
        eglDestroySurface(eglDisp, eglWindow);
    }
    eglTerminate(eglDisp);

    eglDisp = EGL_NO_DISPLAY;
    eglWindow = EGL_NO_SURFACE;
    eglCtx = EGL_NO_CONTEXT;
}
// Presents the frame rendered on the current context to the window.
void EGLUtils::drawEGL() {
eglSwapBuffers(eglDisp, eglWindow);
}

OpenGLUtils.h

#ifndef VIDEOPLAY_OPENGLUTILS_H
#define VIDEOPLAY_OPENGLUTILS_H

#include <GLES2/gl2.h>

// Renders decoded YUV420p frames onto the current EGL surface: one luminance
// texture per plane (Y/U/V) plus a conversion shader.
class OpenGLUtils {
public:
OpenGLUtils();
~OpenGLUtils();

// Compiles the YUV shader program and wires up the vertex attributes.
// Requires a current GL context.
void surfaceCreated();

// Records the output surface size, consumed later by viewport().
void surfaceChanged(int width, int height);

// Allocates the three plane textures for a video of the given size.
void initTexture(int width,int height);

// Uploads one frame: Y at full size, U/V at half size each.
void updateTexture(int width,int height,void *bufY,void *bufU,void *bufV);

// Clears and draws the textured full-screen quad.
void surfaceDraw();
// Deletes the GL program and textures.
void release();
private:

// Fix: all handles zero-initialized so release() is harmless before
// surfaceCreated()/initTexture() run (these were previously uninitialized).
GLuint programId = 0;

GLuint aPositionHandle = 0;
GLuint aTextureCoordHandle = 0;
GLuint textureSamplerHandle[3] = {0, 0, 0};

GLuint textureId[3] = {0, 0, 0};

// Heap-allocated quad geometry, owned by this object.
float *vertexData = nullptr;
float *textureVertexData = nullptr;

int viewWidth = 0, viewHeight = 0;
int videoWidth = 0, videoHeight = 0;
int screenWidth = 0, screenHeight = 0;

// Computes a centered, aspect-preserving viewport.
void viewport();

};

#endif

OpenGLUtils.cpp

#include "OpenGLUtils.h"
#include "ShaderUtils.h"

// Allocates the quad geometry used for drawing: positions in triangle-strip
// order (bottom-right, bottom-left, top-right, top-left) and the matching
// texture coordinates.
OpenGLUtils::OpenGLUtils() {
    static const float quadPositions[12] = {
            1.0f, -1.0f, 0.0f,
            -1.0f, -1.0f, 0.0f,
            1.0f, 1.0f, 0.0f,
            -1.0f, 1.0f, 0.0f};
    static const float quadTexCoords[8] = {
            1.0f, 0.0f,  // bottom-right
            0.0f, 0.0f,  // bottom-left
            1.0f, 1.0f,  // top-right
            0.0f, 1.0f}; // top-left

    vertexData = new float[12];
    textureVertexData = new float[8];
    for (int i = 0; i < 12; ++i) {
        vertexData[i] = quadPositions[i];
    }
    for (int i = 0; i < 8; ++i) {
        textureVertexData[i] = quadTexCoords[i];
    }
}

// Releases the GL objects and the heap geometry buffers.
OpenGLUtils::~OpenGLUtils() {
    release();
    // Fix: the arrays allocated in the constructor were never freed (leak).
    delete[] vertexData;
    delete[] textureVertexData;
}

// Builds the YUV->RGB shader program, caches its attribute/uniform handles,
// and binds the client-side vertex arrays. Requires a current GL context.
void OpenGLUtils::surfaceCreated() {
    // Fix: ShaderUtils was needlessly heap-allocated (new/delete); a stack
    // object has identical behavior and is exception-safe.
    ShaderUtils shaderUtils;
    programId = shaderUtils.getYUVShader();

    aPositionHandle = (GLuint) glGetAttribLocation(programId, "aPosition");
    aTextureCoordHandle = (GLuint) glGetAttribLocation(programId, "aTexCoord");

    textureSamplerHandle[0] = (GLuint) glGetUniformLocation(programId, "yTexture");
    textureSamplerHandle[1] = (GLuint) glGetUniformLocation(programId, "uTexture");
    textureSamplerHandle[2] = (GLuint) glGetUniformLocation(programId, "vTexture");

    glUseProgram(programId);
    glEnableVertexAttribArray(aPositionHandle);
    // Stride 12 = 3 floats * 4 bytes (tightly packed positions).
    glVertexAttribPointer(aPositionHandle, 3, GL_FLOAT, GL_FALSE,
                          12, vertexData);

    glEnableVertexAttribArray(aTextureCoordHandle);
    // Stride 8 = 2 floats * 4 bytes (tightly packed texture coords).
    glVertexAttribPointer(aTextureCoordHandle, 2, GL_FLOAT, GL_FALSE, 8, textureVertexData);
}

// Caches the output surface dimensions; viewport() combines them with the
// video size once initTexture() has run.
void OpenGLUtils::surfaceChanged(int width, int height) {
screenWidth = width;
screenHeight = height;
}

void OpenGLUtils::initTexture(int width, int height) {

videoWidth = width;
videoHeight = height;

glGenTextures(1,&textureId[0]);
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D,textureId[0]);
glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MIN_FILTER,
GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, width, height, 0,
GL_LUMINANCE, GL_UNSIGNED_BYTE, NULL);
glUniform1i(textureSamplerHandle[0],0);

glGenTextures(1,&textureId[1]);
glActiveTexture(GL_TEXTURE1);
glBindTexture(GL_TEXTURE_2D,textureId[1]);
glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MIN_FILTER,
GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, width/2, height/2, 0,
GL_LUMINANCE, GL_UNSIGNED_BYTE, NULL);
glUniform1i(textureSamplerHandle[1],1);

glGenTextures(1,&textureId[2]);
glActiveTexture(GL_TEXTURE2);
glBindTexture(GL_TEXTURE_2D,textureId[2]);
glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MIN_FILTER,
GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, width/2, height/2, 0,
GL_LUMINANCE, GL_UNSIGNED_BYTE, NULL);
glUniform1i(textureSamplerHandle[2],2);

viewport();
}

void OpenGLUtils::updateTexture(int width, int height, void *bufY, void *bufU, void *bufV) {
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, textureId[0]);
glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, width, height, GL_LUMINANCE, GL_UNSIGNED_BYTE, bufY);

glActiveTexture(GL_TEXTURE1);
glBindTexture(GL_TEXTURE_2D, textureId[1]);
glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, width/2, height/2, GL_LUMINANCE, GL_UNSIGNED_BYTE, bufU);

glActiveTexture(GL_TEXTURE2);
glBindTexture(GL_TEXTURE_2D, textureId[2]);
glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, width/2, height/2, GL_LUMINANCE, GL_UNSIGNED_BYTE, bufV);

}

// Clears the framebuffer and draws the textured full-screen quad
// (4 vertices as a triangle strip).
void OpenGLUtils::surfaceDraw() {
glClear(GL_DEPTH_BUFFER_BIT | GL_COLOR_BUFFER_BIT);
glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
}

// Letterbox/pillarbox: scales the video to fit inside the surface while
// preserving its aspect ratio, then centers it.
void OpenGLUtils::viewport() {
    int left, top;
    // Fix: the original branched on screen orientation only
    // (screenHeight > screenWidth), which let the video overflow the surface
    // whenever its aspect didn't match. Compare aspect ratios by
    // cross-multiplying (in 64-bit to avoid overflow) instead.
    if ((long long) videoHeight * screenWidth <= (long long) screenHeight * videoWidth) {
        // Video relatively wider than the screen: fit width, bars top/bottom.
        left = 0;
        viewWidth = screenWidth;
        viewHeight = (int) (videoHeight * 1.0f / videoWidth * viewWidth);
        top = (screenHeight - viewHeight) / 2;
    } else {
        // Video relatively taller: fit height, bars left/right.
        top = 0;
        viewHeight = screenHeight;
        viewWidth = (int) (videoWidth * 1.0f / videoHeight * viewHeight);
        left = (screenWidth - viewWidth) / 2;
    }
    glViewport(left, top, viewWidth, viewHeight);
}

// Frees the GL program and the three plane textures. Requires the GL context
// that created them to still be current. The heap geometry arrays allocated
// in the constructor are not freed here.
void OpenGLUtils::release() {
glDeleteProgram(programId);
glDeleteTextures(3,textureId);
}

还有shader文件,用来显示YUV格式的图像

ShaderUtils.h

#ifndef VIDEOPLAY_SHADERUTILS_H
#define VIDEOPLAY_SHADERUTILS_H

#include <GLES2/gl2.h>
// Helper for compiling and linking the YUV->RGB GLSL program.
class ShaderUtils {
public:
// Links a program from the two shader sources; returns 0 on failure.
GLuint createProgram(const char *vertexSource, const char *fragmentSource);

// Compiles one shader stage; returns 0 on failure.
GLuint loadShader(GLenum shaderType, const char *source);

// Builds the built-in YUV420p -> RGB program.
GLuint getYUVShader();
};

#endif

ShaderUtils.cpp

#include <malloc.h>
#include "ShaderUtils.h"
// Stringify macro so the GLSL can be written as plain code. Note: the C
// preprocessor strips comments and newlines from the argument before
// stringification.
#define GET_STR(x) #x
// Vertex shader: passes the position through and flips the texture Y
// coordinate so the top-down video image appears upright.
const char *vertexYUVShaderString = GET_STR(
attribute vec4 aPosition;
attribute vec2 aTexCoord;
varying vec2 vTexCoord;
void main() {
vTexCoord=vec2(aTexCoord.x,1.0-aTexCoord.y);
gl_Position = aPosition;
}
);
// Fragment shader: samples the three planes (U/V recentered around 0) and
// converts YUV -> RGB with BT.601-style coefficients.
const char *fragmentYUVSShaderString = GET_STR(
precision mediump float;
varying vec2 vTexCoord;
uniform sampler2D yTexture;
uniform sampler2D uTexture;
uniform sampler2D vTexture;
void main() {
vec3 yuv;
vec3 rgb;
yuv.r = texture2D(yTexture, vTexCoord).r;
yuv.g = texture2D(uTexture, vTexCoord).r - 0.5;
yuv.b = texture2D(vTexture, vTexCoord).r - 0.5;
rgb = mat3(1.0,       1.0,         1.0,
0.0,       -0.39465,  2.03211,
1.13983, -0.58060,  0.0) * yuv;
gl_FragColor = vec4(rgb, 1.0);
}
);
// Builds the built-in YUV420p -> RGB program; returns 0 on failure.
GLuint ShaderUtils::getYUVShader(){
return createProgram(vertexYUVShaderString,fragmentYUVSShaderString);
}
// Compiles both stages, links them into a program, and returns its id
// (0 on any failure). Shader objects are always deleted before returning:
// once attached, the linked program keeps what it needs.
GLuint ShaderUtils::createProgram(const char *vertexSource, const char *fragmentSource) {
    GLuint vertexShader = loadShader(GL_VERTEX_SHADER, vertexSource);
    if (!vertexShader) {
        return 0;
    }
    GLuint pixelShader = loadShader(GL_FRAGMENT_SHADER, fragmentSource);
    if (!pixelShader) {
        // Fix: the vertex shader leaked on this path in the original.
        glDeleteShader(vertexShader);
        return 0;
    }

    GLuint program = glCreateProgram();
    if (program != 0) {
        glAttachShader(program, vertexShader);
        glAttachShader(program, pixelShader);
        glLinkProgram(program);
        GLint linkStatus = 0;
        glGetProgramiv(program, GL_LINK_STATUS, &linkStatus);
        if (!linkStatus) {
            GLint info_length = 0;
            glGetProgramiv(program, GL_INFO_LOG_LENGTH, &info_length);
            if (info_length) {
                char *buf = (char *) malloc(info_length * sizeof(char));
                if (buf) {
                    // NOTE(review): the log is fetched but never printed;
                    // consider routing it to __android_log_print.
                    glGetProgramInfoLog(program, info_length, NULL, buf);
                    free(buf);
                }
            }
            glDeleteProgram(program);
            program = 0;
        }
    }
    // Fix: shader objects were never deleted in the original (GL resource leak).
    glDeleteShader(vertexShader);
    glDeleteShader(pixelShader);
    return program;
}
// Compiles a single shader stage of the given type; returns the shader id,
// or 0 when creation or compilation fails.
GLuint ShaderUtils::loadShader(GLenum shaderType, const char *source) {
    GLuint shader = glCreateShader(shaderType);
    if (shader == 0) {
        return 0;
    }
    glShaderSource(shader, 1, &source, NULL);
    glCompileShader(shader);

    GLint compileStatus = 0;
    glGetShaderiv(shader, GL_COMPILE_STATUS, &compileStatus);
    if (compileStatus) {
        return shader;
    }

    // Compilation failed: fetch (and discard) the info log, then clean up.
    GLint logLength = 0;
    glGetShaderiv(shader, GL_INFO_LOG_LENGTH, &logLength);
    if (logLength) {
        char *logBuf = (char *) malloc(logLength * sizeof(char));
        if (logBuf) {
            glGetShaderInfoLog(shader, logLength, NULL, logBuf);
        }
        free(logBuf);
    }
    glDeleteShader(shader);
    return 0;
}

shader代码是拿雷神的

这些代码只是显示解码出来的图像,没有做时间矫正,所以就是快进画面,播放速度就是解码速度
内容来自用户分享和网络整理,不保证内容的准确性,如有侵权内容,可联系管理员处理 点击这里给我发消息
标签: