
Android FFmpeg-based simple video player: seeking to a specified frame with av_seek_frame()

2017-11-06 15:26
To jump to a given position, FFmpeg offers av_seek_frame(). The function can only land on keyframes, which is awkward for videos whose keyframes are far apart: you often cannot reach exactly the picture you want.
The timestamp argument of av_seek_frame() is another pitfall. At first I assumed it was seconds multiplied by time_base, but it is not; it is a value in the same units as the pts/dts fields of AVPacket and AVFrame. It does not have to be exact, a rough value is enough, since av_seek_frame() snaps to a keyframe anyway.
So the seek can be written in either of two ways:

av_seek_frame(fmt_ctx, audio_stream_index, pts, AVSEEK_FLAG_BACKWARD);


or
int64_t time = (int64_t) (avStream->duration * 0.01);
av_seek_frame(fmt_ctx, audio_stream_index, time, AVSEEK_FLAG_BACKWARD);
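
If you would rather express the target in seconds, av_rescale_q() does the unit conversion cleanly. A minimal sketch (seek_seconds is a hypothetical variable; fmt_ctx, avStream and audio_stream_index are as above):

// Convert a position in seconds to the stream's time_base units.
double seek_seconds = 33.5;
AVRational second_base = {1, AV_TIME_BASE};   // FFmpeg's internal microsecond base
int64_t target_ts = av_rescale_q((int64_t) (seek_seconds * AV_TIME_BASE),
                                 second_base, avStream->time_base);
av_seek_frame(fmt_ctx, audio_stream_index, target_ts, AVSEEK_FLAG_BACKWARD);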

In the second variant, duration is the total length of the stream, but it is not in seconds or milliseconds; like pts, it is counted in time_base units, so multiplying by 0.01 seeks to roughly the 1% mark.
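
To turn that length into ordinary seconds, multiply by the time base (same avStream as above):

// Total stream length in seconds: duration is counted in time_base units.
double total_seconds = avStream->duration * av_q2d(avStream->time_base);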
In my player I record the timestamp of each decoded frame like this:

double nowTime = frame->pts * av_q2d(avStream->time_base);
long t = (long) (nowTime * 1000);
audio_time = t;
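
Note that frame->pts can be AV_NOPTS_VALUE with some demuxers, in which case the multiplication above produces garbage. av_frame_get_best_effort_timestamp() is a more forgiving source (an alternative I'd suggest, not what this post's code uses):

// Falls back to dts-based estimates when pts is missing.
int64_t ts = av_frame_get_best_effort_timestamp(frame);
double nowTime = ts * av_q2d(avStream->time_base);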


Computing the seek target:

move_time = audio_time/1000.0f + time;


audio_time is in milliseconds, hence the division by 1000; with time = 10 that adds ten seconds (e.g. audio_time = 23500 ms gives move_time = 33.5 s). Then the seek itself:



int64_t k = (int64_t) (move_time / av_q2d(avStream->time_base));
av_seek_frame(fmt_ctx, audio_stream_index, k, AVSEEK_FLAG_BACKWARD);
avcodec_flush_buffers(codec_ctx);
audio_move_time = move_time;

After av_read_frame(), discard every packet whose timestamp is earlier than the target:

if (audio_move_time != -1) {
    double nowTime = pkt->pts * av_q2d(avStream->time_base);
    if (nowTime < audio_move_time) {
        av_packet_unref(pkt);
        continue;
    }
}
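
The same AV_NOPTS_VALUE caveat applies to pkt->pts here. A slightly more defensive variant (my addition; the original code does not check this):

if (audio_move_time != -1 && pkt->pts != AV_NOPTS_VALUE) {
    double nowTime = pkt->pts * av_q2d(avStream->time_base);
    if (nowTime < audio_move_time) {
        av_packet_unref(pkt);   // still before the seek target, skip it
        continue;
    }
}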


With that, you get frame data starting at the requested time, and decoding proceeds as usual with avcodec_send_packet() and avcodec_receive_frame(). One catch: avcodec_send_packet() can sometimes be very slow after a seek done this way, because the first packet we feed it is not a keyframe, and the decoder has to make up for the missing reference data.
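A common alternative that avoids feeding the decoder mid-GOP packets is to send every packet from the keyframe onward and discard the decoded frames, rather than the packets, that precede the target. A minimal sketch, assuming fmt_ctx, codec_ctx, avStream, video_stream_index, pkt and move_time are set up as in the full listing below (this is my suggestion, not what the post's code does):

// Seek back to the keyframe, then decode forward, dropping frames
// until the first one at or after the target time.
int64_t target_ts = (int64_t) (move_time / av_q2d(avStream->time_base));
av_seek_frame(fmt_ctx, video_stream_index, target_ts, AVSEEK_FLAG_BACKWARD);
avcodec_flush_buffers(codec_ctx);
AVFrame *frame = av_frame_alloc();
bool reached = false;
while (!reached && av_read_frame(fmt_ctx, pkt) >= 0) {
    if (pkt->stream_index != video_stream_index) {
        av_packet_unref(pkt);
        continue;
    }
    avcodec_send_packet(codec_ctx, pkt);   // every packet goes to the decoder
    av_packet_unref(pkt);
    while (avcodec_receive_frame(codec_ctx, frame) == 0) {
        double t = frame->pts * av_q2d(avStream->time_base);
        if (t >= move_time) {
            reached = true;                // frame now holds the target picture
            break;
        }
        // frames before the target are decoded only as references; drop them
    }
}

Decoding from the keyframe costs some CPU up front, but the decoder never sees broken references, so the first displayed frame is clean.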
Here is the complete code.

Java:

public void video_move(View view) {
    move(10.0d);
}

public native void move(double time);


C++:

#include <android/native_window.h>
#include <android/native_window_jni.h>
#include <pthread.h>
#include <sys/time.h>
#include <string.h>
#include "EGLUtils.h"
#include "OpenGLUtils.h"
extern "C" {
#include "libavformat/avformat.h"
#include "libavfilter/avfiltergraph.h"
#include "libswresample/swresample.h"
#include "ffmpeg.h"
}

//video thread lock
pthread_mutex_t video_mutex;
pthread_cond_t video_cond;
//timestamp of the current audio frame (ms)
long audio_time = 0;
long start_time = 0;
//current system time in milliseconds
long getCurrentTime() {
    struct timeval tv;
    gettimeofday(&tv, NULL);
    return tv.tv_sec * 1000 + tv.tv_usec / 1000;
}
//absolute time "now + timeout_ms", for pthread_cond_timedwait
timespec waitTime(long timeout_ms) {
    struct timespec abstime;
    struct timeval now;
    gettimeofday(&now, NULL);
    long nsec = now.tv_usec * 1000 + (timeout_ms % 1000) * 1000000;
    abstime.tv_sec = now.tv_sec + nsec / 1000000000 + timeout_ms / 1000;
    abstime.tv_nsec = nsec % 1000000000;
    return abstime;
}
//set when a video seek is requested, cleared once the video thread has seeked
bool isVideoMove = false;
//set when an audio seek is requested, cleared once the audio thread has seeked
bool isAudioMove = false;
//target time of the seek, in seconds
double move_time = 0;
//lock and condition used for the seek handshake
pthread_mutex_t move_mutex;
pthread_cond_t move_cond;
//true while the video thread waits for the audio seek; video seeks first, then audio
bool isMoveWait = false;
extern "C"
JNIEXPORT void JNICALL
Java_com_example_ffmpegrun_MainActivity_videoPlay(JNIEnv *env, jobject instance, jstring path_,
                                                  jobject surface) {
    const char *path = env->GetStringUTFChars(path_, 0);

    //initialize the locks
    pthread_mutex_init(&video_mutex, NULL);
    pthread_cond_init(&video_cond, NULL);

    pthread_mutex_init(&move_mutex, NULL);
    pthread_cond_init(&move_cond, NULL);
    //initialize FFmpeg for the video stream
    av_register_all();
    AVFormatContext *fmt_ctx = avformat_alloc_context();
    if (avformat_open_input(&fmt_ctx, path, NULL, NULL) < 0) {
        return;
    }
    if (avformat_find_stream_info(fmt_ctx, NULL) < 0) {
        return;
    }
    AVStream *avStream = NULL;
    int video_stream_index = -1;
    for (int i = 0; i < fmt_ctx->nb_streams; i++) {
        if (fmt_ctx->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) {
            avStream = fmt_ctx->streams[i];
            video_stream_index = i;
            break;
        }
    }
    if (video_stream_index == -1) {
        return;
    }
    AVCodecContext *codec_ctx = avcodec_alloc_context3(NULL);
    avcodec_parameters_to_context(codec_ctx, avStream->codecpar);

    AVCodec *avCodec = avcodec_find_decoder(codec_ctx->codec_id);
    if (avcodec_open2(codec_ctx, avCodec, NULL) < 0) {
        return;
    }

    //initialize OpenGL
    ANativeWindow *nativeWindow = ANativeWindow_fromSurface(env, surface);

    EGLUtils *eglUtils = new EGLUtils();
    eglUtils->initEGL(nativeWindow);

    OpenGLUtils *openGLUtils = new OpenGLUtils();
    openGLUtils->surfaceCreated();
    openGLUtils->surfaceChanged(eglUtils->getWidth(), eglUtils->getHeight());
    openGLUtils->initTexture(codec_ctx->width, codec_ctx->height);
    //allocate the AVPacket used for reading
    AVPacket *pkt = av_packet_alloc();
    double video_move_time = -1;
    int ret;
    while (1) {
        //seek requested?
        if (isVideoMove) {
            //convert the target time back to time_base units
            int64_t time = (int64_t) (move_time / av_q2d(avStream->time_base));
            //seek to the nearest keyframe before the target
            av_seek_frame(fmt_ctx, video_stream_index,
                          time,
                          AVSEEK_FLAG_BACKWARD);
            //drop the decoder's buffered data
            avcodec_flush_buffers(codec_ctx);
            //the seek itself is done
            isVideoMove = false;
            //remember the target so later packets can be filtered
            video_move_time = move_time;
        }
        if (av_read_frame(fmt_ctx, pkt) < 0) {
            av_packet_unref(pkt);
            break;
        }

        if (pkt->stream_index == video_stream_index) {
            //discard packets earlier than the seek target
            if (video_move_time != -1) {
                double nowTime = pkt->pts * av_q2d(avStream->time_base);
                if (nowTime < video_move_time) {
                    av_packet_unref(pkt);
                    continue;
                }
            }
            ret = avcodec_send_packet(codec_ctx, pkt);
            if (ret < 0 && ret != AVERROR(EAGAIN) && ret != AVERROR_EOF) {
                av_packet_unref(pkt);
                continue;
            }
            AVFrame *yuvFrame = av_frame_alloc();
            ret = avcodec_receive_frame(codec_ctx, yuvFrame);
            if (ret < 0 && ret != AVERROR_EOF) {
                av_packet_unref(pkt);
                av_frame_free(&yuvFrame);
                continue;
            }
            double nowTime = yuvFrame->pts * av_q2d(avStream->time_base);
            //the first frame after a seek is shown immediately;
            //any other frame is synced against the audio clock
            if (video_move_time == -1) {
                //not a post-seek frame: wait so video lines up with audio
                long t = (long) (nowTime * 1000);
                long time = getCurrentTime() - start_time;
                long wait = t - time - audio_time;

                struct timespec abstime = waitTime(wait);
                pthread_mutex_lock(&video_mutex);
                if (!isVideoMove) {
                    pthread_cond_timedwait(&video_cond, &video_mutex, &abstime);
                }
                pthread_mutex_unlock(&video_mutex);
            }
            //upload the YUV planes and render with OpenGL
            openGLUtils->updateTexture(yuvFrame->width, yuvFrame->height,
                                       yuvFrame->data[0], yuvFrame->data[1], yuvFrame->data[2]);
            openGLUtils->surfaceDraw();
            eglUtils->drawEGL();
            av_frame_free(&yuvFrame);
            av_packet_unref(pkt);
            //was this the first frame after a seek?
            if (video_move_time != -1) {
                video_move_time = -1;
                //the video seek is done, let the audio seek start
                pthread_mutex_lock(&move_mutex);
                //replace the requested time with this frame's actual time
                move_time = nowTime;
                if (isAudioMove) {
                    pthread_cond_signal(&move_cond);
                }
                isMoveWait = true;
                pthread_cond_wait(&move_cond, &move_mutex);
                isMoveWait = false;
                pthread_mutex_unlock(&move_mutex);
            }
        }
        av_packet_unref(pkt);
    }

    av_packet_free(&pkt);
    avcodec_close(codec_ctx);
    avformat_close_input(&fmt_ctx);

    pthread_cond_destroy(&video_cond);
    pthread_mutex_destroy(&video_mutex);

    env->ReleaseStringUTFChars(path_, path);
}
#define MAX_AUDIO_FRAME_SIZE (48000 * 4)
extern "C"
JNIEXPORT void JNICALL
Java_com_example_ffmpegrun_MainActivity_audioPlay(JNIEnv *env, jobject instance, jstring path_) {
    const char *path = env->GetStringUTFChars(path_, 0);

    av_register_all();
    AVFormatContext *fmt_ctx = avformat_alloc_context();
    if (avformat_open_input(&fmt_ctx, path, NULL, NULL) < 0) {
        return;
    }
    if (avformat_find_stream_info(fmt_ctx, NULL) < 0) {
        return;
    }
    AVStream *avStream = NULL;
    int audio_stream_index = -1;
    for (int i = 0; i < fmt_ctx->nb_streams; i++) {
        if (fmt_ctx->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_AUDIO) {
            avStream = fmt_ctx->streams[i];
            audio_stream_index = i;
            break;
        }
    }

    if (audio_stream_index == -1) {
        return;
    }
    AVCodecContext *codec_ctx = avcodec_alloc_context3(NULL);
    avcodec_parameters_to_context(codec_ctx, avStream->codecpar);

    AVCodec *avCodec = avcodec_find_decoder(codec_ctx->codec_id);
    if (avcodec_open2(codec_ctx, avCodec, NULL) < 0) {
        return;
    }
    //set up the resampler: decoded format -> 16-bit stereo at the input rate
    SwrContext *swr_ctx = swr_alloc();

    enum AVSampleFormat in_sample_fmt = codec_ctx->sample_fmt;
    enum AVSampleFormat out_sample_fmt = AV_SAMPLE_FMT_S16;
    int in_sample_rate = codec_ctx->sample_rate;
    int out_sample_rate = in_sample_rate;
    uint64_t in_ch_layout = codec_ctx->channel_layout;
    uint64_t out_ch_layout = AV_CH_LAYOUT_STEREO;

    swr_alloc_set_opts(swr_ctx,
                       out_ch_layout, out_sample_fmt, out_sample_rate,
                       in_ch_layout, in_sample_fmt, in_sample_rate,
                       0, NULL);
    swr_init(swr_ctx);

    int out_channel_nb = av_get_channel_layout_nb_channels(out_ch_layout);

    //get an AudioTrack from the Java side
    jclass player_class = env->GetObjectClass(instance);
    jmethodID create_audio_track_mid = env->GetMethodID(player_class, "createAudio",
                                                        "(II)Landroid/media/AudioTrack;");
    jobject audio_track = env->CallObjectMethod(instance, create_audio_track_mid,
                                                out_sample_rate, out_channel_nb);

    jclass audio_track_class = env->GetObjectClass(audio_track);
    jmethodID audio_track_play_mid = env->GetMethodID(audio_track_class, "play", "()V");
    jmethodID audio_track_stop_mid = env->GetMethodID(audio_track_class, "stop", "()V");
    env->CallVoidMethod(audio_track, audio_track_play_mid);

    jmethodID audio_track_write_mid = env->GetMethodID(audio_track_class, "write",
                                                       "([BII)I");

    uint8_t *out_buffer = (uint8_t *) av_malloc(MAX_AUDIO_FRAME_SIZE);

    AVPacket *pkt = av_packet_alloc();

    double audio_move_time = -1;
    int ret;
    while (1) {
        //seek requested?
        if (isAudioMove) {
            //wait until the video side has finished its seek
            pthread_mutex_lock(&move_mutex);
            if (!isMoveWait) {
                pthread_cond_wait(&move_cond, &move_mutex);
            }
            isAudioMove = false;
            pthread_mutex_unlock(&move_mutex);

            //seek the audio stream to (roughly) the same position
            int64_t k = (int64_t) (move_time / av_q2d(avStream->time_base));
            av_seek_frame(fmt_ctx, audio_stream_index, k, AVSEEK_FLAG_BACKWARD);
            avcodec_flush_buffers(codec_ctx);
            audio_move_time = move_time;
        }

        if (av_read_frame(fmt_ctx, pkt) < 0) {
            av_packet_unref(pkt);
            break;
        }
        if (pkt->stream_index == audio_stream_index) {
            //discard packets earlier than the seek target
            if (audio_move_time != -1) {
                double nowTime = pkt->pts * av_q2d(avStream->time_base);
                if (nowTime < audio_move_time) {
                    av_packet_unref(pkt);
                    continue;
                }
            }
            ret = avcodec_send_packet(codec_ctx, pkt);
            if (ret < 0 && ret != AVERROR(EAGAIN) && ret != AVERROR_EOF) {
                av_packet_unref(pkt);
                continue;
            }

            AVFrame *frame = av_frame_alloc();

            ret = avcodec_receive_frame(codec_ctx, frame);
            if (ret < 0 && ret != AVERROR_EOF) {
                av_packet_unref(pkt);
                av_frame_free(&frame);
                continue;
            }
            //record this frame's time as the audio clock (ms)
            double nowTime = frame->pts * av_q2d(avStream->time_base);
            long t = (long) (nowTime * 1000);
            audio_time = t;
            start_time = getCurrentTime();
            //is this the first frame after a seek?
            if (audio_move_time != -1) {
                audio_move_time = -1;
                pthread_mutex_lock(&move_mutex);
                //if the video thread is waiting, wake it up
                if (isMoveWait) {
                    pthread_cond_signal(&move_cond);
                }
                pthread_mutex_unlock(&move_mutex);
            }
            //resample to 16-bit stereo and hand the samples to AudioTrack
            swr_convert(swr_ctx, &out_buffer, MAX_AUDIO_FRAME_SIZE,
                        (const uint8_t **) frame->data,
                        frame->nb_samples);
            int out_buffer_size = av_samples_get_buffer_size(NULL, out_channel_nb,
                                                             frame->nb_samples, out_sample_fmt,
                                                             1);

            jbyteArray audio_sample_array = env->NewByteArray(out_buffer_size);
            jbyte *sample_bytep = env->GetByteArrayElements(audio_sample_array, NULL);

            memcpy(sample_bytep, out_buffer, (size_t) out_buffer_size);
            env->ReleaseByteArrayElements(audio_sample_array, sample_bytep, 0);

            env->CallIntMethod(audio_track, audio_track_write_mid,
                               audio_sample_array, 0, out_buffer_size);

            env->DeleteLocalRef(audio_sample_array);

            av_frame_free(&frame);

            av_packet_unref(pkt);
        }
    }

    env->CallVoidMethod(audio_track, audio_track_stop_mid);
    av_free(out_buffer);
    av_packet_free(&pkt);
    swr_free(&swr_ctx);
    avcodec_close(codec_ctx);
    avformat_close_input(&fmt_ctx);

    env->ReleaseStringUTFChars(path_, path);
}

extern "C"
JNIEXPORT void JNICALL
Java_com_example_ffmpegrun_MainActivity_move(JNIEnv *env, jobject instance, jdouble time) {
    //compute the seek target: current audio position plus the offset in seconds
    move_time = audio_time / 1000.0f + time;
    isAudioMove = true;
    pthread_mutex_lock(&video_mutex);
    isVideoMove = true;
    pthread_mutex_unlock(&video_mutex);
}
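
One more note on the flags: isVideoMove, isAudioMove and move_time are written by the thread calling move() and read by the decode threads with only partial locking. If you run into visibility or tearing problems, making the booleans atomic is a cheap hardening step (a sketch of my own, not part of the original code):

#include <atomic>

std::atomic<bool> isVideoMove(false);   // seek requested for the video thread
std::atomic<bool> isAudioMove(false);   // seek requested for the audio thread
// move_time stays guarded by move_mutex, since it pairs with the condition variable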


The video seeks first, then the audio; once both are done, playback continues normally.