
FFmpeg + iOS: porting FFmpeg to the iOS platform

2015-04-27 10:44

Disclaimer: this article is original; please do not repost it. I'm sharing it because I have gained so much from CSDN that it's time to give something back.

To be honest, building FFmpeg for Mac OS X and the iOS Simulator is hardly difficult: it is essentially just configure && make && make install, and the result runs fine in the simulator. The real challenge is getting it onto an actual iOS device.
I knew in theory what cross-compiling was, but I had never done it for real. After years of living inside an IDE I was helpless with the gcc options, and with FFmpeg's configure options too. What to do? Chew through them slowly; that's a programmer's lot.
So you have to brush up on this material first; otherwise you just copy someone else's build script and are completely lost the moment something goes wrong. Below is a stripped-down build script that supports MP4 and M3U8. One pitfall: my iMac's simulator target is x86_64, not i386, even though posts online all claim the simulator is i386. Here is the script; if you understand the principles, there is nothing hard about it.

#!/bin/sh

# FFmpeg and iOS SDK version numbers
VERSION="2.1"
SDKVERSION="8.2"

# Minimum supported iOS version
MINSDKVERSION="7.0"

# Source and build directories
SRCDIR="$(pwd)"
BUILDDIR="${SRCDIR}/build"
mkdir -p $BUILDDIR

# Locate the Xcode developer directory
DEVELOPER=`xcode-select -print-path`

# Architectures to build (space-separated so the for loop iterates over them)
ARCHS="x86_64 armv7 arm64"

for ARCH in ${ARCHS}
do
if [ "${ARCH}" == "x86_64" ];
then
PLATFORM="iPhoneSimulator"
EXTRA_CFLAGS="-arch x86_64"
EXTRA_LDFLAGS="-arch x86_64 -mfpu=neon"
EXTRA_CONFIG="--arch=x86_64 --cpu=x86_64"
else
PLATFORM="iPhoneOS"
EXTRA_CFLAGS="-arch ${ARCH} -mfloat-abi=softfp"
EXTRA_LDFLAGS="-arch ${ARCH} -mfpu=neon -mfloat-abi=softfp"
EXTRA_CONFIG="--arch=${ARCH} --disable-armv5te"
fi

make clean

# Trim the build as needed
./configure --prefix="${BUILDDIR}/${ARCH}"         \
--disable-doc                         \
--disable-ffprobe                   \
--disable-ffmpeg                     \
--disable-ffplay                     \
--disable-ffserver                     \
--disable-debug                         \
--disable-zlib                          \
--disable-encoders \
--disable-muxers \
--disable-devices \
--disable-filters \
--disable-bsfs \
--disable-demuxers \
--enable-demuxer=hls \
--enable-demuxer=mpegts \
--enable-demuxer=mpegtsraw \
--enable-demuxer=h264 \
--enable-demuxer=m4v \
--enable-demuxer=mov \
--enable-demuxer=mpegvideo \
--enable-demuxer=yuv4mpegpipe \
--disable-protocols \
--enable-protocol=file \
--enable-protocol=http \
--enable-protocol=hls \
--disable-decoders \
--enable-decoder=h264 \
--enable-decoder=h264_crystalhd \
--enable-decoder=h264_vda \
--enable-decoder=h264_vdpau \
--enable-decoder=aac \
--enable-decoder=aac_latm \
--enable-decoder=tscc \
--enable-decoder=tscc2 \
--enable-decoder=mts2 \
--disable-parsers \
--enable-parser=aac \
--enable-parser=aac_latm \
--enable-parser=h264 \
--enable-parser=mpeg4video \
--enable-cross-compile                 \
--enable-pic                         \
--disable-asm                        \
--target-os=darwin                     \
--enable-small \
${EXTRA_CONFIG}                        \
--cc="${DEVELOPER}/Platforms/${PLATFORM}.platform/Developer/usr/bin/gcc"                                         \
--as="/usr/bin/gas-preprocessor.pl"                                                                                \
--sysroot="${DEVELOPER}/Platforms/${PLATFORM}.platform/Developer/SDKs/${PLATFORM}${SDKVERSION}.sdk"                 \
--extra-cflags="-miphoneos-version-min=${MINSDKVERSION} ${EXTRA_CFLAGS}"                                                        \
--extra-ldflags="-miphoneos-version-min=${MINSDKVERSION} ${EXTRA_LDFLAGS} -isysroot ${DEVELOPER}/Platforms/${PLATFORM}.platform/Developer/SDKs/${PLATFORM}${SDKVERSION}.sdk"

make && make install && make clean

done
Whether to enable hardware acceleration depends on the target processor; pass the relevant options via cflags and ldflags. When you need to merge the per-architecture *.a libraries into a single fat library, the lipo tool can help, as sketched below.
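
A minimal sketch of that merge step, assuming the build/<ARCH>/lib layout produced by the script above (the library name and output path are illustrative; repeat the command for each static library you built):

# Merge the per-architecture archives into one fat library (illustrative paths).
mkdir -p build/universal/lib

lipo -create \
    build/x86_64/lib/libavcodec.a \
    build/armv7/lib/libavcodec.a \
    build/arm64/lib/libavcodec.a \
    -output build/universal/lib/libavcodec.a

# Confirm which architectures the fat library contains.
lipo -info build/universal/lib/libavcodec.a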

All right, in for a penny, in for a pound: here is the test player code, OpenGL for video plus OpenAL for audio. You could just as well use Apple's CoreGraphics plus AudioQueue; that's up to you.

//
//  CorePlayer.c
//  player
//
//  Created by KuaiyuIOS on 15/4/16.
//  Copyright (c) 2015 KuaiyuD. All rights reserved.
//

#include "CorePlayer.h"
#include <stdlib.h>
#include <string.h>

#include <pthread/pthread.h>
#include <OpenAL/al.h>
#include <OpenAL/alc.h>
#include <unistd.h>
#include <errno.h>     /* errno / EINTR used by the sleep helpers */
#include <sys/time.h>  /* struct timeval, gettimeofday, select */

#include "libavcodec/avcodec.h"
#include "libavformat/avformat.h"
#include "libswresample/swresample.h"
#include "libswscale/swscale.h"
#include "libavutil/time.h"

#define kTwoConverImgThread 0

#define QueueSize 26

#define AudioBufferMaxSize 26

#define SYN_PTS 500

#define VideoType PIX_FMT_RGBA
#define AudioType AV_SAMPLE_FMT_S16

#define MAX_AUDIO_FRAME_SIZE 192000 // 1 second of 48khz 32bit audio

//typedef struct videoItem{
//    uint8_t *rgbData;
//    int width;
//    int height;
//    int length;
//    int64_t pts;
//} VideoItem;

typedef struct rgbVideoItem{
uint8_t *rgbData;
int widht;
int height;
int length;
int64_t pts;
}RGBVideoItem;

typedef struct packetquque {
int size;
int lenght;
void **items;  // pointers to the queued items
void (*free_cb)(void *);
double pts;
pthread_mutex_t mutex;
pthread_cond_t cond;
}PacketQueue;

// no local buffering needed: as soon as decoded data arrives it is handed straight to an OpenAL buffer
typedef struct audioQueue{
int inUse[AudioBufferMaxSize];
ALuint alBuffers[AudioBufferMaxSize];
int64_t pts[AudioBufferMaxSize];
int itemIndex;
pthread_mutex_t mutex;
pthread_cond_t cond;

} AudioQueue;

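/*
 * CPContext: the complete player state shared by all threads, including the
 * frame/picture/audio queues, the OpenAL output objects, the FFmpeg codec and
 * conversion contexts, and the play/quit/seek flags plus the audio clock.
 */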
struct CPContext {
//base
char url[256];
double duratioin;
double progress;
double volume;
void *userinfo;
void (*dis_callback)(uint8_t *data,int lenght,int width,int height,void *info);
void (*dis_audio_callback)(uint8_t *audioDta,int lenght,void *info);
void (*init_callback)(int width,int height,void *info);
void (*finish_callback)(int flag,void *info);
CPContex **p_ref;

int eof;
int seek;
int seekPos;

PacketQueue *video_q;
PacketQueue *picture_q;
AudioQueue *audio_q;

pthread_t read_t;
pthread_t decode_audio_t;
pthread_t decode_video_t;
pthread_t decode_video_other_t;
pthread_t display_picture_t;

pthread_mutex_t lock_muxt;
pthread_cond_t lock_cond;
int play;
int last_play;
int quit;

// playback clock
int64_t audio_pts_curr;

//openal
ALCcontext *al_ctx;
ALCdevice *al_device;
ALuint al_source;

int videoLength;
int audioLength;
struct SwsContext *img_convert_ctx;
struct SwrContext *aud_convert_ctx;
AVCodecContext *decode_video_ctx;
AVCodecContext  *decode_audio_ctx;

};

void seconds_sleep(unsigned seconds){
struct timeval tv = {seconds,0};
int err;
do{
err = select(0, NULL, NULL, NULL, &tv);
}while (err < 0 && errno == EINTR);
}

void suseconds_sleep(double suseconds){
struct timeval tv = {0,(int)(suseconds * 1000)};
int err;
do{
err = select(0, NULL, NULL, NULL, &tv);
}while (err < 0 && errno == EINTR);
}

#pragma mark ==============================
#pragma mark ==[queue item free callbacks]==
#pragma mark ==============================

void packet_quque_free_callback(void *item)
{
AVPacket *packet = (AVPacket *)item;
av_free_packet(packet);
}

void frame_queue_free_callback(void *item){
AVFrame *frame = (AVFrame *)item;
av_frame_free(&frame);
}

void rgb_video_quque_free_callback(void *item)
{
RGBVideoItem *videoitem = (RGBVideoItem *)item;
av_free(videoitem->rgbData);
av_free(item);
}

#pragma mark =========================
#pragma mark ==[queue operations]==
#pragma mark =========================
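
/*
 * PacketQueue is a bounded FIFO guarded by a mutex/condition pair:
 * queue_push blocks while the queue is full, queue_get blocks while it is
 * empty (and bails out on quit/eof), and stored items are released through
 * the free_cb supplied at queue_init time.
 */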

int queue_init(PacketQueue **q,void (*free_cb)(void *)){
PacketQueue *tempq = av_mallocz(sizeof(PacketQueue));
tempq->size = QueueSize;
tempq->items = (void **)av_mallocz(sizeof(void *) * QueueSize);
tempq->free_cb = free_cb;

pthread_mutex_init(&tempq->mutex, NULL);
pthread_cond_init(&tempq->cond, NULL);

*q = tempq;
return 0;
}

int queue_flush(PacketQueue *q)
{
pthread_mutex_lock(&q->mutex);

for(int i = 0;i < q->lenght;i++)
{
q->free_cb(q->items[i]);
}

q->lenght = 0;

pthread_mutex_unlock(&q->mutex);

return 0;
}

int queue_uninit(PacketQueue **q){
PacketQueue *tempq = *q;
pthread_cond_broadcast(&tempq->cond);
queue_flush(tempq);
av_free((void *)tempq->items);
pthread_mutex_destroy(&tempq->mutex);
pthread_cond_destroy(&tempq->cond);
av_free(tempq);
*q = NULL;
return 0;
}

int queue_push(PacketQueue *q,void *item)
{
pthread_mutex_lock(&q->mutex);
while (q->lenght + 1 >= q->size)
{
pthread_cond_wait(&q->cond, &q->mutex);
}

q->items[q->lenght] = item;
q->lenght ++;
pthread_mutex_unlock(&q->mutex);

pthread_cond_signal(&q->cond);
return 0;
}

int queue_get(PacketQueue *q,void **item ,void *info){
CPContex *ctx = (CPContex *)info;
pthread_mutex_lock(&q->mutex);
while (q->lenght <= 0) {
pthread_cond_wait(&q->cond, &q->mutex);
if(ctx->quit || ctx->eof)
{
*item = NULL;
pthread_mutex_unlock(&q->mutex);
return -1;

}
}
q->lenght --;

*item = q->items[0];
if(q->lenght > 0) // shift the remaining items forward
{
memcpy(&q->items[0], &q->items[1], sizeof(void *) * q->lenght);
}
q->items[q->lenght] = NULL;
pthread_mutex_unlock(&q->mutex);

pthread_cond_signal(&q->cond);
return 0;
}

int audio_queue_init(AudioQueue **q){
AudioQueue *tempq = av_mallocz(sizeof(AudioQueue));
pthread_mutex_init(&tempq->mutex, NULL);
pthread_cond_init(&tempq->cond, NULL);
*q = tempq;
return 0;
}
int audio_queue_uninit(AudioQueue **q){
AudioQueue *tempq = *q;

pthread_cond_broadcast(&tempq->cond);

pthread_mutex_lock(&tempq->mutex);
for(int i = 0;i < AudioBufferMaxSize;i++){
if(tempq->alBuffers[i] != 0)
{
alDeleteBuffers(1, &tempq->alBuffers[i]);
}
}
pthread_mutex_unlock(&tempq->mutex);

pthread_mutex_destroy(&tempq->mutex);
pthread_cond_destroy(&tempq->cond);

*q = NULL;
return 0;
}

int audio_queue_put(AudioQueue *q,uint8_t *audioData,int length,int64_t pts,void *info){
CPContex *ctx = (CPContex *)info;
pthread_mutex_lock(&q->mutex);
while (q->inUse[q->itemIndex]) {
pthread_cond_wait(&q->cond, &q->mutex);

if(ctx->quit || ctx->eof)
{
pthread_mutex_unlock(&q->mutex);
return -1;
}

}
q->inUse[q->itemIndex] = 1;
pthread_mutex_unlock(&q->mutex);

ALuint buffid = q->alBuffers[q->itemIndex];
if(buffid <= 0){
alGenBuffers(1, &buffid);
q->alBuffers[q->itemIndex] = buffid;
}

alBufferData(buffid, AL_FORMAT_STEREO16, audioData, length, ctx ->decode_audio_ctx->sample_rate);
alSourceQueueBuffers(ctx->al_source, 1, &buffid);
q->pts[q->itemIndex] = pts;
//    ctx->audio_pts_curr = pts;

if(ctx->dis_audio_callback)
{
ctx->dis_audio_callback(audioData,length,ctx->userinfo);
}

q->itemIndex ++;
if(q->itemIndex >= AudioBufferMaxSize) q->itemIndex = 0;

return 0;
}

int audio_queue_flush(AudioQueue *q){
return 0;
}

#pragma mark =========================
#pragma mark ==[worker threads]==
#pragma mark =========================

static const char * read_thread_name = "com.CorePlayer.read";
static const char * decode_audio_thread_name = "com.CorePlayer.decodeAudio";
static const char * decode_video_thread_name = "com.CorePlayer.decodeVideo";
static const char * display_picture_thread_name = "com.CorePlayer.displayPicture";
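
/*
 * Threading model: read_thread demuxes and decodes packets and feeds the
 * queues; decode_video_thread converts decoded frames to RGB and hands them
 * to display_picture_thread, which invokes the display callback; and
 * decode_audio_thread recycles the OpenAL buffers that have finished playing
 * and keeps the audio clock (audio_pts_curr) up to date.
 */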

void * decode_audio_thread(void *info)
{
pthread_setname_np(decode_audio_thread_name);
CPContex *ctx = (CPContex *)info;

while (ctx->quit == 0 && ctx->eof == 0) {

pthread_mutex_lock(&ctx->lock_muxt);
while (!ctx->play) {
pthread_cond_wait(&ctx->lock_cond, &ctx->lock_muxt);
}
pthread_mutex_unlock(&ctx->lock_muxt);

if(ctx->quit || ctx->eof) break;

// brief pause between polls of the OpenAL source
suseconds_sleep(0.75);

if(ctx->quit || ctx->eof) break;

if(!ctx->play) continue;

// query the OpenAL source state and reclaim processed buffers
ALint stateValue;
int processed,queued;
ALfloat sec_off;
alGetSourcei(ctx->al_source, AL_BUFFERS_PROCESSED, &processed);
alGetSourcei(ctx->al_source, AL_BUFFERS_QUEUED, &queued);
alGetSourcei(ctx->al_source, AL_SOURCE_STATE, &stateValue);
alGetSourcef(ctx->al_source, AL_SEC_OFFSET, &sec_off);
ctx->progress += sec_off;

while (processed)
{
ALuint buffId;
alSourceUnqueueBuffers(ctx->al_source, 1, &buffId);
processed --;

int buffIndex = -1;
for (int i = 0; i <  AudioBufferMaxSize;i ++)
{
if(buffId == ctx->audio_q->alBuffers[i])
{
buffIndex = i;
break;
}
}
if(buffIndex != -1)
{
ctx->audio_pts_curr = ctx->audio_q->pts[buffIndex];

pthread_mutex_lock(&ctx->audio_q->mutex);
ctx->audio_q->inUse[buffIndex] = 0;
pthread_cond_signal(&ctx->audio_q->cond);
pthread_mutex_unlock(&ctx->audio_q->mutex);
}

}

if(stateValue != AL_PLAYING && queued >= 5){
alSourcePlay(ctx->al_source);
}
else if(stateValue == AL_PLAYING && queued < 5){
alSourcePause(ctx->al_source);
}

}

return NULL;
}

void * display_picture_thread(void *info)
{
pthread_setname_np(display_picture_thread_name);
CPContex *ctx = (CPContex *)info;

while (ctx->quit == 0 && ctx->eof == 0) {

pthread_mutex_lock(&ctx->lock_muxt);
while (!ctx->play) {
pthread_cond_wait(&ctx->lock_cond, &ctx->lock_muxt);
}
pthread_mutex_unlock(&ctx->lock_muxt);

if(ctx->quit || ctx->eof) break;

void *item = NULL;
queue_get(ctx->picture_q, (void **)&item,info);
if(item != NULL)
{

RGBVideoItem *newItem = (RGBVideoItem *)item;
if(ctx->dis_callback /*&& newItem->pts != AV_NOPTS_VALUE*/ ) ctx->dis_callback(newItem->rgbData ,newItem->length,newItem->widht,newItem->height,ctx->userinfo);

ctx->picture_q->free_cb(item);
}

}
return NULL;
}

void decode_video_inline(void *info){

CPContex *ctx = (CPContex *)info;
AVFrame *pframeRGB = av_frame_alloc();

while (ctx->quit == 0 && ctx->eof == 0) {

pthread_mutex_lock(&ctx->lock_muxt);
while (!ctx->play) {
pthread_cond_wait(&ctx->lock_cond, &ctx->lock_muxt);
}
pthread_mutex_unlock(&ctx->lock_muxt);

if(ctx->quit || ctx->eof) break;

while (ctx->seek){ // wait for the seek-triggered flush to finish
suseconds_sleep(0.2);
}

if(ctx->quit || ctx->eof) break;

AVFrame *pframe = NULL;
queue_get(ctx->video_q, (void **)&pframe,info);
if(pframe != NULL){
uint8_t *buff = (uint8_t *)av_mallocz(ctx->videoLength);
avpicture_fill((AVPicture *)pframeRGB, buff, VideoType, ctx->decode_video_ctx->width, ctx->decode_video_ctx->height);
sws_scale(ctx->img_convert_ctx, (const uint8_t * const *)pframe->data, pframe->linesize, 0, ctx->decode_video_ctx->height, pframeRGB->data, pframeRGB->linesize);

RGBVideoItem *item = av_mallocz(sizeof(RGBVideoItem));
item->rgbData = buff;
item->widht = ctx->decode_video_ctx->width;
item->height = ctx->decode_video_ctx->height;
item->length = ctx->videoLength;
item->pts = pframe->pts == AV_NOPTS_VALUE ? (pframe->pkt_pts == AV_NOPTS_VALUE ? pframe->pkt_dts : pframe->pkt_pts) : pframe->pts;
queue_push(ctx->picture_q, item);

ctx->video_q->free_cb(pframe);
}

}

av_frame_free(&pframeRGB);
}

#if kTwoConverImgThread
static const char * decode_video_thread_other = "com.CorePlayer.other.decodeVideo";
void *decode_video_thread_two(void *info){
pthread_setname_np(decode_video_thread_other);

decode_video_inline(info);

return NULL;
}
#endif

void *decode_video_thread(void *info){
pthread_setname_np(decode_video_thread_name);
CPContex *ctx = (CPContex *)info;

#if kTwoConverImgThread
pthread_create(&ctx->decode_video_other_t, NULL, decode_video_thread_two , info);
#endif

queue_init(&ctx->picture_q, rgb_video_quque_free_callback);
pthread_create(&ctx->display_picture_t, NULL, display_picture_thread, info);

decode_video_inline(info);

#if kTwoConverImgThread
pthread_join(ctx->decode_video_other_t, NULL);
#endif

queue_uninit(&ctx->picture_q);
pthread_join(ctx->display_picture_t, NULL);

return NULL;
}

void * read_thread(void *info)
{
pthread_setname_np(read_thread_name);
pthread_detach(pthread_self());
CPContex *ctx = (CPContex *)info;
// perform the required initialization

ctx->al_device = alcOpenDevice(NULL);
if(ctx->al_device != 0){
ctx->al_ctx = alcCreateContext(ctx->al_device, NULL);
alcMakeContextCurrent(ctx->al_ctx);
}
alGenSources(1, &ctx->al_source);
alSourcei(ctx->al_source, AL_LOOPING, AL_FALSE);
alSourcef(ctx->al_source, AL_GAIN, 1.0f); // volume (AL_GAIN is a float parameter)
alSpeedOfSound(1.0f);

queue_init(&ctx->video_q, frame_queue_free_callback);
audio_queue_init(&ctx->audio_q);

pthread_mutex_init(&ctx->lock_muxt, NULL);
pthread_cond_init(&ctx->lock_cond, NULL);

av_register_all();
avformat_network_init();

int ret = -1,stream_index_a = -1,stream_index_v = -1;
AVFormatContext *fmt_ctx = NULL;
AVCodec *dec_a = NULL,*dec_v = NULL;
AVCodecContext *dec_ctx_a = NULL,*dec_ctx_v = NULL;
AVPacket pkt,*pktptr = &pkt;
AVFrame *pframe = av_frame_alloc(),*pframeRGB = av_frame_alloc();
int frameFinished = 0,audioLength = 0,videoLength = 0;
struct SwsContext *img_convert_ctx = NULL;
struct SwrContext *aud_convert_ctx = NULL;
pthread_mutex_t wait_muxt;
pthread_cond_t wait_cond;

pthread_mutex_init(&wait_muxt, NULL);
pthread_cond_init(&wait_cond, NULL);

if((ret = avformat_open_input(&fmt_ctx, ctx->url, NULL, NULL)) < 0){
printf("open file fail\n");
goto lable;
}
if((ret  = avformat_find_stream_info(fmt_ctx, NULL)) < 0){
printf("not more stream info \n");
goto lable;
}
ctx->duratioin = (double)fmt_ctx->duration / AV_TIME_BASE; // floating-point division keeps sub-second precision

ret = av_find_best_stream(fmt_ctx, AVMEDIA_TYPE_VIDEO, -1, -1, &dec_v, 0);
if(ret != AVERROR_STREAM_NOT_FOUND){
stream_index_v = ret;
dec_ctx_v = fmt_ctx->streams[ret]->codec;
ret = avcodec_open2(dec_ctx_v, dec_v, NULL);
if(ret < 0)
{
printf("open video decode fail\n");
}
}

ret = av_find_best_stream(fmt_ctx, AVMEDIA_TYPE_AUDIO, -1, -1, &dec_a, 0);
if(ret != AVERROR_STREAM_NOT_FOUND){
stream_index_a = ret;
dec_ctx_a = fmt_ctx->streams[ret]->codec;
ret = avcodec_open2(dec_ctx_a, dec_a, NULL);
if(ret < 0){
printf("open audio decode fail\n");
}
}
if(stream_index_v == -1 && stream_index_a == -1)
{
printf("no  effect stream \n");
goto lable;
}

if(stream_index_v >= 0){
videoLength = avpicture_get_size(VideoType, dec_ctx_v->width, dec_ctx_v->height);
img_convert_ctx  = sws_getContext(dec_ctx_v->width, dec_ctx_v->height, dec_ctx_v->pix_fmt, dec_ctx_v->width, dec_ctx_v->height, VideoType, SWS_FAST_BILINEAR, NULL, NULL, NULL);

ctx->decode_video_ctx = dec_ctx_v;
ctx->videoLength = videoLength;
ctx->img_convert_ctx = img_convert_ctx;

pthread_create(&ctx->decode_video_t, NULL, decode_video_thread, info);
}
if(stream_index_a >= 0){
audioLength = av_samples_get_buffer_size(NULL, av_get_channel_layout_nb_channels(AV_CH_LAYOUT_STEREO), dec_ctx_a->frame_size, AudioType, 1);
aud_convert_ctx = swr_alloc_set_opts(NULL, AV_CH_LAYOUT_STEREO, AudioType, dec_ctx_a->sample_rate, dec_ctx_a->channel_layout, dec_ctx_a->sample_fmt, dec_ctx_a->sample_rate, 0, NULL);
swr_init(aud_convert_ctx);

ctx->aud_convert_ctx = aud_convert_ctx;
ctx->decode_audio_ctx = dec_ctx_a;
ctx->audioLength = audioLength;

pthread_create(&ctx->decode_audio_t, NULL, decode_audio_thread, info);
}

if(0){ /* flip to 1 to dump the stream layout for debugging */
av_dump_format(fmt_ctx, 0, ctx->url, 0);
}

if(ctx->init_callback && stream_index_v >= 0){ // guard: dec_ctx_v is NULL when there is no video stream
ctx->init_callback(dec_ctx_v->width,dec_ctx_v->height,ctx->userinfo);
}

pthread_mutex_lock(&ctx->lock_muxt);
while (!ctx->play) {
pthread_cond_wait(&ctx->lock_cond, &ctx->lock_muxt);
}
pthread_mutex_unlock(&ctx->lock_muxt);

while (ctx->quit == 0 && ctx->eof == 0) {

// pause or resume the network read when the play state changes
if(ctx->play != ctx->last_play)
{
ctx->last_play = ctx->play;
if(ctx->play)
av_read_play(fmt_ctx);
else
av_read_pause(fmt_ctx);
}

// block here while playback is paused
pthread_mutex_lock(&ctx->lock_muxt);
while (!ctx->play) {
pthread_cond_wait(&ctx->lock_cond, &ctx->lock_muxt);
}
pthread_mutex_unlock(&ctx->lock_muxt);

if(ctx->quit || ctx->eof) break;

// handle a pending seek request
if(ctx->seek)
{
//new api   for seek
int seek_ret = avformat_seek_file(fmt_ctx, -1, (ctx->seekPos - 10) * AV_TIME_BASE, (ctx->seekPos) * AV_TIME_BASE, (ctx->seekPos + 10) *AV_TIME_BASE, AVSEEK_FLAG_ANY);
if(seek_ret < 0){
printf("error while seeking\n");
}else{
if(stream_index_v >= 0)
{
queue_flush(ctx->video_q);
}
if(stream_index_a >= 0)
{
audio_queue_flush(ctx->audio_q);
ctx->progress = ctx->seekPos;
}

// TODO: update the external clock here
}
ctx->seek = 0;
ctx->eof = 0;
}
frameFinished = 0;
ret = av_read_frame(fmt_ctx, pktptr);
if(ret < 0){
if((ret == AVERROR_EOF || avio_feof(fmt_ctx->pb)) && !ctx->eof){
ctx->eof = 1;
}
if(fmt_ctx->pb && fmt_ctx->pb->error)
break;

/* non-fatal read error: wait briefly before retrying
(pthread_cond_timedwait expects an absolute deadline) */
if(!ctx->eof){
struct timeval now;
gettimeofday(&now, NULL);
struct timespec tm = {now.tv_sec + 1, now.tv_usec * 1000};

pthread_mutex_lock(&wait_muxt);
pthread_cond_timedwait(&wait_cond, &wait_muxt, &tm);
pthread_mutex_unlock(&wait_muxt);
}
continue;
}
else{
ctx->eof = 0;
}

if(pktptr->stream_index == stream_index_v)
{
ret = avcodec_decode_video2(ctx->decode_video_ctx,pframe, &frameFinished, pktptr);
if(ret < 0)
break;
if(frameFinished)
{

AVFrame *newFrame = av_frame_alloc();

av_frame_copy_props(newFrame, pframe);
newFrame->format =  pframe->format;
newFrame->width = dec_ctx_v->width;
newFrame->height = dec_ctx_v->height;
ret  = av_frame_get_buffer(newFrame, 1);
av_frame_copy(newFrame, pframe);

queue_push(ctx->video_q, newFrame);
}

}
else if(pktptr->stream_index == stream_index_a)
{
ret = avcodec_decode_audio4(dec_ctx_a, pframe, &frameFinished, pktptr);
if(ret < 0)
break;
if(frameFinished){
uint8_t *buff = (uint8_t *)av_mallocz(MAX_AUDIO_FRAME_SIZE * 2);
swr_convert(aud_convert_ctx, &buff, MAX_AUDIO_FRAME_SIZE, (const uint8_t **)pframe->data, pframe->nb_samples);
int64_t pts = (pframe->pts == AV_NOPTS_VALUE ? pframe->pkt_pts : pframe->pts);
audio_queue_put(ctx->audio_q, buff, audioLength, pts,info);
av_free(buff);
}
}
av_free_packet(pktptr);

}

lable:

avformat_network_deinit();

pthread_mutex_destroy(&wait_muxt);
pthread_cond_destroy(&wait_cond);

av_frame_free(&pframe);
av_frame_free(&pframeRGB);

pthread_mutex_destroy(&ctx->lock_muxt);
pthread_cond_destroy(&ctx->lock_cond);

ctx->eof = 1;

audio_queue_uninit(&ctx->audio_q);
queue_uninit(&ctx->video_q);

if(ctx->decode_video_t != 0)  pthread_join(ctx->decode_video_t, NULL);
if(ctx->decode_audio_t != 0) pthread_join(ctx->decode_audio_t, NULL);

if(aud_convert_ctx != NULL) swr_free(&aud_convert_ctx);
if(img_convert_ctx != NULL) sws_freeContext(img_convert_ctx);

if(dec_ctx_a) avcodec_close(dec_ctx_a);
if(dec_ctx_v) avcodec_close(dec_ctx_v);
if(fmt_ctx) avformat_close_input(&fmt_ctx);

alSourceStop(ctx->al_source);
alDeleteSources(1, &ctx->al_source);
if(ctx->al_device){
alcCloseDevice(ctx->al_device);
}

if(ctx->finish_callback){
int quit_flag = (ctx->quit == 1) ? 1 : 0;
ctx->finish_callback(quit_flag,ctx->userinfo);
}
*(ctx->p_ref) = NULL;
av_free(ctx);

return NULL;
}

#pragma mark =========================
#pragma mark ==[external control methods]==
#pragma mark =========================

int CP_start(CPContex *cp_ctx){
if(! cp_ctx ) return -1;
pthread_mutex_lock(&cp_ctx->lock_muxt);
cp_ctx->play = 1;
pthread_cond_broadcast(&cp_ctx->lock_cond);
pthread_mutex_unlock(&cp_ctx->lock_muxt);

alSourcePlay(cp_ctx->al_source);

return 0;
}
int CP_pasue(CPContex *cp_ctx){
if(! cp_ctx ) return -1;
pthread_mutex_lock(&cp_ctx->lock_muxt);
cp_ctx->play = 0;
pthread_mutex_unlock(&cp_ctx->lock_muxt);

alSourcePause(cp_ctx->al_source);

return 0;
}
int CP_stop(CPContex *cp_ctx){
if(! cp_ctx ) return -1;
cp_ctx->quit = 1;
if(cp_ctx->play == 0){
pthread_mutex_lock(&cp_ctx->lock_muxt);
cp_ctx->play = 1;
pthread_cond_broadcast(&cp_ctx->lock_cond);
pthread_mutex_unlock(&cp_ctx->lock_muxt);
}
return 0;
}

int CP_init(CPContex **cp_ctx,
const char *url,
void (*dis_callback)(uint8_t *data,int lenght,int width,int height,void *info),
void (*dis_audio_callback)(uint8_t *audioData,int length,void *info),
void (*init_callback)(int width,int height,void *info),
void *userinfo)
{
// checkurl

CPContex *temp = av_mallocz(sizeof(CPContex));
strncpy(temp->url, url, sizeof(temp->url) - 1); // avoid overflowing the fixed-size url buffer
temp->userinfo = userinfo;
temp->dis_callback = dis_callback;
temp->dis_audio_callback = dis_audio_callback;
temp->init_callback = init_callback;
temp->p_ref = cp_ctx;
//
pthread_create(&temp->read_t, NULL, read_thread, temp);

*cp_ctx = temp;
return 0;
}

int CP_set_finish_callback(CPContex *cp_ctx,void (*finish_callback)(int flag,void*info)){
if(! cp_ctx ) return -1;
cp_ctx->finish_callback = finish_callback;
return 0;
}

int CP_seek(CPContex *cp_ctx,double pos,int flag)
{
if(! cp_ctx ) return -1;

if(cp_ctx->seek == 1) return -2;
if(flag == 0){
cp_ctx->seek = 1;
cp_ctx->seekPos = pos;
}
return 0;
}

int CP_get_progress(CPContex *cp_ctx,double *progress){
if(! cp_ctx ) return -1;
*progress = cp_ctx->progress;
return 0;
}
int CP_get_duration(CPContex *cp_ctx,double *duration){
if(! cp_ctx ) return -1;
*duration = cp_ctx->duratioin;
return 0;
}

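For completeness, here is a minimal usage sketch of the CP_* API from the host app. It is only an illustration: the callback bodies, the test URL and the app-side function names are placeholders, and it assumes CorePlayer.h declares the CPContex handle and the CP_* prototypes exactly as defined above.

#include "CorePlayer.h"
#include <stdint.h>
#include <stdio.h>

/* Placeholder callbacks: a real app would upload the RGBA frame to an OpenGL
   texture and, if it taps the audio callback, feed the PCM data elsewhere. */
static void on_init(int width, int height, void *info)  { printf("video size %dx%d\n", width, height); }
static void on_video(uint8_t *data, int length, int width, int height, void *info) { /* draw the frame */ }
static void on_audio(uint8_t *audioData, int length, void *info) { /* optional PCM tap */ }
static void on_finish(int flag, void *info) { printf("playback finished, quit=%d\n", flag); }

static CPContex *player = NULL;

void start_playback(void)
{
    /* CP_init spawns the read thread; CP_start sets the play flag and wakes the workers. */
    CP_init(&player, "http://example.com/test.m3u8", on_video, on_audio, on_init, NULL);
    CP_set_finish_callback(player, on_finish);
    CP_start(player);
}

void stop_playback(void)
{
    /* CP_stop sets quit; the read thread tears everything down and clears the handle via p_ref. */
    if (player) CP_stop(player);
}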

Below are test screenshots from the iOS Simulator and an iPhone 5.


Tags: iOS, ffmpeg, porting, h264, video