
Linux video capture with x264 encoding

2013-12-17 14:58
#include "stdint.h"
#include <stdlib.h>
#include "x264.h"
#include "x264_config.h"
#include "stdio.h"

/*
SOCK ADD

*/
#include<sys/types.h>
#include<sys/socket.h>
#include<errno.h>
#include<string.h>
#include<unistd.h>
#include<netinet/in.h>
#include<netdb.h>      /* gethostbyname() */
#include<arpa/inet.h>  /* inet_addr() */
#define PORT 8888
#define BUFFER_SIZE 1024
#define DSTIP "192.168.1.10"
#define TEXT "message"
int sockfd,sendbytes;
char buf[BUFFER_SIZE];
struct hostent *host;
struct sockaddr_in serv_addr;
/*
* v4l2Setp1.c
*
*  Created on: 2011-8-9
*      Author: jcracker
*/
#include<stdio.h>
#include<linux/videodev2.h>
#include<fcntl.h>
#include<malloc.h>
#include<sys/mman.h>
#include<sys/ioctl.h>  /* ioctl() */
#include <assert.h>
int fd;
unsigned int min;
#define DEVICE "/dev/video0"
#define CLEAR(x) memset (&(x), 0, sizeof (x))
#define WIDTH 176
#define HEIGHT 144
#define FPS 6
static unsigned int n_buffers = 0;
struct buffer {
void * start;
size_t length;
};
struct x264_encoder{
x264_param_t param;
x264_t *x264;
x264_picture_t *pic_in;
x264_nal_t *headers;
x264_nal_t *nals;
int i_nal;
};
struct x264_encoder *x264_en=NULL ;
FILE *x264Stream;
int i_frame;
#define TOTAL_FRAME_NUMBER 50

struct v4l2_capability cap;
struct v4l2_cropcap cropcap;
struct v4l2_crop crop;
struct v4l2_format fmt;
struct v4l2_requestbuffers req; //request sent to the driver for frame buffers; includes the number of buffers requested
struct buffer * buffers = NULL, *buffers_target =NULL;

/* forward declarations for functions defined after their first use */
void x264Init();
void x264Process(struct v4l2_buffer buf);
void x264_release();

void deviceClose(){

if (-1 == close(fd))
printf("close device failed\n");

fd = -1;

}

void deviceUninit(){

unsigned int i;

for(i = 0;i<n_buffers;++i ){

if(-1 == munmap(buffers[i].start,buffers[i].length)){
printf("wrong~~~munmap\n");
}
}
free(buffers);
}

void captrueStop(){
enum v4l2_buf_type type;

type	=	V4L2_BUF_TYPE_VIDEO_CAPTURE;
if(-1 == ioctl(fd,VIDIOC_STREAMOFF,&type)){
printf("wrong~~ VIDIOC_STREAMOFF\n");
}
}

/**
* Read one frame
*/
int frameRead(){

struct v4l2_buffer buf;

CLEAR(buf);

buf.type	=	V4L2_BUF_TYPE_VIDEO_CAPTURE;
buf.memory	=	V4L2_MEMORY_MMAP;

if(-1 == ioctl(fd,VIDIOC_DQBUF,&buf)){ // dequeue a buffer that already holds one captured frame from the driver's outgoing queue
printf("wrong!VIDIOC_DQBUF\n");
}

/**
* assert() evaluates the expression; if it is false (0), it prints an error
* message to stderr and then calls abort() to terminate the program.
*/
assert(buf.index < n_buffers);
assert(buffers[buf.index].start);

//videoProcess(buffers[buf.index].start);

x264Process(buf);

// Re-queue the buffer so the driver can reuse it; without this the driver
// runs out of buffers after req.count frames and select() keeps timing out.
if(-1 == ioctl(fd,VIDIOC_QBUF,&buf)){
printf("wrong!VIDIOC_QBUF\n");
}

return 1;
}

void mainLoop(){
unsigned int count;
count = TOTAL_FRAME_NUMBER;
x264Init();

for(i_frame=0;i_frame<=count;i_frame++){
//wait (with a 2-second timeout) until the device signals that a frame is ready
for(;;){
fd_set fds;
struct timeval tv;
int r;

FD_ZERO(&fds);
FD_SET(fd,&fds);

/* timeout */
tv.tv_sec	=	2;
tv.tv_usec	=	0;

r = select(fd+1,&fds,NULL,NULL,&tv);

if(-1 == r || 0 == r){
printf("select wrong!\n");
}

if(frameRead()){ // read and encode one frame
break;
}
}
}
x264_release();
}

void captureStart(){

unsigned int i;
enum v4l2_buf_type type;

for(i=0 ; i < n_buffers ; ++i){

struct v4l2_buffer buf;

CLEAR(buf);
//same buffer description as in mmapInit(); here each mapped buffer is queued to the driver
buf.type	=	V4L2_BUF_TYPE_VIDEO_CAPTURE;
buf.memory	=	V4L2_MEMORY_MMAP;
buf.index	=	i;

/**
* Enqueue an empty video buffer into the driver's incoming queue.
* On success the call returns 0; once capture is started, the driver stores
* the captured frames in the queued buffers.
*/
if(-1 == ioctl(fd,VIDIOC_QBUF,&buf)){
printf("VIDIOC_QBUF failed!\n");
}else{
printf("throw empty video buffer Success\n");
}
}

type	=	V4L2_BUF_TYPE_VIDEO_CAPTURE;

/**
* Start capturing: after VIDIOC_STREAMON the driver begins filling the
* queued buffers with video data.
*/
if(-1 == ioctl(fd,VIDIOC_STREAMON,&type)){
printf("VIDIOC_STREAMON 失败!\n");
}

}
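
/*
 * Added summary (not part of the original code): the mmap streaming I/O
 * cycle used throughout this post is
 *   VIDIOC_REQBUFS -> mmap each buffer -> VIDIOC_QBUF all buffers -> VIDIOC_STREAMON,
 * then per frame: select() -> VIDIOC_DQBUF -> encode -> VIDIOC_QBUF,
 * and finally: VIDIOC_STREAMOFF -> munmap -> close.
 */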

void mmapInit(){

CLEAR(req);

req.count		=	4;	//number of buffers requested
req.type		=	V4L2_BUF_TYPE_VIDEO_CAPTURE;
req.memory		=	V4L2_MEMORY_MMAP;

// ask the V4L2 driver to allocate the video buffers
if(-1 == ioctl(fd,VIDIOC_REQBUFS,&req)){
printf("apply to buffer failed!maybe support mmap\n");
}

if(req.count < 2){
printf("buffer memory!!\n");
}

buffers = calloc(req.count,sizeof(*buffers));

//check that the allocation succeeded
if(!buffers){
printf("out of memory\n");
}

//query each allocated buffer and map it into user space
for(n_buffers = 0; n_buffers < req.count;++n_buffers){
struct v4l2_buffer buf; //describes one of the driver's buffers
CLEAR(buf);

buf.type		=	V4L2_BUF_TYPE_VIDEO_CAPTURE;
buf.memory		=	V4L2_MEMORY_MMAP;
buf.index		=	n_buffers;

if(-1 == ioctl(fd,VIDIOC_QUERYBUF,&buf)){ //query the allocated buffer: its state, its offset in kernel space, its length, etc.
printf("Query buffer failed!\n");
}
buffers[n_buffers].length	=	buf.length;
buffers[n_buffers].start	=	mmap(NULL/*let the kernel choose the address*/,buf.length,PROT_READ | PROT_WRITE,MAP_SHARED,fd,buf.m.offset);
if(MAP_FAILED == buffers[n_buffers].start){
printf("mmap failed\n");
}else{
printf("MMAP success%d\n",n_buffers);
}
//After the steps above, the start address and length of each driver buffer are recorded in the buffers array; the four requested buffers hold four consecutively captured frames.
}

}

void openDevice(){

fd = open(DEVICE,O_RDWR | O_NONBLOCK,0);
if(fd == -1){
printf("open cam failed!\n");
}else{
printf("open cam success openDevice()\n");
}
}

void initDevice(){

if(-1 == ioctl(fd,VIDIOC_QUERYCAP,&cap)){
printf("query device function falied~~\n");
}else{
/**
* V4L2_CAP_STREAMING      0x04000000  - streaming I/O ioctls
* V4L2_CAP_VIDEO_CAPTURE  0x00000001  - is a video capture device
*/
printf("query device capabilities success: %8x\n",cap.capabilities);//0x04000001 means V4L2_CAP_STREAMING and V4L2_CAP_VIDEO_CAPTURE are set
}

//check that the device supports video capture
if(!(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE)){ //!(0x04000001 & 0x00000001)
printf("the device not support video capture!\n");
}

//check whether the device supports read/write I/O
if(!(cap.capabilities & V4L2_CAP_READWRITE)){
printf("the device does not have V4L2_CAP_READWRITE capability\n");
}else{
printf("has V4L2_CAP_READWRITE capability\n");
}

//mmap streaming requires the V4L2_CAP_STREAMING capability
if(!(cap.capabilities & V4L2_CAP_STREAMING)){
printf("the device does not have V4L2_CAP_STREAMING capability\n");
}

CLEAR(cropcap);//zero cropcap

cropcap.type =V4L2_BUF_TYPE_VIDEO_CAPTURE;

/**
* On cropcap vs. crop:
* cropcap describes the device's overall cropping capability, while crop
* selects a value within that range.
*/
//query the driver's cropping capability
if(0 == ioctl(fd,VIDIOC_CROPCAP,&cropcap)){
crop.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
crop.c = cropcap.defrect;

//use crop to select the capture window
if(-1 == ioctl(fd,VIDIOC_S_CROP,&crop)){
/* cropping not supported; ignore the error */
}

printf("query drive crop success\n");
}

struct v4l2_fmtdesc fmtdesc;
fmtdesc.index=0;
fmtdesc.type=V4L2_BUF_TYPE_VIDEO_CAPTURE;
printf("Support format:\n");
while(ioctl(fd,VIDIOC_ENUM_FMT,&fmtdesc)!=-1)
{
printf("\t%d.%s\n",fmtdesc.index+1,fmtdesc.description);
fmtdesc.index++;
}

//set the V4L2 capture format
CLEAR(fmt);
fmt.type 				=	V4L2_BUF_TYPE_VIDEO_CAPTURE;
fmt.fmt.pix.width 		=	WIDTH;
fmt.fmt.pix.height		=	HEIGHT;
//fmt.fmt.pix.pixelformat	=	V4L2_PIX_FMT_YUYV;
fmt.fmt.pix.pixelformat		=	V4L2_PIX_FMT_YUV420;//YUV420 is requested here, yet the driver may still deliver 4:2:2 (YUYV)
fmt.fmt.pix.field		=	V4L2_FIELD_INTERLACED;

//apply the format
if(-1 == ioctl(fd,VIDIOC_S_FMT,&fmt)){
printf("set format failed!!!\n");
}

//check whether the requested width/height actually took effect
if(WIDTH != fmt.fmt.pix.width || HEIGHT != fmt.fmt.pix.height){
printf("set width height failed!");
}

//sanity-check bytesperline/sizeimage; some drivers report values that are too small
min = fmt.fmt.pix.width * 2;
if(fmt.fmt.pix.bytesperline < min){
fmt.fmt.pix.bytesperline = min;
}
min = fmt.fmt.pix.bytesperline * fmt.fmt.pix.height;
if(fmt.fmt.pix.sizeimage < min){
fmt.fmt.pix.sizeimage = min;
}

//continue  mmapInit()
mmapInit();
}
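
/*
 * Added sketch (not in the original post): if VIDIOC_S_FMT silently keeps the
 * camera's native YUYV (4:2:2) format instead of the requested YUV420, the
 * mapped buffer is not I420 and the plain memcpy in encode_frame() would feed
 * the encoder wrong data. Under that assumption, a minimal YUYV -> I420
 * conversion could look roughly like this (chroma taken from even rows only);
 * it would be applied to the captured buffer before the memcpy calls.
 */
static void yuyv_to_i420(const unsigned char *src, unsigned char *y,
		unsigned char *u, unsigned char *v, int w, int h)
{
	int row, col;
	for (row = 0; row < h; row++) {
		for (col = 0; col < w; col += 2) {
			const unsigned char *p = src + (row * w + col) * 2; /* Y0 U Y1 V */
			y[row * w + col]     = p[0];
			y[row * w + col + 1] = p[2];
			if ((row & 1) == 0) {
				u[(row / 2) * (w / 2) + col / 2] = p[1];
				v[(row / 2) * (w / 2) + col / 2] = p[3];
			}
		}
	}
}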

/**********x264 Start********************/
static void dumpnal (x264_nal_t *nal)
{
// print the first 20 bytes of the NAL payload
int i = 0;
printf("----->dumpnal nal.p_payload=%d\n",nal->p_payload);
for (i = 0; i < nal->i_payload && i < 20; i++) {
fprintf(stderr, "%02x ", (unsigned char)nal->p_payload[i]);
}

}

void x264Init(){
x264_en = (struct x264_encoder *)malloc(sizeof(struct x264_encoder));
x264_en->pic_in = (x264_picture_t *)malloc(sizeof(x264_picture_t));
x264_en->headers=(x264_nal_t *) malloc(sizeof(x264_nal_t));
x264_en->nals=(x264_nal_t *) malloc(sizeof(x264_nal_t));

x264_param_default(&(x264_en->param));
//param add
if(x264_param_default_preset(&(x264_en->param), "fast", "zerolatency")){
printf("\n\n\n\n\nx264_param_default_preset failed\n\n\n\n\n");
}
//x264_param_apply_fastfirstpass(&(x264_en->param))	;

//then set param

x264_en->param.i_threads=X264_THREADS_AUTO;
x264_en->param.i_frame_total=0;
x264_en->param.i_csp=X264_CSP_I420;
x264_en->param.i_width=WIDTH;
x264_en->param.i_height=HEIGHT;
x264_en->param.i_fps_num=(int)(FPS * 1000+.5);
x264_en->param.i_fps_den=1000;
x264_en->param.i_keyint_min=5;
x264_en->param.i_keyint_max=250;
x264_en->param.i_bframe=0;
x264_en->param.b_repeat_headers=1;
//x264_en->param.rc.i_bitrate=300;

x264_en->param.rc.i_rc_method=X264_RC_ABR;
x264_en->param.rc.i_qp_constant=26;
x264_en->param.rc.b_mb_tree=0;
x264_en->param.rc.i_qp_min=10;
x264_en->param.rc.i_qp_max=30;
x264_en->param.analyse.i_me_method=X264_ME_DIA;

//add soon
//x264_en->param.i_bframe = 2;
//add soon end

printf("x264_en->param.rc.i_qp_max=%d\n",x264_en->param.rc.i_qp_max);
//open encoder
x264_en->x264=x264_encoder_open(&(x264_en->param));
if(!x264_en->x264){
printf("\n\n\n\n\nx264_encoder_open failed\n\n\n\n\n");
}

}
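
/*
 * Added note (not in the original post): with X264_RC_ABR the target bitrate
 * comes from rc.i_bitrate (in kbit/s), while rc.i_qp_constant is only used in
 * CQP mode. If ABR is really wanted, a minimal setup would be, for example:
 *     x264_en->param.rc.i_rc_method = X264_RC_ABR;
 *     x264_en->param.rc.i_bitrate   = 300;   // kbit/s, as in the commented-out line above
 */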

void x264_release(){
x264_picture_clean(x264_en->pic_in);
x264_encoder_close(x264_en->x264);
printf("Load X264 Success\n");
}

x264_nal_t* encode_frame(struct v4l2_buffer buf){

printf("\n\nEnter Encode frame\n\n");
x264_nal_t *nal_t;
int num;
x264_picture_t pic_out;

x264_en->pic_in->img.i_csp = X264_CSP_I420;		/* colorspace: the encoder input here is I420 */
x264_en->pic_in->img.i_plane = 3;			/* number of image planes (Y, U, V) */

//data from YUV
memcpy(x264_en->pic_in->img.plane[0],buffers[buf.index].start,WIDTH*HEIGHT);
memcpy(x264_en->pic_in->img.plane[1],(buffers[buf.index].start+WIDTH*HEIGHT),WIDTH*HEIGHT/4);
memcpy(x264_en->pic_in->img.plane[2],(buffers[buf.index].start+WIDTH*HEIGHT+WIDTH*HEIGHT/4),WIDTH*HEIGHT/4);

x264_en->param.i_frame_total++;

x264_en->pic_in->i_pts= x264_en->param.i_frame_total * x264_en->param.i_fps_den;
printf("x264_en->param.i_frame_total=%d\nx264_en->pic_in->i_pts=%d\n",x264_en->param.i_frame_total,x264_en->pic_in->i_pts);
x264_en->pic_in->i_type=X264_TYPE_AUTO;

//x264_encoder_encode(): the API changed, x264_nal_encode() is no longer a public
//interface, and x264_encoder_encode() already includes x264_encoder_encapsulate_nals
/*
*nal_t: NAL layer data
*num:   number of NAL units; pic_out: the reconstructed output picture
*/
int i_frame_size = x264_encoder_encode(x264_en->x264, &nal_t, &num, x264_en->pic_in, &pic_out);
if(i_frame_size<0){
printf("\n\n\n\n\nx264_encoder_encode falied\n\n\n\n\n");
//fprintf(stderr, "%s: x264_encoder_encode err\n", __func__);
}
if(i_frame_size==0){
printf("\nthere has no nals\n");
}
if(i_frame_size>0){
printf("\nencode frame:%d  nal package numbers=%d\n",i_frame_size,num);
printf("\n***********************info Start*****************************\n");

int j=0;
for(j=0;j<num;j++){
printf("\n\n\nnal[%d]\tsize=%dbytes\tdata16=%0x\tnal_type=%d\n\n\n",j,nal_t[j].i_payload,&nal_t[j],nal_t[j].i_type);
dumpnal(&nal_t[j]);

printf("\n\n\nbefore send nal_t[j].p_payload=%d\n\n\n",*(nal_t[j].p_payload));
//SOCK ADD
if((sendbytes = sendto(sockfd,nal_t[j].p_payload,nal_t[j].i_payload,0,(struct sockaddr *)&serv_addr,sizeof(struct sockaddr))) == -1){
perror("send");
exit(1);
}
printf("\n\nsendbytes:%d\n\n",sendbytes);
//save nal package to file or network
if(x264Stream != NULL){
printf("write data now \n");
int c = fwrite(nal_t[j].p_payload,1,nal_t[j].i_payload,x264Stream);
printf("\nwrite %dbytes ",c);
fflush(x264Stream);
}else{
printf("write data failed");
}

}
printf("\n***********************info End*****************************\n");
}

return nal_t;
}
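
/*
 * Added note (not in the original post): assuming the default b_annexb=1 is
 * left unchanged, every p_payload above already starts with a 00 00 00 01
 * start code, and with b_repeat_headers=1 SPS/PPS precede each keyframe, so
 * simply concatenating the payloads (as done for the file and the UDP
 * packets) produces a raw Annex B H.264 stream.
 */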

void x264Process(struct v4l2_buffer buf){

//1.x264_param_default;
//2.x264_encoder_open
//3.x264_picture_alloc
//3.5 x264_encoder_headers

//3.8 trans data from YUV
//4.x264_encoder_encode
//5.x264_nal_encode---->old copyright
//6.x264_picture_clean
//7.x264_encoder_close

/******************************************
********************progress***************
*/

//x264_picture_alloc: allocate the input picture once, on the first frame;
//allocating it again for every frame would leak the previously allocated planes
if(i_frame == 0){
x264_picture_alloc(x264_en->pic_in,X264_CSP_I420,WIDTH,HEIGHT);
}

//x264_encoder_headers is only needed when b_repeat_headers is 0; with
//b_repeat_headers=1 (set in x264Init) SPS/PPS are emitted with each keyframe
if(!x264_en->param.b_repeat_headers){
printf("\nInit X264 header\n");

if(x264_encoder_headers(x264_en->x264,&(x264_en->headers),&(x264_en->i_nal)) < 0){
printf("\n\n\n\n\nERROR:encoder headers failed\n\n\n\n\n");
}
}

x264_en->nals = encode_frame(buf);

/* Flush delayed frames */
/*   while(x264_encoder_delayed_frames(x264_en->x264 ) )
{
int i_frame_size;
i_frame_size = encode_frame(buf);
if( i_frame_size < 0 )
{
printf("\nFLush delayed frames==%d\n",i_frame_size);
}else{
printf("\nFLush delayed frames==%d\n",i_frame_size);
}
}*/

}

/**********x264 End********************/
int main(){

/*
SOCK ADD
*/

if((host=gethostbyname(DSTIP))==NULL){
perror("gethostbyname");
exit(1);
}

memset(buf,0,sizeof(buf));
sprintf(buf,"%s",TEXT);

if((sockfd = socket(AF_INET,SOCK_DGRAM,0))== -1){
perror("socket");
exit(1);
}

serv_addr.sin_family = AF_INET;
serv_addr.sin_port = htons(PORT);
serv_addr.sin_addr.s_addr = inet_addr(DSTIP);
memset(serv_addr.sin_zero,0,8);

/*
if(connect(sockfd,(struct sockaddr *)&serv_addr,sizeof(struct sockaddr))== -1){
perror("connect");
exit(1);
}

*/
//open the device
openDevice();
//initialize the device
initDevice();
x264Stream = fopen("264data.264","w+b");
//start capturing
captureStart();

//main capture/encode loop
mainLoop();

//stop capturing
captrueStop();
fflush(x264Stream);
fclose(x264Stream);
//unmap and free the buffers
deviceUninit();

//close the device
deviceClose();
close(sockfd);
return 0;
}
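
The program should build with something like gcc capture.c -o capture -lx264 (assuming the x264 development headers are installed; the source file name is arbitrary). The resulting 264data.264 is a raw Annex B stream that players such as ffplay or mplayer can usually open directly. Since every NAL unit is also sent to 192.168.1.10:8888 over UDP, a minimal receiver on that host could look roughly like the sketch below; the output file name recv.264 and the datagram buffer size are assumptions, and each received datagram is simply appended to the file.

#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <unistd.h>
#include <sys/types.h>
#include <sys/socket.h>
#include <netinet/in.h>

int main(void)
{
	int sock;
	struct sockaddr_in addr;
	unsigned char pkt[65536];
	FILE *out = fopen("recv.264","wb");

	if(out == NULL){
		perror("fopen");
		exit(1);
	}

	if((sock = socket(AF_INET,SOCK_DGRAM,0)) == -1){
		perror("socket");
		exit(1);
	}

	memset(&addr,0,sizeof(addr));
	addr.sin_family = AF_INET;
	addr.sin_port = htons(8888);
	addr.sin_addr.s_addr = htonl(INADDR_ANY);
	if(bind(sock,(struct sockaddr *)&addr,sizeof(addr)) == -1){
		perror("bind");
		exit(1);
	}

	for(;;){
		ssize_t n = recvfrom(sock,pkt,sizeof(pkt),0,NULL,NULL);
		if(n <= 0)
			break;
		fwrite(pkt,1,(size_t)n,out);	/* each datagram carries one NAL unit */
		fflush(out);
	}
	fclose(out);
	close(sock);
	return 0;
}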