python 调用c++ ffmpeg接收yuv
2018-02-14 16:55
567 查看
26张图片:
http://blog.csdn.net/u011430225/article/details/51462311
index=1
def trans(s):
    """Render a byte sequence as a Python bytes-literal string, e.g. b'\\x01\\xff'."""
    hex_parts = ['\\x%.2x' % byte for byte in s]
    return "b'%s'" % ''.join(hex_parts)
def getStreamCallback(a, b):
    """ctypes callback invoked from the C++ DLL with one decoded frame.

    Args:
        a: C pointer (void*) to the start of the YUV420P frame buffer.
        b: length of that buffer in bytes.

    Copies the native buffer into Python, wraps it in a numpy array and
    converts it via getimage(), printing timestamps around the conversion
    for rough per-frame timing.
    """
    raw = string_at(a, b)  # copy exactly b bytes out of the C buffer
    counter = datetime.datetime.now().strftime('%Y%m%d_%H%M%S_%f')
    print(1, counter)
    # np.fromstring is deprecated for binary input; np.frombuffer is the
    # supported replacement. .copy() keeps the array writable, matching
    # fromstring's copy semantics (frombuffer alone is read-only on bytes).
    nparr = np.frombuffer(raw, np.uint8).copy()
    # project-local YUV->RGB conversion; assumed defined by the importing script
    rgb = getimage(nparr)
    counter = datetime.datetime.now().strftime('%Y%m%d_%H%M%S_%f')
    print(2, counter)
if __name__ == '__main__':
    # Load the native decoder DLL sitting next to this script.
    native = CDLL(r"./dllt1.dll")
    # ctypes prototype matching the C side: void* cb(void* data, int length)
    FrameCallback = CFUNCTYPE(c_void_p, c_void_p, c_int)
    frame_cb = FrameCallback(getStreamCallback)
    # Blocks inside the DLL's receive/decode loop, invoking frame_cb per frame.
    native.ffmpeg_recv(frame_cb)
c++保存yuv
#include "stdafx.h"
#include "testdll.h"
#include <iostream>
#include<fstream>
#include <sys/types.h>
#include "opencv2/opencv.hpp"
#include "Ws2tcpip.h"
#include <sys/types.h>
#include <sys/types.h>
#include <winsock2.h>
#include <fcntl.h>
#include <cstring>
#include <cstdio>
#include <signal.h>
#pragma comment(lib,"ws2_32.lib")
using namespace std;
using namespace cv;
extern "C"
{
#include "libavcodec/avcodec.h"
#include "libavformat/avformat.h"
#include "libswscale/swscale.h"
#include "libavdevice/avdevice.h"
};
#define OUTPUT_YUV420P 0
// Input file handle used only by read_buffer_file (file-based AVIO fallback).
FILE *fp_open = NULL;
// Scratch receive buffer; unused by the active code path, kept for the
// commented-out recv() experiments below.
char recv_buf[1504];
// Sink for writedata(): raw bytes pushed from Python land in video.dat.
std::ofstream fout1("video.dat", std::ios::binary);
// Sanity-check export used from Python: returns the sum of its arguments.
int Add(int plus1, int plus2)
{
    return plus1 + plus2;
}
// Returns a fixed test string through the exported char* interface.
// The original bound a string literal to a non-const char*, which is
// ill-formed in C++11 and hands the caller a pointer into read-only
// memory; a static mutable array keeps the signature while making the
// pointee genuinely writable. `plus1` is unused (kept for ABI parity).
char* testchar(int plus1) {
	static char str[] = "hello world11111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111";
	return str;
}
// Loads D:/8.jpg and returns its raw BGR bytes in a malloc'd buffer.
// On success *plus1 receives the byte count (rows * cols * 3) and the
// caller owns (must free()) the returned buffer; on failure returns NULL
// with *plus1 = 0.
// Fixes vs. original: removed an unused 10000-iteration string-append
// loop (pure busywork), added the missing empty-Mat guard (imread failure
// previously crashed), and replaced the byte-by-byte at<>() copy with one
// memcpy per row, which also handles non-continuous Mats via ptr().
char* teststring(int* plus1) {
	Mat mat = imread("D:/8.jpg", CV_LOAD_IMAGE_COLOR);
	if (mat.empty()) {  // imread returns an empty Mat when the file is missing/bad
		*plus1 = 0;
		return NULL;
	}
	printf("%d %d", mat.rows, mat.cols);
	const int rows = mat.rows;
	const int rowBytes = mat.cols * 3;  // 3 bytes per pixel (BGR, CV_8UC3)
	unsigned char *data = (unsigned char*)malloc((size_t)rows * rowBytes);
	if (data == NULL) {
		*plus1 = 0;
		return NULL;
	}
	// Row-wise copy: mat.ptr() respects the Mat's step, so padded rows are
	// flattened correctly into the contiguous output buffer.
	for (int i = 0; i < rows; i++) {
		memcpy(data + (size_t)i * rowBytes, mat.ptr<unsigned char>(i), rowBytes);
	}
	*plus1 = rows * rowBytes;
	return (char*)data;
}
//str = "asdfsdf";
//string aaaa(str,500);
//char* char_r = (char *)malloc(sizeof(char) * (str.size() + 10));
//memcpy(char_r, str.data(), sizeof(char) * (str.size()));
//
////return const_cast<char*>(name.c_str());
////printf("1111111111");
//return str;
//}
// Appends `size` raw bytes to the global video.dat output stream.
// Returns size + 1 (legacy success contract expected by the Python caller).
int writedata(const char* data, int size)
{
	fout1.write(data, static_cast<std::streamsize>(size));
	return size + 1;
}
// Flushes and closes the video.dat stream opened at DLL load.
// Always returns 2 (legacy success code checked by the Python caller).
int dataclose()
{
fout1.close();
return 2;
}
// ffmpeg_02.cpp : 定义控制台应用程序的入口点。
//
// One framed network packet: a length prefix followed by up to 1500
// payload bytes. (was: 数据包 = "data packet")
struct Data
{
int size;            // number of valid payload bytes in recvbuf
char recvbuf[1500];  // payload buffer (roughly one MTU)
}data_recv;          // single global instance reused by read_buffer()
// Client TCP socket, connected by tcpInit() and read by read_buffer().
SOCKET sockClient;
//Callback
// FFmpeg AVIO read callback: receives one framed Data packet from the TCP
// socket and copies its payload into FFmpeg's buffer.
// Returns the number of bytes delivered, or -1 on error / connection close.
// Fixes vs. original: data_recv.size was copied into buf without any bounds
// check (heap overflow if the peer sends size > buf_size or > 1500); the
// memset fill value was '0' (0x30), a typo for 0; peer close (ret == 0) was
// treated as success; the error message said "WSAStartup() failed".
int read_buffer(void *opaque, uint8_t *buf, int buf_size) {
	memset(data_recv.recvbuf, 0, sizeof(data_recv.recvbuf));
	// One recv() per struct Data (length prefix + payload).
	// NOTE(review): TCP is a byte stream, so a short read can split the
	// struct; a robust version would loop until sizeof(struct Data) bytes
	// arrive. Kept single-shot to match the sender's framing assumption.
	int ret = recv(sockClient, (char *)&data_recv, sizeof(struct Data), 0);
	if (ret <= 0)  // <0: socket error, ==0: orderly shutdown — both end the stream
	{
		printf("recv() failed or connection closed!\n");
		return -1;
	}
	int copy = data_recv.size;
	if (copy < 0)
		return -1;
	if (copy > buf_size)                        // never overrun FFmpeg's buffer
		copy = buf_size;
	if (copy > (int)sizeof(data_recv.recvbuf))  // nor read past our own payload
		copy = (int)sizeof(data_recv.recvbuf);
	memcpy(buf, data_recv.recvbuf, copy);
	return copy;
}
// File-based AVIO read callback (fallback path): streams bytes from the
// global fp_open handle. Returns the bytes read, or -1 once EOF is reached.
int read_buffer_file(void *opaque, uint8_t *buf, int buf_size) {
	if (feof(fp_open))
		return -1;
	return (int)fread(buf, 1, buf_size, fp_open);
}
// Placeholder callback: echoes its first argument, ignoring the second.
static int callbackprint(int a, int b) {
	(void)b;  // intentionally unused
	return a;
}
int tcpInit()
{
WSADATA wsaData;
char buff[1024];
memset(buff, 0, sizeof(buff));
if (WSAStartup(MAKEWORD(2, 2), &wsaData) != 0)
{
printf("初始化Winsock失败");
return -1;
}
SOCKADDR_IN addrSrv;
addrSrv.sin_family = AF_INET;
addrSrv.sin_port = htons(8888);//端口号
//addrSrv.sin_addr.S_un.S_addr = inet_pton("127.0.0.1");//IP地址
//addrSrv.sin_addr.S_un.S_addr = InetPton(AF_INET, _T("127.0.0.1"), &addrSrv.sin_addr.s_addr);
// //创建套接字
sockClient = socket(AF_INET, SOCK_STREAM, 0);
inet_pton(AF_INET, "127.0.0.1", &addrSrv.sin_addr.s_addr);
if (connect(sockClient, (struct sockaddr*)&addrSrv, sizeof(addrSrv)) == -1)
return -2;
//throw "连接失败";
if (SOCKET_ERROR == sockClient) {
printf("Socket() error:%d", WSAGetLastError());
return -3;
}
return 0;
}
// Pulls an H.264 elementary stream from the TCP socket (via the read_buffer
// AVIO callback), decodes it frame by frame, repacks each decoded picture
// into one contiguous YUV420P buffer (Y plane, then U, then V, with the
// per-row stride padding removed) and hands that buffer to the
// Python-registered callback `tcallback`.
// Returns 0 after the stream ends, -1 on any FFmpeg setup/decode error.
// Fixes vs. original: `delete buf` on a new[] allocation (undefined
// behavior — now delete[]); pFrame was never freed; fclose(fp_open) ran
// even when fopen had failed; removed the unused `filepath` array, a
// redundant memset of a fully overwritten buffer, and a large block of
// dead commented-out code.
int ffmpeg_recv(CompareFunc tcallback(char* a, int b))
{
	av_register_all();
	printf("FFmpeg version:\t%d\n", avcodec_version());
	int connResult = tcpInit();
	printf("conn server\t%d\n", connResult);

	AVFormatContext *pFormatCtx;
	int i, videoindex;
	AVCodecContext *pCodecCtx;
	AVCodec *pCodec;
	avformat_network_init();
	pFormatCtx = avformat_alloc_context();
	// fp_open feeds read_buffer_file only; the active path reads the socket.
	string patha = "C:\\Users\\sbd01\\Videos\\video.264";
	fp_open = fopen(patha.c_str(), "rb+");

	// Custom AVIO context: FFmpeg calls read_buffer instead of opening a file.
	unsigned char *aviobuffer = (unsigned char *)av_malloc(1504);
	AVIOContext *avio = avio_alloc_context(aviobuffer, 1504, 0, NULL, read_buffer, NULL, NULL);
	pFormatCtx->pb = avio;
	if (avformat_open_input(&pFormatCtx, NULL, NULL, NULL) != 0) {
		printf("Couldn't open input stream.\n");
		return -1;
	}
	if (avformat_find_stream_info(pFormatCtx, NULL) < 0) {
		printf("Couldn't find stream information.\n");
		return -1;
	}
	videoindex = -1;
	for (i = 0; i < pFormatCtx->nb_streams; i++)
		if (pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO) {
			videoindex = i;
			break;
		}
	if (videoindex == -1) {
		printf("Didn't find a video stream.\n");
		return -1;
	}
	pCodecCtx = pFormatCtx->streams[videoindex]->codec;
	pCodec = avcodec_find_decoder(pCodecCtx->codec_id);
	if (pCodec == NULL) {
		printf("Codec not found.\n");
		return -1;
	}
	if (avcodec_open2(pCodecCtx, pCodec, NULL) < 0) {
		printf("Could not open codec.\n");
		return -1;
	}
	AVFrame *pFrame = av_frame_alloc();
	AVFrame *pFrameYUV = av_frame_alloc();
	int ret, got_picture;
	AVPacket *packet = (AVPacket *)av_malloc(sizeof(AVPacket));
#if OUTPUT_YUV420P
	FILE *fp_yuv = fopen("output.yuv", "wb+");
#endif
	// Allocated for parity with the original sample; no sws_scale call is
	// made on this path (the decoder already emits YUV420P).
	struct SwsContext *img_convert_ctx;
	img_convert_ctx = sws_getContext(pCodecCtx->width, pCodecCtx->height, pCodecCtx->pix_fmt, pCodecCtx->width, pCodecCtx->height, AV_PIX_FMT_YUV420P, SWS_BICUBIC, NULL, NULL, NULL);
	while (av_read_frame(pFormatCtx, packet) >= 0) {
		if (packet->stream_index == videoindex) {
			ret = avcodec_decode_video2(pCodecCtx, pFrame, &got_picture, packet);
			if (ret < 0) {
				printf("Decode Error.\n");
				return -1;
			}
			if (got_picture) {
				const int width = pCodecCtx->width;
				const int height = pCodecCtx->height;
				const int yuvSize = width * height * 3 / 2;  // YUV420P: 1.5 bytes/pixel
				char* buf = new char[yuvSize];
				printf("decode video ok\n");
				// Flatten the three planes, dropping FFmpeg's per-row
				// padding (linesize may exceed the visible width).
				int offset = 0;
				for (int row = 0; row < height; row++) {      // Y plane
					memcpy(buf + offset, pFrame->data[0] + row * pFrame->linesize[0], width);
					offset += width;
				}
				for (int row = 0; row < height / 2; row++) {  // U plane
					memcpy(buf + offset, pFrame->data[1] + row * pFrame->linesize[1], width / 2);
					offset += width / 2;
				}
				for (int row = 0; row < height / 2; row++) {  // V plane
					memcpy(buf + offset, pFrame->data[2] + row * pFrame->linesize[2], width / 2);
					offset += width / 2;
				}
				tcallback(buf, yuvSize);
#if OUTPUT_YUV420P
				fwrite(buf, 1, yuvSize, fp_yuv);
#endif
				delete[] buf;  // was `delete buf` — new[] requires delete[]
			}
		}
		av_free_packet(packet);
	}
	sws_freeContext(img_convert_ctx);
#if OUTPUT_YUV420P
	fclose(fp_yuv);
#endif
	if (fp_open)  // fopen may have failed; the socket path never needed it
		fclose(fp_open);
	av_frame_free(&pFrameYUV);
	av_frame_free(&pFrame);  // was leaked in the original
	avcodec_close(pCodecCtx);
	avformat_close_input(&pFormatCtx);
	// Kept from the original sample: block so a console window stays open.
	int a;
	scanf("%d", &a);
	return 0;
}
//int _tmain(int argc, _TCHAR* argv[])
//{
// cout << "Hello FFmpeg!" << endl;
//// av_register_all();
//// unsigned version = avcodec_version();
////
//// printf("FFmpeg 版本号:\t%d\n", version);
////
//// WSADATA wsaData;
//// int port = 8888;//端口号
//// if (WSAStartup(MAKEWORD(2, 2), &wsaData) != 0)
//// {
//// printf("初始化失败");
//// return 0;
//// }
////
//// //创建用于监听的套接字,即服务端的套接字
//// SOCKET sockSrv = socket(AF_INET, SOCK_STREAM, 0);
////
//// SOCKADDR_IN addrSrv;
//// addrSrv.sin_family = AF_INET;
//// addrSrv.sin_port = htons(port); //1024以上的端口号
//// /**
//// * INADDR_ANY就是指定地址为0.0.0.0的地址,这个地址事实上表示不确定地址,或“所有地址”、“任意地址”。 一般来说,在各个系统中均定义成为0值。
//// */
//// addrSrv.sin_addr.S_un.S_addr = htonl(INADDR_ANY);
////
//// int retVal = bind(sockSrv, (LPSOCKADDR)&addrSrv, sizeof(SOCKADDR_IN));
//// if (retVal == SOCKET_ERROR) {
//// printf("连接失败:%d\n", WSAGetLastError());
//// return 0;
//// }
////
//// if (listen(sockSrv, 10) == SOCKET_ERROR) {
//// printf("监听失败:%d", WSAGetLastError());
//// return 0;
//// }
////
//// SOCKADDR_IN addrClient;
//// int len = sizeof(SOCKADDR);
////
//// while (1)
//// {
//// //等待客户请求到来
//// sockConn = accept(sockSrv, (SOCKADDR *)&addrClient, &len);
//// if (sockConn == SOCKET_ERROR) {
//// printf("等待请求失败:%d", WSAGetLastError());
//// break;
//// }
////
//// //printf("客户端的IP是:[%s]\n", inet_ntoa(addrClient.sin_addr));
////
//// //发送数据
//// char sendbuf[] = "你好,我是服务端,咱们一起聊天吧";
//// int iSend = send(sockConn, sendbuf, sizeof(sendbuf), 0);
//// if (iSend == SOCKET_ERROR) {
//// printf("发送失败");
//// break;
//// }
//// break;
////
//// /* HANDLE hThread = CreateThread(NULL, 0, Fun, NULL, 0, NULL);
//// CloseHandle(hThread);*/
////
//// }
//// AVFormatContext *pFormatCtx;
//// int i, videoindex;
//// AVCodecContext *pCodecCtx;
//// AVCodec *pCodec;
//// char filepath[] = "video.264";
////
//// av_register_all();
//// avformat_network_init();
//// pFormatCtx = avformat_alloc_context();
//// string patha = "C:\\Users\\sbd01\\Videos\\video.264";
//// //patha = "C:\\Users\\sbd01\\Pictures\\ffmpegtest\\Debug\\video.dat";
////
//// fp_open = fopen(patha.c_str(), "rb+");
//// //Init AVIOContext
//// unsigned char *aviobuffer = (unsigned char *)av_malloc(1504);
//// AVIOContext *avio = avio_alloc_context(aviobuffer, 1504, 0, NULL, read_buffer, NULL, NULL);
//// pFormatCtx->pb = avio;
//// //if (avformat_open_input(&pFormatCtx, patha.c_str(), NULL, NULL) != 0) {
////
//// if (avformat_open_input(&pFormatCtx, NULL, NULL, NULL) != 0) {
//// printf("Couldn't open input stream.\n");
//// return -1;
//// }
//// if (avformat_find_stream_info(pFormatCtx, NULL)<0) {
//// printf("Couldn't find stream information.\n");
//// return -1;
//// }
//// videoindex = -1;
//// for (i = 0; i<pFormatCtx->nb_streams; i++)
//// if (pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO) {
//// videoindex = i;
//// break;
//// }
//// if (videoindex == -1) {
//// printf("Didn't find a video stream.\n");
//// return -1;
//// }
//// pCodecCtx = pFormatCtx->streams[videoindex]->codec;
//// pCodec = avcodec_find_decoder(pCodecCtx->codec_id);
//// if (pCodec == NULL) {
//// printf("Codec not found.\n");
//// return -1;
//// }
//// if (avcodec_open2(pCodecCtx, pCodec, NULL)<0) {
//// printf("Could not open codec.\n");
//// return -1;
//// }
//// AVFrame *pFrame, *pFrameYUV;
//// pFrame = av_frame_alloc();
//// pFrameYUV = av_frame_alloc();
////
//// if (SDL_Init(SDL_INIT_VIDEO | SDL_INIT_AUDIO | SDL_INIT_TIMER)) {
//// printf("Could not initialize SDL - %s\n", SDL_GetError());
//// return -1;
//// }
////
//// int screen_w = 0, screen_h = 0;
//// SDL_Surface *screen;
//// screen_w = pCodecCtx->width;
//// screen_h = pCodecCtx->height;
//// screen = SDL_SetVideoMode(screen_w, screen_h, 0, 0);
////
//// if (!screen) {
//// printf("SDL: could not set video mode - exiting:%s\n", SDL_GetError());
//// return -1;
//// }
//// SDL_Overlay *bmp;
//// bmp = SDL_CreateYUVOverlay(pCodecCtx->width, pCodecCtx->height, SDL_YV12_OVERLAY, screen);
//// SDL_Rect rect;
//// rect.x = 0;
//// rect.y = 0;
//// rect.w = screen_w;
//// rect.h = screen_h;
//// //SDL End------------------------
//// int ret, got_picture;
////
//// AVPacket *packet = (AVPacket *)av_malloc(sizeof(AVPacket));
////
////#if OUTPUT_YUV420P
//// FILE *fp_yuv = fopen("output.yuv", "wb+");
////#endif
//// SDL_WM_SetCaption("Simplest FFmpeg Mem Player", NULL);
////
//// struct SwsContext *img_convert_ctx;
//// img_convert_ctx = sws_getContext(pCodecCtx->width, pCodecCtx->height, pCodecCtx->pix_fmt, pCodecCtx->width, pCodecCtx->height, AV_PIX_FMT_YUV420P, SWS_BICUBIC, NULL, NULL, NULL);
//// //------------------------------
//// while (av_read_frame(pFormatCtx, packet) >= 0) {
//// if (packet->stream_index == videoindex) {
//// ret = avcodec_decode_video2(pCodecCtx, pFrame, &got_picture, packet);
//// if (ret < 0) {
//// printf("Decode Error.\n");
//// return -1;
//// }
//// if (got_picture) {
//// SDL_LockYUVOverlay(bmp);
//// pFrameYUV->data[0] = bmp->pixels[0];
//// pFrameYUV->data[1] = bmp->pixels[2];
//// pFrameYUV->data[2] = bmp->pixels[1];
//// pFrameYUV->linesize[0] = bmp->pitches[0];
//// pFrameYUV->linesize[1] = bmp->pitches[2];
//// pFrameYUV->linesize[2] = bmp->pitches[1];
//// sws_scale(img_convert_ctx, (const uint8_t* const*)pFrame->data, pFrame->linesize, 0, pCodecCtx->height, pFrameYUV->data, pFrameYUV->linesize);
////#if OUTPUT_YUV420P
//// int y_size = pCodecCtx->width*pCodecCtx->height;
//// fwrite(pFrameYUV->data[0], 1, y_size, fp_yuv); //Y
//// fwrite(pFrameYUV->data[1], 1, y_size / 4, fp_yuv); //U
//// fwrite(pFrameYUV->data[2], 1, y_size / 4, fp_yuv); //V
////#endif
//// SDL_UnlockYUVOverlay(bmp);
////
//// SDL_DisplayYUVOverlay(bmp, &rect);
//// //Delay 40ms
//// //SDL_Delay(40);
//// }
//// }
//// av_free_packet(packet);
//// }
//// sws_freeContext(img_convert_ctx);
////
////#if OUTPUT_YUV420P
//// fclose(fp_yuv);
////#endif
////
//// fclose(fp_open);
////
//// SDL_Quit();
////
//// //av_free(out_buffer);
//// av_free(pFrameYUV);
//// avcodec_close(pCodecCtx);
//// avformat_close_input(&pFormatCtx);
//// int a;
//// scanf("%d", &a);
// return 0;
//}
26张图片:
http://blog.csdn.net/u011430225/article/details/51462311
index=1
def trans(s):
    """Render a byte sequence as a Python bytes-literal string, e.g. b'\\x01\\xff'."""
    hex_parts = ['\\x%.2x' % byte for byte in s]
    return "b'%s'" % ''.join(hex_parts)
def getStreamCallback(a, b):
    """ctypes callback invoked from the C++ DLL with one decoded frame.

    Args:
        a: C pointer (void*) to the start of the YUV420P frame buffer.
        b: length of that buffer in bytes.

    Copies the native buffer into Python, wraps it in a numpy array and
    converts it via getimage(), printing timestamps around the conversion
    for rough per-frame timing.
    """
    raw = string_at(a, b)  # copy exactly b bytes out of the C buffer
    counter = datetime.datetime.now().strftime('%Y%m%d_%H%M%S_%f')
    print(1, counter)
    # np.fromstring is deprecated for binary input; np.frombuffer is the
    # supported replacement. .copy() keeps the array writable, matching
    # fromstring's copy semantics (frombuffer alone is read-only on bytes).
    nparr = np.frombuffer(raw, np.uint8).copy()
    # project-local YUV->RGB conversion; assumed defined by the importing script
    rgb = getimage(nparr)
    counter = datetime.datetime.now().strftime('%Y%m%d_%H%M%S_%f')
    print(2, counter)
if __name__ == '__main__':
    # Load the native decoder DLL sitting next to this script.
    native = CDLL(r"./dllt1.dll")
    # ctypes prototype matching the C side: void* cb(void* data, int length)
    FrameCallback = CFUNCTYPE(c_void_p, c_void_p, c_int)
    frame_cb = FrameCallback(getStreamCallback)
    # Blocks inside the DLL's receive/decode loop, invoking frame_cb per frame.
    native.ffmpeg_recv(frame_cb)
c++保存yuv
#include "stdafx.h"
#include "testdll.h"
#include <iostream>
#include<fstream>
#include <sys/types.h>
#include "opencv2/opencv.hpp"
#include "Ws2tcpip.h"
#include <sys/types.h>
#include <sys/types.h>
#include <winsock2.h>
#include <fcntl.h>
#include <cstring>
#include <cstdio>
#include <signal.h>
#pragma comment(lib,"ws2_32.lib")
using namespace std;
using namespace cv;
extern "C"
{
#include "libavcodec/avcodec.h"
#include "libavformat/avformat.h"
#include "libswscale/swscale.h"
#include "libavdevice/avdevice.h"
};
#define OUTPUT_YUV420P 0
// Input file handle used only by read_buffer_file (file-based AVIO fallback).
FILE *fp_open = NULL;
// Scratch receive buffer; unused by the active code path, kept for the
// commented-out recv() experiments below.
char recv_buf[1504];
// Sink for writedata(): raw bytes pushed from Python land in video.dat.
std::ofstream fout1("video.dat", std::ios::binary);
// Sanity-check export used from Python: returns the sum of its arguments.
int Add(int plus1, int plus2)
{
    return plus1 + plus2;
}
// Returns a fixed test string through the exported char* interface.
// The original bound a string literal to a non-const char*, which is
// ill-formed in C++11 and hands the caller a pointer into read-only
// memory; a static mutable array keeps the signature while making the
// pointee genuinely writable. `plus1` is unused (kept for ABI parity).
char* testchar(int plus1) {
	static char str[] = "hello world11111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111";
	return str;
}
// Loads D:/8.jpg and returns its raw BGR bytes in a malloc'd buffer.
// On success *plus1 receives the byte count (rows * cols * 3) and the
// caller owns (must free()) the returned buffer; on failure returns NULL
// with *plus1 = 0.
// Fixes vs. original: removed an unused 10000-iteration string-append
// loop (pure busywork), added the missing empty-Mat guard (imread failure
// previously crashed), and replaced the byte-by-byte at<>() copy with one
// memcpy per row, which also handles non-continuous Mats via ptr().
char* teststring(int* plus1) {
	Mat mat = imread("D:/8.jpg", CV_LOAD_IMAGE_COLOR);
	if (mat.empty()) {  // imread returns an empty Mat when the file is missing/bad
		*plus1 = 0;
		return NULL;
	}
	printf("%d %d", mat.rows, mat.cols);
	const int rows = mat.rows;
	const int rowBytes = mat.cols * 3;  // 3 bytes per pixel (BGR, CV_8UC3)
	unsigned char *data = (unsigned char*)malloc((size_t)rows * rowBytes);
	if (data == NULL) {
		*plus1 = 0;
		return NULL;
	}
	// Row-wise copy: mat.ptr() respects the Mat's step, so padded rows are
	// flattened correctly into the contiguous output buffer.
	for (int i = 0; i < rows; i++) {
		memcpy(data + (size_t)i * rowBytes, mat.ptr<unsigned char>(i), rowBytes);
	}
	*plus1 = rows * rowBytes;
	return (char*)data;
}
//str = "asdfsdf";
//string aaaa(str,500);
//char* char_r = (char *)malloc(sizeof(char) * (str.size() + 10));
//memcpy(char_r, str.data(), sizeof(char) * (str.size()));
//
////return const_cast<char*>(name.c_str());
////printf("1111111111");
//return str;
//}
// Appends `size` raw bytes to the global video.dat output stream.
// Returns size + 1 (legacy success contract expected by the Python caller).
int writedata(const char* data, int size)
{
	fout1.write(data, static_cast<std::streamsize>(size));
	return size + 1;
}
// Flushes and closes the video.dat stream opened at DLL load.
// Always returns 2 (legacy success code checked by the Python caller).
int dataclose()
{
fout1.close();
return 2;
}
// ffmpeg_02.cpp : 定义控制台应用程序的入口点。
//
// One framed network packet: a length prefix followed by up to 1500
// payload bytes. (was: 数据包 = "data packet")
struct Data
{
int size;            // number of valid payload bytes in recvbuf
char recvbuf[1500];  // payload buffer (roughly one MTU)
}data_recv;          // single global instance reused by read_buffer()
// Client TCP socket, connected by tcpInit() and read by read_buffer().
SOCKET sockClient;
//Callback
// FFmpeg AVIO read callback: receives one framed Data packet from the TCP
// socket and copies its payload into FFmpeg's buffer.
// Returns the number of bytes delivered, or -1 on error / connection close.
// Fixes vs. original: data_recv.size was copied into buf without any bounds
// check (heap overflow if the peer sends size > buf_size or > 1500); the
// memset fill value was '0' (0x30), a typo for 0; peer close (ret == 0) was
// treated as success; the error message said "WSAStartup() failed".
int read_buffer(void *opaque, uint8_t *buf, int buf_size) {
	memset(data_recv.recvbuf, 0, sizeof(data_recv.recvbuf));
	// One recv() per struct Data (length prefix + payload).
	// NOTE(review): TCP is a byte stream, so a short read can split the
	// struct; a robust version would loop until sizeof(struct Data) bytes
	// arrive. Kept single-shot to match the sender's framing assumption.
	int ret = recv(sockClient, (char *)&data_recv, sizeof(struct Data), 0);
	if (ret <= 0)  // <0: socket error, ==0: orderly shutdown — both end the stream
	{
		printf("recv() failed or connection closed!\n");
		return -1;
	}
	int copy = data_recv.size;
	if (copy < 0)
		return -1;
	if (copy > buf_size)                        // never overrun FFmpeg's buffer
		copy = buf_size;
	if (copy > (int)sizeof(data_recv.recvbuf))  // nor read past our own payload
		copy = (int)sizeof(data_recv.recvbuf);
	memcpy(buf, data_recv.recvbuf, copy);
	return copy;
}
// File-based AVIO read callback (fallback path): streams bytes from the
// global fp_open handle. Returns the bytes read, or -1 once EOF is reached.
int read_buffer_file(void *opaque, uint8_t *buf, int buf_size) {
	if (feof(fp_open))
		return -1;
	return (int)fread(buf, 1, buf_size, fp_open);
}
// Placeholder callback: echoes its first argument, ignoring the second.
static int callbackprint(int a, int b) {
	(void)b;  // intentionally unused
	return a;
}
int tcpInit()
{
WSADATA wsaData;
char buff[1024];
memset(buff, 0, sizeof(buff));
if (WSAStartup(MAKEWORD(2, 2), &wsaData) != 0)
{
printf("初始化Winsock失败");
return -1;
}
SOCKADDR_IN addrSrv;
addrSrv.sin_family = AF_INET;
addrSrv.sin_port = htons(8888);//端口号
//addrSrv.sin_addr.S_un.S_addr = inet_pton("127.0.0.1");//IP地址
//addrSrv.sin_addr.S_un.S_addr = InetPton(AF_INET, _T("127.0.0.1"), &addrSrv.sin_addr.s_addr);
// //创建套接字
sockClient = socket(AF_INET, SOCK_STREAM, 0);
inet_pton(AF_INET, "127.0.0.1", &addrSrv.sin_addr.s_addr);
if (connect(sockClient, (struct sockaddr*)&addrSrv, sizeof(addrSrv)) == -1)
return -2;
//throw "连接失败";
if (SOCKET_ERROR == sockClient) {
printf("Socket() error:%d", WSAGetLastError());
return -3;
}
return 0;
}
// Pulls an H.264 elementary stream from the TCP socket (via the read_buffer
// AVIO callback), decodes it frame by frame, repacks each decoded picture
// into one contiguous YUV420P buffer (Y plane, then U, then V, with the
// per-row stride padding removed) and hands that buffer to the
// Python-registered callback `tcallback`.
// Returns 0 after the stream ends, -1 on any FFmpeg setup/decode error.
// Fixes vs. original: `delete buf` on a new[] allocation (undefined
// behavior — now delete[]); pFrame was never freed; fclose(fp_open) ran
// even when fopen had failed; removed the unused `filepath` array, a
// redundant memset of a fully overwritten buffer, and a large block of
// dead commented-out code.
int ffmpeg_recv(CompareFunc tcallback(char* a, int b))
{
	av_register_all();
	printf("FFmpeg version:\t%d\n", avcodec_version());
	int connResult = tcpInit();
	printf("conn server\t%d\n", connResult);

	AVFormatContext *pFormatCtx;
	int i, videoindex;
	AVCodecContext *pCodecCtx;
	AVCodec *pCodec;
	avformat_network_init();
	pFormatCtx = avformat_alloc_context();
	// fp_open feeds read_buffer_file only; the active path reads the socket.
	string patha = "C:\\Users\\sbd01\\Videos\\video.264";
	fp_open = fopen(patha.c_str(), "rb+");

	// Custom AVIO context: FFmpeg calls read_buffer instead of opening a file.
	unsigned char *aviobuffer = (unsigned char *)av_malloc(1504);
	AVIOContext *avio = avio_alloc_context(aviobuffer, 1504, 0, NULL, read_buffer, NULL, NULL);
	pFormatCtx->pb = avio;
	if (avformat_open_input(&pFormatCtx, NULL, NULL, NULL) != 0) {
		printf("Couldn't open input stream.\n");
		return -1;
	}
	if (avformat_find_stream_info(pFormatCtx, NULL) < 0) {
		printf("Couldn't find stream information.\n");
		return -1;
	}
	videoindex = -1;
	for (i = 0; i < pFormatCtx->nb_streams; i++)
		if (pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO) {
			videoindex = i;
			break;
		}
	if (videoindex == -1) {
		printf("Didn't find a video stream.\n");
		return -1;
	}
	pCodecCtx = pFormatCtx->streams[videoindex]->codec;
	pCodec = avcodec_find_decoder(pCodecCtx->codec_id);
	if (pCodec == NULL) {
		printf("Codec not found.\n");
		return -1;
	}
	if (avcodec_open2(pCodecCtx, pCodec, NULL) < 0) {
		printf("Could not open codec.\n");
		return -1;
	}
	AVFrame *pFrame = av_frame_alloc();
	AVFrame *pFrameYUV = av_frame_alloc();
	int ret, got_picture;
	AVPacket *packet = (AVPacket *)av_malloc(sizeof(AVPacket));
#if OUTPUT_YUV420P
	FILE *fp_yuv = fopen("output.yuv", "wb+");
#endif
	// Allocated for parity with the original sample; no sws_scale call is
	// made on this path (the decoder already emits YUV420P).
	struct SwsContext *img_convert_ctx;
	img_convert_ctx = sws_getContext(pCodecCtx->width, pCodecCtx->height, pCodecCtx->pix_fmt, pCodecCtx->width, pCodecCtx->height, AV_PIX_FMT_YUV420P, SWS_BICUBIC, NULL, NULL, NULL);
	while (av_read_frame(pFormatCtx, packet) >= 0) {
		if (packet->stream_index == videoindex) {
			ret = avcodec_decode_video2(pCodecCtx, pFrame, &got_picture, packet);
			if (ret < 0) {
				printf("Decode Error.\n");
				return -1;
			}
			if (got_picture) {
				const int width = pCodecCtx->width;
				const int height = pCodecCtx->height;
				const int yuvSize = width * height * 3 / 2;  // YUV420P: 1.5 bytes/pixel
				char* buf = new char[yuvSize];
				printf("decode video ok\n");
				// Flatten the three planes, dropping FFmpeg's per-row
				// padding (linesize may exceed the visible width).
				int offset = 0;
				for (int row = 0; row < height; row++) {      // Y plane
					memcpy(buf + offset, pFrame->data[0] + row * pFrame->linesize[0], width);
					offset += width;
				}
				for (int row = 0; row < height / 2; row++) {  // U plane
					memcpy(buf + offset, pFrame->data[1] + row * pFrame->linesize[1], width / 2);
					offset += width / 2;
				}
				for (int row = 0; row < height / 2; row++) {  // V plane
					memcpy(buf + offset, pFrame->data[2] + row * pFrame->linesize[2], width / 2);
					offset += width / 2;
				}
				tcallback(buf, yuvSize);
#if OUTPUT_YUV420P
				fwrite(buf, 1, yuvSize, fp_yuv);
#endif
				delete[] buf;  // was `delete buf` — new[] requires delete[]
			}
		}
		av_free_packet(packet);
	}
	sws_freeContext(img_convert_ctx);
#if OUTPUT_YUV420P
	fclose(fp_yuv);
#endif
	if (fp_open)  // fopen may have failed; the socket path never needed it
		fclose(fp_open);
	av_frame_free(&pFrameYUV);
	av_frame_free(&pFrame);  // was leaked in the original
	avcodec_close(pCodecCtx);
	avformat_close_input(&pFormatCtx);
	// Kept from the original sample: block so a console window stays open.
	int a;
	scanf("%d", &a);
	return 0;
}
//int _tmain(int argc, _TCHAR* argv[])
//{
// cout << "Hello FFmpeg!" << endl;
//// av_register_all();
//// unsigned version = avcodec_version();
////
//// printf("FFmpeg 版本号:\t%d\n", version);
////
//// WSADATA wsaData;
//// int port = 8888;//端口号
//// if (WSAStartup(MAKEWORD(2, 2), &wsaData) != 0)
//// {
//// printf("初始化失败");
//// return 0;
//// }
////
//// //创建用于监听的套接字,即服务端的套接字
//// SOCKET sockSrv = socket(AF_INET, SOCK_STREAM, 0);
////
//// SOCKADDR_IN addrSrv;
//// addrSrv.sin_family = AF_INET;
//// addrSrv.sin_port = htons(port); //1024以上的端口号
//// /**
//// * INADDR_ANY就是指定地址为0.0.0.0的地址,这个地址事实上表示不确定地址,或“所有地址”、“任意地址”。 一般来说,在各个系统中均定义成为0值。
//// */
//// addrSrv.sin_addr.S_un.S_addr = htonl(INADDR_ANY);
////
//// int retVal = bind(sockSrv, (LPSOCKADDR)&addrSrv, sizeof(SOCKADDR_IN));
//// if (retVal == SOCKET_ERROR) {
//// printf("连接失败:%d\n", WSAGetLastError());
//// return 0;
//// }
////
//// if (listen(sockSrv, 10) == SOCKET_ERROR) {
//// printf("监听失败:%d", WSAGetLastError());
//// return 0;
//// }
////
//// SOCKADDR_IN addrClient;
//// int len = sizeof(SOCKADDR);
////
//// while (1)
//// {
//// //等待客户请求到来
//// sockConn = accept(sockSrv, (SOCKADDR *)&addrClient, &len);
//// if (sockConn == SOCKET_ERROR) {
//// printf("等待请求失败:%d", WSAGetLastError());
//// break;
//// }
////
//// //printf("客户端的IP是:[%s]\n", inet_ntoa(addrClient.sin_addr));
////
//// //发送数据
//// char sendbuf[] = "你好,我是服务端,咱们一起聊天吧";
//// int iSend = send(sockConn, sendbuf, sizeof(sendbuf), 0);
//// if (iSend == SOCKET_ERROR) {
//// printf("发送失败");
//// break;
//// }
//// break;
////
//// /* HANDLE hThread = CreateThread(NULL, 0, Fun, NULL, 0, NULL);
//// CloseHandle(hThread);*/
////
//// }
//// AVFormatContext *pFormatCtx;
//// int i, videoindex;
//// AVCodecContext *pCodecCtx;
//// AVCodec *pCodec;
//// char filepath[] = "video.264";
////
//// av_register_all();
//// avformat_network_init();
//// pFormatCtx = avformat_alloc_context();
//// string patha = "C:\\Users\\sbd01\\Videos\\video.264";
//// //patha = "C:\\Users\\sbd01\\Pictures\\ffmpegtest\\Debug\\video.dat";
////
//// fp_open = fopen(patha.c_str(), "rb+");
//// //Init AVIOContext
//// unsigned char *aviobuffer = (unsigned char *)av_malloc(1504);
//// AVIOContext *avio = avio_alloc_context(aviobuffer, 1504, 0, NULL, read_buffer, NULL, NULL);
//// pFormatCtx->pb = avio;
//// //if (avformat_open_input(&pFormatCtx, patha.c_str(), NULL, NULL) != 0) {
////
//// if (avformat_open_input(&pFormatCtx, NULL, NULL, NULL) != 0) {
//// printf("Couldn't open input stream.\n");
//// return -1;
//// }
//// if (avformat_find_stream_info(pFormatCtx, NULL)<0) {
//// printf("Couldn't find stream information.\n");
//// return -1;
//// }
//// videoindex = -1;
//// for (i = 0; i<pFormatCtx->nb_streams; i++)
//// if (pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO) {
//// videoindex = i;
//// break;
//// }
//// if (videoindex == -1) {
//// printf("Didn't find a video stream.\n");
//// return -1;
//// }
//// pCodecCtx = pFormatCtx->streams[videoindex]->codec;
//// pCodec = avcodec_find_decoder(pCodecCtx->codec_id);
//// if (pCodec == NULL) {
//// printf("Codec not found.\n");
//// return -1;
//// }
//// if (avcodec_open2(pCodecCtx, pCodec, NULL)<0) {
//// printf("Could not open codec.\n");
//// return -1;
//// }
//// AVFrame *pFrame, *pFrameYUV;
//// pFrame = av_frame_alloc();
//// pFrameYUV = av_frame_alloc();
////
//// if (SDL_Init(SDL_INIT_VIDEO | SDL_INIT_AUDIO | SDL_INIT_TIMER)) {
//// printf("Could not initialize SDL - %s\n", SDL_GetError());
//// return -1;
//// }
////
//// int screen_w = 0, screen_h = 0;
//// SDL_Surface *screen;
//// screen_w = pCodecCtx->width;
//// screen_h = pCodecCtx->height;
//// screen = SDL_SetVideoMode(screen_w, screen_h, 0, 0);
////
//// if (!screen) {
//// printf("SDL: could not set video mode - exiting:%s\n", SDL_GetError());
//// return -1;
//// }
//// SDL_Overlay *bmp;
//// bmp = SDL_CreateYUVOverlay(pCodecCtx->width, pCodecCtx->height, SDL_YV12_OVERLAY, screen);
//// SDL_Rect rect;
//// rect.x = 0;
//// rect.y = 0;
//// rect.w = screen_w;
//// rect.h = screen_h;
//// //SDL End------------------------
//// int ret, got_picture;
////
//// AVPacket *packet = (AVPacket *)av_malloc(sizeof(AVPacket));
////
////#if OUTPUT_YUV420P
//// FILE *fp_yuv = fopen("output.yuv", "wb+");
////#endif
//// SDL_WM_SetCaption("Simplest FFmpeg Mem Player", NULL);
////
//// struct SwsContext *img_convert_ctx;
//// img_convert_ctx = sws_getContext(pCodecCtx->width, pCodecCtx->height, pCodecCtx->pix_fmt, pCodecCtx->width, pCodecCtx->height, AV_PIX_FMT_YUV420P, SWS_BICUBIC, NULL, NULL, NULL);
//// //------------------------------
//// while (av_read_frame(pFormatCtx, packet) >= 0) {
//// if (packet->stream_index == videoindex) {
//// ret = avcodec_decode_video2(pCodecCtx, pFrame, &got_picture, packet);
//// if (ret < 0) {
//// printf("Decode Error.\n");
//// return -1;
//// }
//// if (got_picture) {
//// SDL_LockYUVOverlay(bmp);
//// pFrameYUV->data[0] = bmp->pixels[0];
//// pFrameYUV->data[1] = bmp->pixels[2];
//// pFrameYUV->data[2] = bmp->pixels[1];
//// pFrameYUV->linesize[0] = bmp->pitches[0];
//// pFrameYUV->linesize[1] = bmp->pitches[2];
//// pFrameYUV->linesize[2] = bmp->pitches[1];
//// sws_scale(img_convert_ctx, (const uint8_t* const*)pFrame->data, pFrame->linesize, 0, pCodecCtx->height, pFrameYUV->data, pFrameYUV->linesize);
////#if OUTPUT_YUV420P
//// int y_size = pCodecCtx->width*pCodecCtx->height;
//// fwrite(pFrameYUV->data[0], 1, y_size, fp_yuv); //Y
//// fwrite(pFrameYUV->data[1], 1, y_size / 4, fp_yuv); //U
//// fwrite(pFrameYUV->data[2], 1, y_size / 4, fp_yuv); //V
////#endif
//// SDL_UnlockYUVOverlay(bmp);
////
//// SDL_DisplayYUVOverlay(bmp, &rect);
//// //Delay 40ms
//// //SDL_Delay(40);
//// }
//// }
//// av_free_packet(packet);
//// }
//// sws_freeContext(img_convert_ctx);
////
////#if OUTPUT_YUV420P
//// fclose(fp_yuv);
////#endif
////
//// fclose(fp_open);
////
//// SDL_Quit();
////
//// //av_free(out_buffer);
//// av_free(pFrameYUV);
//// avcodec_close(pCodecCtx);
//// avformat_close_input(&pFormatCtx);
//// int a;
//// scanf("%d", &a);
// return 0;
//}
http://blog.csdn.net/u011430225/article/details/51462311
# --- Python side: dependencies for the DLL frame-callback client ---
from ctypes import *
import pickle
import cv2
import struct
import datetime
from PIL import Image
# dll = CDLL(r"C:\Users\Administrator\Documents\Visual Studio 2015\Projects\dlltest2\x64\Debug/dlltest2.dll")
import numpy as np
from numba import jit

index = 0
@jit
def getimage(nparr):
    """Convert one packed 1280x720 YUV420P frame (flat uint8 array) to BGR."""
    l_np = 1280 * 720                                   # luma (Y) byte count
    y = nparr[:l_np].reshape(-1, 1280)
    u = nparr[l_np:l_np + l_np // 4].reshape(-1, 1280 // 2)
    v = nparr[l_np + l_np // 4:].reshape(-1, 1280 // 2)
    # Upsample the half-resolution chroma planes back to full resolution.
    enlarge_U = cv2.resize(u, (0, 0), fx=2.0, fy=2.0, interpolation=cv2.INTER_CUBIC)
    enlarge_V = cv2.resize(v, (0, 0), fx=2.0, fy=2.0, interpolation=cv2.INTER_CUBIC)
    # Merge the three full-resolution planes and convert YUV -> BGR.
    img_YUV = cv2.merge([y, enlarge_U, enlarge_V])
    dst = cv2.cvtColor(img_YUV, cv2.COLOR_YUV2BGR)
    return dst
index = 1

def trans(s):
    """Render a bytes-like object as a bytes-literal string, e.g. b'\\x00\\xff'."""
    hex_body = ''.join('\\x%.2x' % byte for byte in s)
    return "b'%s'" % hex_body
def getStreamCallback(a, b):
    """ctypes callback invoked by the C++ DLL for every decoded frame.

    a: c_void_p pointing at a packed YUV420P frame buffer.
    b: byte count of that buffer (width * height * 3 // 2).
    Returns None (delivered to C as a NULL c_void_p).
    """
    bbb = string_at(a, b)  # copy the C buffer into an immutable Python bytes
    counter = datetime.datetime.now().strftime('%Y%m%d_%H%M%S_%f')
    print(1, counter)
    # np.frombuffer replaces the deprecated np.fromstring and avoids a copy.
    nparr = np.frombuffer(bbb, np.uint8)
    rgb = getimage(nparr)
    counter = datetime.datetime.now().strftime('%Y%m%d_%H%M%S_%f')
    print(2, counter)
    # Uncomment to preview frames:
    # cv2.imshow("sadf", rgb)
    # cv2.waitKey(1)
if __name__ == '__main__':
    # Load the decoder DLL from the working directory; ffmpeg_recv blocks and
    # streams decoded frames back through the registered callback.
    dll = CDLL(r"./dllt1.dll")
    # CFUNCTYPE(restype, *argtypes): returns c_void_p, takes (ptr, length).
    CMPFUNC = CFUNCTYPE(c_void_p, c_void_p, c_int)
    # Keep a reference to the thunk so ctypes does not garbage-collect it
    # while the DLL is still calling it.
    m_callback = CMPFUNC(getStreamCallback)
    dll.ffmpeg_recv(m_callback)
C++ 保存 YUV (C++ side: decode the stream and save / forward the YUV frames):
#include "stdafx.h"
#include "testdll.h"
#include <iostream>
#include<fstream>
#include <sys/types.h>
#include "opencv2/opencv.hpp"
#include "Ws2tcpip.h"
#include <sys/types.h>
#include <sys/types.h>
#include <winsock2.h>
#include <fcntl.h>
#include <cstring>
#include <cstdio>
#include <signal.h>
#pragma comment(lib,"ws2_32.lib")
using namespace std;
using namespace cv;
extern "C"
{
#include "libavcodec/avcodec.h"
#include "libavformat/avformat.h"
#include "libswscale/swscale.h"
#include "libavdevice/avdevice.h"
};
#define OUTPUT_YUV420P 0  // set to 1 to also dump decoded frames to output.yuv
FILE *fp_open = NULL;  // input file handle used by read_buffer_file (file-based debug path)
char recv_buf[1504];  // legacy scratch buffer; current code receives into data_recv instead
std::ofstream fout1("video.dat", std::ios::binary);  // sink for writedata(); opened at DLL load time
// Exported smoke-test helper: returns the sum of its two arguments.
int Add(int plus1, int plus2)
{
	return plus1 + plus2;
}
// Exported test helper: returns a pointer to a long NUL-terminated test string.
// The data lives in a writable static buffer: binding a string literal directly
// to a non-const char* (as the old code did) is ill-formed in ISO C++ and UB if
// the caller writes through the pointer.
// plus1 is unused; kept for ABI compatibility with existing ctypes callers.
char* testchar(int plus1) {
	static char str[] = "hello world11111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111";
	return str;
}
// Exported helper: loads D:/8.jpg and returns its raw interleaved BGR pixel
// bytes in a malloc()'d buffer.  *plus1 receives the number of bytes written
// (0 on failure, in which case NULL is returned).  The caller (the Python
// side) owns the returned buffer.
char* teststring(int* plus1) {
	Mat mat;
	// Load the image (BGR, 8-bit, 3 channels).
	mat = imread("D:/8.jpg", CV_LOAD_IMAGE_COLOR);
	printf("%d %d", mat.rows, mat.cols);
	if (mat.empty()) {        // imread failed: report zero bytes, no buffer
		*plus1 = 0;
		return NULL;
	}
	const int rows = mat.rows;
	const int rowBytes = mat.cols * 3;   // 3 channels (BGR), 1 byte each
	unsigned char *data = (unsigned char*)malloc(sizeof(unsigned char) * rows * rowBytes);
	if (data == NULL) {       // allocation failure
		*plus1 = 0;
		return NULL;
	}
	// Copy row by row via the row pointer: correct even when Mat rows carry
	// alignment padding (mat.step > cols*3), and far faster than the old
	// per-byte at<>() loop.
	int p = 0;
	for (int i = 0; i < rows; i++) {
		memcpy(data + p, mat.ptr<unsigned char>(i), rowBytes);
		p += rowBytes;
	}
	*plus1 = p;
	return (char*)data;
}
//str = "asdfsdf";
//string aaaa(str,500);
//char* char_r = (char *)malloc(sizeof(char) * (str.size() + 10));
//memcpy(char_r, str.data(), sizeof(char) * (str.size()));
//
////return const_cast<char*>(name.c_str());
////printf("1111111111");
//return str;
//}
// Append `size` raw bytes to the global "video.dat" stream.
// Returns size + 1 (legacy success convention expected by the Python caller).
int writedata(const char* data, int size)
{
	fout1.write(data, static_cast<std::streamsize>(size));
	return size + 1;
}
// Close the global "video.dat" output stream.  Returns 2 (legacy OK code).
int dataclose()
{
	const int kLegacyOk = 2;
	fout1.close();
	return kLegacyOk;
}
// ffmpeg_02.cpp : 定义控制台应用程序的入口点。
//
struct Data // wire packet: a length-prefixed chunk of the video stream ("数据包" = data packet)
{
int size;  // number of valid payload bytes in recvbuf
char recvbuf[1500];  // payload; NOTE(review): the struct is recv()'d raw, so sender and receiver must agree on layout/endianness — confirm sender side
}data_recv;  // single global instance reused by every read_buffer() call
SOCKET sockClient;  // TCP socket connected by tcpInit(), read by read_buffer()
//Callback
// AVIO read callback: pull one length-prefixed packet from the TCP socket and
// copy its payload into FFmpeg's buffer.
// Returns the number of payload bytes copied, or -1 on socket error, orderly
// shutdown, or a malformed packet.
int read_buffer(void *opaque, uint8_t *buf, int buf_size) {
	// TCP is a byte stream: a single recv() may deliver only part of the
	// struct, so loop until a complete struct Data has arrived.
	char *dst = (char *)&data_recv;
	int want = (int)sizeof(struct Data);
	int got = 0;
	while (got < want) {
		int ret = recv(sockClient, dst + got, want - got, 0);
		if (ret <= 0)  // <0: socket error, ==0: peer closed the connection
		{
			printf("recv() failed or connection closed!\n");
			return -1;
		}
		got += ret;
	}
	// data_recv.size comes straight off the network: validate it before using
	// it as a memcpy length, otherwise a bad/hostile peer overflows buf.
	int size = data_recv.size;
	if (size < 0 || size > (int)sizeof(data_recv.recvbuf) || size > buf_size) {
		printf("invalid packet size %d\n", size);
		return -1;
	}
	memcpy(buf, data_recv.recvbuf, size);
	return size;
}
// File-based AVIO read callback (debug path): read up to buf_size bytes from
// the global fp_open.  Returns the byte count read, or -1 at end-of-file or
// on a read error.
int read_buffer_file(void *opaque, uint8_t *buf, int buf_size) {
	// Read first, then test the result: feof() only becomes true AFTER a read
	// has hit EOF, so the old feof-before-read check could report a spurious
	// 0-byte "success" before signalling EOF.
	int true_size = (int)fread(buf, 1, buf_size, fp_open);
	if (true_size > 0) {
		return true_size;
	}
	return -1;
}
// Debug stub matching the callback signature: echoes its first argument.
static int callbackprint(int a, int b) {
	(void)b;  // second argument intentionally unused
	return a;
}
int tcpInit()
{
WSADATA wsaData;
char buff[1024];
memset(buff, 0, sizeof(buff));
if (WSAStartup(MAKEWORD(2, 2), &wsaData) != 0)
{
printf("初始化Winsock失败");
return -1;
}
SOCKADDR_IN addrSrv;
addrSrv.sin_family = AF_INET;
addrSrv.sin_port = htons(8888);//端口号
//addrSrv.sin_addr.S_un.S_addr = inet_pton("127.0.0.1");//IP地址
//addrSrv.sin_addr.S_un.S_addr = InetPton(AF_INET, _T("127.0.0.1"), &addrSrv.sin_addr.s_addr);
// //创建套接字
sockClient = socket(AF_INET, SOCK_STREAM, 0);
inet_pton(AF_INET, "127.0.0.1", &addrSrv.sin_addr.s_addr);
if (connect(sockClient, (struct sockaddr*)&addrSrv, sizeof(addrSrv)) == -1)
return -2;
//throw "连接失败";
if (SOCKET_ERROR == sockClient) {
printf("Socket() error:%d", WSAGetLastError());
return -3;
}
return 0;
}
// Exported entry point (called from Python through ctypes).
// Connects to the local TCP stream server, demuxes/decodes the incoming H.264
// stream through the custom AVIO callback read_buffer(), repacks each decoded
// frame into one contiguous YUV420P buffer and hands it to `tcallback` as
// (data pointer, byte count).  Blocks until the stream ends.
// Returns 0 on success, -1 on any setup or decode error.
int ffmpeg_recv(CompareFunc tcallback(char* a, int b))
{
	av_register_all();
	unsigned version = avcodec_version();
	printf("FFmpeg version:\t%d\n", version);
	int aaab = tcpInit();
	printf("conn server\t%d\n", aaab);
	AVFormatContext *pFormatCtx;
	int i, videoindex;
	AVCodecContext *pCodecCtx;
	AVCodec *pCodec;
	avformat_network_init();
	pFormatCtx = avformat_alloc_context();
	// Debug-only file handle; the live path reads from the socket instead.
	string patha = "C:\\Users\\sbd01\\Videos\\video.264";
	fp_open = fopen(patha.c_str(), "rb+");
	// Custom AVIO context: FFmpeg pulls compressed data via read_buffer().
	unsigned char *aviobuffer = (unsigned char *)av_malloc(1504);
	AVIOContext *avio = avio_alloc_context(aviobuffer, 1504, 0, NULL, read_buffer, NULL, NULL);
	pFormatCtx->pb = avio;
	if (avformat_open_input(&pFormatCtx, NULL, NULL, NULL) != 0) {
		printf("Couldn't open input stream.\n");
		return -1;
	}
	if (avformat_find_stream_info(pFormatCtx, NULL) < 0) {
		printf("Couldn't find stream information.\n");
		return -1;
	}
	// Locate the first video stream.
	videoindex = -1;
	for (i = 0; i < pFormatCtx->nb_streams; i++)
		if (pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO) {
			videoindex = i;
			break;
		}
	if (videoindex == -1) {
		printf("Didn't find a video stream.\n");
		return -1;
	}
	pCodecCtx = pFormatCtx->streams[videoindex]->codec;
	pCodec = avcodec_find_decoder(pCodecCtx->codec_id);
	if (pCodec == NULL) {
		printf("Codec not found.\n");
		return -1;
	}
	if (avcodec_open2(pCodecCtx, pCodec, NULL) < 0) {
		printf("Could not open codec.\n");
		return -1;
	}
	AVFrame *pFrame = av_frame_alloc();
	AVFrame *pFrameYUV = av_frame_alloc();
	int ret, got_picture;
	AVPacket *packet = (AVPacket *)av_malloc(sizeof(AVPacket));
#if OUTPUT_YUV420P
	FILE *fp_yuv = fopen("output.yuv", "wb+");
#endif
	struct SwsContext *img_convert_ctx;
	img_convert_ctx = sws_getContext(pCodecCtx->width, pCodecCtx->height, pCodecCtx->pix_fmt, pCodecCtx->width, pCodecCtx->height, AV_PIX_FMT_YUV420P, SWS_BICUBIC, NULL, NULL, NULL);
	// One packed YUV420P frame = Y (w*h) + U (w*h/4) + V (w*h/4) bytes.
	const int width = pCodecCtx->width;
	const int height = pCodecCtx->height;
	const int frame_bytes = width * height * 3 / 2;
	// Allocate once and reuse for every frame: the old code new[]'d per frame
	// and released with scalar `delete`, which is undefined behaviour.
	char* buf = new char[frame_bytes];
	while (av_read_frame(pFormatCtx, packet) >= 0) {
		if (packet->stream_index == videoindex) {
			ret = avcodec_decode_video2(pCodecCtx, pFrame, &got_picture, packet);
			if (ret < 0) {
				printf("Decode Error.\n");
				delete[] buf;
				return -1;
			}
			if (got_picture) {
				printf("decode video ok\n");
				// Copy each plane row by row to strip linesize padding so the
				// Python side receives a tightly packed YUV420P buffer.
				int a = 0;
				for (i = 0; i < height; i++) {
					memcpy(buf + a, pFrame->data[0] + i * pFrame->linesize[0], width);
					a += width;
				}
				for (i = 0; i < height / 2; i++) {
					memcpy(buf + a, pFrame->data[1] + i * pFrame->linesize[1], width / 2);
					a += width / 2;
				}
				for (i = 0; i < height / 2; i++) {
					memcpy(buf + a, pFrame->data[2] + i * pFrame->linesize[2], width / 2);
					a += width / 2;
				}
				tcallback(buf, frame_bytes);
#if OUTPUT_YUV420P
				fwrite(buf, 1, frame_bytes, fp_yuv);
#endif
			}
		}
		av_free_packet(packet);
	}
	delete[] buf;  // new[] pairs with delete[] (old code used scalar delete)
	sws_freeContext(img_convert_ctx);
#if OUTPUT_YUV420P
	fclose(fp_yuv);
#endif
	if (fp_open)  // the fopen above may have failed
		fclose(fp_open);
	av_free(pFrame);  // old code leaked pFrame entirely
	av_free(pFrameYUV);
	avcodec_close(pCodecCtx);
	avformat_close_input(&pFormatCtx);
	// Preserve the original blocking behaviour: wait for user input.
	int a;
	scanf("%d", &a);
	return 0;
}
//int _tmain(int argc, _TCHAR* argv[])
//{
// cout << "Hello FFmpeg!" << endl;
//// av_register_all();
//// unsigned version = avcodec_version();
////
//// printf("FFmpeg 版本号:\t%d\n", version);
////
//// WSADATA wsaData;
//// int port = 8888;//端口号
//// if (WSAStartup(MAKEWORD(2, 2), &wsaData) != 0)
//// {
//// printf("初始化失败");
//// return 0;
//// }
////
//// //创建用于监听的套接字,即服务端的套接字
//// SOCKET sockSrv = socket(AF_INET, SOCK_STREAM, 0);
////
//// SOCKADDR_IN addrSrv;
//// addrSrv.sin_family = AF_INET;
//// addrSrv.sin_port = htons(port); //1024以上的端口号
//// /**
//// * INADDR_ANY就是指定地址为0.0.0.0的地址,这个地址事实上表示不确定地址,或“所有地址”、“任意地址”。 一般来说,在各个系统中均定义成为0值。
//// */
//// addrSrv.sin_addr.S_un.S_addr = htonl(INADDR_ANY);
////
//// int retVal = bind(sockSrv, (LPSOCKADDR)&addrSrv, sizeof(SOCKADDR_IN));
//// if (retVal == SOCKET_ERROR) {
//// printf("连接失败:%d\n", WSAGetLastError());
//// return 0;
//// }
////
//// if (listen(sockSrv, 10) == SOCKET_ERROR) {
//// printf("监听失败:%d", WSAGetLastError());
//// return 0;
//// }
////
//// SOCKADDR_IN addrClient;
//// int len = sizeof(SOCKADDR);
////
//// while (1)
//// {
//// //等待客户请求到来
//// sockConn = accept(sockSrv, (SOCKADDR *)&addrClient, &len);
//// if (sockConn == SOCKET_ERROR) {
//// printf("等待请求失败:%d", WSAGetLastError());
//// break;
//// }
////
//// //printf("客户端的IP是:[%s]\n", inet_ntoa(addrClient.sin_addr));
////
//// //发送数据
//// char sendbuf[] = "你好,我是服务端,咱们一起聊天吧";
//// int iSend = send(sockConn, sendbuf, sizeof(sendbuf), 0);
//// if (iSend == SOCKET_ERROR) {
//// printf("发送失败");
//// break;
//// }
//// break;
////
//// /* HANDLE hThread = CreateThread(NULL, 0, Fun, NULL, 0, NULL);
//// CloseHandle(hThread);*/
////
//// }
//// AVFormatContext *pFormatCtx;
//// int i, videoindex;
//// AVCodecContext *pCodecCtx;
//// AVCodec *pCodec;
//// char filepath[] = "video.264";
////
//// av_register_all();
//// avformat_network_init();
//// pFormatCtx = avformat_alloc_context();
//// string patha = "C:\\Users\\sbd01\\Videos\\video.264";
//// //patha = "C:\\Users\\sbd01\\Pictures\\ffmpegtest\\Debug\\video.dat";
////
//// fp_open = fopen(patha.c_str(), "rb+");
//// //Init AVIOContext
//// unsigned char *aviobuffer = (unsigned char *)av_malloc(1504);
//// AVIOContext *avio = avio_alloc_context(aviobuffer, 1504, 0, NULL, read_buffer, NULL, NULL);
//// pFormatCtx->pb = avio;
//// //if (avformat_open_input(&pFormatCtx, patha.c_str(), NULL, NULL) != 0) {
////
//// if (avformat_open_input(&pFormatCtx, NULL, NULL, NULL) != 0) {
//// printf("Couldn't open input stream.\n");
//// return -1;
//// }
//// if (avformat_find_stream_info(pFormatCtx, NULL)<0) {
//// printf("Couldn't find stream information.\n");
//// return -1;
//// }
//// videoindex = -1;
//// for (i = 0; i<pFormatCtx->nb_streams; i++)
//// if (pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO) {
//// videoindex = i;
//// break;
//// }
//// if (videoindex == -1) {
//// printf("Didn't find a video stream.\n");
//// return -1;
//// }
//// pCodecCtx = pFormatCtx->streams[videoindex]->codec;
//// pCodec = avcodec_find_decoder(pCodecCtx->codec_id);
//// if (pCodec == NULL) {
//// printf("Codec not found.\n");
//// return -1;
//// }
//// if (avcodec_open2(pCodecCtx, pCodec, NULL)<0) {
//// printf("Could not open codec.\n");
//// return -1;
//// }
//// AVFrame *pFrame, *pFrameYUV;
//// pFrame = av_frame_alloc();
//// pFrameYUV = av_frame_alloc();
////
//// if (SDL_Init(SDL_INIT_VIDEO | SDL_INIT_AUDIO | SDL_INIT_TIMER)) {
//// printf("Could not initialize SDL - %s\n", SDL_GetError());
//// return -1;
//// }
////
//// int screen_w = 0, screen_h = 0;
//// SDL_Surface *screen;
//// screen_w = pCodecCtx->width;
//// screen_h = pCodecCtx->height;
//// screen = SDL_SetVideoMode(screen_w, screen_h, 0, 0);
////
//// if (!screen) {
//// printf("SDL: could not set video mode - exiting:%s\n", SDL_GetError());
//// return -1;
//// }
//// SDL_Overlay *bmp;
//// bmp = SDL_CreateYUVOverlay(pCodecCtx->width, pCodecCtx->height, SDL_YV12_OVERLAY, screen);
//// SDL_Rect rect;
//// rect.x = 0;
//// rect.y = 0;
//// rect.w = screen_w;
//// rect.h = screen_h;
//// //SDL End------------------------
//// int ret, got_picture;
////
//// AVPacket *packet = (AVPacket *)av_malloc(sizeof(AVPacket));
////
////#if OUTPUT_YUV420P
//// FILE *fp_yuv = fopen("output.yuv", "wb+");
////#endif
//// SDL_WM_SetCaption("Simplest FFmpeg Mem Player", NULL);
////
//// struct SwsContext *img_convert_ctx;
//// img_convert_ctx = sws_getContext(pCodecCtx->width, pCodecCtx->height, pCodecCtx->pix_fmt, pCodecCtx->width, pCodecCtx->height, AV_PIX_FMT_YUV420P, SWS_BICUBIC, NULL, NULL, NULL);
//// //------------------------------
//// while (av_read_frame(pFormatCtx, packet) >= 0) {
//// if (packet->stream_index == videoindex) {
//// ret = avcodec_decode_video2(pCodecCtx, pFrame, &got_picture, packet);
//// if (ret < 0) {
//// printf("Decode Error.\n");
//// return -1;
//// }
//// if (got_picture) {
//// SDL_LockYUVOverlay(bmp);
//// pFrameYUV->data[0] = bmp->pixels[0];
//// pFrameYUV->data[1] = bmp->pixels[2];
//// pFrameYUV->data[2] = bmp->pixels[1];
//// pFrameYUV->linesize[0] = bmp->pitches[0];
//// pFrameYUV->linesize[1] = bmp->pitches[2];
//// pFrameYUV->linesize[2] = bmp->pitches[1];
//// sws_scale(img_convert_ctx, (const uint8_t* const*)pFrame->data, pFrame->linesize, 0, pCodecCtx->height, pFrameYUV->data, pFrameYUV->linesize);
////#if OUTPUT_YUV420P
//// int y_size = pCodecCtx->width*pCodecCtx->height;
//// fwrite(pFrameYUV->data[0], 1, y_size, fp_yuv); //Y
//// fwrite(pFrameYUV->data[1], 1, y_size / 4, fp_yuv); //U
//// fwrite(pFrameYUV->data[2], 1, y_size / 4, fp_yuv); //V
////#endif
//// SDL_UnlockYUVOverlay(bmp);
////
//// SDL_DisplayYUVOverlay(bmp, &rect);
//// //Delay 40ms
//// //SDL_Delay(40);
//// }
//// }
//// av_free_packet(packet);
//// }
//// sws_freeContext(img_convert_ctx);
////
////#if OUTPUT_YUV420P
//// fclose(fp_yuv);
////#endif
////
//// fclose(fp_open);
////
//// SDL_Quit();
////
//// //av_free(out_buffer);
//// av_free(pFrameYUV);
//// avcodec_close(pCodecCtx);
//// avformat_close_input(&pFormatCtx);
//// int a;
//// scanf("%d", &a);
// return 0;
//}
26张图片 (26 images):
http://blog.csdn.net/u011430225/article/details/51462311
# --- Python side: dependencies for the DLL frame-callback client ---
from ctypes import *
import pickle
import cv2
import struct
import datetime
from PIL import Image
# dll = CDLL(r"C:\Users\Administrator\Documents\Visual Studio 2015\Projects\dlltest2\x64\Debug/dlltest2.dll")
import numpy as np
from numba import jit

index = 0
@jit
def getimage(nparr):
    """Convert one packed 1280x720 YUV420P frame (flat uint8 array) to BGR."""
    l_np = 1280 * 720                                   # luma (Y) byte count
    y = nparr[:l_np].reshape(-1, 1280)
    u = nparr[l_np:l_np + l_np // 4].reshape(-1, 1280 // 2)
    v = nparr[l_np + l_np // 4:].reshape(-1, 1280 // 2)
    # Upsample the half-resolution chroma planes back to full resolution.
    enlarge_U = cv2.resize(u, (0, 0), fx=2.0, fy=2.0, interpolation=cv2.INTER_CUBIC)
    enlarge_V = cv2.resize(v, (0, 0), fx=2.0, fy=2.0, interpolation=cv2.INTER_CUBIC)
    # Merge the three full-resolution planes and convert YUV -> BGR.
    img_YUV = cv2.merge([y, enlarge_U, enlarge_V])
    dst = cv2.cvtColor(img_YUV, cv2.COLOR_YUV2BGR)
    return dst
index = 1

def trans(s):
    """Render a bytes-like object as a bytes-literal string, e.g. b'\\x00\\xff'."""
    hex_body = ''.join('\\x%.2x' % byte for byte in s)
    return "b'%s'" % hex_body
def getStreamCallback(a, b):
    """ctypes callback invoked by the C++ DLL for every decoded frame.

    a: c_void_p pointing at a packed YUV420P frame buffer.
    b: byte count of that buffer (width * height * 3 // 2).
    Returns None (delivered to C as a NULL c_void_p).
    """
    bbb = string_at(a, b)  # copy the C buffer into an immutable Python bytes
    counter = datetime.datetime.now().strftime('%Y%m%d_%H%M%S_%f')
    print(1, counter)
    # np.frombuffer replaces the deprecated np.fromstring and avoids a copy.
    nparr = np.frombuffer(bbb, np.uint8)
    rgb = getimage(nparr)
    counter = datetime.datetime.now().strftime('%Y%m%d_%H%M%S_%f')
    print(2, counter)
    # Uncomment to preview frames:
    # cv2.imshow("sadf", rgb)
    # cv2.waitKey(1)
if __name__ == '__main__':
    # Load the decoder DLL from the working directory; ffmpeg_recv blocks and
    # streams decoded frames back through the registered callback.
    dll = CDLL(r"./dllt1.dll")
    # CFUNCTYPE(restype, *argtypes): returns c_void_p, takes (ptr, length).
    CMPFUNC = CFUNCTYPE(c_void_p, c_void_p, c_int)
    # Keep a reference to the thunk so ctypes does not garbage-collect it
    # while the DLL is still calling it.
    m_callback = CMPFUNC(getStreamCallback)
    dll.ffmpeg_recv(m_callback)
C++ 保存 YUV (C++ side: decode the stream and save / forward the YUV frames):
#include "stdafx.h"
#include "testdll.h"
#include <iostream>
#include<fstream>
#include <sys/types.h>
#include "opencv2/opencv.hpp"
#include "Ws2tcpip.h"
#include <sys/types.h>
#include <sys/types.h>
#include <winsock2.h>
#include <fcntl.h>
#include <cstring>
#include <cstdio>
#include <signal.h>
#pragma comment(lib,"ws2_32.lib")
using namespace std;
using namespace cv;
extern "C"
{
#include "libavcodec/avcodec.h"
#include "libavformat/avformat.h"
#include "libswscale/swscale.h"
#include "libavdevice/avdevice.h"
};
#define OUTPUT_YUV420P 0  // set to 1 to also dump decoded frames to output.yuv
FILE *fp_open = NULL;  // input file handle used by read_buffer_file (file-based debug path)
char recv_buf[1504];  // legacy scratch buffer; current code receives into data_recv instead
std::ofstream fout1("video.dat", std::ios::binary);  // sink for writedata(); opened at DLL load time
// Exported smoke-test helper: returns the sum of its two arguments.
int Add(int plus1, int plus2)
{
	return plus1 + plus2;
}
// Exported test helper: returns a pointer to a long NUL-terminated test string.
// The data lives in a writable static buffer: binding a string literal directly
// to a non-const char* (as the old code did) is ill-formed in ISO C++ and UB if
// the caller writes through the pointer.
// plus1 is unused; kept for ABI compatibility with existing ctypes callers.
char* testchar(int plus1) {
	static char str[] = "hello world11111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111";
	return str;
}
// Exported helper: loads D:/8.jpg and returns its raw interleaved BGR pixel
// bytes in a malloc()'d buffer.  *plus1 receives the number of bytes written
// (0 on failure, in which case NULL is returned).  The caller (the Python
// side) owns the returned buffer.
char* teststring(int* plus1) {
	Mat mat;
	// Load the image (BGR, 8-bit, 3 channels).
	mat = imread("D:/8.jpg", CV_LOAD_IMAGE_COLOR);
	printf("%d %d", mat.rows, mat.cols);
	if (mat.empty()) {        // imread failed: report zero bytes, no buffer
		*plus1 = 0;
		return NULL;
	}
	const int rows = mat.rows;
	const int rowBytes = mat.cols * 3;   // 3 channels (BGR), 1 byte each
	unsigned char *data = (unsigned char*)malloc(sizeof(unsigned char) * rows * rowBytes);
	if (data == NULL) {       // allocation failure
		*plus1 = 0;
		return NULL;
	}
	// Copy row by row via the row pointer: correct even when Mat rows carry
	// alignment padding (mat.step > cols*3), and far faster than the old
	// per-byte at<>() loop.
	int p = 0;
	for (int i = 0; i < rows; i++) {
		memcpy(data + p, mat.ptr<unsigned char>(i), rowBytes);
		p += rowBytes;
	}
	*plus1 = p;
	return (char*)data;
}
//str = "asdfsdf";
//string aaaa(str,500);
//char* char_r = (char *)malloc(sizeof(char) * (str.size() + 10));
//memcpy(char_r, str.data(), sizeof(char) * (str.size()));
//
////return const_cast<char*>(name.c_str());
////printf("1111111111");
//return str;
//}
// Append `size` raw bytes to the global "video.dat" stream.
// Returns size + 1 (legacy success convention expected by the Python caller).
int writedata(const char* data, int size)
{
	fout1.write(data, static_cast<std::streamsize>(size));
	return size + 1;
}
// Close the global "video.dat" output stream.  Returns 2 (legacy OK code).
int dataclose()
{
	const int kLegacyOk = 2;
	fout1.close();
	return kLegacyOk;
}
// ffmpeg_02.cpp : 定义控制台应用程序的入口点。
//
struct Data // wire packet: a length-prefixed chunk of the video stream ("数据包" = data packet)
{
int size;  // number of valid payload bytes in recvbuf
char recvbuf[1500];  // payload; NOTE(review): the struct is recv()'d raw, so sender and receiver must agree on layout/endianness — confirm sender side
}data_recv;  // single global instance reused by every read_buffer() call
SOCKET sockClient;  // TCP socket connected by tcpInit(), read by read_buffer()
//Callback
// AVIO read callback: pull one length-prefixed packet from the TCP socket and
// copy its payload into FFmpeg's buffer.
// Returns the number of payload bytes copied, or -1 on socket error, orderly
// shutdown, or a malformed packet.
int read_buffer(void *opaque, uint8_t *buf, int buf_size) {
	// TCP is a byte stream: a single recv() may deliver only part of the
	// struct, so loop until a complete struct Data has arrived.
	char *dst = (char *)&data_recv;
	int want = (int)sizeof(struct Data);
	int got = 0;
	while (got < want) {
		int ret = recv(sockClient, dst + got, want - got, 0);
		if (ret <= 0)  // <0: socket error, ==0: peer closed the connection
		{
			printf("recv() failed or connection closed!\n");
			return -1;
		}
		got += ret;
	}
	// data_recv.size comes straight off the network: validate it before using
	// it as a memcpy length, otherwise a bad/hostile peer overflows buf.
	int size = data_recv.size;
	if (size < 0 || size > (int)sizeof(data_recv.recvbuf) || size > buf_size) {
		printf("invalid packet size %d\n", size);
		return -1;
	}
	memcpy(buf, data_recv.recvbuf, size);
	return size;
}
// File-based AVIO read callback (debug path): read up to buf_size bytes from
// the global fp_open.  Returns the byte count read, or -1 at end-of-file or
// on a read error.
int read_buffer_file(void *opaque, uint8_t *buf, int buf_size) {
	// Read first, then test the result: feof() only becomes true AFTER a read
	// has hit EOF, so the old feof-before-read check could report a spurious
	// 0-byte "success" before signalling EOF.
	int true_size = (int)fread(buf, 1, buf_size, fp_open);
	if (true_size > 0) {
		return true_size;
	}
	return -1;
}
// Debug stub matching the callback signature: echoes its first argument.
static int callbackprint(int a, int b) {
	(void)b;  // second argument intentionally unused
	return a;
}
int tcpInit()
{
WSADATA wsaData;
char buff[1024];
memset(buff, 0, sizeof(buff));
if (WSAStartup(MAKEWORD(2, 2), &wsaData) != 0)
{
printf("初始化Winsock失败");
return -1;
}
SOCKADDR_IN addrSrv;
addrSrv.sin_family = AF_INET;
addrSrv.sin_port = htons(8888);//端口号
//addrSrv.sin_addr.S_un.S_addr = inet_pton("127.0.0.1");//IP地址
//addrSrv.sin_addr.S_un.S_addr = InetPton(AF_INET, _T("127.0.0.1"), &addrSrv.sin_addr.s_addr);
// //创建套接字
sockClient = socket(AF_INET, SOCK_STREAM, 0);
inet_pton(AF_INET, "127.0.0.1", &addrSrv.sin_addr.s_addr);
if (connect(sockClient, (struct sockaddr*)&addrSrv, sizeof(addrSrv)) == -1)
return -2;
//throw "连接失败";
if (SOCKET_ERROR == sockClient) {
printf("Socket() error:%d", WSAGetLastError());
return -3;
}
return 0;
}
int ffmpeg_recv(CompareFunc tcallback(char* a, int b))
{
av_register_all();
unsigned version = avcodec_version();
printf("FFmpeg version:\t%d\n", version);
int aaab = tcpInit();
printf("conn server\t%d\n", aaab);
AVFormatContext *pFormatCtx;
int i, videoindex;
AVCodecContext *pCodecCtx;
AVCodec *pCodec;
char filepath[] = "video.264";
//av_register_all();
avformat_network_init();
pFormatCtx = avformat_alloc_context();
string patha = "C:\\Users\\sbd01\\Videos\\video.264";
//patha = "C:\\Users\\sbd01\\Pictures\\ffmpegtest\\Debug\\video.dat";
fp_open = fopen(patha.c_str(), "rb+");
//Init AVIOContext
unsigned char *aviobuffer = (unsigned char *)av_malloc(1504);
AVIOContext *avio = avio_alloc_context(aviobuffer, 1504, 0, NULL, read_buffer, NULL, NULL);
pFormatCtx->pb = avio;
//if (avformat_open_input(&pFormatCtx, patha.c_str(), NULL, NULL) != 0) {
//tcallback("------------------",11);
if (avformat_open_input(&pFormatCtx, NULL, NULL, NULL) != 0) {
printf("Couldn't open input stream.\n");
return -1;
}
//tcallback("------------------", 3);
if (avformat_find_stream_info(pFormatCtx, NULL)<0) {
printf("Couldn't find stream information.\n");
return -1;
}
videoindex = -1;
for (i = 0; i<pFormatCtx->nb_streams; i++)
if (pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO) {
videoindex = i;
break;
}
if (videoindex == -1) {
printf("Didn't find a video stream.\n");
return -1;
}
pCodecCtx = pFormatCtx->streams[videoindex]->codec;
pCodec = avcodec_find_decoder(pCodecCtx->codec_id);
if (pCodec == NULL) {
printf("Codec not found.\n");
return -1;
}
if (avcodec_open2(pCodecCtx, pCodec, NULL)<0) {
printf("Could not open codec.\n");
return -1;
}
AVFrame *pFrame, *pFrameYUV;
pFrame = av_frame_alloc();
pFrameYUV = av_frame_alloc();
/*if (SDL_Init(SDL_INIT_VIDEO | SDL_INIT_AUDIO | SDL_INIT_TIMER)) {
printf("Could not initialize SDL - %s\n", SDL_GetError());
return -1;
}*/
/*int screen_w = 0, screen_h = 0;
SDL_Surface *screen;
screen_w = pCodecCtx->width;
screen_h = pCodecCtx->height;
screen = SDL_SetVideoMode(screen_w, screen_h, 0, 0);
if (!screen) {
printf("SDL: could not set video mode - exiting:%s\n", SDL_GetError());
return -1;
}
SDL_Overlay *bmp;
bmp = SDL_CreateYUVOverlay(pCodecCtx->width, pCodecCtx->height, SDL_YV12_OVERLAY, screen);
SDL_Rect rect;
rect.x = 0;
rect.y = 0;
rect.w = screen_w;
rect.h = screen_h;*/
//SDL End------------------------
int ret, got_picture;
AVPacket *packet = (AVPacket *)av_malloc(sizeof(AVPacket));
#if OUTPUT_YUV420P
FILE *fp_yuv = fopen("output.yuv", "wb+");
#endif
//SDL_WM_SetCaption("Simplest FFmpeg Mem Player", NULL);
struct SwsContext *img_convert_ctx;
img_convert_ctx = sws_getContext(pCodecCtx->width, pCodecCtx->height, pCodecCtx->pix_fmt, pCodecCtx->width, pCodecCtx->height, AV_PIX_FMT_YUV420P, SWS_BICUBIC, NULL, NULL, NULL);
//------------------------------
/*uint8_t *out_buffer;
out_buffer = new uint8_t[avpicture_get_size(AV_PIX_FMT_RGB24, pCodecCtx->width, pCodecCtx->height)];
avpicture_fill((AVPicture *)pFrameYUV, out_buffer, AV_PIX_FMT_RGB24, pCodecCtx->width, pCodecCtx->height);*/
while (av_read_frame(pFormatCtx, packet) >= 0) {
if (packet->stream_index == videoindex) {
ret = avcodec_decode_video2(pCodecCtx, pFrame, &got_picture, packet);
if (ret < 0) {
printf("Decode Error.\n");
return -1;
}
if (got_picture) {
char* buf = new char[pCodecCtx->height * pCodecCtx->width * 3 / 2];
memset(buf, 0, pCodecCtx->height * pCodecCtx->width * 3 / 2);
int height = pCodecCtx->height;
int width = pCodecCtx->width;
printf("decode video ok\n");
int a = 0, i;
for (i = 0; i<height; i++)
{
memcpy(buf + a, pFrame->data[0] + i * pFrame->linesize[0], width);
a += width;
}
for (i = 0; i<height / 2; i++)
{
memcpy(buf + a, pFrame->data[1] + i * pFrame->linesize[1], width / 2);
a += width / 2;
}
for (i = 0; i<height / 2; i++)
{
memcpy(buf + a, pFrame->data[2] + i * pFrame->linesize[2], width / 2);
a += width / 2;
}
tcallback(buf, pCodecCtx->height * pCodecCtx->width * 3 / 2);
//fwrite(buf, 1, pCodecCtx->height * pCodecCtx->width * 3 / 2, fp_yuv);
delete buf;
buf = NULL;
//-------------------------------------------------------------
// while (av_read_frame(pFormatCtx, packet) >= 0) {
// if (packet->stream_index == videoindex) {
// ret = avcodec_decode_video2(pCodecCtx, pFrame, &got_picture, packet);//解码
// if (ret < 0) {
// printf("Decode Error.\n");
// return -1;
// }
// if (got_picture)
// {
// string aaa = "aaaa";
// string bbb = "bbbb";
// const char *p = aaa.c_str();
// const char *q = bbb.c_str();
// tcallback(p, q);
// /*SDL_LockYUVOverlay(bmp);
// pFrameYUV->data[0] = bmp->pixels[0];
// pFrameYUV->data[1] = bmp->pixels[2];
// pFrameYUV->data[2] = bmp->pixels[1];
// pFrameYUV->linesize[0] = bmp->pitches[0];
// pFrameYUV->linesize[1] = bmp->pitches[2];
// pFrameYUV->linesize[2] = bmp->pitches[1];
// sws_scale(img_convert_ctx, (const uint8_t* const*)pFrame->data, pFrame->linesize, 0, pCodecCtx->height, pFrameYUV->data, pFrameYUV->linesize);*/
//#if OUTPUT_YUV420P
// int y_size = pCodecCtx->width*pCodecCtx->height;
// fwrite(pFrameYUV->data[0], 1, y_size, fp_yuv); //Y
// fwrite(pFrameYUV->data[1], 1, y_size / 4, fp_yuv); //U
// fwrite(pFrameYUV->data[2], 1, y_size / 4, fp_yuv); //V
//#endif
// /*SDL_UnlockYUVOverlay(bmp);
//
// SDL_DisplayYUVOverlay(bmp, &rect);*/
// //Delay 40ms
// //SDL_Delay(40);
}
}
av_free_packet(packet);
}
sws_freeContext(img_convert_ctx);
#if OUTPUT_YUV420P
fclose(fp_yuv);
#endif
fclose(fp_open);
//SDL_Quit();
//av_free(out_buffer);
av_free(pFrameYUV);
avcodec_close(pCodecCtx);
avformat_close_input(&pFormatCtx);
int a;
scanf("%d", &a);
return 0;
}
//int _tmain(int argc, _TCHAR* argv[])
//{
// cout << "Hello FFmpeg!" << endl;
//// av_register_all();
//// unsigned version = avcodec_version();
////
//// printf("FFmpeg 版本号:\t%d\n", version);
////
//// WSADATA wsaData;
//// int port = 8888;//端口号
//// if (WSAStartup(MAKEWORD(2, 2), &wsaData) != 0)
//// {
//// printf("初始化失败");
//// return 0;
//// }
////
//// //创建用于监听的套接字,即服务端的套接字
//// SOCKET sockSrv = socket(AF_INET, SOCK_STREAM, 0);
////
//// SOCKADDR_IN addrSrv;
//// addrSrv.sin_family = AF_INET;
//// addrSrv.sin_port = htons(port); //1024以上的端口号
//// /**
//// * INADDR_ANY就是指定地址为0.0.0.0的地址,这个地址事实上表示不确定地址,或“所有地址”、“任意地址”。 一般来说,在各个系统中均定义成为0值。
//// */
//// addrSrv.sin_addr.S_un.S_addr = htonl(INADDR_ANY);
////
//// int retVal = bind(sockSrv, (LPSOCKADDR)&addrSrv, sizeof(SOCKADDR_IN));
//// if (retVal == SOCKET_ERROR) {
//// printf("连接失败:%d\n", WSAGetLastError());
//// return 0;
//// }
////
//// if (listen(sockSrv, 10) == SOCKET_ERROR) {
//// printf("监听失败:%d", WSAGetLastError());
//// return 0;
//// }
////
//// SOCKADDR_IN addrClient;
//// int len = sizeof(SOCKADDR);
////
//// while (1)
//// {
//// //等待客户请求到来
//// sockConn = accept(sockSrv, (SOCKADDR *)&addrClient, &len);
//// if (sockConn == SOCKET_ERROR) {
//// printf("等待请求失败:%d", WSAGetLastError());
//// break;
//// }
////
//// //printf("客户端的IP是:[%s]\n", inet_ntoa(addrClient.sin_addr));
////
//// //发送数据
//// char sendbuf[] = "你好,我是服务端,咱们一起聊天吧";
//// int iSend = send(sockConn, sendbuf, sizeof(sendbuf), 0);
//// if (iSend == SOCKET_ERROR) {
//// printf("发送失败");
//// break;
//// }
//// break;
////
//// /* HANDLE hThread = CreateThread(NULL, 0, Fun, NULL, 0, NULL);
//// CloseHandle(hThread);*/
////
//// }
//// AVFormatContext *pFormatCtx;
//// int i, videoindex;
//// AVCodecContext *pCodecCtx;
//// AVCodec *pCodec;
//// char filepath[] = "video.264";
////
//// av_register_all();
//// avformat_network_init();
//// pFormatCtx = avformat_alloc_context();
//// string patha = "C:\\Users\\sbd01\\Videos\\video.264";
//// //patha = "C:\\Users\\sbd01\\Pictures\\ffmpegtest\\Debug\\video.dat";
////
//// fp_open = fopen(patha.c_str(), "rb+");
//// //Init AVIOContext
//// unsigned char *aviobuffer = (unsigned char *)av_malloc(1504);
//// AVIOContext *avio = avio_alloc_context(aviobuffer, 1504, 0, NULL, read_buffer, NULL, NULL);
//// pFormatCtx->pb = avio;
//// //if (avformat_open_input(&pFormatCtx, patha.c_str(), NULL, NULL) != 0) {
////
//// if (avformat_open_input(&pFormatCtx, NULL, NULL, NULL) != 0) {
//// printf("Couldn't open input stream.\n");
//// return -1;
//// }
//// if (avformat_find_stream_info(pFormatCtx, NULL)<0) {
//// printf("Couldn't find stream information.\n");
//// return -1;
//// }
//// videoindex = -1;
//// for (i = 0; i<pFormatCtx->nb_streams; i++)
//// if (pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO) {
//// videoindex = i;
//// break;
//// }
//// if (videoindex == -1) {
//// printf("Didn't find a video stream.\n");
//// return -1;
//// }
//// pCodecCtx = pFormatCtx->streams[videoindex]->codec;
//// pCodec = avcodec_find_decoder(pCodecCtx->codec_id);
//// if (pCodec == NULL) {
//// printf("Codec not found.\n");
//// return -1;
//// }
//// if (avcodec_open2(pCodecCtx, pCodec, NULL)<0) {
//// printf("Could not open codec.\n");
//// return -1;
//// }
//// AVFrame *pFrame, *pFrameYUV;
//// pFrame = av_frame_alloc();
//// pFrameYUV = av_frame_alloc();
////
//// if (SDL_Init(SDL_INIT_VIDEO | SDL_INIT_AUDIO | SDL_INIT_TIMER)) {
//// printf("Could not initialize SDL - %s\n", SDL_GetError());
//// return -1;
//// }
////
//// int screen_w = 0, screen_h = 0;
//// SDL_Surface *screen;
//// screen_w = pCodecCtx->width;
//// screen_h = pCodecCtx->height;
//// screen = SDL_SetVideoMode(screen_w, screen_h, 0, 0);
////
//// if (!screen) {
//// printf("SDL: could not set video mode - exiting:%s\n", SDL_GetError());
//// return -1;
//// }
//// SDL_Overlay *bmp;
//// bmp = SDL_CreateYUVOverlay(pCodecCtx->width, pCodecCtx->height, SDL_YV12_OVERLAY, screen);
//// SDL_Rect rect;
//// rect.x = 0;
//// rect.y = 0;
//// rect.w = screen_w;
//// rect.h = screen_h;
//// //SDL End------------------------
//// int ret, got_picture;
////
//// AVPacket *packet = (AVPacket *)av_malloc(sizeof(AVPacket));
////
////#if OUTPUT_YUV420P
//// FILE *fp_yuv = fopen("output.yuv", "wb+");
////#endif
//// SDL_WM_SetCaption("Simplest FFmpeg Mem Player", NULL);
////
//// struct SwsContext *img_convert_ctx;
//// img_convert_ctx = sws_getContext(pCodecCtx->width, pCodecCtx->height, pCodecCtx->pix_fmt, pCodecCtx->width, pCodecCtx->height, AV_PIX_FMT_YUV420P, SWS_BICUBIC, NULL, NULL, NULL);
//// //------------------------------
//// while (av_read_frame(pFormatCtx, packet) >= 0) {
//// if (packet->stream_index == videoindex) {
//// ret = avcodec_decode_video2(pCodecCtx, pFrame, &got_picture, packet);
//// if (ret < 0) {
//// printf("Decode Error.\n");
//// return -1;
//// }
//// if (got_picture) {
//// SDL_LockYUVOverlay(bmp);
//// pFrameYUV->data[0] = bmp->pixels[0];
//// pFrameYUV->data[1] = bmp->pixels[2];
//// pFrameYUV->data[2] = bmp->pixels[1];
//// pFrameYUV->linesize[0] = bmp->pitches[0];
//// pFrameYUV->linesize[1] = bmp->pitches[2];
//// pFrameYUV->linesize[2] = bmp->pitches[1];
//// sws_scale(img_convert_ctx, (const uint8_t* const*)pFrame->data, pFrame->linesize, 0, pCodecCtx->height, pFrameYUV->data, pFrameYUV->linesize);
////#if OUTPUT_YUV420P
//// int y_size = pCodecCtx->width*pCodecCtx->height;
//// fwrite(pFrameYUV->data[0], 1, y_size, fp_yuv); //Y
//// fwrite(pFrameYUV->data[1], 1, y_size / 4, fp_yuv); //U
//// fwrite(pFrameYUV->data[2], 1, y_size / 4, fp_yuv); //V
////#endif
//// SDL_UnlockYUVOverlay(bmp);
////
//// SDL_DisplayYUVOverlay(bmp, &rect);
//// //Delay 40ms
//// //SDL_Delay(40);
//// }
//// }
//// av_free_packet(packet);
//// }
//// sws_freeContext(img_convert_ctx);
////
////#if OUTPUT_YUV420P
//// fclose(fp_yuv);
////#endif
////
//// fclose(fp_open);
////
//// SDL_Quit();
////
//// //av_free(out_buffer);
//// av_free(pFrameYUV);
//// avcodec_close(pCodecCtx);
//// avformat_close_input(&pFormatCtx);
//// int a;
//// scanf("%d", &a);
// return 0;
//}
相关文章推荐
- python 调用c++ ffmpeg接收rgb
- C++ 调用Python文件方法传递字典参数并接收返回值
- C++调用Python浅析
- c++ 调用python传输图片
- C++调用PythonAPI线程状态和全局解释器锁
- python下面通过ctypes模块调用c++库的方法
- Python调用C/C++的种种方法
- C++程序调用Python的函数(简单应用)及Ubuntu16.04下codeblocks的环境配置
- c++ 调用 python
- 查看python调用c++代码的代码实现位于那个cpp文件
- python调用C++之pybind11入门
- linux下使用SWIG实现Python调用C++ sdk读取相机图片
- C++调用python配置及编译出现的问题
- python 调用c++
- python 利用swig 调用c++的接口。
- 转帖:C/C++中如何调用Python方法
- c++调用python脚本
- c++调用python封装接口
- C++中调用Python脚本
- [转载] Python调用C/C++动态链接库