
Combining Multiple Images into a Single Video File on iOS

Reposted from: http://blog.iosxcode4.com/archives/160

The frameworks used are:

MediaPlayer.framework, QuartzCore.framework, CoreVideo.framework, CoreMedia.framework, AVFoundation.framework
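Assuming these frameworks are linked against the target, the matching headers would be imported along these lines (a minimal sketch; MediaPlayer is only needed if you also want to play the finished movie):

#import <AVFoundation/AVFoundation.h>
#import <CoreMedia/CoreMedia.h>
#import <CoreVideo/CoreVideo.h>
#import <QuartzCore/QuartzCore.h>
#import <MediaPlayer/MediaPlayer.h>   // only needed for playing back the result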

The code is as follows:

- (CVPixelBufferRef)pixelBufferFromCGImage:(CGImageRef)image size:(CGSize)size
{
    NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey, nil];

    // Create an empty 32-bit ARGB pixel buffer of the requested size.
    CVPixelBufferRef pxbuffer = NULL;
    CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, size.width, size.height,
                                          kCVPixelFormatType_32ARGB, (CFDictionaryRef)options, &pxbuffer);
    NSParameterAssert(status == kCVReturnSuccess && pxbuffer != NULL);

    // Lock the buffer and draw the CGImage into it through a bitmap context.
    CVPixelBufferLockBaseAddress(pxbuffer, 0);
    void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);
    NSParameterAssert(pxdata != NULL);

    CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(pxdata, size.width, size.height, 8,
                                                 4 * size.width, rgbColorSpace,
                                                 kCGImageAlphaPremultipliedFirst);
    NSParameterAssert(context);
    CGContextDrawImage(context, CGRectMake(0, 0, CGImageGetWidth(image), CGImageGetHeight(image)), image);

    CGColorSpaceRelease(rgbColorSpace);
    CGContextRelease(context);
    CVPixelBufferUnlockBaseAddress(pxbuffer, 0);

    // The caller owns the returned buffer and is responsible for releasing it.
    return pxbuffer;
}
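Because the buffer comes from CVPixelBufferCreate, the caller owns it and must release it when done. A minimal usage sketch (the image name is hypothetical, not from the original post):

UIImage *image = [UIImage imageNamed:@"photo.jpg"];   // hypothetical source image
CVPixelBufferRef buffer = [self pixelBufferFromCGImage:image.CGImage
                                                  size:CGSizeMake(320, 400)];
// ... hand the buffer to an AVAssetWriterInputPixelBufferAdaptor ...
CVPixelBufferRelease(buffer);   // caller releases the buffer it received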

- (IBAction)testCompressionSession
{
    // Path of the movie file to be written. The original post uses
    // betaCompressionDirectory without defining it; a path in the app's
    // Documents directory is assumed here.
    NSString *betaCompressionDirectory = [NSHomeDirectory()
        stringByAppendingPathComponent:@"Documents/Movie.mov"];

    CGSize size = CGSizeMake(320, 400); // size of the output video
    NSError *error = nil;

    // Delete any previous output at the same path.
    unlink([betaCompressionDirectory UTF8String]);

    //---- initialize the compression engine
    AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:betaCompressionDirectory]
                                                           fileType:AVFileTypeQuickTimeMovie
                                                              error:&error];
    NSParameterAssert(videoWriter);
    if (error)
        NSLog(@"error = %@", [error localizedDescription]);

    NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                   AVVideoCodecH264, AVVideoCodecKey,
                                   [NSNumber numberWithInt:size.width], AVVideoWidthKey,
                                   [NSNumber numberWithInt:size.height], AVVideoHeightKey, nil];
    AVAssetWriterInput *writerInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
                                                                         outputSettings:videoSettings];

    NSDictionary *sourcePixelBufferAttributesDictionary = [NSDictionary dictionaryWithObjectsAndKeys:
                                                           [NSNumber numberWithInt:kCVPixelFormatType_32ARGB],
                                                           kCVPixelBufferPixelFormatTypeKey, nil];
    AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor
        assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput
                                   sourcePixelBufferAttributes:sourcePixelBufferAttributesDictionary];

    NSParameterAssert(writerInput);
    NSParameterAssert([videoWriter canAddInput:writerInput]);
    if ([videoWriter canAddInput:writerInput])
        NSLog(@"canAddInput: YES");
    else
        NSLog(@"canAddInput: NO");

    [videoWriter addInput:writerInput];
    [videoWriter startWriting];
    [videoWriter startSessionAtSourceTime:kCMTimeZero];

    // Combine the images into one video file.
    // imageArr is assumed to be an NSArray of UIImage objects; each image is
    // appended for 10 frames at a timescale of 10, i.e. one second per image.
    dispatch_queue_t dispatchQueue = dispatch_queue_create("mediaInputQueue", NULL);
    __block int frame = 0;

    [writerInput requestMediaDataWhenReadyOnQueue:dispatchQueue usingBlock:^{
        while ([writerInput isReadyForMoreMediaData])
        {
            if (++frame >= [imageArr count] * 10)
            {
                [writerInput markAsFinished];
                [videoWriter finishWriting];
                [videoWriter release];
                break;
            }

            int idx = frame / 10;
            CVPixelBufferRef buffer = [self pixelBufferFromCGImage:[[imageArr objectAtIndex:idx] CGImage]
                                                               size:size];
            if (buffer)
            {
                if (![adaptor appendPixelBuffer:buffer withPresentationTime:CMTimeMake(frame, 10)])
                    NSLog(@"FAIL");
                CVPixelBufferRelease(buffer); // release whether or not the append succeeded
            }
        }
    }];
}
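For completeness, a hedged sketch of how this might be driven: fill a hypothetical imageArr property with the source images before triggering the action, and, since MediaPlayer.framework is listed above but never appears in the code, play the finished file back with MPMoviePlayerController once writing has completed. The property name, image names, and output path below are assumptions, not part of the original post.

// Hypothetical setup: imageArr holds the UIImages to be turned into a video.
self.imageArr = [NSArray arrayWithObjects:
                 [UIImage imageNamed:@"frame1.png"],   // assumed asset names
                 [UIImage imageNamed:@"frame2.png"],
                 [UIImage imageNamed:@"frame3.png"], nil];
[self testCompressionSession];

// Later, after writing has finished, the result could be played back:
NSString *moviePath = [NSHomeDirectory() stringByAppendingPathComponent:@"Documents/Movie.mov"];
MPMoviePlayerController *player = [[MPMoviePlayerController alloc]
    initWithContentURL:[NSURL fileURLWithPath:moviePath]];
player.view.frame = self.view.bounds;
[self.view addSubview:player.view];
[player play];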