
iOS Video Rotation and Translation Explained

2016-07-12 10:59
//the URLs must be file URLs
//the square crop keeps the middle portion of each video
//this assumes the captured video is always taller than it is wide

/*!
@method mergeAndExportVideosAtFileURLs:

@param fileURLArray
An array of file URLs for every video segment; each must be created with
[NSURL fileURLWithPath:...]

@discussion
Merges all of the segments into one complete video and crops it to a square
*/
- (void)mergeAndExportVideosAtFileURLs:(NSArray *)fileURLArray
{

// NSLog(@"the getVideoCount is %lu", (unsigned long)[self getVideoCount]);

// if (self.getVideoCount != fileURLArray.count) {
// NSLog(@"必定崩溃-------");
// NSLog(@"必定崩溃-------");
// NSLog(@"必定崩溃-------");
// }

NSLog(@"the fileURLArray is %@", fileURLArray);

if (fileURLArray.count <= 0) {

#warning merging started before any video segment was successfully written
NSLog(@"serious error: no video segments to merge");
return;
}else{

for (NSURL *fileURL in fileURLArray) {

NSString *path = fileURL.path;//use -path rather than -resourceSpecifier for file URLs

if ([[NSFileManager defaultManager] fileExistsAtPath:path])
{

JFLog(DBGUI, @"mergeAndExportVideosAtFileURLs theVideoPath is %@", path);

NSDictionary *attr = [[NSFileManager defaultManager] attributesOfItemAtPath:path error:nil];
NSUInteger size = [attr[NSFileSize] unsignedIntegerValue];

JFLog(DBGUI, @"mergeAndExportVideosAtFileURLs fileSize is %luMB", size/(1024*1024));

}
}

}

dispatch_async(_serialQueue, ^{
NSError *error = nil;

CGSize renderSize = CGSizeMake(0, 0);

NSMutableArray *layerInstructionArray = [[NSMutableArray alloc] init];

AVMutableComposition *mixComposition = [[AVMutableComposition alloc] init];

CMTime totalDuration = kCMTimeZero;

//first collect the video tracks; this pass also determines renderSize
NSMutableArray *assetTrackArray = [[NSMutableArray alloc] init];


NSMutableArray *assetArray = [[NSMutableArray alloc] init];
for (NSURL *fileURL in fileURLArray) {

AVAsset *asset = [AVAsset assetWithURL:fileURL];


if (!asset) {
continue;
}

AVAssetTrack *assetTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] firstObject];
if (!assetTrack) {
continue;//skip any segment that has no video track instead of crashing
}

[assetArray addObject:asset];
[assetTrackArray addObject:assetTrack];

//the segments are shot in portrait, so width and height are swapped relative to naturalSize
renderSize.width = MAX(renderSize.width, assetTrack.naturalSize.height);
renderSize.height = MAX(renderSize.height, assetTrack.naturalSize.width);
}

NSLog(@"the assetAudioTrackArray is %@", assetAudioTrackArray);

CGFloat renderW = MIN(renderSize.width, renderSize.height);
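//for example, assuming 1920x1080 portrait segments, renderSize ends up as
//(1080, 1920), so renderW = 1080 and the square output is 1080x1080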

NSLog(@"the renderW is %f", renderW);

NSLog(@"the assetArray cout is %lu", (unsigned long)[assetArray count]);
NSLog(@"the assetTrackArray cout is %lu", (unsigned long)[assetTrackArray count]);

for (int i = 0; i < [assetArray count] && i < [assetTrackArray count]; i++) {

AVAsset *asset = [assetArray objectAtIndex:i];
AVAssetTrack *assetTrack = [assetTrackArray objectAtIndex:i];


//[asset tracksWithMediaType:AVMediaTypeAudio] may return an empty array:
//a segment can have no audio track at all
NSArray *arr = [asset tracksWithMediaType:AVMediaTypeAudio];

JFLog(DBGUI, @"the audioTrackArr is %@", arr);

if (arr.count <= 0) {
NSLog(@"this segment has no audio track");
}

if (arr.count > 0) {

AVMutableCompositionTrack *audioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];

[audioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, asset.duration) ofTrack:[arr objectAtIndex:0] atTime:totalDuration error:nil];
}
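//the audio above is inserted at totalDuration so it stays aligned with the
//video segment appended to the video track below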


AVMutableCompositionTrack *videoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];

NSLog(@"the assetduration is %lld", asset.duration.value/asset.duration.timescale);

[videoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, asset.duration)
ofTrack:assetTrack
atTime:totalDuration
error:&error];

//fix the orientation issue
AVMutableVideoCompositionLayerInstruction *layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];

totalDuration = CMTimeAdd(totalDuration, asset.duration);

NSLog(@"the renderW is %f", renderW);
NSLog(@"assetTrack.naturalSize.width is %f", assetTrack.naturalSize.width);
NSLog(@"assetTrack.naturalSize.height is %f", assetTrack.naturalSize.height);

//scale factor that maps the shorter side of this track onto the square output
CGFloat rate = renderW / MIN(assetTrack.naturalSize.width, assetTrack.naturalSize.height);

NSLog(@"the rate is %f", rate);

NSLog(@" preferredTransform.a is %f", assetTrack.preferredTransform.a);
NSLog(@" preferredTransform.b is %f", assetTrack.preferredTransform.b);
NSLog(@" preferredTransform.c is %f", assetTrack.preferredTransform.c);
NSLog(@" preferredTransform.d is %f", assetTrack.preferredTransform.d);
NSLog(@" preferredTransform.tx is %f", assetTrack.preferredTransform.tx);
NSLog(@" preferredTransform.ty is %f", assetTrack.preferredTransform.ty);

CGAffineTransform translateToCenter;
CGAffineTransform mixedTransform;

int degrees = [self degressFromVideoFileWithURL:assetTrack];

if (degrees == 0) {
//no rotation needed
}else{
//mixedTransform is computed here for reference; the active code path
//below applies transNew instead
if(degrees == 90){
//rotate 90 degrees clockwise
NSLog(@"video rotated 90 degrees, home button on the left");

translateToCenter = CGAffineTransformMakeTranslation(assetTrack.naturalSize.height, 0.0);
mixedTransform = CGAffineTransformRotate(translateToCenter,M_PI_2);
}else if(degrees == 180){
//rotate 180 degrees clockwise
NSLog(@"video rotated 180 degrees, home button on the top");
translateToCenter = CGAffineTransformMakeTranslation(assetTrack.naturalSize.width, assetTrack.naturalSize.height);
mixedTransform = CGAffineTransformRotate(translateToCenter,M_PI);
}else if(degrees == 270){
//rotate 270 degrees clockwise
NSLog(@"video rotated 270 degrees, home button on the right");
translateToCenter = CGAffineTransformMakeTranslation(0.0, assetTrack.naturalSize.width);
mixedTransform = CGAffineTransformRotate(translateToCenter,M_PI_2*3.0);
}
}

CGAffineTransform preferredTransform = assetTrack.preferredTransform;

CGAffineTransform trans = CGAffineTransformTranslate(preferredTransform, 0.0, -assetTrack.naturalSize.height);

CGAffineTransform transNew = CGAffineTransformRotate(preferredTransform,M_PI_2*3);

//shift up so the square crop keeps the middle portion of the video
transNew = CGAffineTransformTranslate(transNew, 0, -(assetTrack.naturalSize.width - assetTrack.naturalSize.height) / 2.0);

transNew = CGAffineTransformConcat(trans, transNew);

transNew = CGAffineTransformScale(transNew, rate, rate);//scale to even out the different output sizes of the front and back cameras
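//worked example, assuming a typical portrait clip from the back camera:
//naturalSize = 1920x1080 and preferredTransform = (a=0, b=1, c=-1, d=0, tx=1080, ty=0),
//which maps a buffer point (x, y) to (1080 - y, x): (0,0) -> (1080,0) and
//(1920,1080) -> (0,1920), i.e. the landscape buffer becomes an upright
//1080x1920 frame; the translate above then shifts it by
//(1920 - 1080) / 2 = 420 points so the square crop keeps the middle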

[layerInstruction setTransform:transNew atTime:kCMTimeZero];

[layerInstructionArray addObject:layerInstruction];
}

//get save path
NSURL *mergeFileURL = [NSURL fileURLWithPath:[[self class] getVideoMergeFilePathString]];

//export
AVMutableVideoCompositionInstruction *mainInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
mainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, totalDuration);
mainInstruction.layerInstructions = layerInstructionArray;
AVMutableVideoComposition *mainCompositionInst = [AVMutableVideoComposition videoComposition];
mainCompositionInst.instructions = @[mainInstruction];

mainCompositionInst.frameDuration = CMTimeMake(1, 30);//30 fps

NSLog(@"the renderSize is %@", NSStringFromCGSize(CGSizeMake(renderW, renderW)));

mainCompositionInst.renderSize = CGSizeMake(renderW, renderW);
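//suggested guard (not in the original post): make sure the preset can
//actually be applied to this composition before building the session
NSArray *presets = [AVAssetExportSession exportPresetsCompatibleWithAsset:mixComposition];
if (![presets containsObject:AVAssetExportPresetMediumQuality]) {
NSLog(@"AVAssetExportPresetMediumQuality is not available for this asset");
return;
}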

AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetMediumQuality];

exporter.videoComposition = mainCompositionInst;
exporter.outputURL = mergeFileURL;
exporter.outputFileType = AVFileTypeMPEG4;
exporter.shouldOptimizeForNetworkUse = YES;
[exporter exportAsynchronouslyWithCompletionHandler:^{

if ([exporter status] != AVAssetExportSessionStatusCompleted) {

NSLog(@"the status is %ld", (long)[exporter status]);

NSLog(@"the outputURL is %@", [exporter.outputURL absoluteString]);

NSLog(@"the error is %@", [exporter error]);
}

NSString *path = mergeFileURL.path;//-path rather than -resourceSpecifier for file URLs


NSLog(@"theVideoPath is %@", path);

NSLog(@"outputFileURL is %@", mergeFileURL);

if ([[NSFileManager defaultManager] fileExistsAtPath:path])
{

NSDictionary *attr = [[NSFileManager defaultManager] attributesOfItemAtPath:path error:nil];
NSUInteger size = [attr[NSFileSize] unsignedIntegerValue];

NSLog(@"merged video fileSize is %luMB", size/(1024*1024));

}

NSLog(@"the outputFile is %@", mergeFileURL);

dispatch_async(dispatch_get_main_queue(), ^{

MSPreViewController *VC = [MSPreViewController new];

VC.videoURL = mergeFileURL;

NSLog(@"navi is %@", self.navigationController);

[self.navigationController pushViewController:VC animated:YES];
});


}];
});
}
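The code above calls a helper named degressFromVideoFileWithURL: but never shows it. Below is a minimal sketch, assuming the usual approach of deducing the rotation from the track's preferredTransform; note that, as at the call site above, the parameter is actually an AVAssetTrack despite the "URL" in the name.

- (int)degressFromVideoFileWithURL:(AVAssetTrack *)videoTrack
{
CGAffineTransform t = videoTrack.preferredTransform;

if (t.a == 0 && t.b == 1.0 && t.c == -1.0 && t.d == 0) {
return 90;//portrait, home button at the bottom
} else if (t.a == 0 && t.b == -1.0 && t.c == 1.0 && t.d == 0) {
return 270;//portrait, upside down
} else if (t.a == -1.0 && t.b == 0 && t.c == 0 && t.d == -1.0) {
return 180;//landscape, rotated
}
return 0;//landscape, no rotation
}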
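getVideoMergeFilePathString is also referenced but not shown. A plausible implementation follows (the directory and file naming are assumptions): it writes into the temporary directory and removes any stale file first, since AVAssetExportSession fails when the output file already exists.

+ (NSString *)getVideoMergeFilePathString
{
NSString *name = [NSString stringWithFormat:@"merge-%.0f.mp4", [[NSDate date] timeIntervalSince1970]];
NSString *path = [NSTemporaryDirectory() stringByAppendingPathComponent:name];

//AVAssetExportSession will not overwrite an existing file
[[NSFileManager defaultManager] removeItemAtPath:path error:nil];
return path;
}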