ios GPUImage简单滤镜 -- 录制视频(保存+聚焦)
2016-04-08 10:57
781 查看
最近初学iOS以及研究GPUImage第三方库,在度娘及google上查了不少的资料后,勉强写了一个小程序,分享给大家。代码中肯定有不足之处(毕竟初学者),望大家指教!
在写代码之前,要先引入GPUImage库。可以用自已动手引入或直接用cocoapods( 度娘及官网有教程,不懂的请自行搜索 - -)。
下面是代码:
VideoCameraView.h
VideoCameraView.m
将以上两个文件放入你的工程后,只需在viewDidLoad函数中加入以下代码,就可以运行了(记得引入头文件~)
程序下载地址:http://download.csdn.net/detail/u012965341/9484245
在写代码之前,要先引入GPUImage库。可以用自已动手引入或直接用cocoapods( 度娘及官网有教程,不懂的请自行搜索 - -)。
下面是代码:
VideoCameraView.h
#import <UIKit/UIKit.h>
#import "GPUImage.h"

/// Full-screen camera view: live GPUImage-filtered preview with movie
/// recording, an elapsed-time label, a saturation slider, and tap-to-focus.
/// Add an instance to a view controller's view to use it.
@interface VideoCameraView : UIView
{
    GPUImageVideoCamera *videoCamera;            // capture source (back camera + audio)
    GPUImageOutput<GPUImageInput> *filter;       // saturation filter between camera and outputs
    GPUImageMovieWriter *movieWriter;            // writes the filtered stream to disk while recording
    NSString *pathToMovie;                       // destination path of the current recording
    GPUImageView *filteredVideoView;             // on-screen preview target
    CALayer *_focusLayer;                        // tap-to-focus indicator layer (shown briefly on tap)
    NSTimer *myTimer;                            // 1 s tick driving the elapsed-time label
    UILabel *timeLabel;                          // shows recording duration as HH:mm:ss
    NSDate *fromdate;                            // timestamp when recording started
    CGRect mainScreenFrame;                      // frame passed to the initializer; used for control layout
}

/// Designated initializer; builds the capture pipeline and UI.
- (instancetype)initWithFrame:(CGRect)frame NS_DESIGNATED_INITIALIZER;

@end
VideoCameraView.m
#import "VideoCameraView.h"

// CAAnimationDelegate conformance is required for animation.delegate = self
// on modern SDKs.
@interface VideoCameraView () <CAAnimationDelegate>
@end

@implementation VideoCameraView

/// Designated initializer: wires camera -> saturation filter -> preview,
/// starts capture, and installs the controls and tap-to-focus gesture.
- (instancetype)initWithFrame:(CGRect)frame {
    if (!(self = [super initWithFrame:frame])) {
        return nil;
    }
    mainScreenFrame = frame;

    // 720p capture from the back camera, portrait-oriented, with audio.
    videoCamera = [[GPUImageVideoCamera alloc]
        initWithSessionPreset:AVCaptureSessionPreset1280x720
               cameraPosition:AVCaptureDevicePositionBack];
    videoCamera.outputImageOrientation = UIInterfaceOrientationPortrait;
    [videoCamera addAudioInputsAndOutputs];

    // Pipeline: camera -> saturation filter -> on-screen preview.
    filter = [[GPUImageSaturationFilter alloc] init];
    filteredVideoView = [[GPUImageView alloc] initWithFrame:[[UIScreen mainScreen] bounds]];
    [videoCamera addTarget:filter];
    [filter addTarget:filteredVideoView];
    [videoCamera startCameraCapture];

    [self addSomeView];

    // Single-finger single tap triggers tap-to-focus.
    UITapGestureRecognizer *singleFingerOne =
        [[UITapGestureRecognizer alloc] initWithTarget:self
                                                action:@selector(cameraViewTapAction:)];
    singleFingerOne.numberOfTouchesRequired = 1;
    singleFingerOne.numberOfTapsRequired = 1;
    [filteredVideoView addGestureRecognizer:singleFingerOne];

    [self addSubview:filteredVideoView];
    return self;
}

/// Builds the overlay controls: saturation slider, time label, and the
/// start/stop recording buttons.
- (void)addSomeView {
    UISlider *filterSettingsSlider = [[UISlider alloc]
        initWithFrame:CGRectMake(25.0, 30.0, mainScreenFrame.size.width - 50.0, 40.0)];
    [filterSettingsSlider addTarget:self
                             action:@selector(updateSliderValue:)
                   forControlEvents:UIControlEventValueChanged];
    filterSettingsSlider.autoresizingMask =
        UIViewAutoresizingFlexibleWidth | UIViewAutoresizingFlexibleTopMargin;
    filterSettingsSlider.minimumValue = 0.0;
    filterSettingsSlider.maximumValue = 2.0;
    filterSettingsSlider.value = 1.0;  // 1.0 == unmodified saturation
    [filteredVideoView addSubview:filterSettingsSlider];

    timeLabel = [[UILabel alloc] initWithFrame:CGRectMake(20.0, 60.0, 100, 30.0)];
    timeLabel.font = [UIFont systemFontOfSize:15.0f];
    timeLabel.text = @"00:00:00";
    timeLabel.textAlignment = NSTextAlignmentCenter;
    timeLabel.backgroundColor = [UIColor clearColor];
    timeLabel.textColor = [UIColor whiteColor];
    [filteredVideoView addSubview:timeLabel];

    UIButton *photoCaptureButton = [UIButton buttonWithType:UIButtonTypeRoundedRect];
    [photoCaptureButton.layer setCornerRadius:8];
    photoCaptureButton.frame = CGRectMake(50, mainScreenFrame.size.height - 70.0, 50.0, 40.0);
    photoCaptureButton.backgroundColor = [UIColor whiteColor];
    [photoCaptureButton setTitle:@"开始" forState:UIControlStateNormal];
    photoCaptureButton.autoresizingMask =
        UIViewAutoresizingFlexibleWidth | UIViewAutoresizingFlexibleTopMargin;
    [photoCaptureButton addTarget:self
                           action:@selector(startRecording:)
                 forControlEvents:UIControlEventTouchUpInside];
    [photoCaptureButton setTitleColor:[UIColor grayColor] forState:UIControlStateDisabled];
    [filteredVideoView addSubview:photoCaptureButton];

    UIButton *cameraChangeButton = [UIButton buttonWithType:UIButtonTypeRoundedRect];
    [cameraChangeButton.layer setCornerRadius:8];
    cameraChangeButton.frame =
        CGRectMake(mainScreenFrame.size.width - 150, mainScreenFrame.size.height - 70.0, 100.0, 40.0);
    cameraChangeButton.backgroundColor = [UIColor whiteColor];
    [cameraChangeButton setTitle:@"录制结束" forState:UIControlStateNormal];
    cameraChangeButton.autoresizingMask =
        UIViewAutoresizingFlexibleWidth | UIViewAutoresizingFlexibleTopMargin;
    [cameraChangeButton addTarget:self
                           action:@selector(stopRecording:)
                 forControlEvents:UIControlEventTouchUpInside];
    [cameraChangeButton setTitleColor:[UIColor grayColor] forState:UIControlStateDisabled];
    [filteredVideoView addSubview:cameraChangeButton];
}

/// Slider callback: maps the slider value (0.0-2.0) directly to saturation.
- (IBAction)updateSliderValue:(id)sender {
    [(GPUImageSaturationFilter *)filter setSaturation:[(UISlider *)sender value]];
}

/// Stops recording, then saves the movie to the photo album.
/// FIX: the original saved the file *before* finishRecording, which copies a
/// truncated movie. The save now runs in the writer's completion handler.
- (IBAction)stopRecording:(id)sender {
    videoCamera.audioEncodingTarget = nil;

    [myTimer invalidate];
    myTimer = nil;
    timeLabel.text = @"00:00:00";

    NSLog(@"Path %@", pathToMovie);
    NSString *moviePath = pathToMovie;  // local copy so the block doesn't capture self
    [movieWriter finishRecordingWithCompletionHandler:^{
        UISaveVideoAtPathToSavedPhotosAlbum(moviePath, nil, nil, nil);
    }];
    [filter removeTarget:movieWriter];
}

/// Starts writing the filtered stream to Documents/Movie.m4v and starts the
/// elapsed-time timer.
- (IBAction)startRecording:(id)sender {
    pathToMovie = [NSHomeDirectory() stringByAppendingPathComponent:@"Documents/Movie.m4v"];
    // AVAssetWriter won't record over an existing file, so delete the old movie.
    unlink([pathToMovie UTF8String]);
    NSURL *movieURL = [NSURL fileURLWithPath:pathToMovie];

    movieWriter = [[GPUImageMovieWriter alloc] initWithMovieURL:movieURL
                                                           size:CGSizeMake(360.0, 640.0)];
    movieWriter.encodingLiveVideo = YES;
    movieWriter.shouldPassthroughAudio = YES;
    [filter addTarget:movieWriter];
    videoCamera.audioEncodingTarget = movieWriter;
    [movieWriter startRecording];

    // NOTE: a scheduled repeating NSTimer retains its target; it is
    // invalidated in stopRecording:.
    fromdate = [NSDate date];
    myTimer = [NSTimer scheduledTimerWithTimeInterval:1.0
                                               target:self
                                             selector:@selector(updateTimer:)
                                             userInfo:nil
                                              repeats:YES];
}

/// Timer tick: shows the elapsed recording time as HH:mm:ss.
/// FIX: uses the non-deprecated NSCalendarUnit constants and formats the
/// components directly instead of round-tripping through a synthetic NSDate.
- (void)updateTimer:(NSTimer *)sender {
    NSCalendar *calendar = [NSCalendar currentCalendar];
    NSCalendarUnit unitFlags = NSCalendarUnitHour | NSCalendarUnitMinute | NSCalendarUnitSecond;
    NSDateComponents *comps = [calendar components:unitFlags
                                          fromDate:fromdate
                                            toDate:[NSDate date]
                                           options:0];
    timeLabel.text = [NSString stringWithFormat:@"%02ld:%02ld:%02ld",
                                                (long)comps.hour,
                                                (long)comps.minute,
                                                (long)comps.second];
}

/// Lazily creates the focus-indicator layer.
/// FIX: the original created and added a new sublayer on every tap, leaking
/// layers into the preview's layer tree; the indicator is now created once.
- (void)setfocusImage {
    if (_focusLayer) {
        return;
    }
    UIImage *focusImage = [UIImage imageNamed:@"96"];
    UIImageView *imageView = [[UIImageView alloc]
        initWithFrame:CGRectMake(0, 0, focusImage.size.width, focusImage.size.height)];
    imageView.image = focusImage;
    CALayer *layer = imageView.layer;
    layer.hidden = YES;
    [filteredVideoView.layer addSublayer:layer];
    _focusLayer = layer;
}

/// Shows the focus indicator at `point`, scaled 2x, then animates it back to
/// 1x and hides it after 0.5 s.
/// FIX: repaired the corrupted `CATransform3 ae99 DMakeScale` token from the
/// original source.
- (void)layerAnimationWithPoint:(CGPoint)point {
    if (_focusLayer) {
        CALayer *focusLayer = _focusLayer;
        focusLayer.hidden = NO;

        // Position and pre-scale without an implicit animation.
        [CATransaction begin];
        [CATransaction setDisableActions:YES];
        [focusLayer setPosition:point];
        focusLayer.transform = CATransform3DMakeScale(2.0f, 2.0f, 1.0f);
        [CATransaction commit];

        // Shrink back to natural size.
        CABasicAnimation *animation = [CABasicAnimation animationWithKeyPath:@"transform"];
        animation.toValue = [NSValue valueWithCATransform3D:CATransform3DMakeScale(1.0f, 1.0f, 1.0f)];
        animation.delegate = self;
        animation.duration = 0.3f;
        animation.repeatCount = 1;
        animation.removedOnCompletion = NO;
        animation.fillMode = kCAFillModeForwards;
        [focusLayer addAnimation:animation forKey:@"animation"];

        // Hide the indicator after a 0.5 s delay.
        [self performSelector:@selector(focusLayerNormal) withObject:self afterDelay:0.5f];
    }
}

/// CAAnimationDelegate: no completion work needed.
- (void)animationDidStop:(CAAnimation *)anim finished:(BOOL)flag {
}

/// Hides the focus indicator and re-enables interaction on the preview.
- (void)focusLayerNormal {
    filteredVideoView.userInteractionEnabled = YES;
    _focusLayer.hidden = YES;
}

/// Tap-to-focus: shows the indicator, converts the tap location into the
/// capture device's point-of-interest space, and locks focus/exposure there.
/// FIX: the original compared the layer pointer to the BOOL `NO`; this is now
/// an explicit nil check.
- (void)cameraViewTapAction:(UITapGestureRecognizer *)tgr {
    if (tgr.state == UIGestureRecognizerStateRecognized &&
        (_focusLayer == nil || _focusLayer.hidden)) {
        CGPoint location = [tgr locationInView:filteredVideoView];
        [self setfocusImage];
        [self layerAnimationWithPoint:location];

        AVCaptureDevice *device = videoCamera.inputCamera;
        NSLog(@"taplocation x = %f y = %f", location.x, location.y);

        // Map view coordinates to the device's (0,0)-(1,1) point of interest,
        // which is rotated 90° relative to the portrait preview. Mirror x for
        // the front camera.
        CGSize frameSize = [filteredVideoView frame].size;
        if ([videoCamera cameraPosition] == AVCaptureDevicePositionFront) {
            location.x = frameSize.width - location.x;
        }
        CGPoint pointOfInterest = CGPointMake(location.y / frameSize.height,
                                              1.f - (location.x / frameSize.width));

        if ([device isFocusPointOfInterestSupported] &&
            [device isFocusModeSupported:AVCaptureFocusModeAutoFocus]) {
            NSError *error;
            if ([device lockForConfiguration:&error]) {
                [device setFocusPointOfInterest:pointOfInterest];
                [device setFocusMode:AVCaptureFocusModeAutoFocus];
                if ([device isExposurePointOfInterestSupported] &&
                    [device isExposureModeSupported:AVCaptureExposureModeContinuousAutoExposure]) {
                    [device setExposurePointOfInterest:pointOfInterest];
                    [device setExposureMode:AVCaptureExposureModeContinuousAutoExposure];
                }
                [device unlockForConfiguration];
                NSLog(@"FOCUS OK");
            } else {
                NSLog(@"ERROR = %@", error);
            }
        }
    }
}

@end
将以上两个文件放入你的工程后,只需在viewDidLoad函数中加入以下代码,就可以运行了(记得引入头文件~)
// Usage (inside a view controller's viewDidLoad): create the camera view at
// full-screen size and add it to the controller's view hierarchy.
CGRect frame = [[UIScreen mainScreen] bounds];
VideoCameraView *view = [[VideoCameraView alloc] initWithFrame:frame];
[self.view addSubview:view];
程序下载地址:http://download.csdn.net/detail/u012965341/9484245
相关文章推荐
- 峰回路转,Firefox 浏览器即将重返 iOS 平台
- 峰回路转,Firefox 浏览器即将重返 iOS 平台
- 不可修补的 iOS 漏洞可能导致 iPhone 4s 到 iPhone X 永久越狱
- iOS 12.4 系统遭黑客破解,漏洞危及数百万用户
- 每日安全资讯:NSO,一家专业入侵 iPhone 的神秘公司
- [转][源代码]Comex公布JailbreakMe 3.0源代码
- css滤镜实现页面灰色黑白色效果代码
- camera录制视频的缩略图获取原理心得分享
- 讲解iOS开发中基本的定位功能实现
- iOS中定位当前位置坐标及转换为火星坐标的方法
- js判断客户端是iOS还是Android等移动终端的方法
- iOS应用中UISearchDisplayController搜索效果的用法
- IOS开发环境windows化攻略
- iOS应用中UITableView左滑自定义选项及批量删除的实现
- 浅析iOS应用开发中线程间的通信与线程安全问题
- 检测iOS设备是否越狱的方法
- .net平台推送ios消息的实现方法
- 探讨Android与iOS,我们将何去何从?
- Android、iOS和Windows Phone中的推送技术详解