iOS 获取视频每一帧图片的方法
2014-10-27 21:19
736 查看
// Extracts the video frame nearest to time t (in seconds) from _playUrl,
// runs it through a GPUImage brightness filter, displays the filtered frame
// in a UIImageView stacked below the previous one, appends it to
// self.imageArr, and returns the UNfiltered frame (or nil on failure).
//
// NOTE(review): relies on state declared elsewhere in the class — _playUrl
// (video URL), pointY (running y-offset for stacked thumbnails),
// self.imageArr (mutable array of captured frames), and self.view.
- (UIImage *)getVideoPreViewImageByTime:(float)t
{
    AVURLAsset *asset = [[AVURLAsset alloc] initWithURL:_playUrl options:nil];
    AVAssetImageGenerator *gen = [[AVAssetImageGenerator alloc] initWithAsset:asset];
    gen.appliesPreferredTrackTransform = YES;

    // BUG FIX: the original used CMTimeMakeWithSeconds(t, 1). A timescale of
    // 1 truncates t to a whole second, so the caller's 0.1 s sampling got the
    // SAME frame ten times per second. Use the conventional timescale of 600
    // and zero tolerance so the generator returns a frame-accurate image.
    gen.requestedTimeToleranceBefore = kCMTimeZero;
    gen.requestedTimeToleranceAfter = kCMTimeZero;
    CMTime time = CMTimeMakeWithSeconds(t, 600);

    NSError *error = nil;
    CMTime actualTime;
    CGImageRef image = [gen copyCGImageAtTime:time actualTime:&actualTime error:&error];
    if (image == NULL) {
        // BUG FIX: the original passed a possibly-NULL CGImageRef straight to
        // initWithCGImage: and CGImageRelease; fail gracefully instead.
        NSLog(@"copyCGImageAtTime failed at %g s: %@", t, error);
        return nil;
    }
    UIImage *img = [[UIImage alloc] initWithCGImage:image];
    CGImageRelease(image);

    // Create a brightness filter.
    GPUImageBrightnessFilter *passthroughFilter = [[GPUImageBrightnessFilter alloc] init];
    passthroughFilter.brightness = 0.5f;
    // Render at the frame's own size.
    [passthroughFilter forceProcessingAtSize:img.size];
    // GPUImage requires this call before imageFromCurrentFramebuffer.
    [passthroughFilter useNextFrameForImageCapture];

    // Feed the frame into the filter chain and render it.
    GPUImagePicture *stillImageSource = [[GPUImagePicture alloc] initWithImage:img];
    [stillImageSource addTarget:passthroughFilter];
    [stillImageSource processImage];

    // Grab the filtered result.
    UIImage *filteredImage = [passthroughFilter imageFromCurrentFramebuffer];

    // Display the filtered frame; each thumbnail is stacked 70 pt apart.
    // (The original also set imageView.image redundantly after
    // initWithImage: — removed.)
    UIImageView *imageView = [[UIImageView alloc] initWithImage:filteredImage];
    imageView.backgroundColor = [UIColor redColor];
    imageView.frame = CGRectMake(0, pointY, 100, 60);
    [self.view addSubview:imageView];
    pointY += 70;

    // BUG FIX: the original format string contained a garbled
    // (mis-encoded) character.
    NSLog(@"frame time: %g", t);
    [self.imageArr addObject:filteredImage];
    return img;
}
// Call when needed: samples one frame every 0.1 s across the whole video.
AVAsset *movieAsset = [AVAsset assetWithURL:fileUrl]; // fileUrl: path to the media file
// BUG FIX: the original computed (int)duration.value / duration.timescale,
// a truncating integer division; CMTimeGetSeconds is the supported
// conversion and keeps the fractional part.
Float64 movieDuration = CMTimeGetSeconds(movieAsset.duration);
NSLog(@"movie duration : %f", movieDuration);
// BUG FIX: the original loop accumulated a float by 0.1 (i += 0.1), which
// drifts over many iterations; an integer step count keeps each sample
// time exact.
for (NSInteger step = 0; step * 0.1 < movieDuration; step++) {
    [self getVideoPreViewImageByTime:(float)(step * 0.1)];
}
// Extracts the video frame nearest to time t (in seconds) from _playUrl,
// brightens it with a GPUImage filter, shows the result in a UIImageView
// stacked below the previous thumbnail, stores it in self.imageArr, and
// returns the raw (unfiltered) frame, or nil if extraction fails.
//
// NOTE(review): depends on class state declared elsewhere — _playUrl,
// pointY (running y-offset), self.imageArr, self.view.
- (UIImage *)getVideoPreViewImageByTime:(float)t
{
    AVURLAsset *videoAsset = [[AVURLAsset alloc] initWithURL:_playUrl options:nil];
    AVAssetImageGenerator *generator = [[AVAssetImageGenerator alloc] initWithAsset:videoAsset];
    generator.appliesPreferredTrackTransform = YES;

    // BUG FIX: timescale 1 in the original truncated t to whole seconds, so
    // every 0.1 s sample within the same second produced an identical frame.
    // Timescale 600 plus zero tolerance yields frame-accurate extraction.
    generator.requestedTimeToleranceBefore = kCMTimeZero;
    generator.requestedTimeToleranceAfter = kCMTimeZero;
    CMTime requestedTime = CMTimeMakeWithSeconds(t, 600);

    NSError *error = nil;
    CMTime actualTime;
    CGImageRef frameRef = [generator copyCGImageAtTime:requestedTime
                                            actualTime:&actualTime
                                                 error:&error];
    if (frameRef == NULL) {
        // BUG FIX: the original used a possibly-NULL CGImageRef unchecked.
        NSLog(@"copyCGImageAtTime failed at %g s: %@", t, error);
        return nil;
    }
    UIImage *rawFrame = [[UIImage alloc] initWithCGImage:frameRef];
    CGImageRelease(frameRef);

    // Create a brightness filter and render at the frame's native size.
    GPUImageBrightnessFilter *brightnessFilter = [[GPUImageBrightnessFilter alloc] init];
    brightnessFilter.brightness = 0.5f;
    [brightnessFilter forceProcessingAtSize:rawFrame.size];
    // GPUImage requires this before imageFromCurrentFramebuffer.
    [brightnessFilter useNextFrameForImageCapture];

    // Source -> filter -> render.
    GPUImagePicture *frameSource = [[GPUImagePicture alloc] initWithImage:rawFrame];
    [frameSource addTarget:brightnessFilter];
    [frameSource processImage];

    UIImage *brightenedFrame = [brightnessFilter imageFromCurrentFramebuffer];

    // Show the filtered frame; thumbnails stack 70 pt apart. (The original's
    // redundant imageView.image assignment after initWithImage: is removed.)
    UIImageView *thumbnailView = [[UIImageView alloc] initWithImage:brightenedFrame];
    thumbnailView.backgroundColor = [UIColor redColor];
    thumbnailView.frame = CGRectMake(0, pointY, 100, 60);
    [self.view addSubview:thumbnailView];
    pointY += 70;

    // BUG FIX: the original format string held a garbled character.
    NSLog(@"frame time: %g", t);
    [self.imageArr addObject:brightenedFrame];
    return rawFrame;
}
// Call when needed: samples one frame every 0.1 s across the whole video.
AVAsset *movieAsset = [AVAsset assetWithURL:fileUrl]; // fileUrl: path to the media file
// BUG FIX: (int)value / timescale is a truncating integer division;
// CMTimeGetSeconds preserves the fractional duration.
Float64 totalSeconds = CMTimeGetSeconds(movieAsset.duration);
NSLog(@"movie duration : %f", totalSeconds);
// BUG FIX: incrementing a float by 0.1 accumulates rounding error across
// hundreds of iterations; compute each sample time from an integer index.
for (NSInteger sampleIndex = 0; sampleIndex * 0.1 < totalSeconds; sampleIndex++) {
    [self getVideoPreViewImageByTime:(float)(sampleIndex * 0.1)];
}
相关文章推荐
- iOS 播放视频进行滑动获取每一帧图片并且通过UIImageView展示出来
- ios中摄像头/相册获取图片,压缩图片,上传服务器方法总结
- ios根据视频地址获取某一帧的图像
- ios 用url获取图片的方法
- ios中摄像头/相册获取图片,压缩图片,上传服务器方法总结
- iOS里加密字符串、图片、视频方法
- ios中摄像头/相册获取图片,压缩图片,上传服务器方法总结
- ios中摄像头/相册获取图片,压缩图片,上传服务器方法总结
- IOS开发获取图片的方法
- ios中摄像头/相册获取图片,压缩图片,上传服务器方法总结
- IOS 获取视频图片的
- ios中摄像头/相册获取图片,压缩图片,上传服务器方法总结
- IOS获取图片方法,避免内存过大闪退
- ios中摄像头/相册获取图片,压缩图片,上传服务器方法
- ios中摄像头/相册获取图片,压缩图片,上传服务器方法总结
- ios中摄像头/相册获取图片,压缩图片,上传服务器方法总结
- IOS获取网络图片的方法
- iOS里加密字符串、图片、视频方法
- ios中摄像头/相册获取图片,压缩图片,上传服务器方法总结
- ios中摄像头/相册获取图片,压缩图片,上传服务器方法