iOS AVCaptureSession实现获取摄像头图像,并识别图片中身份证号码
2016-05-27 11:37
696 查看
自定义照相机通过设备摄像头实时获取身份证图片信息,然后识别图片中身份证号码,采用ocr识别数字
声明
// Delegate protocol: the camera screen hands its result back to its owner.
// NOTE(review): the payload is typed `id` — the code below sends a UIImage
// from shutterCamera and an NSString from the OCR path; confirm callers handle both.
@protocol PassImage
-(void)PassImagedata:(id)_data;
@end
// Public interface. Fix: the scrape dropped the `@end` that must close the
// public @interface before the private class extension begins.
// NOTE(review): the original header likely declared the session/previewLayer/
// delegate properties here; only the class line survived the scrape.
@interface DZC_Carmer_photo : UIViewController
@end

// Private class extension (ivars kept out of the public header).
@interface DZC_Carmer_photo ()
{
UIButton *btn;
UIImageView *imgView;
}
@end
@implementation DZC_Carmer_photo
// Initializer: wires up the AVCaptureSession immediately on creation.
// Fix: removed the redundant early `return self` inside the if — both paths
// returned self, so a single return is equivalent and clearer.
-(instancetype)init
{
    self = [super init];
    if (self)
    {
        [self initialSession];
    }
    return self;
}
// Cancel any in-flight OCR operations when the screen is leaving; prevents
// recognition callbacks from firing against a dismissed controller.
-(void)viewWillDisappear:(BOOL)animated
{
[super viewWillDisappear:animated];
[self.operationQueue cancelAllOperations];
}
// Build the static UI: a cancel button and the hint label.
// Fix: curly quotes (” ”) inside the @"" string literals would not compile;
// replaced with straight quotes. (This is the truncated copy of the article;
// the complete version of this method appears later in the file.)
- (void)viewDidLoad {
    [super viewDidLoad];
    UIButton *returnBtn = [[UIButton alloc] initWithFrame:CGRectMake(10, SCREENHEIGHT-100, 100, 50)];
    [returnBtn setTitle:@"撤销" forState:UIControlStateNormal];
    returnBtn.backgroundColor = [UIColor orangeColor];
    [returnBtn addTarget:self action:@selector(CloseBtn) forControlEvents:UIControlEventTouchUpInside];
    [self.view addSubview:returnBtn];
    [self.view bringSubviewToFront:returnBtn];
    UILabel *titleLable = [[UILabel alloc] initWithFrame:CGRectMake(SCREENWIDTH-110, SCREENHEIGHT-120, 100, 140)];
    [titleLable setFont:[UIFont systemFontOfSize:10]];
    [titleLable setText:@"友情提醒:扫描身份证时请将身份证号码放置于矩形框内"];
    // NOTE(review): this truncated copy never adds titleLable to the view
    // hierarchy — the complete version later in the file does.
}
// "撤销" (cancel) button action: close this camera screen without a result.
-(void)CloseBtn
{
[self dismissViewControllerAnimated:YES completion:nil];
}
// Standard template override; no extra cleanup performed here.
- (void)didReceiveMemoryWarning {
[super didReceiveMemoryWarning];
// Dispose of any resources that can be recreated.
}
// Create the capture session with a medium preset.
// NOTE(review): this copy of the article is truncated — input/output wiring
// appears in the complete version of this method later in the file.
- (void) initialSession
{
// this method is invoked from -init
self.session = [[AVCaptureSession alloc] init];
[self.session setSessionPreset:AVCaptureSessionPresetMedium];
}
// Return the first video-capture device at the requested position, recording
// availability in _cameraAvaible; nil (and NO) when none exists.
- (AVCaptureDevice *)cameraWithPosition:(AVCaptureDevicePosition) position {
    NSArray *videoDevices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
    for (AVCaptureDevice *candidate in videoDevices) {
        if (candidate.position != position) {
            continue;
        }
        _cameraAvaible = YES;
        return candidate;
    }
    _cameraAvaible = NO;
    return nil;
}
// Convenience: the front-facing camera, or nil if unavailable.
// Fix: the scrape dropped the leading `-` from the method signature.
- (AVCaptureDevice *)frontCamera {
    return [self cameraWithPosition:AVCaptureDevicePositionFront];
}
// Convenience: the rear camera, or nil if unavailable.
// Fix: the scrape dropped the leading `-` from the method signature.
- (AVCaptureDevice *)backCamera {
    return [self cameraWithPosition:AVCaptureDevicePositionBack];
}
// Called from viewWillAppear to install the preview layer.
// Fix: the scrape dropped the leading `-` from the method signature.
- (void) setUpCameraLayer
{
    if (_cameraAvaible == NO) return;
    if (self.previewLayer == nil) {
        // NOTE(review): preview-layer construction was lost in this truncated
        // copy — see the complete version of this method later in the file.
    }
}
// Install the camera preview layer just before the view appears.
-(void)viewWillAppear:(BOOL)animated
{
[super viewWillAppear:animated];
[self setUpCameraLayer];
// DZC_CustomLine *line=[[DZC_CustomLine alloc]init];
// line.backgroundColor=[UIColor clearColor];
// line.frame=self.previewLayer.frame;
// [self.view addSubview:line];
// [line setNeedsDisplay];
//[self turnOnLed]; // turn the flash on
}
// Start the capture session once the view is on screen.
// Fix: the scrape dropped the leading `-` from the method signature.
- (void)viewDidAppear:(BOOL)animated
{
    [super viewDidAppear:animated];
    if (self.session) {
        [self.session startRunning];
    }
}
// Stop the session and tear down the preview when the view goes away.
// Fix: scrape artifact — the selector was split across lines as
// "viewDidDisappea / 4000 / r:(BOOL)animated"; the stray "4000" is an HTTP
// chunk-size token, not code. Also restored the leading `-`.
- (void)viewDidDisappear:(BOOL)animated
{
    [super viewDidDisappear:animated];
    if (self.session) {
        [self.session stopRunning];
    }
    [self.previewLayer removeFromSuperlayer];
    self.previewLayer = nil;
    self.session = nil;
    //[self turnOffLed]; // turn the flash off
}
// Button action: switch between the front and back camera.
// Fixes: restored the leading `-`; replaced curly quotes in the DLog literal.
// NOTE(review): the input-swapping logic was truncated by the scrape in this
// copy — see the complete version of this method later in the file.
- (void)toggleCamera {
    NSUInteger cameraCount = [[AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo] count];
    if (cameraCount > 1) {
        NSError *error;
        AVCaptureDeviceInput *newVideoInput;
        AVCaptureDevicePosition position = [[_videoInput device] position];
        // TODO(review): rebuild newVideoInput for the opposite position and
        // swap session inputs (lost in this copy).
        (void)newVideoInput; (void)position; (void)error;
    }
    DLog(@"切换摄像头");
}
// Shutter button action: capture a still, normalize orientation, rotate and
// scale it, hand it to the delegate, then dismiss.
// Fixes: restored the leading `-`; straightened curly quotes in string
// literals; restored the JPEG-decode lines that defined `image` (they were
// dropped by the scrape, leaving `image` undefined in the handler).
- (void)shutterCamera
{
    [MB_HUD show:YES];
    // Block re-entry while the async capture is in flight.
    self.shutterButton.userInteractionEnabled = NO;
    AVCaptureConnection *videoConnection = [self.stillImageOutput connectionWithMediaType:AVMediaTypeVideo];
    if (!videoConnection) {
        [MB_HUD setDetailsLabelText:@"take photo failed!"];
        [MB_HUD hide:YES afterDelay:1.0];
        return;
    }
    [self.stillImageOutput captureStillImageAsynchronouslyFromConnection:videoConnection completionHandler:^(CMSampleBufferRef imageDataSampleBuffer, NSError *error) {
        if (imageDataSampleBuffer == NULL) {
            [MB_HUD setDetailsLabelText:@"take photo null!"];
            [MB_HUD hide:YES afterDelay:1.0];
            return;
        }
        NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageDataSampleBuffer];
        UIImage *image = [UIImage imageWithData:imageData];
        UIImage *imageOne = [self fixOrientation:image];
        UIImage *imageTwo = [self image:imageOne rotation:UIImageOrientationLeft];
        UIImage *imageThree = [self scaleFromImage:imageTwo toSize:CGSizeMake(308, 400)];
        DLog(@"image size = %@", NSStringFromCGSize(imageThree.size));
        [self.delegate PassImagedata:imageThree];
        imageThree = nil;
        [MB_HUD hide:YES];
        [self dismissViewControllerAnimated:YES completion:nil];
    }];
}
// AVCaptureVideoDataOutput delegate: runs on the private capture queue.
// Fixes: the scrape dropped the `*` from both object-pointer parameter types;
// straightened the curly quotes in the DLog literal.
-(void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
    DLog(@"有图片");
    // Crude throttle: at most one frame every 1.5 s (blocks the capture queue).
    [NSThread sleepForTimeInterval:1.5];
    [self showImageView:sampleBuffer];
}
// Convert a captured frame to UIImage.
// NOTE(review): truncated in this copy — the complete version later in the
// file crops, filters and OCRs the image; here the result is unused.
-(void)showImageView:(CMSampleBufferRef)_sampleBuffer
{
UIImage *image=[self imageFromSampleBuffer:_sampleBuffer];
}
// Normalize the image's orientation.
// Fixes: the scrape dropped both `*` from the pointer types, and the whole
// implementation body (see the complete version later in the file). Returning
// the input unchanged keeps this stub compilable and side-effect free.
- (UIImage *)fixOrientation:(UIImage *)aImage
{
    return aImage;
}
#pragma mark - Navigation
/*
// In a storyboard-based application, you will often want to do a little preparation before navigation
- (void)prepareForSegue:(UIStoryboardSegue *)segue sender:(id)sender {
// Get the new view controller using [segue destinationViewController].
// Pass the selected object to the new view controller.
}
*/
// Rotate the image to the given orientation.
// Fixes: the scrape dropped both `*` from the pointer types and the entire
// CTM-transform body (see the complete version later in the file). Returning
// the input unchanged keeps this stub compilable.
- (UIImage *)image:(UIImage *)image rotation:(UIImageOrientation)orientation
{
    return image;
}
// Resize the image to `size` (shrinks it before uploading to the server).
// Fixes: restored the `*` in both pointer types; removed the unbalanced
// UIGraphicsPopContext() — nothing pushed a context, so popping corrupts the
// UIKit context stack. Begin/End alone is the correct pairing here.
- (UIImage *)scaleFromImage:(UIImage *)image toSize:(CGSize)size
{
    UIGraphicsBeginImageContext(size);
    [image drawInRect:CGRectMake(0, 0, size.width, size.height)];
    UIImage *newImage = UIGraphicsGetImageFromCurrentImageContext();
    UIGraphicsEndImageContext();
    return newImage;
}
// Turn the torch (flash LED) off.
// Fix: check the BOOL result of lockForConfiguration: — configuring a device
// without holding the lock is invalid.
-(void)turnOffLed {
    AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    if ([device hasTorch]) {
        if ([device lockForConfiguration:nil]) {
            [device setTorchMode:AVCaptureTorchModeOff];
            [device unlockForConfiguration];
        }
    }
}
// Turn the torch (flash LED) on.
// Fix: check the BOOL result of lockForConfiguration: — configuring a device
// without holding the lock is invalid.
-(void)turnOnLed {
    AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    if ([device hasTorch]) {
        if ([device lockForConfiguration:nil]) {
            [device setTorchMode:AVCaptureTorchModeOn];
            [device unlockForConfiguration];
        }
    }
}
// Create a UIImage from a sample buffer.
// Fixes: this truncated copy locked the pixel buffer and never unlocked it or
// returned a value (compile error). The conversion body was lost by the
// scrape (see the complete version later in the file); balance the lock and
// return nil so the stub is safe and compilable.
- (UIImage *)imageFromSampleBuffer:(CMSampleBufferRef)sampleBuffer
{
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    CVPixelBufferLockBaseAddress(imageBuffer, 0);
    // NOTE(review): bitmap-context conversion lost in this copy of the article.
    CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
    return nil;
}
// Per-pixel filter (grayscale/tint/invert).
// Fix: this truncated copy had an empty body with no return (compile error).
// The implementation was lost by the scrape (see the complete version later
// in the file); return the input unchanged so the stub compiles.
- (UIImage*)grayscale:(UIImage*)anImage type:(int)type {
    return anImage;
}
// NOTE(review): the scrape dropped this method's signature, leaving an orphan
// brace pair; restored from the complete copy later in the file. The body was
// also truncated — only the comment survived.
-(void)recognizeImageWithTesseract:(UIImage *)image{
    // Animate a progress activity indicator
}
// G8Tesseract delegate: recognition-progress callback.
// Fix: curly quotes in the DLog format string would not compile.
-(void)progressImageRecognitionForTesseract:(G8Tesseract *)tesseract
{
    DLog(@"progress: %lu", (unsigned long)tesseract.progress);
}
// G8Tesseract delegate: never cancel an in-flight recognition.
-(BOOL)shouldCancelImageRecognitionForTesseract:(G8Tesseract *)tesseract
{
return NO;
}
@end
声明 (DZC_Carmer_photo.h):
#import <...>   // scrape dropped the framework name on these three lines —
#import <...>   // likely UIKit, AVFoundation and the TesseractOCR umbrella header
#import <...>
#import "MBProgressHUD.h"
//#import "Tesseract.h"

@protocol PassImage
-(void)PassImagedata:(id)_data;
@end
@interface DZC_Carmer_photo : UIViewController
实现 (DZC_Carmer_photo.m):
#import "DZC_Carmer_photo.h"
#import "UIImage+DZC_UImageScral.h"
#import "DzcDES.h"
#import "DZC_CustomLine.h"
//#import "Tesseract.h"

@interface DZC_Carmer_photo ()
{
UIButton *btn;
UIImageView *imgView;
}
@end
@implementation DZC_Carmer_photo
// Initializer: wires up the AVCaptureSession immediately on creation.
// Fix: removed the redundant early `return self` inside the if — both paths
// returned self, so a single return is equivalent and clearer.
-(instancetype)init
{
    self = [super init];
    if (self)
    {
        [self initialSession];
    }
    return self;
}
// Cancel any in-flight OCR operations when the screen is leaving; prevents
// recognition callbacks from firing against a dismissed controller.
-(void)viewWillDisappear:(BOOL)animated
{
[super viewWillDisappear:animated];
[self.operationQueue cancelAllOperations];
}
// Build the static UI: cancel button, hint label, progress HUD and the
// orange framing rectangle the user aligns the ID number inside.
// Fix: curly quotes (” ”) inside @"" string literals would not compile;
// also reflowed the scrape-collapsed single line into readable statements.
- (void)viewDidLoad {
    [super viewDidLoad];
    self.operationQueue = [[NSOperationQueue alloc] init];

    UIButton *returnBtn = [[UIButton alloc] initWithFrame:CGRectMake(10, SCREENHEIGHT-100, 100, 50)];
    [returnBtn setTitle:@"撤销" forState:UIControlStateNormal];
    returnBtn.backgroundColor = [UIColor orangeColor];
    [returnBtn addTarget:self action:@selector(CloseBtn) forControlEvents:UIControlEventTouchUpInside];
    [self.view addSubview:returnBtn];
    [self.view bringSubviewToFront:returnBtn];

    UILabel *titleLable = [[UILabel alloc] initWithFrame:CGRectMake(SCREENWIDTH-110, SCREENHEIGHT-120, 100, 140)];
    [titleLable setFont:[UIFont systemFontOfSize:10]];
    [titleLable setText:@"友情提醒:扫描身份证时请将身份证号码放置于矩形框内"];
    titleLable.numberOfLines = 0;
    [titleLable setTextColor:[UIColor whiteColor]];
    [self.view addSubview:titleLable];

    MB_HUD = [[MBProgressHUD alloc] init];
    [self.view addSubview:MB_HUD];

    // Orange guide frame at screen center; showImageView: crops to this region.
    iamgeview = [[UIImageView alloc] initWithFrame:CGRectMake(0, 0, 300, 60)];
    iamgeview.center = CGPointMake(SCREENWIDTH/2, SCREENHEIGHT/2);
    iamgeview.layer.borderWidth = 2.0;
    iamgeview.layer.borderColor = [UIColor orangeColor].CGColor;
    [self.view addSubview:iamgeview];
    [self.view bringSubviewToFront:iamgeview];
}
// "撤销" (cancel) button action: close this camera screen without a result.
-(void)CloseBtn
{
[self dismissViewControllerAnimated:YES completion:nil];
}
// Standard template override; no extra cleanup performed here.
- (void)didReceiveMemoryWarning {
[super didReceiveMemoryWarning];
// Dispose of any resources that can be recreated.
}
// Configure the AVCaptureSession: medium preset, back-camera input, and a
// 32-bit BGRA video-data output that delivers frames to this class on a
// private serial queue (invoked from -init).
- (void) initialSession
{
    self.session = [[AVCaptureSession alloc] init];
    [self.session setSessionPreset:AVCaptureSessionPresetMedium];
    // Back camera; the error out-param is intentionally ignored here.
    self.videoInput = [[AVCaptureDeviceInput alloc] initWithDevice:[self backCamera] error:nil];
    self.VideoDataOutput = [[AVCaptureVideoDataOutput alloc] init];
    if ([self.session canAddInput:self.videoInput]) {
        [self.session addInput:self.videoInput];
    }
    if ([self.session canAddOutput:self.VideoDataOutput]) {
        [self.session addOutput:self.VideoDataOutput];
        dispatch_queue_t queue = dispatch_queue_create("myQueue", NULL);
        [self.VideoDataOutput setSampleBufferDelegate:self queue:queue];
        // Ask for BGRA so frames can be wrapped directly in a CGBitmapContext.
        self.VideoDataOutput.videoSettings = [NSDictionary dictionaryWithObject:
            [NSNumber numberWithInt:kCVPixelFormatType_32BGRA] forKey:(id)kCVPixelBufferPixelFormatTypeKey];
    }
}
// Return the first video-capture device at the requested position, recording
// availability in _cameraAvaible; nil (and NO) when none exists.
- (AVCaptureDevice *)cameraWithPosition:(AVCaptureDevicePosition) position {
    NSArray *videoDevices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
    for (AVCaptureDevice *candidate in videoDevices) {
        if (candidate.position != position) {
            continue;
        }
        _cameraAvaible = YES;
        return candidate;
    }
    _cameraAvaible = NO;
    return nil;
}
// Convenience: the front-facing camera, or nil if unavailable.
// Fix: the scrape dropped the leading `-` from the method signature.
- (AVCaptureDevice *)frontCamera {
    return [self cameraWithPosition:AVCaptureDevicePositionFront];
}
// Convenience: the rear camera, or nil if unavailable.
// Fix: the scrape dropped the leading `-` from the method signature.
- (AVCaptureDevice *)backCamera {
    return [self cameraWithPosition:AVCaptureDevicePositionBack];
}
// Install the full-screen camera preview layer behind every other view
// (called from viewWillAppear; idempotent via the nil check).
// Fix: restored the leading `-` dropped by the scrape; reflowed the collapsed
// line; removed the large commented-out alternative implementation.
- (void) setUpCameraLayer
{
    if (_cameraAvaible == NO) return;
    if (self.previewLayer == nil) {
        self.previewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:self.session];
        self.previewLayer.videoGravity = AVLayerVideoGravityResizeAspect;
        self.previewLayer.frame = CGRectMake(0, 0, SCREENWIDTH, SCREENHEIGHT);
        self.previewLayer.position = CGPointMake(SCREENWIDTH/2, SCREENHEIGHT/2);
        // Index 0 keeps the buttons/labels added in viewDidLoad on top.
        [self.view.layer insertSublayer:self.previewLayer atIndex:0];
    }
}
// Install the camera preview layer just before the view appears.
-(void)viewWillAppear:(BOOL)animated
{
[super viewWillAppear:animated];
[self setUpCameraLayer];
// DZC_CustomLine *line=[[DZC_CustomLine alloc]init];
// line.backgroundColor=[UIColor clearColor];
// line.frame=self.previewLayer.frame;
// [self.view addSubview:line];
// [line setNeedsDisplay];
//[self turnOnLed]; // turn the flash on
}
// Start the capture session once the view is on screen.
// Fix: the scrape dropped the leading `-` from the method signature.
- (void)viewDidAppear:(BOOL)animated
{
    [super viewDidAppear:animated];
    if (self.session) {
        [self.session startRunning];
    }
}
// Stop the session and tear down the preview when the view goes away.
// Fix: scrape artifact — the selector was split across lines as
// "viewDidDisappea / 4000 / r:(BOOL)animated"; the stray "4000" is an HTTP
// chunk-size token, not code. Also restored the leading `-`.
- (void)viewDidDisappear:(BOOL)animated
{
    [super viewDidDisappear:animated];
    if (self.session) {
        [self.session stopRunning];
    }
    [self.previewLayer removeFromSuperlayer];
    self.previewLayer = nil;
    self.session = nil;
    //[self turnOffLed]; // turn the flash off
}
// Button action: swap the session's camera input between front and back.
// Fixes: restored the leading `-`; straightened curly quotes in the DLog
// literal; reflowed the scrape-collapsed line.
- (void)toggleCamera {
    NSUInteger cameraCount = [[AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo] count];
    if (cameraCount > 1) {
        NSError *error;
        AVCaptureDeviceInput *newVideoInput;
        AVCaptureDevicePosition position = [[_videoInput device] position];
        if (position == AVCaptureDevicePositionBack)
            newVideoInput = [[AVCaptureDeviceInput alloc] initWithDevice:[self frontCamera] error:&error];
        else if (position == AVCaptureDevicePositionFront)
            newVideoInput = [[AVCaptureDeviceInput alloc] initWithDevice:[self backCamera] error:&error];
        else
            return;
        if (newVideoInput != nil) {
            [self.session beginConfiguration];
            [self.session removeInput:self.videoInput];
            if ([self.session canAddInput:newVideoInput]) {
                [self.session addInput:newVideoInput];
                [self setVideoInput:newVideoInput];
            } else {
                // Could not attach the new input: restore the previous one.
                [self.session addInput:self.videoInput];
            }
            [self.session commitConfiguration];
        } else if (error) {
            NSLog(@"toggle carema failed, error = %@", error);
        }
    }
    DLog(@"切换摄像头");
}
// Shutter button action: capture a still, normalize orientation, rotate and
// scale it to 308x400, hand it to the delegate, then dismiss.
// Fixes: restored the leading `-`; straightened curly quotes in the string
// literals; reflowed the collapsed lines.
- (void)shutterCamera
{
    [MB_HUD show:YES];
    // Block re-entry while the async capture is in flight.
    self.shutterButton.userInteractionEnabled = NO;
    AVCaptureConnection *videoConnection = [self.stillImageOutput connectionWithMediaType:AVMediaTypeVideo];
    if (!videoConnection) {
        [MB_HUD setDetailsLabelText:@"take photo failed!"];
        [MB_HUD hide:YES afterDelay:1.0];
        return;
    }
    [self.stillImageOutput captureStillImageAsynchronouslyFromConnection:videoConnection completionHandler:^(CMSampleBufferRef imageDataSampleBuffer, NSError *error) {
        if (imageDataSampleBuffer == NULL) {
            [MB_HUD setDetailsLabelText:@"take photo null!"];
            [MB_HUD hide:YES afterDelay:1.0];
            return;
        }
        NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageDataSampleBuffer];
        UIImage *image = [UIImage imageWithData:imageData];
        DLog(@"image size = %@", NSStringFromCGSize(image.size));
        UIImage *imageOne = [self fixOrientation:image];
        UIImage *imageTwo = [self image:imageOne rotation:UIImageOrientationLeft];
        UIImage *imageThree = [self scaleFromImage:imageTwo toSize:CGSizeMake(308, 400)];
        DLog(@"image size = %@", NSStringFromCGSize(imageThree.size));
        [self.delegate PassImagedata:imageThree];
        imageThree = nil;
        [MB_HUD hide:YES];
        [self dismissViewControllerAnimated:YES completion:nil];
        //[self turnOffLed];
    }];
}
// AVCaptureVideoDataOutput delegate: runs on the private capture queue.
// Fixes: the scrape dropped the `*` from both object-pointer parameter types;
// straightened the curly quotes in the DLog literal.
-(void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
    DLog(@"有图片");
    // Crude throttle: at most one frame every 1.5 s (blocks the capture queue,
    // which in turn paces frame delivery).
    [NSThread sleepForTimeInterval:1.5];
    [self showImageView:sampleBuffer];
}
// Convert a captured frame to UIImage, crop the region under the on-screen
// guide frame, invert it (grayscale type 3), and queue it for OCR.
-(void)showImageView:(CMSampleBufferRef)_sampleBuffer
{
    UIImage *image = [self imageFromSampleBuffer:_sampleBuffer];
    if (image) {
        UIImage *one = [self fixOrientation:image];
        // Round-trip through JPEG to normalize the bitmap before cropping.
        NSData *data = UIImageJPEGRepresentation(one, 1.0);
        one = [UIImage imageWithData:data];
        // NOTE(review): the crop x/y are hard-coded offsets relative to the
        // guide view — confirm they still line up on all screen sizes.
        UIImage *img = [one getSubImage:CGRectMake(SCREENWIDTH/2-150, iamgeview.frame.origin.y-60, iamgeview.frame.size.width, iamgeview.frame.size.height)];
        img = [self grayscale:img type:3];
        [self recognizeImageWithTesseract:img];
    }
}
// Redraw the image so its pixel data is upright (orientation == Up).
// Two steps: rotate for Down/Left/Right, then mirror-flip for *Mirrored.
// Fix: the scrape dropped both `*` from the pointer types; also reflowed the
// collapsed lines.
- (UIImage *)fixOrientation:(UIImage *)aImage
{
    // Already upright: nothing to do.
    if (aImage.imageOrientation == UIImageOrientationUp)
        return aImage;

    CGAffineTransform transform = CGAffineTransformIdentity;

    switch (aImage.imageOrientation) {
        case UIImageOrientationDown:
        case UIImageOrientationDownMirrored:
            transform = CGAffineTransformTranslate(transform, aImage.size.width, aImage.size.height);
            transform = CGAffineTransformRotate(transform, M_PI);
            break;
        case UIImageOrientationLeft:
        case UIImageOrientationLeftMirrored:
            transform = CGAffineTransformTranslate(transform, aImage.size.width, 0);
            transform = CGAffineTransformRotate(transform, M_PI_2);
            break;
        case UIImageOrientationRight:
        case UIImageOrientationRightMirrored:
            transform = CGAffineTransformTranslate(transform, 0, aImage.size.height);
            transform = CGAffineTransformRotate(transform, -M_PI_2);
            break;
        default:
            break;
    }

    switch (aImage.imageOrientation) {
        case UIImageOrientationUpMirrored:
        case UIImageOrientationDownMirrored:
            transform = CGAffineTransformTranslate(transform, aImage.size.width, 0);
            transform = CGAffineTransformScale(transform, -1, 1);
            break;
        case UIImageOrientationLeftMirrored:
        case UIImageOrientationRightMirrored:
            transform = CGAffineTransformTranslate(transform, aImage.size.height, 0);
            transform = CGAffineTransformScale(transform, -1, 1);
            break;
        default:
            break;
    }

    // Draw the underlying CGImage into a new context with the transform applied.
    CGContextRef ctx = CGBitmapContextCreate(NULL, aImage.size.width, aImage.size.height,
                                             CGImageGetBitsPerComponent(aImage.CGImage), 0,
                                             CGImageGetColorSpace(aImage.CGImage),
                                             CGImageGetBitmapInfo(aImage.CGImage));
    CGContextConcatCTM(ctx, transform);
    switch (aImage.imageOrientation) {
        case UIImageOrientationLeft:
        case UIImageOrientationLeftMirrored:
        case UIImageOrientationRight:
        case UIImageOrientationRightMirrored:
            // Width/height swap for the 90°-rotated cases.
            CGContextDrawImage(ctx, CGRectMake(0, 0, aImage.size.height, aImage.size.width), aImage.CGImage);
            break;
        default:
            CGContextDrawImage(ctx, CGRectMake(0, 0, aImage.size.width, aImage.size.height), aImage.CGImage);
            break;
    }

    CGImageRef cgimg = CGBitmapContextCreateImage(ctx);
    UIImage *img = [UIImage imageWithCGImage:cgimg];
    CGContextRelease(ctx);
    CGImageRelease(cgimg);
    return img;
}
#pragma mark - Navigation
/*// In a storyboard-based application, you will often want to do a little preparation before navigation
- (void)prepareForSegue:(UIStoryboardSegue *)segue sender:(id)sender {
// Get the new view controller using [segue destinationViewController].
// Pass the selected object to the new view controller.
}
*/
// Redraw `image` rotated to the requested orientation via a CTM transform.
// Fixes: restored both `*` dropped from the pointer types by the scrape, and
// added the missing UIGraphicsEndImageContext() — the original leaked an
// image context on every call.
- (UIImage *)image:(UIImage *)image rotation:(UIImageOrientation)orientation
{
    long double rotate = 0.0;
    CGRect rect;
    float translateX = 0;
    float translateY = 0;
    float scaleX = 1.0;
    float scaleY = 1.0;
    switch (orientation) {
        case UIImageOrientationLeft:
            rotate = M_PI_2;
            rect = CGRectMake(0, 0, image.size.height, image.size.width);
            translateX = 0;
            translateY = -rect.size.width;
            scaleY = rect.size.width / rect.size.height;
            scaleX = rect.size.height / rect.size.width;
            break;
        case UIImageOrientationRight:
            rotate = 3 * M_PI_2;
            rect = CGRectMake(0, 0, image.size.height, image.size.width);
            translateX = -rect.size.height;
            translateY = 0;
            scaleY = rect.size.width / rect.size.height;
            scaleX = rect.size.height / rect.size.width;
            break;
        case UIImageOrientationDown:
            rotate = M_PI;
            rect = CGRectMake(0, 0, image.size.width, image.size.height);
            translateX = -rect.size.width;
            translateY = -rect.size.height;
            break;
        default:
            rotate = 0.0;
            rect = CGRectMake(0, 0, image.size.width, image.size.height);
            translateX = 0;
            translateY = 0;
            break;
    }
    UIGraphicsBeginImageContext(rect.size);
    CGContextRef context = UIGraphicsGetCurrentContext();
    // CTM transform: flip into CG coordinates, then rotate/translate/scale.
    CGContextTranslateCTM(context, 0.0, rect.size.height);
    CGContextScaleCTM(context, 1.0, -1.0);
    CGContextRotateCTM(context, rotate);
    CGContextTranslateCTM(context, translateX, translateY);
    CGContextScaleCTM(context, scaleX, scaleY);
    CGContextDrawImage(context, CGRectMake(0, 0, rect.size.width, rect.size.height), image.CGImage);
    UIImage *newPic = UIGraphicsGetImageFromCurrentImageContext();
    UIGraphicsEndImageContext();   // balance Begin — fixes the context leak
    return newPic;
}
// Resize the image to `size` (shrinks it before uploading to the server).
// Fixes: restored the `*` in both pointer types; removed the unbalanced
// UIGraphicsPopContext() — nothing pushed a context, so popping corrupts the
// UIKit context stack. Begin/End alone is the correct pairing here.
- (UIImage *)scaleFromImage:(UIImage *)image toSize:(CGSize)size
{
    UIGraphicsBeginImageContext(size);
    [image drawInRect:CGRectMake(0, 0, size.width, size.height)];
    UIImage *newImage = UIGraphicsGetImageFromCurrentImageContext();
    UIGraphicsEndImageContext();
    return newImage;
}
// Turn the torch (flash LED) off.
// Fix: check the BOOL result of lockForConfiguration: — configuring a device
// without holding the lock is invalid.
-(void)turnOffLed {
    AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    if ([device hasTorch]) {
        if ([device lockForConfiguration:nil]) {
            [device setTorchMode:AVCaptureTorchModeOff];
            [device unlockForConfiguration];
        }
    }
}
// Turn the torch (flash LED) on.
// Fix: check the BOOL result of lockForConfiguration: — configuring a device
// without holding the lock is invalid.
-(void)turnOnLed {
    AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    if ([device hasTorch]) {
        if ([device lockForConfiguration:nil]) {
            [device setTorchMode:AVCaptureTorchModeOn];
            [device unlockForConfiguration];
        }
    }
}
// Wrap a BGRA sample buffer's pixel data in a CGBitmapContext and snapshot it
// into a UIImage (orientation forced to Right, matching portrait capture).
- (UIImage *)imageFromSampleBuffer:(CMSampleBufferRef)sampleBuffer
{
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    // Lock the pixel buffer while we read its base address.
    CVPixelBufferLockBaseAddress(imageBuffer, 0);
    void *baseAddress = CVPixelBufferGetBaseAddress(imageBuffer);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
    size_t width = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    // Byte order matches the kCVPixelFormatType_32BGRA output configured on the session.
    CGContextRef context = CGBitmapContextCreate(baseAddress, width, height, 8, bytesPerRow, colorSpace,
                                                 kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
    CGImageRef quartzImage = CGBitmapContextCreateImage(context);
    CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
    CGContextRelease(context);
    CGColorSpaceRelease(colorSpace);
    UIImage *image = [UIImage imageWithCGImage:quartzImage scale:1.0f orientation:UIImageOrientationRight];
    CGImageRelease(quartzImage);
    return image;
}
// Per-pixel filter. type 1: luma grayscale; type 2: warm tint; type 3: color
// inversion (used before OCR); anything else: unchanged copy.
// Fix: scrape artifact "CGImageRelease(effecte aa76 dCgImage)" restored to
// CGImageRelease(effectedCgImage) — "aa76" is an injected HTTP chunk token.
- (UIImage*)grayscale:(UIImage*)anImage type:(int)type {
    CGImageRef imageRef = anImage.CGImage;
    size_t width = CGImageGetWidth(imageRef);
    size_t height = CGImageGetHeight(imageRef);
    size_t bitsPerComponent = CGImageGetBitsPerComponent(imageRef);
    size_t bitsPerPixel = CGImageGetBitsPerPixel(imageRef);
    size_t bytesPerRow = CGImageGetBytesPerRow(imageRef);
    CGColorSpaceRef colorSpace = CGImageGetColorSpace(imageRef);
    CGBitmapInfo bitmapInfo = CGImageGetBitmapInfo(imageRef);
    bool shouldInterpolate = CGImageGetShouldInterpolate(imageRef);
    CGColorRenderingIntent intent = CGImageGetRenderingIntent(imageRef);
    CGDataProviderRef dataProvider = CGImageGetDataProvider(imageRef);
    CFDataRef data = CGDataProviderCopyData(dataProvider);
    UInt8 *buffer = (UInt8 *)CFDataGetBytePtr(data);
    NSUInteger x, y;
    for (y = 0; y < height; y++) {
        for (x = 0; x < width; x++) {
            // assumes 4 bytes per pixel (BGRA/RGBA) — TODO(review): confirm
            UInt8 *tmp = buffer + y * bytesPerRow + x * 4;
            UInt8 red = *(tmp + 0);
            UInt8 green = *(tmp + 1);
            UInt8 blue = *(tmp + 2);
            UInt8 brightness;
            switch (type) {
                case 1:
                    brightness = (77 * red + 28 * green + 151 * blue) / 256;
                    *(tmp + 0) = brightness;
                    *(tmp + 1) = brightness;
                    *(tmp + 2) = brightness;
                    break;
                case 2:
                    *(tmp + 0) = red;
                    *(tmp + 1) = green * 0.7;
                    *(tmp + 2) = blue * 0.4;
                    break;
                case 3:
                    *(tmp + 0) = 255 - red;
                    *(tmp + 1) = 255 - green;
                    *(tmp + 2) = 255 - blue;
                    break;
                default:
                    *(tmp + 0) = red;
                    *(tmp + 1) = green;
                    *(tmp + 2) = blue;
                    break;
            }
        }
    }
    CFDataRef effectedData = CFDataCreate(NULL, buffer, CFDataGetLength(data));
    CGDataProviderRef effectedDataProvider = CGDataProviderCreateWithCFData(effectedData);
    CGImageRef effectedCgImage = CGImageCreate(width, height, bitsPerComponent, bitsPerPixel, bytesPerRow,
                                               colorSpace, bitmapInfo, effectedDataProvider, NULL,
                                               shouldInterpolate, intent);
    UIImage *effectedImage = [[UIImage alloc] initWithCGImage:effectedCgImage];
    CGImageRelease(effectedCgImage);
    CFRelease(effectedDataProvider);
    CFRelease(effectedData);
    CFRelease(data);
    return effectedImage;
}
#pragma mark - 图片识别代码 (ID-number OCR)
// Run asynchronous digit-only Tesseract OCR on `image`. If any recognized
// token validates as an ID-card number, deliver it to the delegate on the
// main queue and dismiss this screen.
// Requires eng.traineddata inside a referenced "tessdata" folder.
-(void)recognizeImageWithTesseract:(UIImage *)image{
    G8RecognitionOperation *operation = [[G8RecognitionOperation alloc] initWithLanguage:@"eng"];
    // Classic Tesseract engine, automatic page segmentation.
    operation.tesseract.engineMode = G8OCREngineModeTesseractOnly;
    operation.tesseract.pageSegmentationMode = G8PageSegmentationModeAutoOnly;
    // This class receives progress / should-cancel callbacks (see below).
    operation.delegate = self;
    // Digits only — we are scanning for an ID-card number.
    operation.tesseract.charWhitelist = @"0123456789";
    operation.tesseract.image = image;
    operation.recognitionCompleteBlock = ^(G8Tesseract *tesseract) {
        NSString *recognizedText = tesseract.recognizedText;
        DLog(@"-----%@", recognizedText);
        NSString *str = [DzcDES trim:recognizedText];
        NSRange range = [str rangeOfString:@" "];
        if (range.location != NSNotFound) {
            // Multiple space-separated tokens: validate each one.
            NSArray *array = [str componentsSeparatedByString:@" "];
            for (NSString *ss in array) {
                DLog(@"---%@", ss);
                NSString *s = [DzcDES trim:ss];
                if ([DzcDES validateIdentityCard:s]) {
                    DLog(@"字符串为身份证号码");
                    dispatch_async(dispatch_get_main_queue(), ^{
                        [self.delegate PassImagedata:s];
                        [self dismissViewControllerAnimated:YES completion:nil];
                    });
                }
            }
        } else {
            str = [DzcDES trim:str];
            if ([DzcDES validateIdentityCard:str]) {
                dispatch_async(dispatch_get_main_queue(), ^{
                    [self.delegate PassImagedata:str];
                    [self dismissViewControllerAnimated:YES completion:nil];
                });
            }
        }
    };
    [self.operationQueue addOperation:operation];
}
// G8Tesseract delegate: recognition-progress callback.
// Fix: curly quotes in the DLog format string would not compile.
-(void)progressImageRecognitionForTesseract:(G8Tesseract *)tesseract
{
    DLog(@"progress: %lu", (unsigned long)tesseract.progress);
}
// G8Tesseract delegate: never cancel an in-flight recognition.
-(BOOL)shouldCancelImageRecognitionForTesseract:(G8Tesseract *)tesseract
{
return NO;
}
@end
相关文章推荐
- 峰回路转,Firefox 浏览器即将重返 iOS 平台
- 峰回路转,Firefox 浏览器即将重返 iOS 平台
- 不可修补的 iOS 漏洞可能导致 iPhone 4s 到 iPhone X 永久越狱
- iOS 12.4 系统遭黑客破解,漏洞危及数百万用户
- 每日安全资讯:NSO,一家专业入侵 iPhone 的神秘公司
- [转][源代码]Comex公布JailbreakMe 3.0源代码
- C#实现控制摄像头的类
- 讲解iOS开发中基本的定位功能实现
- iOS中定位当前位置坐标及转换为火星坐标的方法
- js判断客户端是iOS还是Android等移动终端的方法
- iOS应用开发中AFNetworking库的常用HTTP操作方法小结
- iOS应用中UISearchDisplayController搜索效果的用法
- IOS开发环境windows化攻略
- iOS应用中UITableView左滑自定义选项及批量删除的实现
- iOS中UIAlertView警告框组件的使用教程
- 浅析iOS应用开发中线程间的通信与线程安全问题
- 检测iOS设备是否越狱的方法
- Android 开发随手笔记之使用摄像头拍照
- .net平台推送ios消息的实现方法