qt for ios扫描二维码功能实现
2015-04-15 15:06
525 查看
问题:
公司项目考虑到跨平台一直都是用 Qt 做。由于项目需求,项目上要增加一个二维码扫描功能。在安卓上可以用 QVideoProbe 抓取摄像头视频帧,再用 QZXing 解码图片,从而实现二维码扫描;但是在 iOS 上 QVideoProbe 并不支持,所以只好选择其他抓取视频帧的方法。曾考虑使用 OpenCV 抓取视频帧,但查看 iOS 文档时发现 iOS 7 以上系统直接支持二维码扫描,所以放弃了 OpenCV 抓取 + ZXing 解码的方案,改为采用 iOS 官方提供的二维码解码功能。
实现:
由于我们项目ui一直是用qml实现,但是要实现扫描二维码功能,需要调用AVFoundation中的方法,同时要显示ios中的ui显示摄像头及返回qml键.所以这里需要结合oc和qt编程.
直接上代码:
pro文件增加
ios {
    # Objective-C++ sources (compiled with the Obj-C++ front end)
    OBJECTIVE_SOURCES += IOSView.mm \
                         IOSCamera.mm
    # BUG fix: the qmake variable is HEADERS, not HEADER
    HEADERS += IOSView.h \
               IOSCamera.h \
               IOSCameraViewProtocol.h
    # Link AVFoundation; note the required space after -framework
    QMAKE_LFLAGS += -framework AVFoundation
    # Private GUI headers are needed for QPlatformNativeInterface
    QT += gui-private
}
重新qmake生成xcodeproject
IOSView.#include<QQuickItem>
classIOSView:publicQQuickItem
{
Q_OBJECT
Q_PROPERTY(QStringqrcodeTextREADqrcodeTextWRITEsetQrcodeTextNOTIFYqrcodeTextChanged)
public:
explicitIOSView(QQuickItem*parent=0);
QStringqrcodeText(){
returnm_qrcodeText;
}
voidsetQrcodeText(QStringtext){
m_qrcodeText=text;
emitqrcodeTextChanged();
}
QStringm_qrcodeText;
publicslots:
voidstartScan(); //foropenioscamerascanandui
private:
void*m_delegate; //forcommunicationwithqt
signals:
voidqrcodeTextChanged();
voidstopCameraScan(); //showqml
};
IOSView.mm
#include<UIKit/UIKit.h>
#include<QtQuick>
#include<QtGui>
#include<QtGui/qpa/qplatformnativeinterface.h>
#include"IOSView.h"
#include"IOSCamera.h"
// Obj-C delegate that receives camera-scan callbacks and forwards them
// into the Qt object. Lives in IOSView.mm so it can call C++ directly.
// BUG fix: the protocol declared in IOSCameraViewProtocol.h is named
// CameraScanViewProtocol, not IOSCameraProtocol.
@interface IOSCameraDelegate : NSObject <CameraScanViewProtocol> {
    IOSView *m_iosView;   // not retained: the IOSView owns this delegate
}
- (id)initWithIOSCamera:(IOSView *)iosView;
@end

@implementation IOSCameraDelegate

- (id)initWithIOSCamera:(IOSView *)iosView
{
    self = [super init];
    if (self) {
        m_iosView = iosView;
    }
    return self;
}

// User cancelled scanning: ask Qt to bring the QML UI back.
- (void)scanCancel {
    emit m_iosView->stopCameraScan();
}

// A QR code was decoded: hand the text to the Qt property (fires NOTIFY).
- (void)scanResult:(NSString *)result {
    m_iosView->setQrcodeText(QString::fromNSString(result));
}

@end
// Construct the Qt item and its Obj-C bridge delegate.
IOSView::IOSView(QQuickItem *parent) :
    QQuickItem(parent),
    m_delegate([[IOSCameraDelegate alloc] initWithIOSCamera:this])
{
}

// Present the native camera view controller on top of the Qt window
// and start the QR-code capture session.
void IOSView::startScan()
{
    // Get the UIView that backs our QQuickWindow:
    UIView *view = static_cast<UIView *>(
        QGuiApplication::platformNativeInterface()->nativeResourceForWindow("uiview", window()));
    UIViewController *qtController = [[view window] rootViewController];

    // BUG fix: the controller class declared in IOSCamera.h is IOSCamera,
    // not IOSCameraView.
    IOSCamera *iosCamera = [[[IOSCamera alloc] init] autorelease];
    iosCamera.delegate = (id)m_delegate;

    // Tell the camera controller to animate on top:
    [qtController presentViewController:iosCamera animated:YES completion:nil];
    // NOTE(review): startScan relies on the xib-backed view (viewPreview)
    // being loaded; presentViewController: loads it before returning here.
    [iosCamera startScan];
}
IOSCameraViewProtocol.h
// IOSCameraViewProtocol.h
// Callback protocol the scanner view controller uses to report results
// back to the Qt bridge (implemented by IOSCameraDelegate in IOSView.mm).
#import <Foundation/Foundation.h>

@protocol CameraScanViewProtocol <NSObject>
@required
/// The user cancelled scanning (back button tapped).
- (void)scanCancel;
/// A code was decoded; `result` is the payload string.
- (void)scanResult:(NSString *)result;
@end
IOSCamera.h
// IOSCamera.h — view controller that shows the camera preview and decodes
// QR codes with AVFoundation (requires iOS 7+).
#import <UIKit/UIKit.h>
#import <AVFoundation/AVFoundation.h>
// BUG fix: the header listed in the .pro file is IOSCameraViewProtocol.h,
// not CameraViewProtocol.h.
#import "IOSCameraViewProtocol.h"

@interface IOSCamera : UIViewController <AVCaptureMetadataOutputObjectsDelegate>

// xib-connected container the live preview layer is added to.
@property (retain, nonatomic) IBOutlet UIView *viewPreview;

// Receiver of scan results / cancellation.
// BUG fix: use assign (the MRC delegate convention), not retain — the
// original's retained delegate plus duplicate ivar risked a retain cycle.
@property (assign) id<CameraScanViewProtocol> delegate;

// Back button: cancel scanning and return to the Qt/QML UI.
- (IBAction)backQtApp:(id)sender;

// Start the capture session (no-op if already scanning).
- (void)startScan;

@end
// IOSCamera.mm (Objective-C++; listed under OBJECTIVE_SOURCES in the .pro)
#import "IOSCamera.h"

// Private capture-pipeline state.
@interface IOSCamera () {
    BOOL _isReading;   // YES while a capture session is running
}
// BUG fix: this file uses manual reference counting (see dealloc below),
// so the ownership attribute is retain, not the ARC-only strong.
@property (nonatomic, retain) AVCaptureSession *captureSession;
@property (nonatomic, retain) AVCaptureVideoPreviewLayer *videoPreviewLayer;
- (BOOL)startReading;
- (void)stopReading;
- (void)openQtLayer;
@end

// BUG fix: the class declared in IOSCamera.h is IOSCamera, not CameraScanView.
@implementation IOSCamera

@synthesize delegate;   // delegate for interaction with Qt

// Initialise flags; the capture session is created lazily in startReading.
- (void)viewDidLoad {
    [super viewDidLoad];
    _captureSession = nil;
    _isReading = NO;
    // NOTE(review): the original called [self loadBeepSound] here, but no
    // such method is defined anywhere in this listing — removed.
}

- (void)didReceiveMemoryWarning {
    [super didReceiveMemoryWarning];
}

#pragma mark - Memory management (MRC)

- (void)dealloc {
    [_viewPreview release];
    [super dealloc];
}

- (void)viewDidUnload {
    [self setViewPreview:nil];
    [super viewDidUnload];
}

#pragma mark - Actions

// Back button: tell Qt the user cancelled, then tear the session down.
- (IBAction)backQtApp:(id)sender {
    [delegate scanCancel];
    [self stopReading];
}
// Dismiss this camera controller so Qt's root view controller
// (and with it the QML scene) becomes visible again.
- (void)openQtLayer {
    // Bring back Qt's view controller:
    UIViewController *rvc = [[[UIApplication sharedApplication] keyWindow] rootViewController];
    [rvc dismissViewControllerAnimated:YES completion:nil];
}
// Public entry point: begin QR scanning unless a session is already running.
- (void)startScan {
    if (!_isReading) {
        // Start reading a QR code; startReading returns YES once the
        // capture session is up and running.
        if ([self startReading]) {
            NSLog(@"StartReading!!");
        }
    }
}
#pragma mark - Private method implementation

// Build and start the AVFoundation capture pipeline:
// camera input -> QR metadata output, with a preview layer in viewPreview.
// Returns NO if the camera input could not be created (no camera / denied).
- (BOOL)startReading {
    // BUG fix: initialise error so the failure path never reads garbage.
    NSError *error = nil;

    // Default video capture device (the back camera).
    AVCaptureDevice *captureDevice =
        [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];

    AVCaptureDeviceInput *input =
        [AVCaptureDeviceInput deviceInputWithDevice:captureDevice error:&error];
    if (!input) {
        // If any error occurs, log its description and don't continue.
        NSLog(@"%@", [error localizedDescription]);
        return NO;
    }

    // Initialize the capture session and attach the camera input.
    _captureSession = [[AVCaptureSession alloc] init];
    [_captureSession addInput:input];

    // Metadata output restricted to QR codes; callbacks arrive on a
    // private serial queue (metadataObjectTypes must be set after addOutput:).
    AVCaptureMetadataOutput *captureMetadataOutput = [[AVCaptureMetadataOutput alloc] init];
    [_captureSession addOutput:captureMetadataOutput];

    dispatch_queue_t dispatchQueue = dispatch_queue_create("myQueue", NULL);
    [captureMetadataOutput setMetadataObjectsDelegate:self queue:dispatchQueue];
    [captureMetadataOutput setMetadataObjectTypes:
        [NSArray arrayWithObject:AVMetadataObjectTypeQRCode]];

    // Live preview layer filling the xib's viewPreview view.
    _videoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:_captureSession];
    [_videoPreviewLayer setVideoGravity:AVLayerVideoGravityResizeAspectFill];
    [_videoPreviewLayer setFrame:_viewPreview.layer.bounds];
    [_viewPreview.layer addSublayer:_videoPreviewLayer];

    // Start video capture.
    [_captureSession startRunning];
    _isReading = YES;
    return YES;
}
// Tear down the capture pipeline and return control to the Qt UI.
- (void)stopReading {
    // Stop video capture and drop the capture session.
    [_captureSession stopRunning];
    // BUG fix: this file uses manual reference counting (see dealloc), so
    // setting the ivar to nil without releasing leaked the session and the
    // preview layer on every scan.
    [_captureSession release];
    _captureSession = nil;

    // Remove and release the preview layer (startReading allocates a new one).
    [_videoPreviewLayer removeFromSuperlayer];
    [_videoPreviewLayer release];
    _videoPreviewLayer = nil;

    _isReading = NO;
    [self openQtLayer];
}
#pragma mark - AVCaptureMetadataOutputObjectsDelegate method implementation

// Called on the private dispatch queue whenever metadata is detected.
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputMetadataObjects:(NSArray *)metadataObjects
       fromConnection:(AVCaptureConnection *)connection {
    // Check that the metadataObjects array contains at least one object.
    if (metadataObjects != nil && [metadataObjects count] > 0) {
        AVMetadataMachineReadableCodeObject *metadataObj = [metadataObjects objectAtIndex:0];
        if ([[metadataObj type] isEqualToString:AVMetadataObjectTypeQRCode]) {
            // Flip the flag first so further callbacks on this queue bail out.
            _isReading = NO;
            NSString *text = [metadataObj stringValue];
            // BUG fix: this callback runs on a background queue, but the
            // delegate ultimately touches Qt/UI state — deliver the result
            // and perform the teardown on the main thread.
            dispatch_async(dispatch_get_main_queue(), ^{
                [delegate scanResult:text];   // send scan result to Qt
                [self stopReading];
            });
        }
    }
}
@end
OK.大概流程就这些了,添加xib文件等就不介绍了.
公司项目考虑到跨平台一直都是用 Qt 做。由于项目需求,项目上要增加一个二维码扫描功能。在安卓上可以用 QVideoProbe 抓取摄像头视频帧,再用 QZXing 解码图片,从而实现二维码扫描;但是在 iOS 上 QVideoProbe 并不支持,所以只好选择其他抓取视频帧的方法。曾考虑使用 OpenCV 抓取视频帧,但查看 iOS 文档时发现 iOS 7 以上系统直接支持二维码扫描,所以放弃了 OpenCV 抓取 + ZXing 解码的方案,改为采用 iOS 官方提供的二维码解码功能。
实现:
由于我们项目ui一直是用qml实现,但是要实现扫描二维码功能,需要调用AVFoundation中的方法,同时要显示ios中的ui显示摄像头及返回qml键.所以这里需要结合oc和qt编程.
直接上代码:
pro文件增加
ios {
    # Objective-C++ sources (compiled with the Obj-C++ front end)
    OBJECTIVE_SOURCES += IOSView.mm \
                         IOSCamera.mm
    # BUG fix: the qmake variable is HEADERS, not HEADER
    HEADERS += IOSView.h \
               IOSCamera.h \
               IOSCameraViewProtocol.h
    # Link AVFoundation; note the required space after -framework
    QMAKE_LFLAGS += -framework AVFoundation
    # Private GUI headers are needed for QPlatformNativeInterface
    QT += gui-private
}
重新qmake生成xcodeproject
IOSView.#include<QQuickItem>
IOSView.mm
IOSCameraViewProtocol.h
IOSCamera.h
// IOSCamera.mm (Objective-C++; listed under OBJECTIVE_SOURCES in the .pro)
#import "IOSCamera.h"

// Private capture-pipeline state.
@interface IOSCamera () {
    BOOL _isReading;   // YES while a capture session is running
}
// BUG fix: this file uses manual reference counting (see dealloc below),
// so the ownership attribute is retain, not the ARC-only strong.
@property (nonatomic, retain) AVCaptureSession *captureSession;
@property (nonatomic, retain) AVCaptureVideoPreviewLayer *videoPreviewLayer;
- (BOOL)startReading;
- (void)stopReading;
- (void)openQtLayer;
@end

// BUG fix: the class declared in IOSCamera.h is IOSCamera, not CameraScanView.
@implementation IOSCamera

@synthesize delegate;   // delegate for interaction with Qt

// Initialise flags; the capture session is created lazily in startReading.
- (void)viewDidLoad {
    [super viewDidLoad];
    _captureSession = nil;
    _isReading = NO;
    // NOTE(review): the original called [self loadBeepSound] here, but no
    // such method is defined anywhere in this listing — removed.
}

- (void)didReceiveMemoryWarning {
    [super didReceiveMemoryWarning];
}

#pragma mark - Memory management (MRC)

- (void)dealloc {
    [_viewPreview release];
    [super dealloc];
}

- (void)viewDidUnload {
    [self setViewPreview:nil];
    [super viewDidUnload];
}

#pragma mark - Actions

// Back button: tell Qt the user cancelled, then tear the session down.
- (IBAction)backQtApp:(id)sender {
    [delegate scanCancel];
    [self stopReading];
}

// Dismiss this camera controller so Qt's root view controller
// (and with it the QML scene) becomes visible again.
- (void)openQtLayer {
    // Bring back Qt's view controller:
    UIViewController *rvc = [[[UIApplication sharedApplication] keyWindow] rootViewController];
    [rvc dismissViewControllerAnimated:YES completion:nil];
}

// Public entry point: begin QR scanning unless a session is already running.
- (void)startScan {
    if (!_isReading) {
        if ([self startReading]) {
            NSLog(@"StartReading!!");
        }
    }
}

#pragma mark - Private method implementation

// Build and start the AVFoundation capture pipeline:
// camera input -> QR metadata output, with a preview layer in viewPreview.
// Returns NO if the camera input could not be created (no camera / denied).
- (BOOL)startReading {
    // BUG fix: initialise error so the failure path never reads garbage.
    NSError *error = nil;

    // Default video capture device (the back camera).
    AVCaptureDevice *captureDevice =
        [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];

    AVCaptureDeviceInput *input =
        [AVCaptureDeviceInput deviceInputWithDevice:captureDevice error:&error];
    if (!input) {
        // If any error occurs, log its description and don't continue.
        NSLog(@"%@", [error localizedDescription]);
        return NO;
    }

    // Initialize the capture session and attach the camera input.
    _captureSession = [[AVCaptureSession alloc] init];
    [_captureSession addInput:input];

    // Metadata output restricted to QR codes; callbacks arrive on a
    // private serial queue (metadataObjectTypes must be set after addOutput:).
    AVCaptureMetadataOutput *captureMetadataOutput = [[AVCaptureMetadataOutput alloc] init];
    [_captureSession addOutput:captureMetadataOutput];

    dispatch_queue_t dispatchQueue = dispatch_queue_create("myQueue", NULL);
    [captureMetadataOutput setMetadataObjectsDelegate:self queue:dispatchQueue];
    [captureMetadataOutput setMetadataObjectTypes:
        [NSArray arrayWithObject:AVMetadataObjectTypeQRCode]];

    // Live preview layer filling the xib's viewPreview view.
    _videoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:_captureSession];
    [_videoPreviewLayer setVideoGravity:AVLayerVideoGravityResizeAspectFill];
    [_videoPreviewLayer setFrame:_viewPreview.layer.bounds];
    [_viewPreview.layer addSublayer:_videoPreviewLayer];

    // Start video capture.
    [_captureSession startRunning];
    _isReading = YES;
    return YES;
}

// Tear down the capture pipeline and return control to the Qt UI.
- (void)stopReading {
    [_captureSession stopRunning];
    // BUG fix: under MRC, setting the ivar to nil without releasing leaked
    // the session and preview layer on every scan.
    [_captureSession release];
    _captureSession = nil;

    [_videoPreviewLayer removeFromSuperlayer];
    [_videoPreviewLayer release];
    _videoPreviewLayer = nil;

    _isReading = NO;
    [self openQtLayer];
}

#pragma mark - AVCaptureMetadataOutputObjectsDelegate method implementation

// Called on the private dispatch queue whenever metadata is detected.
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputMetadataObjects:(NSArray *)metadataObjects
       fromConnection:(AVCaptureConnection *)connection {
    // Check that the metadataObjects array contains at least one object.
    if (metadataObjects != nil && [metadataObjects count] > 0) {
        AVMetadataMachineReadableCodeObject *metadataObj = [metadataObjects objectAtIndex:0];
        if ([[metadataObj type] isEqualToString:AVMetadataObjectTypeQRCode]) {
            // Flip the flag first so further callbacks on this queue bail out.
            _isReading = NO;
            NSString *text = [metadataObj stringValue];
            // BUG fix: this callback runs on a background queue, but the
            // delegate ultimately touches Qt/UI state — deliver the result
            // and perform the teardown on the main thread.
            dispatch_async(dispatch_get_main_queue(), ^{
                [delegate scanResult:text];   // send scan result to Qt
                [self stopReading];
            });
        }
    }
}

@end
OK.大概流程就这些了,添加xib文件等就不介绍了.
相关文章推荐
- 用ZBar实现IOS上二维码扫描功能
- iOS 原生实现扫描二维码和条形码功能限制扫描区域
- iOS开发之扫描二维码功能的实现
- iOS 原生态API实现二维码的扫描功能
- iOS实现二维码的扫描功能
- iOS 二维码扫描相关功能实现
- IOS 对于ios7扫描二维码功能的实现
- iOS之使用ZbarSDK实现扫描二维码以及条形码功能
- 使用ZbarSDK实现扫描二维码以及条形码功能(iOS)
- IOS上原生代码实现二维码扫描功能
- [IPHONE] iOS使用ZbarSDK实现扫描二维码以及条形码功能
- [iOS 原生代码实现扫描二维码/条形码] AVCaptureDevice
- ios实现条形码扫描功能
- iOS 应用实现条形码扫描功能的方法
- ios实现条形码扫描功能
- ios实现条形码扫描功能
- iOS 应用实现条形码扫描功能的方法
- 基于ZXing Android实现生成二维码图片和相机扫描二维码图片即时解码的功能
- ios7下二维码功能的实现
- iOS 应用实现条形码扫描功能的方法