
OpenCV for iOS Study Notes (8) - Rendering an AR Scene with OpenGL, Part 1


Creating the OpenGL rendering layer
To use OpenGL functions in your application, you must add a drawing surface that presents the rendered scene to the user.
To encapsulate the initialization logic of this OpenGL surface, we introduce the EAGLView class:

//
//  EAGLView.h
//  OpenCvDemo
//
//  Created by apple on 13-4-24.
//  Copyright (c) 2013 hxy. All rights reserved.
//

#import <UIKit/UIKit.h>
//the view creates an OpenGL ES 2.0 context, so only the ES2 headers are needed
#import <OpenGLES/ES2/gl.h>
#import <OpenGLES/ES2/glext.h>

@class EAGLContext;
@interface EAGLView : UIView
{
@private
GLuint defaultFrameBuffer,colorRenderBuffer,depthRenderBuffer;
}
@property (nonatomic, retain) EAGLContext *context;
@property (readonly) GLint frameBufferWidth,frameBufferHeight;
//Create and activate the OpenGL ES 2.0 rendering context
- (void)initContext;
//Bind the framebuffer (creating it if needed), set the viewport and clear it
- (void)setFrameBuffer;
//Present the rendered frame on screen
- (void)presentFrameBuffer;
@end

//
//  EAGLView.m
//  OpenCvDemo
//
//  Created by apple on 13-4-24.
//  Copyright (c) 2013 hxy. All rights reserved.
//

#import "EAGLView.h"

@interface EAGLView (Private)
//Create the framebuffer with its color and depth renderbuffers
- (void)createFrameBuffer;
//Delete all buffers
- (void)deleteFrameBuffer;
@end

@implementation EAGLView
@synthesize context,frameBufferHeight,frameBufferWidth;

- (void)dealloc
{
[self deleteFrameBuffer];
if ([EAGLContext currentContext] == context)
{
[EAGLContext setCurrentContext:nil];
}
[context release];
[super dealloc];
}

+ (Class)layerClass
{
return [CAEAGLLayer class];
}

- (void)layoutSubviews
{
//The framebuffer will be re-created on the next call to setFrameBuffer
[self deleteFrameBuffer];
}

- (void)initContext
{
if (!context)
{
EAGLRenderingAPI api = kEAGLRenderingAPIOpenGLES2;
context = [[EAGLContext alloc] initWithAPI:api];
if (!context)
{
NSLog(@"画布都创建失败,还运行个闯闯!");
exit(1);
return;
}
if (![EAGLContext setCurrentContext:context])
{
NSLog(@"悲剧了塞!");
exit(1);
return;
}
}
}

- (void)setContext:(EAGLContext *)newcontext
{
if (context != newcontext)
{
[self deleteFrameBuffer];
//retain the new context and release the old one (the property is declared retain)
[context release];
context = [newcontext retain];
[EAGLContext setCurrentContext:nil];
}
}

- (CAEAGLLayer *)myLayer
{
return (CAEAGLLayer *)self.layer;
}

- (id)initWithCoder:(NSCoder *)aDecoder
{
if (self = [super initWithCoder:aDecoder])
{
CAEAGLLayer *layer = [self myLayer];
layer.opaque = TRUE;
layer.drawableProperties = [NSDictionary dictionaryWithObjectsAndKeys:
                            [NSNumber numberWithBool:FALSE], kEAGLDrawablePropertyRetainedBacking,
                            kEAGLColorFormatRGBA8, kEAGLDrawablePropertyColorFormat, nil];
[self initContext];
}
return self;
}

- (void)setFrameBuffer
{
if (context)
{
[EAGLContext setCurrentContext:context];
if (!defaultFrameBuffer)
{
[self createFrameBuffer];
}

glBindFramebuffer(GL_FRAMEBUFFER, defaultFrameBuffer);
glViewport(0, 0, frameBufferWidth, frameBufferHeight);
glClear(GL_DEPTH_BUFFER_BIT | GL_COLOR_BUFFER_BIT);
}
}

- (void)presentFrameBuffer
{
BOOL success = FALSE;
if (context)
{
[EAGLContext setCurrentContext:context];

glBindRenderbuffer(GL_RENDERBUFFER, colorRenderBuffer);
success = [context presentRenderbuffer:GL_RENDERBUFFER];
}
}

- (void)createFrameBuffer
{
//The default framebuffer has not been created yet
if (context && !defaultFrameBuffer)
{
[EAGLContext setCurrentContext:context];

//Create the default framebuffer object
glGenFramebuffers(1, &(defaultFrameBuffer));
glBindFramebuffer(GL_FRAMEBUFFER, defaultFrameBuffer);

//Create the color renderbuffer and allocate its storage from the layer
glGenRenderbuffers(1, &colorRenderBuffer);
glBindRenderbuffer(GL_RENDERBUFFER, colorRenderBuffer);

[context renderbufferStorage:GL_RENDERBUFFER fromDrawable:[self myLayer]];

glGetRenderbufferParameteriv(GL_RENDERBUFFER, GL_RENDERBUFFER_WIDTH, &frameBufferWidth);
glGetRenderbufferParameteriv(GL_RENDERBUFFER, GL_RENDERBUFFER_HEIGHT, &frameBufferHeight);

glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_RENDERBUFFER, colorRenderBuffer);
//Create the depth renderbuffer
glGenRenderbuffers(1, &depthRenderBuffer);
glBindRenderbuffer(GL_RENDERBUFFER, depthRenderBuffer);

glRenderbufferStorage(GL_RENDERBUFFER, GL_DEPTH_COMPONENT16, frameBufferWidth, frameBufferHeight);
glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_DEPTH_ATTACHMENT, GL_RENDERBUFFER, depthRenderBuffer);

//Check framebuffer completeness
if (glCheckFramebufferStatus(GL_FRAMEBUFFER)!= GL_FRAMEBUFFER_COMPLETE)
{
NSLog(@"Failed to make complete framebuffer object %x", glCheckFramebufferStatus(GL_FRAMEBUFFER));
}
}
}

- (void)deleteFrameBuffer
{
if (context)
{
[EAGLContext setCurrentContext:context];

if (defaultFrameBuffer)
{
glDeleteFramebuffers(1, &defaultFrameBuffer);
defaultFrameBuffer = 0;
}

if (colorRenderBuffer)
{
glDeleteRenderbuffers(1, &colorRenderBuffer);
colorRenderBuffer = 0;
}

if (depthRenderBuffer)
{
glDeleteRenderbuffers(1, &depthRenderBuffer);
depthRenderBuffer = 0;
}
}
}

@end
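
To make the intended usage concrete, here is a minimal sketch of how a hosting view controller could drive this view once per display refresh with a CADisplayLink. The ARViewController class, the glView outlet and the renderFrame: selector are names assumed here for illustration; they are not part of the EAGLView code above.

#import <QuartzCore/QuartzCore.h>
#import "EAGLView.h"

@interface ARViewController : UIViewController
{
CADisplayLink *displayLink;
}
//the EAGLView is assumed to be laid out in a XIB/storyboard, so -initWithCoder: configures it
@property (nonatomic, retain) IBOutlet EAGLView *glView;
@end

@implementation ARViewController
@synthesize glView;

- (void)viewDidAppear:(BOOL)animated
{
[super viewDidAppear:animated];
//drive rendering from the display refresh
displayLink = [CADisplayLink displayLinkWithTarget:self selector:@selector(renderFrame:)];
[displayLink addToRunLoop:[NSRunLoop mainRunLoop] forMode:NSDefaultRunLoopMode];
}

- (void)renderFrame:(CADisplayLink *)link
{
//bind (and lazily create) the framebuffer and clear it
[glView setFrameBuffer];
//... issue OpenGL ES draw calls here ...
//hand the color renderbuffer over to Core Animation
[glView presentFrameBuffer];
}

- (void)viewWillDisappear:(BOOL)animated
{
[displayLink invalidate];
displayLink = nil;
[super viewWillDisappear:animated];
}
@end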

Rendering the AR scene

The EAGLView above deliberately contains no methods for visualizing 3D objects. Its only job is to provide the rendering surface; keeping the responsibilities of the classes separate makes it easier to change the visualization logic later on.
To implement the augmented reality visualization, we create a separate class, SimpleVisualizationController:

@interface SimpleVisualizationController : NSObject
{
EAGLView * m_glview;
GLuint m_backgroundTextureId;
std::vector<Transformation> m_transformations;
CameraCalibration m_calibration;
CGSize m_frameSize;
}

-(id) initWithGLView:(EAGLView*)view calibration:(CameraCalibration) calibration frameSize:(CGSize) size;
//Render the AR frame (camera background plus 3D content) into the target EAGLView
-(void) drawFrame;
//Upload the latest camera frame; it will be drawn as the scene background
-(void) updateBackground:(BGRAVideoFrame) frame;
//Store the pose transformations of the detected markers for the next draw call
-(void) setTransformationList:(const std::vector<Transformation>&) transformations;

@end
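
To show where this controller sits in the overall pipeline, here is a hedged sketch of the per-frame flow. The visualizationController and markerDetector members and the frameReady: callback name are assumptions for illustration; the marker detector is assumed to expose processFrame and getTransformations as in the earlier notes of this series.

//Inside the camera frame callback of the capture pipeline (an Objective-C++ .mm file)
- (void)frameReady:(BGRAVideoFrame)frame
{
//1. Upload the camera image so it can be drawn as the scene background
[visualizationController updateBackground:frame];

//2. Detect markers and pass their pose transformations to the renderer
markerDetector->processFrame(frame);
[visualizationController setTransformationList:markerDetector->getTransformations()];

//3. Draw the background plus the 3D content for each detected marker
[visualizationController drawFrame];
}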


To be continued.