
Developing VR Games and a VR Player with the Google VR SDK, Part 2

2016-03-24 13:40
The previous post briefly covered how to use CardboardView; this one looks at CardboardView.StereoRenderer. It is very straightforward to use: just as with a GLSurfaceView.Renderer, you implement the relevant callbacks and do the drawing with OpenGL. The code below is written against OpenGL ES 2.0.
import android.opengl.GLES20;
import android.opengl.Matrix;
import android.util.Log;

import com.google.vrtoolkit.cardboard.CardboardView;
import com.google.vrtoolkit.cardboard.Eye;
import com.google.vrtoolkit.cardboard.HeadTransform;
import com.google.vrtoolkit.cardboard.Viewport;

import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;

import javax.microedition.khronos.egl.EGLConfig;

public class MySr implements CardboardView.StereoRenderer {
private static final String TAG = "MySr";

private static final float Z_NEAR = 0.1f;
private static final float Z_FAR = 100.0f;

private static final float CAMERA_Z = 0.01f;
private static final float TIME_DELTA = 0.3f;

private static final float YAW_LIMIT = 0.12f;
private static final float PITCH_LIMIT = 0.12f;

private static final int COORDS_PER_VERTEX = 3;

// We keep the light positioned just above the user.
private static final float[] LIGHT_POS_IN_WORLD_SPACE = new float[]{0.0f, 2.0f, 0.0f, 1.0f};

private final float[] lightPosInEyeSpace = new float[4];

private FloatBuffer cubeVertices;
private FloatBuffer cubeColors;
private FloatBuffer cubeFoundColors;
private FloatBuffer cubeNormals;

private int cubeProgram;

private int cubePositionParam;
private int cubeNormalParam;
private int cubeColorParam;
private int cubeModelParam;
private int cubeModelViewParam;
private int cubeModelViewProjectionParam;
private int cubeLightPosParam;

private float[] modelCube;
private float[] camera;
private float[] view;
private float[] headView;
private float[] modelViewProjection;
private float[] modelView;
private float[] modelPosition;
private float[] headRotation;

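// Vertex shader source: transforms each vertex by the model-view-projection matrix and computes simple per-vertex diffuse lighting from u_LightPos.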
String light_vertex = "uniform mat4 u_Model;\n"
+"uniform mat4 u_MVP;\n"
+"uniform mat4 u_MVMatrix;\n"
+"uniform vec3 u_LightPos;\n"

+"attribute vec4 a_Position;\n"
+"attribute vec4 a_Color;\n"
+"attribute vec3 a_Normal;\n"

+"varying vec4 v_Color;\n"
+"varying vec3 v_Grid;\n"

+"void main() {\n"
+"v_Grid = vec3(u_Model * a_Position);\n"

+"vec3 modelViewVertex = vec3(u_MVMatrix * a_Position);\n"
+"vec3 modelViewNormal = vec3(u_MVMatrix * vec4(a_Normal, 0.0));\n"

+"float distance = length(u_LightPos - modelViewVertex);\n"
+"vec3 lightVector = normalize(u_LightPos - modelViewVertex);\n"
+"float diffuse = max(dot(modelViewNormal, lightVector), 0.5);\n"

+"diffuse = diffuse * (1.0 / (1.0 + (0.00001 * distance * distance)));\n"
+"v_Color = a_Color * diffuse;\n"
+"gl_Position = u_MVP * a_Position;\n"
+"}\n";

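// Fragment shader source: simply outputs the interpolated per-vertex color.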
String passthrough_fragment = "precision mediump float;\n"
+"varying vec4 v_Color;\n"

+"void main() {\n"
+"gl_FragColor = v_Color;\n"
+"}\n";
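
// 36 vertices (two triangles per face) describing a 2x2x2 cube centered at the origin.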
public static final float[] CUBE_COORDS = new float[] {
// Front face
-1.0f, 1.0f, 1.0f,
-1.0f, -1.0f, 1.0f,
1.0f, 1.0f, 1.0f,
-1.0f, -1.0f, 1.0f,
1.0f, -1.0f, 1.0f,
1.0f, 1.0f, 1.0f,

// Right face
1.0f, 1.0f, 1.0f,
1.0f, -1.0f, 1.0f,
1.0f, 1.0f, -1.0f,
1.0f, -1.0f, 1.0f,
1.0f, -1.0f, -1.0f,
1.0f, 1.0f, -1.0f,

// Back face
1.0f, 1.0f, -1.0f,
1.0f, -1.0f, -1.0f,
-1.0f, 1.0f, -1.0f,
1.0f, -1.0f, -1.0f,
-1.0f, -1.0f, -1.0f,
-1.0f, 1.0f, -1.0f,

// Left face
-1.0f, 1.0f, -1.0f,
-1.0f, -1.0f, -1.0f,
-1.0f, 1.0f, 1.0f,
-1.0f, -1.0f, -1.0f,
-1.0f, -1.0f, 1.0f,
-1.0f, 1.0f, 1.0f,

// Top face
-1.0f, 1.0f, -1.0f,
-1.0f, 1.0f, 1.0f,
1.0f, 1.0f, -1.0f,
-1.0f, 1.0f, 1.0f,
1.0f, 1.0f, 1.0f,
1.0f, 1.0f, -1.0f,

// Bottom face
1.0f, -1.0f, -1.0f,
1.0f, -1.0f, 1.0f,
-1.0f, -1.0f, -1.0f,
1.0f, -1.0f, 1.0f,
-1.0f, -1.0f, 1.0f,
-1.0f, -1.0f, -1.0f,
};

public static final float[] CUBE_COLORS = new float[] {
// front, green
0f, 0.5273f, 0.2656f, 1.0f,
0f, 0.5273f, 0.2656f, 1.0f,
0f, 0.5273f, 0.2656f, 1.0f,
0f, 0.5273f, 0.2656f, 1.0f,
0f, 0.5273f, 0.2656f, 1.0f,
0f, 0.5273f, 0.2656f, 1.0f,

// right, blue
0.0f, 0.3398f, 0.9023f, 1.0f,
0.0f, 0.3398f, 0.9023f, 1.0f,
0.0f, 0.3398f, 0.9023f, 1.0f,
0.0f, 0.3398f, 0.9023f, 1.0f,
0.0f, 0.3398f, 0.9023f, 1.0f,
0.0f, 0.3398f, 0.9023f, 1.0f,

// back, also green
0f, 0.5273f, 0.2656f, 1.0f,
0f, 0.5273f, 0.2656f, 1.0f,
0f, 0.5273f, 0.2656f, 1.0f,
0f, 0.5273f, 0.2656f, 1.0f,
0f, 0.5273f, 0.2656f, 1.0f,
0f, 0.5273f, 0.2656f, 1.0f,

// left, also blue
0.0f, 0.3398f, 0.9023f, 1.0f,
0.0f, 0.3398f, 0.9023f, 1.0f,
0.0f, 0.3398f, 0.9023f, 1.0f,
0.0f, 0.3398f, 0.9023f, 1.0f,
0.0f, 0.3398f, 0.9023f, 1.0f,
0.0f, 0.3398f, 0.9023f, 1.0f,

// top, red
0.8359375f, 0.17578125f, 0.125f, 1.0f,
0.8359375f, 0.17578125f, 0.125f, 1.0f,
0.8359375f, 0.17578125f, 0.125f, 1.0f,
0.8359375f, 0.17578125f, 0.125f, 1.0f,
0.8359375f, 0.17578125f, 0.125f, 1.0f,
0.8359375f, 0.17578125f, 0.125f, 1.0f,

// bottom, also red
0.8359375f, 0.17578125f, 0.125f, 1.0f,
0.8359375f, 0.17578125f, 0.125f, 1.0f,
0.8359375f, 0.17578125f, 0.125f, 1.0f,
0.8359375f, 0.17578125f, 0.125f, 1.0f,
0.8359375f, 0.17578125f, 0.125f, 1.0f,
0.8359375f, 0.17578125f, 0.125f, 1.0f,
};

public static final float[] CUBE_FOUND_COLORS = new float[] {
// front, yellow
1.0f, 0.6523f, 0.0f, 1.0f,
1.0f, 0.6523f, 0.0f, 1.0f,
1.0f, 0.6523f, 0.0f, 1.0f,
1.0f, 0.6523f, 0.0f, 1.0f,
1.0f, 0.6523f, 0.0f, 1.0f,
1.0f, 0.6523f, 0.0f, 1.0f,

// right, yellow
1.0f, 0.6523f, 0.0f, 1.0f,
1.0f, 0.6523f, 0.0f, 1.0f,
1.0f, 0.6523f, 0.0f, 1.0f,
1.0f, 0.6523f, 0.0f, 1.0f,
1.0f, 0.6523f, 0.0f, 1.0f,
1.0f, 0.6523f, 0.0f, 1.0f,

// back, yellow
1.0f, 0.6523f, 0.0f, 1.0f,
1.0f, 0.6523f, 0.0f, 1.0f,
1.0f, 0.6523f, 0.0f, 1.0f,
1.0f, 0.6523f, 0.0f, 1.0f,
1.0f, 0.6523f, 0.0f, 1.0f,
1.0f, 0.6523f, 0.0f, 1.0f,

// left, yellow
1.0f, 0.6523f, 0.0f, 1.0f,
1.0f, 0.6523f, 0.0f, 1.0f,
1.0f, 0.6523f, 0.0f, 1.0f,
1.0f, 0.6523f, 0.0f, 1.0f,
1.0f, 0.6523f, 0.0f, 1.0f,
1.0f, 0.6523f, 0.0f, 1.0f,

// top, yellow
1.0f, 0.6523f, 0.0f, 1.0f,
1.0f, 0.6523f, 0.0f, 1.0f,
1.0f, 0.6523f, 0.0f, 1.0f,
1.0f, 0.6523f, 0.0f, 1.0f,
1.0f, 0.6523f, 0.0f, 1.0f,
1.0f, 0.6523f, 0.0f, 1.0f,

// bottom, yellow
1.0f, 0.6523f, 0.0f, 1.0f,
1.0f, 0.6523f, 0.0f, 1.0f,
1.0f, 0.6523f, 0.0f, 1.0f,
1.0f, 0.6523f, 0.0f, 1.0f,
1.0f, 0.6523f, 0.0f, 1.0f,
1.0f, 0.6523f, 0.0f, 1.0f,
};

public static final float[] CUBE_NORMALS = new float[] {
// Front face
0.0f, 0.0f, 1.0f,
0.0f, 0.0f, 1.0f,
0.0f, 0.0f, 1.0f,
0.0f, 0.0f, 1.0f,
0.0f, 0.0f, 1.0f,
0.0f, 0.0f, 1.0f,

// Right face
1.0f, 0.0f, 0.0f,
1.0f, 0.0f, 0.0f,
1.0f, 0.0f, 0.0f,
1.0f, 0.0f, 0.0f,
1.0f, 0.0f, 0.0f,
1.0f, 0.0f, 0.0f,

// Back face
0.0f, 0.0f, -1.0f,
0.0f, 0.0f, -1.0f,
0.0f, 0.0f, -1.0f,
0.0f, 0.0f, -1.0f,
0.0f, 0.0f, -1.0f,
0.0f, 0.0f, -1.0f,

// Left face
-1.0f, 0.0f, 0.0f,
-1.0f, 0.0f, 0.0f,
-1.0f, 0.0f, 0.0f,
-1.0f, 0.0f, 0.0f,
-1.0f, 0.0f, 0.0f,
-1.0f, 0.0f, 0.0f,

// Top face
0.0f, 1.0f, 0.0f,
0.0f, 1.0f, 0.0f,
0.0f, 1.0f, 0.0f,
0.0f, 1.0f, 0.0f,
0.0f, 1.0f, 0.0f,
0.0f, 1.0f, 0.0f,

// Bottom face
0.0f, -1.0f, 0.0f,
0.0f, -1.0f, 0.0f,
0.0f, -1.0f, 0.0f,
0.0f, -1.0f, 0.0f,
0.0f, -1.0f, 0.0f,
0.0f, -1.0f, 0.0f
};
public MySr() {
initMode();
}

public void initMode() {
modelCube = new float[16];
camera = new float[16];
view = new float[16];
modelViewProjection = new float[16];
modelView = new float[16];

// Model first appears directly in front of user.
//modelPosition = new float[]{0.0f, 0.0f, -MAX_MODEL_DISTANCE / 2.0f};
modelPosition = new float[]{0.0f, 0.0f, -5.0f};
headRotation = new float[4];
headView = new float[16];
}

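/**
* Compiles a shader of the given type from the supplied GLSL source and returns its handle.
*/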
private int loadGLShader(int type, String code) {
int shader = GLES20.glCreateShader(type);
GLES20.glShaderSource(shader, code);
GLES20.glCompileShader(shader);

// Get the compilation status.
final int[] compileStatus = new int[1];
GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compileStatus, 0);

// If the compilation failed, delete the shader.
if (compileStatus[0] == 0) {
Log.e(TAG, "Error compiling shader: " + GLES20.glGetShaderInfoLog(shader));
GLES20.glDeleteShader(shader);
shader = 0;
}

if (shader == 0) {
throw new RuntimeException("Error creating shader.");
}

return shader;
}
/**
* Checks if we've had an error inside of OpenGL ES, and if so what that error is.
*
* @param label Label to report in case of error.
*/
private static void checkGLError(String label) {
int error;
while ((error = GLES20.glGetError()) != GLES20.GL_NO_ERROR) {
Log.e(TAG, label + ": glError " + error);
throw new RuntimeException(label + ": glError " + error);
}
}

@Override
public void onRendererShutdown() {
Log.i(TAG, "onRendererShutdown");
}

@Override
public void onSurfaceChanged(int width, int height) {
Log.i(TAG, "onSurfaceChanged");
}

/**
* Creates the buffers we use to store information about the 3D world.
*
* <p>OpenGL doesn't use Java arrays, but rather needs data in a format it can understand.
* Hence we use ByteBuffers.
*
* @param config The EGL configuration used when creating the surface.
*/
@Override
public void onSurfaceCreated(EGLConfig config) {
Log.i(TAG, "onSurfaceCreated");
//GLES20.glClearColor(0.1f, 0.1f, 0.1f, 0.5f); // Dark background so text shows up well.
GLES20.glClearColor(1f, 1f, 1f, 1.0f);

ByteBuffer bbVertices = ByteBuffer.allocateDirect(CUBE_COORDS.length * 4);
bbVertices.order(ByteOrder.nativeOrder());
cubeVertices = bbVertices.asFloatBuffer();
cubeVertices.put(CUBE_COORDS);
cubeVertices.position(0);

ByteBuffer bbColors = ByteBuffer.allocateDirect(CUBE_COLORS.length * 4);
bbColors.order(ByteOrder.nativeOrder());
cubeColors = bbColors.asFloatBuffer();
cubeColors.put(CUBE_COLORS);
cubeColors.position(0);

ByteBuffer bbFoundColors =
ByteBuffer.allocateDirect(CUBE_FOUND_COLORS.length * 4);
bbFoundColors.order(ByteOrder.nativeOrder());
cubeFoundColors = bbFoundColors.asFloatBuffer();
cubeFoundColors.put(CUBE_FOUND_COLORS);
cubeFoundColors.position(0);

ByteBuffer bbNormals = ByteBuffer.allocateDirect(CUBE_NORMALS.length * 4);
bbNormals.order(ByteOrder.nativeOrder());
cubeNormals = bbNormals.asFloatBuffer();
cubeNormals.put(CUBE_NORMALS);
cubeNormals.position(0);

int vertexShader = loadGLShader(GLES20.GL_VERTEX_SHADER, light_vertex);
int passthroughShader = loadGLShader(GLES20.GL_FRAGMENT_SHADER, passthrough_fragment);

cubeProgram = GLES20.glCreateProgram();
GLES20.glAttachShader(cubeProgram, vertexShader);
GLES20.glAttachShader(cubeProgram, passthroughShader);
GLES20.glLinkProgram(cubeProgram);
GLES20.glUseProgram(cubeProgram);

checkGLError("Cube program");

cubePositionParam = GLES20.glGetAttribLocation(cubeProgram, "a_Position");
cubeNormalParam = GLES20.glGetAttribLocation(cubeProgram, "a_Normal");
cubeColorParam = GLES20.glGetAttribLocation(cubeProgram, "a_Color");

cubeModelParam = GLES20.glGetUniformLocation(cubeProgram, "u_Model");
cubeModelViewParam = GLES20.glGetUniformLocation(cubeProgram, "u_MVMatrix");
cubeModelViewProjectionParam = GLES20.glGetUniformLocation(cubeProgram, "u_MVP");
cubeLightPosParam = GLES20.glGetUniformLocation(cubeProgram, "u_LightPos");

GLES20.glEnableVertexAttribArray(cubePositionParam);
GLES20.glEnableVertexAttribArray(cubeNormalParam);
GLES20.glEnableVertexAttribArray(cubeColorParam);

checkGLError("Cube program params");

updateModelPosition();

checkGLError("onSurfaceCreated");
}

/**
* Updates the cube model position.
*/
private void updateModelPosition() {
Matrix.setIdentityM(modelCube, 0);
Matrix.translateM(modelCube, 0, modelPosition[0], modelPosition[1], modelPosition[2]);

checkGLError("updateCubePosition");
}

/**
* Prepares OpenGL ES before we draw a frame.
*
* @param headTransform The head transformation in the new frame.
*/
@Override
public void onNewFrame(HeadTransform headTransform) {
// Build the Model part of the ModelView matrix.
Matrix.rotateM(modelCube, 0, TIME_DELTA, 0.5f, 0.5f, 1.0f);

// Build the camera matrix and apply it to the ModelView.
Matrix.setLookAtM(camera, 0, 0.0f, 0.0f, CAMERA_Z, 0.0f, 0.0f, 0.0f, 0.0f, 1.0f, 0.0f);

headTransform.getHeadView(headView, 0);

// Update the 3d audio engine with the most recent head rotation.
headTransform.getQuaternion(headRotation, 0);

checkGLError("onReadyToDraw");
}

/**
* Draws a frame for an eye.
*
* @param eye The eye to render. Includes all required transformations.
*/
@Override
public void onDrawEye(Eye eye) {

GLES20.glEnable(GLES20.GL_DEPTH_TEST);
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);

checkGLError("colorParam");

// Apply the eye transformation to the camera.
Matrix.multiplyMM(view, 0, eye.getEyeView(), 0, camera, 0);

// Set the position of the light
Matrix.multiplyMV(lightPosInEyeSpace, 0, view, 0, LIGHT_POS_IN_WORLD_SPACE, 0);

// Build the ModelView and ModelViewProjection matrices
// for calculating cube position and light.
float[] perspective = eye.getPerspective(Z_NEAR, Z_FAR);
Matrix.multiplyMM(modelView, 0, view, 0, modelCube, 0);
Matrix.multiplyMM(modelViewProjection, 0, perspective, 0, modelView, 0);
drawCube();

}

@Override
public void onFinishFrame(Viewport viewport) {
}

/**
* Draw the cube.
*
* <p>We've set all of our transformation matrices. Now we simply pass them into the shader.
*/
public void drawCube() {

//GLES20.glClearColor(1.0f, 1.0f, 1.0f, 1.0f);
GLES20.glUseProgram(cubeProgram);

GLES20.glUniform3fv(cubeLightPosParam, 1, lightPosInEyeSpace, 0);

// Set the Model in the shader, used to calculate lighting
GLES20.glUniformMatrix4fv(cubeModelParam, 1, false, modelCube, 0);

// Set the ModelView in the shader, used to calculate lighting
GLES20.glUniformMatrix4fv(cubeModelViewParam, 1, false, modelView, 0);

// Set the position of the cube
GLES20.glVertexAttribPointer(
cubePositionParam, COORDS_PER_VERTEX, GLES20.GL_FLOAT, false, 0, cubeVertices);

// Set the ModelViewProjection matrix in the shader.
GLES20.glUniformMatrix4fv(cubeModelViewProjectionParam, 1, false, modelViewProjection, 0);

// Set the normal positions of the cube, again for shading
GLES20.glVertexAttribPointer(cubeNormalParam, 3, GLES20.GL_FLOAT, false, 0, cubeNormals);
GLES20.glVertexAttribPointer(cubeColorParam, 4, GLES20.GL_FLOAT, false, 0,
isLookingAtObject() ? cubeFoundColors : cubeColors);

GLES20.glDrawArrays(GLES20.GL_TRIANGLES, 0, 36);
checkGLError("Drawing cube");
}

/**
* Check if user is looking at object by calculating where the object is in eye-space.
*
* @return true if the user is looking at the object.
*/
public boolean isLookingAtObject() {
float[] initVec = {0, 0, 0, 1.0f};
float[] objPositionVec = new float[4];

// Convert object space to camera space. Use the headView from onNewFrame.
Matrix.multiplyMM(modelView, 0, headView, 0, modelCube, 0);
Matrix.multiplyMV(objPositionVec, 0, modelView, 0, initVec, 0);

float pitch = (float) Math.atan2(objPositionVec[1], -objPositionVec[2]);
float yaw = (float) Math.atan2(objPositionVec[0], -objPositionVec[2]);

return Math.abs(pitch) < PITCH_LIMIT && Math.abs(yaw) < YAW_LIMIT;
}
}

The code uses plain OpenGL to build a cube. Because the VR effect is added, the renderer differs a little from a GLSurfaceView.Renderer and has a few extra callbacks. For the VR effect to appear, the Activity hosting the view must call mSurfaceView.setVRModeEnabled(true); you can also add a switch so your app can toggle between VR mode and normal mode. Google's VR SDK saves a great deal of time and lets you focus on the game itself: you develop much as you would for an ordinary game, the game supports both modes, and no separate VR-only code path is needed. The SDK already implements normal (mono) rendering, split-screen VR rendering, lens distortion and distortion correction, and it wraps the phone's orientation sensors, so head tracking can be used for things such as aiming and shooting.
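As a minimal sketch of how this renderer might be hooked up, including such a VR/normal switch: the MainActivity name, R.layout.common_ui, and R.id.cardboard_view below are placeholder assumptions for illustration; only CardboardActivity, CardboardView.setRenderer, and setVRModeEnabled come from the Cardboard SDK itself.

import android.os.Bundle;

import com.google.vrtoolkit.cardboard.CardboardActivity;
import com.google.vrtoolkit.cardboard.CardboardView;

public class MainActivity extends CardboardActivity {

    private CardboardView cardboardView;
    private boolean vrMode = true;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        // R.layout.common_ui and R.id.cardboard_view are placeholder resources for this sketch.
        setContentView(R.layout.common_ui);

        // Attach our stereo renderer to the CardboardView declared in the layout.
        cardboardView = (CardboardView) findViewById(R.id.cardboard_view);
        cardboardView.setRenderer(new MySr());
        cardboardView.setVRModeEnabled(vrMode);
        setCardboardView(cardboardView);
    }

    // Simple switch between VR (split-screen, distortion-corrected) rendering and normal rendering.
    public void toggleVRMode() {
        vrMode = !vrMode;
        cardboardView.setVRModeEnabled(vrMode);
    }
}

Calling toggleVRMode() at runtime (for example from a menu item) flips the same CardboardView between split-screen stereo and ordinary full-screen rendering without recreating the renderer.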
