Android OpenGLES 2 从触摸坐标拾取光线,稍微关闭投影计算 [英] Android OpenGLES 2 ray picking from touch coordinates, unprojecting calculation slightly off

查看:33
本文介绍了Android OpenGLES 2 从触摸坐标拾取光线,稍微关闭投影计算的处理方法,对大家解决问题具有一定的参考价值,需要的朋友们下面随着小编来一起学习吧!

问题描述

我正在尝试通过交叉光线测试实现基于触摸坐标的对象拾取.我无法找到有关将触摸坐标转换为世界中使用的坐标系以构建此射线的信息.

到目前为止,我的理解是应用于场景中每个顶点的矩阵是:

projectionMatrix * viewMatrix * modelMatrix

这里是我反转该过程以尝试在场景中找到光线的端点以及我的绘图循环的过程,以防我只是错误地应用了不同的矩阵:

 public float[] getMouseRayProjection(float touchX, float touchY, float windowWidth, float windowHeight, float[] modelView, float[] projection){浮动[] rayDirection = 新浮动[4];float normalizedX = 2 * touchX/windowWidth - 1;float normalizedY = 1 - 2*touchY/windowHeight;浮动[] unviewMatrix = 新浮动[16];浮动[] viewMatrix = 新浮动[16];Matrix.multiplyMM(viewMatrix, 0, 投影, 0, modelView, 0);Matrix.invertM(unviewMatrix, 0, viewMatrix, 0);float[] NearPoint = multiplyMat4ByVec4(projection, new float[]{normalizedX, normalizedY, 0, 1});float[] modelviewInverse = new float[16];Matrix.invertM(modelviewInverse, 0, modelView, 0);float[] cameraPos = new float[4];cameraPos[0] = modelviewInverse[12];cameraPos[1] = modelviewInverse[13];cameraPos[2] = modelviewInverse[14];cameraPos[3] = modelviewInverse[15];rayDirection[0] = nearPoint[0] - cameraPos[0];rayDirection[1] = nearPoint[1] - cameraPos[1];rayDirection[2] = nearPoint[2] - cameraPos[2];rayDirection[3] = nearPoint[3] - cameraPos[3];返回射线方向;}public float[] multiplyMat4ByVec4(float[] matrix4, float[] vector4){float[] returnMatrix = new float[4];returnMatrix[0] = (matrix4[0] * vector4[0]) + (matrix4[1] * vector4[1]) + (matrix4[2] * vector4[2]) + (matrix4[3] * vector4[3]]);returnMatrix[1] = (matrix4[4] * vector4[0]) + (matrix4[5] * vector4[1]) + (matrix4[6] * vector4[2]) + (matrix4[7] * vector4[3]]);returnMatrix[2] = (matrix4[8] * vector4[0]) + (matrix4[9] * vector4[1]) + (matrix4[10] * vector4[2]) + (matrix4[11] * vector4[3]]);returnMatrix[3] = (matrix4[12] * vector4[0]) + (matrix4[13] * vector4[1]) + (matrix4[14] * vector4[2]) + (matrix4[15] * vector4[3]]);返回返回矩阵;}@覆盖公共无效 onDrawFrame(GL10 gl10) {GLES20.glClear(GLES20.GL_DEPTH_BUFFER_BIT | GLES20.GL_COLOR_BUFFER_BIT);长时间 = SystemClock.uptimeMillis() % 10000L;float angleInDegrees = (360.0f/10000.0f) * ((int) time);GLES20.glViewport(0, 0, (int)(width/2), (int)(height/2));Matrix.setIdentityM(mModelMatrix, 0);Matrix.setLookAtM(viewMatrix, 0, 0f, 0f, 1.5f, 0f, 0f, -5f, 0f, 1f, 
0f);//Matrix.rotateM(mModelMatrix, 0,angleInDegrees, 0.0f, 0.0f, 1.0f);drawTriangle(triangleVertices);//Matrix.translateM(mModelMatrix, 0, 1.5f, 0, -1f);//Matrix.frustumM(mProjectionMatrix, 0, left, right, -1.0f, 1.0f, 1.0f, 10.0f);Matrix.setIdentityM(mModelMatrix, 0);Matrix.setLookAtM(viewMatrix, 0, 1.5f, 0.8f, 0.5f, 0f, 0f, 0f, 0f, 1f, 0f);GLES20.glViewport((int)(width/2), (int)(height/2), (int)(width/2), (int)(height/2));drawTriangle(triangleVertices);drawIntersectionLine();/*Matrix.setLookAtM(viewMatrix, 0, 0, 1.5f, 0.5f, 0, 0, 0, 0, 0, -1f);GLES20.glViewport((int)(width/2), (int)height, (int)(width/2), (int)(height/2));drawTriangle(triangleVertices);drawIntersectionLine();*/}private void drawTriangle(final FloatBuffer triangleBuffer){三角形缓冲区.位置(位置偏移);GLES20.glVertexAttribPointer(mPositionHandle, positionDataSize, GLES20.GL_FLOAT, false, strideBytes,triangleBuffer);GLES20.glEnableVertexAttribArray(mPositionHandle);三角形缓冲区.位置(颜色偏移);GLES20.glVertexAttribPointer(mColorHandle, colorDataSize, GLES20.GL_FLOAT, false, strideBytes,triangleBuffer);GLES20.glEnableVertexAttribArray(mColorHandle);Matrix.multiplyMM(mMVPMatrix, 0, viewMatrix, 0, mModelMatrix, 0);mMVMatrix = mMVPMatrix;Matrix.multiplyMM(mMVPMatrix, 0, mProjectionMatrix, 0, mMVPMatrix, 0);GLES20.glUniformMatrix4fv(mMVPMatrixHandle, 1, false, mMVPMatrix, 0);GLES20.glDrawArrays(GLES20.GL_TRIANGLES, 0, 3);//Log.d("OpenGLES2Test", "相交光线为:" + floatArrayAsString(getCameraPos(mMVMatrix)) + " + " + floatArrayAsString(getMouseRayProjection((int)(width/2), (int)(height/2)), 1.0f, (int)width, (int)height, mMVMatrix, mProjectionMatrix)));}私有无效 drawIntersectionLine(){lineVertices.position(0);GLES20.glVertexAttribPointer(mPositionHandle, positionDataSize, GLES20.GL_FLOAT, false, lineStrideBytes, lineVertices);GLES20.glEnableVertexAttribArray(mPositionHandle);GLES20.glDrawArrays(GLES20.GL_LINES, 0, 2);}private void moveIntersectionLineEndPoint(float[] lineEndPoint){this.lineEndPoint = lineEndPoint;浮动[] lineVerticesData = 
{lineStartPoint[0], lineStartPoint[1], lineStartPoint[2],lineEndPoint[0]、lineEndPoint[1]、lineEndPoint[2]};lineVertices = ByteBuffer.allocateDirect(lineVerticesData.length * bytesPerFloat).order(ByteOrder.nativeOrder()).asFloatBuffer();lineVertices.put(lineVerticesData).position(0);}

虽然我很确定我的 4x4 矩阵乘以 4d 向量乘法是正确的,但为了以防万一,这里也是该方法:

public float[] multiplyMat4ByVec4(float[] matrix4, float[] vector4){float[] returnMatrix = new float[4];returnMatrix[0] = (matrix4[0] * vector4[0]) + (matrix4[1] * vector4[1]) + (matrix4[2] * vector4[2]) + (matrix4[3] * vector4[3]]);returnMatrix[1] = (matrix4[4] * vector4[0]) + (matrix4[5] * vector4[1]) + (matrix4[6] * vector4[2]) + (matrix4[7] * vector4[3]]);returnMatrix[2] = (matrix4[8] * vector4[0]) + (matrix4[9] * vector4[1]) + (matrix4[10] * vector4[2]) + (matrix4[11] * vector4[3]]);returnMatrix[3] = (matrix4[12] * vector4[0]) + (matrix4[13] * vector4[1]) + (matrix4[14] * vector4[2]) + (matrix4[15] * vector4[3]]);返回返回矩阵;}

此测试应用程序的目标是从几个不同的角度显示场景,以便我可以根据我的代码查看相交线的外观.我想画一条从相机的原点开始到交点结束的线,但它的表现很奇怪.端点似乎被沿 x 轴向正方向推得比它应该的更远,并且在某些地方它似乎有点......跳过,好像那个位置有一个洞或什么的.虽然我仍然记得微积分中的一些线性代数,但我记不清了,无法确切地知道我在这里做什么,而且我已经在网上搜索了许多资源,但没有运气.我希望读到这篇文章的人会比我有更多的经验来处理这个问题,并且会很友好地帮助我,或者如果还有其他我可能做错或效率低下的事情,请给我任何提示.

变量引用:矩阵都是长度为 16 的浮点数组

mProjectionMatrix = 投影矩阵mModelMatrix = 模型矩阵mMVPMatrix = 投影 * 模型视图矩阵mMVMatrix = 模型视图矩阵私人最终 FloatBuffer 三角形顶点;私人 FloatBuffer lineVertices;私人最终 int bytesPerFloat = 4;私有浮点[] viewMatrix = 新浮点[16];私有静态上下文上下文;私有 int mMVPMatrixHandle;私人 int mPositionHandle;私人 int mColorHandle;私有浮点[] mProjectionMatrix = 新浮点[16];私有浮点[] mModelMatrix = 新浮点[16];私有浮点[] mMVPMatrix = 新浮点[16];私有浮点[] mMVMatrix = 新浮点[16];私人最终 int strideBytes = 7 * bytesPerFloat;私人最终 int lineStrideBytes = 3 * bytesPerFloat;私人最终 int positionOffset = 0;私有最终 int positionDataSize = 3;私有最终 int colorOffset = 3;私有最终 int colorDataSize = 4;私人浮动宽度,高度;私有浮点[] lineStartPoint = 新浮点[]{0, 0, 1.5f};私有浮点[] lineEndPoint = 新浮点[]{0, 0, 0};

解决方案

经过一番搜索,我找到了一个以不同方式详细说明此过程的页面.现在,我不再遇到光线末端随机跳到意外位置并且终点指向它应该指向的确切位置的问题!这是我用来修复我的过程的页面:http://www.antongerdelan.net/opengl/raycasting.html

这是我的整个交叉路口测试应用程序的最终源代码.大多数相关代码位于 OpenGLRenderer 类中的 getMouseRayProjection 方法下.

MainActivity.java:

import android.opengl.GLSurfaceView;导入 android.os.Bundle;导入 android.app.Activity;导入 android.content.Context;导入 android.view.Menu;导入 android.view.MotionEvent;公共类 MainActivity 扩展 Activity {私人 MyGLSurfaceView mGLSurfaceView;@覆盖protected void onCreate(Bundle savedInstanceState) {super.onCreate(savedInstanceState);mGLSurfaceView = 新的 MyGLSurfaceView(this);mGLSurfaceView.setEGLContextClientVersion(2);mGLSurfaceView.setEGLConfigChooser(8, 8, 8, 8, 16, 0);OpenGLRenderer 渲染器 = 新的 OpenGLRenderer(this);mGLSurfaceView.setRenderer(renderer);mGLSurfaceView.renderer = 渲染器;设置内容视图(mGLSurfaceView);}@覆盖公共布尔 onCreateOptionsMenu(菜单菜单){//给菜单充气;如果它存在,这会将项目添加到操作栏.getMenuInflater().inflate(R.menu.main, menu);返回真;}@覆盖受保护的无效 onResume() {super.onResume();mGLSurfaceView.onResume();}@覆盖受保护的无效 onPause() {super.onPause();mGLSurfaceView.onPause();}}类 MyGLSurfaceView 扩展了 GLSurfaceView {公共 OpenGLRenderer 渲染器;公共浮动previousX,previousY;公共 MyGLSurfaceView(上下文上下文){超级(上下文);}@覆盖公共布尔 onTouchEvent(MotionEvent e){浮动 x = e.getX();浮动 y = e.getY();开关(e.getAction()){案例 MotionEvent.ACTION_MOVE:浮动 dx = x - 前一个 X;浮动 dy = y -previousY;renderer.onTouch(x, y);}前一个X = x;上一个Y = y;返回真;}}

OpenGLRenderer.java:

import java.io.BufferedReader;导入 java.io.IOException;导入 java.io.InputStreamReader;导入 java.nio.ByteBuffer;导入 java.nio.ByteOrder;导入 java.nio.FloatBuffer;导入 javax.microedition.khronos.egl.EGLConfig;导入 javax.microedition.khronos.opengles.GL10;导入 android.content.Context;导入 android.opengl.GLES20;导入 android.opengl.GLU;导入 android.opengl.Matrix;导入 android.opengl.GLSurfaceView;导入 android.os.SystemClock;导入 android.util.Log;公共类 OpenGLRenderer 实现 GLSurfaceView.Renderer {私人最终 FloatBuffer 三角形顶点;私人 FloatBuffer lineVertices;私人最终 int bytesPerFloat = 4;私有浮点[] viewMatrix = 新浮点[16];私有静态上下文上下文;私有 int mMVPMatrixHandle;私人 int mPositionHandle;私人 int mColorHandle;私有浮点[] mProjectionMatrix = 新浮点[16];私有浮点[] mModelMatrix = 新浮点[16];私有浮点[] mMVPMatrix = 新浮点[16];私有浮点[] mMVMatrix = 新浮点[16];私有 int[] 视口 = 新 int[4];私人最终 int strideBytes = 7 * bytesPerFloat;私人最终 int lineStrideBytes = 3 * bytesPerFloat;私人最终 int positionOffset = 0;私有最终 int positionDataSize = 3;私有最终 int colorOffset = 3;私有最终 int colorDataSize = 4;私人浮动宽度,高度;私有浮点[] lineStartPoint = 新浮点[]{0, 0, 1f};私有浮点[] lineEndPoint = 新浮点[]{0, 0, 0};私有浮点[] cameraPos = 新浮点[]{0f, 0f, 2.5f};私有浮点[] cameraLook = 新浮点[]{0f, 0f, -5f};私有浮点[] cameraUp = 新浮点[]{0f, 1f, 0f};公共 OpenGLRenderer(上下文上下文){this.context = 上下文;最终浮点[]triangleVerticesData = {-0.5f, -0.25f, 0.0f,1.0f, 0.0f, 0.0f, 1.0f,0.5f, -0.25f, 0.0f,0.0f, 0.0f, 1.0f, 1.0f,0.0f, 0.559016994f, 0.0f,0.0f、1.0f、0.0f、1.0f};triangleVertices = ByteBuffer.allocateDirect(triangleVerticesData.length * bytesPerFloat).order(ByteOrder.nativeOrder()).asFloatBuffer();triangleVertices.put(triangleVerticesData).position(0);浮动[] lineVerticesData = {lineStartPoint[0], lineStartPoint[1], lineStartPoint[2],lineEndPoint[0]、lineEndPoint[1]、lineEndPoint[2]};lineVertices = ByteBuffer.allocateDirect(lineVerticesData.length * bytesPerFloat).order(ByteOrder.nativeOrder()).asFloatBuffer();lineVertices.put(lineVerticesData).position(0);}@覆盖public void onSurfaceCreated(GL10 gl10, EGLConfig eglConfig) {GLES20.glClearColor(0.5f, 0.5f, 0.5f, 
0.5f);Matrix.setLookAtM(viewMatrix, 0, cameraPos[0], cameraPos[1], cameraPos[2], cameraLook[0], cameraLook[1], cameraLook[2], cameraUp[0], cameraUp[1], cameraUp[2]);尝试 {int vertexShaderHandle = GLES20.glCreateShader(GLES20.GL_VERTEX_SHADER);如果(顶点着色器句柄!= 0){GLES20.glShaderSource(vertexShaderHandle, readShader("vertexShader"));GLES20.glCompileShader(vertexShaderHandle);最终 int[] compileStatus = new int[1];GLES20.glGetShaderiv(vertexShaderHandle, GLES20.GL_COMPILE_STATUS, compileStatus, 0);如果(编译状态[0] == 0){GLES20.glDeleteShader(vertexShaderHandle);vertexShaderHandle = 0;}}如果(vertexShaderHandle == 0){throw new RuntimeException("创建顶点着色器时出错");}int fragmentShaderHandle = GLES20.glCreateShader(GLES20.GL_FRAGMENT_SHADER);如果(片段着色器句柄!= 0){GLES20.glShaderSource(fragmentShaderHandle, readShader("fragmentShader"));GLES20.glCompileShader(fragmentShaderHandle);最终 int[] compileStatus = new int[1];GLES20.glGetShaderiv(fragmentShaderHandle, GLES20.GL_COMPILE_STATUS, compileStatus, 0);如果(编译状态[0] == 0){GLES20.glDeleteShader(fragmentShaderHandle);fragmentShaderHandle = 0;}}如果(fragmentShaderHandle == 0){throw new RuntimeException("创建片段着色器时出错.");}int programHandle = GLES20.glCreateProgram();如果(程序句柄!= 0){GLES20.glAttachShader(programHandle, vertexShaderHandle);GLES20.glAttachShader(programHandle, fragmentShaderHandle);GLES20.glBindAttribLocation(programHandle, 0, "a_Position");GLES20.glBindAttribLocation(programHandle, 1, "a_Color");GLES20.glLinkProgram(programHandle);final int[] linkStatus = new int[1];GLES20.glGetProgramiv(programHandle, GLES20.GL_LINK_STATUS, linkStatus, 0);如果(链接状态 [0] == 0){GLES20.glDeleteProgram(programHandle);程序句柄 = 0;}}如果(程序句柄 == 0){throw new RuntimeException("创建程序时出错.");}mMVPMatrixHandle = GLES20.glGetUniformLocation(programHandle, "u_MVPMatrix");mPositionHandle = GLES20.glGetAttribLocation(programHandle, "a_Position");mColorHandle = GLES20.glGetAttribLocation(programHandle, "a_Color");GLES20.glUseProgram(programHandle);} catch (IOException e){Log.d("OpenGLES2Test", 
"无法读取着色器:" + e.getMessage());} catch (RuntimeException e){Log.d("OpenGLES2Test", e.getMessage());}GLES20.glEnable(GLES20.GL_DEPTH_TEST);GLES20.glDepthFunc(GLES20.GL_LEQUAL);GLES20.glDepthMask(真);}@覆盖public void onSurfaceChanged(GL10 gl10, int width, int height) {GLES20.glViewport(0, 0, width/2, height/2);this.width = 宽度;this.height = 高度;最终浮动比例=(浮动)宽度/高度;最终浮动左 = -ratio;最终浮动权 = 比率;最终浮动底部 = -1.0f;最终浮动顶部 = 1.0f;最终浮点数接近 = 1.0f;最终浮动远 = 10.0f;GLES20.glGetIntegerv(GLES20.GL_VIEWPORT, 视口, 0);Matrix.frustumM(mProjectionMatrix, 0, left, right, bottom, top, Near, far);}@覆盖公共无效 onDrawFrame(GL10 gl10) {GLES20.glClear(GLES20.GL_DEPTH_BUFFER_BIT | GLES20.GL_COLOR_BUFFER_BIT);长时间 = SystemClock.uptimeMillis() % 10000L;GLES20.glViewport(0, 0, (int)(width), (int)(height));Matrix.setIdentityM(mModelMatrix, 0);Matrix.setLookAtM(viewMatrix, 0, cameraPos[0], cameraPos[1], cameraPos[2], cameraLook[0], cameraLook[1], cameraLook[2], cameraUp[0], cameraUp[1], cameraUp[2]);Matrix.multiplyMM(mMVMatrix, 0, viewMatrix, 0, mModelMatrix, 0);Matrix.multiplyMM(mMVPMatrix, 0, mProjectionMatrix, 0, mMVMatrix, 0);GLES20.glUniformMatrix4fv(mMVPMatrixHandle, 1, false, mMVPMatrix, 0);drawTriangle(triangleVertices);drawIntersectionLine();}private void drawTriangle(final FloatBuffer triangleBuffer){三角形缓冲区.位置(位置偏移);GLES20.glVertexAttribPointer(mPositionHandle, positionDataSize, GLES20.GL_FLOAT, false, strideBytes,triangleBuffer);GLES20.glEnableVertexAttribArray(mPositionHandle);三角形缓冲区.位置(颜色偏移);GLES20.glVertexAttribPointer(mColorHandle, colorDataSize, GLES20.GL_FLOAT, false, strideBytes,triangleBuffer);GLES20.glEnableVertexAttribArray(mColorHandle);GLES20.glDrawArrays(GLES20.GL_TRIANGLES, 0, 3);}私有无效 drawIntersectionLine(){lineVertices.position(0);GLES20.glVertexAttribPointer(mPositionHandle, positionDataSize, GLES20.GL_FLOAT, false, lineStrideBytes, lineVertices);GLES20.glEnableVertexAttribArray(mPositionHandle);GLES20.glDrawArrays(GLES20.GL_LINES, 0, 2);}private void moveIntersectionLineEndPoint(float[] 
lineEndPoint){this.lineEndPoint = lineEndPoint;浮动[] lineVerticesData = {lineStartPoint[0], lineStartPoint[1], lineStartPoint[2],lineEndPoint[0]、lineEndPoint[1]、lineEndPoint[2]};lineVertices = ByteBuffer.allocateDirect(lineVerticesData.length * bytesPerFloat).order(ByteOrder.nativeOrder()).asFloatBuffer();lineVertices.put(lineVerticesData).position(0);}公共静态字符串 readShader(String filePath) 抛出 IOException {BufferedReader reader = new BufferedReader(new InputStreamReader(context.getAssets().open(filePath)));StringBuilder sb = new StringBuilder();字符串线;while( ( line = reader.readLine() ) != null){sb.append(line + "
");}reader.close();返回 sb.toString();}公共浮动[] getMouseRayProjection(浮动触摸X,浮动触摸Y,浮动窗口宽度,浮动窗口高度,浮动[]视图,浮动[]投影){浮动[] rayDirection = 新浮动[4];float normalizedX = 2f * touchX/windowWidth - 1f;float normalizedY = 1f - 2f*touchY/windowHeight;浮动归一化Z = 1.0f;float[] rayNDC = new float[]{normalizedX, normalizedY, normalizedZ};float[] rayClip = new float[]{rayNDC[0], rayNDC[1], -1f, 1f};float[] inverseProjection = new float[16];Matrix.invertM(inverseProjection, 0, 投影, 0);float[] rayEye =multiplyMat4ByVec4(inverseProjection, rayClip);rayClip = new float[]{rayClip[0], rayClip[1], -1f, 0f};float[] inverseView = new float[16];Matrix.invertM(inverseView, 0, view, 0);float[] rayWorld4D = multiplyMat4ByVec4(inverseView, rayEye);float[] rayWorld = new float[]{rayWorld4D[0], rayWorld4D[1], rayWorld4D[2]};rayDirection = normalizeVector3(rayWorld);返回射线方向;}public float[] normalizeVector3(float[] vector3){float[] normalizedVector = new float[3];浮点幅度 = (float) Math.sqrt((vector3[0] * vector3[0]) + (vector3[1] * vector3[1]) + (vector3[2] * vector3[2]));normalizedVector[0] = vector3[0]/幅度;normalizedVector[1] = vector3[1]/幅度;normalizedVector[2] = vector3[2]/幅度;返回归一化向量;}/*公共浮动[] getMouseRayProjection(浮动触摸X,浮动触摸Y,浮动窗口宽度,浮动窗口高度,浮动[]模型视图,浮动[]投影){浮动[] rayDirection = 新浮动[4];float normalizedX = 2 * touchX/windowWidth - 1;float normalizedY = 1 - 2*touchY/windowHeight;浮动[] unviewMatrix = 新浮动[16];浮动[] viewMatrix = 新浮动[16];Matrix.multiplyMM(viewMatrix, 0, 投影, 0, modelView, 0);Matrix.invertM(unviewMatrix, 0, viewMatrix, 0);float[] nearPoint = multiplyMat4ByVec4(unviewMatrix, new float[]{normalizedX, normalizedY, 0, 1});float[] modelviewInverse = new float[16];Matrix.invertM(modelviewInverse, 0, modelView, 0);float[] cameraPos = new float[4];cameraPos[0] = modelviewInverse[12];cameraPos[1] = modelviewInverse[13];cameraPos[2] = modelviewInverse[14];cameraPos[3] = modelviewInverse[15];rayDirection[0] = (nearPoint[0] - cameraPos[0]);rayDirection[1] = (nearPoint[1] - cameraPos[1]);rayDirection[2] = (nearPoint[2] - 
cameraPos[2]);rayDirection[3] = (nearPoint[3] - cameraPos[3]);返回射线方向;}*//*公共浮点数[] getOGLPosition(int x, int y){GLU.gluUnProject(x, y, 0, , modelOffset, project, projectOffset, view, viewOffset, obj, objOffset)}*/public float[] getCameraPos(float[] modelView){float[] modelviewInverse = new float[16];Matrix.invertM(modelviewInverse, 0, modelView, 0);float[] cameraPos = new float[4];cameraPos[0] = modelviewInverse[12];cameraPos[1] = modelviewInverse[13];cameraPos[2] = modelviewInverse[14];cameraPos[3] = modelviewInverse[15];返回相机位置;}公共字符串 floatArrayAsString(float[] 数组){StringBuilder sb = new StringBuilder();sb.append("[");for (Float f : 数组){sb.append(f + ", ");}sb.deleteCharAt(sb.length() - 1);sb.deleteCharAt(sb.length() - 1);sb.append("]");返回 sb.toString();}public float[] getInverseMatrix(float[] originalMatrix){float[] inverseMatrix = new float[16];Matrix.invertM(inverseMatrix, 0, originalMatrix, 0);返回逆矩阵;}public float[] multiplyMat4ByVec4(float[] matrix4, float[] vector4){float[] returnMatrix = new float[4];returnMatrix[0] = (matrix4[0] * vector4[0]) + (matrix4[1] * vector4[1]) + (matrix4[2] * vector4[2]) + (matrix4[3] * vector4[3]]);returnMatrix[1] = (matrix4[4] * vector4[0]) + (matrix4[5] * vector4[1]) + (matrix4[6] * vector4[2]) + (matrix4[7] * vector4[3]]);returnMatrix[2] = (matrix4[8] * vector4[0]) + (matrix4[9] * vector4[1]) + (matrix4[10] * vector4[2]) + (matrix4[11] * vector4[3]]);returnMatrix[3] = (matrix4[12] * vector4[0]) + (matrix4[13] * vector4[1]) + (matrix4[14] * vector4[2]) + (matrix4[15] * vector4[3]]);返回返回矩阵;}公共无效onTouch(浮动touchX,浮动touchY){float[] mouseRayProjection = getMouseRayProjection(touchX, touchY, width, height, mMVMatrix, mProjectionMatrix);Log.d("OpenGLES2Test", "鼠标射线:" + floatArrayAsString(mouseRayProjection));//Log.d("OpenGLES2Test", "ModelView:" + floatArrayAsString(mMVMatrix));//Log.d("OpenGLES2Test", "ModelViewInverse: " + floatArrayAsString(getInverseMatrix(mMVMatrix)));//Log.d("OpenGLES2Test", "鼠标坐标:" + touchX + ", " + 
touchY);//Log.d("OpenGLES2Test", "光线坐标:" + mouseRayProjection[0] + ", " + mouseRayProjection[1] + ", " + mouseRayProjection[2] + ", " + mouseRayProjection[3]);moveIntersectionLineEndPoint(mouseRayProjection);}}

片段着色器:

precision mediump float;不同的 vec4 v_Color;无效主(){gl_FragColor = v_Color;}

顶点着色器:

uniform mat4 u_MVPMatrix;属性 vec4 a_Position;属性 vec4 a_Color;不同的 vec4 v_Color;无效主(){v_Color = a_Color;gl_Position = u_MVPMatrix * a_Position;}

I am trying to implement object picking based on touch coordinates via an intersecting ray test. I am having trouble finding information on converting the touch coordinates to the coordinate system used in the world in order to construct this ray.

My understanding so far is that the matrix that is applied to each vertex in the scene is:

projectionMatrix * viewMatrix * modelMatrix

Here is my process for reversing that transformation in an attempt to find the ray's endpoint in the scene, as well as my drawing loop in case I'm simply applying the different matrices incorrectly:

    /**
     * Builds a world-space picking ray from a screen touch.
     *
     * @param touchX       touch x in window (pixel) coordinates
     * @param touchY       touch y in window (pixel) coordinates, origin top-left
     * @param windowWidth  viewport width in pixels
     * @param windowHeight viewport height in pixels
     * @param modelView    combined modelview matrix (column-major, length 16)
     * @param projection   projection matrix (column-major, length 16)
     * @return length-4 ray direction from the camera position toward the
     *         unprojected near-plane point (not normalized)
     */
    public float[] getMouseRayProjection(float touchX, float touchY, float windowWidth, float windowHeight, float[] modelView, float[] projection)
        {
        float[] rayDirection = new float[4];

        // Window coordinates -> normalized device coordinates in [-1, 1].
        // The y axis is flipped because window y grows downward.
        float normalizedX = 2 * touchX/windowWidth - 1;
        float normalizedY = 1 - 2*touchY/windowHeight;

        // Invert (projection * modelView) so NDC points can be mapped back
        // into world space.
        float[] unviewMatrix = new float[16];
        float[] viewProjMatrix = new float[16];
        Matrix.multiplyMM(viewProjMatrix, 0, projection, 0, modelView, 0);
        Matrix.invertM(unviewMatrix, 0, viewProjMatrix, 0);

        // BUG FIX: the original multiplied by the forward `projection` matrix
        // and never used the inverse it had just computed. Unprojection must
        // go through the inverse view-projection matrix.
        float[] nearPoint = multiplyMat4ByVec4(unviewMatrix, new float[]{normalizedX, normalizedY, 0, 1});

        // BUG FIX: perform the perspective divide; without it the unprojected
        // point is scaled by w and the ray end drifts off target.
        if (nearPoint[3] != 0)
            {
            nearPoint[0] /= nearPoint[3];
            nearPoint[1] /= nearPoint[3];
            nearPoint[2] /= nearPoint[3];
            nearPoint[3] = 1;
            }

        // The camera position in world space is the translation column of the
        // inverted modelview matrix (Android matrices are column-major).
        float[] modelviewInverse = new float[16];
        Matrix.invertM(modelviewInverse, 0, modelView, 0);

        float[] cameraPos = new float[4];
        cameraPos[0] = modelviewInverse[12];
        cameraPos[1] = modelviewInverse[13];
        cameraPos[2] = modelviewInverse[14];
        cameraPos[3] = modelviewInverse[15];

        // Ray direction = near-plane point minus camera position.
        rayDirection[0] = nearPoint[0] - cameraPos[0];
        rayDirection[1] = nearPoint[1] - cameraPos[1];
        rayDirection[2] = nearPoint[2] - cameraPos[2];
        rayDirection[3] = nearPoint[3] - cameraPos[3];

        return rayDirection;
        }

    /**
     * Multiplies a 4x4 matrix (flat array of 16 floats, indexed as rows of
     * four) by a 4-component vector and returns the resulting 4-vector.
     *
     * NOTE(review): android.opengl.Matrix stores matrices column-major;
     * indexing the array as 4*row+col treats it as row-major, so for an
     * Android matrix this effectively computes transpose(M) * v — confirm
     * this is the intended convention.
     */
    public float[] multiplyMat4ByVec4(float[] matrix4, float[] vector4)
        {
        float[] result = new float[4];

        for (int row = 0; row < 4; row++)
            {
            float sum = 0f;
            for (int col = 0; col < 4; col++)
                {
                sum += matrix4[4 * row + col] * vector4[col];
                }
            result[row] = sum;
            }

        return result;
        }

    @Override
    public void onDrawFrame(GL10 gl10) {
        // Clear both buffers each frame; depth testing is enabled at setup.
        GLES20.glClear(GLES20.GL_DEPTH_BUFFER_BIT | GLES20.GL_COLOR_BUFFER_BIT);

        // Derive a 0..360 degree angle from a 10-second wall-clock cycle.
        // Currently only used by the commented-out rotation below.
        long time = SystemClock.uptimeMillis() % 10000L;
        float angleInDegrees = (360.0f / 10000.0f) * ((int) time);

        // First view: bottom-left quarter of the window, camera on the +z
        // axis looking down -z.
        GLES20.glViewport(0, 0, (int)(width/2), (int)(height/2));
        Matrix.setIdentityM(mModelMatrix, 0);
        Matrix.setLookAtM(viewMatrix, 0, 0f, 0f, 1.5f, 0f, 0f, -5f, 0f, 1f, 0f);
        //Matrix.rotateM(mModelMatrix, 0, angleInDegrees, 0.0f, 0.0f, 1.0f);
        drawTriangle(triangleVertices);

        // Second view: top-right quarter, camera offset to the side so the
        // intersection line can be inspected from a different angle.
        //Matrix.translateM(mModelMatrix, 0, 1.5f, 0, -1f);
        //Matrix.frustumM(mProjectionMatrix, 0, left, right, -1.0f, 1.0f, 1.0f, 10.0f);
        Matrix.setIdentityM(mModelMatrix, 0);
        Matrix.setLookAtM(viewMatrix, 0, 1.5f, 0.8f, 0.5f, 0f, 0f, 0f, 0f, 1f, 0f);
        GLES20.glViewport((int)(width/2), (int)(height/2), (int)(width/2), (int)(height/2));
        drawTriangle(triangleVertices);
        drawIntersectionLine();

        // Third (disabled) view: top-down camera for further debugging.
        /*
        Matrix.setLookAtM(viewMatrix, 0, 0, 1.5f, 0.5f, 0, 0, 0, 0, 0, -1f);
        GLES20.glViewport((int)(width/2), (int)height, (int)(width/2), (int)(height/2));
        drawTriangle(triangleVertices);
        drawIntersectionLine();
        */
        }

    /**
     * Uploads the triangle's interleaved position/color attributes, rebuilds
     * the MVP matrix from the current model/view/projection state, and issues
     * the draw call.
     *
     * @param triangleBuffer interleaved buffer: 3 position floats followed by
     *                       4 color floats per vertex ({@code strideBytes})
     */
    private void drawTriangle(final FloatBuffer triangleBuffer)
    {
        // Position attribute (3 floats at positionOffset).
        triangleBuffer.position(positionOffset);
        GLES20.glVertexAttribPointer(mPositionHandle, positionDataSize, GLES20.GL_FLOAT, false, strideBytes, triangleBuffer);
        GLES20.glEnableVertexAttribArray(mPositionHandle);

        // Color attribute (4 floats at colorOffset within the same buffer).
        triangleBuffer.position(colorOffset);
        GLES20.glVertexAttribPointer(mColorHandle, colorDataSize, GLES20.GL_FLOAT, false, strideBytes, triangleBuffer);
        GLES20.glEnableVertexAttribArray(mColorHandle);

        // BUG FIX: the original did `mMVMatrix = mMVPMatrix;` (aliasing the
        // two arrays) and then called Matrix.multiplyMM with mMVPMatrix as
        // both an operand and the result, which multiplyMM does not support.
        // Compute the modelview into its own array, then the MVP from it.
        Matrix.multiplyMM(mMVMatrix, 0, viewMatrix, 0, mModelMatrix, 0);
        Matrix.multiplyMM(mMVPMatrix, 0, mProjectionMatrix, 0, mMVMatrix, 0);

        GLES20.glUniformMatrix4fv(mMVPMatrixHandle, 1, false, mMVPMatrix, 0);
        GLES20.glDrawArrays(GLES20.GL_TRIANGLES, 0, 3);
    }

    /**
     * Draws the two-vertex debug line (camera origin to picked point) using
     * the position attribute only; color state is whatever the last triangle
     * draw left bound.
     */
    private void drawIntersectionLine()
    {
        lineVertices.position(0);
        GLES20.glVertexAttribPointer(mPositionHandle, positionDataSize, GLES20.GL_FLOAT, false, lineStrideBytes, lineVertices);
        GLES20.glEnableVertexAttribArray(mPositionHandle);
        GLES20.glDrawArrays(GLES20.GL_LINES, 0, 2);
    }

    /**
     * Replaces the debug line's end point and rebuilds the native vertex
     * buffer so the next drawIntersectionLine() call renders the new segment.
     *
     * @param lineEndPoint new end point as {x, y, z}
     */
    private void moveIntersectionLineEndPoint(float[] lineEndPoint)
    {
        this.lineEndPoint = lineEndPoint;

        final float[] vertices = new float[] {
            lineStartPoint[0], lineStartPoint[1], lineStartPoint[2],
            lineEndPoint[0], lineEndPoint[1], lineEndPoint[2]
        };

        lineVertices = ByteBuffer
                .allocateDirect(vertices.length * bytesPerFloat)
                .order(ByteOrder.nativeOrder())
                .asFloatBuffer();
        lineVertices.put(vertices).position(0);
    }

Although I'm pretty sure my 4x4-matrix-by-4D-vector multiplication method is correct, here is that method as well, just in case:

/**
 * Returns the product of a 4x4 matrix (16 floats, read as four rows of four)
 * and a 4-component vector.
 *
 * NOTE(review): android.opengl.Matrix arrays are column-major; reading them
 * as 4*row+col means this computes transpose(M) * v for an Android matrix —
 * confirm that convention is intended at the call sites.
 */
public float[] multiplyMat4ByVec4(float[] matrix4, float[] vector4)
    {
        float[] product = new float[4];

        for (int row = 0; row < 4; row++)
            {
            final int base = row * 4;
            product[row] = matrix4[base] * vector4[0]
                         + matrix4[base + 1] * vector4[1]
                         + matrix4[base + 2] * vector4[2]
                         + matrix4[base + 3] * vector4[3];
            }

        return product;
    }

The goal of this test app is to show the scene from a few separate angles so that I can see how the intersection line looks based on my code. I wanted to draw the line starting at the camera's origin and ending at the intersection point, but it's acting oddly. The endpoint seems to be pushed farther along the x axis in the positive direction than it should be, and in some spots it seems to sort of...skip, as if there were a hole at that location or something. Although I still remember a bit of linear algebra from calculus, I don't remember enough to know exactly what I'm doing here, and I've scoured through many of the resources online with no luck. I'm hoping someone who reads this will have more experience dealing with this than I do and will be kind enough to help me, or give me any tips if there's something else that I may be doing wrong or in an inefficient way.

Variable Reference: Matrices are all float arrays of length 16

mProjectionMatrix = projection matrix

mModelMatrix = model matrix

mMVPMatrix = projection * modelview matrix

mMVMatrix = modelview matrix


    private final FloatBuffer triangleVertices;

    private FloatBuffer lineVertices;

    private final int bytesPerFloat = 4;

    private float[] viewMatrix = new float[16];

    private static Context context;

    private int mMVPMatrixHandle;

    private int mPositionHandle;

    private int mColorHandle;

    private float[] mProjectionMatrix = new float[16];

    private float[] mModelMatrix = new float[16];

    private float[] mMVPMatrix = new float[16];

    private float[] mMVMatrix = new float[16];

    private final int strideBytes = 7 * bytesPerFloat;
    private final int lineStrideBytes = 3 * bytesPerFloat;

    private final int positionOffset = 0;

    private final int positionDataSize = 3;

    private final int colorOffset = 3;

    private final int colorDataSize = 4;

    private float width, height;

    private float[] lineStartPoint = new float[]{0, 0, 1.5f};

    private float[] lineEndPoint = new float[]{0, 0, 0};

解决方案

After some searching, I found a page that details this process in a different manner. Now I no longer have the issue with the end of the ray jumping to an unexpected position at random times and the end point points to the exact location it should! Here is the page I used to fix my process: http://www.antongerdelan.net/opengl/raycasting.html

And here is my final source code for the entire intersection testing app. Most of the relevant code is within the OpenGLRenderer class under the getMouseRayProjection method.

MainActivity.java:

import android.opengl.GLSurfaceView;
import android.os.Bundle;
import android.app.Activity;
import android.content.Context;
import android.view.Menu;
import android.view.MotionEvent;

/**
 * Entry activity: hosts a GLES 2.0 surface view and wires it to the
 * OpenGLRenderer that performs the ray-picking demo.
 */
public class MainActivity extends Activity {

    private MyGLSurfaceView mGLSurfaceView;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);

        // Build the GL surface: GLES 2.0 context, RGBA8888 + 16-bit depth.
        final MyGLSurfaceView surfaceView = new MyGLSurfaceView(this);
        surfaceView.setEGLContextClientVersion(2);
        surfaceView.setEGLConfigChooser(8, 8, 8, 8, 16, 0);

        // The view keeps a direct reference so touch events can reach the renderer.
        final OpenGLRenderer renderer = new OpenGLRenderer(this);
        surfaceView.setRenderer(renderer);
        surfaceView.renderer = renderer;

        mGLSurfaceView = surfaceView;
        setContentView(surfaceView);
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        // Inflate the menu; this adds items to the action bar if it is present.
        getMenuInflater().inflate(R.menu.main, menu);
        return true;
    }

    // GLSurfaceView must be paused/resumed in lockstep with the activity
    // lifecycle or the GL thread keeps running in the background.
    @Override
    protected void onResume() {
        super.onResume();
        mGLSurfaceView.onResume();
    }

    @Override
    protected void onPause() {
        super.onPause();
        mGLSurfaceView.onPause();
    }
}

/**
 * GLSurfaceView subclass that forwards touch-move coordinates to the
 * renderer so it can cast a picking ray from the touched pixel.
 */
class MyGLSurfaceView extends GLSurfaceView {

    /** Renderer that receives touch coordinates; set by MainActivity. */
    public OpenGLRenderer renderer;

    public float previousX, previousY;

    public MyGLSurfaceView(Context context)
    {
        super(context);
    }

    @Override
    public boolean onTouchEvent(MotionEvent e)
    {
        float x = e.getX();
        float y = e.getY();

        switch(e.getAction()) {
        case MotionEvent.ACTION_MOVE:
            // FIX: removed dx/dy deltas that were computed but never used.
            renderer.onTouch(x, y);
            break;
        }

        // Remember the last position for potential delta-based gestures.
        previousX = x;
        previousY = y;
        // Claim the event so the rest of the gesture stream is delivered.
        return true;
    }
}

OpenGLRenderer.java:

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;

import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;

import android.content.Context;
import android.opengl.GLES20;
import android.opengl.GLU;
import android.opengl.Matrix;
import android.opengl.GLSurfaceView;
import android.os.SystemClock;
import android.util.Log;

public class OpenGLRenderer implements GLSurfaceView.Renderer {

    private final FloatBuffer triangleVertices;

    private FloatBuffer lineVertices;

    private final int bytesPerFloat = 4;

    private float[] viewMatrix = new float[16];

    private static Context context;

    private int mMVPMatrixHandle;

    private int mPositionHandle;

    private int mColorHandle;

    private float[] mProjectionMatrix = new float[16];

    private float[] mModelMatrix = new float[16];

    private float[] mMVPMatrix = new float[16];

    private float[] mMVMatrix = new float[16];

    private int[] viewport = new int[4];

    private final int strideBytes = 7 * bytesPerFloat;
    private final int lineStrideBytes = 3 * bytesPerFloat;

    private final int positionOffset = 0;

    private final int positionDataSize = 3;

    private final int colorOffset = 3;

    private final int colorDataSize = 4;

    private float width, height;

    private float[] lineStartPoint = new float[]{0, 0, 1f};

    private float[] lineEndPoint = new float[]{0, 0, 0};

    private float[] cameraPos = new float[]{0f, 0f, 2.5f};
    private float[] cameraLook = new float[]{0f, 0f, -5f};
    private float[] cameraUp = new float[]{0f, 1f, 0f};

    public OpenGLRenderer(Context context) {
        this.context = context;

        final float[] triangleVerticesData = {
                -0.5f, -0.25f, 0.0f,
                1.0f, 0.0f, 0.0f, 1.0f,

                0.5f, -0.25f, 0.0f,
                0.0f, 0.0f, 1.0f, 1.0f,

                0.0f, 0.559016994f, 0.0f,
                0.0f, 1.0f, 0.0f, 1.0f
        };

        triangleVertices = ByteBuffer.allocateDirect(triangleVerticesData.length * bytesPerFloat).order(ByteOrder.nativeOrder()).asFloatBuffer();
        triangleVertices.put(triangleVerticesData).position(0);

        float[] lineVerticesData = {
            lineStartPoint[0], lineStartPoint[1], lineStartPoint[2],
            lineEndPoint[0], lineEndPoint[1], lineEndPoint[2]
        };
        lineVertices = ByteBuffer.allocateDirect(lineVerticesData.length * bytesPerFloat).order(ByteOrder.nativeOrder()).asFloatBuffer();
        lineVertices.put(lineVerticesData).position(0);
    }

    @Override
    public void onSurfaceCreated(GL10 gl10, EGLConfig eglConfig) {
        GLES20.glClearColor(0.5f, 0.5f, 0.5f, 0.5f);

        Matrix.setLookAtM(viewMatrix, 0, cameraPos[0], cameraPos[1], cameraPos[2], cameraLook[0], cameraLook[1], cameraLook[2], cameraUp[0], cameraUp[1], cameraUp[2]);

        try {
            int vertexShaderHandle = GLES20.glCreateShader(GLES20.GL_VERTEX_SHADER);

            if (vertexShaderHandle != 0)
            {
                GLES20.glShaderSource(vertexShaderHandle, readShader("vertexShader"));

                GLES20.glCompileShader(vertexShaderHandle);

                final int[] compileStatus = new int[1];
                GLES20.glGetShaderiv(vertexShaderHandle, GLES20.GL_COMPILE_STATUS, compileStatus, 0);

                if (compileStatus[0] == 0)
                {
                    GLES20.glDeleteShader(vertexShaderHandle);
                    vertexShaderHandle = 0;
                }
            }

            if (vertexShaderHandle == 0)
            {
                throw new RuntimeException("Error creating vertex shader");
            }

            int fragmentShaderHandle = GLES20.glCreateShader(GLES20.GL_FRAGMENT_SHADER);

            if (fragmentShaderHandle != 0)
            {
                GLES20.glShaderSource(fragmentShaderHandle, readShader("fragmentShader"));

                GLES20.glCompileShader(fragmentShaderHandle);

                final int[] compileStatus = new int[1];
                GLES20.glGetShaderiv(fragmentShaderHandle, GLES20.GL_COMPILE_STATUS, compileStatus, 0);

                if (compileStatus[0] == 0)
                {
                    GLES20.glDeleteShader(fragmentShaderHandle);
                    fragmentShaderHandle = 0;
                }
            }
            if (fragmentShaderHandle == 0)
            {
                throw new RuntimeException("Error creating fragment shader.");
            }

            int programHandle = GLES20.glCreateProgram();

            if (programHandle != 0)
            {
                GLES20.glAttachShader(programHandle, vertexShaderHandle);
                GLES20.glAttachShader(programHandle, fragmentShaderHandle);

                GLES20.glBindAttribLocation(programHandle, 0, "a_Position");
                GLES20.glBindAttribLocation(programHandle, 1, "a_Color");

                GLES20.glLinkProgram(programHandle);

                final int[] linkStatus = new int[1];
                GLES20.glGetProgramiv(programHandle, GLES20.GL_LINK_STATUS, linkStatus, 0);

                if (linkStatus[0] == 0)
                {
                    GLES20.glDeleteProgram(programHandle);
                    programHandle = 0;
                }
            }

            if (programHandle == 0)
            {
                throw new RuntimeException("Error creating program.");
            }

            mMVPMatrixHandle = GLES20.glGetUniformLocation(programHandle, "u_MVPMatrix");
            mPositionHandle = GLES20.glGetAttribLocation(programHandle, "a_Position");
            mColorHandle = GLES20.glGetAttribLocation(programHandle, "a_Color");

            GLES20.glUseProgram(programHandle);
        } catch (IOException e)
        {
            Log.d("OpenGLES2Test", "The shader could not be read: " + e.getMessage());
        } catch (RuntimeException e)
        {
            Log.d("OpenGLES2Test", e.getMessage());
        }

        GLES20.glEnable(GLES20.GL_DEPTH_TEST);
        GLES20.glDepthFunc(GLES20.GL_LEQUAL);
        GLES20.glDepthMask(true);
    }

    @Override
    public void onSurfaceChanged(GL10 gl10, int width, int height) {
        GLES20.glViewport(0, 0, width/2, height/2);

        this.width = width;
        this.height = height;

        final float ratio = (float) width / height;
        final float left = -ratio;
        final float right = ratio;
        final float bottom = -1.0f;
        final float top = 1.0f;
        final float near = 1.0f;
        final float far = 10.0f;

        GLES20.glGetIntegerv(GLES20.GL_VIEWPORT, viewport, 0);

        Matrix.frustumM(mProjectionMatrix, 0, left, right, bottom, top, near, far);
    }

    @Override
    public void onDrawFrame(GL10 gl10) {
        GLES20.glClear(GLES20.GL_DEPTH_BUFFER_BIT | GLES20.GL_COLOR_BUFFER_BIT);

        long time = SystemClock.uptimeMillis() % 10000L;

        GLES20.glViewport(0, 0, (int)(width), (int)(height));
        Matrix.setIdentityM(mModelMatrix, 0);
        Matrix.setLookAtM(viewMatrix, 0, cameraPos[0], cameraPos[1], cameraPos[2], cameraLook[0], cameraLook[1], cameraLook[2], cameraUp[0], cameraUp[1], cameraUp[2]);

        Matrix.multiplyMM(mMVMatrix, 0, viewMatrix, 0, mModelMatrix, 0);

        Matrix.multiplyMM(mMVPMatrix, 0, mProjectionMatrix, 0, mMVMatrix, 0);

        GLES20.glUniformMatrix4fv(mMVPMatrixHandle, 1, false, mMVPMatrix, 0);

        drawTriangle(triangleVertices);
        drawIntersectionLine();
    }

    private void drawTriangle(final FloatBuffer triangleBuffer)
    {
        triangleBuffer.position(positionOffset);
        GLES20.glVertexAttribPointer(mPositionHandle, positionDataSize, GLES20.GL_FLOAT, false, strideBytes, triangleBuffer);
        GLES20.glEnableVertexAttribArray(mPositionHandle);

        triangleBuffer.position(colorOffset);
        GLES20.glVertexAttribPointer(mColorHandle, colorDataSize, GLES20.GL_FLOAT, false, strideBytes, triangleBuffer);
        GLES20.glEnableVertexAttribArray(mColorHandle);

        GLES20.glDrawArrays(GLES20.GL_TRIANGLES, 0, 3);
    }

    private void drawIntersectionLine()
    {
        lineVertices.position(0);
        GLES20.glVertexAttribPointer(mPositionHandle, positionDataSize, GLES20.GL_FLOAT, false, lineStrideBytes, lineVertices);
        GLES20.glEnableVertexAttribArray(mPositionHandle);
        GLES20.glDrawArrays(GLES20.GL_LINES, 0, 2);
    }

    private void moveIntersectionLineEndPoint(float[] lineEndPoint)
    {
        this.lineEndPoint = lineEndPoint;

        float[] lineVerticesData = {
            lineStartPoint[0], lineStartPoint[1], lineStartPoint[2],
            lineEndPoint[0], lineEndPoint[1], lineEndPoint[2]
        };
        lineVertices = ByteBuffer.allocateDirect(lineVerticesData.length * bytesPerFloat).order(ByteOrder.nativeOrder()).asFloatBuffer();
        lineVertices.put(lineVerticesData).position(0);
    }

    public static String readShader(String filePath) throws IOException {
        BufferedReader reader = new BufferedReader(new InputStreamReader(context.getAssets().open(filePath)));
        StringBuilder sb = new StringBuilder();
        String line;
        while( ( line = reader.readLine() ) != null)
        {
            sb.append(line + "
");
        }
        reader.close();
        return sb.toString();
    }

    public float[] getMouseRayProjection(float touchX, float touchY, float windowWidth, float windowHeight, float[] view, float[] projection)
    {
        float[] rayDirection = new float[4];

        float normalizedX = 2f * touchX/windowWidth - 1f;
        float normalizedY = 1f - 2f*touchY/windowHeight;
        float normalizedZ = 1.0f;

        float[] rayNDC = new float[]{normalizedX, normalizedY, normalizedZ};

        float[] rayClip = new float[]{rayNDC[0], rayNDC[1], -1f, 1f};

        float[] inverseProjection = new float[16];
        Matrix.invertM(inverseProjection, 0, projection, 0);
        float[] rayEye = multiplyMat4ByVec4(inverseProjection, rayClip);

        rayClip = new float[]{rayClip[0], rayClip[1], -1f, 0f};

        float[] inverseView = new float[16];
        Matrix.invertM(inverseView, 0, view, 0);
        float[] rayWorld4D = multiplyMat4ByVec4(inverseView, rayEye);
        float[] rayWorld = new float[]{rayWorld4D[0], rayWorld4D[1], rayWorld4D[2]};

        rayDirection = normalizeVector3(rayWorld);

        return rayDirection;
    }

    public float[] normalizeVector3(float[] vector3)
    {
        float[] normalizedVector = new float[3];
        float magnitude = (float) Math.sqrt((vector3[0] * vector3[0]) + (vector3[1] * vector3[1]) + (vector3[2] * vector3[2]));
        normalizedVector[0] = vector3[0] / magnitude;
        normalizedVector[1] = vector3[1] / magnitude;
        normalizedVector[2] = vector3[2] / magnitude;
        return normalizedVector;
    }

    /*
        public float[] getMouseRayProjection(float touchX, float touchY, float windowWidth, float windowHeight, float[] modelView, float[] projection)
        {
            float[] rayDirection = new float[4];

            float normalizedX = 2 * touchX/windowWidth - 1;
            float normalizedY = 1 - 2*touchY/windowHeight;

            float[] unviewMatrix = new float[16];
            float[] viewMatrix = new float[16];
            Matrix.multiplyMM(viewMatrix, 0, projection, 0, modelView, 0);
            Matrix.invertM(unviewMatrix, 0, viewMatrix, 0);

            float[] nearPoint = multiplyMat4ByVec4(unviewMatrix, new float[]{normalizedX, normalizedY, 0, 1});
            float[] modelviewInverse = new float[16];
            Matrix.invertM(modelviewInverse, 0, modelView, 0);

            float[] cameraPos = new float[4];
            cameraPos[0] = modelviewInverse[12];
            cameraPos[1] = modelviewInverse[13];
            cameraPos[2] = modelviewInverse[14];
            cameraPos[3] = modelviewInverse[15];

            rayDirection[0] = (nearPoint[0] - cameraPos[0]);
            rayDirection[1] = (nearPoint[1] - cameraPos[1]);
            rayDirection[2] = (nearPoint[2] - cameraPos[2]);
            rayDirection[3] = (nearPoint[3] - cameraPos[3]);

            return rayDirection;
        }
     */

    /*
    public float[] getOGLPosition(int x, int y)
    {
        GLU.gluUnProject(x, y, 0, , modelOffset, project, projectOffset, view, viewOffset, obj, objOffset)
    }
    */

    public float[] getCameraPos(float[] modelView)
    {
        float[] modelviewInverse = new float[16];
        Matrix.invertM(modelviewInverse, 0, modelView, 0);
        float[] cameraPos = new float[4];
        cameraPos[0] = modelviewInverse[12];
        cameraPos[1] = modelviewInverse[13];
        cameraPos[2] = modelviewInverse[14];
        cameraPos[3] = modelviewInverse[15];
        return cameraPos;
    }

    public String floatArrayAsString(float[] array)
    {
        StringBuilder sb = new StringBuilder();
        sb.append("[");
        for (Float f : array)
        {
            sb.append(f + ", ");
        }
        sb.deleteCharAt(sb.length() - 1);
        sb.deleteCharAt(sb.length() - 1);
        sb.append("]");
        return sb.toString();
    }

    public float[] getInverseMatrix(float[] originalMatrix)
    {
        float[] inverseMatrix = new float[16];
        Matrix.invertM(inverseMatrix, 0, originalMatrix, 0);
        return inverseMatrix;
    }

    public float[] multiplyMat4ByVec4(float[] matrix4, float[] vector4)
    {
        float[] returnMatrix = new float[4];

        returnMatrix[0] = (matrix4[0] * vector4[0]) + (matrix4[1] * vector4[1]) + (matrix4[2] * vector4[2]) + (matrix4[3] * vector4[3]);
        returnMatrix[1] = (matrix4[4] * vector4[0]) + (matrix4[5] * vector4[1]) + (matrix4[6] * vector4[2]) + (matrix4[7] * vector4[3]);
        returnMatrix[2] = (matrix4[8] * vector4[0]) + (matrix4[9] * vector4[1]) + (matrix4[10] * vector4[2]) + (matrix4[11] * vector4[3]);
        returnMatrix[3] = (matrix4[12] * vector4[0]) + (matrix4[13] * vector4[1]) + (matrix4[14] * vector4[2]) + (matrix4[15] * vector4[3]);

        return returnMatrix;
    }

    public void onTouch(float touchX, float touchY)
    {
        float[] mouseRayProjection = getMouseRayProjection(touchX, touchY, width, height, mMVMatrix, mProjectionMatrix);
        Log.d("OpenGLES2Test", "Mouse Ray: " + floatArrayAsString(mouseRayProjection));
        //Log.d("OpenGLES2Test", "ModelView: " + floatArrayAsString(mMVMatrix));
        //Log.d("OpenGLES2Test", "ModelViewInverse: " + floatArrayAsString(getInverseMatrix(mMVMatrix)));
        //Log.d("OpenGLES2Test", "Mouse Coordinates: " + touchX + ", " + touchY);
        //Log.d("OpenGLES2Test", "Ray Coordinates: " + mouseRayProjection[0] + ", " + mouseRayProjection[1] + ", " + mouseRayProjection[2] + ", " + mouseRayProjection[3]);
        moveIntersectionLineEndPoint(mouseRayProjection);
    }
}

fragmentShader:

// Fragment shader: writes the color interpolated from the vertex stage.
precision mediump float;

// Per-fragment color interpolated from the vertex shader's v_Color output.
varying vec4 v_Color;

void main()
{
    gl_FragColor = v_Color;
}

vertexShader:

// Vertex shader: transforms each vertex by the combined
// model-view-projection matrix and passes its color through.
uniform mat4 u_MVPMatrix;

attribute vec4 a_Position;
attribute vec4 a_Color;

// Passed to the fragment stage, interpolated across the primitive.
varying vec4 v_Color;

void main()
{
    v_Color = a_Color;
    gl_Position = u_MVPMatrix * a_Position;
}

这篇关于Android OpenGLES 2 从触摸坐标拾取光线,稍微关闭投影计算的文章就介绍到这了,希望我们推荐的答案对大家有所帮助,也希望大家多多支持IT屋!

查看全文
登录 关闭
扫码关注1秒登录
发送“验证码”获取 | 15天全站免登陆