Android OpenGLES 2 ray picking from touch coordinates, unprojecting calculation slightly off

Problem description

I am trying to implement object picking based on touch coordinates via an intersecting ray test. I am having trouble finding information on converting the touch coordinates to the coordinate system used in the world in order to construct this ray.

My understanding so far is that the matrix that is applied to each vertex in the scene is:

projectionMatrix * viewMatrix * modelMatrix
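
For reference, here is how I understand that composition maps onto android.opengl.Matrix calls (a minimal sketch using the same variable names as the code below; Matrix stores matrices in column-major order):

    // MV = view * model, then MVP = projection * MV
    float[] mv = new float[16];
    float[] mvp = new float[16];
    Matrix.multiplyMM(mv, 0, viewMatrix, 0, mModelMatrix, 0);
    Matrix.multiplyMM(mvp, 0, mProjectionMatrix, 0, mv, 0);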

Here is my process for reversing that transformation, in an attempt to find the ray's endpoint in the scene, along with my drawing loop, in case I'm simply applying the various matrices incorrectly:

    public float[] getMouseRayProjection(float touchX, float touchY, float windowWidth, float windowHeight, float[] modelView, float[] projection)
        {
        float[] rayDirection = new float[4];

        float normalizedX = 2 * touchX/windowWidth - 1;
        float normalizedY = 1 - 2*touchY/windowHeight;

        float[] unviewMatrix = new float[16];
        float[] viewMatrix = new float[16];
        Matrix.multiplyMM(viewMatrix, 0, projection, 0, modelView, 0);
        Matrix.invertM(unviewMatrix, 0, viewMatrix, 0);

        float[] nearPoint = multiplyMat4ByVec4(projection, new float[]{normalizedX, normalizedY, 0, 1}); // note: the unviewMatrix computed above goes unused here
        float[] modelviewInverse = new float[16];
        Matrix.invertM(modelviewInverse, 0, modelView, 0);

        float[] cameraPos = new float[4];
        cameraPos[0] = modelviewInverse[12];
        cameraPos[1] = modelviewInverse[13];
        cameraPos[2] = modelviewInverse[14];
        cameraPos[3] = modelviewInverse[15];

        rayDirection[0] = nearPoint[0] - cameraPos[0];
        rayDirection[1] = nearPoint[1] - cameraPos[1];
        rayDirection[2] = nearPoint[2] - cameraPos[2];
        rayDirection[3] = nearPoint[3] - cameraPos[3];

        return rayDirection;
        }

    public float[] multiplyMat4ByVec4(float[] matrix4, float[] vector4)
        {
        float[] returnMatrix = new float[4];

        returnMatrix[0] = (matrix4[0] * vector4[0]) + (matrix4[1] * vector4[1]) + (matrix4[2] * vector4[2]) + (matrix4[3] * vector4[3]);
        returnMatrix[1] = (matrix4[4] * vector4[0]) + (matrix4[5] * vector4[1]) + (matrix4[6] * vector4[2]) + (matrix4[7] * vector4[3]);
        returnMatrix[2] = (matrix4[8] * vector4[0]) + (matrix4[9] * vector4[1]) + (matrix4[10] * vector4[2]) + (matrix4[11] * vector4[3]);
        returnMatrix[3] = (matrix4[12] * vector4[0]) + (matrix4[13] * vector4[1]) + (matrix4[14] * vector4[2]) + (matrix4[15] * vector4[3]);

        return returnMatrix;
        }

    @Override
    public void onDrawFrame(GL10 gl10) {
        GLES20.glClear(GLES20.GL_DEPTH_BUFFER_BIT | GLES20.GL_COLOR_BUFFER_BIT);

        long time = SystemClock.uptimeMillis() % 10000L;
        float angleInDegrees = (360.0f / 10000.0f) * ((int) time);

        GLES20.glViewport(0, 0, (int)(width/2), (int)(height/2));
        Matrix.setIdentityM(mModelMatrix, 0);
        Matrix.setLookAtM(viewMatrix, 0, 0f, 0f, 1.5f, 0f, 0f, -5f, 0f, 1f, 0f);
        //Matrix.rotateM(mModelMatrix, 0, angleInDegrees, 0.0f, 0.0f, 1.0f);
        drawTriangle(triangleVertices);

        //Matrix.translateM(mModelMatrix, 0, 1.5f, 0, -1f);
        //Matrix.frustumM(mProjectionMatrix, 0, left, right, -1.0f, 1.0f, 1.0f, 10.0f);
        Matrix.setIdentityM(mModelMatrix, 0);
        Matrix.setLookAtM(viewMatrix, 0, 1.5f, 0.8f, 0.5f, 0f, 0f, 0f, 0f, 1f, 0f);
        GLES20.glViewport((int)(width/2), (int)(height/2), (int)(width/2), (int)(height/2));
        drawTriangle(triangleVertices);
        drawIntersectionLine();

        /*
        Matrix.setLookAtM(viewMatrix, 0, 0, 1.5f, 0.5f, 0, 0, 0, 0, 0, -1f);
        GLES20.glViewport((int)(width/2), (int)height, (int)(width/2), (int)(height/2));
        drawTriangle(triangleVertices);
        drawIntersectionLine();
        */
        }

    private void drawTriangle(final FloatBuffer triangleBuffer)
    {
        triangleBuffer.position(positionOffset);
        GLES20.glVertexAttribPointer(mPositionHandle, positionDataSize, GLES20.GL_FLOAT, false, strideBytes, triangleBuffer);
        GLES20.glEnableVertexAttribArray(mPositionHandle);

        triangleBuffer.position(colorOffset);
        GLES20.glVertexAttribPointer(mColorHandle, colorDataSize, GLES20.GL_FLOAT, false, strideBytes, triangleBuffer);
        GLES20.glEnableVertexAttribArray(mColorHandle);

        Matrix.multiplyMM(mMVPMatrix, 0, viewMatrix, 0, mModelMatrix, 0);

        mMVMatrix = mMVPMatrix; // copies the reference only, so mMVMatrix and mMVPMatrix now alias the same array

        Matrix.multiplyMM(mMVPMatrix, 0, mProjectionMatrix, 0, mMVPMatrix, 0); // the result overlaps an input, which Matrix.multiplyMM documents as undefined

        GLES20.glUniformMatrix4fv(mMVPMatrixHandle, 1, false, mMVPMatrix, 0);
        GLES20.glDrawArrays(GLES20.GL_TRIANGLES, 0, 3);

        //Log.d("OpenGLES2Test", "The intersection ray is: " + floatArrayAsString(getCameraPos(mMVMatrix)) + " + " + floatArrayAsString(getMouseRayProjection((int)(width / 2), (int)(height / 2), 1.0f, (int)width, (int)height, mMVMatrix, mProjectionMatrix)));
    }

    private void drawIntersectionLine()
    {
        lineVertices.position(0);
        GLES20.glVertexAttribPointer(mPositionHandle, positionDataSize, GLES20.GL_FLOAT, false, lineStrideBytes, lineVertices);
        GLES20.glEnableVertexAttribArray(mPositionHandle);
        GLES20.glDrawArrays(GLES20.GL_LINES, 0, 2);
    }

    private void moveIntersectionLineEndPoint(float[] lineEndPoint)
    {
        this.lineEndPoint = lineEndPoint;

        float[] lineVerticesData = {
            lineStartPoint[0], lineStartPoint[1], lineStartPoint[2],
            lineEndPoint[0], lineEndPoint[1], lineEndPoint[2]
        };
        lineVertices = ByteBuffer.allocateDirect(lineVerticesData.length * bytesPerFloat).order(ByteOrder.nativeOrder()).asFloatBuffer();
        lineVertices.put(lineVerticesData).position(0);
    }

Although I'm pretty sure my 4x4-matrix-by-4D-vector multiplication method is correct, here is that method as well, just in case:

public float[] multiplyMat4ByVec4(float[] matrix4, float[] vector4)
    {
        float[] returnMatrix = new float[4];

        returnMatrix[0] = (matrix4[0] * vector4[0]) + (matrix4[1] * vector4[1]) + (matrix4[2] * vector4[2]) + (matrix4[3] * vector4[3]);
        returnMatrix[1] = (matrix4[4] * vector4[0]) + (matrix4[5] * vector4[1]) + (matrix4[6] * vector4[2]) + (matrix4[7] * vector4[3]);
        returnMatrix[2] = (matrix4[8] * vector4[0]) + (matrix4[9] * vector4[1]) + (matrix4[10] * vector4[2]) + (matrix4[11] * vector4[3]);
        returnMatrix[3] = (matrix4[12] * vector4[0]) + (matrix4[13] * vector4[1]) + (matrix4[14] * vector4[2]) + (matrix4[15] * vector4[3]);

        return returnMatrix;
    }
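
One thing worth double-checking here: android.opengl.Matrix stores matrices in column-major order, so treating matrix4[0..3] as the first row, as above, actually computes the transpose product. The platform's own Matrix.multiplyMV performs the column-major multiplication directly; a minimal equivalent sketch:

    // Column-major 4x4 matrix times vec4 using the built-in helper.
    float[] result = new float[4];
    Matrix.multiplyMV(result, 0, matrix4, 0, vector4, 0);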

The goal of this test app is to show the scene from a few separate angles so that I can see how the intersection line looks based on my code. I wanted to draw the line starting at the camera's origin and ending at the intersection point, but it's acting oddly. The endpoint seems to be pushed farther along the positive x axis than it should be, and in some spots it seems to sort of skip, as if there were a hole at that location. Although I still remember a bit of linear algebra, I don't remember enough to know exactly what I'm doing here, and I've scoured many online resources with no luck. I'm hoping someone who reads this has more experience with this than I do and will be kind enough to help me, or to give me tips if there's something else I'm doing wrong or inefficiently.

Variable reference: all of the matrices are float arrays of length 16

mProjectionMatrix = projection matrix

mModelMatrix = model matrix

mMVPMatrix = projection * modelview matrix

mMVMatrix = modelview matrix


    private final FloatBuffer triangleVertices;

    private FloatBuffer lineVertices;

    private final int bytesPerFloat = 4;

    private float[] viewMatrix = new float[16];

    private static Context context;

    private int mMVPMatrixHandle;

    private int mPositionHandle;

    private int mColorHandle;

    private float[] mProjectionMatrix = new float[16];

    private float[] mModelMatrix = new float[16];

    private float[] mMVPMatrix = new float[16];

    private float[] mMVMatrix = new float[16];

    private final int strideBytes = 7 * bytesPerFloat;
    private final int lineStrideBytes = 3 * bytesPerFloat;

    private final int positionOffset = 0;

    private final int positionDataSize = 3;

    private final int colorOffset = 3;

    private final int colorDataSize = 4;

    private float width, height;

    private float[] lineStartPoint = new float[]{0, 0, 1.5f};

    private float[] lineEndPoint = new float[]{0, 0, 0};

Solution

After some searching, I found a page that details this process in a different manner. Now the end of the ray no longer jumps to an unexpected position at random times, and the endpoint lands exactly where it should! Here is the page I used to fix my process: http://www.antongerdelan.net/opengl/raycasting.html. In short: convert the touch point to normalized device coordinates, form a clip-space ray with z = -1, transform it by the inverse projection matrix into an eye-space direction (forcing z = -1 and w = 0), transform that by the inverse view matrix into a world-space direction, and normalize it.

And here is my final source code for the entire intersection testing app. Most of the relevant code is within the OpenGLRenderer class under the getMouseRayProjection method.

MainActivity.java:

import android.opengl.GLSurfaceView;
import android.os.Bundle;
import android.app.Activity;
import android.content.Context;
import android.view.Menu;
import android.view.MotionEvent;

public class MainActivity extends Activity {

    private MyGLSurfaceView mGLSurfaceView;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        mGLSurfaceView = new MyGLSurfaceView(this);

        mGLSurfaceView.setEGLContextClientVersion(2);
        mGLSurfaceView.setEGLConfigChooser(8, 8, 8, 8, 16, 0);
        OpenGLRenderer renderer = new OpenGLRenderer(this);
        mGLSurfaceView.setRenderer(renderer);
        mGLSurfaceView.renderer = renderer;

        setContentView(mGLSurfaceView);
    }


    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        // Inflate the menu; this adds items to the action bar if it is present.
        getMenuInflater().inflate(R.menu.main, menu);
        return true;
    }

    @Override
    protected void onResume() {
        super.onResume();
        mGLSurfaceView.onResume();
    }

    @Override
    protected void onPause() {
        super.onPause();
        mGLSurfaceView.onPause();
    }

}

class MyGLSurfaceView extends GLSurfaceView {

    public OpenGLRenderer renderer;

    public float previousX, previousY;

    public MyGLSurfaceView(Context context)
    {
        super(context);
    }

    @Override
    public boolean onTouchEvent(MotionEvent e)
    {
        float x = e.getX();
        float y = e.getY();

        switch(e.getAction()) {
        case MotionEvent.ACTION_MOVE:
            float dx = x - previousX;
            float dy = y - previousY;

            renderer.onTouch(x, y);
        }

        previousX = x;
        previousY = y;
        return true;
    }
}

OpenGLRenderer.java:

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;

import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;

import android.content.Context;
import android.opengl.GLES20;
import android.opengl.GLU;
import android.opengl.Matrix;
import android.opengl.GLSurfaceView;
import android.os.SystemClock;
import android.util.Log;

public class OpenGLRenderer implements GLSurfaceView.Renderer {

    private final FloatBuffer triangleVertices;

    private FloatBuffer lineVertices;

    private final int bytesPerFloat = 4;

    private float[] viewMatrix = new float[16];

    private static Context context;

    private int mMVPMatrixHandle;

    private int mPositionHandle;

    private int mColorHandle;

    private float[] mProjectionMatrix = new float[16];

    private float[] mModelMatrix = new float[16];

    private float[] mMVPMatrix = new float[16];

    private float[] mMVMatrix = new float[16];

    private int[] viewport = new int[4];

    private final int strideBytes = 7 * bytesPerFloat;
    private final int lineStrideBytes = 3 * bytesPerFloat;

    private final int positionOffset = 0;

    private final int positionDataSize = 3;

    private final int colorOffset = 3;

    private final int colorDataSize = 4;

    private float width, height;

    private float[] lineStartPoint = new float[]{0, 0, 1f};

    private float[] lineEndPoint = new float[]{0, 0, 0};

    private float[] cameraPos = new float[]{0f, 0f, 2.5f};
    private float[] cameraLook = new float[]{0f, 0f, -5f};
    private float[] cameraUp = new float[]{0f, 1f, 0f};

    public OpenGLRenderer(Context context) {
        this.context = context;

        final float[] triangleVerticesData = {
                -0.5f, -0.25f, 0.0f,
                1.0f, 0.0f, 0.0f, 1.0f,

                0.5f, -0.25f, 0.0f,
                0.0f, 0.0f, 1.0f, 1.0f,

                0.0f, 0.559016994f, 0.0f,
                0.0f, 1.0f, 0.0f, 1.0f
        };

        triangleVertices = ByteBuffer.allocateDirect(triangleVerticesData.length * bytesPerFloat).order(ByteOrder.nativeOrder()).asFloatBuffer();
        triangleVertices.put(triangleVerticesData).position(0);

        float[] lineVerticesData = {
            lineStartPoint[0], lineStartPoint[1], lineStartPoint[2],
            lineEndPoint[0], lineEndPoint[1], lineEndPoint[2]
        };
        lineVertices = ByteBuffer.allocateDirect(lineVerticesData.length * bytesPerFloat).order(ByteOrder.nativeOrder()).asFloatBuffer();
        lineVertices.put(lineVerticesData).position(0);
    }

    @Override
    public void onSurfaceCreated(GL10 gl10, EGLConfig eglConfig) {
        GLES20.glClearColor(0.5f, 0.5f, 0.5f, 0.5f);

        Matrix.setLookAtM(viewMatrix, 0, cameraPos[0], cameraPos[1], cameraPos[2], cameraLook[0], cameraLook[1], cameraLook[2], cameraUp[0], cameraUp[1], cameraUp[2]);

        try {
            int vertexShaderHandle = GLES20.glCreateShader(GLES20.GL_VERTEX_SHADER);

            if (vertexShaderHandle != 0)
            {
                GLES20.glShaderSource(vertexShaderHandle, readShader("vertexShader"));

                GLES20.glCompileShader(vertexShaderHandle);

                final int[] compileStatus = new int[1];
                GLES20.glGetShaderiv(vertexShaderHandle, GLES20.GL_COMPILE_STATUS, compileStatus, 0);

                if (compileStatus[0] == 0)
                {
                    GLES20.glDeleteShader(vertexShaderHandle);
                    vertexShaderHandle = 0;
                }
            }

            if (vertexShaderHandle == 0)
            {
                throw new RuntimeException("Error creating vertex shader");
            }

            int fragmentShaderHandle = GLES20.glCreateShader(GLES20.GL_FRAGMENT_SHADER);

            if (fragmentShaderHandle != 0)
            {
                GLES20.glShaderSource(fragmentShaderHandle, readShader("fragmentShader"));

                GLES20.glCompileShader(fragmentShaderHandle);

                final int[] compileStatus = new int[1];
                GLES20.glGetShaderiv(fragmentShaderHandle, GLES20.GL_COMPILE_STATUS, compileStatus, 0);

                if (compileStatus[0] == 0)
                {
                    GLES20.glDeleteShader(fragmentShaderHandle);
                    fragmentShaderHandle = 0;
                }
            }
            if (fragmentShaderHandle == 0)
            {
                throw new RuntimeException("Error creating fragment shader.");
            }

            int programHandle = GLES20.glCreateProgram();

            if (programHandle != 0)
            {
                GLES20.glAttachShader(programHandle, vertexShaderHandle);
                GLES20.glAttachShader(programHandle, fragmentShaderHandle);

                GLES20.glBindAttribLocation(programHandle, 0, "a_Position");
                GLES20.glBindAttribLocation(programHandle, 1, "a_Color");

                GLES20.glLinkProgram(programHandle);

                final int[] linkStatus = new int[1];
                GLES20.glGetProgramiv(programHandle, GLES20.GL_LINK_STATUS, linkStatus, 0);

                if (linkStatus[0] == 0)
                {
                    GLES20.glDeleteProgram(programHandle);
                    programHandle = 0;
                }
            }

            if (programHandle == 0)
            {
                throw new RuntimeException("Error creating program.");
            }

            mMVPMatrixHandle = GLES20.glGetUniformLocation(programHandle, "u_MVPMatrix");
            mPositionHandle = GLES20.glGetAttribLocation(programHandle, "a_Position");
            mColorHandle = GLES20.glGetAttribLocation(programHandle, "a_Color");

            GLES20.glUseProgram(programHandle);
        } catch (IOException e)
        {
            Log.d("OpenGLES2Test", "The shader could not be read: " + e.getMessage());
        } catch (RuntimeException e)
        {
            Log.d("OpenGLES2Test", e.getMessage());
        }

        GLES20.glEnable(GLES20.GL_DEPTH_TEST);
        GLES20.glDepthFunc(GLES20.GL_LEQUAL);
        GLES20.glDepthMask(true);
    }

    @Override
    public void onSurfaceChanged(GL10 gl10, int width, int height) {
        GLES20.glViewport(0, 0, width/2, height/2);

        this.width = width;
        this.height = height;

        final float ratio = (float) width / height;
        final float left = -ratio;
        final float right = ratio;
        final float bottom = -1.0f;
        final float top = 1.0f;
        final float near = 1.0f;
        final float far = 10.0f;

        GLES20.glGetIntegerv(GLES20.GL_VIEWPORT, viewport, 0);

        Matrix.frustumM(mProjectionMatrix, 0, left, right, bottom, top, near, far);
    }

    @Override
    public void onDrawFrame(GL10 gl10) {
        GLES20.glClear(GLES20.GL_DEPTH_BUFFER_BIT | GLES20.GL_COLOR_BUFFER_BIT);

        long time = SystemClock.uptimeMillis() % 10000L;

        GLES20.glViewport(0, 0, (int)(width), (int)(height));
        Matrix.setIdentityM(mModelMatrix, 0);
        Matrix.setLookAtM(viewMatrix, 0, cameraPos[0], cameraPos[1], cameraPos[2], cameraLook[0], cameraLook[1], cameraLook[2], cameraUp[0], cameraUp[1], cameraUp[2]);

        Matrix.multiplyMM(mMVMatrix, 0, viewMatrix, 0, mModelMatrix, 0);

        Matrix.multiplyMM(mMVPMatrix, 0, mProjectionMatrix, 0, mMVMatrix, 0);

        GLES20.glUniformMatrix4fv(mMVPMatrixHandle, 1, false, mMVPMatrix, 0);

        drawTriangle(triangleVertices);
        drawIntersectionLine();
    }

    private void drawTriangle(final FloatBuffer triangleBuffer)
    {
        triangleBuffer.position(positionOffset);
        GLES20.glVertexAttribPointer(mPositionHandle, positionDataSize, GLES20.GL_FLOAT, false, strideBytes, triangleBuffer);
        GLES20.glEnableVertexAttribArray(mPositionHandle);

        triangleBuffer.position(colorOffset);
        GLES20.glVertexAttribPointer(mColorHandle, colorDataSize, GLES20.GL_FLOAT, false, strideBytes, triangleBuffer);
        GLES20.glEnableVertexAttribArray(mColorHandle);

        GLES20.glDrawArrays(GLES20.GL_TRIANGLES, 0, 3);
    }

    private void drawIntersectionLine()
    {
        lineVertices.position(0);
        GLES20.glVertexAttribPointer(mPositionHandle, positionDataSize, GLES20.GL_FLOAT, false, lineStrideBytes, lineVertices);
        GLES20.glEnableVertexAttribArray(mPositionHandle);
        GLES20.glDrawArrays(GLES20.GL_LINES, 0, 2);
    }

    private void moveIntersectionLineEndPoint(float[] lineEndPoint)
    {
        this.lineEndPoint = lineEndPoint;

        float[] lineVerticesData = {
            lineStartPoint[0], lineStartPoint[1], lineStartPoint[2],
            lineEndPoint[0], lineEndPoint[1], lineEndPoint[2]
        };
        lineVertices = ByteBuffer.allocateDirect(lineVerticesData.length * bytesPerFloat).order(ByteOrder.nativeOrder()).asFloatBuffer();
        lineVertices.put(lineVerticesData).position(0);
    }

    public static String readShader(String filePath) throws IOException {
        BufferedReader reader = new BufferedReader(new InputStreamReader(context.getAssets().open(filePath)));
        StringBuilder sb = new StringBuilder();
        String line;
        while( ( line = reader.readLine() ) != null)
        {
            sb.append(line + "\n");
        }
        reader.close();
        return sb.toString();
    }

    public float[] getMouseRayProjection(float touchX, float touchY, float windowWidth, float windowHeight, float[] view, float[] projection)
    {
        float[] rayDirection = new float[4];

        // Screen pixels -> normalized device coordinates in [-1, 1]; y is flipped because screen y grows downward.
        float normalizedX = 2f * touchX/windowWidth - 1f;
        float normalizedY = 1f - 2f*touchY/windowHeight;
        float normalizedZ = 1.0f;

        float[] rayNDC = new float[]{normalizedX, normalizedY, normalizedZ};

        // Clip-space ray pointing into the scene (-z), w = 1.
        float[] rayClip = new float[]{rayNDC[0], rayNDC[1], -1f, 1f};

        // Clip space -> eye space via the inverse projection matrix.
        float[] inverseProjection = new float[16];
        Matrix.invertM(inverseProjection, 0, projection, 0);
        float[] rayEye = multiplyMat4ByVec4(inverseProjection, rayClip);

        // Per the page above, keep only x and y of the eye-space ray; force z = -1 (forward) and w = 0 so it is a direction, not a point.
        rayEye = new float[]{rayEye[0], rayEye[1], -1f, 0f};

        // Eye space -> world space via the inverse view matrix.
        float[] inverseView = new float[16];
        Matrix.invertM(inverseView, 0, view, 0);
        float[] rayWorld4D = multiplyMat4ByVec4(inverseView, rayEye);
        float[] rayWorld = new float[]{rayWorld4D[0], rayWorld4D[1], rayWorld4D[2]};

        rayDirection = normalizeVector3(rayWorld);

        return rayDirection;
    }

    public float[] normalizeVector3(float[] vector3)
    {
        float[] normalizedVector = new float[3];
        float magnitude = (float) Math.sqrt((vector3[0] * vector3[0]) + (vector3[1] * vector3[1]) + (vector3[2] * vector3[2]));
        normalizedVector[0] = vector3[0] / magnitude;
        normalizedVector[1] = vector3[1] / magnitude;
        normalizedVector[2] = vector3[2] / magnitude;
        return normalizedVector;
    }

    /*
        public float[] getMouseRayProjection(float touchX, float touchY, float windowWidth, float windowHeight, float[] modelView, float[] projection)
        {
            float[] rayDirection = new float[4];

            float normalizedX = 2 * touchX/windowWidth - 1;
            float normalizedY = 1 - 2*touchY/windowHeight;

            float[] unviewMatrix = new float[16];
            float[] viewMatrix = new float[16];
            Matrix.multiplyMM(viewMatrix, 0, projection, 0, modelView, 0);
            Matrix.invertM(unviewMatrix, 0, viewMatrix, 0);

            float[] nearPoint = multiplyMat4ByVec4(unviewMatrix, new float[]{normalizedX, normalizedY, 0, 1});
            float[] modelviewInverse = new float[16];
            Matrix.invertM(modelviewInverse, 0, modelView, 0);

            float[] cameraPos = new float[4];
            cameraPos[0] = modelviewInverse[12];
            cameraPos[1] = modelviewInverse[13];
            cameraPos[2] = modelviewInverse[14];
            cameraPos[3] = modelviewInverse[15];

            rayDirection[0] = (nearPoint[0] - cameraPos[0]);
            rayDirection[1] = (nearPoint[1] - cameraPos[1]);
            rayDirection[2] = (nearPoint[2] - cameraPos[2]);
            rayDirection[3] = (nearPoint[3] - cameraPos[3]);

            return rayDirection;
        }
     */

    /*
    public float[] getOGLPosition(int x, int y)
    {
        GLU.gluUnProject(x, y, 0, , modelOffset, project, projectOffset, view, viewOffset, obj, objOffset)
    }
    */

    public float[] getCameraPos(float[] modelView)
    {
        float[] modelviewInverse = new float[16];
        Matrix.invertM(modelviewInverse, 0, modelView, 0);
        float[] cameraPos = new float[4];
        cameraPos[0] = modelviewInverse[12];
        cameraPos[1] = modelviewInverse[13];
        cameraPos[2] = modelviewInverse[14];
        cameraPos[3] = modelviewInverse[15];
        return cameraPos;
    }

    public String floatArrayAsString(float[] array)
    {
        StringBuilder sb = new StringBuilder();
        sb.append("[");
        for (Float f : array)
        {
            sb.append(f + ", ");
        }
        sb.deleteCharAt(sb.length() - 1);
        sb.deleteCharAt(sb.length() - 1);
        sb.append("]");
        return sb.toString();
    }

    public float[] getInverseMatrix(float[] originalMatrix)
    {
        float[] inverseMatrix = new float[16];
        Matrix.invertM(inverseMatrix, 0, originalMatrix, 0);
        return inverseMatrix;
    }

    public float[] multiplyMat4ByVec4(float[] matrix4, float[] vector4)
    {
        float[] returnMatrix = new float[4];

        returnMatrix[0] = (matrix4[0] * vector4[0]) + (matrix4[1] * vector4[1]) + (matrix4[2] * vector4[2]) + (matrix4[3] * vector4[3]);
        returnMatrix[1] = (matrix4[4] * vector4[0]) + (matrix4[5] * vector4[1]) + (matrix4[6] * vector4[2]) + (matrix4[7] * vector4[3]);
        returnMatrix[2] = (matrix4[8] * vector4[0]) + (matrix4[9] * vector4[1]) + (matrix4[10] * vector4[2]) + (matrix4[11] * vector4[3]);
        returnMatrix[3] = (matrix4[12] * vector4[0]) + (matrix4[13] * vector4[1]) + (matrix4[14] * vector4[2]) + (matrix4[15] * vector4[3]);

        return returnMatrix;
    }

    public void onTouch(float touchX, float touchY)
    {
        float[] mouseRayProjection = getMouseRayProjection(touchX, touchY, width, height, mMVMatrix, mProjectionMatrix);
        Log.d("OpenGLES2Test", "Mouse Ray: " + floatArrayAsString(mouseRayProjection));
        //Log.d("OpenGLES2Test", "ModelView: " + floatArrayAsString(mMVMatrix));
        //Log.d("OpenGLES2Test", "ModelViewInverse: " + floatArrayAsString(getInverseMatrix(mMVMatrix)));
        //Log.d("OpenGLES2Test", "Mouse Coordinates: " + touchX + ", " + touchY);
        //Log.d("OpenGLES2Test", "Ray Coordinates: " + mouseRayProjection[0] + ", " + mouseRayProjection[1] + ", " + mouseRayProjection[2] + ", " + mouseRayProjection[3]);
        moveIntersectionLineEndPoint(mouseRayProjection);
    }
}

fragmentShader:

precision mediump float;

varying vec4 v_Color;

void main()
{
    gl_FragColor = v_Color;
}

vertexShader:

uniform mat4 u_MVPMatrix;

attribute vec4 a_Position;
attribute vec4 a_Color;

varying vec4 v_Color;

void main()
{
    v_Color = a_Color;
    gl_Position = u_MVPMatrix * a_Position;
}
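
To turn the ray into an actual pick, the normalized world-space direction returned by getMouseRayProjection can be combined with the camera position in a standard ray-sphere test. This is a minimal sketch and not part of the app above; intersectRaySphere and its sphere parameters are hypothetical:

    // Hypothetical helper: smallest positive t where origin + t * dir hits the sphere, or -1 if it misses.
    // origin is the camera position and dir the normalized ray direction, both in world space.
    public static float intersectRaySphere(float[] origin, float[] dir, float[] center, float radius)
    {
        float ox = origin[0] - center[0];
        float oy = origin[1] - center[1];
        float oz = origin[2] - center[2];
        float b = 2f * (dir[0] * ox + dir[1] * oy + dir[2] * oz);
        float c = ox * ox + oy * oy + oz * oz - radius * radius;
        float discriminant = b * b - 4f * c; // the quadratic's leading coefficient is 1 since dir is normalized
        if (discriminant < 0f) return -1f;   // the ray misses the sphere
        float sqrtD = (float) Math.sqrt(discriminant);
        float t = (-b - sqrtD) / 2f;         // nearer hit
        if (t < 0f) t = (-b + sqrtD) / 2f;   // the ray origin is inside the sphere
        return t;
    }

Any positive t gives the pick point as cameraPos + t * rayDirection, which would also be a natural endpoint for the intersection line drawn above.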
