如何设置onTouch监听器在Android中的OpenGL-ES纹理绘制 [英] How to set onTouch Listener for drawn texture in Android OpenGL-ES

查看:252
本文介绍了如何设置onTouch监听器在Android中的OpenGL-ES纹理绘制的处理方法,对大家解决问题具有一定的参考价值,需要的朋友们下面随着小编来一起学习吧!

问题描述

我有一个纹理我的应用程序,我可以用手指拖动它,但我怎么可以设置一个onTouch监听的质感,当我触碰手机屏幕的任何地方纹理移动到我摸,我怎样才能使它只有当手指触摸的质感动?

任何指导将不胜感激～

这是我的主类:

 进口android.app.Activity;
进口android.os.Bundle;
进口android.view.WindowManager;公共类MainActivity延伸活动{私人阶段阶段;@覆盖
公共无效的onCreate(捆绑savedInstanceState){
    super.onCreate(savedInstanceState);    //屏幕设置
    。getWindow()addFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN);
    。getWindow()addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
    。getWindow()clearFlags(WindowManager.LayoutParams.FLAG_FORCE_NOT_FULLSCREEN);    的setContentView(R.layout.main_layout);
    舞台=(第二阶段)findViewById(R.id.my_stage);
}@覆盖
保护无效的onPause(){
    super.onPause();
    stage.onPause();
}@覆盖
保护无效onResume(){
    super.onResume();
    stage.onResume();
}
}

这是阶段的子类:

 进口android.content.Context;
进口android.opengl.GLES10;
进口android.opengl.GLSurfaceView;
进口android.util.AttributeSet;
进口android.util.Log;
进口android.view.MotionEvent;
进口java.nio.ByteBuffer中;
进口java.nio.ByteOrder中;
进口java.nio.FloatBuffer中;
进口javax.microedition.khronos.egl.EGLConfig;
进口javax.microedition.khronos.opengles.GL10;公共类舞台扩展GLSurfaceView {//舞台的宽度和高度
私人浮动W,H;
//纹理位置
私人浮动XPOS,yPos;
//缩放比例
私人浮动R,比DIST1,dist2;
//屏幕宽度和高度
私人诠释屏幕宽度,screenHeight;
//我们的本土顶点缓冲
私人FloatBuffer vertexBuffer;
私人纹理TEX;
MyRenderer mRenderer;@覆盖
公共布尔onTouchEvent(MotionEvent事件){
    最终诠释行动= event.getAction()及MotionEvent.ACTION_MASK;
    浮动的x,y,X1,X2,Y1,Y2;
    INT pointerIndex;    如果(event.getPointerCount()== 2){
        如果(动作== MotionEvent.ACTION_POINTER_UP){
            X1 = event.getX(0);
            Y1 = event.getY(0);
        }其他{
            X1 = event.getX(0);
            Y1 = event.getY(0);
        }
        如果(动作== MotionEvent.ACTION_POINTER_DOWN){
            X2 = event.getX(1);
            Y2 = event.getY(1);
            DIST1 =(浮动)的Math.sqrt((X1 - X2)*(X1 - X2)+(Y1 - Y2)*(Y1 - Y2));
        }其他{
            X2 = event.getX(1);
            Y2 = event.getY(1);
            dist2 =(浮动)的Math.sqrt((X1 - X2)*(X1 - X2)+(Y1 - Y2)*(Y1 - Y2));
        }
        比= dist2 / DIST1;
        mRenderer.setRatio(比率);
        requestRender();
    }
    如果(event.getPointerCount()== 1){
        如果(动作== MotionEvent.ACTION_POINTER_DOWN){
                X = event.getX();
                Y = event.getY();
        }其他{
            pointerIndex = event.getActionIndex();
            X = event.getX(pointerIndex);
            Y = event.getY(pointerIndex);
        }
        mRenderer.setXY(X,Y);
        requestRender();
    }
    返回true;
}公共舞台(上下文的背景下,ATTRS的AttributeSet){
    超(背景下,ATTRS);
    setEGLConfigChooser(8,​​8,8,8,0,0);
    mRenderer =新MyRenderer();
    setRenderer(mRenderer);
    setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);
    浮动顶点[] = {
            -0.5f,-0.5f,0.0,// 0左下方
            0.5F,-0.5f,0.0,1 //右下
            -0.5f,0.5F,0.0,// 2。左顶
            0.5F,0.5F,0.0 // 3.右键顶
    };    ByteBuffer的VBB = ByteBuffer.allocateDirect(vertices.length * 4);
    vbb.order(ByteOrder.nativeOrder());
    vertexBuffer = vbb.asFloatBuffer();
    vertexBuffer.put(顶点);
    vertexBuffer.position(0);    TEX =新的纹理(R.drawable.kdk);
}私有类MyRenderer实现GLSurfaceView.Renderer {    私有对象锁=新的对象();
    公共无效setXY(浮法X,浮法Y){
        同步(锁){
            XPOS = X * W /屏幕宽度;
            yPos = Y * H / screenHeight;
        }
    }    公共无效setRatio(浮动比例){
        R =规模;
    }    公众最终无效onDrawFrame(GL10 GL){
        gl.glClear(GLES10.GL_COLOR_BUFFER_BIT);
        TEX prepare(GL,GL10.GL_CLAMP_TO_EDGE)。
        gl.glVertexPointer(3,GL10.GL_FLOAT,0,vertexBuffer);
        同步(锁){
            tex.draw(GL,XPOS,yPos,tex.getWidth()* R,tex.getHeight()* R,0);
        }
    }    公众最终无效onSurfaceChanged(GL10 GL,诠释的宽度,高度INT){
        gl.glClearColor(0,0,0,0);        如果(宽>高度){
            H = 600;
            W =宽*高/高;
        }其他{
            W = 600;
            H =高*宽/宽;
        }
        屏幕宽度=宽度;
        screenHeight =高度;        XPOS = W / 2;
        yPos = H / 2;
        R = 1;        gl.glViewport(0,0,屏幕宽度,screenHeight);
        gl.glMatrixMode(GL10.GL_PROJECTION);
        gl.glLoadIdentity();
        gl.glOrthof(0,W,H,0,-1,1);
        gl.glMatrixMode(GL10.GL_MODELVIEW);
        gl.glLoadIdentity();
    }    公众最终无效onSurfaceCreated(GL10 GL,EGLConfig配置){
        //设置alpha混合
        gl.glEnable(GL10.GL_ALPHA_TEST);
        gl.glEnable(GL10.GL_BLEND);
        gl.glBlendFunc(GL10.GL_ONE,GL10.GL_ONE_MINUS_SRC_ALPHA);        //我们在2D。为什么需要深度?
        gl.glDisable(GL10.GL_DEPTH_TEST);        //启用顶点数组(我们会用它们来绘制原语)。
        gl.glEnableClientState(GL10.GL_VERTEX_ARRAY);        //启用纹理协调阵列。
        gl.glEnableClientState(GL10.GL_TEXTURE_COORD_ARRAY);        tex.load(的getContext());
    }}}

这是质地子类:

 进口android.content.Context;
进口android.content.res.Resources;
进口android.graphics.Bitmap;
进口android.graphics.BitmapFactory;
进口android.opengl.GLES10;
进口android.opengl.GLES20;
进口android.opengl.GLUtils;
进口java.nio.ByteBuffer中;
进口java.nio.ByteOrder中;
进口java.nio.FloatBuffer中;
进口javax.microedition.khronos.opengles.GL10;公共类纹理{/ **
 *与此相关的质感OpenGL ES的质地名称。
 * /
保护INT textureId;/ **
 *图像的水平和垂直尺寸。
 * /
保护INT宽度,高度;/ **
 *因为我们要加载的图像资源标识符。
 * /
INT RESOURCEID;/ **
 *我们是否应该产生MIP映射。
 * /
布尔贴图;/ **
 *含质地映射的缓冲区。
 * /
私人FloatBuffer tempTextureBuffer = NULL;纹理(INT RESOURCEID,布尔贴图){
    this.resourceId = RESOURCEID;
    this.textureId = -1;
    this.mipmaps =贴图;
}纹理(INT RESOURCEID){
    这个(RESOURCEID,FALSE);
}/ **
 *生成一个新的OpenGL ES的纹理名称(标识)。
 返回:新生成的纹理名称。
 * /
私有静态最终诠释newTextureID(){
    INT [] TEMP = INT新[1];
    GLES10.glGenTextures(1,温度,0);
    返回温度[0];
}公众最终诠释的getWidth(){
    返回宽度;
}公众最终诠释的getHeight(){
    返回高度;
}公众最终无效负载(上下文的背景下){
    //将资源位图。
    BitmapFactory.Options选择采用=新BitmapFactory.Options();
    opts.inScaled = FALSE;
    BMP位图= BitmapFactory.de codeResource(context.getResources(),RESOURCEID,选择采用);    //更新此纹理实例的宽度和高度。
    宽度= bmp.getWidth();
    高度= bmp.getHeight();    //创建并绑定一个新的纹理名称。
    textureId = newTextureID();
    GLES10.glBindTexture(GL10.GL_TEXTURE_2D,textureId);    //加载纹理到我们的纹理名称。
    GLUtils.texImage2D(GL10.GL_TEXTURE_2D,0,BMP,0);    //设置放大滤波器双线性插值。
    GLES10.glTexParameterf(GL10.GL_TEXTURE_2D,GL10.GL_TEXTURE_MAG_FILTER,GL10.GL_LINEAR);    如果(贴图){
        //如果请求的贴图,生成贴图​​并设置缩小过滤器三线性过滤。
        GLES20.glGenerateMipmap(GLES20.GL_TEXTURE_2D);
        GLES10.glTexParameterf(GL10.GL_TEXTURE_2D,GL10.GL_TEXTURE_MIN_FILTER,GL10.GL_LINEAR_MIPMAP_LINEAR);
    }
    其他GLES10.glTexParameterf(GL10.GL_TEXTURE_2D,GL10.GL_TEXTURE_MIN_FILTER,GL10.GL_LINEAR);    //回收该位图。
    bmp.recycle();    //如果纹理映射缓冲区尚未初始化,现在就做。
    如果(tempTextureBuffer == NULL)
        buildTextureMapping();
}/ **
 *构建纹理映射缓冲器。
 * /
私人无效buildTextureMapping(){
    //纹理贴图坐标的数组。
    最终浮纹[] = {
            0,0,//第一个顶点
            1,0,//第二顶点
            0,1,//第三顶点
            1,1,//第四个顶点
    };    //创建一个本地缓存了上述阵。
    最终的ByteBuffer IBB = ByteBuffer.allocateDirect(texture.length * 4);
    ibb.order(ByteOrder.nativeOrder());
    tempTextureBuffer = ibb.asFloatBuffer();
    tempTextureBuffer.put(纹理);
    tempTextureBuffer.position(0);
}/ **
 *删除纹理名称和标记这个实例作为卸载。
 * /
公众最终无效的destroy(){
    GLES10.glDeleteTextures(1,新INT [] {textureId},0);    //将此值设置为-1表示它被卸载。
    textureId = -1;
}isLoaded公共最后布尔(){
    返回textureId> = 0;
}公众最终无效prepare(GL10 GL,诠释套){
    //启用2D纹理
    gl.glEnable(GL10.GL_TEXTURE_2D);    //绑定我们的纹理名
    gl.glBindTexture(GL10.GL_TEXTURE_2D,textureId);    //设置纹理包装方法
    gl.glTexParameterf(GL10.GL_TEXTURE_2D,GL10.GL_TEXTURE_WRAP_S,套);
    gl.glTexParameterf(GL10.GL_TEXTURE_2D,GL10.GL_TEXTURE_WRAP_T,套);    //启用纹理坐标数组和加载(激活)我们
    gl.glEnableClientState(GL10.GL_TEXTURE_COORD_ARRAY);
    gl.glTexCoordPointer(2,GL10.GL_FLOAT,0,tempTextureBuffer);
}公众最终无效平局(GL10 GL,浮法X,浮法Y,浮动W,浮球小时,浮腐){
    gl.glPushMatrix();
    gl.glTranslatef(X,Y,0);
    gl.glRotatef(腐烂,0,0,1);
    gl.glScalef(W,H,0); //缩放将首先进行。
    gl.glDrawArrays(GL10.GL_TRIANGLE_STRIP,0,4);
    gl.glPopMatrix();
}}


解决方案

您有所有你需要计算的数据。看来你使用坐标系左上方(0,0)键,右下方为(W,H) 。触摸坐标必须转变为同一个系统,如 touchX *(W /屏幕宽度),类似的纵坐标。

纹理的位置也与中心,静态坐标和标度应该是足够找到纹理顶点的实际位置所定义

现在考虑你有一点触摸和你的质感边框值离开

 布尔didHit = touch.x> =左&放大器;&安培; touch.x< =右放大器;&安培; touch.y> =底部和放大器;&安培; touch.y< =顶部;

I have a texture in my application and I can drag it using finger but how can I set a onTouch Listener for the texture, when I touch anywhere of the phone screen the texture move to where I touch, how can I make it only move when finger touch the texture?

Any guidance will be appreciated~

this is my main class:

import android.app.Activity;
import android.os.Bundle;
import android.view.WindowManager;

public class MainActivity extends Activity {

    /** The GL surface hosting the draggable texture; inflated from the layout. */
    private Stage stage;

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);

        // Run full-screen and keep the display on while this activity is visible.
        getWindow().addFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN
                | WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
        getWindow().clearFlags(WindowManager.LayoutParams.FLAG_FORCE_NOT_FULLSCREEN);

        setContentView(R.layout.main_layout);
        stage = (Stage) findViewById(R.id.my_stage);
    }

    // GLSurfaceView requires its pause/resume methods to be forwarded so the
    // render thread and EGL context are released and restored correctly.
    @Override
    protected void onPause() {
        super.onPause();
        stage.onPause();
    }

    @Override
    protected void onResume() {
        super.onResume();
        stage.onResume();
    }
}

this is stage sub class:

import android.content.Context;
import android.opengl.GLES10;
import android.opengl.GLSurfaceView;
import android.util.AttributeSet;
import android.util.Log;
import android.view.MotionEvent;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;

/**
 * A GLSurfaceView that renders a single textured quad which can be dragged with
 * one finger and scaled with a two-finger pinch. Touch events arrive on the UI
 * thread while drawing happens on the GL thread, so shared state (xPos, yPos, r)
 * is guarded by the renderer's lock.
 */
public class Stage extends GLSurfaceView {

    // World (stage) dimensions: the short screen axis maps to 600 world units.
    private float w, h;
    // Texture centre position, in world units.
    private float xPos, yPos;
    // Current scale factor and pinch-gesture distance bookkeeping.
    private float r, ratio, dist1, dist2;
    // Physical surface size in pixels (set in onSurfaceChanged).
    private int screenWidth, screenHeight;
    // Unit quad vertices shared by every draw call (scaled per-draw).
    private FloatBuffer vertexBuffer;
    private Texture tex;
    MyRenderer mRenderer;

    /**
     * Handles drag (one pointer) and pinch-zoom (two pointers).
     *
     * NOTE(review): this moves the texture on ANY touch. To move it only when
     * the finger starts on the texture, hit-test the down position against the
     * quad's bounds (xPos +/- tex.getWidth()*r/2, yPos +/- tex.getHeight()*r/2)
     * after converting the touch to world coordinates.
     */
    @Override
    public boolean onTouchEvent(MotionEvent event) {
        final int action = event.getAction() & MotionEvent.ACTION_MASK;

        if (event.getPointerCount() == 2) {
            // Pinch: distance between the two pointers.
            // (The original code had identical if/else arms for x1/y1 — collapsed.)
            final float x1 = event.getX(0);
            final float y1 = event.getY(0);
            final float x2 = event.getX(1);
            final float y2 = event.getY(1);
            final float dist =
                    (float) Math.sqrt((x1 - x2) * (x1 - x2) + (y1 - y2) * (y1 - y2));

            if (action == MotionEvent.ACTION_POINTER_DOWN) {
                // Second finger just landed: record the baseline distance.
                dist1 = dist;
            } else {
                dist2 = dist;
                // Guard against division by zero (a MOVE can arrive before the
                // baseline is recorded, which previously produced NaN/Infinity).
                if (dist1 > 0f) {
                    ratio = dist2 / dist1;
                    mRenderer.setRatio(ratio);
                    requestRender();
                }
            }
        } else if (event.getPointerCount() == 1) {
            // Single-finger drag. The first pointer reports ACTION_DOWN (never
            // ACTION_POINTER_DOWN), so the old branch on ACTION_POINTER_DOWN was
            // dead code; with one pointer, getX()/getY() already address it.
            mRenderer.setXY(event.getX(), event.getY());
            requestRender();
        }
        return true;
    }

    public Stage(Context context, AttributeSet attrs) {
        super(context, attrs);
        setEGLConfigChooser(8, 8, 8, 8, 0, 0);
        mRenderer = new MyRenderer();
        setRenderer(mRenderer);
        // Only redraw when requestRender() is called (touch-driven rendering).
        setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);

        // Unit quad centred on the origin, in triangle-strip order.
        float vertices[] = {
                -0.5f, -0.5f, 0.0f,  // 0. left-bottom
                 0.5f, -0.5f, 0.0f,  // 1. right-bottom
                -0.5f,  0.5f, 0.0f,  // 2. left-top
                 0.5f,  0.5f, 0.0f   // 3. right-top
        };

        // Vertex data must live in a direct, native-ordered buffer for GL.
        ByteBuffer vbb = ByteBuffer.allocateDirect(vertices.length * 4);
        vbb.order(ByteOrder.nativeOrder());
        vertexBuffer = vbb.asFloatBuffer();
        vertexBuffer.put(vertices);
        vertexBuffer.position(0);

        tex = new Texture(R.drawable.kdk);
    }

    private class MyRenderer implements GLSurfaceView.Renderer {

        /** Guards xPos/yPos/r: written on the UI thread, read on the GL thread. */
        private Object lock = new Object();

        /** Maps a screen-pixel touch position into world units and stores it. */
        public void setXY(float x, float y) {
            synchronized (lock) {
                xPos = x * w / screenWidth;
                yPos = y * h / screenHeight;
            }
        }

        /** Stores the pinch scale factor. Locked: the GL thread reads r in onDrawFrame. */
        public void setRatio(float scale) {
            synchronized (lock) {   // fix: was unsynchronized — raced with onDrawFrame
                r = scale;
            }
        }

        public final void onDrawFrame(GL10 gl) {
            gl.glClear(GLES10.GL_COLOR_BUFFER_BIT);
            tex.prepare(gl, GL10.GL_CLAMP_TO_EDGE);
            gl.glVertexPointer(3, GL10.GL_FLOAT, 0, vertexBuffer);
            // Snapshot position/scale atomically relative to the touch handlers.
            synchronized (lock) {
                tex.draw(gl, xPos, yPos, tex.getWidth() * r, tex.getHeight() * r, 0);
            }
        }

        public final void onSurfaceChanged(GL10 gl, int width, int height) {
            gl.glClearColor(0, 0, 0, 0);

            // Fix the short axis at 600 world units; scale the long axis to
            // preserve the surface's aspect ratio.
            if (width > height) {
                h = 600;
                w = width * h / height;
            } else {
                w = 600;
                h = height * w / width;
            }
            screenWidth = width;
            screenHeight = height;

            // Start centred at 1:1 scale.
            xPos = w / 2;
            yPos = h / 2;
            r = 1;

            gl.glViewport(0, 0, screenWidth, screenHeight);
            gl.glMatrixMode(GL10.GL_PROJECTION);
            gl.glLoadIdentity();
            // Top-left origin: (0,0) at top-left, (w,h) at bottom-right.
            gl.glOrthof(0, w, h, 0, -1, 1);
            gl.glMatrixMode(GL10.GL_MODELVIEW);
            gl.glLoadIdentity();
        }

        public final void onSurfaceCreated(GL10 gl, EGLConfig config) {
            // Set up alpha blending (premultiplied-alpha style source factor).
            gl.glEnable(GL10.GL_ALPHA_TEST);
            gl.glEnable(GL10.GL_BLEND);
            gl.glBlendFunc(GL10.GL_ONE, GL10.GL_ONE_MINUS_SRC_ALPHA);

            // Pure 2D rendering — depth testing is unnecessary.
            gl.glDisable(GL10.GL_DEPTH_TEST);

            // Enable vertex and texture-coordinate arrays for glDrawArrays.
            gl.glEnableClientState(GL10.GL_VERTEX_ARRAY);
            gl.glEnableClientState(GL10.GL_TEXTURE_COORD_ARRAY);

            // (Re)load the texture: the GL context may have been recreated.
            tex.load(getContext());
        }
    }
}

this is texture sub class:

import android.content.Context;
import android.content.res.Resources;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.opengl.GLES10;
import android.opengl.GLES20;
import android.opengl.GLUtils;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import javax.microedition.khronos.opengles.GL10;

/**
 * Wraps an OpenGL ES 1.x 2D texture loaded from an Android drawable resource.
 * Lifecycle: construct with a resource id, call {@link #load} on the GL thread
 * (e.g. from onSurfaceCreated), {@link #prepare} + {@link #draw} per frame, and
 * {@link #destroy} when done. Not thread-safe; all GL calls must run on the GL thread.
 */
public class Texture {

    /**
     * The OpenGL ES texture name associated with this texture; -1 while unloaded.
     */
    protected int textureId;

    /**
     * The horizontal and vertical dimensions of the image, in pixels.
     */
    protected int width, height;

    /**
     * The resource identifier for the image we want to load.
     */
    int resourceId;

    /**
     * Whether or not we should generate mip maps.
     */
    boolean mipmaps;

    /**
     * The buffer containing texture coordinates for the unit quad (lazily built).
     */
    private FloatBuffer tempTextureBuffer = null;

    Texture(int resourceId, boolean mipmaps) {
        this.resourceId = resourceId;
        this.textureId = -1;
        this.mipmaps = mipmaps;
    }

    Texture(int resourceId) {
        this(resourceId, false);
    }

    /**
     * Generates a new OpenGL ES texture name (identifier).
     * @return The newly generated texture name.
     */
    private static int newTextureID() {
        int[] temp = new int[1];
        GLES10.glGenTextures(1, temp, 0);
        return temp[0];
    }

    public final int getWidth() {
        return width;
    }

    public final int getHeight() {
        return height;
    }

    /**
     * Decodes the drawable resource and uploads it as the current GL texture.
     * Must be called on the GL thread with a current context.
     *
     * @param context context used to resolve the drawable resource.
     * @throws IllegalArgumentException if the resource cannot be decoded.
     */
    public final void load(Context context) {
        // Load the bitmap from resources at its native pixel size.
        BitmapFactory.Options opts = new BitmapFactory.Options();
        opts.inScaled = false;
        Bitmap bmp = BitmapFactory.decodeResource(context.getResources(), resourceId, opts);
        // Fail fast with context instead of an opaque NPE on bmp.getWidth().
        if (bmp == null) {
            throw new IllegalArgumentException(
                    "Could not decode drawable resource id " + resourceId);
        }

        // Update this texture instance's width and height.
        width = bmp.getWidth();
        height = bmp.getHeight();

        // Create and bind a new texture name.
        textureId = newTextureID();
        GLES10.glBindTexture(GL10.GL_TEXTURE_2D, textureId);

        // Upload the bitmap into the bound texture.
        GLUtils.texImage2D(GL10.GL_TEXTURE_2D, 0, bmp, 0);

        // Set magnification filter to bilinear interpolation.
        GLES10.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MAG_FILTER, GL10.GL_LINEAR);

        if (mipmaps) {
            // Generate mipmaps and use trilinear filtering for minification.
            GLES20.glGenerateMipmap(GLES20.GL_TEXTURE_2D);
            GLES10.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MIN_FILTER,
                    GL10.GL_LINEAR_MIPMAP_LINEAR);
        } else {
            GLES10.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MIN_FILTER,
                    GL10.GL_LINEAR);
        }

        // The pixel data now lives on the GPU; release the bitmap's memory.
        bmp.recycle();

        // If the texture mapping buffer has not been initialized yet, do it now.
        if (tempTextureBuffer == null)
            buildTextureMapping();
    }

    /**
     * Builds the texture coordinate buffer matching the quad's triangle-strip order.
     */
    private void buildTextureMapping() {
        // The array of texture mapping coordinates (u, v per vertex).
        final float texture[] = {
                0, 0, // The first vertex
                1, 0, // The second vertex
                0, 1, // The third vertex
                1, 1, // The fourth vertex
        };

        // GL requires a direct, native-ordered buffer.
        final ByteBuffer ibb = ByteBuffer.allocateDirect(texture.length * 4);
        ibb.order(ByteOrder.nativeOrder());
        tempTextureBuffer = ibb.asFloatBuffer();
        tempTextureBuffer.put(texture);
        tempTextureBuffer.position(0);
    }

    /**
     * Deletes the texture name and marks this instance as unloaded.
     * Safe to call repeatedly: only deletes when a texture is actually loaded
     * (previously this passed -1 or a stale name to glDeleteTextures on a
     * double-destroy).
     */
    public final void destroy() {
        if (textureId >= 0) {
            GLES10.glDeleteTextures(1, new int[] {textureId}, 0);
            // -1 indicates "unloaded"; see isLoaded().
            textureId = -1;
        }
    }

    public final boolean isLoaded() {
        return textureId >= 0;
    }

    /**
     * Binds this texture and activates its coordinate array for drawing.
     *
     * @param gl   the GL context.
     * @param wrap wrap mode for both S and T (e.g. GL10.GL_CLAMP_TO_EDGE).
     */
    public final void prepare(GL10 gl, int wrap) {
        // Enable 2D texturing.
        gl.glEnable(GL10.GL_TEXTURE_2D);

        // Bind our texture name.
        gl.glBindTexture(GL10.GL_TEXTURE_2D, textureId);

        // Set texture wrap methods on both axes.
        gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_WRAP_S, wrap);
        gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_WRAP_T, wrap);

        // Enable the texture coordinate array and load (activate) ours.
        gl.glEnableClientState(GL10.GL_TEXTURE_COORD_ARRAY);
        gl.glTexCoordPointer(2, GL10.GL_FLOAT, 0, tempTextureBuffer);
    }

    /**
     * Draws the prepared unit quad translated to (x, y), rotated by rot degrees
     * about z, and scaled to w x h world units. Caller must have set up the
     * vertex pointer and called {@link #prepare} first.
     */
    public final void draw(GL10 gl, float x, float y, float w, float h, float rot) {
        gl.glPushMatrix();
        gl.glTranslatef(x, y, 0);
        gl.glRotatef(rot, 0, 0, 1);
        // Scale applies first (matrices post-multiply). z-scale of 1, not 0:
        // a 0 z-scale made the modelview matrix singular (harmless only because
        // the quad's vertices all have z == 0).
        gl.glScalef(w, h, 1);
        gl.glDrawArrays(GL10.GL_TRIANGLE_STRIP, 0, 4);
        gl.glPopMatrix();
    }
}

解决方案

You have all the data you need to compute that. It seems you use the coordinate system with top left (0,0) and bottom right at (w,h). The touch coordinates must be transformed into the same system such as touchX*(w/screenWidth), similar for vertical coordinate.

The position of your texture is also defined with center, static coordinates and scale which should be enough to find the actual positions of the texture vertices.

Now consider you have point touch and your texture border values as left, right, bottom, top.

bool didHit = touch.x>=left && touch.x<=right && touch.y>=bottom && touch.y<=top;

这篇关于如何设置onTouch监听器在Android中的OpenGL-ES纹理绘制的文章就介绍到这了,希望我们推荐的答案对大家有所帮助,也希望大家多多支持IT屋!

查看全文
登录 关闭
扫码关注1秒登录
发送“验证码”获取 | 15天全站免登陆