如何使用onSensorChanged传感器数据结合的OpenGL [英] How to use onSensorChanged sensor data in combination with OpenGL

查看:504
本文介绍了如何使用onSensorChanged传感器数据结合的OpenGL的处理方法,对大家解决问题具有一定的参考价值,需要的朋友们下面随着小编来一起学习吧!

问题描述

(编辑:我已经把效果最好的方法加入到我的增强现实框架中,并且现在也把陀螺仪考虑在内,这使它更加稳定:DroidAR 框架)

我已经写了一些测试程序来了解如何从你的获取数据SensorEventListener.onSensorChanged()计算旋转角度。 我真的希望你能完成我的解决方案,以帮助人们谁都会有像我一样的问题。这里是code,我觉得看完后你就会明白了。

随意改变,主要的想法是实现几个方法来定向角发送到OpenGL视图,或任何其他目标而需要它。

方法1至4是可以工作的,它们将 rotationMatrix 直接发送到 OpenGL 视图。

方法6现在也能工作了,但我无法解释为什么旋转必须按 Y X Z 的顺序进行……

所有其他方法都不能正常工作或越野车,我希望有人知道,让他们working.I认为最好的方法是方法5,如果它会工作,因为这将是最容易理解的,但我不知道怎么样高效是。完整的code不是最优化的,所以我建议不使用它,因为它是在你的项目中。

在这里它是:

  / **
 *此类提供如何使用基本的演示
 * {@link android.hardware.SensorManager的SensorManager} API绘制一个三维
 * 罗盘。
 * /
公共类SensorToOpenGlTests扩展活动实现渲染器,
  SensorEventListener {

 私有静态最终布尔TRY_TRANSPOSED_VERSION = FALSE;

 / *
  * MODUS概述:
  *
  * 1  -  unbufferd数据直接从旋转矩阵的transfaired
  *模型视图矩阵
  *
  * 2  - 缓存版本1,其中加速和磁力计是
  *缓冲
  *
  * 3  - 缓冲的1.0版本,其中仅磁缓冲
  *
  * 4  - 缓冲的1.0版本,其中仅加速缓冲
  *
  * 5  - 使用方位传感器,并设置角度如何旋转
  *摄像头​​glrotate()
  *
  * 6  - 使用旋转矩阵来计算的角度
  *
  * 7到12  - 每一种可能性如何rotationMatrix可以构造
  *在SensorManager.getRotationMatrix(见
  * http://www.songho.ca/opengl/gl_anglestoaxes.html#anglestoaxes所有
  *可能性)
  * /

 私有静态诠释MODUS = 2;

 私人GLSurfaceView openglView;
 私人FloatBuffer vertexBuffer;
 私人ByteBuffer的indexBuffer;
 私人FloatBuffer colorBuffer;

 私人的SensorManager mSensorManager;
 私人浮法[] rotationMatrix =新的浮动[16];
 私人浮法[] accelGData =新的浮动[3];
 私人浮法[] bufferedAccelGData =新的浮动[3];
 私人浮法[] magnetData =新的浮动[3];
 私人浮法[] bufferedMagnetData =新的浮动[3];
 私人浮法[] orientationData =新的浮动[3];

 //私人浮法[] MI =新的浮动[16];

 私人浮法[] resultingAngles =新的浮动[3];

 私人诠释mCount;

 最后静浮rad2deg =(浮点)(180.0f / Math.PI);

 私人布尔景观;

 公共SensorToOpenGlTests(){
 }

 / **调用方式创建的第一个活动。 * /
 @覆盖
 公共无效的onCreate(包savedInstanceState){
  super.onCreate(savedInstanceState);

  mSensorManager =(的SensorManager)getSystemService(Context.SENSOR_SERVICE);
  openglView =新GLSurfaceView(本);
  openglView.setRenderer(本);
  的setContentView(openglView);
 }

 @覆盖
 保护无效onResume(){
  //理想的游戏应该实现onResume()和的onPause()
  //采取适当行动活动时失去重心
  super.onResume();
  openglView.onResume();

  如果(((窗口管理器)getSystemService(WINDOW_SERVICE))
    .getDefaultDisplay()。getOrientation()== 1){
   景观=真;
  } 其他 {
   景观= FALSE;
  }

  mSensorManager.registerListener(这一点,mSensorManager
    .getDefaultSensor(Sensor.TYPE_ACCELEROMETER)
    SensorManager.SENSOR_DELAY_GAME);
  mSensorManager.registerListener(这一点,mSensorManager
    .getDefaultSensor(Sensor.TYPE_MAGNETIC_FIELD)
    SensorManager.SENSOR_DELAY_GAME);
  mSensorManager.registerListener(这一点,mSensorManager
    .getDefaultSensor(Sensor.TYPE_ORIENTATION)
    SensorManager.SENSOR_DELAY_GAME);
 }

 @覆盖
 保护无效的onPause(){
  //理想的游戏应该实现onResume()和的onPause()
  //采取适当行动活动时失去重心
  super.onPause();
  openglView.onPause();
  mSensorManager.unregisterListener(本);
 }

 公众诠释[] getConfigSpec(){
  //我们希望有一个深度缓冲,不关心
  //颜色缓冲区的细节。
  INT [] configSpec = {EGL10.EGL_DEPTH_SIZE,16,EGL10.EGL_NONE};
  返回configSpec;
 }

 公共无效onDrawFrame(GL10 GL){

  //清晰的画面和色彩缓存:
  gl.glClear(GL10.GL_COLOR_BUFFER_BIT | GL10.GL_DEPTH_BUFFER_BIT);
  //设置目标矩阵模型视图矩阵:
  gl.glMatrixMode(GL10.GL_MODELVIEW);
  //初始化模型视图矩阵:
  gl.glLoadIdentity();
  //将相机走一点点:

  如果((MODUS == 1)||(MODUS == 2)||(MODUS == 3)||(MODUS == 4)){

   如果(横向){
    //在横向模式下首先重新映射rotationMatrix使用前
    与glMultMatrixf //它:
    浮动[]结果=新的浮动[16];
    SensorManager.remapCoordinateSystem(rotationMatrix,
      SensorManager.AXIS_Y,SensorManager.AXIS_MINUS_X,
      结果);
    gl.glMultMatrixf(结果,0);
   } 其他 {
    gl.glMultMatrixf(rotationMatrix,0);
   }
  } 其他 {
   //在所有其它模式下用手做旋转
   //订单Y X Z是很重要的!
   gl.glRotatef(resultingAngles [2],0,1,0);
   gl.glRotatef(,resultingAngles [1] 1,0,0);
   gl.glRotatef(resultingAngles [0],0,0,1);
  }

  //移动轴模拟增强的行为:
  gl.glTranslatef(0,2,0);

  //画出3轴在屏幕上:
  gl.glVertexPointer(3,GL_FLOAT,0,vertexBuffer);
  gl.glColorPointer(4,GL_FLOAT,0,colorBuffer);
  gl.glDrawElements(GL_LINES,6,GL_UNSIGNED_BYTE,indexBuffer);
 }

 公共无效onSurfaceChanged(GL10 GL,诠释的宽度,高度INT){
  gl.glViewport(0,0,宽度,高度);
  浮动R =(浮点)宽/高;
  gl.glMatrixMode(GL10.GL_PROJECTION);
  gl.glLoadIdentity();
  gl.glFrustumf(-r,R,-1,1,1,10);
 }

 公共无效onSurfaceCreated(GL10 GL,EGLConfig配置){
  gl.glDisable(GL10.GL_DITHER);
  gl.glClearColor(1,1,1,1);
  gl.glEnable(GL10.GL_CULL_FACE);
  gl.glShadeModel(GL10.GL_SMOOTH);
  gl.glEnable(GL10.GL_DEPTH_TEST);

  gl.glEnableClientState(GL10.GL_VERTEX_ARRAY);
  gl.glEnableClientState(GL10.GL_COLOR_ARRAY);

  //装载3轴和有颜色:
  浮顶点[] = {0,0,0,1,0,0,0,1,0,0,0,1};
  浮色[] = {0,0,0,0,1,0,0,1,0,1,0,1,0,0,1,1};
  字节索引[] = {0,1,0,2,0,3};

  ByteBuffer的VBB;
  VBB = ByteBuffer.allocateDirect(vertices.length * 4);
  vbb.order(ByteOrder.nativeOrder());
  vertexBuffer = vbb.asFloatBuffer();
  vertexBuffer.put(顶点);
  vertexBuffer.position(0);

  VBB = ByteBuffer.allocateDirect(colors.length * 4);
  vbb.order(ByteOrder.nativeOrder());
  colorBuffer = vbb.asFloatBuffer();
  colorBuffer.put(颜色);
  colorBuffer.position(0);

  indexBuffer = ByteBuffer.allocateDirect(indices.length);
  indexBuffer.put(指标);
  indexBuffer.position(0);
 }

 公共无效onAccuracyChanged(传感器传感器,诠释精度){
 }

 公共无效onSensorChanged(SensorEvent事件){

  //将新值:
  loadNewSensorData(事件);

  如果(MODUS == 1){
   SensorManager.getRotationMatrix(rotationMatrix,空,accelGData,
     magnetData);
  }

  如果(MODUS == 2){
   rootMeanSquareBuffer(bufferedAccelGData,accelGData);
   rootMeanSquareBuffer(bufferedMagnetData,magnetData);
   SensorManager.getRotationMatrix(rotationMatrix,空,
     bufferedAccelGData,bufferedMagnetData);
  }

  如果(MODUS == 3){
   rootMeanSquareBuffer(bufferedMagnetData,magnetData);
   SensorManager.getRotationMatrix(rotationMatrix,空,accelGData,
     bufferedMagnetData);
  }

  如果(MODUS == 4){
   rootMeanSquareBuffer(bufferedAccelGData,accelGData);
   SensorManager.getRotationMatrix(rotationMatrix,空,
     bufferedAccelGData,magnetData);
  }

  如果(MODUS == 5){
   //该模式使用来自取向:收到传感器数据
   // 传感器
   resultingAngles = orientationData.clone();
   如果((-90 GT; resultingAngles [1])||(resultingAngles [1]→90)){
    resultingAngles [1] = orientationData [0];
    resultingAngles [2] = orientationData [1];
    resultingAngles [0] = orientationData [2];
   }
  }

  如果(MODUS == 6){
   SensorManager.getRotationMatrix(rotationMatrix,空,accelGData,
     magnetData);
   最终浮动[] anglesInRadians =新的浮动[3];
   SensorManager.getOrientation(rotationMatrix,anglesInRadians);
   // TODO检查横向模式
   resultingAngles [0] = anglesInRadians [0] * rad2deg;
   resultingAngles [1] = anglesInRadians [1] * rad2deg;
   resultingAngles [2] = anglesInRadians [2] * -rad2deg;
  }

  如果(MODUS == 7){
   SensorManager.getRotationMatrix(rotationMatrix,空,accelGData,
     magnetData);

   rotationMatrix =转(rotationMatrix);
   / *
    *此假定旋转矩阵相乘在xyz的
    *为了接收* Ry的* RZ
    * /

   resultingAngles [2] =(浮子)(Math.asin(rotationMatrix [2]));
   最终浮=的CoSb(浮点)Math.cos(resultingAngles [2]);
   resultingAngles [2] = resultingAngles [2] * rad2deg;
   resultingAngles [0] =  - (浮子)(Math.acos(rotationMatrix [0] /的CoSb))
     * rad2deg;
   resultingAngles [1] =(浮子)(Math.acos(rotationMatrix [10] /的CoSb))
     * rad2deg;
  }

  如果(MODUS == 8){
   SensorManager.getRotationMatrix(rotationMatrix,空,accelGData,
     magnetData);
   rotationMatrix =转(rotationMatrix);
   / *
    *这个假定旋转矩阵相乘的ZYX
    * /

   resultingAngles [2] =(浮子)(Math.asin(-rotationMatrix [8]));
   最终浮=的CoSb(浮点)Math.cos(resultingAngles [2]);
   resultingAngles [2] = resultingAngles [2] * rad2deg;
   resultingAngles [1] =(浮子)(Math.acos(rotationMatrix [9] /的CoSb))
     * rad2deg;
   resultingAngles [0] =(浮点)(Math.asin(rotationMatrix [4] /的CoSb))
     * rad2deg;
  }

  如果(MODUS == 9){
   SensorManager.getRotationMatrix(rotationMatrix,空,accelGData,
     magnetData);
   rotationMatrix =转(rotationMatrix);
   / *
    *这个假定旋转矩阵相乘的ZXY
    *
    *注Z轴看起来不错,在这一个
    * /

   resultingAngles [1] =(浮子)(Math.asin(rotationMatrix [9]));
   最终浮动minusCosA =  - (浮点)Math.cos(resultingAngles [1]);
   resultingAngles [1] = resultingAngles [1] * rad2deg;
   resultingAngles [2] =(浮子)(Math.asin(rotationMatrix [8]
     / minusCosA))
     * rad2deg;
   resultingAngles [0] =(浮点)(Math.asin(rotationMatrix [1]
     / minusCosA))
     * rad2deg;
  }

  如果(MODUS == 10){
   SensorManager.getRotationMatrix(rotationMatrix,空,accelGData,
     magnetData);
   rotationMatrix =转(rotationMatrix);
   / *
    *此假定旋转矩阵相乘在YXZ
    * /

   resultingAngles [1] =(浮子)(Math.asin(-rotationMatrix [6]));
   最终浮COSA =(浮点)Math.cos(resultingAngles [1]);
   resultingAngles [1] = resultingAngles [1] * rad2deg;
   resultingAngles [2] =(浮子)(Math.asin(rotationMatrix [2] / COSA))
     * rad2deg;
   resultingAngles [0] =(浮点)(Math.acos(rotationMatrix [5] / COSA))
     * rad2deg;
  }

  如果(MODUS == 11){
   SensorManager.getRotationMatrix(rotationMatrix,空,accelGData,
     magnetData);
   rotationMatrix =转(rotationMatrix);
   / *
    *此假定旋转矩阵相乘在YZX
    * /

   resultingAngles [0] =(浮点)(Math.asin(rotationMatrix [4]));
   最终浮COSC =(浮点)Math.cos(resultingAngles [0]);
   resultingAngles [0] = resultingAngles [0] * rad2deg;
   resultingAngles [2] =(浮点)(Math.acos(rotationMatrix [0] / COSC))
     * rad2deg;
   resultingAngles [1] =(浮点)(Math.acos(rotationMatrix [5] / COSC))
     * rad2deg;
  }

  如果(MODUS == 12){
   SensorManager.getRotationMatrix(rotationMatrix,空,accelGData,
     magnetData);
   rotationMatrix =转(rotationMatrix);
   / *
    *这个假定旋转矩阵相乘在XZY
    * /

   resultingAngles [0] =(浮子)(Math.asin(-rotationMatrix [1]));
   最终浮COSC =(浮点)Math.cos(resultingAngles [0]);
   resultingAngles [0] = resultingAngles [0] * rad2deg;
   resultingAngles [2] =(浮点)(Math.acos(rotationMatrix [0] / COSC))
     * rad2deg;
   resultingAngles [1] =(浮点)(Math.acos(rotationMatrix [5] / COSC))
     * rad2deg;
  }
  logOutput();
 }

 / **
  *调换基质,因为它被transposted(反相,但在这里其
  *一样的,因为它的旋转矩阵)将用于OpenGL的
  *
  * @参数来源
  * @返回
  * /
 私人浮法[]转(浮动[]源){
  最终浮动[]结果= source.clone();
  如果(TRY_TRANSPOSED_VERSION){
   结果[1] =源[4];
   结果[2] =源[8];
   结果[4] =来源[1];
   结果[6] =源[9];
   结果[8] =源[2];
   结果[9] =源[6]。
  }
  //是在基质中的其它值不相关的转
  返回结果;
 }

 私人无效rootMeanSquareBuffer(浮法[]的目标,浮法[]值){

  最终浮动放大= 200.0f;
  float缓冲区= 20.0f;

  靶向[0] + =放大;
  目标[1] + =放大;
  目标[2] + =放大;
  值[0] + =放大;
  值[1] + =放大;
  数值[2] + =放大;

  目标[0] =(浮点)(数学
    .sqrt((目标[0] *靶[0] *缓冲液+值[0] *值[0])
      /(1 +缓冲)));
  目标[1] =(浮点)(数学
    .sqrt((目标[1] *目标[1] *缓冲液+值[1] *值[1])
      /(1 +缓冲)));
  目标[2] =(浮点)(数学
    .sqrt((目标[2] *目标[2] *缓冲液+值[2] *值[2])
      /(1 +缓冲)));

  靶向[0]  -  =放大;
  目标[1]  -  =放大;
  目标[2]  -  =放大;
  值[0]  -  =放大;
  值[1]  -  =放大;
  值[2]  -  =放大;
 }

 私人无效loadNewSensorData(SensorEvent事件){
  最后整型= event.sensor.getType();
  如果(类型== Sensor.TYPE_ACCELEROMETER){
   accelGData = event.values​​.clone();
  }
  如果(类型== Sensor.TYPE_MAGNETIC_FIELD){
   magnetData = event.values​​.clone();
  }
  如果(类型== Sensor.TYPE_ORIENTATION){
   orientationData = event.values​​.clone();
  }
 }

 私人无效logOutput(){
  如果(mCount ++盐; 30){
   mCount = 0;
   Log.d(北斗,yaw0:+(INT)(resultingAngles [0])
     +pitch1:+(int)的(resultingAngles [1])+roll2:
     +(int)的(resultingAngles [2]));
  }
 }
}
 

解决方案

分析你的code以上,在方法5要指定方向数据后,如下所示,

  resultingAngles [1] = orientationData [0]; //方向Z轴Y轴
resultingAngles [2] = orientationData [1]; //方向X轴Z轴
resultingAngles [0] = orientationData [2]; //方向Y轴X轴
 

您已经做Y轴Z X的方式旋转。尝试改变方向。

我想可能是问题出在那里..请让我知道。

请参阅文件事件值, http://developer.android.com/guide/topics/sensors/sensors_position.html

感谢您的艰巨的工作。

( edit: I added the best working approach in my augmented reality framework and now also take the gyroscope into account which makes it much more stable again: DroidAR framework )

I have written a TestSuite to find out how to calculate the rotation angles from the data you get in SensorEventListener.onSensorChanged(). I really hope you can complete my solution to help people who will have the same problems like me. Here is the code, I think you will understand it after reading it.

Feel free to change it, the main idea was to implement several methods to send the orientation angles to the opengl view or any other target which would need it.

method 1 to 4 are working, they are directly sending the rotationMatrix to the OpenGl view.

method 6 works now too, but I have no explanation why the rotation has to be done y x z..

All other methods are not working or are buggy, and I hope someone knows how to get them working. I think the best method would be method 5 if it would work, because it would be the easiest to understand, but I'm not sure how efficient it is. The complete code isn't optimized, so I recommend not using it as-is in your project.

here it is:

/**
 * This class provides a basic demonstration of how to use the
 * {@link android.hardware.SensorManager SensorManager} API to draw a 3D
 * compass.
 */
/**
 * Test activity demonstrating several strategies for turning accelerometer /
 * magnetometer / orientation-sensor readings into a rotation applied to the
 * OpenGL ES modelview matrix (visualised as a 3-axis "compass").
 * The active strategy is selected via the {@code MODUS} constant.
 *
 * This class provides a basic demonstration of how to use the
 * {@link android.hardware.SensorManager SensorManager} API to draw a 3D
 * compass.
 */
public class SensorToOpenGlTests extends Activity implements Renderer,
  SensorEventListener {

 // When true, transpose() actually swaps the off-diagonal rotation elements;
 // when false it is effectively a plain copy (used to compare both variants).
 private static final boolean TRY_TRANSPOSED_VERSION = false;

 /*
  * MODUS overview:
  * 
  * 1 - unbuffered data directly transferred from the rotation matrix to the
  * modelview matrix
  * 
  * 2 - buffered version of 1 where both acceleration and magnetometer are
  * buffered
  * 
  * 3 - buffered version of 1 where only magnetometer is buffered
  * 
  * 4 - buffered version of 1 where only acceleration is buffered
  * 
  * 5 - uses the orientation sensor and sets the angles how to rotate the
  * camera with glrotate()
  * 
  * 6 - uses the rotation matrix to calculate the angles
  * 
  * 7 to 12 - every possibility how the rotationMatrix could be constructed
  * in SensorManager.getRotationMatrix (see
  * http://www.songho.ca/opengl/gl_anglestoaxes.html#anglestoaxes for all
  * possibilities)
  */

 // Selects which of the strategies documented above is used.
 private static int MODUS = 2;

 private GLSurfaceView openglView;
 private FloatBuffer vertexBuffer;
 private ByteBuffer indexBuffer;
 private FloatBuffer colorBuffer;

 private SensorManager mSensorManager;
 // Latest 4x4 rotation matrix (16 floats) filled by
 // SensorManager.getRotationMatrix, consumed by glMultMatrixf in modes 1-4.
 private float[] rotationMatrix = new float[16];
 // Raw and smoothed sensor readings (x, y, z each).
 private float[] accelGData = new float[3];
 private float[] bufferedAccelGData = new float[3];
 private float[] magnetData = new float[3];
 private float[] bufferedMagnetData = new float[3];
 private float[] orientationData = new float[3];

 // private float[] mI = new float[16];

 // Euler angles in degrees used by the glRotatef branch (modes 5-12).
 private float[] resultingAngles = new float[3];

 // Frame counter used by logOutput() to throttle log messages.
 private int mCount;

 final static float rad2deg = (float) (180.0f / Math.PI);

 // True when the display reports a 90-degree (landscape) rotation; see onResume().
 private boolean landscape;

 public SensorToOpenGlTests() {
 }

 /** Called when the activity is first created. */
 @Override
 public void onCreate(Bundle savedInstanceState) {
  super.onCreate(savedInstanceState);

  mSensorManager = (SensorManager) getSystemService(Context.SENSOR_SERVICE);
  openglView = new GLSurfaceView(this);
  openglView.setRenderer(this);
  setContentView(openglView);
 }

 /**
  * Resumes the GL view, caches the current display orientation and
  * registers for accelerometer, magnetometer and orientation events.
  */
 @Override
 protected void onResume() {
  // Ideally a game should implement onResume() and onPause()
  // to take appropriate action when the activity loses focus
  super.onResume();
  openglView.onResume();

  // getOrientation() == 1 means the display is rotated 90 degrees
  // (landscape); onDrawFrame then remaps the sensor coordinate system.
  if (((WindowManager) getSystemService(WINDOW_SERVICE))
    .getDefaultDisplay().getOrientation() == 1) {
   landscape = true;
  } else {
   landscape = false;
  }

  mSensorManager.registerListener(this, mSensorManager
    .getDefaultSensor(Sensor.TYPE_ACCELEROMETER),
    SensorManager.SENSOR_DELAY_GAME);
  mSensorManager.registerListener(this, mSensorManager
    .getDefaultSensor(Sensor.TYPE_MAGNETIC_FIELD),
    SensorManager.SENSOR_DELAY_GAME);
  mSensorManager.registerListener(this, mSensorManager
    .getDefaultSensor(Sensor.TYPE_ORIENTATION),
    SensorManager.SENSOR_DELAY_GAME);
 }

 /** Pauses the GL view and unregisters all sensor listeners. */
 @Override
 protected void onPause() {
  // Ideally a game should implement onResume() and onPause()
  // to take appropriate action when the activity loses focus
  super.onPause();
  openglView.onPause();
  mSensorManager.unregisterListener(this);
 }

 /** @return an EGL config spec requesting a 16-bit depth buffer. */
 public int[] getConfigSpec() {
  // We want a depth buffer, don't care about the
  // details of the color buffer.
  int[] configSpec = { EGL10.EGL_DEPTH_SIZE, 16, EGL10.EGL_NONE };
  return configSpec;
 }

 /**
  * Renders one frame: applies the rotation computed by the active MODUS
  * (either the full rotation matrix for modes 1-4 or three glRotatef calls
  * for all other modes) and then draws the three coordinate axes.
  */
 public void onDrawFrame(GL10 gl) {

  // clear the color and depth buffers:
  gl.glClear(GL10.GL_COLOR_BUFFER_BIT | GL10.GL_DEPTH_BUFFER_BIT);
  // set target matrix to modelview matrix:
  gl.glMatrixMode(GL10.GL_MODELVIEW);
  // init modelview matrix:
  gl.glLoadIdentity();
  // move camera away a little bit:

  if ((MODUS == 1) || (MODUS == 2) || (MODUS == 3) || (MODUS == 4)) {

   if (landscape) {
    // in landscape mode first remap the rotationMatrix before using
    // it with glMultMatrixf:
    float[] result = new float[16];
    SensorManager.remapCoordinateSystem(rotationMatrix,
      SensorManager.AXIS_Y, SensorManager.AXIS_MINUS_X,
      result);
    gl.glMultMatrixf(result, 0);
   } else {
    gl.glMultMatrixf(rotationMatrix, 0);
   }
  } else {
   //in all other modes do the rotation by hand
   //the order y x z is important!
   gl.glRotatef(resultingAngles[2], 0, 1, 0);
   gl.glRotatef(resultingAngles[1], 1, 0, 0);
   gl.glRotatef(resultingAngles[0], 0, 0, 1);
  }

  //move the axis to simulate augmented behaviour:
  gl.glTranslatef(0, 2, 0);

  // draw the 3 axis on the screen:
  gl.glVertexPointer(3, GL_FLOAT, 0, vertexBuffer);
  gl.glColorPointer(4, GL_FLOAT, 0, colorBuffer);
  gl.glDrawElements(GL_LINES, 6, GL_UNSIGNED_BYTE, indexBuffer);
 }

 /** Rebuilds the projection matrix to match the new surface aspect ratio. */
 public void onSurfaceChanged(GL10 gl, int width, int height) {
  gl.glViewport(0, 0, width, height);
  float r = (float) width / height;
  gl.glMatrixMode(GL10.GL_PROJECTION);
  gl.glLoadIdentity();
  gl.glFrustumf(-r, r, -1, 1, 1, 10);
 }

 /**
  * One-time GL state setup plus creation of the vertex/color/index buffers
  * for the three axis lines.
  */
 public void onSurfaceCreated(GL10 gl, EGLConfig config) {
  gl.glDisable(GL10.GL_DITHER);
  gl.glClearColor(1, 1, 1, 1);
  gl.glEnable(GL10.GL_CULL_FACE);
  gl.glShadeModel(GL10.GL_SMOOTH);
  gl.glEnable(GL10.GL_DEPTH_TEST);

  gl.glEnableClientState(GL10.GL_VERTEX_ARRAY);
  gl.glEnableClientState(GL10.GL_COLOR_ARRAY);

  // define the 3 axes and their colors:
  float vertices[] = { 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1 };
  float colors[] = { 0, 0, 0, 0, 1, 0, 0, 1, 0, 1, 0, 1, 0, 0, 1, 1 };
  byte indices[] = { 0, 1, 0, 2, 0, 3 };

  // direct buffers in native byte order are required by the GL client API
  ByteBuffer vbb;
  vbb = ByteBuffer.allocateDirect(vertices.length * 4);
  vbb.order(ByteOrder.nativeOrder());
  vertexBuffer = vbb.asFloatBuffer();
  vertexBuffer.put(vertices);
  vertexBuffer.position(0);

  vbb = ByteBuffer.allocateDirect(colors.length * 4);
  vbb.order(ByteOrder.nativeOrder());
  colorBuffer = vbb.asFloatBuffer();
  colorBuffer.put(colors);
  colorBuffer.position(0);

  indexBuffer = ByteBuffer.allocateDirect(indices.length);
  indexBuffer.put(indices);
  indexBuffer.position(0);
 }

 /** Accuracy changes are ignored in this demo. */
 public void onAccuracyChanged(Sensor sensor, int accuracy) {
 }

 /**
  * Stores the incoming sensor values and recomputes either rotationMatrix
  * (modes 1-4) or resultingAngles (modes 5-12) according to MODUS.
  * Modes 7-12 each try a different Euler-angle extraction, one per possible
  * multiplication order of the three axis rotation matrices.
  */
 public void onSensorChanged(SensorEvent event) {

  // load the new values:
  loadNewSensorData(event);

  if (MODUS == 1) {
   SensorManager.getRotationMatrix(rotationMatrix, null, accelGData,
     magnetData);
  }

  if (MODUS == 2) {
   rootMeanSquareBuffer(bufferedAccelGData, accelGData);
   rootMeanSquareBuffer(bufferedMagnetData, magnetData);
   SensorManager.getRotationMatrix(rotationMatrix, null,
     bufferedAccelGData, bufferedMagnetData);
  }

  if (MODUS == 3) {
   rootMeanSquareBuffer(bufferedMagnetData, magnetData);
   SensorManager.getRotationMatrix(rotationMatrix, null, accelGData,
     bufferedMagnetData);
  }

  if (MODUS == 4) {
   rootMeanSquareBuffer(bufferedAccelGData, accelGData);
   SensorManager.getRotationMatrix(rotationMatrix, null,
     bufferedAccelGData, magnetData);
  }

  if (MODUS == 5) {
   // this mode uses the sensor data received from the orientation
   // sensor
   resultingAngles = orientationData.clone();
   if ((-90 > resultingAngles[1]) || (resultingAngles[1] > 90)) {
    resultingAngles[1] = orientationData[0];
    resultingAngles[2] = orientationData[1];
    resultingAngles[0] = orientationData[2];
   }
  }

  if (MODUS == 6) {
   SensorManager.getRotationMatrix(rotationMatrix, null, accelGData,
     magnetData);
   final float[] anglesInRadians = new float[3];
   SensorManager.getOrientation(rotationMatrix, anglesInRadians);
   //TODO check for landscape mode
   resultingAngles[0] = anglesInRadians[0] * rad2deg;
   resultingAngles[1] = anglesInRadians[1] * rad2deg;
   resultingAngles[2] = anglesInRadians[2] * -rad2deg;
  }

  if (MODUS == 7) {
   SensorManager.getRotationMatrix(rotationMatrix, null, accelGData,
     magnetData);

   rotationMatrix = transpose(rotationMatrix);
   /*
    * this assumes that the rotation matrices are multiplied in x y z
    * order Rx*Ry*Rz
    */

   resultingAngles[2] = (float) (Math.asin(rotationMatrix[2]));
   final float cosB = (float) Math.cos(resultingAngles[2]);
   resultingAngles[2] = resultingAngles[2] * rad2deg;
   resultingAngles[0] = -(float) (Math.acos(rotationMatrix[0] / cosB))
     * rad2deg;
   resultingAngles[1] = (float) (Math.acos(rotationMatrix[10] / cosB))
     * rad2deg;
  }

  if (MODUS == 8) {
   SensorManager.getRotationMatrix(rotationMatrix, null, accelGData,
     magnetData);
   rotationMatrix = transpose(rotationMatrix);
   /*
    * this assumes that the rotation matrices are multiplied in z y x
    */

   resultingAngles[2] = (float) (Math.asin(-rotationMatrix[8]));
   final float cosB = (float) Math.cos(resultingAngles[2]);
   resultingAngles[2] = resultingAngles[2] * rad2deg;
   resultingAngles[1] = (float) (Math.acos(rotationMatrix[9] / cosB))
     * rad2deg;
   resultingAngles[0] = (float) (Math.asin(rotationMatrix[4] / cosB))
     * rad2deg;
  }

  if (MODUS == 9) {
   SensorManager.getRotationMatrix(rotationMatrix, null, accelGData,
     magnetData);
   rotationMatrix = transpose(rotationMatrix);
   /*
    * this assumes that the rotation matrices are multiplied in z x y
    * 
    * note z axis looks good at this one
    */

   resultingAngles[1] = (float) (Math.asin(rotationMatrix[9]));
   final float minusCosA = -(float) Math.cos(resultingAngles[1]);
   resultingAngles[1] = resultingAngles[1] * rad2deg;
   resultingAngles[2] = (float) (Math.asin(rotationMatrix[8]
     / minusCosA))
     * rad2deg;
   resultingAngles[0] = (float) (Math.asin(rotationMatrix[1]
     / minusCosA))
     * rad2deg;
  }

  if (MODUS == 10) {
   SensorManager.getRotationMatrix(rotationMatrix, null, accelGData,
     magnetData);
   rotationMatrix = transpose(rotationMatrix);
   /*
    * this assumes that the rotation matrices are multiplied in y x z
    */

   resultingAngles[1] = (float) (Math.asin(-rotationMatrix[6]));
   final float cosA = (float) Math.cos(resultingAngles[1]);
   resultingAngles[1] = resultingAngles[1] * rad2deg;
   resultingAngles[2] = (float) (Math.asin(rotationMatrix[2] / cosA))
     * rad2deg;
   resultingAngles[0] = (float) (Math.acos(rotationMatrix[5] / cosA))
     * rad2deg;
  }

  if (MODUS == 11) {
   SensorManager.getRotationMatrix(rotationMatrix, null, accelGData,
     magnetData);
   rotationMatrix = transpose(rotationMatrix);
   /*
    * this assumes that the rotation matrices are multiplied in y z x
    */

   resultingAngles[0] = (float) (Math.asin(rotationMatrix[4]));
   final float cosC = (float) Math.cos(resultingAngles[0]);
   resultingAngles[0] = resultingAngles[0] * rad2deg;
   resultingAngles[2] = (float) (Math.acos(rotationMatrix[0] / cosC))
     * rad2deg;
   resultingAngles[1] = (float) (Math.acos(rotationMatrix[5] / cosC))
     * rad2deg;
  }

  if (MODUS == 12) {
   SensorManager.getRotationMatrix(rotationMatrix, null, accelGData,
     magnetData);
   rotationMatrix = transpose(rotationMatrix);
   /*
    * this assumes that the rotation matrices are multiplied in x z y
    */

   resultingAngles[0] = (float) (Math.asin(-rotationMatrix[1]));
   final float cosC = (float) Math.cos(resultingAngles[0]);
   resultingAngles[0] = resultingAngles[0] * rad2deg;
   resultingAngles[2] = (float) (Math.acos(rotationMatrix[0] / cosC))
     * rad2deg;
   resultingAngles[1] = (float) (Math.acos(rotationMatrix[5] / cosC))
     * rad2deg;
  }
  logOutput();
 }

 /**
  * Transposes the matrix because it was transposed (inverted, but here it is
  * the same, because it is a rotation matrix) to be used for OpenGL.
  * Only active when {@link #TRY_TRANSPOSED_VERSION} is true; otherwise the
  * input is returned as an unmodified copy.
  * 
  * @param source the 16-element matrix to (conditionally) transpose
  * @return a copy of source, with the rotation elements swapped if enabled
  */
 private float[] transpose(float[] source) {
  final float[] result = source.clone();
  if (TRY_TRANSPOSED_VERSION) {
   result[1] = source[4];
   result[2] = source[8];
   result[4] = source[1];
   result[6] = source[9];
   result[8] = source[2];
   result[9] = source[6];
  }
  // the other values in the matrix are not relevant for rotations
  return result;
 }

 /**
  * Smooths new sensor values into target with a weighted root-mean-square
  * (weight 20:1 in favour of the old value). The temporary +200 offset
  * shifts all components positive so the square root is well-defined for
  * negative readings; it is removed again afterwards. Note: target is
  * updated in place and values is restored to its original content.
  */
 private void rootMeanSquareBuffer(float[] target, float[] values) {

  final float amplification = 200.0f;
  float buffer = 20.0f;

  target[0] += amplification;
  target[1] += amplification;
  target[2] += amplification;
  values[0] += amplification;
  values[1] += amplification;
  values[2] += amplification;

  target[0] = (float) (Math
    .sqrt((target[0] * target[0] * buffer + values[0] * values[0])
      / (1 + buffer)));
  target[1] = (float) (Math
    .sqrt((target[1] * target[1] * buffer + values[1] * values[1])
      / (1 + buffer)));
  target[2] = (float) (Math
    .sqrt((target[2] * target[2] * buffer + values[2] * values[2])
      / (1 + buffer)));

  target[0] -= amplification;
  target[1] -= amplification;
  target[2] -= amplification;
  values[0] -= amplification;
  values[1] -= amplification;
  values[2] -= amplification;
 }

 /** Copies the event values into the field matching the sensor type. */
 private void loadNewSensorData(SensorEvent event) {
  final int type = event.sensor.getType();
  if (type == Sensor.TYPE_ACCELEROMETER) {
   accelGData = event.values.clone();
  }
  if (type == Sensor.TYPE_MAGNETIC_FIELD) {
   magnetData = event.values.clone();
  }
  if (type == Sensor.TYPE_ORIENTATION) {
   orientationData = event.values.clone();
  }
 }

 /** Logs the current angles roughly every 30th sensor event. */
 private void logOutput() {
  if (mCount++ > 30) {
   mCount = 0;
   Log.d("Compass", "yaw0: " + (int) (resultingAngles[0])
     + "  pitch1: " + (int) (resultingAngles[1]) + "  roll2: "
     + (int) (resultingAngles[2]));
  }
 }
}

解决方案

After analyzing your code above: in method 5 you are assigning the orientation data as follows,

resultingAngles[1] = orientationData[0]; // orientation z axis to y axis
resultingAngles[2] = orientationData[1]; // orientation x axis to z axis 
resultingAngles[0] = orientationData[2]; // orientation y axis to x axis

You have done rotation in y z x manner. Try to change the orientation..

I think it might be the problem there.. Please check and let me know..

Please refer to the documentation for the event values: http://developer.android.com/guide/topics/sensors/sensors_position.html

Thanks for your tough work..

这篇关于如何使用onSensorChanged传感器数据结合的OpenGL的文章就介绍到这了,希望我们推荐的答案对大家有所帮助,也希望大家多多支持IT屋!

查看全文
登录 关闭
扫码关注1秒登录
发送“验证码”获取 | 15天全站免登陆