Android App Tracking Points: Video.calcOpticalFlow(...) method not working properly?


Problem Description


I am currently trying to create an Android app using OpenCV that allows the user to track points on moving objects with the smartphone camera. Analogous C++ code that does exactly what I am looking for can be found at the following link: OpticalFlow C++ Sample Code

I have been Googling and looking around on Stack Overflow, but I still can't figure out why my code is not working. I am able to place points on the screen every time I press on a certain spot, but the points seem motionless even as I move objects in front of the camera. The method used to calculate the optical flow is the following:

void org.opencv.video.Video.calcOpticalFlowPyrLK(Mat prevImg, Mat nextImg, MatOfPoint2f prevPts, MatOfPoint2f nextPts, MatOfByte status, MatOfFloat err)
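For reference, here is a minimal, self-contained sketch of how this call is typically used on two consecutive grayscale frames; the trackPoint helper and its enclosing class are purely illustrative and not part of the code below. After the call, status holds 1 for every input point whose flow was actually found:

import org.opencv.core.Mat;
import org.opencv.core.MatOfByte;
import org.opencv.core.MatOfFloat;
import org.opencv.core.MatOfPoint2f;
import org.opencv.core.Point;
import org.opencv.video.Video;

public class FlowSketch {
    // Hypothetical helper: track a single point from prevGray into nextGray.
    // Both Mats are expected to be 8-bit single-channel (grayscale) images.
    static Point trackPoint(Mat prevGray, Mat nextGray, Point p) {
        MatOfPoint2f prevPts = new MatOfPoint2f(p);
        MatOfPoint2f nextPts = new MatOfPoint2f();
        MatOfByte status = new MatOfByte();
        MatOfFloat err = new MatOfFloat();

        Video.calcOpticalFlowPyrLK(prevGray, nextGray, prevPts, nextPts, status, err);

        // status is 1 for points whose flow was found, 0 for points that were lost
        byte[] found = status.toArray();
        Point[] tracked = nextPts.toArray();
        return (found.length > 0 && found[0] == 1) ? tracked[0] : null;
    }
}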

I believe I am passing the exact parameters needed to calculate the optical flow of consecutive images, but for some reason it's not working. Below is my code:

package org.opencv.UActivity;

//INCLUDE FILES
...

public class U2Activity extends Activity implements OnTouchListener,CvCameraViewListener2{



  private static final String  TAG              = "OCVSample::Activity";

  private Mat                       nextGray,Rscale;  
  private Mat                       prevGray;
  private MatOfPoint2f              prev2D,next2D;
  private MatOfByte                 status; 
  private MatOfFloat                err; 
  private Scalar                    color;

  private CameraBridgeViewBase mOpenCvCameraView;
  private BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
      @Override
      public void onManagerConnected(int status) {
          switch (status) {
              case LoaderCallbackInterface.SUCCESS:
              {
                  Log.i(TAG, "OpenCV loaded successfully");
                  mOpenCvCameraView.enableView();
                  mOpenCvCameraView.setOnTouchListener(U2Activity.this);
              } break;
              default:
              {
                  super.onManagerConnected(status);
              } break;
          }
      }
  };

  public U2Activity() {
      Log.i(TAG, "Instantiated new " + this.getClass());
  }

  /** Called when the activity is first created. */
  @Override
  public void onCreate(Bundle savedInstanceState) {
      Log.i(TAG, "called onCreate");
      super.onCreate(savedInstanceState);
      requestWindowFeature(Window.FEATURE_NO_TITLE);
      getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);

      setContentView(R.layout.u2_surface_view);

      mOpenCvCameraView = (CameraBridgeViewBase) findViewById(R.id.u2_activity_surface_view);
      mOpenCvCameraView.setCvCameraViewListener(this);

      color = new Scalar(0, 255, 0);

  }

  @Override
  public void onPause()
  {
      super.onPause();
      if (mOpenCvCameraView != null)
          mOpenCvCameraView.disableView();
  }

  @Override
  public void onResume()
  {
      super.onResume();
      OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_3, this, mLoaderCallback);
  }

  public void onDestroy() {
      super.onDestroy();
      if (mOpenCvCameraView != null)
          mOpenCvCameraView.disableView();
  }

  public void onCameraViewStarted(int width, int height) {
      nextGray = new Mat(height, width, CvType.CV_8UC1); //unsigned char
      Rscale = new Mat(height, width, CvType.CV_8UC1);
      prevGray = new Mat(height, width, CvType.CV_8UC1);

      prev2D = new MatOfPoint2f(new Point());
      next2D = new MatOfPoint2f(new Point());                           
      status = new MatOfByte();                             
      err = new MatOfFloat();   
  }

  public void onCameraViewStopped() {
      nextGray.release();
      Rscale.release();
  }

  public boolean onTouch(View v, MotionEvent event) {

      int cols = nextGray.cols();
      int rows = nextGray.rows();

      int xOffset = (mOpenCvCameraView.getWidth() - cols) / 2;
      int yOffset = (mOpenCvCameraView.getHeight() - rows) / 2;

      int x = (int)event.getX() - xOffset;
      int y = (int)event.getY() - yOffset;

      if ((x < 0) || (y < 0) || (x > cols) || (y > rows)) return false;

      prev2D.push_back(new MatOfPoint2f(new Point((double)x,(double)y)));
      next2D.push_back(new MatOfPoint2f(new Point()));

      return false; // don't need subsequent touch events
  }

  public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
      nextGray = inputFrame.gray(); //get current image
      Rscale = nextGray; //make a copy of current image
      if(prevGray.empty()) prevGray = nextGray; //on start there is no prevGray. Copy current.
      Video.calcOpticalFlowPyrLK(prevGray,nextGray,prev2D,next2D,status,err);  //Calc the Optical Flow
      prevGray = nextGray; //Overwrite old Image (prevGray)
      prev2D = next2D; //Overwrite old point coordinates
      for(int i=0;i<next2D.toArray().length;i++){ //Draw the points in the image
          Core.circle(Rscale, next2D.toArray()[i], 3, color);
      }
      return Rscale;   
  }
}

Solution

SOLVED:

I changed:

  prevGray = nextGray;
  prev2D = next2D;

to:

  nextGray.copyTo(prevGray);
  next2D.copyTo(prev2D);

I hope it helps anyone encountering similar problems.
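The reason this fix works: a plain = between two Mat or MatOfPoint2f variables copies only the reference, not the underlying data, so prevGray/prev2D ended up aliasing the current frame's objects and the flow was computed between effectively identical inputs; copyTo duplicates the actual data. Below is a sketch of how the corrected onCameraFrame might look with that change applied (it reuses the fields from the class above; only the copyTo lines differ from the original):

  public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
      nextGray = inputFrame.gray();                     // current grayscale frame
      Rscale = nextGray;                                // frame the points will be drawn on
      if (prevGray.empty()) nextGray.copyTo(prevGray);  // first frame: seed the previous image

      // Track the previous frame's points into the current frame
      Video.calcOpticalFlowPyrLK(prevGray, nextGray, prev2D, next2D, status, err);

      // Deep-copy the data instead of the reference, so on the next callback
      // prevGray/prev2D really hold the previous frame and point positions
      nextGray.copyTo(prevGray);
      next2D.copyTo(prev2D);

      for (Point p : next2D.toArray()) {                // draw the tracked points
          Core.circle(Rscale, p, 3, color);
      }
      return Rscale;
  }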
