Android: using OpenCV VideoCapture in service

Problem description

I'm using a service that is started when the Android device boots, because I don't need a visible activity. This works fine so far. But now I'm trying to open the camera (in MyService.onStart) and do some basic image processing. I understand that the default Android camera class needs a surface for the video preview, which is why I want to use OpenCV's VideoCapture instead.

But I get this error:

No implementation found for native Lorg/opencv/highgui/VideoCapture;.n_VideoCapture:(I)J

I'm wondering if this is because I don't call the following line, which the OpenCV examples use in their main Activity. The question is how to integrate this into my service, and when to initialize the VideoCapture member.

OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_5, this, mLoaderCallback);

Here's my code so far. Most of the OpenCV code is taken from OpenCV's NativeCameraView and CameraBridgeViewBase.

package com.example.boot;

import org.opencv.android.Utils;
import org.opencv.core.Mat;
import org.opencv.core.Size;
import org.opencv.highgui.Highgui;
import org.opencv.highgui.VideoCapture;

import android.app.Service;
import android.content.Intent;
import android.graphics.Bitmap;
import android.os.IBinder;
import android.util.Log;
import android.widget.Toast;

public final class MyService extends Service
{
    private static final String TAG = "MyService";
    private boolean mStopThread;
    private Thread mThread;
    private VideoCapture mCamera;
    private int mFrameWidth;
    private int mFrameHeight;
    private int mCameraIndex = -1;
    private Bitmap mCacheBitmap;

    @Override
    public IBinder onBind(Intent intent) {
        return null;
    }

    @Override
    public void onDestroy() {

        this.disconnectCamera();

        Toast.makeText(this, "service stopped", Toast.LENGTH_LONG).show();
        Log.d(TAG, "onDestroy");
    }

    @Override
    public void onStart(Intent intent, int startid)
    {           
        Log.d(TAG, "service.onStart: begin");

        try
        {
            if (!connectCamera(640, 480))
                Log.e(TAG, "Could not connect camera");
            else
                Log.d(TAG, "Camera successfully connected");
        }
        catch(Exception e)
        {
            Log.e(TAG, "MyServer.connectCamera throws an exception: " + e.getMessage());
        }

        Toast.makeText(this, "service started", Toast.LENGTH_LONG).show();
        Log.d(TAG, "service.onStart: end");
    }

    private boolean connectCamera(int width, int height) {
        /* First step - initialize camera connection */
        if (!initializeCamera(width, height))
            return false;

        /* now we can start update thread */
        mThread = new Thread(new CameraWorker());
        mThread.start();

        return true;
    }

    private boolean initializeCamera(int width, int height) {
        synchronized (this) {

            if (mCameraIndex == -1)
                mCamera = new VideoCapture(Highgui.CV_CAP_ANDROID);
            else
                mCamera = new VideoCapture(Highgui.CV_CAP_ANDROID + mCameraIndex);

            if (mCamera == null)
                return false;

            if (!mCamera.isOpened())
                return false;

            //java.util.List<Size> sizes = mCamera.getSupportedPreviewSizes();

            /* Select the size that fits surface considering maximum size allowed */
            Size frameSize = new Size(width, height);

            mFrameWidth = (int)frameSize.width;
            mFrameHeight = (int)frameSize.height;

            AllocateCache();

            mCamera.set(Highgui.CV_CAP_PROP_FRAME_WIDTH, frameSize.width);
            mCamera.set(Highgui.CV_CAP_PROP_FRAME_HEIGHT, frameSize.height);
        }

        Log.i(TAG, "Selected camera frame size = (" + mFrameWidth + ", " + mFrameHeight + ")");

        return true;
    }

    protected void AllocateCache()
    {
        mCacheBitmap = Bitmap.createBitmap(mFrameWidth, mFrameHeight, Bitmap.Config.ARGB_8888);
    }

    private void releaseCamera() {
        synchronized (this) {
            if (mCamera != null) {
                mCamera.release();
            }
        }
    }

    private void disconnectCamera() {
        /* 1. We need to stop the thread which updates the frames
         * 2. Stop the camera and release it
         */
        try {
            mStopThread = true;
            mThread.join();
        } catch (InterruptedException e) {
            e.printStackTrace();
        } finally {
            mThread =  null;
            mStopThread = false;
        }

        /* Now release camera */
        releaseCamera();
    }

    protected void deliverAndDrawFrame(NativeCameraFrame frame) 
    {
        Mat modified = frame.rgba();

        boolean bmpValid = true;
        if (modified != null) {
            try {
                Utils.matToBitmap(modified, mCacheBitmap);
            } catch(Exception e) {
                Log.e(TAG, "Mat type: " + modified);
                Log.e(TAG, "Bitmap type: " + mCacheBitmap.getWidth() + "*" + mCacheBitmap.getHeight());
                Log.e(TAG, "Utils.matToBitmap() throws an exception: " + e.getMessage());
                bmpValid = false;
            }
        }
    }    

    private class NativeCameraFrame 
    {
        public Mat rgba() {
            mCapture.retrieve(mRgba, Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA);
            return mRgba;
        }

        public Mat gray() {
            mCapture.retrieve(mGray, Highgui.CV_CAP_ANDROID_GREY_FRAME);
            return mGray;
        }

        public NativeCameraFrame(VideoCapture capture) {
            mCapture = capture;
            mGray = new Mat();
            mRgba = new Mat();
        }

        private VideoCapture mCapture;
        private Mat mRgba;
        private Mat mGray;
    };

    private class CameraWorker implements Runnable 
    {
        public void run() 
        {
            do 
            {
                if (!mCamera.grab()) {
                    Log.e(TAG, "Camera frame grab failed");
                    break;
                }

                deliverAndDrawFrame(new NativeCameraFrame(mCamera));

            } while (!mStopThread);
        }
    }
}

Solution

The line you mention (initAsync) is actually used to load the OpenCV Manager, which in turn loads the native OpenCV library. That is why the native n_VideoCapture method cannot be found: the library simply isn't loaded yet when you construct the VideoCapture. Loading should be the very first thing you do, so the call should probably go at the beginning of onStart().
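
For illustration, here is a minimal sketch of that wiring, assuming the standard OpenCV 2.4 BaseLoaderCallback pattern: onStart() only kicks off initAsync, and the camera is connected from the loader callback once the manager reports SUCCESS. connectCamera() is the method from the question above; the log-only failure branch is my own assumption, since BaseLoaderCallback's default error handling shows dialogs and is designed around an Activity, not a Service.

package com.example.boot;

import org.opencv.android.BaseLoaderCallback;
import org.opencv.android.LoaderCallbackInterface;
import org.opencv.android.OpenCVLoader;

import android.app.Service;
import android.content.Intent;
import android.os.IBinder;
import android.util.Log;

public final class MyService extends Service
{
    private static final String TAG = "MyService";

    // Invoked by the OpenCV Manager once the native libraries are loaded.
    private final BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
        @Override
        public void onManagerConnected(int status) {
            if (status == LoaderCallbackInterface.SUCCESS) {
                // The native side of VideoCapture exists now, so this is
                // the earliest safe point to open the camera.
                if (!connectCamera(640, 480))
                    Log.e(TAG, "Could not connect camera");
            } else {
                // Assumption: just log in a service; the default handling in
                // BaseLoaderCallback shows dialogs and expects an Activity.
                Log.e(TAG, "OpenCV Manager connection failed, status = " + status);
            }
        }
    };

    @Override
    public IBinder onBind(Intent intent) {
        return null;
    }

    @Override
    public void onStart(Intent intent, int startid)
    {
        Log.d(TAG, "service.onStart: begin");

        // Load the OpenCV Manager first; do NOT touch VideoCapture here.
        // Camera setup continues in mLoaderCallback.
        if (!OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_5, this, mLoaderCallback))
            Log.e(TAG, "Could not start OpenCV Manager initialization");
    }

    private boolean connectCamera(int width, int height) {
        // ... unchanged from the question above ...
        return true;
    }
}

With this ordering, new VideoCapture(Highgui.CV_CAP_ANDROID) runs only after the native library has been loaded, which is exactly what the "No implementation found for native ... n_VideoCapture" error was complaining about.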
