How to take Kinect video and depth images with OpenCV (C++)?


Problem description

I'm new to OpenCV (C++) and the Kinect. I'm trying to capture video images from the Kinect in C++. I've searched everywhere but haven't found anything, because the examples people have written use OpenNI or OpenKinect, and I don't want to use those libraries. How can I do it?

Thanks!

Recommended answer

You could use the Kinect for Windows SDK to grab the frames and then convert them to an OpenCV format. See this code example, which does that in Visual Studio (found in this thread on the Microsoft forums); unfortunately I don't have a Kinect right now to test the code:

#include "stdafx.h"

#define COLOR_WIDTH 640    
#define COLOR_HIGHT 480    
#define DEPTH_WIDTH 320    
#define DEPTH_HIGHT 240    
#define SKELETON_WIDTH 640    
#define SKELETON_HIGHT 480    
#define CHANNEL 3

BYTE buf[DEPTH_WIDTH * DEPTH_HIGHT * CHANNEL];

// Grab the next frame from the Kinect color stream and display it with OpenCV.
int drawColor(HANDLE h, IplImage* color)
{
    const NUI_IMAGE_FRAME * pImageFrame = NULL;
    HRESULT hr = NuiImageStreamGetNextFrame(h, 0, &pImageFrame);
    if (FAILED(hr)) 
    {
        cout << "Get Image Frame Failed" << endl;
        return -1;
    }
    NuiImageBuffer * pTexture = pImageFrame->pFrameTexture;
    KINECT_LOCKED_RECT LockedRect;
    pTexture->LockRect(0, &LockedRect, NULL, 0);
    if (LockedRect.Pitch != 0)
    {
        BYTE * pBuffer = (BYTE*) LockedRect.pBits;
        cvSetData(color, pBuffer, LockedRect.Pitch);
    }
    cvShowImage("color image", color);
    NuiImageStreamReleaseFrame(h, pImageFrame);
    return 0;
}

// Grab the next depth frame, colour-code it by player index and depth value,
// and display it with OpenCV.
int drawDepth(HANDLE h, IplImage* depth)
{
    const NUI_IMAGE_FRAME * pImageFrame = NULL;
    HRESULT hr = NuiImageStreamGetNextFrame(h, 0, &pImageFrame);
    if (FAILED(hr))
    {
        cout << "Get Image Frame Failed" << endl;
        return -1;
    }
    //  temp1 = depth;
    NuiImageBuffer * pTexture = pImageFrame->pFrameTexture;
    KINECT_LOCKED_RECT LockedRect;
    pTexture->LockRect(0, &LockedRect, NULL, 0);
    if (LockedRect.Pitch != 0)
    {
        USHORT * pBuff = (USHORT*) LockedRect.pBits;
        for (int i = 0; i < DEPTH_WIDTH * DEPTH_HIGHT; i++)
        {
            BYTE index = pBuff[i] & 0x07;                        // player index (low 3 bits)
            USHORT realDepth = (pBuff[i] & 0xFFF8) >> 3;         // depth value (high 13 bits)
            BYTE scale = 255 - (BYTE)(256 * realDepth / 0x0fff); // map depth to intensity (near = bright)
            buf[CHANNEL * i] = buf[CHANNEL * i + 1] = buf[CHANNEL * i + 2] = 0;
            switch (index)
            {
            case 0:
                buf[CHANNEL * i] = scale / 2;
                buf[CHANNEL * i + 1] = scale / 2;
                buf[CHANNEL * i + 2] = scale / 2;
                break;
            case 1:
                buf[CHANNEL * i] = scale;
                break;
            case 2:
                buf[CHANNEL * i + 1] = scale;
                break;
            case 3:
                buf[CHANNEL * i + 2] = scale;
                break;
            case 4:
                buf[CHANNEL * i] = scale;
                buf[CHANNEL * i + 1] = scale;
                break;
            case 5:
                buf[CHANNEL * i] = scale;
                buf[CHANNEL * i + 2] = scale;
                break;
            case 6:
                buf[CHANNEL * i + 1] = scale;
                buf[CHANNEL * i + 2] = scale;
                break;
            case 7:
                buf[CHANNEL * i] = 255 - scale / 2;
                buf[CHANNEL * i + 1] = 255 - scale / 2;
                buf[CHANNEL * i + 2] = 255 - scale / 2;
                break;
            }
        }
        cvSetData(depth, buf, DEPTH_WIDTH * CHANNEL);
    }
    NuiImageStreamReleaseFrame(h, pImageFrame);
    cvShowImage("depth image", depth);
    return 0;
}

// Grab the next skeleton frame and draw the tracked joints and bones.
int drawSkeleton(IplImage* skeleton)
{
    NUI_SKELETON_FRAME SkeletonFrame;
    CvPoint pt[20];
    HRESULT hr = NuiSkeletonGetNextFrame(0, &SkeletonFrame);
    if (FAILED(hr))
    {
        cout << "Get Skeleton Frame Failed" << endl;
        return -1;
    }
    bool bFoundSkeleton = false;
    for (int i = 0; i < NUI_SKELETON_COUNT; i++)
    {
        if (SkeletonFrame.SkeletonData[i].eTrackingState
                == NUI_SKELETON_TRACKED)
        {
            bFoundSkeleton = true;
        }
    }
    // Has skeletons!
    //
    if (bFoundSkeleton)
    {
        NuiTransformSmooth(&SkeletonFrame, NULL);
        memset(skeleton->imageData, 0, skeleton->imageSize);
        for (int i = 0; i < NUI_SKELETON_COUNT; i++)
        {
            if (SkeletonFrame.SkeletonData[i].eTrackingState
                    == NUI_SKELETON_TRACKED)
            {
                for (int j = 0; j < NUI_SKELETON_POSITION_COUNT; j++)
                {
                    float fx, fy;
                    NuiTransformSkeletonToDepthImageF(
                            SkeletonFrame.SkeletonData[i].SkeletonPositions[j],
                            &fx, &fy);
                    pt[j].x = (int) (fx * SKELETON_WIDTH + 0.5f);
                    pt[j].y = (int) (fy * SKELETON_HIGHT + 0.5f);
                    cvCircle(skeleton, pt[j], 5, CV_RGB(255, 0, 0), -1);
                }

                cvLine(skeleton, pt[NUI_SKELETON_POSITION_HEAD],
                        pt[NUI_SKELETON_POSITION_SHOULDER_CENTER],
                        CV_RGB(0, 255, 0));

                cvLine(skeleton, pt[NUI_SKELETON_POSITION_SHOULDER_CENTER],
                        pt[NUI_SKELETON_POSITION_SPINE], CV_RGB(0, 255, 0));

                cvLine(skeleton, pt[NUI_SKELETON_POSITION_SPINE],
                        pt[NUI_SKELETON_POSITION_HIP_CENTER],
                        CV_RGB(0, 255, 0));

                cvLine(skeleton, pt[NUI_SKELETON_POSITION_HAND_RIGHT],
                        pt[NUI_SKELETON_POSITION_WRIST_RIGHT],
                        CV_RGB(0, 255, 0));

                cvLine(skeleton, pt[NUI_SKELETON_POSITION_WRIST_RIGHT],
                        pt[NUI_SKELETON_POSITION_ELBOW_RIGHT],
                        CV_RGB(0, 255, 0));

                cvLine(skeleton, pt[NUI_SKELETON_POSITION_ELBOW_RIGHT],
                        pt[NUI_SKELETON_POSITION_SHOULDER_RIGHT],
                        CV_RGB(0, 255, 0));

                cvLine(skeleton, pt[NUI_SKELETON_POSITION_SHOULDER_RIGHT],
                        pt[NUI_SKELETON_POSITION_SHOULDER_CENTER],
                        CV_RGB(0, 255, 0));

                cvLine(skeleton, pt[NUI_SKELETON_POSITION_SHOULDER_CENTER],
                        pt[NUI_SKELETON_POSITION_SHOULDER_LEFT],
                        CV_RGB(0, 255, 0));

                cvLine(skeleton, pt[NUI_SKELETON_POSITION_SHOULDER_LEFT],
                        pt[NUI_SKELETON_POSITION_ELBOW_LEFT],
                        CV_RGB(0, 255, 0));

                cvLine(skeleton, pt[NUI_SKELETON_POSITION_ELBOW_LEFT],
                        pt[NUI_SKELETON_POSITION_WRIST_LEFT],
                        CV_RGB(0, 255, 0));

                cvLine(skeleton, pt[NUI_SKELETON_POSITION_WRIST_LEFT],
                        pt[NUI_SKELETON_POSITION_HAND_LEFT], CV_RGB(0, 255, 0));

                cvLine(skeleton, pt[NUI_SKELETON_POSITION_HIP_CENTER],
                        pt[NUI_SKELETON_POSITION_HIP_RIGHT], CV_RGB(0, 255, 0));

                cvLine(skeleton, pt[NUI_SKELETON_POSITION_HIP_RIGHT],
                        pt[NUI_SKELETON_POSITION_KNEE_RIGHT],
                        CV_RGB(0, 255, 0));

                cvLine(skeleton, pt[NUI_SKELETON_POSITION_KNEE_RIGHT],
                        pt[NUI_SKELETON_POSITION_ANKLE_RIGHT],
                        CV_RGB(0, 255, 0));

                cvLine(skeleton, pt[NUI_SKELETON_POSITION_ANKLE_RIGHT],
                        pt[NUI_SKELETON_POSITION_FOOT_RIGHT],
                        CV_RGB(0, 255, 0));

                cvLine(skeleton, pt[NUI_SKELETON_POSITION_HIP_CENTER],
                        pt[NUI_SKELETON_POSITION_HIP_LEFT], CV_RGB(0, 255, 0));

                cvLine(skeleton, pt[NUI_SKELETON_POSITION_HIP_LEFT],
                        pt[NUI_SKELETON_POSITION_KNEE_LEFT], CV_RGB(0, 255, 0));

                cvLine(skeleton, pt[NUI_SKELETON_POSITION_KNEE_LEFT],
                        pt[NUI_SKELETON_POSITION_ANKLE_LEFT],
                        CV_RGB(0, 255, 0));

                cvLine(skeleton, pt[NUI_SKELETON_POSITION_ANKLE_LEFT],
                        pt[NUI_SKELETON_POSITION_FOOT_LEFT], CV_RGB(0, 255, 0));
            }
        }
    }
    cvShowImage("skeleton image", skeleton);
    return 0;
}

int main(int argc, char * argv[])
{
    IplImage* color = cvCreateImageHeader(cvSize(COLOR_WIDTH, COLOR_HIGHT), IPL_DEPTH_8U, 4);

    IplImage* depth = cvCreateImageHeader(cvSize(DEPTH_WIDTH, DEPTH_HIGHT),IPL_DEPTH_8U, CHANNEL);

    IplImage* skeleton = cvCreateImage(cvSize(SKELETON_WIDTH, SKELETON_HIGHT),IPL_DEPTH_8U, CHANNEL);

    cvNamedWindow("color image", CV_WINDOW_AUTOSIZE);

    cvNamedWindow("depth image", CV_WINDOW_AUTOSIZE);

    cvNamedWindow("skeleton image", CV_WINDOW_AUTOSIZE);

    // Initialise the NUI runtime with depth + player index, color and skeleton streams.
    HRESULT hr = NuiInitialize(
            NUI_INITIALIZE_FLAG_USES_DEPTH_AND_PLAYER_INDEX
            | NUI_INITIALIZE_FLAG_USES_COLOR
            | NUI_INITIALIZE_FLAG_USES_SKELETON);

    if (hr != S_OK)
    {
        cout << "NuiInitialize failed" << endl;
        return hr;
    }

    // Open the 640x480 color stream; h1 is signalled whenever a new color frame is ready.
    HANDLE h1 = CreateEvent(NULL, TRUE, FALSE, NULL);
    HANDLE h2 = NULL;
    hr = NuiImageStreamOpen(NUI_IMAGE_TYPE_COLOR, NUI_IMAGE_RESOLUTION_640x480,
            0, 2, h1, &h2);
    if (FAILED(hr))
    {
        cout << "Could not open image stream video" << endl;
        return hr;
    }

    // Open the 320x240 depth + player index stream; h3 signals new depth frames.
    HANDLE h3 = CreateEvent(NULL, TRUE, FALSE, NULL);
    HANDLE h4 = NULL;
    hr = NuiImageStreamOpen(NUI_IMAGE_TYPE_DEPTH_AND_PLAYER_INDEX,
            NUI_IMAGE_RESOLUTION_320x240, 0, 2, h3, &h4);
    if (FAILED(hr))
    {
        cout << "Could not open depth stream video" << endl;
        return hr;
    }

    // Enable skeleton tracking; h5 signals new skeleton frames.
    HANDLE h5 = CreateEvent(NULL, TRUE, FALSE, NULL);
    hr = NuiSkeletonTrackingEnable(h5, 0);
    if (FAILED(hr))
    {
        cout << "Could not open skeleton stream video" << endl;
        return hr;
    }

    // Main loop: block on each stream's event, then fetch and display the new data.
    while (1)
    {
        WaitForSingleObject(h1, INFINITE);
        drawColor(h2, color);
        WaitForSingleObject(h3, INFINITE);
        drawDepth(h4, depth);
        WaitForSingleObject(h5, INFINITE);
        drawSkeleton(skeleton);

        //exit
        int c = cvWaitKey(1);
        if (c == 27 || c == 'q' || c == 'Q')
            break;
    }

    cvReleaseImageHeader(&depth);
    cvReleaseImageHeader(&color);
    cvReleaseImage(&skeleton);
    cvDestroyWindow("depth image");
    cvDestroyWindow("color image");
    cvDestroyWindow("skeleton image");

    NuiShutdown();

    return 0;

}
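
The code above uses OpenCV's legacy C API (IplImage, cvShowImage), which has been removed from newer OpenCV releases. As a rough, untested sketch (assuming the same locked-rect buffers from the Kinect SDK, and using a hypothetical helper name showWithMat), the same conversion can be done with the C++ API by wrapping the raw buffers in cv::Mat in the same source file:

#include <opencv2/opencv.hpp>

// Hedged sketch only: wrap the Kinect buffers in cv::Mat instead of IplImage.
// colorBits/colorPitch come from the color stream's locked rect; depthRgb is
// the 3-channel visualisation buffer built in drawDepth above.
void showWithMat(BYTE* colorBits, int colorPitch, BYTE* depthRgb)
{
    // Wrap the 640x480 BGRA color buffer without copying; clone() it if the
    // image must outlive NuiImageStreamReleaseFrame.
    cv::Mat color(480, 640, CV_8UC4, colorBits, colorPitch);

    // Wrap the already colour-coded 320x240 depth buffer (tightly packed).
    cv::Mat depth(240, 320, CV_8UC3, depthRgb);

    cv::imshow("color image", color);
    cv::imshow("depth image", depth);
}

In the main loop, cv::waitKey(1) would then replace cvWaitKey(1) for handling the exit key.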
