从Kinect SDK V1 Beta转换为V1.8 [英] Converting from Kinect SDK V1 Beta to V1.8
本文介绍了从Kinect SDK V1 Beta转换为V1.8的处理方法,对大家解决问题具有一定的参考价值,需要的朋友们下面随着小编来一起学习吧!
问题描述
- 我正在使用开源代码来跟踪为SDK 1 Beta创建的球,并希望将其转换为SDK V1.8。我不能使用V2,因为我没有Windows 8.我设法转换了大部分代码。但是,有
没有PlanarImage的翻译,所以我创建了一些替换。我找不到e.ColorImageFrame.Image的翻译(第187行)。我设法清除了错误,但我得到了一堆空引用。 - 这是使用SDK 1 Beta的未经编辑的开源代码。
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using Microsoft.Xna.Framework.Graphics;
using Microsoft.Research.Kinect;
using Microsoft.Research.Kinect.Nui;
using Microsoft.Xna.Framework;
using System.Threading;
using Emgu.CV;
using Emgu.CV.Util;
using Emgu.CV.Structure;
namespace KTLib
{
public class KinectInterface
{
public Runtime nui;
Color [] colorFrameData;
Texture2D colorFrameTex;
Texture2D depthFrameTex;
public Texture2D ColorFrameTex
{
get {return colorFrameTex; }
}
public Texture2D DepthFrameTex
{
get {return depthFrameTex; }
}
public Image< Gray,Byte> FullDepth;
public event Action OnDepthFrame;
public ushort [] depthMM;
public const int w = 640;
public const int h = 480;
GraphicsDevice gd;
FPSCounter fps;
public int FPS
{
get {return fps.FPS; }
}
double theta;
void initKinect()
{
nui = Runtime.Kinects [0];
try
{
nui.Initialize(RuntimeOptions.UseDepth | RuntimeOptions.UseColor);
}
catch (InvalidOperationException)
{
throw new Exception("运行时初始化失败。请确保插入Kinect设备。");
return;
}
try
{
nui.VideoStream.Open(ImageStreamType.Video, 2, ImageResolution.Resolution640x480, ImageType.Color);
nui.DepthStream.Open(ImageStreamType.Depth, 2, ImageResolution.Resolution640x480, ImageType.Depth);
}
catch (InvalidOperationException)
{
throw new Exception("无法打开流。请确保指定支持的图像类型和分辨率。");
return;
}
nui.DepthFrameReady += new EventHandler<ImageFrameReadyEventArgs>(nui_DepthFrameReady);
nui.VideoFrameReady += new EventHandler<ImageFrameReadyEventArgs>(nui_VideoFrameReady);
nui.NuiCamera.ElevationAngle = 10;
theta = nui.NuiCamera.ElevationAngle * Math.PI / 180;
}
float tanTmax, tanPmax;
void initMaths()
{
float FOV_x = 57 *(float)(Math.PI / 180);
float FOV_y = 43 *(float)(Math.PI / 180);
float tmax = FOV_x / 2;
float pmax = FOV_y / 2;
tanTmax =(float)Math.Tan(tmax);
tanPmax =(float)Math.Tan(pmax);
}
public KinectInterface(GraphicsDevice gd)
{
this.gd = gd;
initMaths();
colorFrameTex = new Texture2D(gd,w,h);
fps = new FPSCounter();
}
public void Start()
{
initKinect();
Ready = false;
}
public bool Ready;
Texture2D KVideoToTex(PlanarImage img)
{
Texture2D tex = new Texture2D(gd, img.Width, img.Height);
colorFrameData = new Color[img.Width * img.Height];
for (int i = 0; i < colorFrameData.Length; i++)
{
colorFrameData[i].R = img.Bits[4 * i + 2];
colorFrameData[i].G = img.Bits[4 * i + 1];
colorFrameData[i].B = img.Bits[4 * i];
colorFrameData[i].A = 255;
}
tex.SetData(colorFrameData);
return tex;
}
void nui_VideoFrameReady(object sender,ImageFrameReadyEventArgs e)
{
colorFrameTex = KVideoToTex(e.ImageFrame.Image);
}
void processDepthFrame(byte[] depthFrame16)
{
depthMM = new ushort[w * h];
byte[] depth8 = new byte[w * h];
//for (int i16 = 0, i = 0; i16 < depthFrame16.Length; i16 += 2, i++)
//{
// ushort packet = (ushort)((depthFrame16[i16 + 1] << 8) | depthFrame16[i16]);
// ushort depth = (ushort)(0x0FFF & packet);
// depthMM[i] = depth;
//}
for (int y = 0; y < h; y++)
for (int x = 0; x < w; x++)
{
int i = y * w + x;
int ref_i = y * w + w - 1 - x; //反映x。反射深度流。
ushort packet = (ushort)((depthFrame16[2 * i + 1] << 8) | depthFrame16[2 * i]);
ushort depthVal = (ushort)(0x0FFF & packet);
depthMM[ref_i] = depthVal;
if (depthVal != 0)
{
depth8[ref_i] = (byte)(depthVal >> 4);
}
else
{
depth8[ref_i] = (byte)255;
}
}
FullDepth = Helpers.ImageFromArray8(depth8, w, h);
}
Texture2D generateDepthTex()
{
Texture2D tex = new Texture2D(gd, w, h);
Color[] data = new Color[w * h];
for (int i = 0; i < data.Length; i++)
{
ushort depth = depthMM[i];
//float intensity = 1 - (float)(depth-800) / (float)0x0fff;
byte val = (byte)~(depth >> 4);
Color c = new Color();
if (depth == 0)
c = Color.Gray;
else
{
// c = Color.Lerp(Color.Black, Color.White, intensity);
c.R = val;
c.G = val;
c.B = val;
c.A = 255;
}
data[i] = c;
}
tex.SetData<Color>(data);
return tex;
}
public Vector3 RotateXCCW(Vector3 v, double theta)
{
var vrot = new Vector3();
vrot.Z = v.Z * (float)Math.Cos(theta) - v.Y * (float)Math.Sin(theta);
vrot.Y = v.Z * (float)Math.Sin(theta) + v.Y * (float)Math.Cos(theta);
vrot.X = v.X;
return vrot;
}
public Vector3 UnprojectDepth(float depth_mm,float px,float py)
{
float z = depth_mm / 1000f;
float xnorm = 2f *((float)px / w - 0.5f); // [ - 1,1]
float ynorm = 2f * - ((float)py / h - 0.5f); // [ - 1,1]
// float FOV_x = 57 *(float)(Math.PI / 180);
// float FOV_y = 43 *(float)(Math.PI / 180);
// float tmax = FOV_x / 2;
// float pmax = FOV_y / 2;
// float xproj = z * xnorm *(float)Math.Tan(tmax);
// float yproj = z * ynorm *(float)Math.Tan(pmax);
float xproj = z * xnorm * tanTmax;
float yproj = z * ynorm * tanPmax;
var v = new Vector3(xproj,yproj,z);
//正确的仰角
v = RotateXCCW(v,theta);
return v;
}
public Vector3 UnprojectDepth(int px, int py)
{
ushort depth = depthMM[px + py * w];
if (depth == 0) //无数据
return Vector3.Zero;
return UnprojectDepth(depth, px, py);
}
public bool ProjectToPx(Vector3 v,out Vector2 proj)
{
v = RotateXCCW(v,-theta);
float FOV_x = 57 *(float)(Math.PI / 180);
float FOV_y = 43 *(float)(Math.PI / 180);
float tmax = FOV_x / 2;
float pmax = FOV_y / 2;
var xnorm = v.X /(v.Z *(float)Math.Tan(tmax));
var ynorm = v.Y /(v.Z *(float)Math.Tan(pmax));
float x =(float)(xnorm + 1)/ 2 * w;
float y =(float)(1 - ynorm)/ 2 * h;
proj = new Vector2(x,y);
return v.Z > 0.0; //如果在投影平面后面的球是无效的
}
public bool GetColorFromDepth(int x, int y, out Color c)
{
c = Color.Black;
int cX, cY;
nui.NuiCamera.GetColorPixelCoordinatesFromDepthPixel(ImageResolution.Resolution640x480, new ImageViewArea(), x / 2, y / 2, (short)(depthMM[x + y * w] << 3), out cX, out cY);
if (cX != -1 && colorFrameData != null && cX < w && cY < h)
{
c = colorFrameData[cX + cY * w];
return true;
}
return false;
}
void nui_DepthFrameReady(object sender,ImageFrameReadyEventArgs e)
{
processDepthFrame(e.ImageFrame.Image.Bits);
depthFrameTex = generateDepthTex();
if(OnDepthFrame!= null)
OnDepthFrame();
fps.PushFrame();
Ready = true;
}
}
}
- 第二个代码是我的失败尝试转换为SDK V1.8
- 所做的更改:
- 已删除 Microsoft.Reasearch.Kinect和Microsoft.Research.Kinect.Nui
- 已添加 Microsoft.Kinect
- 已删除"Runtime nui"
- 已添加 KinectSensor nui
- 已删除 nui = Runtime.Kinect [0];
- 已添加 nui = KinectSensor.KinectSensor [0];
- 已删除来自运行时的尝试和聊天
- 已添加
nui.ColorStream.Enable(ColorImageFormat.RgbResolution640x480Fps30);
nui.DepthStream.Enable();
- 已更改 EventHandlers从ImageFrameReady到DepthImageFrameReady和ColorImageFrameReady
- 已添加 nui.Start() ;
- 更改 PlanarImage to ColorImageFrame
- 更改 img.bits到新字节pixelData
- 移动 processDepthFrame到ColorFrameReady,因为DepthFrameReady传递了太大的数字。
- 无法找到e.ImageFrame.Image的替换。注意:e.ColorImageFrame.Image不起作用)
- 已删除过时的代码 nui.NuiCamera.GetColorPixelCoordinatesFromDepthPixel(ImageResolution.Resolution640x480, new ImageViewArea(), x / 2, y / 2, (short)(depthMM[x + y * w] << 3), out cX, out cY);
- 已添加
DepthImagePoint depthPoint = new DepthImagePoint();
ColorImagePoint colorPoint =
nui.CoordinateMapper.MapDepthPointToColorPoint(DepthImageFormat.Resolution640x480Fps30, depthPoint,
ColorImageFormat.RawBayerResolution640x480Fps30);
- 更改 cx和cy到colorPoint.X和colorPoint.Y
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using Microsoft.Xna.Framework.Graphics;
// ------旧代码
//using Microsoft.Research.Kinect;
//using Microsoft.Research.Kinect.Nui;
// ----新代码
using Microsoft.Kinect;
// -----------
using Microsoft.Xna.Framework;
using System.Threading;
using Emgu.CV;
using Emgu.CV.Util;
using Emgu.CV.Structure;
namespace KTLib
{
public class KinectInterface
{
// --------旧代码
// public Runtime nui;
// --------新代码
public KinectSensor nui;
//
Color [] colorFrameData;
Texture2D colorFrameTex;
Texture2D depthFrameTex;
byte [] pixelData;
public Texture2D ColorFrameTex
{
get {return colorFrameTex; }
}
public Texture2D DepthFrameTex
{
get {return depthFrameTex; }
}
public Image< Gray,Byte> FullDepth;
public event Action OnDepthFrame;
public ushort [] depthMM;
public const int w = 640;
public const int h = 480;
GraphicsDevice gd;
FPSCounter fps;
public int FPS
{
get {return fps.FPS; }
}
double theta;
void initKinect()
{
//检查Kinect
// if(KinectSensor.KinectSensors.Count == 0)
// {
//返回"错误:未检测到Kinect传感器!";
//}
// --------旧代码
// nui = Runtime.Kinects [0];
// --------新代码
nui = KinectSensor.KinectSensors [0];
// ----------
/*
try
{
nui.Initialize(RuntimeOptions.UseDepth | RuntimeOptions.UseColor);
}
catch (InvalidOperationException)
{
throw new Exception("运行时初始化失败。请确保插入Kinect设备。");
return;
}
try
{
nui.VideoStream.Open(ImageStreamType.Video, 2, ImageResolution.Resolution640x480, ImageType.Color);
nui.DepthStream.Open(ImageStreamType.Depth, 2, ImageResolution.Resolution640x480, ImageType.Depth);
}
catch (InvalidOperationException)
{
throw new Exception("无法打开流。请确保指定支持的图像类型和分辨率。");
return;
}
*/
//启用所有streamins
nui.ColorStream.Enable(ColorImageFormat.RgbResolution640x480Fps30);
nui.DepthStream.Enable();
nui.DepthFrameReady += new EventHandler<DepthImageFrameReadyEventArgs>(nui_DepthFrameReady);
nui.ColorFrameReady += new EventHandler<ColorImageFrameReadyEventArgs>(nui_ColorFrameReady);
//启动传感器
nui.Start();
nui.ElevationAngle = 10;
theta = nui.ElevationAngle * Math.PI / 180;
}
float tanTmax, tanPmax;
void initMaths()
{
float FOV_x = 57 *(float)(Math.PI / 180);
float FOV_y = 43 *(float)(Math.PI / 180);
float tmax = FOV_x / 2;
float pmax = FOV_y / 2;
tanTmax =(float)Math.Tan(tmax);
tanPmax =(float)Math.Tan(pmax);
}
public KinectInterface(GraphicsDevice gd)
{
this.gd = gd;
initMaths();
colorFrameTex = new Texture2D(gd,w,h);
fps = new FPSCounter();
}
public void Start()
{
initKinect();
Ready = false;
}
public bool Ready;
//此时可能不是
Texture2D KVideoToTex(ColorImageFrameReadyEventArgs e)
{
using (ColorImageFrame img = e.OpenColorImageFrame())
{
if (img != null)
{
if (pixelData == null)
{
pixelData = new byte[img.PixelDataLength];
}
img.CopyPixelDataTo(pixelData);
}
Texture2D tex = new Texture2D(gd, img.Width, img.Height);
colorFrameData = new Color[img.Width * img.Height];
for (int i = 0; i < colorFrameData.Length; i++)
{
colorFrameData[i].R = pixelData[4 * i + 2];
colorFrameData[i].G = pixelData[4 * i + 1];
colorFrameData[i].B = pixelData[4 * i];
colorFrameData[i].A = 255;
}
tex.SetData(colorFrameData);
return tex;
}
}
void nui_ColorFrameReady(object sender,ColorImageFrameReadyEventArgs e)
{
// bool recievedData = false;
using(ColorImageFrame colorImageFrame = e.OpenColorImageFrame())
{
if(colorImageFrame!= null)
{
if(pixelData == null)
{
pixelData = new byte [colorImageFrame.PixelDataLength];
}
colorImageFrame.CopyPixelDataTo(pixelData);
// recievedData = true;
}
processDepthFrame(pixelData);
}
//重要!!!没有翻译e.ColorFrame.Image
colorFrameTex = KVideoToTex(e.ImageFrame.Image);
}
void processDepthFrame(byte[] depthFrame16)
{
depthMM = new ushort[w * h];
byte[] depth8 = new byte[w * h];
//for (int i16 = 0, i = 0; i16 < depthFrame16.Length; i16 += 2, i++)
//{
// ushort packet = (ushort)((depthFrame16[i16 + 1] << 8) | depthFrame16[i16]);
// ushort depth = (ushort)(0x0FFF & packet);
// depthMM[i] = depth;
//}
for (int y = 0; y < h; y++)
for (int x = 0; x < w; x++)
{
int i = y * w + x;
int ref_i = y * w + w - 1 - x; //反映x。反射深度流。
ushort packet = (ushort)((depthFrame16[2 * i + 1] << 8) | depthFrame16[2 * i]);
ushort depthVal = (ushort)(0x0FFF & packet);
depthMM[ref_i] = depthVal;
if (depthVal != 0)
{
depth8[ref_i] = (byte)(depthVal >> 4);
}
else
{
depth8[ref_i] = (byte)255;
}
}
FullDepth = Helpers.ImageFromArray8(depth8, w, h);
}
Texture2D generateDepthTex()
{
Texture2D tex = new Texture2D(gd, w, h);
Color[] data = new Color[w * h];
for (int i = 0; i < data.Length; i++)
{
ushort depth = depthMM[i];
//float intensity = 1 - (float)(depth-800) / (float)0x0fff;
byte val = (byte)~(depth >> 4);
Color c = new Color();
if (depth == 0)
c = Color.Gray;
else
{
// c = Color.Lerp(Color.Black, Color.White, intensity);
c.R = val;
c.G = val;
c.B = val;
c.A = 255;
}
data[i] = c;
}
tex.SetData<Color>(data);
return tex;
}
public Vector3 RotateXCCW(Vector3 v, double theta)
{
var vrot = new Vector3();
vrot.Z = v.Z * (float)Math.Cos(theta) - v.Y * (float)Math.Sin(theta);
vrot.Y = v.Z * (float)Math.Sin(theta) + v.Y * (float)Math.Cos(theta);
vrot.X = v.X;
return vrot;
}
public Vector3 UnprojectDepth(float depth_mm,float px,float py)
{
float z = depth_mm / 1000f;
float xnorm = 2f *((float)px / w - 0.5f); // [ - 1,1]
float ynorm = 2f * - ((float)py / h - 0.5f); // [ - 1,1]
// float FOV_x = 57 *(float)(Math.PI / 180);
// float FOV_y = 43 *(float)(Math.PI / 180);
// float tmax = FOV_x / 2;
// float pmax = FOV_y / 2;
// float xproj = z * xnorm *(float)Math.Tan(tmax);
// float yproj = z * ynorm *(float)Math.Tan(pmax);
float xproj = z * xnorm * tanTmax;
float yproj = z * ynorm * tanPmax;
var v = new Vector3(xproj,yproj,z);
//正确的仰角
v = RotateXCCW(v,theta);
return v;
}
public Vector3 UnprojectDepth(int px, int py)
{
ushort depth = depthMM[px + py * w];
if (depth == 0) //无数据
return Vector3.Zero;
return UnprojectDepth(depth, px, py);
}
public bool ProjectToPx(Vector3 v,out Vector2 proj)
{
v = RotateXCCW(v,-theta);
float FOV_x = 57 *(float)(Math.PI / 180);
float FOV_y = 43 *(float)(Math.PI / 180);
float tmax = FOV_x / 2;
float pmax = FOV_y / 2;
var xnorm = v.X /(v.Z *(float)Math.Tan(tmax));
var ynorm = v.Y /(v.Z *(float)Math.Tan(pmax));
float x =(float)(xnorm + 1)/ 2 * w;
float y =(float)(1 - ynorm)/ 2 * h;
proj = new Vector2(x,y);
return v.Z > 0.0; //如果在投影平面后面的球是无效的
}
public bool GetColorFromDepth(int x, int y, out Color c)
{
c = Color.Black;
//int cX, cY;
// -------新代码
DepthImagePoint depthPoint = new DepthImagePoint();
ColorImagePoint colorPoint = nui.CoordinateMapper.MapDepthPointToColorPoint(DepthImageFormat.Resolution640x480Fps30, depthPoint, ColorImageFormat.RawBayerResolution640x480Fps30);
// -----旧代码
//nui.NuiCamera.GetColorPixelCoordinatesFromDepthPixel(ImageResolution.Resolution640x480, new ImageViewArea(), x / 2, y / 2, (short)(depthMM[x + y * w] << 3), out cX, out cY);
// ----------------
if (colorPoint.X != -1 && colorFrameData != null && colorPoint.X < w && colorPoint.Y < h)
{
c = colorFrameData[colorPoint.X + colorPoint.Y * w];
return true;
}
return false;
}
void nui_DepthFrameReady(object sender, DepthImageFrameReadyEventArgs e)
{
//processDepthFrame(e.ImageFrame.Image.Bits);
// processDepthFrame();
depthFrameTex = generateDepthTex();
if(OnDepthFrame!= null)
OnDepthFrame();
fps.PushFrame();
Ready = true;
}
}
}
解决方案
请参阅文档条目以获取移植帮助:
http://msdn.microsoft.com/en-us/library/hh855351.aspx
- I am taking an Open source code for tracking a ball that was created for the SDK 1 Beta, and would like to convert it to the SDK V1.8. I can not use V2 because I do not have Windows 8. I managed to convert the majority of the code. However, there is no translation for PlanarImage so I created some substitutions. I could not find a translation for e.ColorImageFrame.Image either(Line 187). I managed to clear the errors, but I get a bunch of null references.
- This is the unedited, opensource code using SDK 1 Beta.
using System; using System.Collections.Generic; using System.Linq; using System.Text; using Microsoft.Xna.Framework.Graphics; using Microsoft.Research.Kinect; using Microsoft.Research.Kinect.Nui; using Microsoft.Xna.Framework; using System.Threading; using Emgu.CV; using Emgu.CV.Util; using Emgu.CV.Structure; namespace KTLib { public class KinectInterface { public Runtime nui; Color[] colorFrameData; Texture2D colorFrameTex; Texture2D depthFrameTex; public Texture2D ColorFrameTex { get { return colorFrameTex; } } public Texture2D DepthFrameTex { get { return depthFrameTex; } } public Image<Gray, Byte> FullDepth; public event Action OnDepthFrame; public ushort[] depthMM; public const int w = 640; public const int h = 480; GraphicsDevice gd; FPSCounter fps; public int FPS { get { return fps.FPS; } } double theta; void initKinect() { nui = Runtime.Kinects[0]; try { nui.Initialize(RuntimeOptions.UseDepth | RuntimeOptions.UseColor); } catch (InvalidOperationException) { throw new Exception("Runtime initialization failed. Please make sure Kinect device is plugged in."); return; } try { nui.VideoStream.Open(ImageStreamType.Video, 2, ImageResolution.Resolution640x480, ImageType.Color); nui.DepthStream.Open(ImageStreamType.Depth, 2, ImageResolution.Resolution640x480, ImageType.Depth); } catch (InvalidOperationException) { throw new Exception("Failed to open stream. 
Please make sure to specify a supported image type and resolution."); return; } nui.DepthFrameReady += new EventHandler<ImageFrameReadyEventArgs>(nui_DepthFrameReady); nui.VideoFrameReady += new EventHandler<ImageFrameReadyEventArgs>(nui_VideoFrameReady); nui.NuiCamera.ElevationAngle = 10; theta = nui.NuiCamera.ElevationAngle * Math.PI / 180; } float tanTmax, tanPmax; void initMaths() { float FOV_x = 57 * (float)(Math.PI / 180); float FOV_y = 43 * (float)(Math.PI / 180); float tmax = FOV_x / 2; float pmax = FOV_y / 2; tanTmax = (float)Math.Tan(tmax); tanPmax = (float)Math.Tan(pmax); } public KinectInterface(GraphicsDevice gd) { this.gd = gd; initMaths(); colorFrameTex = new Texture2D(gd, w, h); fps = new FPSCounter(); } public void Start() { initKinect(); Ready = false; } public bool Ready; Texture2D KVideoToTex(PlanarImage img) { Texture2D tex = new Texture2D(gd, img.Width, img.Height); colorFrameData = new Color[img.Width * img.Height]; for (int i = 0; i < colorFrameData.Length; i++) { colorFrameData[i].R = img.Bits[4 * i + 2]; colorFrameData[i].G = img.Bits[4 * i + 1]; colorFrameData[i].B = img.Bits[4 * i]; colorFrameData[i].A = 255; } tex.SetData(colorFrameData); return tex; } void nui_VideoFrameReady(object sender, ImageFrameReadyEventArgs e) { colorFrameTex = KVideoToTex(e.ImageFrame.Image); } void processDepthFrame(byte[] depthFrame16) { depthMM = new ushort[w * h]; byte[] depth8 = new byte[w * h]; //for (int i16 = 0, i = 0; i16 < depthFrame16.Length; i16 += 2, i++) //{ // ushort packet = (ushort)((depthFrame16[i16 + 1] << 8) | depthFrame16[i16]); // ushort depth = (ushort)(0x0FFF & packet); // depthMM[i] = depth; //} for (int y = 0; y < h; y++) for (int x = 0; x < w; x++) { int i = y * w + x; int ref_i = y * w + w - 1 - x; //reflected x. the depth stream is reflected. 
ushort packet = (ushort)((depthFrame16[2 * i + 1] << 8) | depthFrame16[2 * i]); ushort depthVal = (ushort)(0x0FFF & packet); depthMM[ref_i] = depthVal; if (depthVal != 0) { depth8[ref_i] = (byte)(depthVal >> 4); } else { depth8[ref_i] = (byte)255; } } FullDepth = Helpers.ImageFromArray8(depth8, w, h); } Texture2D generateDepthTex() { Texture2D tex = new Texture2D(gd, w, h); Color[] data = new Color[w * h]; for (int i = 0; i < data.Length; i++) { ushort depth = depthMM[i]; //float intensity = 1 - (float)(depth-800) / (float)0x0fff; byte val = (byte)~(depth >> 4); Color c = new Color(); if (depth == 0) c = Color.Gray; else { // c = Color.Lerp(Color.Black, Color.White, intensity); c.R = val; c.G = val; c.B = val; c.A = 255; } data[i] = c; } tex.SetData<Color>(data); return tex; } public Vector3 RotateXCCW(Vector3 v, double theta) { var vrot = new Vector3(); vrot.Z = v.Z * (float)Math.Cos(theta) - v.Y * (float)Math.Sin(theta); vrot.Y = v.Z * (float)Math.Sin(theta) + v.Y * (float)Math.Cos(theta); vrot.X = v.X; return vrot; } public Vector3 UnprojectDepth(float depth_mm, float px, float py) { float z = depth_mm / 1000f; float xnorm = 2f * ((float)px / w - 0.5f); //[-1, 1] float ynorm = 2f * -((float)py / h - 0.5f); //[-1, 1] //float FOV_x = 57 * (float)(Math.PI / 180); //float FOV_y = 43 * (float)(Math.PI / 180); //float tmax = FOV_x / 2; //float pmax = FOV_y / 2; //float xproj = z * xnorm * (float)Math.Tan(tmax); //float yproj = z * ynorm * (float)Math.Tan(pmax); float xproj = z * xnorm * tanTmax; float yproj = z * ynorm * tanPmax; var v = new Vector3(xproj, yproj, z); //correct for elevation angle v = RotateXCCW(v, theta); return v; } public Vector3 UnprojectDepth(int px, int py) { ushort depth = depthMM[px + py * w]; if (depth == 0) //no data return Vector3.Zero; return UnprojectDepth(depth, px, py); } public bool ProjectToPx(Vector3 v, out Vector2 proj) { v = RotateXCCW(v, -theta); float FOV_x = 57 * (float)(Math.PI / 180); float FOV_y = 43 * (float)(Math.PI / 180); 
float tmax = FOV_x / 2; float pmax = FOV_y / 2; var xnorm = v.X / (v.Z * (float)Math.Tan(tmax)); var ynorm = v.Y / (v.Z * (float)Math.Tan(pmax)); float x = (float)(xnorm + 1) / 2 * w; float y = (float)(1 - ynorm) / 2 * h; proj = new Vector2(x, y); return v.Z > 0.0; //invalid if ball behind plane of projection } public bool GetColorFromDepth(int x, int y, out Color c) { c = Color.Black; int cX, cY; nui.NuiCamera.GetColorPixelCoordinatesFromDepthPixel(ImageResolution.Resolution640x480, new ImageViewArea(), x/2, y/2, (short)(depthMM[x + y*w] << 3), out cX, out cY); if (cX != -1 && colorFrameData != null && cX < w && cY < h) { c = colorFrameData[cX + cY * w]; return true; } return false; } void nui_DepthFrameReady(object sender, ImageFrameReadyEventArgs e) { processDepthFrame(e.ImageFrame.Image.Bits); depthFrameTex = generateDepthTex(); if (OnDepthFrame != null) OnDepthFrame(); fps.PushFrame(); Ready = true; } } }
- This second code is my failed attempt at converting to SDK V1.8
- Changes Made:
- Deleted Microsoft.Reasearch.Kinect and Microsoft.Research.Kinect.Nui
- Added Microsoft.Kinect
- Deleted "Runtime nui"
- Added KinectSensor nui
- Deleted nui = Runtime.Kinect[0];
- Added nui = KinectSensor.KinectSensor[0];
- Deleted Tries and chates from runtime
- Added
- nui.ColorStream.Enable(ColorImageFormat.RgbResolution640x480Fps30);
nui.DepthStream.Enable(); - Changed EvenHandlers from ImageFrameReady to DepthImageFrameReady and ColorImageFrameReady
- Added nui.Start();
- Changed PlanarImage to ColorImageFrame
- Changed img.bits to a new byte pixelData
- moved processDepthFrame to ColorFrameReady because DepthFrameReady passed numbers that were too large.
- Could not find a substitution for e.ImageFrame.Image. Note: e.ColorImageFrame.Image doesn't work)
- Deleted obsolete code nui.NuiCamera.GetColorPixelCoordinatesFromDepthPixel(ImageResolution.Resolution640x480, new ImageViewArea(), x/2, y/2, (short)(depthMM[x + y*w] << 3), out cX, out cY);
- Added
DepthImagePoint depthPoint = new DepthImagePoint();
ColorImagePoint colorPoint =
nui.CoordinateMapper.MapDepthPointToColorPoint(DepthImageFormat.Resolution640x480Fps30, depthPoint,
ColorImageFormat.RawBayerResolution640x480Fps30);
- Changed cx and cy to colorPoint.X and colorPoint.Y
using System; using System.Collections.Generic; using System.Linq; using System.Text; using Microsoft.Xna.Framework.Graphics; //------Old Code //using Microsoft.Research.Kinect; //using Microsoft.Research.Kinect.Nui; //----New Code using Microsoft.Kinect; //----------- using Microsoft.Xna.Framework; using System.Threading; using Emgu.CV; using Emgu.CV.Util; using Emgu.CV.Structure; namespace KTLib { public class KinectInterface { //--------Old Code //public Runtime nui; //--------New Code public KinectSensor nui; // Color[] colorFrameData; Texture2D colorFrameTex; Texture2D depthFrameTex; byte[] pixelData; public Texture2D ColorFrameTex { get { return colorFrameTex; } } public Texture2D DepthFrameTex { get { return depthFrameTex; } } public Image<Gray, Byte> FullDepth; public event Action OnDepthFrame; public ushort[] depthMM; public const int w = 640; public const int h = 480; GraphicsDevice gd; FPSCounter fps; public int FPS { get { return fps.FPS; } } double theta; void initKinect() { //Check for Kinect //if (KinectSensor.KinectSensors.Count == 0) //{ // return "Error: No Kinect Sensor Detected!"; // } //--------Old Code //nui = Runtime.Kinects[0]; //--------New Code nui = KinectSensor.KinectSensors[0]; //---------- /* try { nui.Initialize(RuntimeOptions.UseDepth | RuntimeOptions.UseColor); } catch (InvalidOperationException) { throw new Exception("Runtime initialization failed. Please make sure Kinect device is plugged in."); return; } try { nui.VideoStream.Open(ImageStreamType.Video, 2, ImageResolution.Resolution640x480, ImageType.Color); nui.DepthStream.Open(ImageStreamType.Depth, 2, ImageResolution.Resolution640x480, ImageType.Depth); } catch (InvalidOperationException) { throw new Exception("Failed to open stream. 
Please make sure to specify a supported image type and resolution."); return; } */ //Enable all streamins nui.ColorStream.Enable(ColorImageFormat.RgbResolution640x480Fps30); nui.DepthStream.Enable(); nui.DepthFrameReady += new EventHandler<DepthImageFrameReadyEventArgs>(nui_DepthFrameReady); nui.ColorFrameReady += new EventHandler<ColorImageFrameReadyEventArgs>(nui_ColorFrameReady); //start the sensor nui.Start(); nui.ElevationAngle = 10; theta = nui.ElevationAngle * Math.PI / 180; } float tanTmax, tanPmax; void initMaths() { float FOV_x = 57 * (float)(Math.PI / 180); float FOV_y = 43 * (float)(Math.PI / 180); float tmax = FOV_x / 2; float pmax = FOV_y / 2; tanTmax = (float)Math.Tan(tmax); tanPmax = (float)Math.Tan(pmax); } public KinectInterface(GraphicsDevice gd) { this.gd = gd; initMaths(); colorFrameTex = new Texture2D(gd, w, h); fps = new FPSCounter(); } public void Start() { initKinect(); Ready = false; } public bool Ready; //Might not be right Texture2D KVideoToTex(ColorImageFrameReadyEventArgs e) { using (ColorImageFrame img = e.OpenColorImageFrame()) { if (img != null) { if (pixelData == null) { pixelData = new byte[img.PixelDataLength]; } img.CopyPixelDataTo(pixelData); } Texture2D tex = new Texture2D(gd, img.Width, img.Height); colorFrameData = new Color[img.Width * img.Height]; for (int i = 0; i < colorFrameData.Length; i++) { colorFrameData[i].R = pixelData[4 * i + 2]; colorFrameData[i].G = pixelData[4 * i + 1]; colorFrameData[i].B = pixelData[4 * i]; colorFrameData[i].A = 255; } tex.SetData(colorFrameData); return tex; } } void nui_ColorFrameReady(object sender, ColorImageFrameReadyEventArgs e) { //bool recievedData = false; using (ColorImageFrame colorImageFrame = e.OpenColorImageFrame()) { if (colorImageFrame != null) { if (pixelData == null) { pixelData = new byte[colorImageFrame.PixelDataLength]; } colorImageFrame.CopyPixelDataTo(pixelData); //recievedData = true; } processDepthFrame(pixelData); } // Important !!! 
No translation for e.ColorFrame.Image colorFrameTex = KVideoToTex(e.ImageFrame.Image); } void processDepthFrame(byte[] depthFrame16) { depthMM = new ushort[w * h]; byte[] depth8 = new byte[w * h]; //for (int i16 = 0, i = 0; i16 < depthFrame16.Length; i16 += 2, i++) //{ // ushort packet = (ushort)((depthFrame16[i16 + 1] << 8) | depthFrame16[i16]); // ushort depth = (ushort)(0x0FFF & packet); // depthMM[i] = depth; //} for (int y = 0; y < h; y++) for (int x = 0; x < w; x++) { int i = y * w + x; int ref_i = y * w + w - 1 - x; //reflected x. the depth stream is reflected. ushort packet = (ushort)((depthFrame16[2 * i + 1] << 8) | depthFrame16[2 * i]); ushort depthVal = (ushort)(0x0FFF & packet); depthMM[ref_i] = depthVal; if (depthVal != 0) { depth8[ref_i] = (byte)(depthVal >> 4); } else { depth8[ref_i] = (byte)255; } } FullDepth = Helpers.ImageFromArray8(depth8, w, h); } Texture2D generateDepthTex() { Texture2D tex = new Texture2D(gd, w, h); Color[] data = new Color[w * h]; for (int i = 0; i < data.Length; i++) { ushort depth = depthMM[i]; //float intensity = 1 - (float)(depth-800) / (float)0x0fff; byte val = (byte)~(depth >> 4); Color c = new Color(); if (depth == 0) c = Color.Gray; else { // c = Color.Lerp(Color.Black, Color.White, intensity); c.R = val; c.G = val; c.B = val; c.A = 255; } data[i] = c; } tex.SetData<Color>(data); return tex; } public Vector3 RotateXCCW(Vector3 v, double theta) { var vrot = new Vector3(); vrot.Z = v.Z * (float)Math.Cos(theta) - v.Y * (float)Math.Sin(theta); vrot.Y = v.Z * (float)Math.Sin(theta) + v.Y * (float)Math.Cos(theta); vrot.X = v.X; return vrot; } public Vector3 UnprojectDepth(float depth_mm, float px, float py) { float z = depth_mm / 1000f; float xnorm = 2f * ((float)px / w - 0.5f); //[-1, 1] float ynorm = 2f * -((float)py / h - 0.5f); //[-1, 1] //float FOV_x = 57 * (float)(Math.PI / 180); //float FOV_y = 43 * (float)(Math.PI / 180); //float tmax = FOV_x / 2; //float pmax = FOV_y / 2; //float xproj = z * xnorm * 
(float)Math.Tan(tmax); //float yproj = z * ynorm * (float)Math.Tan(pmax); float xproj = z * xnorm * tanTmax; float yproj = z * ynorm * tanPmax; var v = new Vector3(xproj, yproj, z); //correct for elevation angle v = RotateXCCW(v, theta); return v; } public Vector3 UnprojectDepth(int px, int py) { ushort depth = depthMM[px + py * w]; if (depth == 0) //no data return Vector3.Zero; return UnprojectDepth(depth, px, py); } public bool ProjectToPx(Vector3 v, out Vector2 proj) { v = RotateXCCW(v, -theta); float FOV_x = 57 * (float)(Math.PI / 180); float FOV_y = 43 * (float)(Math.PI / 180); float tmax = FOV_x / 2; float pmax = FOV_y / 2; var xnorm = v.X / (v.Z * (float)Math.Tan(tmax)); var ynorm = v.Y / (v.Z * (float)Math.Tan(pmax)); float x = (float)(xnorm + 1) / 2 * w; float y = (float)(1 - ynorm) / 2 * h; proj = new Vector2(x, y); return v.Z > 0.0; //invalid if ball behind plane of projection } public bool GetColorFromDepth(int x, int y, out Color c) { c = Color.Black; //int cX, cY; //-------New Code DepthImagePoint depthPoint = new DepthImagePoint(); ColorImagePoint colorPoint = nui.CoordinateMapper.MapDepthPointToColorPoint(DepthImageFormat.Resolution640x480Fps30, depthPoint, ColorImageFormat.RawBayerResolution640x480Fps30); //-----Old Code //nui.NuiCamera.GetColorPixelCoordinatesFromDepthPixel(ImageResolution.Resolution640x480, new ImageViewArea(), x/2, y/2, (short)(depthMM[x + y*w] << 3), out cX, out cY); //---------------- if (colorPoint.X != -1 && colorFrameData != null && colorPoint.X < w && colorPoint.Y < h) { c = colorFrameData[colorPoint.X + colorPoint.Y * w]; return true; } return false; } void nui_DepthFrameReady(object sender, DepthImageFrameReadyEventArgs e) { //processDepthFrame(e.ImageFrame.Image.Bits); //processDepthFrame(); depthFrameTex = generateDepthTex(); if (OnDepthFrame != null) OnDepthFrame(); fps.PushFrame(); Ready = true; } } }
解决方案
See the documentation entry for porting assistance: http://msdn.microsoft.com/en-us/library/hh855351.aspx
这篇关于从Kinect SDK V1 Beta转换为V1.8的文章就介绍到这了,希望我们推荐的答案对大家有所帮助,也希望大家多多支持IT屋!
查看全文