国产探花免费观看_亚洲丰满少妇自慰呻吟_97日韩有码在线_资源在线日韩欧美_一区二区精品毛片,辰东完美世界有声小说,欢乐颂第一季,yy玄幻小说排行榜完本

首頁 > 編程 > C++ > 正文

基于C++實現kinect+opencv 獲取深度及彩色數據

2020-05-23 14:11:43
字體:
供稿:網(wǎng)友
本文的主要思想是用Kinect SDK 讀取彩色、深度、骨骼信息并用OpenCV顯示,非常實用,有需要的小伙伴可以參考下
 

開發環境 vs2010+OPENCV2.4.10

首先,下載最新的Kinect 2 SDK  http://www.microsoft.com/en-us/kinectforwindows/develop/downloads-docs.aspx

下載之後不要插入Kinect,最好也不要插入除了鍵盤鼠標以外的其它USB設備,然後安裝SDK,安裝完成之後插入Kinect,會有安裝新設備的提示。安裝完成之後可以去「開始」那裡找到兩個新安裝的軟件,一個可以顯示Kinect深度圖,另外一個軟件展示SDK中的各種例子程序。

進(jìn)入SDK的安裝目錄,可以找到sample這個(gè)文件夾,里面是四種語言編寫的例子,其中native是C++的,managed是C#的,還有另外兩種語言不熟悉,我就熟悉C++,反正只是試試的,就用C++了。

opencv+kinect .cpp

#include <opencv2/opencv.hpp>
#include <iostream>
#include <vector>
// Windows.h must be included before NuiApi.h, otherwise NuiApi.h fails to compile.
#include <Windows.h>
// Kinect for Windows v1 SDK.
#include "NuiApi.h"
#include <d3d11.h>

using namespace std;
using namespace cv;

// Depth window of interest in millimetres; pixels outside this range are
// clamped to pure white / pure black in the depth visualization.
const int MAX_DISTANCE = 3500;
const int MIN_DISTANCE = 200;

const LONG m_depthWidth   = 640;
const LONG m_depthHeight  = 480;
const LONG m_colorWidth   = 640;
const LONG m_colorHeight  = 480;
const LONG cBytesPerPixel = 4;

// Grabs synchronized color + depth frames from a Kinect v1 sensor, shows
// both in OpenCV windows, and uses the coordinate mapper to cut the
// foreground (MIN_DISTANCE..MAX_DISTANCE) out of the color image.
// Returns 0 on normal exit (ESC), a failed HRESULT or -1 on error.
int main()
{
    // Destination images: 8-bit BGR color and 8-bit normalized depth.
    Mat image_rgb(480, 640, CV_8UC3);
    Mat image_depth(480, 640, CV_8UC1);

    INuiSensor* m_pNuiSensor = NULL;

    // FIX: initialize and check the sensor count; the original read it
    // uninitialized when NuiGetSensorCount failed and never checked hr.
    int iSensorCount = 0;
    HRESULT hr = NuiGetSensorCount(&iSensorCount);
    if (FAILED(hr) || iSensorCount <= 0)
    {
        cout << "No Kinect sensor detected" << endl;
        return hr;
    }

    // Connect to the last enumerated sensor (only one is expected here).
    // FIX: the original NULL-checked m_pNuiSensor *before* creating it,
    // making the check dead code; check the creation result instead.
    hr = NuiCreateSensorByIndex(iSensorCount - 1, &m_pNuiSensor);
    if (FAILED(hr) || m_pNuiSensor == NULL)
    {
        cout << "NuiCreateSensorByIndex failed" << endl;
        return hr;
    }

    // Initialize the sensor for simultaneous color + depth streaming.
    hr = m_pNuiSensor->NuiInitialize(NUI_INITIALIZE_FLAG_USES_COLOR | NUI_INITIALIZE_FLAG_USES_DEPTH);
    if (FAILED(hr))
    {
        cout << "NuiInitialize failed" << endl;
        return hr;
    }

    // Frame-ready events and stream handles for both streams.
    HANDLE nextColorFrameEvent = CreateEvent(NULL, TRUE, FALSE, NULL);
    HANDLE colorStreamHandle   = NULL;
    HANDLE nextDepthFrameEvent = CreateEvent(NULL, TRUE, FALSE, NULL);
    HANDLE depthStreamHandle   = NULL;

    // Open the color stream at 640x480, buffering 2 frames.
    hr = m_pNuiSensor->NuiImageStreamOpen(NUI_IMAGE_TYPE_COLOR, NUI_IMAGE_RESOLUTION_640x480,
                                          0, 2, nextColorFrameEvent, &colorStreamHandle);
    if (FAILED(hr))
    {
        cout << "Could not open color image stream video" << endl;
        m_pNuiSensor->NuiShutdown();
        return hr;
    }

    // Open the depth stream at 640x480, buffering 2 frames.
    hr = m_pNuiSensor->NuiImageStreamOpen(NUI_IMAGE_TYPE_DEPTH, NUI_IMAGE_RESOLUTION_640x480,
                                          0, 2, nextDepthFrameEvent, &depthStreamHandle);
    if (FAILED(hr))
    {
        // FIX: the original printed the *color* stream error message here.
        cout << "Could not open depth image stream video" << endl;
        m_pNuiSensor->NuiShutdown();
        return hr;
    }

    cv::namedWindow("depth", CV_WINDOW_AUTOSIZE);
    moveWindow("depth", 300, 600);
    cv::namedWindow("colorImage", CV_WINDOW_AUTOSIZE);
    moveWindow("colorImage", 0, 200);

    // FIX: one reusable mapping buffer, hoisted out of the loop. The
    // original new[]/delete[]-ed 640*480 points every frame and leaked
    // them (plus locked textures) on the early-return error paths.
    vector<NUI_DEPTH_IMAGE_POINT> depthPoints(640 * 480);

    while (1)
    {
        NUI_IMAGE_FRAME pImageFrame_rgb;
        NUI_IMAGE_FRAME pImageFrame_depth;

        // Non-blocking poll (timeout 0) for a new color frame.
        if (WaitForSingleObject(nextColorFrameEvent, 0) == 0)
        {
            // Fetch the frame associated with the color stream handle.
            hr = m_pNuiSensor->NuiImageStreamGetNextFrame(colorStreamHandle, 0, &pImageFrame_rgb);
            if (FAILED(hr))
            {
                cout << "Could not get color image" << endl;
                m_pNuiSensor->NuiShutdown();
                return -1;
            }

            INuiFrameTexture* pTexture = pImageFrame_rgb.pFrameTexture;
            NUI_LOCKED_RECT lockedRect;

            // Lock the frame so Kinect cannot overwrite it while we read.
            // lockedRect: Pitch = bytes per row, pBits = first byte.
            pTexture->LockRect(0, &lockedRect, NULL, 0);
            if (lockedRect.Pitch != 0)
            {
                // Copy BGRA rows into the 3-channel BGR Mat.
                for (int i = 0; i < image_rgb.rows; i++)
                {
                    uchar* prt = image_rgb.ptr(i);
                    uchar* pBuffer = (uchar*)(lockedRect.pBits) + i * lockedRect.Pitch;
                    for (int j = 0; j < image_rgb.cols; j++)
                    {
                        // Source pixel is 4 bytes: B, G, R, unused 4th byte.
                        prt[3 * j]     = pBuffer[4 * j];
                        prt[3 * j + 1] = pBuffer[4 * j + 1];
                        prt[3 * j + 2] = pBuffer[4 * j + 2];
                    }
                }
                imshow("colorImage", image_rgb);
            }
            else
            {
                // FIX: the original emitted the literal characters "/r/n".
                cout << "Buffer length of received texture is bogus\r\n" << endl;
            }
            // FIX: unlock and release on BOTH branches — the original
            // leaked the frame whenever Pitch was 0.
            pTexture->UnlockRect(0);
            m_pNuiSensor->NuiImageStreamReleaseFrame(colorStreamHandle, &pImageFrame_rgb);

            BOOL nearMode = FALSE;
            INuiFrameTexture* pColorToDepthTexture = NULL;

            // Depth processing: block until a depth frame is ready.
            if (WaitForSingleObject(nextDepthFrameEvent, INFINITE) == 0)
            {
                hr = m_pNuiSensor->NuiImageStreamGetNextFrame(depthStreamHandle, 0, &pImageFrame_depth);
                if (FAILED(hr))
                {
                    // FIX: the message said "color", and NuiShutdown() was
                    // called as the bare single-sensor legacy function.
                    cout << "Could not get depth image" << endl;
                    m_pNuiSensor->NuiShutdown();
                    return -1;
                }

                // Extended depth-pixel texture used for color->depth mapping.
                hr = m_pNuiSensor->NuiImageFrameGetDepthImagePixelFrameTexture(
                    depthStreamHandle, &pImageFrame_depth, &nearMode, &pColorToDepthTexture);

                INuiFrameTexture* pTexture = pImageFrame_depth.pFrameTexture;
                NUI_LOCKED_RECT lockedRect;
                NUI_LOCKED_RECT ColorToDepthLockRect;

                pTexture->LockRect(0, &lockedRect, NULL, 0);
                pColorToDepthTexture->LockRect(0, &ColorToDepthLockRect, NULL, 0);

                // Normalize: map [MIN_DISTANCE, MAX_DISTANCE] mm to [0,255];
                // out-of-range pixels become edge values 0 / 255.
                for (int i = 0; i < image_depth.rows; i++)
                {
                    uchar* prt = image_depth.ptr<uchar>(i);
                    uchar* pBuffer = (uchar*)(lockedRect.pBits) + i * lockedRect.Pitch;
                    // Each depth sample is 2 bytes, so view the row as USHORT.
                    USHORT* pBufferRun = (USHORT*)pBuffer;
                    for (int j = 0; j < image_depth.cols; j++)
                    {
                        // FIX: the packed pixel stores depth in bits 3-15, so
                        // millimetres come from shifting RIGHT by 3 (see the
                        // SDK's NuiDepthPixelToDepth); the original shifted left.
                        USHORT depthMM = pBufferRun[j] >> 3;
                        if (depthMM > MAX_DISTANCE)
                            prt[j] = 255;
                        else if (depthMM < MIN_DISTANCE)
                            prt[j] = 0;
                        else
                            // FIX: scale by 255 — 256 overflowed BYTE to 0
                            // at exactly MAX_DISTANCE.
                            prt[j] = (BYTE)(255 * depthMM / MAX_DISTANCE);
                    }
                }
                imshow("depth", image_depth);

                // Alignment: cut the foreground out of the color frame.
                if (ColorToDepthLockRect.Pitch != 0)
                {
                    // Coordinate mapper converts between the sensor's
                    // color / depth / skeleton spaces.
                    INuiCoordinateMapper* pMapper = NULL;
                    HRESULT hrState = m_pNuiSensor->NuiGetCoordinateMapper(&pMapper);
                    if (SUCCEEDED(hrState))
                    {
                        // Map every color pixel to its depth-space point.
                        // Inputs: depth pixels from the extended texture;
                        // output: one NUI_DEPTH_IMAGE_POINT per color pixel.
                        hrState = pMapper->MapColorFrameToDepthFrame(
                            NUI_IMAGE_TYPE_COLOR, NUI_IMAGE_RESOLUTION_640x480, NUI_IMAGE_RESOLUTION_640x480,
                            640 * 480, (NUI_DEPTH_IMAGE_PIXEL*)ColorToDepthLockRect.pBits,
                            640 * 480, &depthPoints[0]);
                        if (SUCCEEDED(hrState))
                        {
                            // Black canvas; only foreground pixels are copied in.
                            Mat show = Mat::zeros(480, 640, CV_8UC3);
                            for (int i = 0; i < image_rgb.rows; i++)
                            {
                                uchar* prt_rgb  = image_rgb.ptr(i);
                                uchar* prt_show = show.ptr(i);
                                for (int j = 0; j < image_rgb.cols; j++)
                                {
                                    long index = i * 640 + j;
                                    NUI_DEPTH_IMAGE_POINT dp = depthPoints[index];
                                    // Keep pixels that map inside the depth frame
                                    // AND whose mapped depth is in the foreground
                                    // range (use dp.depth, not a re-lookup).
                                    if (dp.x >= 0 && dp.x < image_depth.cols &&
                                        dp.y >= 0 && dp.y < image_depth.rows &&
                                        dp.depth >= MIN_DISTANCE && dp.depth <= MAX_DISTANCE)
                                    {
                                        prt_show[3 * j]     = prt_rgb[3 * j];
                                        prt_show[3 * j + 1] = prt_rgb[3 * j + 1];
                                        prt_show[3 * j + 2] = prt_rgb[3 * j + 2];
                                    }
                                }
                            }
                            imshow("show", show);
                        }
                        // FIX: release the mapper and fall through to cleanup;
                        // the original returned from inside the loop on failure,
                        // leaking the locked textures and the frame.
                        pMapper->Release();
                    }
                }

                // FIX: unlock AND release both textures — the original never
                // unlocked or released pColorToDepthTexture (per-frame leak).
                pColorToDepthTexture->UnlockRect(0);
                pColorToDepthTexture->Release();
                pTexture->UnlockRect(0);
                m_pNuiSensor->NuiImageStreamReleaseFrame(depthStreamHandle, &pImageFrame_depth);
            }
            else
            {
                cout << "Buffer length of received texture is bogus\r\n" << endl;
            }
        }

        // ESC quits.
        if (waitKey(20) == 27)
            break;
    }

    // FIX: orderly shutdown — the original leaked the sensor and both events.
    m_pNuiSensor->NuiShutdown();
    m_pNuiSensor->Release();
    CloseHandle(nextColorFrameEvent);
    CloseHandle(nextDepthFrameEvent);
    return 0;
}


發(fā)表評論 共有條評論
用戶名: 密碼:
驗(yàn)證碼: 匿名發(fā)表
主站蜘蛛池模板: 大余县| 陆丰市| 钟山县| 玉溪市| 阿拉善左旗| 新巴尔虎右旗| 宜昌市| 饶阳县| 津市市| 义马市| 镇平县| 固阳县| 通化县| 平乐县| 东莞市| 尉犁县| 双江| 玉龙| 华池县| 化德县| 德钦县| 县级市| 成武县| 霍州市| 封开县| 东海县| 禹城市| 繁峙县| 当阳市| 淮安市| 博白县| 涟源市| 巧家县| 阳春市| 凤阳县| 武义县| 黔西县| 永州市| 蒲江县| 西青区| 和硕县|