記錄一下創新項目的學習進程。本項目基於Kinect2.0採集室內場景深度信息,在Pointnet框架中進行學習,實現識別的效果。
先放下配置的環境,具體教程後續更新。
配置環境:win10+vs2019+opencv+PCL1.10.1+Kinect v2
框架
1.1包含目錄:
C:\Program Files\Microsoft SDKs\Kinect\v2.0_1409\inc
D:\PCL 1.10.1\3rdParty\VTK\include\vtk-8.2
D:\PCL 1.10.1\3rdParty\Qhull\include
C:\Program Files\OpenNI2\Include
D:\PCL 1.10.1\3rdParty\FLANN\include
D:\PCL 1.10.1\3rdParty\Eigen\eigen3
D:\PCL 1.10.1\3rdParty\Boost\include\boost-1_72
D:\PCL 1.10.1\include\pcl-1.10
1.2庫目錄:
我直接把opencv+Kinect+PCL都貼上來了,讀者請按照本身的地址。
C:\Program Files\Microsoft SDKs\Kinect\v2.0_1409\Lib\x64
D:\PCL 1.10.1\lib
D:\PCL 1.10.1\3rdParty\VTK\lib
D:\PCL 1.10.1\3rdParty\Qhull\lib
C:\Program Files\OpenNI2\Lib
D:\PCL 1.10.1\3rdParty\FLANN\lib
D:\PCL 1.10.1\3rdParty\Boost\lib
1.3附加依賴項:
kinect20.lib
opencv_world412d.lib
2.1點雲線程
點雲數據是指在一個三維座標系中的一組向量的集合。這些向量一般以X,Y,Z三維座標的形式表示,通常主要表明一個物體的外表面幾何形狀,除此以外點雲數據還能夠附帶RGB信息,即每一個座標點的顏色信息,或者是其餘的信息。
PCL這個開源庫專門處理pcd格式的文件,它實現了點雲獲取、濾波、分割、配準、檢索、特徵提取、識別、追蹤、曲面重建、可視化等。
2.2代碼
咱們經過kinect v2自帶的SDK進行修改,調動kinect相機獲取點雲圖像並保存爲pcd格式
#include <Kinect.h>
#include <opencv2\opencv.hpp>
#include <pcl/visualization/cloud_viewer.h>
#include <pcl/visualization/pcl_visualizer.h>
#include <opencv2/highgui/highgui.hpp>
#include <pcl/point_cloud.h>
#include <pcl/io/pcd_io.h>
using namespace cv;
using namespace std;
IKinectSensor* pSensor;        // Kinect sensor handle, opened in initKinect()
ICoordinateMapper* pMapper;    // maps depth pixels into color / camera space
const int iDWidth = 512, iDHeight = 424;   // depth frame size
const int iCWidth = 1920, iCHeight = 1080; // color frame size
// Both mapping tables hold ONE entry per DEPTH pixel: the MapDepthFrameTo*
// calls in main() fill exactly iDWidth*iDHeight entries.
// (Fix: depth2rgb was previously sized iCWidth*iCHeight, over-allocating
// roughly 14 MB of static storage for entries that were never written.)
CameraSpacePoint depth2xyz[iDWidth * iDHeight];
ColorSpacePoint depth2rgb[iDWidth * iDHeight];
// One-time viewer initialization callback: paint the background white.
void viewerOneOff(pcl::visualization::PCLVisualizer& viewer)
{
    const double red = 1.0, green = 1.0, blue = 1.0; // white background
    viewer.setBackgroundColor(red, green, blue);
}
//啓動Kinect
// Open the default Kinect v2 sensor and fetch its coordinate mapper.
// Returns true only if every step succeeded; fills the globals pSensor
// and pMapper. (Fix: the original ignored the HRESULTs of
// get_CoordinateMapper and Open, so it could report success with a
// sensor that never actually opened.)
bool initKinect()
{
    if (FAILED(GetDefaultKinectSensor(&pSensor)) || pSensor == nullptr)
        return false;
    if (FAILED(pSensor->get_CoordinateMapper(&pMapper)))
        return false;
    if (FAILED(pSensor->Open()))
        return false;
    cout << "已打開相機" << endl;
    return true;
}
//獲取深度幀
// Acquire one depth frame as a 16-bit single-channel Mat (mm per pixel).
// Busy-waits until the sensor delivers a frame.
// (Fix: the original leaked the COM frame source and reader on every
// call — Release() was never invoked; also removed the dead `break`
// after `return`.)
Mat DepthData()
{
    IDepthFrameSource* pFrameSource = nullptr;
    pSensor->get_DepthFrameSource(&pFrameSource);
    IDepthFrameReader* pFrameReader = nullptr;
    pFrameSource->OpenReader(&pFrameReader);
    pFrameSource->Release();   // source no longer needed once the reader exists
    IDepthFrame* pFrame = nullptr;
    Mat mDepthImg(iDHeight, iDWidth, CV_16UC1);
    while (true)
    {
        if (pFrameReader->AcquireLatestFrame(&pFrame) == S_OK)
        {
            pFrame->CopyFrameDataToArray(iDWidth * iDHeight, reinterpret_cast<UINT16*>(mDepthImg.data));
            cout << "已獲取深度幀" << endl;
            pFrame->Release();
            pFrameReader->Release();   // fix: reader was leaked per call
            return mDepthImg;
        }
    }
}
//獲取彩色幀
// Acquire one color frame converted to BGRA as an 8-bit 4-channel Mat.
// Busy-waits until the sensor delivers a frame.
// (Fix: the original leaked the COM frame source and reader on every
// call — Release() was never invoked; also removed the dead `break`
// after `return`.)
Mat RGBData()
{
    IColorFrameSource* pFrameSource = nullptr;
    pSensor->get_ColorFrameSource(&pFrameSource);
    IColorFrameReader* pFrameReader = nullptr;
    pFrameSource->OpenReader(&pFrameReader);
    pFrameSource->Release();   // source no longer needed once the reader exists
    IColorFrame* pFrame = nullptr;
    Mat mColorImg(iCHeight, iCWidth, CV_8UC4);
    while (true)
    {
        if (pFrameReader->AcquireLatestFrame(&pFrame) == S_OK)
        {
            pFrame->CopyConvertedFrameDataToArray(iCWidth * iCHeight * 4, mColorImg.data, ColorImageFormat_Bgra);
            cout << "已獲取彩色幀" << endl;
            pFrame->Release();
            pFrameReader->Release();   // fix: reader was leaked per call
            return mColorImg;
        }
    }
}
int main()
{
initKinect();
pcl::visualization::CloudViewer viewer("Cloud Viewer");//簡單顯示點雲的可視化工具類
viewer.runOnVisualizationThreadOnce(viewerOneOff);//點雲顯示線程,渲染輸出時每次都調用
pcl::PointCloud<pcl::PointXYZRGBA>::Ptr cloud(new pcl::PointCloud<pcl::PointXYZRGBA>);
Mat mColorImg;
Mat mDepthImg;
int count = 0;
while (count <= 0)
{
Sleep(5000);
mColorImg = RGBData();
mDepthImg = DepthData();
imshow("RGB", mColorImg);
pMapper->MapDepthFrameToColorSpace(iDHeight * iDWidth, reinterpret_cast<UINT16*>(mDepthImg.data), iDHeight* iDWidth, depth2rgb);//深度圖到顏色的映射
pMapper->MapDepthFrameToCameraSpace(iDHeight * iDWidth, reinterpret_cast<UINT16*>(mDepthImg.data), iDHeight* iDWidth, depth2xyz);//深度圖到相機三維空間的映射
float maxX = depth2xyz[0].X, maxY = depth2xyz[0].Y, maxZ = depth2xyz[0].Z;
float minX = depth2xyz[0].X, minY = depth2xyz[0].Y, minZ = depth2xyz[0].Z;
for (size_t i = 0; i < iDWidth; i++)
{
for (size_t j = 0; j < iDHeight; j++)
{
pcl::PointXYZRGBA pointTemp;
if (depth2xyz[i + j * iDWidth].Z > 0.5 && depth2rgb[i + j * iDWidth].X < 1920 && depth2rgb[i + j * iDWidth].X>0 && depth2rgb[i + j * iDWidth].Y < 1080 && depth2rgb[i + j * iDWidth].Y>0)
{
pointTemp.x = -depth2xyz[i + j * iDWidth].X;
if (depth2xyz[i + j * iDWidth].X > maxX) maxX = -depth2xyz[i + j * iDWidth].X;
if (depth2xyz[i + j * iDWidth].X < minX) minX = -depth2xyz[i + j * iDWidth].X;
pointTemp.y = depth2xyz[i + j * iDWidth].Y;
if (depth2xyz[i + j * iDWidth].Y > maxY) maxY = depth2xyz[i + j * iDWidth].Y;
if (depth2xyz[i + j * iDWidth].Y < minY) minY = depth2xyz[i + j * iDWidth].Y;
pointTemp.z = depth2xyz[i + j * iDWidth].Z;
if (depth2xyz[i + j * iDWidth].Z != 0.0)
{
if (depth2xyz[i + j * iDWidth].Z > maxZ) maxZ = depth2xyz[i + j * iDWidth].Z;
if (depth2xyz[i + j * iDWidth].Z < minZ) minZ = depth2xyz[i + j * iDWidth].Z;
}
pointTemp.b = mColorImg.at<cv::Vec4b>(depth2rgb[i + j * iDWidth].Y, depth2rgb[i + j * iDWidth].X)[0];
pointTemp.g = mColorImg.at<cv::Vec4b>(depth2rgb[i + j * iDWidth].Y, depth2rgb[i + j * iDWidth].X)[1];
pointTemp.r = mColorImg.at<cv::Vec4b>(depth2rgb[i + j * iDWidth].Y, depth2rgb[i + j * iDWidth].X)[2];
pointTemp.a = mColorImg.at<cv::Vec4b>(depth2rgb[i + j * iDWidth].Y, depth2rgb[i + j * iDWidth].X)[3];
cloud->push_back(pointTemp);
}
}
}
//pcl::io::savePCDFileASCII("deng.pcd", *cloud);
/*char image_name[13];
sprintf(image_name, "%s%d%s", "C:/Users/Xu_Ruijie/Desktop", count, ".pcd");
imwrite(image_name, im);*/
string s = "我來了";
s += ".pcd";
pcl::io::savePCDFile(s, *cloud, false);
std::cerr << "Saved " << cloud->points.size() << " data points." << std::endl;
viewer.showCloud(cloud);
count++;
mColorImg.release();
mDepthImg.release();
cloud->clear();
waitKey(10);
}
return 0;
}
如圖所示的燈即爲獲得的點雲圖像。
本文分享自微信公衆號 - WHICH工做室(which_cn)。
若有侵權,請聯繫 support@oschina.cn 刪除。
本文參與「OSC源創計劃」,歡迎正在閱讀的你也加入,一塊兒分享。