• Kinect 2.0 point cloud data acquisition


    Continuing from the previous post: Kinect 2.0 data acquisition

    http://blog.csdn.net/jiaojialulu/article/details/53087988

    The original blogger is very thorough; the code basically works as pasted. To save the point cloud, just change the commented-out display section to file output (a sketch is given after the listing).

    #include "stdafx.h"
    #include "kinect.h"
    #include <iostream>
    #include <opencv2/core/core.hpp>  
    #include <opencv2/highgui/highgui.hpp>  
    #include <limits>   // std::numeric_limits, used to detect invalid mapped points
    using namespace cv;
    using namespace std;
    
    // Safely release a COM interface pointer
    template<class Interface>
    inline void SafeRelease(Interface *& pInterfaceToRelease)
    {
    	if (pInterfaceToRelease != NULL)
    	{
    		pInterfaceToRelease->Release();
    		pInterfaceToRelease = NULL;
    	}
    }
    
    int _tmain(int argc, _TCHAR* argv[])
    {
    	// Get the default Kinect sensor
    	IKinectSensor* m_pKinectSensor;
    	HRESULT hr;
    	hr = GetDefaultKinectSensor(&m_pKinectSensor);
    	if (FAILED(hr))
    	{
    		return hr;
    	}
    
    	IMultiSourceFrameReader* m_pMultiFrameReader=NULL;
    	if (m_pKinectSensor)
    	{
    		hr = m_pKinectSensor->Open();
    		if (SUCCEEDED(hr))
    		{
    			// Open a multi-source frame reader for color, infrared, and depth
    			hr = m_pKinectSensor->OpenMultiSourceFrameReader(
    				FrameSourceTypes::FrameSourceTypes_Color |
    				FrameSourceTypes::FrameSourceTypes_Infrared |
    				FrameSourceTypes::FrameSourceTypes_Depth,
    				&m_pMultiFrameReader);
    		}
    	}
    
    	if (!m_pKinectSensor || FAILED(hr))
    	{
    		return E_FAIL;
    	}
    	// The three frames and their frame references
    	IDepthFrameReference* m_pDepthFrameReference = NULL;
    	IColorFrameReference* m_pColorFrameReference = NULL;
    	IInfraredFrameReference* m_pInfraredFrameReference = NULL;
    	IInfraredFrame* m_pInfraredFrame = NULL;
    	IDepthFrame* m_pDepthFrame = NULL;
    	IColorFrame* m_pColorFrame = NULL;
    	// Image buffers for the three streams
    	Mat i_rgb(1080, 1920, CV_8UC4);      // Note: this must be a 4-channel image; the Kinect color data can only be copied out here as BGRA
    	Mat i_depth(424, 512, CV_8UC1);
    	Mat i_ir(424, 512, CV_16UC1);
    
    	UINT16 *depthData = new UINT16[424 * 512];
    	IMultiSourceFrame* m_pMultiFrame = nullptr;
    	while (true)
    	{
    		// Acquire the next multi-source frame
    		hr = m_pMultiFrameReader->AcquireLatestFrame(&m_pMultiFrame);
    		if (FAILED(hr) || !m_pMultiFrame)
    		{
    			//cout << "!!!" << endl;
    			continue;
    		}
    
    		// Split the multi-source frame into its color, depth, and infrared frames
    		if (SUCCEEDED(hr))
    			hr = m_pMultiFrame->get_ColorFrameReference(&m_pColorFrameReference);
    		if (SUCCEEDED(hr))
    			hr = m_pColorFrameReference->AcquireFrame(&m_pColorFrame);
    		if (SUCCEEDED(hr))
    			hr = m_pMultiFrame->get_DepthFrameReference(&m_pDepthFrameReference);
    		if (SUCCEEDED(hr))
    			hr = m_pDepthFrameReference->AcquireFrame(&m_pDepthFrame);
    		if (SUCCEEDED(hr))
    			hr = m_pMultiFrame->get_InfraredFrameReference(&m_pInfraredFrameReference);
    		if (SUCCEEDED(hr))
    			hr = m_pInfraredFrameReference->AcquireFrame(&m_pInfraredFrame);
    
    		// Copy the color frame into the image buffer
    		UINT nColorBufferSize = 1920 * 1080 * 4;
    		if (SUCCEEDED(hr))
    			hr = m_pColorFrame->CopyConvertedFrameDataToArray(nColorBufferSize, reinterpret_cast<BYTE*>(i_rgb.data), ColorImageFormat::ColorImageFormat_Bgra);
    
    		// Copy the depth frame into the image buffer
    		if (SUCCEEDED(hr))
    		{
    			hr = m_pDepthFrame->CopyFrameDataToArray(424 * 512, depthData);
    			for (int i = 0; i < 512 * 424; i++)
    			{
    				// 0-255 depth visualization: keep only the low 8 bits of the depth value so the differences show up clearly
    				BYTE intensity = static_cast<BYTE>(depthData[i] % 256);
    				reinterpret_cast<BYTE*>(i_depth.data)[i] = intensity;
    			}
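    			// Note: an alternative (my own suggestion, not in the original post) is to scale
    			// the sensor's roughly 500-4500 mm working range to 0-255 instead of taking the
    			// low 8 bits, which avoids the banding produced by the modulo mapping, e.g.:
    			//   BYTE intensity = static_cast<BYTE>(depthData[i] > 4500 ? 255 : depthData[i] * 255 / 4500);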
    			// Ask the sensor for its coordinate mapper and map the depth frame into color space
    			ICoordinateMapper* m_pCoordinateMapper = NULL;
    			hr = m_pKinectSensor->get_CoordinateMapper(&m_pCoordinateMapper);
    			ColorSpacePoint* m_pColorCoordinates = new ColorSpacePoint[512 * 424];
    			if (SUCCEEDED(hr))
    				hr = m_pCoordinateMapper->MapDepthFrameToColorSpace(512 * 424, depthData, 512 * 424, m_pColorCoordinates);
    		
    			Mat i_depthToRgb(424, 512, CV_8UC4);
    			if (SUCCEEDED(hr))
    			{
    				for (int i = 0; i < 424 * 512; i++)
    				{
    					ColorSpacePoint p = m_pColorCoordinates[i];
    					if (p.X != -std::numeric_limits<float>::infinity() && p.Y != -std::numeric_limits<float>::infinity())
    					{
    						int colorX = static_cast<int>(p.X + 0.5f);
    						int colorY = static_cast<int>(p.Y + 0.5f);
    
    						if ((colorX >= 0 && colorX < 1920) && (colorY >= 0 && colorY < 1080))
    						{
    							i_depthToRgb.data[i * 4] = i_rgb.data[(colorY * 1920 + colorX) * 4];
    							i_depthToRgb.data[i * 4 + 1] = i_rgb.data[(colorY * 1920 + colorX) * 4 + 1];
    							i_depthToRgb.data[i * 4 + 2] = i_rgb.data[(colorY * 1920 + colorX) * 4 + 2];
    							i_depthToRgb.data[i * 4 + 3] = i_rgb.data[(colorY * 1920 + colorX) * 4 + 3];
    						}
    					}
    				}
    			}
    			imshow("rgb2depth", i_depthToRgb);
    			if (waitKey(1) == VK_ESCAPE)
    				break;
    			CameraSpacePoint* m_pCameraCoordinates = new CameraSpacePoint[512 * 424];
    			if (SUCCEEDED(hr))
    			{
    				hr = m_pCoordinateMapper->MapDepthFrameToCameraSpace(512 * 424, depthData, 512 * 424, m_pCameraCoordinates);
    			}
    			if (SUCCEEDED(hr))
    			{
    				for (int i = 0; i < 512 * 424; i++)
    				{
    					CameraSpacePoint p = m_pCameraCoordinates[i];
    					if (p.X != -std::numeric_limits<float>::infinity() && p.Y != -std::numeric_limits<float>::infinity() && p.Z != -std::numeric_limits<float>::infinity())
    					{
    						float cameraX = static_cast<float>(p.X);
    						float cameraY = static_cast<float>(p.Y);
    						float cameraZ = static_cast<float>(p.Z);
    
    						//cout << "x: " << cameraX << "y: " << cameraY << "z: " << cameraZ << endl;
    						//GLubyte *rgb = new GLubyte();
    						//rgb[2] = i_depthToRgb.data[i * 4 + 0];
    						//rgb[1] = i_depthToRgb.data[i * 4 + 1];
    						//rgb[0] = i_depthToRgb.data[i * 4 + 2];
    						//// Draw the point
    						//glColor3ubv(rgb);
    						//glVertex3f(cameraX, -cameraY, cameraZ);
    					}
    				}
    			}
    			
    			// Free the per-frame mapping buffers and release the mapper;
    			// without this the loop leaks memory on every frame
    			delete[] m_pColorCoordinates;
    			delete[] m_pCameraCoordinates;
    			SafeRelease(m_pCoordinateMapper);
    		}
    
    
    		// Display
    		/*imshow("rgb", i_rgb);
    		if (waitKey(1) == VK_ESCAPE)
    			break;*/
    		imshow("depth", i_depth);
    		if (waitKey(1) == VK_ESCAPE)
    			break;
    
    		
    		// Release the per-frame resources
    		SafeRelease(m_pColorFrame);
    		SafeRelease(m_pDepthFrame);
    		SafeRelease(m_pInfraredFrame);
    		SafeRelease(m_pColorFrameReference);
    		SafeRelease(m_pDepthFrameReference);
    		SafeRelease(m_pInfraredFrameReference);
    		SafeRelease(m_pMultiFrame);
    	}
    	// Close the windows and the sensor, then release what is left
    	cv::destroyAllWindows();
    	m_pKinectSensor->Close();
    	delete[] depthData;
    	SafeRelease(m_pMultiFrameReader);
    	SafeRelease(m_pKinectSensor);
    	std::system("pause");
    
    	return 0;
    }
    

    A frame of data captured in our lab, haha!
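    As mentioned at the top, the commented-out OpenGL section can be replaced with file output. Below is a minimal sketch of that idea: a hypothetical helper SavePointCloudPly (the name and signature are my own, not from the original post) that writes one frame's valid camera-space points, together with their registered colors from i_depthToRgb, to an ASCII PLY file that MeshLab or CloudCompare can open.

    #include <fstream>
    #include <sstream>
    #include <string>
    #include <limits>
    #include <opencv2/core/core.hpp>
    #include "kinect.h"   // CameraSpacePoint

    // Write one frame of mapped points to an ASCII PLY file.
    // points   : the 512*424 camera-space points from MapDepthFrameToCameraSpace
    // colors   : the 424x512 CV_8UC4 image registered to depth (i_depthToRgb)
    // filename : output path, e.g. "cloud.ply"
    bool SavePointCloudPly(const CameraSpacePoint* points, const cv::Mat& colors, const std::string& filename)
    {
    	const float inf = -std::numeric_limits<float>::infinity();
    	std::ostringstream body;
    	int valid = 0;
    	for (int i = 0; i < 512 * 424; i++)
    	{
    		const CameraSpacePoint& p = points[i];
    		if (p.X == inf || p.Y == inf || p.Z == inf || p.Z <= 0.0f)
    			continue;                        // skip pixels with no depth
    		const uchar* bgra = colors.data + i * 4;
    		body << p.X << " " << p.Y << " " << p.Z << " "
    			<< (int)bgra[2] << " " << (int)bgra[1] << " " << (int)bgra[0] << "\n";
    		valid++;
    	}
    	std::ofstream out(filename.c_str());
    	if (!out)
    		return false;
    	// Minimal ASCII PLY header: xyz in meters plus one RGB color per vertex
    	out << "ply\nformat ascii 1.0\n"
    		<< "element vertex " << valid << "\n"
    		<< "property float x\nproperty float y\nproperty float z\n"
    		<< "property uchar red\nproperty uchar green\nproperty uchar blue\n"
    		<< "end_header\n"
    		<< body.str();
    	return true;
    }

    Inside the loop it could be called where the commented-out glVertex3f code sits, for example SavePointCloudPly(m_pCameraCoordinates, i_depthToRgb, "cloud.ply"); in practice you would trigger it on a key press rather than saving every frame.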

    The code as originally posted had a memory leak: the coordinate buffers and the coordinate mapper obtained inside the loop were never freed, and after running for a while it filled up all of my machine's physical memory. The listing above releases them at the end of every frame; a variant that avoids the per-frame allocations entirely is sketched below.

    The updated code is here.
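    For reference, here is a sketch of that variant (it reuses the variable names from the listing and is an excerpt, not a complete program): acquire the coordinate mapper and allocate the two mapping buffers once before the frame loop, and free them once after it.

    // One-time setup before the while (true) loop:
    ICoordinateMapper* m_pCoordinateMapper = NULL;
    hr = m_pKinectSensor->get_CoordinateMapper(&m_pCoordinateMapper);
    ColorSpacePoint*  m_pColorCoordinates  = new ColorSpacePoint[512 * 424];
    CameraSpacePoint* m_pCameraCoordinates = new CameraSpacePoint[512 * 424];

    // ... inside the loop, MapDepthFrameToColorSpace / MapDepthFrameToCameraSpace
    //     reuse these same buffers every frame, so nothing is allocated per frame ...

    // One-time cleanup after the loop, next to the other SafeRelease calls:
    delete[] m_pColorCoordinates;
    delete[] m_pCameraCoordinates;
    SafeRelease(m_pCoordinateMapper);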

  • Original post: https://www.cnblogs.com/yhlx125/p/6643821.html