Kinect depth-to-color image alignment program

The program below opens a Kinect v2 sensor through a multi-source frame reader, grabs matching color and depth frames, fills holes in the depth map with a neighborhood filter, maps each color pixel into depth space with ICoordinateMapper, and saves the cropped color image together with the aligned depth image on every iteration.


    //#include "duiqi.hpp"

     

    #include "kinect.h"

    #include <iostream>

    #include "opencv2/opencv.hpp"

    #include <opencv2/core/core.hpp>

    #include <opencv2/highgui/highgui.hpp>

     

    #include <fstream>

     

    using namespace cv;

    using namespace std;

     

    Mat depthFilter(UINT16 *depthData);

     

     
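    // Helper: release a COM interface pointer (Kinect SDK objects are COM objects) and null it out.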

    template<class Interface>

    inline void SafeRelease(Interface *& pInterfaceToRelease)

    {

    if (pInterfaceToRelease != NULL)

    {

    pInterfaceToRelease->Release();

    pInterfaceToRelease = NULL;

    }

    }

     

    UINT16 uDepthMin = 0, uDepthMax = 0;

     

     
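    // Entry point: open the sensor, read color and depth frames, align depth to the color view, and save both streams to disk.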

    int main()

    {

    IKinectSensor* m_pKinectSensor;

    HRESULT hr;

    ICoordinateMapper*      m_pCoordinateMapper = NULL;

    CameraIntrinsics* m_pCameraIntrinsics = new CameraIntrinsics();

     

     

    hr = GetDefaultKinectSensor(&m_pKinectSensor);

    if (FAILED(hr))

    {

    return hr;

    }

     

    IMultiSourceFrameReader* m_pMultiFrameReader = NULL;

     

    if (m_pKinectSensor)

    {

    hr = m_pKinectSensor->Open();

    if (SUCCEEDED(hr))

    {

    hr = m_pKinectSensor->OpenMultiSourceFrameReader(

    FrameSourceTypes::FrameSourceTypes_Color |

    FrameSourceTypes::FrameSourceTypes_Infrared |

    FrameSourceTypes::FrameSourceTypes_Depth,

    &m_pMultiFrameReader);

    }

    }

     

    if (SUCCEEDED(hr))

    {

    hr = m_pKinectSensor->get_CoordinateMapper(&m_pCoordinateMapper);

    }

     

    if (!m_pKinectSensor || FAILED(hr))

    {

    return E_FAIL;

    }

     

     

    IMultiSourceFrame* m_pMultiFrame = nullptr;

     

    IDepthFrameReference* m_pDepthFrameReference = NULL;

    IColorFrameReference* m_pColorFrameReference = NULL;

    IInfraredFrameReference* m_pInfraredFrameReference = NULL;

    IInfraredFrame* m_pInfraredFrame = NULL;

    IDepthFrame* m_pDepthFrame = NULL;

    IColorFrame* m_pColorFrame = NULL;

     

     

    Mat rgb(1080, 1920, CV_8UC4);

    Mat rgb_resize(540, 960, CV_8UC4);

     

    DepthSpacePoint*        m_pDepthCoordinates = NULL;

    ColorSpacePoint*        m_pColorCoordinates = NULL;

    CameraSpacePoint*       m_pCameraCoordinates = NULL;

     

    m_pColorCoordinates = new ColorSpacePoint[512 * 424];

    m_pCameraCoordinates = new CameraSpacePoint[512 * 424];

     

    UINT16 *depthData = new UINT16[424 * 512];

    Mat depth(424, 512, CV_16UC1);

    Mat depth8U(424, 512, CV_8U);

     

    vector<DepthSpacePoint> depthSpacePoints(1920 * 1080);

    Mat CoordinateMapperMat(1080, 1520, CV_8U);

    Mat CoordinateMapperMat_resize(540, 960, CV_8U);

     

    int savecount = 0;

     
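    // Main acquisition loop: grab the latest multi-source frame, then pull the color and depth frames out of it.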

    while (true)

    {

    hr = m_pMultiFrameReader->AcquireLatestFrame(&m_pMultiFrame);

    if (FAILED(hr) || !m_pMultiFrame)

    {

    continue;

    }

     

    if (SUCCEEDED(hr))

    hr = m_pMultiFrame->get_ColorFrameReference(&m_pColorFrameReference);

    if (SUCCEEDED(hr))

    hr = m_pColorFrameReference->AcquireFrame(&m_pColorFrame);

    if (SUCCEEDED(hr))

    hr = m_pMultiFrame->get_DepthFrameReference(&m_pDepthFrameReference);

    if (SUCCEEDED(hr))

    hr = m_pDepthFrameReference->AcquireFrame(&m_pDepthFrame);

     

    /*m_pDepthFrame->get_DepthMinReliableDistance(&uDepthMin);

    m_pDepthFrame->get_DepthMaxReliableDistance(&uDepthMax);

    cout << "Reliable Distance: " << uDepthMin << " - " << uDepthMax << endl;*/

     

    savecount++;

    cout << savecount << endl;

    ostringstream savecountstr;

    savecountstr << savecount;

     

     
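    // Copy the 1920x1080 BGRA color frame, crop a 1520x1080 ROI starting at x = 200, then downscale it by 0.4 and save it.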

    UINT nColorBufferSize = 1920 * 1080 * 4;

    if (SUCCEEDED(hr))

    {

    hr = m_pColorFrame->CopyConvertedFrameDataToArray(nColorBufferSize, reinterpret_cast<BYTE*>(rgb.data), ColorImageFormat::ColorImageFormat_Bgra);

     

    Rect rect(200, 0, 1520, 1080);

    Mat rgb_roi = rgb(rect);

    resize(rgb_roi, rgb_resize, Size(), 0.4, 0.4);

    imshow("color_resize", rgb_resize);

    imwrite("D:/file/hust/ARcodes/ARKinect∂‘∆Î181107/ARKinect/save6/color/" + savecountstr.str() + ".png", rgb_resize);

    }

     

     

     
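    // Copy the 512x424 16-bit depth frame twice: into the raw depthData buffer (used by the filter below) and into the depth Mat (for display).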

    UINT nDepthBufferSize = 424 * 512;

    if (SUCCEEDED(hr))

    {

    hr = m_pDepthFrame->CopyFrameDataToArray(nDepthBufferSize, reinterpret_cast<UINT16*>(depthData));

    m_pDepthFrame->CopyFrameDataToArray(nDepthBufferSize, reinterpret_cast<UINT16*>(depth.data));

                

    // Convert the 16-bit depth to 8-bit for display (scaled by 255/4500).

    depth.convertTo(depth8U, CV_8U, 255.0f / 4500.0f);

    imshow("depth", depth8U);

    }

     

     

    // Fill holes (zero pixels) in the raw 16-bit depth data before mapping, so the aligned image has fewer gaps.

    Mat filterDepth = depthFilter(depthData);

    Mat filterDepth8U;

    filterDepth.convertTo(filterDepth8U, CV_8U, 255.0f / 4500.0f);

     

     
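    // Ask the coordinate mapper where each of the 1920x1080 color pixels falls in the 512x424 depth image; unmapped pixels come back as -infinity.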

    if (SUCCEEDED(hr))

    {

    hr = m_pCoordinateMapper->MapColorFrameToDepthSpace(424 * 512, reinterpret_cast<UINT16*>(filterDepth.data), 1920 * 1080, &depthSpacePoints[0]);

    }

     

     
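    // Build the aligned depth image: for every color pixel in the cropped ROI, look up its depth-space coordinate and copy the corresponding filtered 8-bit depth value.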

    if (SUCCEEDED(hr))

    {

    CoordinateMapperMat = Scalar(0, 0, 0, 0); // clear the aligned depth image (CoordinateMapperMat is defined above as Mat(1080, 1520, CV_8U))

    for (int y = 0; y < 1080; y++)

    {

    for (int x = 200; x < 1720; x++)

    //for (int x = 0; x < 1920; x++)

    {

    unsigned int index = y * 1920 + x;

    DepthSpacePoint p = depthSpacePoints[index];

    if (p.X != -std::numeric_limits<float>::infinity() && p.Y != -std::numeric_limits<float>::infinity())

    {

    int depthX = static_cast<int>(p.X + 0.2f);

    int depthY = static_cast<int>(p.Y + 0.2f);

    if ((depthX >= 0) && (depthX < 512) && (depthY >= 0) && (depthY < 424))

    {

    CoordinateMapperMat.at<uchar>(y, x - 200) = filterDepth8U.at<uchar>(depthY, depthX);

    }

    }

    }

    }

     

    resize(CoordinateMapperMat, CoordinateMapperMat_resize, Size(), 0.4, 0.4);

    imshow("CoordinateMapper", CoordinateMapperMat_resize);

     

    imwrite("D:/file/hust/ARcodes/ARKinect∂‘∆Î181107/ARKinect/save6/result/" + savecountstr.str() + ".png", CoordinateMapperMat_resize);

    }

     

     

     

    int c = waitKey(1);

    if ((char)c == VK_ESCAPE)

    break;

     

     

    SafeRelease(m_pColorFrame);

    SafeRelease(m_pDepthFrame);

    SafeRelease(m_pColorFrameReference);

    SafeRelease(m_pDepthFrameReference);

    SafeRelease(m_pMultiFrame);

    }

     

    cv::destroyAllWindows();

    SafeRelease(m_pCoordinateMapper);

    m_pKinectSensor->Close();
    SafeRelease(m_pKinectSensor);

    // Free the heap buffers allocated above (the original code leaked them).
    delete[] m_pColorCoordinates;
    delete[] m_pCameraCoordinates;
    delete[] depthData;
    delete m_pCameraIntrinsics;

    std::system("pause");

    return 0;

    }

     

     
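    // depthFilter: fills zero (invalid) depth pixels with the most frequent depth value found in their 5x5 neighborhood and returns the filtered 16-bit depth map.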

    Mat depthFilter(UINT16 *depthData) {

    Mat i_before(424, 512, CV_8UC4);

    Mat i_after(424, 512, CV_8UC4);

    Mat i_result(424, 512, CV_16UC1);

    cv::Mat i_result8U;

     

    unsigned short maxDepth = 0;

    int iZeroCountBefore = 0;   // counts of zero (invalid) pixels before/after filtering; int, since 512*424 exceeds the range of unsigned short
    int iZeroCountAfter = 0;

    unsigned short* depthArray = (unsigned short*)depthData;

     
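    // First pass: build a BGRA visualization of the raw depth (i_before); zero pixels are painted blue and counted.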

    for (int i = 0; i < 512 * 424; i++)

    {

    int row = i / 512;

    int col = i % 512;

     

    unsigned short depthValue = depthArray[row * 512 + col];

    if (depthValue == 0)

    {

    i_before.data[i * 4] = 255;

    i_before.data[i * 4 + 1] = 0;

    i_before.data[i * 4 + 2] = 0;

    i_before.data[i * 4 + 3] = depthValue / 256;

    iZeroCountBefore++;

    }

    else

    {

    i_before.data[i * 4] = depthValue / 4500.0f * 256;

    i_before.data[i * 4 + 1] = depthValue / 4500.0f * 256;

    i_before.data[i * 4 + 2] = depthValue / 4500.0f * 256;

    i_before.data[i * 4 + 3] = depthValue / 4500.0f * 256;

    }

    maxDepth = depthValue > maxDepth ? depthValue : maxDepth;

    }

    //cout << "max depth value: " << maxDepth << endl;

     

     

    unsigned short* smoothDepthArray = (unsigned short*)i_result.data;

    int widthBound = 512 - 1;

    int heightBound = 424 - 1;

     

     

    int innerBandThreshold = 1;

    int outerBandThreshold = 3;

     

     
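    // Second pass: for every zero pixel, scan its 5x5 neighborhood. The 8 immediate neighbors form the inner band, the 16 surrounding pixels the outer band; non-zero values and their frequencies are collected in filterCollection.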

    for (int depthArrayRowIndex = 0; depthArrayRowIndex<424; depthArrayRowIndex++)

    {

    for (int depthArrayColumnIndex = 0; depthArrayColumnIndex < 512; depthArrayColumnIndex++)

    {

    int depthIndex = depthArrayColumnIndex + (depthArrayRowIndex * 512);

     

    if (depthArray[depthIndex] == 0)

    {

    int x = depthIndex % 512;

    int y = (depthIndex - x) / 512;

     

    unsigned short filterCollection[24][2] = { 0 };

     

    int innerBandCount = 0;

    int outerBandCount = 0;

     

    for (int yi = -2; yi < 3; yi++)

    {

    for (int xi = -2; xi < 3; xi++)

    {

    if (xi != 0 || yi != 0)

    {

    int xSearch = x + xi;

    int ySearch = y + yi;

     

    if (xSearch >= 0 && xSearch <= widthBound &&

    ySearch >= 0 && ySearch <= heightBound)

    {

    int index = xSearch + (ySearch * 512);

     

    if (depthArray[index] != 0)

    {

    for (int i = 0; i < 24; i++)

    {

    if (filterCollection[i][0] == depthArray[index])

    {

    filterCollection[i][1]++;

    break;

    }

    else if (filterCollection[i][0] == 0)

    {

    filterCollection[i][0] = depthArray[index];

    filterCollection[i][1]++;

    break;

    }

    }

     

    if (yi != 2 && yi != -2 && xi != 2 && xi != -2)

    innerBandCount++;

    else

    outerBandCount++;

    }

    }

    }

    }

    }

     
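    // If enough valid neighbors were found, fill the hole with the most frequent (mode) neighboring depth value; otherwise leave it at zero.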

    if (innerBandCount >= innerBandThreshold || outerBandCount >= outerBandThreshold)

    {

    short frequency = 0;

    short depth = 0;

     

    for (int i = 0; i < 24; i++)

    {

     

    if (filterCollection[i][0] == 0)

    break;

    if (filterCollection[i][1] > frequency)

    {

    depth = filterCollection[i][0];

    frequency = filterCollection[i][1];

    }

    }

     

    smoothDepthArray[depthIndex] = depth;

    }

    else

    {

    smoothDepthArray[depthIndex] = 0;

    }

    }

    else

    {

    smoothDepthArray[depthIndex] = depthArray[depthIndex];

    }

    }

    }

     
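    // Third pass: build a BGRA visualization of the filtered depth (i_after); any remaining holes are still painted blue.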

    for (int i = 0; i < 512 * 424; i++)

    {

    int row = i / 512;

    int col = i % 512;

     

    unsigned short depthValue = smoothDepthArray[row * 512 + col];

    if (depthValue == 0)

    {

    i_after.data[i * 4] = 255;

    i_after.data[i * 4 + 1] = 0;

    i_after.data[i * 4 + 2] = 0;

    i_after.data[i * 4 + 3] = depthValue / 256;

    iZeroCountAfter++;

    }

    else

    {

    i_after.data[i * 4] = depthValue / 4500.0f * 256;

    i_after.data[i * 4 + 1] = depthValue / 4500.0f * 256;

    i_after.data[i * 4 + 2] = depthValue / 4500.0f * 256;

    i_after.data[i * 4 + 3] = depthValue / 4500.0f * 256;

    }

    }

     

    i_result.convertTo(i_result8U, CV_8U, 255.0f / 4500.0f);   // scale to 8-bit using 4500 mm as the maximum range (uDepthMax could be used instead); i_result8U is kept only for debugging

     

    return i_result;

    }

Original article: https://www.cnblogs.com/sofard/p/10450577.html