Using Kinect 2.0 to Acquire a Point Cloud and Display It in GLUT


This article records how to generate a point cloud with Kinect 2.0.

The examples below were produced by modifying the samples that ship with the Kinect SDK; the base sample for each step is noted under its subheading.


 

To build a point cloud we first need a frame's depth data and its color data; the two are then converted together into the point cloud.

 

1. Acquiring the depth data:

  Modified from the Depth Basics-D2D sample

HRESULT CMotionRecognition::GetDepthImage()
{

    if (!m_pDepthFrameReader)
    {
        return E_FAIL;
    }

    IDepthFrame    * pDepthFrame = nullptr;

    HRESULT hr = m_pDepthFrameReader->AcquireLatestFrame(&pDepthFrame);

    if (SUCCEEDED(hr))
    {
        IFrameDescription * pFrameDescription = nullptr;
        USHORT nDepthMinReliableDistance = 0;
        USHORT nDepthMaxDistance = 0;
        UINT16 *pBuffer = NULL;
        UINT nBufferSize = 0;

        hr = pDepthFrame->get_FrameDescription(&pFrameDescription);

        if (SUCCEEDED(hr))
        {
            hr = pFrameDescription->get_Width(&nDepthWidth);
        }

        if (SUCCEEDED(hr))
        {
            hr = pFrameDescription->get_Height(&nDepthHeight);
        }

        if (SUCCEEDED(hr))
        {
            hr = pDepthFrame->get_DepthMinReliableDistance(&nDepthMinReliableDistance);
        }

        if (SUCCEEDED(hr))
        {
            // In order to see the full range of depth (including the less reliable far field depth)
            // we are setting nDepthMaxDistance to the extreme potential depth threshold
            nDepthMaxDistance = USHRT_MAX;
            // Note:  If you wish to filter by reliable depth distance, uncomment the following line.
            //// hr = pDepthFrame->get_DepthMaxReliableDistance(&nDepthMaxDistance);
        }

        if (SUCCEEDED(hr))
        {
            hr = pDepthFrame->AccessUnderlyingBuffer(&nBufferSize, &pBuffer);
        }

        if (SUCCEEDED(hr))
        {
            ConvertMat_depth(pBuffer, nDepthMinReliableDistance, nDepthMaxDistance);
        }

        SafeRelease(pFrameDescription);
    }

    SafeRelease(pDepthFrame);

    return hr;
}
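The depth-frame reader m_pDepthFrameReader used above must be created beforehand. Below is a minimal initialization sketch in the style of the SDK samples (InitDepthReader is a hypothetical helper name, not part of the original post):

HRESULT CMotionRecognition::InitDepthReader()
{
    // Obtain the default sensor and open it.
    HRESULT hr = GetDefaultKinectSensor(&m_pKinectSensor);
    if (FAILED(hr) || !m_pKinectSensor)
    {
        return E_FAIL;
    }

    hr = m_pKinectSensor->Open();

    // Open a reader on the sensor's depth frame source.
    IDepthFrameSource * pDepthFrameSource = nullptr;
    if (SUCCEEDED(hr))
    {
        hr = m_pKinectSensor->get_DepthFrameSource(&pDepthFrameSource);
    }
    if (SUCCEEDED(hr))
    {
        hr = pDepthFrameSource->OpenReader(&m_pDepthFrameReader);
    }
    SafeRelease(pDepthFrameSource);

    return hr;
}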

 

2. Acquiring the color data:

  Modified from the Color Basics-D2D sample

HRESULT CMotionRecognition::GetColorImage()
{

    if (!m_pColorFrameReader)
    {
        return E_FAIL;
    }

    IColorFrame* pColorFrame = NULL;

    HRESULT hr = m_pColorFrameReader->AcquireLatestFrame(&pColorFrame);

    if (SUCCEEDED(hr))
    {
        INT64 nTime = 0;
        IFrameDescription* pFrameDescription = NULL;
        ColorImageFormat imageFormat = ColorImageFormat_None;
        UINT nBufferSize = 0;
        RGBQUAD *pBuffer = NULL;

        hr = pColorFrame->get_RelativeTime(&nTime);

        if (SUCCEEDED(hr))
        {
            hr = pColorFrame->get_FrameDescription(&pFrameDescription);
        }

        if (SUCCEEDED(hr))
        {
            hr = pFrameDescription->get_Width(&nColorWidth);
        }

        if (SUCCEEDED(hr))
        {
            hr = pFrameDescription->get_Height(&nColorHeight);
        }

        if (SUCCEEDED(hr))
        {
            hr = pColorFrame->get_RawColorImageFormat(&imageFormat);
        }

        if (SUCCEEDED(hr))
        {
            if (imageFormat == ColorImageFormat_Bgra)
            {
                hr = pColorFrame->AccessRawUnderlyingBuffer(&nBufferSize, reinterpret_cast<BYTE**>(&pBuffer));
            }
            else if (m_pColorRGBX)
            {
                pBuffer = m_pColorRGBX;
                nBufferSize = nColorWidth * nColorHeight * sizeof(RGBQUAD);
                hr = pColorFrame->CopyConvertedFrameDataToArray(nBufferSize, reinterpret_cast<BYTE*>(pBuffer), ColorImageFormat_Bgra);
            }
            else
            {
                hr = E_FAIL;
            }
        }

        if (SUCCEEDED(hr))
        {
            ConvertMat_color(pBuffer, nColorWidth, nColorHeight);
        }

        SafeRelease(pFrameDescription);

    }

    SafeRelease(pColorFrame);

    return hr;

}
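Note that the CopyConvertedFrameDataToArray path above writes into m_pColorRGBX, so that buffer must be allocated in advance. A sketch, assuming the Kinect v2 color stream's fixed 1920x1080 resolution as in the Color Basics sample:

// Allocate once (e.g., in the constructor) and free with delete[] in the destructor.
static const int cColorWidth  = 1920;
static const int cColorHeight = 1080;

m_pColorRGBX = new RGBQUAD[cColorWidth * cColorHeight];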

 

 

3. The image-data processing functions

Steps 1 and 2 each call a ConvertMat_* function, which is responsible for processing the acquired frame data, since the point-cloud conversion needs both the depth data and the color data. Note that this is also where OpenCV Mats can be created (see the sketch after the two functions below).

Here, however, only the plain case of copying the acquired data into class-member buffers (e.g. pDepthBuffer) is shown.


ConvertMat_depth()
// Copies the raw depth frame into the class member pDepthBuffer, which must be
// pre-allocated to hold nDepthWidth * nDepthHeight values. nMinDepth/nMaxDepth are
// kept for compatibility with the sample's signature but unused in this plain copy.
void CMotionRecognition::ConvertMat_depth(const UINT16* _pBuffer, USHORT nMinDepth, USHORT nMaxDepth)
{
    const UINT16
        * pBuffer   = _pBuffer,
        * pBufferEnd = _pBuffer + (nDepthWidth * nDepthHeight);

    UINT16 * pDepthBufferTmp    = pDepthBuffer;

    while (pBuffer < pBufferEnd)
    {
        *pDepthBufferTmp = *pBuffer;

        ++pDepthBufferTmp;
        ++pBuffer;
    }

}

 



ConvertMat_color()
// Copies the converted BGRA frame into the class member m_pColorRGBX
// (a harmless self-copy when _pBuffer already points at m_pColorRGBX).
void CMotionRecognition::ConvertMat_color(const RGBQUAD* _pBuffer, int nWidth, int nHeight)
{
    const RGBQUAD
        * pBuffer     = _pBuffer,
        * pBufferEnd = pBuffer + (nWidth * nHeight);

    RGBQUAD * pBufferTmp = m_pColorRGBX;

    while (pBuffer < pBufferEnd)
    {
        *pBufferTmp = *pBuffer;
        ++pBufferTmp;
        ++pBuffer;
    }

}
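As mentioned in the note above, this is also a natural place to wrap the buffers in OpenCV Mats. A sketch follows (the OpenCV usage is an assumption, not part of the original code); the Mat headers share the buffers' memory rather than copying it:

#include <opencv2/opencv.hpp>

// 16-bit single-channel depth image backed by pDepthBuffer.
cv::Mat depthMat(nDepthHeight, nDepthWidth, CV_16UC1, pDepthBuffer);

// 8-bit 4-channel BGRA color image backed by m_pColorRGBX.
cv::Mat colorMat(nColorHeight, nColorWidth, CV_8UC4, m_pColorRGBX);

// Use clone() if a Mat must outlive the underlying buffer.
cv::Mat depthCopy = depthMat.clone();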

 

 

4. Assembling the point cloud:

  Modified from the Coordinate Mapping Basics-D2D sample

osg::ref_ptr<osg::Node> CMotionRecognition::AssembleAsPointCloud(float _angle, int _axisX, int _axisY, int _axisZ)
{
    if (!m_pKinectSensor)
    {
        return nullptr; // no sensor, so no cloud can be built
    }
    // osg vertex positions (camera-space coordinates)
    osg::ref_ptr<osg::Vec3Array> point3dVec = new osg::Vec3Array();
    // osg per-vertex colors
    osg::ref_ptr<osg::Vec4Array> colorVec = new osg::Vec4Array();

    ICoordinateMapper * m_pCoordinateMapper = nullptr;

    HRESULT hr = m_pKinectSensor->get_CoordinateMapper(&m_pCoordinateMapper);
    if (FAILED(hr) || !m_pCoordinateMapper)
    {
        return nullptr;
    }

    for (size_t y = 0; y != nDepthHeight; y++)
    {
        for (size_t x = 0; x != nDepthWidth; x++)
        {
            DepthSpacePoint depthSpacePoint = { static_cast<float>(x), static_cast<float>(y) };
            UINT16 currDepth = pDepthBuffer[y * nDepthWidth + x];

            // Coordinate mapping: depth space to color space
            ColorSpacePoint colorSpacePoint = { 0.0f, 0.0f };
            m_pCoordinateMapper->MapDepthPointToColorSpace(depthSpacePoint, currDepth, &colorSpacePoint);
            int colorX = static_cast<int>(std::floor(colorSpacePoint.X + 0.5f)),
                colorY = static_cast<int>(std::floor(colorSpacePoint.Y + 0.5f));
            if ((0 <= colorX) && (colorX < nColorWidth) && (0 <= colorY) && (colorY < nColorHeight))
            {
                RGBQUAD color = m_pColorRGBX[colorY * nColorWidth + colorX];
                colorVec->push_back(osg::Vec4f((float)color.rgbBlue / 255, (float)color.rgbGreen / 255, (float)color.rgbRed / 255, 1));
            }

            // Coordinate mapping: depth space to camera space
            CameraSpacePoint cameraSpacePoint = { 0.0f, 0.0f, 0.0f };
            m_pCoordinateMapper->MapDepthPointToCameraSpace(depthSpacePoint, currDepth, &cameraSpacePoint);
            // Keep the vertex only when a matching color was found, so points and colors stay in sync.
            if ((0 <= colorX) && (colorX < nColorWidth) && (0 <= colorY) && (colorY < nColorHeight))
            {
                point3dVec->push_back(osg::Vec3(cameraSpacePoint.X, cameraSpacePoint.Y, cameraSpacePoint.Z));
            }
        }
    }

    // Release the COM reference obtained from get_CoordinateMapper.
    SafeRelease(m_pCoordinateMapper);

    // Leaf node that will carry the drawable geometry
    osg::ref_ptr<osg::Geode> geode = new osg::Geode();
    // The Geometry stores the vertex-array data and the commands that render it
    osg::ref_ptr<osg::Geometry> geom = new osg::Geometry();

    geom->setVertexArray(point3dVec.get());
    geom->setColorArray(colorVec.get());
    // One color per vertex
    geom->setColorBinding(osg::Geometry::BIND_PER_VERTEX);
    // Render the vertex array as raw points
    geom->addPrimitiveSet(new osg::DrawArrays(osg::PrimitiveSet::POINTS, 0, point3dVec->size()));
    // Attach the drawable to the Geode
    geode->addDrawable(geom.get());

    return geode;
}
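Since the function returns an osg::Node rather than raw GL data, the quickest way to check the result is through an osgViewer. A usage sketch (the motionRecognition instance and the argument values are placeholders):

#include <osgViewer/Viewer>

osgViewer::Viewer viewer;
osg::ref_ptr<osg::Node> cloud = motionRecognition.AssembleAsPointCloud(0.0f, 0, 0, 1);
viewer.setSceneData(cloud.get());
viewer.run(); // blocks until the viewer window is closed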

 

 

 

Below is the result displayed with GLUT:

 

 

As you can see, the frame rate is only around 2.1 FPS, and it will drop further once additional processing is added later on.
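The GLUT display code itself is not included in the post. Here is a minimal rendering sketch, assuming the point cloud has been copied into flat float arrays (g_points, g_colors, and g_numPoints are hypothetical globals refilled from each new frame):

#include <GL/glut.h>

static float * g_points    = nullptr; // x, y, z triples
static float * g_colors    = nullptr; // r, g, b triples
static int     g_numPoints = 0;

void display()
{
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

    // Draw the cloud as a plain vertex/color array of GL_POINTS.
    glEnableClientState(GL_VERTEX_ARRAY);
    glEnableClientState(GL_COLOR_ARRAY);
    glVertexPointer(3, GL_FLOAT, 0, g_points);
    glColorPointer(3, GL_FLOAT, 0, g_colors);
    glDrawArrays(GL_POINTS, 0, g_numPoints);
    glDisableClientState(GL_COLOR_ARRAY);
    glDisableClientState(GL_VERTEX_ARRAY);

    glutSwapBuffers();
    glutPostRedisplay(); // keep re-rendering so new Kinect frames appear
}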

If anyone has a better way to generate the point cloud, feel free to leave a comment :)

 

