On NITE 2's First Program: UserViewer


    I find the best way to learn a new API is to study the samples it ships with. Based on my own understanding, here is my take on the new NITE 2, starting with the UserViewer sample it provides. Import the sample's project folder into VS2010, build and run, and look at the result first:

     When a person steps in front of the Kinect, the sample captures the body as a depth image and starts skeleton tracking automatically, reporting the 3D coordinates of up to 15 major joints. Unlike NITE 1, no calibration pose is needed; instead, holding the POSE_CROSSED_HANDS pose ("arms crossed over the chest") makes the program exit, as the code below shows. Let's start working through the code with the main() function:

#include "Viewer.h"

int main(int argc, char** argv)
{
    openni::Status rc = openni::STATUS_OK;

    // Construct the SampleViewer (the argument is the window title)
    SampleViewer sampleViewer("葉梅樹的骨骼跟蹤");

    // Call SampleViewer::Init() to initialize OpenNI/NiTE and the tracker
    rc = sampleViewer.Init(argc, argv);
    if (rc != openni::STATUS_OK)
    {
        return 1;
    }
    // Call Run() to enter the tracking loop
    sampleViewer.Run();
}

    Now let's see how the SampleViewer class initializes, how it obtains the user depth data, and how it tracks the skeleton in real time:

/************************************************************************/
/* Constructor: remember the sample name and create m_pUserTracker,    */
/* the NiTE user (body) tracker.                                        */
/************************************************************************/
SampleViewer::SampleViewer(const char* strSampleName)
{
    ms_self = this;
    strncpy(m_strSampleName, strSampleName, ONI_MAX_STR);
    m_pUserTracker = new nite::UserTracker;
}
// ...
openni::Status SampleViewer::Init(int argc, char **argv)
{
    m_pTexMap = NULL;

    // Initialize OpenNI
    openni::Status rc = openni::OpenNI::initialize();

    // With only one sensor attached, openni::ANY_DEVICE opens it;
    // otherwise a device URI can be passed on the command line via -device.
    const char* deviceUri = openni::ANY_DEVICE;
    for (int i = 1; i < argc-1; ++i)
    {
        if (strcmp(argv[i], "-device") == 0)
        {
            deviceUri = argv[i+1];
            break;
        }
    }

    // Open the sensor (reusing rc; declaring it again would not compile)
    rc = m_device.open(deviceUri);
    if (rc != openni::STATUS_OK)
    {
        printf("Open Device failed:\n%s\n", openni::OpenNI::getExtendedError());
        return rc;
    }

    /************************************************************************/
    /* NiTE initialization simply calls niteInitialize() from NiteCAPI.h:  */
    /*                                                                      */
    /*     static Status initialize()                                       */
    /*     {                                                                */
    /*         return (Status)niteInitialize();                             */
    /*     }                                                                */
    /************************************************************************/
    nite::NiTE::initialize();

    /************************************************************************/
    /* m_pUserTracker->create(&m_device) attaches the tracker to the       */
    /* opened device and starts tracking users on it. The wrapper simply   */
    /* forwards to the NiteCAPI.h functions:                                */
    /*                                                                      */
    /*     Status create(openni::Device* pDevice = NULL)                    */
    /*     {                                                                */
    /*         if (pDevice == NULL)                                         */
    /*         {                                                            */
    /*             // Pick a device                                         */
    /*             return (Status)niteInitializeUserTracker(&m_UserTrackerHandle); */
    /*         }                                                            */
    /*         return (Status)niteInitializeUserTrackerByDevice(pDevice, &m_UserTrackerHandle); */
    /*     }                                                                */
    /************************************************************************/
    if (m_pUserTracker->create(&m_device) != nite::STATUS_OK)
    {
        return openni::STATUS_ERROR;
    }

    // ...
    return InitOpenGL(argc, argv);
}

openni::Status SampleViewer::Run()    // Does not return
{
    // Enter the GLUT main loop; Display() runs once per frame and does all
    // the depth and skeleton processing below.
    glutMainLoop();

    return openni::STATUS_OK;
}

    Now look at the first half of the Display() callback. It reads the latest user-tracker frame, takes its depth frame, and converts the raw depth pixels for display: it builds a cumulative histogram over all non-zero depths so that each depth value d maps to a brightness of roughly 256 * (1 - cdf(d)) (nearer surfaces appear brighter), then tints each pixel by its user label and renders the result as an OpenGL texture:

void SampleViewer::Display()
{
    nite::UserTrackerFrameRef userTrackerFrame;
    openni::VideoFrameRef depthFrame;
    nite::Status rc = m_pUserTracker->readFrame(&userTrackerFrame);
    if (rc != nite::STATUS_OK)
    {
        printf("GetNextData failed\n");
        return;
    }

    depthFrame = userTrackerFrame.getDepthFrame();

    if (m_pTexMap == NULL)
    {
        // Texture map init
        m_nTexMapX = MIN_CHUNKS_SIZE(depthFrame.getVideoMode().getResolutionX(), TEXTURE_SIZE);
        m_nTexMapY = MIN_CHUNKS_SIZE(depthFrame.getVideoMode().getResolutionY(), TEXTURE_SIZE);
        m_pTexMap = new openni::RGB888Pixel[m_nTexMapX * m_nTexMapY];
    }

    const nite::UserMap& userLabels = userTrackerFrame.getUserMap();

    glClear (GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

    glMatrixMode(GL_PROJECTION);
    glPushMatrix();
    glLoadIdentity();
    glOrtho(0, GL_WIN_SIZE_X, GL_WIN_SIZE_Y, 0, -1.0, 1.0);

    if (depthFrame.isValid() && g_drawDepth)
    {
        const openni::DepthPixel* pDepth = (const openni::DepthPixel*)depthFrame.getData();
        int width = depthFrame.getWidth();
        int height = depthFrame.getHeight();
        // Calculate the accumulative histogram (the yellow display...)
        memset(m_pDepthHist, 0, MAX_DEPTH*sizeof(float));
        int restOfRow = depthFrame.getStrideInBytes() / sizeof(openni::DepthPixel) - width;

        unsigned int nNumberOfPoints = 0;
        for (int y = 0; y < height; ++y)
        {
            for (int x = 0; x < width; ++x, ++pDepth)
            {
                if (*pDepth != 0)
                {
                    m_pDepthHist[*pDepth]++;
                    nNumberOfPoints++;
                }
            }
            pDepth += restOfRow;
        }
        for (int nIndex=1; nIndex<MAX_DEPTH; nIndex++)
        {
            m_pDepthHist[nIndex] += m_pDepthHist[nIndex-1];
        }
        if (nNumberOfPoints)
        {
            for (int nIndex=1; nIndex<MAX_DEPTH; nIndex++)
            {
                m_pDepthHist[nIndex] = (unsigned int)(256 * (1.0f - (m_pDepthHist[nIndex] / nNumberOfPoints)));
            }
        }
    }

    memset(m_pTexMap, 0, m_nTexMapX*m_nTexMapY*sizeof(openni::RGB888Pixel));

    float factor[3] = {1, 1, 1};
    // check if we need to draw depth frame to texture
    if (depthFrame.isValid() && g_drawDepth)
    {
        const nite::UserId* pLabels = userLabels.getPixels();

        const openni::DepthPixel* pDepthRow = (const openni::DepthPixel*)depthFrame.getData();
        openni::RGB888Pixel* pTexRow = m_pTexMap + depthFrame.getCropOriginY() * m_nTexMapX;
        int rowSize = depthFrame.getStrideInBytes() / sizeof(openni::DepthPixel);

        for (int y = 0; y < depthFrame.getHeight(); ++y)
        {
            const openni::DepthPixel* pDepth = pDepthRow;
            openni::RGB888Pixel* pTex = pTexRow + depthFrame.getCropOriginX();

            for (int x = 0; x < depthFrame.getWidth(); ++x, ++pDepth, ++pTex, ++pLabels)
            {
                if (*pDepth != 0)
                {
                    if (*pLabels == 0)
                    {
                        if (!g_drawBackground)
                        {
                            factor[0] = factor[1] = factor[2] = 0;

                        }
                        else
                        {
                            factor[0] = Colors[colorCount][0];
                            factor[1] = Colors[colorCount][1];
                            factor[2] = Colors[colorCount][2];
                        }
                    }
                    else
                    {
                        factor[0] = Colors[*pLabels % colorCount][0];
                        factor[1] = Colors[*pLabels % colorCount][1];
                        factor[2] = Colors[*pLabels % colorCount][2];
                    }
//                    // Add debug lines - every 10cm
//                     else if ((*pDepth / 10) % 10 == 0)
//                     {
//                         factor[0] = factor[2] = 0;
//                     }

                    int nHistValue = m_pDepthHist[*pDepth];
                    pTex->r = nHistValue*factor[0];
                    pTex->g = nHistValue*factor[1];
                    pTex->b = nHistValue*factor[2];

                    factor[0] = factor[1] = factor[2] = 1;
                }
            }

            pDepthRow += rowSize;
            pTexRow += m_nTexMapX;
        }
    }

    glTexParameteri(GL_TEXTURE_2D, GL_GENERATE_MIPMAP_SGIS, GL_TRUE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR_MIPMAP_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, m_nTexMapX, m_nTexMapY, 0, GL_RGB, GL_UNSIGNED_BYTE, m_pTexMap);

    // Display the OpenGL texture map
    glColor4f(1,1,1,1);

    glEnable(GL_TEXTURE_2D);
    glBegin(GL_QUADS);

    g_nXRes = depthFrame.getVideoMode().getResolutionX();
    g_nYRes = depthFrame.getVideoMode().getResolutionY();

    // upper left
    glTexCoord2f(0, 0);
    glVertex2f(0, 0);
    // upper right
    glTexCoord2f((float)g_nXRes/(float)m_nTexMapX, 0);
    glVertex2f(GL_WIN_SIZE_X, 0);
    // bottom right
    glTexCoord2f((float)g_nXRes/(float)m_nTexMapX, (float)g_nYRes/(float)m_nTexMapY);
    glVertex2f(GL_WIN_SIZE_X, GL_WIN_SIZE_Y);
    // bottom left
    glTexCoord2f(0, (float)g_nYRes/(float)m_nTexMapY);
    glVertex2f(0, GL_WIN_SIZE_Y);

    glEnd();
    glDisable(GL_TEXTURE_2D);

    Once the depth map has been rendered, the second half of Display() turns to the skeleton data:

// Fetch all users known to the tracker in this frame
    const nite::Array<nite::UserData>& users = userTrackerFrame.getUsers();
    for (int i = 0; i < users.getSize(); ++i)
    {
        const nite::UserData& user = users[i];

        updateUserState(user, userTrackerFrame.getTimestamp());
        if (user.isNew())
        {
            // A user just appeared in front of the Kinect: start tracking its skeleton
            m_pUserTracker->startSkeletonTracking(user.getId());
            // Also start watching for the POSE_CROSSED_HANDS exit pose
            m_pUserTracker->startPoseDetection(user.getId(), nite::POSE_CROSSED_HANDS);
        }
        else if (!user.isLost())
        {
            // The user is still being tracked: draw the requested overlays
            if (g_drawStatusLabel)
            {
                DrawStatusLabel(m_pUserTracker, user);
            }
            if (g_drawCenterOfMass)
            {
                DrawCenterOfMass(m_pUserTracker, user);
            }
            if (g_drawBoundingBox)
            {
                DrawBoundingBox(user);
            }

            // Once the skeleton state is SKELETON_TRACKED, draw the main joints and the limbs connecting them
            if (users[i].getSkeleton().getState() == nite::SKELETON_TRACKED && g_drawSkeleton)
            {
                DrawSkeleton(m_pUserTracker, user);
            }
        }

        if (m_poseUser == 0 || m_poseUser == user.getId())
        {
            const nite::PoseData& pose = user.getPose(nite::POSE_CROSSED_HANDS);

            if (pose.isEntered())
            {
                // Start timer
                sprintf(g_generalMessage, "In exit pose. Keep it for %d second%s to exit\n", g_poseTimeoutToExit/1000, g_poseTimeoutToExit/1000 == 1 ? "" : "s");
                printf("Counting down %d second to exit\n", g_poseTimeoutToExit/1000);
                m_poseUser = user.getId();
                m_poseTime = userTrackerFrame.getTimestamp();
            }
            else if (pose.isExited())
            {
                memset(g_generalMessage, 0, sizeof(g_generalMessage));
                printf("Count-down interrupted\n");
                m_poseTime = 0;
                m_poseUser = 0;
            }
            else if (pose.isHeld())
            {
                // tick
                if (userTrackerFrame.getTimestamp() - m_poseTime > g_poseTimeoutToExit * 1000)
                {
                    printf("Count down complete. Exit...\n");
                    Finalize();
                    exit(2);
                }
            }
        }
    }

    if (g_drawFrameId)
    {
        DrawFrameId(userTrackerFrame.getFrameIndex());
    }

    if (g_generalMessage[0] != '\0')
    {
        char *msg = g_generalMessage;
        glColor3f(1.0f, 0.0f, 0.0f);
        glRasterPos2i(100, 20);
        glPrintString(GLUT_BITMAP_HELVETICA_18, msg);
    }



    // Swap the OpenGL display buffers
    glutSwapBuffers();

}

    The function that draws the joint positions and the limbs between them looks like this:

void DrawSkeleton(nite::UserTracker* pUserTracker, const nite::UserData& userData)
{
    DrawLimb(pUserTracker, userData.getSkeleton().getJoint(nite::JOINT_HEAD), userData.getSkeleton().getJoint(nite::JOINT_NECK), userData.getId() % colorCount);

    DrawLimb(pUserTracker, userData.getSkeleton().getJoint(nite::JOINT_LEFT_SHOULDER), userData.getSkeleton().getJoint(nite::JOINT_LEFT_ELBOW), userData.getId() % colorCount);
    DrawLimb(pUserTracker, userData.getSkeleton().getJoint(nite::JOINT_LEFT_ELBOW), userData.getSkeleton().getJoint(nite::JOINT_LEFT_HAND), userData.getId() % colorCount);

    DrawLimb(pUserTracker, userData.getSkeleton().getJoint(nite::JOINT_RIGHT_SHOULDER), userData.getSkeleton().getJoint(nite::JOINT_RIGHT_ELBOW), userData.getId() % colorCount);
    DrawLimb(pUserTracker, userData.getSkeleton().getJoint(nite::JOINT_RIGHT_ELBOW), userData.getSkeleton().getJoint(nite::JOINT_RIGHT_HAND), userData.getId() % colorCount);

    DrawLimb(pUserTracker, userData.getSkeleton().getJoint(nite::JOINT_LEFT_SHOULDER), userData.getSkeleton().getJoint(nite::JOINT_RIGHT_SHOULDER), userData.getId() % colorCount);

    DrawLimb(pUserTracker, userData.getSkeleton().getJoint(nite::JOINT_LEFT_SHOULDER), userData.getSkeleton().getJoint(nite::JOINT_TORSO), userData.getId() % colorCount);
    DrawLimb(pUserTracker, userData.getSkeleton().getJoint(nite::JOINT_RIGHT_SHOULDER), userData.getSkeleton().getJoint(nite::JOINT_TORSO), userData.getId() % colorCount);

    DrawLimb(pUserTracker, userData.getSkeleton().getJoint(nite::JOINT_TORSO), userData.getSkeleton().getJoint(nite::JOINT_LEFT_HIP), userData.getId() % colorCount);
    DrawLimb(pUserTracker, userData.getSkeleton().getJoint(nite::JOINT_TORSO), userData.getSkeleton().getJoint(nite::JOINT_RIGHT_HIP), userData.getId() % colorCount);

    DrawLimb(pUserTracker, userData.getSkeleton().getJoint(nite::JOINT_LEFT_HIP), userData.getSkeleton().getJoint(nite::JOINT_RIGHT_HIP), userData.getId() % colorCount);


    DrawLimb(pUserTracker, userData.getSkeleton().getJoint(nite::JOINT_LEFT_HIP), userData.getSkeleton().getJoint(nite::JOINT_LEFT_KNEE), userData.getId() % colorCount);
    DrawLimb(pUserTracker, userData.getSkeleton().getJoint(nite::JOINT_LEFT_KNEE), userData.getSkeleton().getJoint(nite::JOINT_LEFT_FOOT), userData.getId() % colorCount);

    DrawLimb(pUserTracker, userData.getSkeleton().getJoint(nite::JOINT_RIGHT_HIP), userData.getSkeleton().getJoint(nite::JOINT_RIGHT_KNEE), userData.getId() % colorCount);
    DrawLimb(pUserTracker, userData.getSkeleton().getJoint(nite::JOINT_RIGHT_KNEE), userData.getSkeleton().getJoint(nite::JOINT_RIGHT_FOOT), userData.getId() % colorCount);
}
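
    DrawLimb() itself is not listed in this post. Here is a minimal sketch of what it does, not the verbatim sample code: it projects both joints from real-world coordinates into depth-map pixels with convertJointCoordinatesToDepth() and draws a line between them (the real sample also varies color and point size with the joint confidence):

// Minimal sketch of DrawLimb(), reusing GL_WIN_SIZE_X/Y, g_nXRes/g_nYRes and
// the Colors table that appear elsewhere in the sample.
void DrawLimb(nite::UserTracker* pUserTracker,
              const nite::SkeletonJoint& joint1,
              const nite::SkeletonJoint& joint2, int color)
{
    // Skip limbs whose endpoints the tracker is not confident about.
    if (joint1.getPositionConfidence() < 0.5f || joint2.getPositionConfidence() < 0.5f)
        return;

    // Convert real-world millimeters to depth-map pixel coordinates.
    float x1, y1, x2, y2;
    pUserTracker->convertJointCoordinatesToDepth(
        joint1.getPosition().x, joint1.getPosition().y, joint1.getPosition().z, &x1, &y1);
    pUserTracker->convertJointCoordinatesToDepth(
        joint2.getPosition().x, joint2.getPosition().y, joint2.getPosition().z, &x2, &y2);

    // Scale from depth resolution to window coordinates.
    x1 *= GL_WIN_SIZE_X / (float)g_nXRes;  y1 *= GL_WIN_SIZE_Y / (float)g_nYRes;
    x2 *= GL_WIN_SIZE_X / (float)g_nXRes;  y2 *= GL_WIN_SIZE_Y / (float)g_nYRes;

    glColor3f(Colors[color][0], Colors[color][1], Colors[color][2]);
    glBegin(GL_LINES);
    glVertex2f(x1, y1);
    glVertex2f(x2, y2);
    glEnd();
}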

     To sum up, locating and tracking the skeleton joints boils down to the following steps (a minimal end-to-end sketch follows the joint list below):
    1. Initialize OpenNI and NiTE;

    2. Create a new nite::UserTracker object;

    3. Attach it to the device: m_pUserTracker->create(&m_device);

    4. When a new user appears, start skeleton tracking: m_pUserTracker->startSkeletonTracking(user.getId());
    5. Optionally start detecting a pose (here POSE_CROSSED_HANDS, used as the exit gesture):
m_pUserTracker->startPoseDetection(user.getId(), nite::POSE_CROSSED_HANDS);

    6. Finally, read the joint coordinates from each frame; 15 joints are available:

JOINT_HEAD  
JOINT_NECK  
JOINT_LEFT_SHOULDER  
JOINT_RIGHT_SHOULDER  
JOINT_LEFT_ELBOW  
JOINT_RIGHT_ELBOW  
JOINT_LEFT_HAND  
JOINT_RIGHT_HAND  
JOINT_TORSO  
JOINT_LEFT_HIP  
JOINT_RIGHT_HIP  
JOINT_LEFT_KNEE  
JOINT_RIGHT_KNEE  
JOINT_LEFT_FOOT  
JOINT_RIGHT_FOOT
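
    As promised above, here is a minimal console-only sketch of the whole pipeline with all the OpenGL drawing stripped away. It is an illustration written against the same API, not part of the sample; error handling is kept to a bare minimum:

// Minimal skeleton-tracking loop: initialize, track, print the head joint.
#include <cstdio>
#include <OpenNI.h>
#include <NiTE.h>

int main()
{
    openni::OpenNI::initialize();
    nite::NiTE::initialize();

    nite::UserTracker userTracker;
    // Passing no device lets NiTE pick one (niteInitializeUserTracker).
    if (userTracker.create() != nite::STATUS_OK)
        return 1;

    for (int frame = 0; frame < 300; ++frame)   // roughly 10 seconds at 30 fps
    {
        nite::UserTrackerFrameRef frameRef;
        if (userTracker.readFrame(&frameRef) != nite::STATUS_OK)
            continue;

        const nite::Array<nite::UserData>& users = frameRef.getUsers();
        for (int i = 0; i < users.getSize(); ++i)
        {
            const nite::UserData& user = users[i];
            if (user.isNew())
            {
                // New user: start tracking immediately, no pose required.
                userTracker.startSkeletonTracking(user.getId());
            }
            else if (user.getSkeleton().getState() == nite::SKELETON_TRACKED)
            {
                const nite::SkeletonJoint& head =
                    user.getSkeleton().getJoint(nite::JOINT_HEAD);
                printf("user %d head: (%.0f, %.0f, %.0f) mm\n", user.getId(),
                       head.getPosition().x, head.getPosition().y,
                       head.getPosition().z);
            }
        }
    }

    nite::NiTE::shutdown();
    openni::OpenNI::shutdown();
    return 0;
}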

    Finally, here are some of the types and functions related to skeleton tracking:

    One: with NITE 1 we had to hold the "surrender" calibration pose before skeleton tracking could begin. NITE 2 needs no calibration pose at all (tracking starts as soon as a user is detected, as we saw above), and its pose detector now knows two poses, defined in enum nite::PoseType: POSE_PSI and POSE_CROSSED_HANDS ("arms crossed over the chest"). I suspect the latter is the easier one to perform.
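
    For example, to watch for both poses on a user (a tiny illustrative fragment, reusing the userTracker and user variables from the sketch above):

// Detect either pose for this user; results come back each frame via
// user.getPose(nite::POSE_PSI) / user.getPose(nite::POSE_CROSSED_HANDS).
userTracker.startPoseDetection(user.getId(), nite::POSE_PSI);
userTracker.startPoseDetection(user.getId(), nite::POSE_CROSSED_HANDS);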

     Two: the skeleton state. You drive your handling off the tracking state reported for each user's skeleton:

enum nite::SkeletonState
{
    nite::SKELETON_NONE,
    nite::SKELETON_CALIBRATING,
    nite::SKELETON_TRACKED,
    nite::SKELETON_CALIBRATION_ERROR_NOT_IN_POSE,
    nite::SKELETON_CALIBRATION_ERROR_HANDS,
    nite::SKELETON_CALIBRATION_ERROR_HEAD,
    nite::SKELETON_CALIBRATION_ERROR_LEGS,
    nite::SKELETON_CALIBRATION_ERROR_TORSO
};
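
    The sample's updateUserState() function (not listed in this post) essentially switches on this state to print progress messages; a minimal sketch of the idea, with illustrative message strings:

// React to the skeleton state of one user.
void printSkeletonState(const nite::UserData& user)
{
    switch (user.getSkeleton().getState())
    {
    case nite::SKELETON_NONE:
        printf("User %d: not tracked\n", user.getId()); break;
    case nite::SKELETON_CALIBRATING:
        printf("User %d: calibrating...\n", user.getId()); break;
    case nite::SKELETON_TRACKED:
        printf("User %d: tracked\n", user.getId()); break;
    default:   // one of the SKELETON_CALIBRATION_ERROR_* values
        printf("User %d: calibration failed\n", user.getId()); break;
    }
}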

     Three: the most important type of all is nite::UserTrackerFrameRef, which I think of as a snapshot of the user tracker. The reference documentation describes it as:

"Snapshot of the User Tracker algorithm. It holds all the users identified at this time, including their position, skeleton and such, as well as the floor plane." Make of that what you will~~~
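
    Besides the getUsers() and getDepthFrame() calls used above, the snapshot also exposes the floor plane and frame timing. A small fragment (again reusing userTracker from the earlier sketch):

nite::UserTrackerFrameRef frameRef;
if (userTracker.readFrame(&frameRef) == nite::STATUS_OK)
{
    // Frame bookkeeping.
    printf("frame %d at %llu us, %d user(s)\n", frameRef.getFrameIndex(),
           (unsigned long long)frameRef.getTimestamp(),
           frameRef.getUsers().getSize());

    // The estimated floor plane: a point plus a normal, in millimeters.
    const nite::Plane& floor = frameRef.getFloor();
    printf("floor point: (%.0f, %.0f, %.0f), confidence %.2f\n",
           floor.point.x, floor.point.y, floor.point.z,
           frameRef.getFloorConfidence());
}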

    Four: there actually aren't many low-level user-tracking functions behind the wrapper. Much like the hand-tracking ones, they are easy to follow:

// UserTracker
NITE_API NiteStatus niteInitializeUserTracker(NiteUserTrackerHandle*);
NITE_API NiteStatus niteInitializeUserTrackerByDevice(void*, NiteUserTrackerHandle*);
NITE_API NiteStatus niteShutdownUserTracker(NiteUserTrackerHandle);

NITE_API NiteStatus niteStartSkeletonTracking(NiteUserTrackerHandle, NiteUserId);
NITE_API void niteStopSkeletonTracking(NiteUserTrackerHandle, NiteUserId);
NITE_API bool niteIsSkeletonTracking(NiteUserTrackerHandle, NiteUserId);

NITE_API NiteStatus niteSetSkeletonSmoothing(NiteUserTrackerHandle, float);
NITE_API NiteStatus niteGetSkeletonSmoothing(NiteUserTrackerHandle, float*);

NITE_API NiteStatus niteStartPoseDetection(NiteUserTrackerHandle, NiteUserId, NitePoseType);
NITE_API void niteStopPoseDetection(NiteUserTrackerHandle, NiteUserId, NitePoseType);
NITE_API void niteStopAllPoseDetection(NiteUserTrackerHandle, NiteUserId);

NITE_API NiteStatus niteRegisterUserTrackerCallbacks(NiteUserTrackerHandle, NiteUserTrackerCallbacks*, void*);
NITE_API void niteUnregisterUserTrackerCallbacks(NiteUserTrackerHandle, NiteUserTrackerCallbacks*);

NITE_API NiteStatus niteReadUserTrackerFrame(NiteUserTrackerHandle, NiteUserTrackerFrame**);
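
    Each of these C functions maps one-to-one onto a nite::UserTracker method; for instance, niteSetSkeletonSmoothing() surfaces as setSkeletonSmoothingFactor(). A tiny fragment (the 0.1f value is just an example, and I am going from the header here rather than the sample):

// Smoothing factor is in [0, 1]: 0 means no smoothing, values near 1 trade
// responsiveness for stability.
userTracker.setSkeletonSmoothingFactor(0.1f);
printf("smoothing is now %f\n", userTracker.getSkeletonSmoothingFactor());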

    To sum up: NITE 2 really offers just two trackers, hand tracking and user (skeleton) tracking, but they are wrapped far more simply than before, so it is much easier to pull the hand and skeleton data we need out of NITE 2. What you then do with that data is no longer NITE 2's concern. So let's start writing our own programs; after the three earlier posts and this one,

On Installing OpenNI 2

On OpenNI 2's First Program

On NITE 2's First Program: HandViewer

you too should be able to start using the OpenNI 2 SDK and NITE 2 with ease~~~

