A Look at HandViewer, the First NITE 2 Sample Program


    I find that the best way to learn a new API is to study the samples it ships with. Based on my own exploration, here are my notes on the new NITE 2, starting with the HandViewer sample it provides. Import the provided project folder into VS2010, build it, and run it to see the result directly:

     When a hand makes a "push forward" or "wave side to side" gesture, the sample acquires the palm coordinates and starts tracking them; other hands making the same gestures are recognized and tracked as well. Now for the code, starting with the main() function:

#include "Viewer.h"

int main(int argc, char** argv)
{
    openni::Status rc = openni::STATUS_OK;

    // Construct a SampleViewer object and initialize it
    SampleViewer sampleViewer("葉梅樹的手跟蹤");

    // Call SampleViewer::Init to set up tracking
    rc = sampleViewer.Init(argc, argv);
    if (rc != openni::STATUS_OK)
    {
        return 1;
    }
    // Call Run to enter the tracking loop
    sampleViewer.Run();
}
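
    Note that Run() never returns (glutMainLoop() takes over), so the sample never shuts anything down. In your own application, if the loop can exit, the teardown you would add looks roughly like the following sketch. The ordering here is my assumption; the destroy()/close()/shutdown() calls themselves are real OpenNI 2 / NiTE 2 API:

// Hedged sketch: teardown for an application whose loop can exit.
// Placing these calls here (rather than in the sample, which never
// returns) is my own addition.
m_pHandTracker->destroy();      // release the hand tracker
delete m_pHandTracker;
nite::NiTE::shutdown();         // unload NiTE
m_device.close();               // close the sensor device
openni::OpenNI::shutdown();     // unload OpenNI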

    Now let's see how the SampleViewer class initializes, how it locates the palm, and how it tracks the hand in real time:

/************************************************************************/
/* Class initialization; creates the m_pHandTracker object.            */
/************************************************************************/
SampleViewer::SampleViewer(const char* strSampleName)
{
    ms_self = this;
    strncpy(m_strSampleName, strSampleName, ONI_MAX_STR);
    m_pHandTracker = new nite::HandTracker;
}
...
openni::Status SampleViewer::Init(int argc, char **argv)
{
    m_pTexMap = NULL;

    // Initialize OpenNI
    openni::OpenNI::initialize();

    // With only one sensor connected, openni::ANY_DEVICE opens it.
    const char* deviceUri = openni::ANY_DEVICE;
    for (int i = 1; i < argc-1; ++i)
    {
        if (strcmp(argv[i], "-device") == 0)
        {
            deviceUri = argv[i+1];
            break;
        }
    }

    // Open the sensor device
    openni::Status rc = m_device.open(deviceUri);
    if (rc != openni::STATUS_OK)
    {
        printf("Open Device failed:\n%s\n", openni::OpenNI::getExtendedError());
        return rc;
    }

    /************************************************************************/
    /* NiTE initialization actually calls the low-level NiteCAPI.h function
       niteInitialize():
       static Status initialize()
       {
           return (Status)niteInitialize();
       }                                                                    */
    /************************************************************************/
    nite::NiTE::initialize();

    /************************************************************************/
    /* m_pHandTracker->create(&m_device) creates a hand tracker bound to the
       opened device. Its implementation (which calls the low-level
       NiteCAPI.h function niteInitializeHandTrackerByDevice()) is:
       Status create(openni::Device* pDevice = NULL)
       {
           if (pDevice == NULL)
           {
               return (Status)niteInitializeHandTracker(&m_handTrackerHandle);
               // Pick a device
           }
           return (Status)niteInitializeHandTrackerByDevice(pDevice, &m_handTrackerHandle);
       }                                                                    */
    /************************************************************************/
    if (m_pHandTracker->create(&m_device) != nite::STATUS_OK)
    {
        return openni::STATUS_ERROR;
    }

    /************************************************************************/
    /* Start gesture detection for hands on the device. When a "wave" or
       "click" (push) gesture is made, the tracker locates that hand's palm
       coordinates and begins tracking it.
       Available gesture types; there are three in total:
       typedef enum
       {
           GESTURE_WAVE,
           GESTURE_CLICK,
           GESTURE_HAND_RAISE
       } GestureType;                                                       */
    /************************************************************************/
    m_pHandTracker->startGestureDetection(nite::GESTURE_WAVE);
    m_pHandTracker->startGestureDetection(nite::GESTURE_CLICK);

    return InitOpenGL(argc, argv);

}
openni::Status SampleViewer::Run()    //Does not return
{
    // Enter the main tracking loop; Display() is the callback.
    glutMainLoop();

    return openni::STATUS_OK;
}

    Now let's look at the first half of the Display() callback; it mainly retrieves the depth frame and processes it for display:

void SampleViewer::Display()
{
    // This class is documented as:
    /** Snapshot of the Hand Tracker algorithm. 
        It holds all the hands identified at this time, 
        as well as the detected gestures 
    */
    nite::HandTrackerFrameRef handFrame;

    // Will hold the depth data
    openni::VideoFrameRef depthFrame;

    // The purpose and implementation of readFrame() are:
    /** Get the next snapshot of the algorithm */
    /*Status readFrame(HandTrackerFrameRef* pFrame)
    {
        NiteHandTrackerFrame *pNiteFrame = NULL;
        Status rc = (Status)niteReadHandTrackerFrame(m_handTrackerHandle, &pNiteFrame);
        pFrame->setReference(m_handTrackerHandle, pNiteFrame);

        return rc;
    }
    */
    nite::Status rc = m_pHandTracker->readFrame(&handFrame);
    if (rc != nite::STATUS_OK)
    {
        printf("GetNextData failed\n");
        return;
    }
    // Get the depth frame for this snapshot
    depthFrame = handFrame.getDepthFrame();

    if (m_pTexMap == NULL)
    {
        // Texture map init
        m_nTexMapX = MIN_CHUNKS_SIZE(depthFrame.getVideoMode().getResolutionX(), TEXTURE_SIZE);
        m_nTexMapY = MIN_CHUNKS_SIZE(depthFrame.getVideoMode().getResolutionY(), TEXTURE_SIZE);
        m_pTexMap = new openni::RGB888Pixel[m_nTexMapX * m_nTexMapY];
    }


    glClear (GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

    glMatrixMode(GL_PROJECTION);
    glPushMatrix();
    glLoadIdentity();
    glOrtho(0, GL_WIN_SIZE_X, GL_WIN_SIZE_Y, 0, -1.0, 1.0);

    if (depthFrame.isValid())
    {
        // Process the acquired depth data
        const openni::DepthPixel* pDepth = (const openni::DepthPixel*)depthFrame.getData();
        int width = depthFrame.getWidth();
        int height = depthFrame.getHeight();
        // Calculate the accumulative histogram (the yellow display...)
        memset(m_pDepthHist, 0, MAX_DEPTH*sizeof(float));
        int restOfRow = depthFrame.getStrideInBytes() / sizeof(openni::DepthPixel) - width;

        unsigned int nNumberOfPoints = 0;
        for (int y = 0; y < height; ++y)
        {
            for (int x = 0; x < width; ++x, ++pDepth)
            {
                if (*pDepth != 0)
                {
                    m_pDepthHist[*pDepth]++;
                    nNumberOfPoints++;
                }
            }
            pDepth += restOfRow;
        }
        for (int nIndex=1; nIndex<MAX_DEPTH; nIndex++)
        {
            m_pDepthHist[nIndex] += m_pDepthHist[nIndex-1];
        }
        if (nNumberOfPoints)
        {
            for (int nIndex=1; nIndex<MAX_DEPTH; nIndex++)
            {
                m_pDepthHist[nIndex] = (unsigned int)(256 * (1.0f - (m_pDepthHist[nIndex] / nNumberOfPoints)));
            }
        }
    }

    memset(m_pTexMap, 0, m_nTexMapX*m_nTexMapY*sizeof(openni::RGB888Pixel));

    float factor[3] = {1, 1, 1};
    // check if we need to draw depth frame to texture
    if (depthFrame.isValid() && g_drawDepth)
    {
        const openni::DepthPixel* pDepthRow = (const openni::DepthPixel*)depthFrame.getData();
        openni::RGB888Pixel* pTexRow = m_pTexMap + depthFrame.getCropOriginY() * m_nTexMapX;
        int rowSize = depthFrame.getStrideInBytes() / sizeof(openni::DepthPixel);

        for (int y = 0; y < depthFrame.getHeight(); ++y)
        {
            const openni::DepthPixel* pDepth = pDepthRow;
            openni::RGB888Pixel* pTex = pTexRow + depthFrame.getCropOriginX();

            for (int x = 0; x < depthFrame.getWidth(); ++x, ++pDepth, ++pTex)
            {
                if (*pDepth != 0)
                {
                    factor[0] = Colors[colorCount][0];
                    factor[1] = Colors[colorCount][1];
                    factor[2] = Colors[colorCount][2];

                    int nHistValue = m_pDepthHist[*pDepth];
                    pTex->r = nHistValue*factor[0];
                    pTex->g = nHistValue*factor[1];
                    pTex->b = nHistValue*factor[2];

                    factor[0] = factor[1] = factor[2] = 1;
                }
            }

            pDepthRow += rowSize;
            pTexRow += m_nTexMapX;
        }
    }

    glTexParameteri(GL_TEXTURE_2D, GL_GENERATE_MIPMAP_SGIS, GL_TRUE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR_MIPMAP_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, m_nTexMapX, m_nTexMapY, 0, GL_RGB, GL_UNSIGNED_BYTE, m_pTexMap);

    // Display the OpenGL texture map
    glColor4f(1,1,1,1);

    glEnable(GL_TEXTURE_2D);
    glBegin(GL_QUADS);

    g_nXRes = depthFrame.getVideoMode().getResolutionX();
    g_nYRes = depthFrame.getVideoMode().getResolutionY();

    // upper left
    glTexCoord2f(0, 0);
    glVertex2f(0, 0);
    // upper right
    glTexCoord2f((float)g_nXRes/(float)m_nTexMapX, 0);
    glVertex2f(GL_WIN_SIZE_X, 0);
    // bottom right
    glTexCoord2f((float)g_nXRes/(float)m_nTexMapX, (float)g_nYRes/(float)m_nTexMapY);
    glVertex2f(GL_WIN_SIZE_X, GL_WIN_SIZE_Y);
    // bottom left
    glTexCoord2f(0, (float)g_nYRes/(float)m_nTexMapY);
    glVertex2f(0, GL_WIN_SIZE_Y);

    glEnd();
    glDisable(GL_TEXTURE_2D);
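
    A quick word on the cumulative histogram computed above: m_pDepthHist[d] ends up holding 256 * (1 - CDF(d)), where CDF(d) is the fraction of valid pixels at depth <= d, so nearer pixels come out brighter. Here is a self-contained toy sketch of the same mapping (my own illustration, not sample code):

#include <cstdio>
#include <cstring>

// Toy illustration of the sample's depth-to-brightness mapping:
// brightness(d) = 256 * (1 - CDF(d)), so small depths map to bright values.
int main()
{
    const int MAX_D = 8;
    unsigned short depth[] = {1, 1, 2, 3, 3, 3, 7};   // toy depth pixels
    float hist[MAX_D];
    memset(hist, 0, sizeof(hist));

    int n = 0;
    for (int i = 0; i < 7; ++i)               // count occurrences of each depth
        if (depth[i] != 0) { hist[depth[i]]++; n++; }

    for (int d = 1; d < MAX_D; ++d)           // running sum => CDF * n
        hist[d] += hist[d-1];

    for (int d = 1; d < MAX_D; ++d)           // invert and scale to 0..256
        hist[d] = (unsigned int)(256 * (1.0f - hist[d] / n));

    for (int d = 1; d < MAX_D; ++d)
        printf("depth %d -> brightness %.0f\n", d, hist[d]);
}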

After that processing, the code moves on to tracking palm coordinates:

// From the tracker snapshot, read the gestures detected in this frame
    const nite::Array<nite::GestureData>& gestures = handFrame.getGestures();
    for (int i = 0; i < gestures.getSize(); ++i)
    {
        // If the gesture has been completed correctly,
        if (gestures[i].isComplete())
        {
            // get the position where the gesture occurred
            const nite::Point3f& position = gestures[i].getCurrentPosition();
            printf("Gesture %d at (%f,%f,%f)\n", gestures[i].getType(), position.x, position.y, position.z);

            
            // nite::HandId newId ===>typedef short int HandId;
            nite::HandId newId;
            // Start tracking the palm at the gesture position and get the new hand's Id.
            // The underlying prototype is: NITE_API NiteStatus niteStartHandTracking(NiteHandTrackerHandle, const NitePoint3f*, NiteHandId* pNewHandId);
            m_pHandTracker->startHandTracking(gestures[i].getCurrentPosition(), &newId);
        }
    }

    // Get the tracked hands.
    const nite::Array<nite::HandData>& hands= handFrame.getHands();
    for (int i = 0; i < hands.getSize(); ++i)
    {
        const nite::HandData& user = hands[i];

        if (!user.isTracking())
        {
            printf("Lost hand %d\n", user.getId());
            nite::HandId id = user.getId();
            HistoryBuffer<20>* pHistory = g_histories[id];
            g_histories.erase(g_histories.find(id));
            delete pHistory;
        }
        else
        {
            if (user.isNew())
            {
                printf("Found hand %d\n", user.getId());
                g_histories[user.getId()] = new HistoryBuffer<20>;
            }
            // Add to history
            HistoryBuffer<20>* pHistory = g_histories[user.getId()];
            pHistory->AddPoint(user.getPosition());
            // Draw history
            DrawHistory(m_pHandTracker, user.getId(), pHistory);
        }
    }

    if (g_drawFrameId)
    {
        DrawFrameId(handFrame.getFrameIndex());
    }

    // Swap the OpenGL display buffers
    glutSwapBuffers();

}
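
    The sample keeps one HistoryBuffer<20> per hand Id in g_histories so it can draw a trail of the last 20 palm positions. The actual HistoryBuffer implementation isn't shown in this article; a minimal sketch of what such a fixed-capacity ring buffer could look like (my own guess at the shape, not the sample's code) is:

#include "NiTE.h"

// Hypothetical sketch of a fixed-capacity point history with newest-first access.
template <int Capacity>
class HistoryBuffer
{
public:
    HistoryBuffer() : m_head(-1), m_size(0) {}

    void AddPoint(const nite::Point3f& point)
    {
        m_head = (m_head + 1) % Capacity;      // advance write position, wrapping
        m_points[m_head] = point;
        if (m_size < Capacity) ++m_size;       // grow until full, then overwrite oldest
    }

    int GetSize() const { return m_size; }

    // i = 0 returns the newest point, i = 1 the one before it, and so on.
    const nite::Point3f& operator[](int i) const
    {
        return m_points[(m_head - i + Capacity) % Capacity];
    }

private:
    nite::Point3f m_points[Capacity];
    int m_head;
    int m_size;
};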

     In summary, locating and tracking hand coordinates boils down to the following steps:
    1. Initialize OpenNI, NiTE, and so on;

    2. Create a new nite::HandTracker object;

    3. Call m_pHandTracker->create(&m_device);

    4. Start gesture detection (three gesture types can be detected): m_pHandTracker->startGestureDetection(nite::GESTURE_CLICK);

    5. On each depth snapshot, read the gestures that completed, then locate and start tracking the corresponding hands; the key code is:

// From the tracker snapshot, read the gestures detected in this frame
    const nite::Array<nite::GestureData>& gestures = handFrame.getGestures();
    for (int i = 0; i < gestures.getSize(); ++i)
    {
        // If the gesture has been completed correctly,
        if (gestures[i].isComplete())
        {
            // get the position where the gesture occurred
            const nite::Point3f& position = gestures[i].getCurrentPosition();
            printf("Gesture %d at (%f,%f,%f)\n", gestures[i].getType(), position.x, position.y, position.z);

            
            // nite::HandId newId ===>typedef short int HandId;
            nite::HandId newId;
            // Start tracking the palm at the gesture position and get the new hand's Id.
            // The underlying prototype is: NITE_API NiteStatus niteStartHandTracking(NiteHandTrackerHandle, const NitePoint3f*, NiteHandId* pNewHandId);
            m_pHandTracker->startHandTracking(gestures[i].getCurrentPosition(), &newId);
        }
    }

     6. Read the hands that are already being tracked and do whatever processing you need; this is where your own work begins:
        

// Get the tracked hands.
    const nite::Array<nite::HandData>& hands= handFrame.getHands();
    for (int i = 0; i < hands.getSize(); ++i)
    {
        const nite::HandData& user = hands[i];

        if (!user.isTracking())
        {
            printf("Lost hand %d\n", user.getId());
            nite::HandId id = user.getId();
            HistoryBuffer<20>* pHistory = g_histories[id];
            g_histories.erase(g_histories.find(id));
            delete pHistory;
        }
        else
        {
            if (user.isNew())
            {
                printf("Found hand %d\n", user.getId());
                g_histories[user.getId()] = new HistoryBuffer<20>;
            }
            // Add to history
            HistoryBuffer<20>* pHistory = g_histories[user.getId()];
            pHistory->AddPoint(user.getPosition());
            // Draw history
            DrawHistory(m_pHandTracker, user.getId(), pHistory);
        }
    }
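
    Instead of polling readFrame() from a render loop as HandViewer does, NiTE 2 can also notify you when a new frame is ready; this is the C++ face of the niteRegisterHandTrackerCallbacks function listed below. A sketch of that listener pattern, assuming NiTE 2's HandTracker::NewFrameListener interface behaves as its name suggests:

#include <cstdio>
#include "NiTE.h"

// Sketch of event-driven frame handling; assumes NiTE 2's
// HandTracker::NewFrameListener interface.
class HandPrinter : public nite::HandTracker::NewFrameListener
{
public:
    virtual void onNewFrame(nite::HandTracker& tracker)
    {
        nite::HandTrackerFrameRef frame;
        if (tracker.readFrame(&frame) != nite::STATUS_OK)
            return;
        const nite::Array<nite::HandData>& hands = frame.getHands();
        for (int i = 0; i < hands.getSize(); ++i)
            if (hands[i].isTracking())
                printf("hand %d at z=%f\n", hands[i].getId(), hands[i].getPosition().z);
    }
};

// Usage sketch: tracker.addNewFrameListener(&printer);
// remove the listener again before shutting down.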

     That covers the basic hand-tracking steps. Now let's look at the hand-related functions exposed by the low-level NiteCAPI.h header that NITE 2 wraps:

// HandTracker
// Create a tracker on the default device
NITE_API NiteStatus niteInitializeHandTracker(NiteHandTrackerHandle*);
// Create a tracker on a specified device
NITE_API NiteStatus niteInitializeHandTrackerByDevice(void*, NiteHandTrackerHandle*);
// Shut the tracker down
NITE_API NiteStatus niteShutdownHandTracker(NiteHandTrackerHandle);
// After a valid gesture, register a new hand at that position and start tracking its movement
NITE_API NiteStatus niteStartHandTracking(NiteHandTrackerHandle, const NitePoint3f*, NiteHandId* pNewHandId);
// Stop tracking the hand with the given Id
NITE_API void niteStopHandTracking(NiteHandTrackerHandle, NiteHandId);
// Stop tracking all hands
NITE_API void niteStopAllHandTracking(NiteHandTrackerHandle);
// Set/get the smoothing factor applied to hand positions
NITE_API NiteStatus niteSetHandSmoothingFactor(NiteHandTrackerHandle, float);
NITE_API NiteStatus niteGetHandSmoothingFactor(NiteHandTrackerHandle, float*);
// Register and unregister callbacks
NITE_API NiteStatus niteRegisterHandTrackerCallbacks(NiteHandTrackerHandle, NiteHandTrackerCallbacks*, void*);
NITE_API void niteUnregisterHandTrackerCallbacks(NiteHandTrackerHandle, NiteHandTrackerCallbacks*);
// Read the next frame snapshot
NITE_API NiteStatus niteReadHandTrackerFrame(NiteHandTrackerHandle, NiteHandTrackerFrame**);
// COM-style reference counting on frames
NITE_API NiteStatus niteHandTrackerFrameAddRef(NiteHandTrackerHandle, NiteHandTrackerFrame*);
NITE_API NiteStatus niteHandTrackerFrameRelease(NiteHandTrackerHandle, NiteHandTrackerFrame*);
// Start detecting a gesture type
NITE_API NiteStatus niteStartGestureDetection(NiteHandTrackerHandle, NiteGestureType);
// Stop detecting a given gesture type
NITE_API void niteStopGestureDetection(NiteHandTrackerHandle, NiteGestureType);
// Stop all gesture detection
NITE_API void niteStopAllGestureDetection(NiteHandTrackerHandle);
// Conversions between real-world coordinates and depth-map (display) coordinates
NITE_API NiteStatus niteConvertJointCoordinatesToDepth(NiteUserTrackerHandle userTracker, float x, float y, float z, float* pX, float* pY);
NITE_API NiteStatus niteConvertDepthCoordinatesToJoint(NiteUserTrackerHandle userTracker, int x, int y, int z, float* pX, float* pY);
NITE_API NiteStatus niteConvertHandCoordinatesToDepth(NiteHandTrackerHandle handTracker, float x, float y, float z, float* pX, float* pY);
NITE_API NiteStatus niteConvertDepthCoordinatesToHand(NiteHandTrackerHandle handTracker, int x, int y, int z, float* pX, float* pY);
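
    The conversion functions at the end matter in practice: hand positions arrive in real-world millimeters, while drawing happens in depth-map pixel coordinates. The C++ wrapper for this is HandTracker::convertHandCoordinatesToDepth(). A short sketch of how DrawHistory-style code might use it (g_nXRes/g_nYRes and the window size come from the sample; the function itself and the printf stand-in for the GL call are my own illustration):

// Sketch: project a hand's real-world position into depth-map pixels
// before drawing.
void DrawHandPoint(nite::HandTracker* pTracker, const nite::HandData& hand)
{
    float px = 0, py = 0;
    const nite::Point3f& pos = hand.getPosition();  // real-world mm
    pTracker->convertHandCoordinatesToDepth(pos.x, pos.y, pos.z, &px, &py);

    // Scale depth-map pixels up to window coordinates before drawing.
    float wx = px * GL_WIN_SIZE_X / g_nXRes;
    float wy = py * GL_WIN_SIZE_Y / g_nYRes;
    printf("draw at (%.1f, %.1f)\n", wx, wy);       // stand-in for the GL call
}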

     To sum up: the best way to understand these functions is to write some code and call them yourself; NiTE really doesn't provide many hand-tracking functions, just these few, so learning how to use them isn't hard. I find it far more intuitive than NITE 1, and much less complicated.

 

