全景图生成mesh的纹理


上一篇文章 https://www.cnblogs.com/lovebay/p/11423576.html ,我们使用MPA算法实现了 点云生成mesh,但仅仅实现mesh的顶点着色,为了让mesh有更丰富更真实的纹理信息,这篇文章将实现全景图像为mesh生成高清的纹理,实现方法如下:

第一步:mesh的法线估计(正确的法向)

第二步:mesh批量分割(得到多个mesh小块),分割原则:同一个mesh小块在某一个投影方向上是完全可见的。

第三步:将多个mesh小块批量生成高清纹理。

第四步:将多个mesh小块对应的高清纹理图合并(得到大的纹理图)。

第五步:uv保存,mesh模型导出(obj格式)。

 

流程

核心代码如下:

// Generate a texture image for the mesh: every texel of the output texture is
// mapped (via the uv parameterization) back onto its world-space triangle,
// re-projected into the panorama camera, and colored from the panorama pixel.
const int img_cols = image_panorama.cols;
    const int img_rows = image_panorama.rows;
    MeshParameterization mp;

    // Output texture resolution; also the denominator used to normalize
    // texel indices into [0,1) uv space below (was hard-coded 1024.0).
    const size_t width = 1024;
    const size_t height = 1024;
    out_texture = cv::Mat(height, width, CV_8UC4, Scalar(0, 0, 0, 0));

    // Unwrap the mesh so every face gets uv coordinates in the unit square.
    mp.runMeshParameterize(inMesh, inMaterial, width, height);

    const size_t facenum = inMesh->GetFaceCount();
    // NOTE: std::vector<PointT[3]> (as originally written) is ill-formed C++ —
    // raw-array elements are not copy-assignable. Store the three corners of
    // face f flat, in slots [3*f, 3*f+2], instead.
    std::vector<PointT> uv_triangulation_center(facenum); // uv centroid per face
    std::vector<PointT> uv_triangulation(3 * facenum);    // uv corners, flattened
    std::vector<PointT> w_triangulation(3 * facenum);     // world corners, flattened

    for (size_t i = 0; i < facenum; i++)
    {
        const size_t pindex = 3 * i;
        // uv (texture-space) corners of face i.
        const auto uvpt1 = inMesh->mTexCoords[inMesh->mIndices[pindex]];
        const auto uvpt2 = inMesh->mTexCoords[inMesh->mIndices[pindex + 1]];
        const auto uvpt3 = inMesh->mTexCoords[inMesh->mIndices[pindex + 2]];

        // World-space corners of face i.
        const auto wpt1 = inMesh->mPositions[inMesh->mIndices[pindex]];
        const auto wpt2 = inMesh->mPositions[inMesh->mIndices[pindex + 1]];
        const auto wpt3 = inMesh->mPositions[inMesh->mIndices[pindex + 2]];

        // Centroid of the uv triangle (z unused, kept at 0 for the kd-tree).
        uv_triangulation_center[i].x = (uvpt1.x + uvpt2.x + uvpt3.x) / 3.0;
        uv_triangulation_center[i].y = (uvpt1.y + uvpt2.y + uvpt3.y) / 3.0;
        uv_triangulation_center[i].z = 0.0;

        uv_triangulation[pindex]     = PointT(uvpt1.x, uvpt1.y, 0.0);
        uv_triangulation[pindex + 1] = PointT(uvpt2.x, uvpt2.y, 0.0);
        uv_triangulation[pindex + 2] = PointT(uvpt3.x, uvpt3.y, 0.0);

        w_triangulation[pindex]     = PointT(wpt1.x, wpt1.y, wpt1.z);
        w_triangulation[pindex + 1] = PointT(wpt2.x, wpt2.y, wpt2.z);
        w_triangulation[pindex + 2] = PointT(wpt3.x, wpt3.y, wpt3.z);
        // (The original also filled Points3D/Points2D vectors here, but they
        // only fed commented-out caculateRT/getPlaneEquation calls — removed
        // as dead per-face work.)
    }

    // Index the uv centroids so each texel only tests nearby triangles
    // instead of all faces.
    kd::KDTree<PointT> tree;
    tree.setInputPointCloud(uv_triangulation_center);
    tree.setNumOfLeafData(30);
    tree.buildKDTree();

    // uv-space search radius around a texel (was a misspelled, unused local
    // `redius` shadowed by the literal 0.10 at the call site).
    const float search_radius = 0.1f;

    for (size_t i = 0; i < width; i++)        // texture column
    {
        for (size_t j = 0; j < height; j++)   // texture row
        {
            // Normalize the texel index into uv space using the actual
            // texture size rather than a hard-coded 1024.
            const double x = static_cast<double>(i) / static_cast<double>(width);
            const double y = static_cast<double>(j) / static_cast<double>(height);
            const PointT searchPoint(x, y, 0.0);

            std::vector<size_t> searchIndex;
            std::vector<float> searchDistance;
            tree.runKNNSearchRadius(searchPoint, search_radius, searchIndex, searchDistance);

            // Find the candidate triangle whose uv footprint contains this
            // texel (pointAndTriangle != 0 means inside or on an edge).
            bool found = false;
            size_t face = 0;
            for (size_t t = 0; t < searchIndex.size(); t++)
            {
                const size_t f = searchIndex[t];
                if (pointAndTriangle(uv_triangulation[3 * f],
                    uv_triangulation[3 * f + 1],
                    uv_triangulation[3 * f + 2], searchPoint) != 0) {

                    face = f;
                    found = true;
                    break;
                }
            }

            if (found)
            {
                // Lift the uv point back onto the world-space triangle
                // (corner arrays passed as PointT* — same as the original
                // array-decay call).
                PointT pt3d;
                uv2world(&uv_triangulation[3 * face], searchPoint, &w_triangulation[3 * face], pt3d);

                // Transform the world point into the panorama camera frame.
                const PointT pt_cv = pt3d - position_panorama;
                const Eigen::Vector3d pt_temp =
                    rotation_panorama.inverse() * Eigen::Vector3d(pt_cv.x, pt_cv.y, pt_cv.z);

                // Project onto the panorama sphere centered at the origin.
                const Sphere sp(0.0, 0.0, 0.0, radius_);
                PointT pt_projected;
                sphereProject(PointT(pt_temp[0], pt_temp[1], pt_temp[2]), sp, pt_projected);

                // Locate the corresponding panorama pixel and copy its BGR
                // value into the texture, fully opaque.
                int row = 0;
                int col = 0;
                caculatePosInPanorama(pt_projected, img_rows, img_cols, row, col);
                const cv::Vec3b bgr = image_panorama.at<cv::Vec3b>(row, col);
                out_texture.at<cv::Vec4b>(j, i) = cv::Vec4b(bgr[0], bgr[1], bgr[2], 255);
            }

        }
    }

纹理结果

 

 

映射结果

 


免责声明!

本站转载的文章为个人学习借鉴使用,本站对版权不负任何法律责任。如果侵犯了您的隐私权益,请联系本站邮箱yoyou2525@163.com删除。



 
粤ICP备18138465号  © 2018-2025 CODEPRJ.COM