MKT-porter
C++ (3-1) 3D-3D ICP SVD RANSAC

 

 https://blog.csdn.net/sinat_29886521/article/details/77506426

https://www.cnblogs.com/bokeyuan-dlam/articles/15078895.html 

 

 

8.1 RANSAC (Random Sample Consensus)
Inliers and outliers:
Inliers are the points that should count as correct; outliers are the ones that should not (in an image-matching task, the matches that should never have been made).
Algorithm idea (using line fitting as an example):
Simply sampling more data also brings in more outliers, so a larger sample size is not a solution. Instead: few points, many trials.
"Few" means each fit uses only a small number of points; "many" means the fit is repeated many times.
This way a solution that contains the largest number of inliers can always be found.

How to use RANSAC well (i.e. keep the number of sampling rounds small):
1. Reduce the proportion of outliers. 2. Reduce the number of samples drawn per round (these are the two axes of the figure in the original post); see the sketch below for the standard iteration-count estimate.
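As a rough guide (this formula is standard RANSAC practice rather than something from the original post), the number of rounds N needed to hit at least one all-inlier sample with probability p, given outlier ratio ε and sample size s, is N = log(1-p) / log(1-(1-ε)^s). A minimal, self-contained C++ sketch:

#include <cmath>
#include <iostream>

// Estimate how many RANSAC rounds are needed so that, with probability p,
// at least one sample contains only inliers.
// eps: outlier ratio, s: points drawn per sample, p: desired success probability.
int ransac_iterations(double eps, int s, double p) {
    double all_inlier_sample = std::pow(1.0 - eps, s); // probability that one sample is outlier-free
    return static_cast<int>(std::ceil(std::log(1.0 - p) / std::log(1.0 - all_inlier_sample)));
}

int main() {
    // Example: 30% outliers, 4 points per sample, 99% confidence -> about 17 rounds.
    std::cout << ransac_iterations(0.3, 4, 0.99) << std::endl;
    return 0;
}

Both levers mentioned above show up directly in the formula: a smaller ε or a smaller s makes (1-ε)^s larger and therefore N smaller.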

 

 

 

Example 1: calling it in a project

 

 What it does

1. Solve the 3D-3D pose (scale, rotation, translation)

2. RANSAC: randomly sample to find the inlier data and refine the result, excluding the influence of outliers

 

 

CMakeLists.txt

cmake_minimum_required(VERSION 3.5)
project(ICP_SVD_example)

# Set C++ standard to C++11
set(CMAKE_CXX_STANDARD 11)
set(CMAKE_CXX_STANDARD_REQUIRED ON)

# Find Eigen library
find_package(Eigen3 REQUIRED)

# Include directories for Eigen
include_directories(${EIGEN3_INCLUDE_DIR})

# Add executable target
add_executable(icp_svd_example main.cpp)

# Link Eigen library
target_link_libraries(icp_svd_example Eigen3::Eigen)

 

API_3D_3D.hpp

main.cpp

#include <iostream>
#include <Eigen/Dense>
#include <vector>
#include <cmath>
#include <random>    

#include "API_3D_3D.hpp"   
 
using namespace Eigen;
using namespace std;



 
int main() {

    //Test_3();

    //1-1 Create test points (in a real project, pass in your own point pairs)
    vector<Vector3d> source_points;
    vector<Vector3d> target_points;


    // 1-2 Set the ground-truth transform
    Matrix3d currentR;
    Vector3d currentT;
    double currentScale=20;
    //currentR<<1,0,0,0,1,0,0,0,1;// identity
    currentR<<0,-1,0,1,0,0,0,0,1; // 90-degree rotation about z
    currentT<<1000,1000,1000;
    // 1-3 Generate random test points
    Get_TestValue(source_points,target_points,currentScale,currentR,currentT);
 
    //2 Set the optimization parameters
    
    int time_=1000;// number of RANSAC iterations
    int random_poinst_num=4;// points drawn per sample (at least 3)
    double inliner_error_thresh=3;// inlier threshold in meters, assuming the targets are GNSS ENU coordinates (consumer GNSS ~10 m, RTK 1-10 cm)


    //3-0 Variables to store the best result
    double scale;
    Matrix3d R;
    Vector3d t;
    
    //3-1 Run the estimation
    API_ransac_ICP_3D_3D_sRt_inliner_sR(source_points, target_points, time_, inliner_error_thresh,random_poinst_num, scale, R, t);  
    cout << "Scale factor s: " << scale << endl;
    cout << "Rotation matrix R:\n" << R << endl;
    cout << "Translation vector t:\n" << t << endl;
   
    //4 Evaluate the result
    computeError(source_points, target_points, scale, R, t);
   



    return 0;
}

  

  

API_3D_3D.hpp

#include <iostream>
#include <Eigen/Dense>
#include <vector>
#include <cmath>
#include <random>    

 
 
using namespace Eigen;
using namespace std;


// point cloud type definition
typedef vector<Vector3d> PointCloud;



 //  Error between the transformed source points and the target points
double computeError(const vector<Vector3d>& source_points,
                    const vector<Vector3d>& target_points,
                    const double scale, const Matrix3d& R, const Vector3d& t) {
    // Compute transformation error using point-to-point distances
    double total_error = 0.0;
    double Mean_error = 0.0;
    double max_error = 0.0;
    double min_error = 9999;
 
    int num_points = source_points.size();

    vector<double> error_; 
    for (int i = 0; i < num_points; ++i) {
        Vector3d transformed_point = scale * R * source_points[i] + t;

        double cur_error =(transformed_point - target_points[i]).norm();
        error_.push_back(cur_error);

    }


    for(auto cur_error : error_){
           
        if(cur_error>max_error){max_error=cur_error;}

        if(cur_error<min_error){min_error=cur_error;}

        total_error = total_error+cur_error;  //向量的二范数

    }

    Mean_error=total_error / num_points;

    cout<<"Mean_error  " <<Mean_error << "   max_error   "<< max_error<< "   min_error   "<< min_error<< " 总点数  " << num_points <<endl;
    return Mean_error;  // Mean error
}



// 计算点云的每个点的范数平方 -- 用于计算S尺度
vector<double> computeNormSquared(const vector<Vector3d>& cloud) {
    vector<double> norms;

    for (const auto& point : cloud) {
        double normSquared = 0.0;
        for (int i = 0; i < point.size(); ++i)
        {
            normSquared += point[i] * point[i];
        }
        norms.push_back(normSquared);
    }
    
    
    return norms;
} 
 
// Core routine: 3D-3D alignment computed via SVD
/*

Source point cloud: typically the SLAM/VO ENU points      vector<Vector3d>& source_points,
Target point cloud: the GNSS ENU points                    const vector<Vector3d>& target_points,
Output
 scale            double& scale, 
 rotation         Matrix3d& R, 
 translation      Vector3d& t

 Using the result:
   target_points = scale*R*source_points + t

*/
void API_ICP_3D_3D_sRt(const vector<Vector3d>& source_points,
              const vector<Vector3d>& target_points,
              double& scale, Matrix3d& R, Vector3d& t) {
 
    int N = source_points.size();
 
    // 计算质心
    Vector3d centroid_source = Vector3d::Zero();
    Vector3d centroid_target = Vector3d::Zero();
    for(int i=0;i<N;i++){
        centroid_source += source_points[i];
        centroid_target += target_points[i];
    }
    centroid_source /= N;
    centroid_target /= N;

    /// 中心化点云
    vector<Vector3d> centered_source_points (N);
    vector<Vector3d> centered_target_points (N);
    for(int i=0;i<N;i++){
        centered_source_points[i]= source_points[i] - centroid_source;
        centered_target_points[i]= target_points[i] - centroid_target;

    }
     
    // 计算尺度因子 s 方法2
    // 计算点云的每个点的范数平方

    double sum_source = 0.0;
    double sum_target = 0.0;
    for (size_t i = 0; i < N; ++i) {
        sum_source += centered_source_points[i].norm(); // norm---sqrt(x^2+y^2+z^2)
        sum_target += centered_target_points[i].norm();
    }
    double scale_ = sum_target /sum_source;
    scale = scale_;

    // vector<double> source_norm = computeNormSquared(centered_source_points); // 每一行的 source_norm[i]= x^2+y^2+z^2
    // vector<double> target_norm = computeNormSquared(centered_target_points);
    // double sum_source = 0.0;
    // double sum_target = 0.0;
    // for (size_t i = 0; i < N; ++i) {
    //     sum_source += sqrt(source_norm[i]) ;
    //     sum_target += sqrt(target_norm[i]) ;
    // }
    // double scale_ = sum_target /sum_source;
    // scale = scale_;
    // 计算尺度因子 s 方法1
    // double trace = 0.0;
    // for (int i = 0; i < N; ++i) {
    //     trace += centered_target_points[i].transpose() * R * centered_source_points[i];
    // }
    // double src_norm = 0.0;
    // for (int i = 0; i < N; ++i) {
    //     src_norm += centered_source_points[i].squaredNorm();
    // }
    // scale = trace / src_norm;



    // 计算协方差矩阵 H
    Matrix3d H = Matrix3d::Zero();
   
     
    for (int i = 0; i < N; ++i) {
        // 是否乘上scale没有区别 centered_target_points
        H += centered_target_points[i] * centered_source_points[i].transpose();
        //H += scale*centered_source_points[i] * centered_target_points[i].transpose();  
    }
     

    // 使用奇异值分解H SVD
    JacobiSVD<Matrix3d> svd(H, ComputeFullU | ComputeFullV);
    Matrix3d U = svd.matrixU();
    Matrix3d V = svd.matrixV();
   

    // 计算旋转矩阵R
    R = U*  V.transpose() ; // src到dis点变换关系 对应后面计算t
   
    

    if (U.determinant() * V.determinant() < 0) // R的行列式<0 取反
    {
        for (int x = 0; x < 3; ++x)
        {
            U(x, 2) *= -1;
        }
        R = U*  V.transpose() ;
    }
    
    //R = V * U.transpose();
    // 计算平移向量t   两个质心算位移
    t = centroid_target - scale * R * centroid_source;
}
 


void API_ransac_ICP_3D_3D_sRt_inliner_sR(const vector<Vector3d>& source_points,
                     const vector<Vector3d>& target_points,
                     const int num_iterations,        //ransac随机抽取验证次数
                     const double error_threshold,    // 误差阈值 3          
                     const int random_poinst_num,         
                     double& best_scale, Matrix3d& best_R, Vector3d& best_t) {
 
  

    // 1 数据准备
    int best_inlier_count = 0; // 最大内点数
 
    int N = source_points.size();

    //cout<<  "总点数  " << N << endl;
 
    // 全体点计算质心
    Vector3d centroid_source = Vector3d::Zero();
    Vector3d centroid_target = Vector3d::Zero();
    for(int i=0;i<N;i++){
        centroid_source += source_points[i];
        centroid_target += target_points[i];
    }
    centroid_source /= N;
    centroid_target /= N;

    /// 全体点中心化点云
    vector<Vector3d> centered_source_points (N);
    vector<Vector3d> centered_target_points (N);
    for(int i=0;i<N;i++){
        centered_source_points[i]= source_points[i] - centroid_source;
        centered_target_points[i]= target_points[i] - centroid_target;

    }
     

    // 3使用ransc 找出内点剔除外点,计算更加准确的sRt
    // 使用 std::random_device 生成真随机数种子
    std::random_device rd;
    // 使用梅森旋转算法引擎 mt19937,以 rd() 作为种子
    std::mt19937 gen(rd());
    // 定义一个均匀整数分布,范围是[1, 100]
    //std::uniform_int_distribution<int> dist(1, 100);
    uniform_int_distribution<int> dist(0, N - 1);
    // 生成随机数
    //int random_number = dist(gen);
 
     
    // RANSAC loop num_iterations 迭代次数
    for (int iteration = 0; iteration < num_iterations; ++iteration) {
        // Randomly select 3 points to form a minimal sample
 
        vector<Vector3d> source_points_temp;
        vector<Vector3d> target_points_temp;
 
        for (int num=0;num<4;num++){ // 最少3个点  取4个点
 
            int idx = dist(gen);
            source_points_temp.push_back(source_points[idx]);
            target_points_temp.push_back(target_points[idx]);
 
        }
 
        // 本次随机计算结果  Perform ICP with these initial parameters
        double scale;
        Matrix3d R;
        Vector3d t;
        API_ICP_3D_3D_sRt(source_points_temp,target_points_temp, scale, R, t);
 
        // Compute error with all points and count inliers
        int inlier_count = 0;
         

        vector<Vector3d> source_points_inliner;
        vector<Vector3d> target_points_inliner;

        
       
        for (int i = 0; i < N; ++i) {   
           Vector3d centered_source2target_points_i = scale*R*centered_source_points[i];//  使用本次随机抽取样本计算的sRt结果 ,对所有原始点的去中心点变换到目标点的中心点 尺度和旋转位置 
           double source_norm_i=centered_source2target_points_i.norm(); //去中心化的点长2范数度 norm---sqrt(x^2+y^2+z^2)
           double target_norm_i=centered_target_points[i].norm(); //去中心化的点长2范数度  norm---sqrt(x^2+y^2+z^2)

           double error = abs(source_norm_i  - target_norm_i);//计算误差 如果目标点是GNSS的enu坐标 单位是米

           if (error < error_threshold) {// 取3米
                inlier_count++; // 记录本次内点数目
                // 找出内点集合
                source_points_inliner.push_back(source_points[i]); 
                target_points_inliner.push_back(target_points[i]);
           }

        }

        
        // 如果本次内点多余历史最好内点数目 本次结果作为最好结果
        if (inlier_count > best_inlier_count) {

            API_ICP_3D_3D_sRt(source_points_inliner,target_points_inliner, scale, R, t);// 使用内点再次计算最优的srt

            best_inlier_count = inlier_count;
            best_scale = scale;
            best_R = R;
            best_t = t;

            if(iteration==num_iterations-1) cout<<"最大内点数:" <<inlier_count <<endl;
        }
    }

    



}
 
 

void Get_TestValue(vector<Vector3d>& source_points ,vector<Vector3d>& target_points,double currentScale,Matrix3d currentR,Vector3d currentT){

   
    //1-1 随机数产生原始点
    std::random_device rd; // 使用 std::random_device 生成真随机数种子
    std::mt19937 gen(rd());// 使用梅森旋转算法引擎 mt19937,以 rd() 作为种子
    uniform_int_distribution<int> randanm_once1(0, 100);//定义一个均匀整数分布
    int random_number = randanm_once1(gen);// 生成随机数
    
    int N = 100; // number of points
    float per=0.01;// fraction of points kept noise-free
    double noise_error=10;// noise range (uniform in [-noise_error/2, noise_error/2])


    //1-2 随机创建原始点 
    for (int i = 0; i < N; ++i) {
        source_points.push_back(Vector3d(randanm_once1(gen),
                                         randanm_once1(gen),
                                         randanm_once1(gen)));
    }
    
 
    //2 设置目标点 = 原始点+噪声 

    //2-1 设置随机噪声
    
    std::uniform_int_distribution<int> randanm_once2(-noise_error/2, noise_error/2);
    //int random_number = randanm_once2(gen);
 
    
    for(int i=0;i<N;i++){
       
        Vector3d dst_i = currentScale * currentR * source_points[i] + currentT;// 目标点=s*R*原始点+t
        Vector3d noise(randanm_once2(gen) ,randanm_once2(gen) ,randanm_once2(gen));// 噪声
        
        // 目标点组成
        if (i > int(N*(1-per))){target_points.push_back(dst_i );} // 目标点真值
        else{target_points.push_back(dst_i + noise);} // 目标点真值+噪声 模拟视觉slam 和GNSS(民用10米 rtk 1-10cm)本身误差
        
        // cout<< i << " "                         
        //         <<"  source_points: "<< source_points[i][0]<< " "<<source_points[i][1]<<" "<<source_points[i][2] 
        //         <<"  noise: "<< noise[0]<< " "<<noise[1]<<" "<<noise[2] 
        //         <<"  target_points: "<< dst_i[0]<< " "<<dst_i[1]<<" "<<dst_i[2] 
        //         <<"  target_points_niose: "<< target_points[i][0]<< " "<<target_points[i][1]<<" "<<target_points[i][2] 
        //         << endl;
               
    }
 
 
    // 目标点真值+超级噪声 (模拟视觉SLAM某次错误的位姿)加上几个特别的离群来捣乱 验证内点的筛选重要性
    target_points[50]+=(Vector3d){100,100,100};

    target_points[60]+=(Vector3d){-100,-100,100};

    target_points[70]+=(Vector3d){-100,-100,-100};

    target_points[80]+=(Vector3d){-100,100,-100};

} 

  

 

Test example 2: study version of the code

 

 

It compares four different optimization strategies.

100 source points, random noise in [-5,5]; a small fraction of the target points are kept as exact ground truth and the rest get noise added.

 

   
    //1-1 Generate the source points from random numbers
    std::random_device rd; // true random seed from std::random_device
    std::mt19937 gen(rd());// Mersenne-Twister engine seeded with rd()
    uniform_int_distribution<int> randanm_once1(0, 100);// uniform integer distribution
    int random_number = randanm_once1(gen);// draw a random number
    
    int N = 100; // number of points
    float per=0.01;// fraction of points kept noise-free
    double noise_error=10;// noise range (uniform in [-noise_error/2, noise_error/2])

A few extreme outliers are also added:

 
    // ground-truth targets + extreme noise (simulating an occasional wrong pose from visual SLAM): a few deliberate outliers are thrown in to show why inlier selection matters
    target_points[50]+=(Vector3d){100,100,100};

    target_points[60]+=(Vector3d){-100,-100,100};

    target_points[70]+=(Vector3d){-100,-100,-100};

    target_points[80]+=(Vector3d){-100,100,-100};

  

Ground-truth transform used:

    double currentScale=20;
    //currentR<<1,0,0,0,1,0,0,0,1;// identity
    currentR<<0,-1,0,1,0,0,0,0,1; // 90-degree rotation about z
    currentT<<1000,1000,1000;

  

Optimization parameters:

    //2 Set the optimization parameters
    
    int time_=1000;// number of RANSAC iterations
    int random_poinst_num=4;// points drawn per sample (at least 3)
    double inliner_error_thresh=3;// inlier threshold in meters, assuming the targets are GNSS ENU coordinates (consumer GNSS ~10 m, RTK 1-10 cm)

  

Finally the errors are summarized.

 

 

The results are clear:

1. All points used directly: mediocre; the outliers corrupt the estimate badly.
2. RANSAC without inlier refitting: worst; a single random minimal sample is too arbitrary, and the inliers are never exploited.
3. RANSAC, then refit s, R, t on the inlier set: second best.
Here the inliers are found on the fully transformed points.
Because t = p_target - s*R*p_source, the errors in s and R are injected into t again,
which makes the transformed points less accurate and therefore inflates the error used for the inlier test.

4. RANSAC, then refit using only s and R on the centered point sets: best, effectively a joint refinement of 1 and 2 (see the sketch after this list).

The inliers are selected on the de-centered point sets, so only s and R are involved and the accuracy is higher.
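For reference, the two inlier residuals discussed above can be written side by side. This is a hedged sketch, not code from the project; the helper names are made up, and the centered points are assumed to be precomputed the way API_ransac_ICP_3D_3D_sRt_inliner_sR does it:

#include <Eigen/Dense>
#include <cmath>

// Variant 3: full sRt residual. Any error in s and R also contaminates t, since t is derived from them.
double residual_sRt(const Eigen::Vector3d& src, const Eigen::Vector3d& dst,
                    double s, const Eigen::Matrix3d& R, const Eigen::Vector3d& t) {
    return (s * R * src + t - dst).norm();
}

// Variant 4: compare only the lengths of the centered points under s and R; t never enters the test.
double residual_sR(const Eigen::Vector3d& src_centered, const Eigen::Vector3d& dst_centered,
                   double s, const Eigen::Matrix3d& R) {
    return std::abs((s * R * src_centered).norm() - dst_centered.norm());
}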

  

 

Code 2: study version

 main.cpp

#include <iostream>
#include <Eigen/Dense>
#include <vector>
#include <cmath>
#include <random>    

 
 
using namespace Eigen;
using namespace std;


// 点云类型定义
typedef vector<Vector3d> PointCloud;



 //  变换点和目标点计算误差
double computeError(const vector<Vector3d>& source_points,
                    const vector<Vector3d>& target_points,
                    const double scale, const Matrix3d& R, const Vector3d& t) {
    // Compute transformation error using point-to-point distances
    double total_error = 0.0;
    double Mean_error = 0.0;
    double max_error = 0.0;
    double min_error = 9999;
 
    int num_points = source_points.size();

    vector<double> error_; 
    for (int i = 0; i < num_points; ++i) {
        Vector3d transformed_point = scale * R * source_points[i] + t;

        double cur_error =(transformed_point - target_points[i]).norm();
        error_.push_back(cur_error);

    }


    for(auto cur_error : error_){
           
        if(cur_error>max_error){max_error=cur_error;}

        if(cur_error<min_error){min_error=cur_error;}

        total_error = total_error+cur_error;  //向量的二范数

    }

    Mean_error=total_error / num_points;

    cout<<"Mean_error  " <<Mean_error << "   max_error   "<< max_error<< "   min_error   "<< min_error<< " 总点数  " << num_points <<endl;
    return Mean_error;  // Mean error
}



// 计算点云的每个点的范数平方 -- 用于计算S尺度
vector<double> computeNormSquared(const vector<Vector3d>& cloud) {
    vector<double> norms;

    for (const auto& point : cloud) {
        double normSquared = 0.0;
        for (int i = 0; i < point.size(); ++i)
        {
            normSquared += point[i] * point[i];
        }
        norms.push_back(normSquared);
    }
    
    
    return norms;
} 
 
// Core routine: 3D-3D alignment computed via SVD
/*

Source point cloud: typically the SLAM/VO ENU points      vector<Vector3d>& source_points,
Target point cloud: the GNSS ENU points                    const vector<Vector3d>& target_points,
Output
 scale            double& scale, 
 rotation         Matrix3d& R, 
 translation      Vector3d& t

 Using the result:
   target_points = scale*R*source_points + t

*/
void API_ICP_3D_3D_sRt(const vector<Vector3d>& source_points,
              const vector<Vector3d>& target_points,
              double& scale, Matrix3d& R, Vector3d& t) {
 
    int N = source_points.size();
 
    // 计算质心
    Vector3d centroid_source = Vector3d::Zero();
    Vector3d centroid_target = Vector3d::Zero();
    for(int i=0;i<N;i++){
        centroid_source += source_points[i];
        centroid_target += target_points[i];
    }
    centroid_source /= N;
    centroid_target /= N;

    /// 中心化点云
    vector<Vector3d> centered_source_points (N);
    vector<Vector3d> centered_target_points (N);
    for(int i=0;i<N;i++){
        centered_source_points[i]= source_points[i] - centroid_source;
        centered_target_points[i]= target_points[i] - centroid_target;

    }
     
    // 计算尺度因子 s 方法2
    // 计算点云的每个点的范数平方

    double sum_source = 0.0;
    double sum_target = 0.0;
    for (size_t i = 0; i < N; ++i) {
        sum_source += centered_source_points[i].norm(); // norm---sqrt(x^2+y^2+z^2)
        sum_target += centered_target_points[i].norm();
    }
    double scale_ = sum_target /sum_source;
    scale = scale_;

    // vector<double> source_norm = computeNormSquared(centered_source_points); // 每一行的 source_norm[i]= x^2+y^2+z^2
    // vector<double> target_norm = computeNormSquared(centered_target_points);
    // double sum_source = 0.0;
    // double sum_target = 0.0;
    // for (size_t i = 0; i < N; ++i) {
    //     sum_source += sqrt(source_norm[i]) ;
    //     sum_target += sqrt(target_norm[i]) ;
    // }
    // double scale_ = sum_target /sum_source;
    // scale = scale_;
    // 计算尺度因子 s 方法1
    // double trace = 0.0;
    // for (int i = 0; i < N; ++i) {
    //     trace += centered_target_points[i].transpose() * R * centered_source_points[i];
    // }
    // double src_norm = 0.0;
    // for (int i = 0; i < N; ++i) {
    //     src_norm += centered_source_points[i].squaredNorm();
    // }
    // scale = trace / src_norm;



    // 计算协方差矩阵 H
    Matrix3d H = Matrix3d::Zero();
   
     
    for (int i = 0; i < N; ++i) {
        // 是否乘上scale没有区别 centered_target_points
        H += centered_target_points[i] * centered_source_points[i].transpose();
        //H += scale*centered_source_points[i] * centered_target_points[i].transpose();  
    }
     

    // 使用奇异值分解H SVD
    JacobiSVD<Matrix3d> svd(H, ComputeFullU | ComputeFullV);
    Matrix3d U = svd.matrixU();
    Matrix3d V = svd.matrixV();
   

    // 计算旋转矩阵R
    R = U*  V.transpose() ; // src到dis点变换关系 对应后面计算t
   
    

    if (U.determinant() * V.determinant() < 0) // R的行列式<0 取反
    {
        for (int x = 0; x < 3; ++x)
        {
            U(x, 2) *= -1;
        }
        R = U*  V.transpose() ;
    }
    
    //R = V * U.transpose();
    // 计算平移向量t   两个质心算位移
    t = centroid_target - scale * R * centroid_source;
}
 
 // 不带尺度的计算
 /*
 typedef Vector3d Point3f;

 void pose_estimation_3d3d (
    const vector<Point3f>& pts1,
    const vector<Point3f>& pts2,
    Mat& R, Mat& t
)
{
    Point3f p1, p2;     // center of mass
    int N = pts1.size();
    for ( int i=0; i<N; i++ )
    {
        p1 += pts1[i];
        p2 += pts2[i];
    }
    p1 = Point3f( Vec3f(p1) /  N);
    p2 = Point3f( Vec3f(p2) / N);
    vector<Point3f>     q1 ( N ), q2 ( N ); // remove the center
    for ( int i=0; i<N; i++ )
    {
        q1[i] = pts1[i] - p1;
        q2[i] = pts2[i] - p2;
    }

    // compute q1*q2^T
    Eigen::Matrix3d W = Eigen::Matrix3d::Zero();
    for ( int i=0; i<N; i++ )
    {
        W += Eigen::Vector3d ( q1[i].x, q1[i].y, q1[i].z ) * Eigen::Vector3d ( q2[i].x, q2[i].y, q2[i].z ).transpose();
    }
    cout<<"W="<<W<<endl;

    // SVD on W
    Eigen::JacobiSVD<Eigen::Matrix3d> svd ( W, Eigen::ComputeFullU|Eigen::ComputeFullV );
    Eigen::Matrix3d U = svd.matrixU();
    Eigen::Matrix3d V = svd.matrixV();
    
    if (U.determinant() * V.determinant() < 0)
    {
        for (int x = 0; x < 3; ++x)
        {
            U(x, 2) *= -1;
        }
    }
    
    cout<<"U="<<U<<endl;
    cout<<"V="<<V<<endl;

    Eigen::Matrix3d R_ = U* ( V.transpose() );
    Eigen::Vector3d t_ = Eigen::Vector3d ( p1.x, p1.y, p1.z ) - R_ * Eigen::Vector3d ( p2.x, p2.y, p2.z );

    // convert to cv::Mat
    R = ( Mat_<double> ( 3,3 ) <<
          R_ ( 0,0 ), R_ ( 0,1 ), R_ ( 0,2 ),
          R_ ( 1,0 ), R_ ( 1,1 ), R_ ( 1,2 ),
          R_ ( 2,0 ), R_ ( 2,1 ), R_ ( 2,2 )
        );
    t = ( Mat_<double> ( 3,1 ) << t_ ( 0,0 ), t_ ( 1,0 ), t_ ( 2,0 ) );
}
*/




/*
Random sample selection:
 
Randomly pick a small subset of the input data as the initial sample set (e.g. 3 points to estimate the initial transform parameters).
Model fitting:
 
Use the selected sample set to estimate the model parameters (translation, rotation, and so on).
Consistency check:
 
Compare all remaining data points against the estimated model and compute their fitting error or distance to it. Points whose error is within some threshold are considered consistent (inliers).
Repeat:
 
Repeat the steps above many times (usually a preset number of iterations) and take the model parameters with the largest consensus (most inliers) as the final fit.

 */
 
void API_ransac_ICP_3D_3D_sRt_noinliner(const vector<Vector3d>& source_points,
                     const vector<Vector3d>& target_points,
                     const int num_iterations,        //ransac随机抽取验证次数
                     const double error_threshold,    // 误差阈值 3
                     const int random_poinst_num,       
                     double& best_scale, Matrix3d& best_R, Vector3d& best_t) {
 
    int num_points = source_points.size();


    int best_inlier_count = 0;
 
    // 使用 std::random_device 生成真随机数种子
    std::random_device rd;
    // 使用梅森旋转算法引擎 mt19937,以 rd() 作为种子
    std::mt19937 gen(rd());
    // 定义一个均匀整数分布,范围是[1, 100]
    //std::uniform_int_distribution<int> dist(1, 100);
    uniform_int_distribution<int> dist(0, num_points - 1);
    // 生成随机数
    //int random_number = dist(gen);
 
     
    // RANSAC loop num_iterations 迭代次数
    for (int iteration = 0; iteration < num_iterations; ++iteration) {
        // Randomly select 3 points to form a minimal sample
 
        vector<Vector3d> source_points_temp;
        vector<Vector3d> target_points_temp;
 
        for (int num=0;num<4;num++){ // 最少3个点  取4个点
 
            int idx = dist(gen);
            source_points_temp.push_back(source_points[idx]);
            target_points_temp.push_back(target_points[idx]);
 
        }
 
        // 本次计算结果  Perform ICP with these initial parameters
        double scale;
        Matrix3d R;
        Vector3d t;
        API_ICP_3D_3D_sRt(source_points_temp,target_points_temp, scale, R, t);
 
        // Compute error with all points and count inliers
        int inlier_count = 0;
         
        //  使用本次随机抽取样本计算的sRt结果 ,对所有原始点变换,计算误差是否在阈值内,统计内点最多的情况,记录对应sRt
        for (int i = 0; i < num_points; ++i) {


            Vector3d transformed_point = scale * R * source_points[i] + t;
            double error = (transformed_point - target_points[i]).norm();  //向量的二范数
        
            //double error = computeError({source_points[i]}, {target_points[i]}, scale, R, t);
            if (error < error_threshold) {
                inlier_count++;
            }
        }
 
        // Update best model if this model is better
        if (inlier_count > best_inlier_count) {
            if(iteration==num_iterations-1) cout<<"最大内点数:" <<inlier_count <<endl;
            best_inlier_count = inlier_count;
            best_scale = scale;
            best_R = R;
            best_t = t;
        }
    }
}
 

void API_ransac_ICP_3D_3D_sRt_inliner_sRt(const vector<Vector3d>& source_points,
                     const vector<Vector3d>& target_points,
                     const int num_iterations,        //ransac随机抽取验证次数
                     const double error_threshold,    // 误差阈值 3
                     const int random_poinst_num,       
                     double& best_scale, Matrix3d& best_R, Vector3d& best_t) {
 
    int num_points = source_points.size();


    int best_inlier_count = 0;
 
    // 使用 std::random_device 生成真随机数种子
    std::random_device rd;
    // 使用梅森旋转算法引擎 mt19937,以 rd() 作为种子
    std::mt19937 gen(rd());
    // 定义一个均匀整数分布,范围是[1, 100]
    //std::uniform_int_distribution<int> dist(1, 100);
    uniform_int_distribution<int> dist(0, num_points - 1);
    // 生成随机数
    //int random_number = dist(gen);
 
     
    // RANSAC loop num_iterations 迭代次数
    for (int iteration = 0; iteration < num_iterations; ++iteration) {
        // Randomly select 3 points to form a minimal sample
 
        vector<Vector3d> source_points_temp;
        vector<Vector3d> target_points_temp;
 
        for (int num=0;num<4;num++){ // 最少3个点  取4个点
 
            int idx = dist(gen);
            source_points_temp.push_back(source_points[idx]);
            target_points_temp.push_back(target_points[idx]);
 
        }
 
        // 本次计算结果  Perform ICP with these initial parameters
        double scale;
        Matrix3d R;
        Vector3d t;
        API_ICP_3D_3D_sRt(source_points_temp,target_points_temp, scale, R, t);
 
        // Compute error with all points and count inliers
        int inlier_count = 0;
         



        
        vector<Vector3d> source_points_inliner;
        vector<Vector3d> target_points_inliner;

        //  使用本次随机抽取样本计算的sRt结果 ,对所有原始点变换,计算误差是否在阈值内,统计内点最多的情况,记录对应sRt
        for (int i = 0; i < num_points; ++i) {
            //double error = computeError({source_points[i]}, {target_points[i]}, scale, R, t); 

            Vector3d transformed_point = scale * R * source_points[i] + t;
            double error = (transformed_point - target_points[i]).norm();  //向量的二范数    

            if (error < error_threshold) {
                inlier_count++;
                // 找出内点 
                source_points_inliner.push_back(source_points[i]);
                target_points_inliner.push_back(target_points[i]);
            }
        }
        
        // Update best model if this model is better
        if (inlier_count > best_inlier_count) {

            API_ICP_3D_3D_sRt(source_points_inliner,target_points_inliner, scale, R, t);// 使用内点再次计算最优的srt

            best_inlier_count = inlier_count;
            best_scale = scale;
            best_R = R;
            best_t = t;

            if(iteration==num_iterations-1) cout<<"最大内点数:" <<inlier_count <<endl;
        }
    }
}
 
 




void API_ransac_ICP_3D_3D_sRt_inliner_sR(const vector<Vector3d>& source_points,
                     const vector<Vector3d>& target_points,
                     const int num_iterations,        //ransac随机抽取验证次数
                     const double error_threshold,    // 误差阈值 3          
                     const int random_poinst_num,         
                     double& best_scale, Matrix3d& best_R, Vector3d& best_t) {
 
  

    // 1 数据准备
    int best_inlier_count = 0; // 最大内点数
 
    int N = source_points.size();

    //cout<<  "总点数  " << N << endl;
 
    // 全体点计算质心
    Vector3d centroid_source = Vector3d::Zero();
    Vector3d centroid_target = Vector3d::Zero();
    for(int i=0;i<N;i++){
        centroid_source += source_points[i];
        centroid_target += target_points[i];
    }
    centroid_source /= N;
    centroid_target /= N;

    /// 全体点中心化点云
    vector<Vector3d> centered_source_points (N);
    vector<Vector3d> centered_target_points (N);
    for(int i=0;i<N;i++){
        centered_source_points[i]= source_points[i] - centroid_source;
        centered_target_points[i]= target_points[i] - centroid_target;

    }
     

    // 3使用ransc 找出内点剔除外点,计算更加准确的sRt
    // 使用 std::random_device 生成真随机数种子
    std::random_device rd;
    // 使用梅森旋转算法引擎 mt19937,以 rd() 作为种子
    std::mt19937 gen(rd());
    // 定义一个均匀整数分布,范围是[1, 100]
    //std::uniform_int_distribution<int> dist(1, 100);
    uniform_int_distribution<int> dist(0, N - 1);
    // 生成随机数
    //int random_number = dist(gen);
 
     
    // RANSAC loop num_iterations 迭代次数
    for (int iteration = 0; iteration < num_iterations; ++iteration) {
        // Randomly select 3 points to form a minimal sample
 
        vector<Vector3d> source_points_temp;
        vector<Vector3d> target_points_temp;
 
        for (int num=0;num<4;num++){ // 最少3个点  取4个点
 
            int idx = dist(gen);
            source_points_temp.push_back(source_points[idx]);
            target_points_temp.push_back(target_points[idx]);
 
        }
 
        // 本次随机计算结果  Perform ICP with these initial parameters
        double scale;
        Matrix3d R;
        Vector3d t;
        API_ICP_3D_3D_sRt(source_points_temp,target_points_temp, scale, R, t);
 
        // Compute error with all points and count inliers
        int inlier_count = 0;
         

        vector<Vector3d> source_points_inliner;
        vector<Vector3d> target_points_inliner;

        
       
        for (int i = 0; i < N; ++i) {   
           Vector3d centered_source2target_points_i = scale*R*centered_source_points[i];//  使用本次随机抽取样本计算的sRt结果 ,对所有原始点的去中心点变换到目标点的中心点 尺度和旋转位置 
           double source_norm_i=centered_source2target_points_i.norm(); //去中心化的点长2范数度 norm---sqrt(x^2+y^2+z^2)
           double target_norm_i=centered_target_points[i].norm(); //去中心化的点长2范数度  norm---sqrt(x^2+y^2+z^2)

           double error = abs(source_norm_i  - target_norm_i);//计算误差 如果目标点是GNSS的enu坐标 单位是米

           if (error < error_threshold) {// 取3米
                inlier_count++; // 记录本次内点数目
                // 找出内点集合
                source_points_inliner.push_back(source_points[i]); 
                target_points_inliner.push_back(target_points[i]);
           }

        }

        
        // 如果本次内点多余历史最好内点数目 本次结果作为最好结果
        if (inlier_count > best_inlier_count) {

            API_ICP_3D_3D_sRt(source_points_inliner,target_points_inliner, scale, R, t);// 使用内点再次计算最优的srt

            best_inlier_count = inlier_count;
            best_scale = scale;
            best_R = R;
            best_t = t;

            if(iteration==num_iterations-1) cout<<"最大内点数:" <<inlier_count <<endl;
        }
    }

    



}
 
 



void Get_TestValue(vector<Vector3d>& source_points ,vector<Vector3d>& target_points,double currentScale,Matrix3d currentR,Vector3d currentT){

   
    //1-1 随机数产生原始点
    std::random_device rd; // 使用 std::random_device 生成真随机数种子
    std::mt19937 gen(rd());// 使用梅森旋转算法引擎 mt19937,以 rd() 作为种子
    uniform_int_distribution<int> randanm_once1(0, 100);//定义一个均匀整数分布
    int random_number = randanm_once1(gen);// 生成随机数
    
    int N = 100; // 100个点
    float per=0.01;// 假设10%点是对的
    double noise_error=10;// 噪声范围


    //1-2 随机创建原始点 
    for (int i = 0; i < N; ++i) {
        source_points.push_back(Vector3d(randanm_once1(gen),
                                         randanm_once1(gen),
                                         randanm_once1(gen)));
    }
    
 
    //2 设置目标点 = 原始点+噪声 

    //2-1 设置随机噪声
    
    std::uniform_int_distribution<int> randanm_once2(-noise_error/2, noise_error/2);
    //int random_number = randanm_once2(gen);
 
    
    for(int i=0;i<N;i++){
       
        Vector3d dst_i = currentScale * currentR * source_points[i] + currentT;// 目标点=s*R*原始点+t
        Vector3d noise(randanm_once2(gen) ,randanm_once2(gen) ,randanm_once2(gen));// 噪声
        
        // 目标点组成
        if (i > int(N*(1-per))){target_points.push_back(dst_i );} // 目标点真值
        else{target_points.push_back(dst_i + noise);} // 目标点真值+噪声 模拟视觉slam 和GNSS(民用10米 rtk 1-10cm)本身误差
        
        // cout<< i << " "                         
        //         <<"  source_points: "<< source_points[i][0]<< " "<<source_points[i][1]<<" "<<source_points[i][2] 
        //         <<"  noise: "<< noise[0]<< " "<<noise[1]<<" "<<noise[2] 
        //         <<"  target_points: "<< dst_i[0]<< " "<<dst_i[1]<<" "<<dst_i[2] 
        //         <<"  target_points_niose: "<< target_points[i][0]<< " "<<target_points[i][1]<<" "<<target_points[i][2] 
        //         << endl;
               
    }
 
 
    // 目标点真值+超级噪声 (模拟视觉SLAM某次错误的位姿)加上几个特别的离群来捣乱 验证内点的筛选重要性
    target_points[50]+=(Vector3d){100,100,100};

    target_points[60]+=(Vector3d){-100,-100,100};

    target_points[70]+=(Vector3d){-100,-100,-100};

    target_points[80]+=(Vector3d){-100,100,-100};

} 


// 测试3
void Test_3(){
 
 
 
    //1-1 创建测试点
    vector<Vector3d> source_points;
    vector<Vector3d> target_points;


    // 1-2 设置变换矩阵
    Matrix3d currentR;
    Vector3d currentT;
    double currentScale=20;
    //currentR<<1,0,0,0,1,0,0,0,1;// 单位阵
    currentR<<0,-1,0,1,0,0,0,0,1; // 旋转90度
    currentT<<1000,1000,1000;

    Get_TestValue(source_points,target_points,currentScale,currentR,currentT);
 
    //2 设置优化参数
    
    int time_=1000;// 迭代次数
    int random_poinst_num=4;// 每次抽取的点数 起码3个点
    double inliner_error_thresh=3;// 如果目标点是 GNSS的enu坐标(民用10米 rtk 1-10cm)

    // 统计四种方法的优缺点
    double t_error1,t_error2,t_error3,t_error4;
    double s1,s2,s3,s4;

    //3-0 记录最优结果 Variables to store results
    double scale;
    Matrix3d R;
    Vector3d t;

     //3-1 开始计算 Perform RANSAC-based 3D-3D ICP
    //  100 迭代次数 误差阈值 1
    cout << "========1 全部点参与计算 中等 离群点干扰严重 ==========: " << endl;
    API_ICP_3D_3D_sRt(source_points, target_points, scale, R, t);
    // Output results
    cout << "Scale factor s: " << scale << endl;
    cout << "Rotation matrix R:\n" << R << endl;
    cout << "Translation vector t:\n" << t << endl;
    t_error1= sqrt(pow((currentT[0]-t[0]),2)+pow((currentT[1]-t[1]),2)+pow((currentT[2]-t[2]),2));
    cout << "位移的error:\n" << t_error1 << endl;
    computeError(source_points, target_points, scale, R, t);
    s1=currentScale-scale;

    
     //3-2 开始计算 Perform RANSAC-based 3D-3D ICP
    cout << "========2  ransac 最糟糕 单纯随机性太偶然,不找内点==========: " << endl;
    //  100 迭代次数 误差阈值 1
    API_ransac_ICP_3D_3D_sRt_noinliner(source_points, target_points, time_, inliner_error_thresh,random_poinst_num, scale, R, t);
    // Output results    
    cout << "Scale factor s: " << scale << endl;
    cout << "Rotation matrix R:\n" << R << endl;
    cout << "Translation vector t:\n" << t << endl;
    t_error2= sqrt(pow((currentT[0]-t[0]),2)+pow((currentT[1]-t[1]),2)+pow((currentT[2]-t[2]),2));
    cout << "位移的error:\n" << t_error2 << endl;
    computeError(source_points, target_points, scale, R, t);
    s2=currentScale-scale;


    cout << "========3 ransac 次优 ransac挑选出内点去掉离群点, 内点整体参与计算sRT,T会引入新的误差 ==========: " << endl;
    API_ransac_ICP_3D_3D_sRt_inliner_sRt(source_points, target_points, time_, inliner_error_thresh,random_poinst_num, scale, R, t);  
    cout << "Scale factor s: " << scale << endl;
    cout << "Rotation matrix R:\n" << R << endl;
    cout << "Translation vector t:\n" << t << endl;
    t_error3= sqrt(pow((currentT[0]-t[0]),2)+pow((currentT[1]-t[1]),2)+pow((currentT[2]-t[2]),2));
    cout << "位移的error:\n" << t_error3 << endl;
    computeError(source_points, target_points, scale, R, t);
    s3=currentScale-scale;
    //cout << "位移的error:  1 all " <<  t_error1  << " 2 ransc  " <<t_error2 << "  3 ranc+inliner "<<t_error3 << endl;

    cout << "========4 ransac 最优 ransac挑选出内点去掉离群点, 内点整体参与计算sR,相当于1和2的联合优化 ==========: " << endl;

     scale=0;
     R = Matrix3d::Identity(); // reset before the final run
     t={0,0,0};
    API_ransac_ICP_3D_3D_sRt_inliner_sR(source_points, target_points, time_, inliner_error_thresh,random_poinst_num, scale, R, t);  
    cout << "Scale factor s: " << scale << endl;
    cout << "Rotation matrix R:\n" << R << endl;
    cout << "Translation vector t:\n" << t << endl;
    t_error4= sqrt(pow((currentT[0]-t[0]),2)+pow((currentT[1]-t[1]),2)+pow((currentT[2]-t[2]),2));
    cout << "位移的error:\n" << t_error4 << endl;
    computeError(source_points, target_points, scale, R, t);
    s4=currentScale-scale;

    cout << "位移的error:  1 all " <<  t_error1  << " 2 ransc  " <<t_error2 << "  3 ranc+inliner-sRt "<<t_error3 << "  t_error4-sR  "<<t_error4 <<endl;
    cout << "尺度的error:  1 all " <<  s1  << " 2 ransc  " <<s2 << "  3 ranc+inliner-sRt  "<<s3 << "  t_error4-sRt   "<<s4 <<endl;




 
}
 
 
 
int main() {

    Test_3();
    return 0;
}

  

Code adapted for OpenVSLAM

API_3D_3D_sRt.h

#ifndef API_3D_3D_sRt_H
#define API_3D_3D_sRt_H



#include <iostream>
#include <Eigen/Dense>
#include <vector>
#include <cmath>
#include <random>   
 
  
  
using namespace Eigen;
using namespace std;
 

// compute the root-mean-square error
double cal_rmse_t_v3s_v3s( vector<Eigen::Vector3d> &groundtruth,vector<Eigen::Vector3d> &estimated);

 
// Core routine: 3D-3D alignment computed via SVD
/*

 
Computes the overall sRt relation purely from matched 3D point pairs.
Source point cloud: typically the SLAM/VO ENU points      vector<Vector3d>& source_points,
Target point cloud: the GNSS ENU points                    const vector<Vector3d>& target_points,
Output
 scale            double& scale,
 rotation         Matrix3d& R,
 translation      Vector3d& t
 
 Using the result:
   target_points = scale*R*source_points + t


Note
https://www.cnblogs.com/gooutlook/p/18353030
Ready-made libraries can be called directly, but none of them perform random sample consensus.
 
*/
void API_ICP_3D_3D_sRt(const vector<Vector3d>& source_points,
              const vector<Vector3d>& target_points,
              double& scale, Matrix3d& R, Vector3d& t) ;
  
 
 // Find inliers on the centered point clouds; the error is currently good. API_ICP_3D_3D_sRt + random sample consensus
void API_ransac_ICP_3D_3D_sRt_inliner_sR(const vector<Vector3d>& source_points,
                     const vector<Vector3d>& target_points,
                     int num_iterations,        //ransac随机抽取验证次数
                     const double error_threshold,    // 误差阈值 3         
                     
                     double& best_scale, Matrix3d& best_R, Vector3d& best_t);

// Find inliers on the directly transformed points; larger error. API_ICP_3D_3D_sRt + random sample consensus
void API_ransac_ICP_3D_3D_sRt_inliner_sRt(const vector<Vector3d>& source_points,
                     const vector<Vector3d>& target_points,
                     int num_iterations,        //ransac随机抽取验证次数
                     const double error_threshold,    // 误差阈值 3         
                     
                     double& best_scale, Matrix3d& best_R, Vector3d& best_t) ;
#endif 
  
 

  

API_3D_3D_sRt.cc

#ifndef API_3D_3D_sRt_CPP
#define API_3D_3D_sRt_CPP



#include "API_3D_3D_sRt.h"


/*

1. RANSAC (random sample consensus)
The goal of this method is to select the correct data out of all the data for the estimation. A few definitions first:

point:       one data item; in SLAM this is one matched point pair
outlier:     a wrong point
inlier:      a correct point
inlier set:  the set of inliers
outlier set: the set of outliers
model:       the parameters to be estimated
s:           the minimum number of points needed to estimate the model
P:           the set of all points
Clearly the inlier set is the correct data we want to find. The idea of RANSAC is to randomly pick s points from P, estimate a model, and check whether the remaining points agree with it. If most points agree, we have found a suitable model, and the points that agree with it are the inliers. Because the selection is random, a single attempt may fail to find the correct model, so multiple attempts are needed. The full RANSAC algorithm:

STEP 1. Randomly pick s points from P as a sample and estimate a model.
STEP 2. Determine the set S of data points within the distance threshold t of the model; S is the consensus (inlier) set.
STEP 3. If the size of S exceeds some threshold T, re-estimate a more accurate model with all the inliers in S.
STEP 4. If the size of S is below the threshold T, go back to step 1.
STEP 5. After N trials, pick the largest consensus set S and re-estimate the model with all of its points.
The three important parameters that must be chosen in RANSAC are t, T and N.



*/
  
  
double cal_rmse_t_v3s_v3s( vector<Eigen::Vector3d> &groundtruth,vector<Eigen::Vector3d> &estimated){


 // // compute rmse
  double rmse = 0;
  for (size_t i = 0; i < estimated.size(); i++) {
    Eigen::Vector3d p1 = estimated[i];
    Eigen::Vector3d p2 = groundtruth[i];
  
    Eigen::Vector3d error  = (p2- p1);
    rmse += error.squaredNorm();//用 squaredNorm() 计算误差平方和 norm平方根

  }
  rmse = rmse / double(estimated.size());
  rmse = sqrt(rmse);
  
  //cout << "RMSE = " << rmse << endl;
  return rmse;
}



 

  
// Core routine: 3D-3D alignment computed via SVD
/*
 
Source point cloud: typically the SLAM/VO ENU points      vector<Vector3d>& source_points,
Target point cloud: the GNSS ENU points                    const vector<Vector3d>& target_points,
Output
 scale            double& scale,
 rotation         Matrix3d& R,
 translation      Vector3d& t
 
 Using the result:
   target_points = scale*R*source_points + t
 
*/
void API_ICP_3D_3D_sRt(const vector<Vector3d>& source_points,
              const vector<Vector3d>& target_points,
              double& scale, Matrix3d& R, Vector3d& t) {
  
    int N = source_points.size();
  
    // 计算质心
    Vector3d centroid_source = Vector3d::Zero();
    Vector3d centroid_target = Vector3d::Zero();
    for(int i=0;i<N;i++){
        centroid_source += source_points[i];
        centroid_target += target_points[i];
    }
    centroid_source /= N;
    centroid_target /= N;
 
    /// 中心化点云
    vector<Vector3d> centered_source_points (N);
    vector<Vector3d> centered_target_points (N);
    for(int i=0;i<N;i++){
        centered_source_points[i]= source_points[i] - centroid_source;
        centered_target_points[i]= target_points[i] - centroid_target;
 
    }
      
    // 计算尺度因子 s 方法2
    // 计算点云的每个点的范数平方
 
    double sum_source = 0.0;
    double sum_target = 0.0;
    for (size_t i = 0; i < N; ++i) {
        sum_source += centered_source_points[i].norm(); // norm---sqrt(x^2+y^2+z^2)
        sum_target += centered_target_points[i].norm();
    }
    double scale_ = sum_target /sum_source;
    scale = scale_;
 
    // vector<double> source_norm = computeNormSquared(centered_source_points); // 每一行的 source_norm[i]= x^2+y^2+z^2
    // vector<double> target_norm = computeNormSquared(centered_target_points);
    // double sum_source = 0.0;
    // double sum_target = 0.0;
    // for (size_t i = 0; i < N; ++i) {
    //     sum_source += sqrt(source_norm[i]) ;
    //     sum_target += sqrt(target_norm[i]) ;
    // }
    // double scale_ = sum_target /sum_source;
    // scale = scale_;
    // 计算尺度因子 s 方法1
    // double trace = 0.0;
    // for (int i = 0; i < N; ++i) {
    //     trace += centered_target_points[i].transpose() * R * centered_source_points[i];
    // }
    // double src_norm = 0.0;
    // for (int i = 0; i < N; ++i) {
    //     src_norm += centered_source_points[i].squaredNorm();
    // }
    // scale = trace / src_norm;
 
 
 
    // 计算协方差矩阵 H
    Matrix3d H = Matrix3d::Zero();
    
      
    for (int i = 0; i < N; ++i) {
        // 是否乘上scale没有区别 centered_target_points
        H += centered_target_points[i] * centered_source_points[i].transpose();
        //H += scale*centered_source_points[i] * centered_target_points[i].transpose(); 
    }
      
 
    // 使用奇异值分解H SVD
    JacobiSVD<Matrix3d> svd(H, ComputeFullU | ComputeFullV);
    Matrix3d U = svd.matrixU();
    Matrix3d V = svd.matrixV();
    
 
    // 计算旋转矩阵R
    R = U*  V.transpose() ; // src到dis点变换关系 对应后面计算t
    
     
 
    if (U.determinant() * V.determinant() < 0) // R的行列式<0 取反
    {
        for (int x = 0; x < 3; ++x)
        {
            U(x, 2) *= -1;
        }
        R = U*  V.transpose() ;
    }
     
    //R = V * U.transpose();
    // 计算平移向量t   两个质心算位移
    t = centroid_target - scale * R * centroid_source;
}
  
 //https://zhuanlan.zhihu.com/p/62175983
 // The error is computed between the centered source points transformed by s,R and the centered target points.
// 1. Randomly sample and compute sRt  2. Find inliers on the centered point sets  3. Recompute the best sRt from the largest inlier set
void API_ransac_ICP_3D_3D_sRt_inliner_sR(const vector<Vector3d>& source_points,
                     const vector<Vector3d>& target_points,
                     int num_iterations,        //ransac随机抽取验证次数
                     const double error_threshold,    // 误差阈值 3         
                     
                     double& best_scale, Matrix3d& best_R, Vector3d& best_t) {
  
   




 
    // 1 数据准备

    int best_inlier_count = 0; // 最大内点数
    int N = source_points.size();
    //cout<<  "总点数  " << N << endl;

    int num_rancek_temp = N * (N - 1) / 2; // number of 2-point combinations, used as an upper bound on the trial count
    if(num_rancek_temp<num_iterations) // cap the iteration count to save time; 1000 rounds is normally plenty
    {
        num_iterations=num_rancek_temp;//ransac sampling/verification count
    }
      

  
    // 全体点计算质心
    Vector3d centroid_source = Vector3d::Zero();
    Vector3d centroid_target = Vector3d::Zero();
    for(int i=0;i<N;i++){
        centroid_source += source_points[i];
        centroid_target += target_points[i];
    }
    centroid_source /= N;
    centroid_target /= N;
 
    /// 全体点中心化点云
    vector<Vector3d> centered_source_points (N);
    vector<Vector3d> centered_target_points (N);
    for(int i=0;i<N;i++){
        centered_source_points[i]= source_points[i] - centroid_source;
        centered_target_points[i]= target_points[i] - centroid_target;
 
    }
      
 
    // 3使用ransc 找出内点剔除外点,计算更加准确的sRt
    // 使用 std::random_device 生成真随机数种子
    std::random_device rd;
    // 使用梅森旋转算法引擎 mt19937,以 rd() 作为种子
    std::mt19937 gen(rd());
    // 定义一个均匀整数分布,范围是[1, 100]
    //std::uniform_int_distribution<int> dist(1, 100);
    uniform_int_distribution<int> dist(0, N - 1);
    // 生成随机数
    //int random_number = dist(gen);
  
      
    // RANSAC loop num_iterations 迭代次数
    for (int iteration = 0; iteration < num_iterations; ++iteration) {
        // Randomly select 3 points to form a minimal sample
  
        vector<Vector3d> source_points_temp;
        vector<Vector3d> target_points_temp;
  
        for (int num=0;num<4;num++){ // 最少3个点  取4个点
  
            int idx = dist(gen);// 随机数 
            source_points_temp.push_back(source_points[idx]);
            target_points_temp.push_back(target_points[idx]);
  
        }
  
        // 本次随机计算结果  Perform ICP with these initial parameters
        double scale;
        Matrix3d R;
        Vector3d t;
        API_ICP_3D_3D_sRt(source_points_temp,target_points_temp, scale, R, t);
  
        // Compute error with all points and count inliers
        int inlier_count = 0;
        
          
 
        vector<Vector3d> source_points_inliner;
        vector<Vector3d> target_points_inliner;
 
         
        
        for (int i = 0; i < N; ++i) {  
           Vector3d centered_source2target_points_i = scale*R*centered_source_points[i];//  使用本次随机抽取样本计算的sRt结果 ,对所有原始点的去中心点变换到目标点的中心点 尺度和旋转位置
           double source_norm_i=centered_source2target_points_i.norm(); //去中心化的点长2范数度 norm---sqrt(x^2+y^2+z^2)
           double target_norm_i=centered_target_points[i].norm(); //去中心化的点长2范数度  norm---sqrt(x^2+y^2+z^2)

           //Vector3d source_norm_i=centered_source2target_points_i; //去中心化的点长2范数度 norm---sqrt(x^2+y^2+z^2)
           //Vector3d target_norm_i=centered_target_points[i]; //去中心化的点长2范数度  norm---sqrt(x^2+y^2+z^2)
           // 卡方分布中 3纬度变量 (x1-x1')^2+(y1-y1')^2+(z1-z1')^2 < 7.815  95%    
           // 卡方分布中 2纬度变量 (x1-x1')^2+(y1-y1')^2 < 5.99  95%    
           double error = abs(source_norm_i  - target_norm_i);//计算误差 如果目标点是GNSS的enu坐标 单位是米
 
           if (error < error_threshold) {// 取3米

                
                inlier_count++; // 记录本次内点数目
                // 找出内点集合
                source_points_inliner.push_back(source_points[i]);
                target_points_inliner.push_back(target_points[i]);
           }
 
        }
        
                
        // for (int i = 0; i < N; ++i) {  
        //    Vector3d centered_source2target_points_i = scale*R*centered_source_points[i];//  使用本次随机抽取样本计算的sRt结果 ,对所有原始点的去中心点变换到目标点的中心点 尺度和旋转位置
        //    //double source_norm_i=centered_source2target_points_i.norm(); //去中心化的点长2范数度 norm---sqrt(x^2+y^2+z^2)
        //    //double target_norm_i=centered_target_points[i].norm(); //去中心化的点长2范数度  norm---sqrt(x^2+y^2+z^2)

        //    Vector3d source_norm_i=centered_source2target_points_i; //去中心化的点长2范数度 norm---sqrt(x^2+y^2+z^2)
        //    Vector3d target_norm_i=centered_target_points[i]; //去中心化的点长2范数度  norm---sqrt(x^2+y^2+z^2)
        //    // 卡方分布中 3纬度变量 (x1-x1')^2+(y1-y1')^2+(z1-z1')^2 < 7.815  95%    
        //    // 卡方分布中 2纬度变量 (x1-x1')^2+(y1-y1')^2 < 5.99  95%    
        //    Vector3d error_v3d = (source_norm_i  - target_norm_i);//计算误差 如果目标点是GNSS的enu坐标 单位是米
           
        //    double error=error_v3d.squaredNorm();
        //    cout<< num_iterations <<"  "<<i <<"  "<<error <<endl;

        //    if (error < error_threshold) {// 取3米

                
        //         inlier_count++; // 记录本次内点数目
        //         // 找出内点集合
        //         source_points_inliner.push_back(source_points[i]);
        //         target_points_inliner.push_back(target_points[i]);
        //    }
 
        // }
         
        // 如果本次内点多余历史最好内点数目 本次结果作为最好结果
        if (inlier_count > best_inlier_count) {
 
            API_ICP_3D_3D_sRt(source_points_inliner,target_points_inliner, scale, R, t);// 使用内点再次计算最优的srt
 
            best_inlier_count = inlier_count;
            best_scale = scale;
            best_R = R;
            best_t = t;
            
        }

        if(iteration==num_iterations-1) {
            cout<< "API_3D_3D_sRt  sRt计算 总数点数 "<< N << "  最大内点数: " << best_inlier_count <<endl;        
        }
    }
 
     
 
 
 
}


// The error is computed after transforming the source points with the full sRt.
// Not recommended: t is derived from s and R, which are themselves estimates, so involving t adds further error. Better to transform the centered source points with s,R and compare them against the centered target points.
void API_ransac_ICP_3D_3D_sRt_inliner_sRt(const vector<Vector3d>& source_points,
                     const vector<Vector3d>& target_points,
                     int num_iterations,        //ransac随机抽取验证次数
                     const double error_threshold,    // 误差阈值 3         
                     
                     double& best_scale, Matrix3d& best_R, Vector3d& best_t) {
  
   




 
    // 1 数据准备

    int best_inlier_count = 0; // 最大内点数
    int N = source_points.size();
    //cout<<  "总点数  " << N << endl;

    int num_rancek_temp = N * (N - 1) / 2; // 随机挑选2个点验证 一共多少次
    if(num_rancek_temp<num_iterations) // 限制下最大次数 节省时间 1000次足以找出
    {
        num_iterations=num_rancek_temp;//ransac随机抽取验证次数
    }
      

  
    // 全体点计算质心
    Vector3d centroid_source = Vector3d::Zero();
    Vector3d centroid_target = Vector3d::Zero();
    for(int i=0;i<N;i++){
        centroid_source += source_points[i];
        centroid_target += target_points[i];
    }
    centroid_source /= N;
    centroid_target /= N;
 
    /// 全体点中心化点云
    vector<Vector3d> centered_source_points (N);
    vector<Vector3d> centered_target_points (N);
    for(int i=0;i<N;i++){
        centered_source_points[i]= source_points[i] - centroid_source;
        centered_target_points[i]= target_points[i] - centroid_target;
 
    }
      
 
    // 3使用ransc 找出内点剔除外点,计算更加准确的sRt
    // 使用 std::random_device 生成真随机数种子
    std::random_device rd;
    // 使用梅森旋转算法引擎 mt19937,以 rd() 作为种子
    std::mt19937 gen(rd());
    // 定义一个均匀整数分布,范围是[1, 100]
    //std::uniform_int_distribution<int> dist(1, 100);
    uniform_int_distribution<int> dist(0, N - 1);
    // 生成随机数
    //int random_number = dist(gen);
  
      
    // RANSAC loop num_iterations 迭代次数
    for (int iteration = 0; iteration < num_iterations; ++iteration) {
        // Randomly select 3 points to form a minimal sample
  
        vector<Vector3d> source_points_temp;
        vector<Vector3d> target_points_temp;
  
        for (int num=0;num<4;num++){ // 最少3个点  取4个点
  
            int idx = dist(gen);// 随机数 
            source_points_temp.push_back(source_points[idx]);
            target_points_temp.push_back(target_points[idx]);
  
        }
  
        // 本次随机计算结果  Perform ICP with these initial parameters
        double scale;
        Matrix3d R;
        Vector3d t;
        API_ICP_3D_3D_sRt(source_points_temp,target_points_temp, scale, R, t);
  
        // Compute error with all points and count inliers
        int inlier_count = 0;
        
          
 
        vector<Vector3d> source_points_inliner;
        vector<Vector3d> target_points_inliner;
 
         
        
        for (int i = 0; i < N; ++i) {  
           Vector3d source_target_point_i = scale*R*source_points[i]+t;//  使用本次随机抽取样本计算的sRt结果 ,对所有原始点的去中心点变换到目标点的中心点 尺度和旋转位置
           Vector3d target_point_i=target_points[i]; //去中心化的点长2范数度 norm---sqrt(x^2+y^2+z^2)

           
           //Vector3d source_norm_i=centered_source2target_points_i; //去中心化的点长2范数度 norm---sqrt(x^2+y^2+z^2)
           //Vector3d target_norm_i=centered_target_points[i]; //去中心化的点长2范数度  norm---sqrt(x^2+y^2+z^2)
           // 卡方分布中 3纬度变量 (x1-x1')^2+(y1-y1')^2+(z1-z1')^2 < 7.815  95%    
           // 卡方分布中 2纬度变量 (x1-x1')^2+(y1-y1')^2 < 5.99  95%    
           double error = (source_target_point_i  - target_point_i).norm();//计算误差 如果目标点是GNSS的enu坐标 单位是米
           error=error*error;
 
           if (error < error_threshold) {// 取3米

                
                inlier_count++; // 记录本次内点数目
                // 找出内点集合
                source_points_inliner.push_back(source_points[i]);
                target_points_inliner.push_back(target_points[i]);
           }
 
        }
        
        
         
        // 如果本次内点多余历史最好内点数目 本次结果作为最好结果
        if (inlier_count > best_inlier_count) {
 
            API_ICP_3D_3D_sRt(source_points_inliner,target_points_inliner, scale, R, t);// 使用内点再次计算最优的srt
 
            best_inlier_count = inlier_count;
            best_scale = scale;
            best_R = R;
            best_t = t;
            
        }

        if(iteration==num_iterations-1) {
            cout<< "API_3D_3D_sRt ==  srt计算 总数点数 "<< N << "  最大内点数: " << inlier_count <<endl;        
        }
    }
 
     
 
 
 
}


#endif 
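A hedged usage sketch for the OpenVSLAM side (the lists vo_positions and gnss_enu, and the helper name align_vo_to_gnss, are assumptions for illustration; in the actual code the matched lists come from API_keyframe_to_vector3dList shown further below):

#include "API_3D_3D_sRt.h"

// Hypothetical caller: vo_positions and gnss_enu are matched lists of keyframe
// positions (VO frame) and GNSS ENU coordinates.
void align_vo_to_gnss(const vector<Vector3d>& vo_positions,
                      const vector<Vector3d>& gnss_enu) {
    double s; Matrix3d R; Vector3d t;
    API_ransac_ICP_3D_3D_sRt_inliner_sR(vo_positions, gnss_enu,
                                        1000,   // RANSAC iterations
                                        3.0,    // inlier threshold in meters
                                        s, R, t);
    // Any VO position p can now be mapped into the GNSS ENU frame as s * R * p + t.
}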
  
 

  

API_G2O_BA.h

#ifndef G2OBA_H
#define G2OBA_H

#include "openvslam/type.h"

#include <g2o/core/base_vertex.h>
#include <g2o/core/base_binary_edge.h>
#include <g2o/core/base_unary_edge.h>
#include <g2o/types/slam3d/se3quat.h>




#include "openvslam/optimize/g2o/se3/shot_vertex.h"


#include <g2o/core/robust_kernel_impl.h>



#include <iostream>

using namespace std;



namespace openvslam {
namespace optimize {
namespace g2o {
namespace se3 {

// class shot_vertex final : public ::g2o::BaseVertex<6, ::g2o::SE3Quat> {
// public:
//     EIGEN_MAKE_ALIGNED_OPERATOR_NEW

//     shot_vertex()
//     : BaseVertex<6, ::g2o::SE3Quat>() {}

//     bool read(std::istream& is) {
//         Vec7_t estimate;
//         for (unsigned int i = 0; i < 7; ++i) {
//             is >> estimate(i);
//         }
//         ::g2o::SE3Quat g2o_cam_pose_wc;
//         g2o_cam_pose_wc.fromVector(estimate);
//         setEstimate(g2o_cam_pose_wc.inverse());
//         return true;
//     }

//     bool write(std::ostream& os) const {
//         ::g2o::SE3Quat g2o_cam_pose_wc(estimate().inverse());
//         for (unsigned int i = 0; i < 7; ++i) {
//             os << g2o_cam_pose_wc[i] << " ";
//         }
//         return os.good();
//     }

//     void setToOriginImpl() override {
//         _estimate = ::g2o::SE3Quat();
//     }

//     void oplusImpl(const number_t* update_) override {
//         Eigen::Map<const Vec6_t> update(update_);
//         setEstimate(::g2o::SE3Quat::exp(update) * estimate());
//     }
// };



/*

Matrix3D Rbc=toEigenMatrix3d(se3bc.R());
Vector3D tbc=toG2oVector3D(se3bc.tvec);
g2o::SE3Quat Tbc=g2o::SE3Quat(Rbc, tbc);
使用SE3Quat成员函数to_homogeneous_matrix()将SE3Quat->Eigen::Matrix<double,4,4>
Eigen::MatrixXd toEigenMatrixXd(const cv::Mat &cvMat) 
{
    Eigen::MatrixXd eigenMat;
    eigenMat.resize(cvMat.rows, cvMat.cols);
    for (int i=0; i<cvMat.rows; i++)
        for (int j=0; j<cvMat.cols; j++)
            eigenMat(i,j) = cvMat.at<float>(i,j);

    return eigenMat;
}

*/

/// g2o edge  观测值维度3 类型 Vector3d  优化节点第一个  VertexPose
class GNSSConstraint_sRt_Edge : public ::g2o::BaseUnaryEdge<3, Eigen::Vector3d, openvslam::optimize::g2o::se3::shot_vertex> {
public:
  EIGEN_MAKE_ALIGNED_OPERATOR_NEW;
 
  GNSSConstraint_sRt_Edge(Eigen::Matrix4d T_VO_to_GNSS_ENU ) : _T_VO_to_GNSS_ENU(T_VO_to_GNSS_ENU) {}
 
  virtual void computeError() override {
        const openvslam::optimize::g2o::se3::shot_vertex* v = static_cast<const shot_vertex*>(_vertices[0]);
        ::g2o::SE3Quat cam_pose = v->estimate(); // optimized estimate of the pose vertex = g2o_cam_pose_cw, world-to-camera
        // In openvslam's g2o shot_vertex.cc the estimate is g2o_cam_pose_wc.inverse(), i.e. the world-to-camera transform.
        // For computing the GNSS error alone it would be more convenient if the vertex estimate were the global pose directly,
        // i.e. g2o_cam_pose_wc (camera-to-world), so it could be subtracted from the measurement right away.
        // openvslam chooses this convention to make the reprojection error easy to evaluate: with estimate = g2o_cam_pose_cw
        // (world-to-camera), world-frame landmarks are mapped straight into camera-frame 3D points.
        // g2o_cam_pose_wc (camera-to-world) = g2o_cam_pose_cw.inverse()
        // g2o_cam_pose_cw (world-to-camera) = g2o_cam_pose_wc.inverse()

        const  Eigen::Matrix3d R_cw = cam_pose.rotation().toRotationMatrix();
        const  Eigen::Vector3d t_cw = cam_pose.translation();
        // T = [R t]    T^-1 = [R^T  -R^T*t]
        //     [0 1]           [0       1  ]
        const  Eigen::Matrix3d R_wc = R_cw.transpose() ;
        const  Eigen::Vector3d t_wc = -R_cw.transpose() * t_cw; // camera position x y z in the world frame

        Eigen::Matrix3d R_cw_to_gw =_T_VO_to_GNSS_ENU.block<3, 3>(0, 0); // includes the scale
        Eigen::Vector3d t_cw_to_gw = _T_VO_to_GNSS_ENU.block<3, 1>(0, 3);// 


        Eigen::Vector3d t_gw = R_cw_to_gw*t_wc+t_cw_to_gw;// map t_wc (the camera position in the VO ENU frame) into the GNSS ENU frame
     
        _error = t_gw - _measurement; //_measurement is the GNSS ground truth in ENU (x y z) relative to the chosen reference point, set via setMeasurement(), Vector3d
       
  }

//  virtual void linearizeOplus() override {

//     ::g2o::BaseUnaryEdge<3, Eigen::Vector3d, openvslam::optimize::g2o::se3::shot_vertex>::linearizeOplus();// 继承自动求导

//  }
 
  bool read(istream &in) { return false; }
 
  bool write(ostream &out) const { return false; }
 
protected:
  Eigen::Matrix4d _T_VO_to_GNSS_ENU; // VO-to-GNSS-ENU transform, treated as fixed (ground truth)
};




} // namespace se3
} // namespace g2o
} // namespace optimize
} // namespace openvslam
#endif 
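A minimal sketch of how this edge might be attached to a g2o optimizer; everything here (the already-configured optimizer, the keyfrm_vtx pointer, T_vo_to_enu, gnss_enu and the helper name) is an assumption for illustration, not code from the project. The Huber delta uses the 3-DoF chi-square 95% bound (7.815) noted elsewhere in this post:

#include <cmath>
#include <Eigen/Dense>
#include <g2o/core/sparse_optimizer.h>
#include <g2o/core/robust_kernel_impl.h>
#include "API_G2O_BA.h"

// Hypothetical helper: attach one GNSS constraint edge to an already configured optimizer.
// keyfrm_vtx must already have been added to the optimizer as a pose vertex.
void add_gnss_edge(::g2o::SparseOptimizer& optimizer,
                   openvslam::optimize::g2o::se3::shot_vertex* keyfrm_vtx,
                   const Eigen::Matrix4d& T_vo_to_enu,
                   const Eigen::Vector3d& gnss_enu) {
    auto* edge = new openvslam::optimize::g2o::se3::GNSSConstraint_sRt_Edge(T_vo_to_enu);
    edge->setVertex(0, keyfrm_vtx);                     // the pose vertex being constrained
    edge->setMeasurement(gnss_enu);                     // GNSS position in the ENU frame
    edge->setInformation(Eigen::Matrix3d::Identity());  // isotropic weight; tune to the GNSS accuracy
    auto* huber = new ::g2o::RobustKernelHuber();
    huber->setDelta(std::sqrt(7.815));                  // chi-square 95% bound for a 3-DoF residual
    edge->setRobustKernel(huber);
    optimizer.addEdge(edge);
}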

  

API_G2O_BA.cpp

#ifndef G2OBA_CPP
#define G2OBA_CPP


#include "API_G2O_BA.h"


#endif 

  

 

OpenVSLAM caller code

local_bundle_adjuster.h

#ifndef OPENVSLAM_OPTIMIZE_LOCAL_BUNDLE_ADJUSTER_H
#define OPENVSLAM_OPTIMIZE_LOCAL_BUNDLE_ADJUSTER_H

#include <Eigen/Core>

namespace openvslam {

namespace data {
class keyframe;
class map_database;
} // namespace data

namespace optimize {

class local_bundle_adjuster {
public:
    /**
     * Constructor
     * @param map_db
     * @param num_first_iter
     * @param num_second_iter
     */
    explicit local_bundle_adjuster(const unsigned int num_first_iter = 5,
                                   const unsigned int num_second_iter = 10);

    /**
     * Destructor
     */
    virtual ~local_bundle_adjuster() = default;

    /**
     * Perform optimization
     * @param curr_keyfrm
     * @param force_stop_flag
     */
    //void optimize(data::keyframe* curr_keyfrm, bool* const force_stop_flag) const;
    void optimize(data::keyframe* curr_keyfrm, bool* const force_stop_flag,data::map_database* map_db_) const;
    void optimize_with_gnss(data::keyframe* curr_keyfrm, bool* const force_stop_flag, data::map_database* map_db_, Eigen::Matrix4d  *T_VO_to_GNSS_ENU_) const;

    void optimize_all_gnss(bool* const force_stop_flag,data::map_database* map_db_ ) const ;

private:
    //! number of iterations of first optimization
    const unsigned int num_first_iter_;
    //! number of iterations of second optimization
    const unsigned int num_second_iter_;
};

} // namespace optimize
} // namespace openvslam

#endif // OPENVSLAM_OPTIMIZE_LOCAL_BUNDLE_ADJUSTER_H
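For reference, a minimal sketch of how the entry points declared above are intended to be called. It assumes it is compiled inside the modified openvslam tree (so data::keyframe and data::map_database come from the mapping module); run_local_ba_with_gnss and its arguments are illustrative names, not upstream openvslam API.

#include <Eigen/Core>

#include "openvslam/optimize/local_bundle_adjuster.h"

namespace openvslam {

// Illustrative helper, not upstream API: run the GNSS-constrained local BA for the current keyframe.
void run_local_ba_with_gnss(data::keyframe* curr_keyfrm,
                            data::map_database* map_db,
                            bool* force_stop_flag) {
    // 5 iterations for the first pass, 10 after outlier rejection (the defaults declared above).
    optimize::local_bundle_adjuster lba(5, 10);

    // Output slot for the estimated VO -> GNSS ENU similarity transform [sR | t; 0 0 0 1].
    Eigen::Matrix4d T_VO_to_GNSS_ENU = Eigen::Matrix4d::Identity();

    // Local BA with GNSS unary edges; also refreshes T_VO_to_GNSS_ENU from all keyframes.
    lba.optimize_with_gnss(curr_keyfrm, force_stop_flag, map_db, &T_VO_to_GNSS_ENU);

    // The plain local BA (no GNSS edges) is still available:
    // lba.optimize(curr_keyfrm, force_stop_flag, map_db);
}

} // namespace openvslam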

  

local_bundle_adjuster.cc

#include "openvslam/data/keyframe.h"
#include "openvslam/data/landmark.h"
#include "openvslam/data/map_database.h"
#include "openvslam/optimize/local_bundle_adjuster.h"
#include "openvslam/optimize/g2o/landmark_vertex_container.h"
#include "openvslam/optimize/g2o/se3/shot_vertex_container.h"
#include "openvslam/optimize/g2o/se3/reproj_edge_wrapper.h"
#include "openvslam/util/converter.h"

#include <unordered_map>

#include <Eigen/StdVector>
#include <g2o/core/solver.h>
#include <g2o/core/block_solver.h>
#include <g2o/core/sparse_optimizer.h>
#include <g2o/core/robust_kernel_impl.h>
#include <g2o/types/sba/types_six_dof_expmap.h>
#include <g2o/solvers/eigen/linear_solver_eigen.h>
#include <g2o/solvers/dense/linear_solver_dense.h>
#include <g2o/solvers/csparse/linear_solver_csparse.h>
#include <g2o/core/optimization_algorithm_levenberg.h>

// 自己添加的优化
#include "openvslam/gnss_src/API_3D_3D_sRt.h"
#include "openvslam/gnss_src/API_G2O_BA.h"
#include "openvslam/gnss_src/API_GNSS_ENU_TXT_YAML.h"


namespace openvslam {
namespace optimize {

local_bundle_adjuster::local_bundle_adjuster(const unsigned int num_first_iter,
                                             const unsigned int num_second_iter)
    : num_first_iter_(num_first_iter), num_second_iter_(num_second_iter) {}

/*
Extract camera positions (in the VO world frame) and the paired GNSS ENU coordinates from a set of keyframes.
all_dbkeyfrms: the keyframe set to read from.
*/

void API_keyframe_to_vector3dList(std::vector< data::keyframe*> &all_dbkeyfrms , vector<Eigen::Vector3d> &source_points_vo,vector<Eigen::Vector3d> &target_points_gnss)
{

    //std::vector< data::keyframe*> all_dbkeyfrms = map_db_->get_all_keyframes();
    auto cam_info = static_cast<camera::perspective*>(all_dbkeyfrms[0]->camera_);
    std::sort(all_dbkeyfrms.begin(), all_dbkeyfrms.end(), [&](data::keyframe *keyfrm_1, data::keyframe *keyfrm_2)
            { return *keyfrm_1 < *keyfrm_2; });

    for (int i=0;i< all_dbkeyfrms.size();i++) {
        //auto local_keyfrm = id_local_keyfrm_pair.second;
        data::keyframe* local_keyfrm=all_dbkeyfrms[i];

        if (!local_keyfrm) {
            continue;
        }
        if (local_keyfrm->will_be_erased()) {
            continue;
        }

        Eigen::Matrix4d cam_pose_cw_i = local_keyfrm->get_cam_pose();
        const Eigen::Matrix4d cam_pose_wc_i = cam_pose_cw_i.inverse();
        Eigen::Vector3d cam_pose_i_t =  cam_pose_wc_i.block<3, 1>(0, 3);


        gnss_data gnss_data_i = local_keyfrm->Gnss_data;
        bool can_use=local_keyfrm->Gnss_data.can_use;

        if(!can_use){ // skip keyframes without a usable GNSS fix
            continue;           
        }

        Eigen::Vector3d gnss_data_i_xyz(gnss_data_i.x,gnss_data_i.y,gnss_data_i.z);

        //gnss_data_i_xyz[2]=0;// 固定高度
      
        source_points_vo.push_back(cam_pose_i_t);
        target_points_gnss.push_back(gnss_data_i_xyz);
  
    }
   //cout<< "从关键帧加载数据   总关键帧 "<<  all_dbkeyfrms.size()<< "  有效且有GNSS的关键帧  " <<  source_points_vo.size() <<endl;

}




// Collect the first-order covisibility keyframes of the current keyframe
void Get_first_covisibilities_Keyframes(openvslam::data::keyframe* curr_keyfrm,std::unordered_map<unsigned int, data::keyframe*> &local_keyfrms){

    //std::unordered_map<unsigned int, data::keyframe*> local_keyfrms;

    const auto curr_covisibilities = curr_keyfrm->graph_node_->get_covisibilities();
    for (auto local_keyfrm : curr_covisibilities) {
        if (!local_keyfrm) {
            continue;
        }
        if (local_keyfrm->will_be_erased()) {
            continue;
        }

        local_keyfrms[local_keyfrm->id_] = local_keyfrm;
    }

}



// Collect first- and second-order covisibility keyframes (the covisible keyframes of the covisible keyframes) to enlarge the optimization window.
// Note: unlike keyframes gathered through shared landmarks, a second-order covisibility keyframe does not necessarily share any landmark with the current keyframe.
void Get_second_covisibilities_Keyframes(openvslam::data::keyframe* curr_keyfrm,std::unordered_map<unsigned int, data::keyframe*> &local_keyfrms){

    //std::unordered_map<unsigned int, data::keyframe*> local_keyfrms;

    const auto curr_covisibilities = curr_keyfrm->graph_node_->get_covisibilities();
    for (auto local_keyfrm : curr_covisibilities) {
        if (!local_keyfrm) {
            continue;
        }
        if (local_keyfrm->will_be_erased()) {
            continue;
        }


        local_keyfrms[local_keyfrm->id_] = local_keyfrm;
        const auto second_covisibilities = local_keyfrm->graph_node_->get_covisibilities();

        for (auto second_local_keyfrm : second_covisibilities) {
            if (!second_local_keyfrm) {
                continue;
            }
            if (second_local_keyfrm->will_be_erased()) {
                continue;
            }

            if (local_keyfrms.count(second_local_keyfrm->id_))// 如果已经存在
            {
                continue;
            }

            local_keyfrms[second_local_keyfrm->id_] = second_local_keyfrm;
            
        }

    }

}





void local_bundle_adjuster::optimize(openvslam::data::keyframe* curr_keyfrm, bool* const force_stop_flag,data::map_database* map_db_) const {
    // 1. local/fixed keyframes, local landmarksを集計する

    // correct local keyframes of the current keyframe
    std::unordered_map<unsigned int, data::keyframe*> local_keyfrms;

    local_keyfrms[curr_keyfrm->id_] = curr_keyfrm;
    const auto curr_covisibilities = curr_keyfrm->graph_node_->get_covisibilities();
    for (auto local_keyfrm : curr_covisibilities) {
        if (!local_keyfrm) {
            continue;
        }
        if (local_keyfrm->will_be_erased()) {
            continue;
        }

        local_keyfrms[local_keyfrm->id_] = local_keyfrm;
    }

    // correct local landmarks seen in local keyframes
    std::unordered_map<unsigned int, data::landmark*> local_lms;




    for (auto local_keyfrm : local_keyfrms) {
        const auto landmarks = local_keyfrm.second->get_landmarks();
        for (auto local_lm : landmarks) {
            if (!local_lm) {
                continue;
            }
            if (local_lm->will_be_erased()) {
                continue;
            }

            // 重複を避ける
            if (local_lms.count(local_lm->id_)) {
                continue;
            }

            local_lms[local_lm->id_] = local_lm;
        }
    }

    // fixed keyframes: keyframes which observe local landmarks but which are NOT in local keyframes
    std::unordered_map<unsigned int, data::keyframe*> fixed_keyfrms;

    for (auto local_lm : local_lms) {
        const auto observations = local_lm.second->get_observations();
        for (auto& obs : observations) {
            auto fixed_keyfrm = obs.first;
            if (!fixed_keyfrm) {
                continue;
            }
            if (fixed_keyfrm->will_be_erased()) {
                continue;
            }

            // local keyframesに属しているときは追加しない
            if (local_keyfrms.count(fixed_keyfrm->id_)) {
                continue;
            }

            // 重複を避ける
            if (fixed_keyfrms.count(fixed_keyfrm->id_)) {
                continue;
            }

            fixed_keyfrms[fixed_keyfrm->id_] = fixed_keyfrm;
        }
    }

    // 2. optimizerを構築

    auto linear_solver = ::g2o::make_unique<::g2o::LinearSolverCSparse<::g2o::BlockSolver_6_3::PoseMatrixType>>();
    auto block_solver = ::g2o::make_unique<::g2o::BlockSolver_6_3>(std::move(linear_solver));
    auto algorithm = new ::g2o::OptimizationAlgorithmLevenberg(std::move(block_solver));

    ::g2o::SparseOptimizer optimizer;
    optimizer.setAlgorithm(algorithm);

    if (force_stop_flag) {
        optimizer.setForceStopFlag(force_stop_flag);
    }

    // 3. keyframeをg2oのvertexに変換してoptimizerにセットする

    // shot vertexのcontainer
    g2o::se3::shot_vertex_container keyfrm_vtx_container(0, local_keyfrms.size() + fixed_keyfrms.size());
    // vertexに変換されたkeyframesを保存しておく
    std::unordered_map<unsigned int, data::keyframe*> all_keyfrms;



    // local keyframes 等待优化的帧
    for (auto& id_local_keyfrm_pair : local_keyfrms) {
        auto local_keyfrm = id_local_keyfrm_pair.second;

        all_keyfrms.emplace(id_local_keyfrm_pair);
        auto keyfrm_vtx = keyfrm_vtx_container.create_vertex(local_keyfrm, local_keyfrm->id_ == 0);
        optimizer.addVertex(keyfrm_vtx);
    }

    // fixed keyframes: observe the local landmarks but are not local keyframes; kept constant during optimization
    for (auto& id_fixed_keyfrm_pair : fixed_keyfrms) {
        auto fixed_keyfrm = id_fixed_keyfrm_pair.second;

        all_keyfrms.emplace(id_fixed_keyfrm_pair);
        auto keyfrm_vtx = keyfrm_vtx_container.create_vertex(fixed_keyfrm, true);
        optimizer.addVertex(keyfrm_vtx);
    }

    // 4. keyframeとlandmarkのvertexをreprojection edgeで接続する

    // landmark vertexのcontainer
    g2o::landmark_vertex_container lm_vtx_container(keyfrm_vtx_container.get_max_vertex_id() + 1, local_lms.size());

    // reprojection edgeのcontainer
    using reproj_edge_wrapper = g2o::se3::reproj_edge_wrapper<data::keyframe>;
    std::vector<reproj_edge_wrapper> reproj_edge_wraps;
    reproj_edge_wraps.reserve(all_keyfrms.size() * local_lms.size());

    // 有意水準5%のカイ2乗値
    // 自由度n=2
    constexpr float chi_sq_2D = 5.99146;
    const float sqrt_chi_sq_2D = std::sqrt(chi_sq_2D);
    // 自由度n=3
    constexpr float chi_sq_3D = 7.81473;
    const float sqrt_chi_sq_3D = std::sqrt(chi_sq_3D);

    for (auto& id_local_lm_pair : local_lms) {
        auto local_lm = id_local_lm_pair.second;

        // landmarkをg2oのvertexに変換してoptimizerにセットする
        auto lm_vtx = lm_vtx_container.create_vertex(local_lm, false);
        optimizer.addVertex(lm_vtx);

        const auto observations = local_lm->get_observations();
        for (const auto& obs : observations) {
            auto keyfrm = obs.first;
            auto idx = obs.second;
            if (!keyfrm) {
                continue;
            }
            if (keyfrm->will_be_erased()) {
                continue;
            }

            const auto keyfrm_vtx = keyfrm_vtx_container.get_vertex(keyfrm);
            const auto& undist_keypt = keyfrm->undist_keypts_.at(idx);
            const float x_right = keyfrm->stereo_x_right_.at(idx);
            const float inv_sigma_sq = keyfrm->inv_level_sigma_sq_.at(undist_keypt.octave);
            const auto sqrt_chi_sq = (keyfrm->camera_->setup_type_ == camera::setup_type_t::Monocular)
                                         ? sqrt_chi_sq_2D
                                         : sqrt_chi_sq_3D;
            auto reproj_edge_wrap = reproj_edge_wrapper(keyfrm, keyfrm_vtx, local_lm, lm_vtx,
                                                        idx, undist_keypt.pt.x, undist_keypt.pt.y, x_right,
                                                        inv_sigma_sq, sqrt_chi_sq);
            reproj_edge_wraps.push_back(reproj_edge_wrap);
            optimizer.addEdge(reproj_edge_wrap.edge_);
        }
    }

    // 5. 1回目の最適化を実行

    if (force_stop_flag) {
        if (*force_stop_flag) {
            return;
        }
    }

    optimizer.initializeOptimization();
    optimizer.optimize(num_first_iter_);

    // 6. アウトライア除去をして2回目の最適化を実行

    bool run_robust_BA = true;

    if (force_stop_flag) {
        if (*force_stop_flag) {
            run_robust_BA = false;
        }
    }

    if (run_robust_BA) {
        for (auto& reproj_edge_wrap : reproj_edge_wraps) {
            auto edge = reproj_edge_wrap.edge_;

            auto local_lm = reproj_edge_wrap.lm_;
            if (local_lm->will_be_erased()) {
                continue;
            }

            if (reproj_edge_wrap.is_monocular_) {
                if (chi_sq_2D < edge->chi2() || !reproj_edge_wrap.depth_is_positive()) {
                    reproj_edge_wrap.set_as_outlier();
                }
            }
            else {
                if (chi_sq_3D < edge->chi2() || !reproj_edge_wrap.depth_is_positive()) {
                    reproj_edge_wrap.set_as_outlier();
                }
            }

            edge->setRobustKernel(nullptr);
        }

        optimizer.initializeOptimization();
        optimizer.optimize(num_second_iter_);
    }

    // 7. アウトライアを集計する

    std::vector<std::pair<data::keyframe*, data::landmark*>> outlier_observations;
    outlier_observations.reserve(reproj_edge_wraps.size());

    for (auto& reproj_edge_wrap : reproj_edge_wraps) {
        auto edge = reproj_edge_wrap.edge_;

        auto local_lm = reproj_edge_wrap.lm_;
        if (local_lm->will_be_erased()) {
            continue;
        }

        if (reproj_edge_wrap.is_monocular_) {
            if (chi_sq_2D < edge->chi2() || !reproj_edge_wrap.depth_is_positive()) {
                outlier_observations.emplace_back(std::make_pair(reproj_edge_wrap.shot_, reproj_edge_wrap.lm_));
            }
        }
        else {
            if (chi_sq_3D < edge->chi2() || !reproj_edge_wrap.depth_is_positive()) {
                outlier_observations.emplace_back(std::make_pair(reproj_edge_wrap.shot_, reproj_edge_wrap.lm_));
            }
        }
    }

    // 8. 情報を更新

    {
        std::lock_guard<std::mutex> lock(data::map_database::mtx_database_);

        if (!outlier_observations.empty()) {
            for (auto& outlier_obs : outlier_observations) {
                auto keyfrm = outlier_obs.first;
                auto lm = outlier_obs.second;
                keyfrm->erase_landmark(lm);
                lm->erase_observation(keyfrm);
            }
        }

        for (auto id_local_keyfrm_pair : local_keyfrms) {
            auto local_keyfrm = id_local_keyfrm_pair.second;

            auto keyfrm_vtx = keyfrm_vtx_container.get_vertex(local_keyfrm);
            local_keyfrm->set_cam_pose(keyfrm_vtx->estimate());
        }




        for (auto id_local_lm_pair : local_lms) {
            auto local_lm = id_local_lm_pair.second;

            auto lm_vtx = lm_vtx_container.get_vertex(local_lm);
            local_lm->set_pos_in_world(lm_vtx->estimate());
            local_lm->update_normal_and_depth();
        }
    }






   // 全体关键帧位姿参与sRt计算 ============================================================
    std::vector< data::keyframe*> all_dbkeyfrms = map_db_->get_all_keyframes();


//    std::sort(all_keyfrms.begin(), all_keyfrms.end(), [&](data::keyframe *keyfrm_1, data::keyframe *keyfrm_2)
//            { return *keyfrm_1 < *keyfrm_2; });

    vector<Eigen::Vector3d> source_points_vo;
    vector<Eigen::Vector3d> target_points_gnss;

    Eigen::Matrix4d T_VO_to_GNSS_ENU = Eigen::Matrix4d::Identity();
    bool use_gnss_optimize=0;
    // 计算sRT 变换关系
    if(all_dbkeyfrms.size()>4){

        for (int i=0;i< all_dbkeyfrms.size();i++) {
            //auto local_keyfrm = id_local_keyfrm_pair.second;
            data::keyframe* local_keyfrm=all_dbkeyfrms[i];

            if (!local_keyfrm) {
                continue;
            }
            if (local_keyfrm->will_be_erased()) {
                continue;
            }


            Eigen::Matrix4d cam_pose_cw_i = local_keyfrm->get_cam_pose();
            const Eigen::Matrix4d cam_pose_wc_i = cam_pose_cw_i.inverse();
            Eigen::Vector3d cam_pose_i_t =  cam_pose_wc_i.block<3, 1>(0, 3);

            gnss_data gnss_data_i = local_keyfrm->Gnss_data;
            bool can_use=local_keyfrm->Gnss_data.can_use;

            if(!can_use){ // skip keyframes without a usable GNSS fix
                continue;           
            }

            Eigen::Vector3d gnss_data_i_xyz(gnss_data_i.x,gnss_data_i.y,gnss_data_i.z);

            //gnss_data_i_xyz[2]=0;// 固定高度

            source_points_vo.push_back(cam_pose_i_t);
            target_points_gnss.push_back(gnss_data_i_xyz);
            //std::cout<<  "cam_pose_i_t : " <<cam_pose_i_t[0] <<  "  " << cam_pose_i_t[1] <<  "  "<<cam_pose_i_t[2] <<  "  "<<std::endl;
            //std::cout<<  "gnss_data_i_xyz : " <<gnss_data_i_xyz[0]<<  "  "  <<gnss_data_i_xyz[1]<<  "  " <<gnss_data_i_xyz[2]<<  "  " << std::endl;
        }
         
        
        if(source_points_vo.size()>4 && source_points_vo.size()== target_points_gnss.size() ){
            //std::cout<<  "SRT计算 总GNSS点数" << source_points_vo.size()<< "  总关键帧: "<<all_dbkeyfrms.size() <<std::endl;
            //Eigen::Matrix4d T_VO_to_GNSS_ENU = Eigen::Matrix4d::Identity();
            double s;
            Eigen::Matrix3d R;
            Eigen::Vector3d t; 
            double error_th=7.841;
            API_ransac_ICP_3D_3D_sRt_inliner_sR(source_points_vo,target_points_gnss,
                                    1000,        // number of RANSAC iterations
                                    error_th,    // inlier error threshold
                                    s, R, t) ;
            T_VO_to_GNSS_ENU.block<3, 3>(0, 0) = s * R;
            T_VO_to_GNSS_ENU.block<3, 1>(0, 3) = t;
            //std::cout<< T_VO_to_GNSS_ENU << std::endl;

            vector<Eigen::Vector3d> source_points_vo_out;

            for( int i=0;i<source_points_vo.size();i++){
                source_points_vo[i]= s*R*source_points_vo[i]+t;
            }


            double rmse=cal_rmse_t_v3s_v3s(target_points_gnss,source_points_vo);
            cout <<"当前帧  "<< curr_keyfrm->id_ <<" 纯局部VO优化,  整体关键帧srt计算 RMSE = " << rmse << " 参与计算总点数  " << source_points_vo.size() << "   error_th   " << error_th<< endl;

        }

    }





}




void local_bundle_adjuster::optimize_with_gnss(openvslam::data::keyframe* curr_keyfrm, bool* const force_stop_flag,data::map_database* map_db_, Eigen::Matrix4d  *T_VO_to_GNSS_ENU_ ) const {

    // 1. local/fixed keyframes, local landmarksを集計する


    // 全体关键帧位姿参与sRt计算 开始 ============================================================

    // 1-1获取全体数据点对
    std::vector< data::keyframe*> all_dbkeyfrms = map_db_->get_all_keyframes();
    auto cam_info = static_cast<camera::perspective*>(all_dbkeyfrms[0]->camera_);
    //  cout <<local_keyfrm->id_ << " 相机内参  " 
    //  << "fx: "<<  cam_info->fx_ 
    //  << "fy: "<<  cam_info->fy_ 
    //  << "cx: "<<  cam_info->cx_ 
    //  << "cy: "<<  cam_info->cy_ 
    //  << endl;

    vector<Eigen::Vector3d> source_all_points_vo;
    vector<Eigen::Vector3d> target_all_points_gnss;

    //从关键帧抽取3D点
    API_keyframe_to_vector3dList(all_dbkeyfrms ,source_all_points_vo ,target_all_points_gnss);
    //cout<< "从关键帧加载数据   总关键帧 "<<  all_dbkeyfrms.size()<< "  有效且有GNSS的关键帧  " <<  source_all_points_vo.size() <<endl;

    // 1-2根据全体数据点对计算srt
    Eigen::Matrix4d T_VO_to_GNSS_ENU = Eigen::Matrix4d::Identity();
    double scale=1;
    Eigen::Matrix3d R = Eigen::Matrix3d::Identity();
    Eigen::Vector3d t = Eigen::Vector3d::Zero();
    double all_rmse = -1;
    bool use_gnss_optimize=0; // flag: enough usable GNSS/VO point pairs (more than 4) to estimate sRt

    
    if(source_all_points_vo.size()>4 && source_all_points_vo.size()== target_all_points_gnss.size() ){
        API_ransac_ICP_3D_3D_sRt_inliner_sR(source_all_points_vo,target_all_points_gnss,
                                1000,        // number of RANSAC iterations
                                7.815,       // inlier error threshold (chi-square 95% quantile, 3 DOF)
                                scale, R, t) ;
        T_VO_to_GNSS_ENU.block<3, 3>(0, 0) = scale * R;
        T_VO_to_GNSS_ENU.block<3, 1>(0, 3) = t;
        *T_VO_to_GNSS_ENU_=T_VO_to_GNSS_ENU;
        //std::cout<< T_VO_to_GNSS_ENU << std::endl;
        use_gnss_optimize = 1;
    }
    else{
        use_gnss_optimize = 0;
    }
   
    if(use_gnss_optimize==1){
        // 1-3全体数据点计算变换后的误差
        for( int i=0;i<source_all_points_vo.size();i++){
            source_all_points_vo[i]= scale*R*source_all_points_vo[i]+t;
        }
        all_rmse=cal_rmse_t_v3s_v3s(target_all_points_gnss,source_all_points_vo);
        cout << " optimize_with_gnss 全局sRt更新前误差 RMSE = " << all_rmse << " 参与计算总点数  " << source_all_points_vo.size() <<  " 尺度 " << scale  <<"  内参fx  "<< cam_info->fx_<<endl;
    }
        
   // 全体关键帧位姿参与sRt计算 结束 ============================================================



    // 1-1 要纠正的局部关键帧
    //std::vector< data::keyframe*> all_dbkeyfrms = map_db_->get_all_keyframes();
    std::unordered_map<unsigned int, data::keyframe*> local_keyfrms;
    local_keyfrms[curr_keyfrm->id_] = curr_keyfrm;


    // 获取一级共视关键帧
    const auto curr_covisibilities = curr_keyfrm->graph_node_->get_covisibilities();
    for (auto local_keyfrm : curr_covisibilities) {
        if (!local_keyfrm) {
            continue;
        }
        if (local_keyfrm->will_be_erased()) {
            continue;
        }

        local_keyfrms[local_keyfrm->id_] = local_keyfrm;
    }

    // if(level==1){
    //      Get_first_covisibilities_Keyframes(curr_keyfrm,local_keyfrms);// 获取一级共视关键帧
    // }
    // else if(level==2){
    //     Get_second_covisibilities_Keyframes(curr_keyfrm,local_keyfrms);// 获取一级+二级共视关键帧  误差过大 扩大优化范围
    // }

    // 1-2 要纠正的局部地图点  correct local landmarks seen in local keyframes
    std::unordered_map<unsigned int, data::landmark*> local_lms;
    for (auto local_keyfrm : local_keyfrms) {
        const auto landmarks = local_keyfrm.second->get_landmarks();
        for (auto local_lm : landmarks) {
            if (!local_lm) {
                continue;
            }
            if (local_lm->will_be_erased()) {
                continue;
            }

            // 重複を避ける
            if (local_lms.count(local_lm->id_)) {
                continue;
            }

            local_lms[local_lm->id_] = local_lm;
        }
    }

    // 1-3 fixed keyframes: keyframes that observe the local landmarks but are NOT local keyframes (typically farther from the current keyframe)
    std::unordered_map<unsigned int, data::keyframe*> fixed_keyfrms;

    for (auto local_lm : local_lms) {
        const auto observations = local_lm.second->get_observations();
        for (auto& obs : observations) {
            auto fixed_keyfrm = obs.first;
            if (!fixed_keyfrm) {
                continue;
            }
            if (fixed_keyfrm->will_be_erased()) {
                continue;
            }

            // local keyframes  避免重复
            if (local_keyfrms.count(fixed_keyfrm->id_)) {
                continue;
            }

            //   避免重复
            if (fixed_keyfrms.count(fixed_keyfrm->id_)) {
                continue;
            }

            fixed_keyfrms[fixed_keyfrm->id_] = fixed_keyfrm;
        }
    }

    // 2. optimizer 构造优化器

    auto linear_solver = ::g2o::make_unique<::g2o::LinearSolverCSparse<::g2o::BlockSolver_6_3::PoseMatrixType>>();
    auto block_solver = ::g2o::make_unique<::g2o::BlockSolver_6_3>(std::move(linear_solver));
    auto algorithm = new ::g2o::OptimizationAlgorithmLevenberg(std::move(block_solver));

    ::g2o::SparseOptimizer optimizer;
    optimizer.setAlgorithm(algorithm);

    if (force_stop_flag) {
        optimizer.setForceStopFlag(force_stop_flag);
    }

    // 3. keyframeをg2oのvertexに変換してoptimizerにセットする

    // shot vertexのcontainer
    g2o::se3::shot_vertex_container keyfrm_vtx_container(0, local_keyfrms.size() + fixed_keyfrms.size());
    // vertexに変換されたkeyframesを保存しておく
    std::unordered_map<unsigned int, data::keyframe*> all_keyfrms;


    // local keyframes 等待优化的帧
    for (auto& id_local_keyfrm_pair : local_keyfrms) {
        auto local_keyfrm = id_local_keyfrm_pair.second;
         
        //==================== 添加VO位姿优化点================== 
        all_keyfrms.emplace(id_local_keyfrm_pair);
        auto keyfrm_vtx = keyfrm_vtx_container.create_vertex(local_keyfrm, local_keyfrm->id_ == 0);
        optimizer.addVertex(keyfrm_vtx);
    }

    // fixed keyframes: observe the local landmarks but are not local keyframes
    for (auto& id_fixed_keyfrm_pair : fixed_keyfrms) {
        auto fixed_keyfrm = id_fixed_keyfrm_pair.second;
        all_keyfrms.emplace(id_fixed_keyfrm_pair);

        //gnss_data gnss_data_i = fixed_keyfrm->Gnss_data;
        //==================== 添加VO位姿优化点================== 
        
        /**
         * Second-order covisibility keyframes of the current frame (covisible keyframes of its covisible
         * keyframes) do not necessarily share landmarks with the current frame. Keyframes gathered via
         * shared landmarks are graded by the number of common landmarks (roughly >40: first order, fewer: second order).
         */
        //if(level==1){
            auto keyfrm_vtx = keyfrm_vtx_container.create_vertex(fixed_keyfrm, 1);// second argument = true: this keyframe is held constant, as in the plain local BA
            optimizer.addVertex(keyfrm_vtx);
        //}
        // else if(level==2){// 1级优化后误差还是很大,增加二级帧优化扩范围,且一级和二级全部优化,地图的二级共视帧不在固定也得参与优化
        //     auto keyfrm_vtx = keyfrm_vtx_container.create_vertex(fixed_keyfrm, 0);// 不固定 参与全局优化
        //     optimizer.addVertex(keyfrm_vtx);
        // }


    }



    if(use_gnss_optimize){
        // 加入GNSS 优化边
        for (auto& id_local_keyfrm_pair : local_keyfrms) {
            auto local_keyfrm = id_local_keyfrm_pair.second;
            const auto keyfrm_vtx = keyfrm_vtx_container.get_vertex(local_keyfrm); 
        
            //==================== 添加GNSS优化点==================
            //use_gnss_optimize=1;
            // 起码GNSS点对有4个以上

                gnss_data gnss_data_i = local_keyfrm->Gnss_data;
                
                if(!gnss_data_i.can_use)
                {
                    continue;       
                }
                Eigen::Vector3d gnss_data_i_xyz(gnss_data_i.x,gnss_data_i.y,gnss_data_i.z);

                auto vi = keyfrm_vtx;// _id 位姿节点

                // Edge: GNSS unary constraint for this keyframe
                openvslam::optimize::g2o::se3::GNSSConstraint_sRt_Edge *edge = new openvslam::optimize::g2o::se3::GNSSConstraint_sRt_Edge(T_VO_to_GNSS_ENU); // pass the current VO -> GNSS ENU similarity transform
                //edge->setId(i);
                edge->setVertex(0, vi); // unary edge: only one pose vertex is attached
                edge->setMeasurement(gnss_data_i_xyz); // measurement: GNSS position in the ENU frame (Vector3d)

                // Information matrix: weight of the GNSS residual.
                // The GNSS edge error is in metres while the reprojection edges are in pixels, so both must be
                // brought to a common scale.
                // GNSS error in metres: e1 = GNSS_ENU(X,Y,Z) - sRt * VO_ENU(X,Y,Z),
                // where sRt is the VO_ENU -> GNSS_ENU transform estimated from all 3D point pairs by ICP + RANSAC.
                // Rough conversion to pixels: e2 ≈ scale s * focal length fx * e1.
                // Derivation: https://www.cnblogs.com/gooutlook/p/18349180
                // Total cost: e2^T * I * e2 = e1^T * ((s*fx)^2 * I) * e1, hence the information matrix below.
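                // Worked example (illustrative numbers, not from this dataset): with s = 2.0 and fx = 800 px,
                // a 1 m GNSS residual corresponds to roughly s*fx = 1600 px, so the weight below becomes
                // (s*fx)^2 = 2.56e6 and the GNSS edge ends up comparable in magnitude to the pixel-based
                // reprojection edges.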

                double S_gnss3D_to_vo2D = pow(scale*cam_info->fx_,2);
                edge->setInformation(Eigen::Matrix3d::Identity()*S_gnss3D_to_vo2D);

                // Robust kernel: keeps outlier GNSS measurements with large errors from dominating the result.
                // Chi-square with 3 DOF: (x-x')^2 + (y-y')^2 + (z-z')^2 < 7.815 covers 95% of inliers.
                // Chi-square with 2 DOF: (x-x')^2 + (y-y')^2 < 5.99 covers 95% of inliers.
                ::g2o::RobustKernelHuber* huber_kernel = new ::g2o::RobustKernelHuber;
                huber_kernel->setDelta( std::sqrt(7.815));
                edge->setRobustKernel(huber_kernel);
                optimizer.addEdge(edge);
                //cout<< "测试 ==========  GNSS优化加入 ==========  总边数 " <<   N <<endl;
            

        }
    }//if(use_gnss_optimize)

    // 4. keyframeとlandmarkのvertexをreprojection edgeで接続する

    // landmark vertexのcontainer
    g2o::landmark_vertex_container lm_vtx_container(keyfrm_vtx_container.get_max_vertex_id() + 1, local_lms.size());

    // reprojection edgeのcontainer
    using reproj_edge_wrapper = g2o::se3::reproj_edge_wrapper<data::keyframe>;
    std::vector<reproj_edge_wrapper> reproj_edge_wraps;
    reproj_edge_wraps.reserve(all_keyfrms.size() * local_lms.size());

    // 有意水準5%のカイ2乗値
    // 自由度n=2
    constexpr float chi_sq_2D = 5.99146;
    const float sqrt_chi_sq_2D = std::sqrt(chi_sq_2D);
    // 自由度n=3
    constexpr float chi_sq_3D = 7.81473;
    const float sqrt_chi_sq_3D = std::sqrt(chi_sq_3D);

    for (auto& id_local_lm_pair : local_lms) {
        auto local_lm = id_local_lm_pair.second;

        // landmarkをg2oのvertexに変換してoptimizerにセットする
        auto lm_vtx = lm_vtx_container.create_vertex(local_lm, false);
        optimizer.addVertex(lm_vtx);

        const auto observations = local_lm->get_observations();//  std::map<keyframe*, unsigned int> 
        for (const auto& obs : observations) {
            auto keyfrm = obs.first;
            auto idx = obs.second;
            if (!keyfrm) {
                continue;
            }
            if (keyfrm->will_be_erased()) {
                continue;
            }

            const auto keyfrm_vtx = keyfrm_vtx_container.get_vertex(keyfrm);
            const auto& undist_keypt = keyfrm->undist_keypts_.at(idx);
            const float x_right = keyfrm->stereo_x_right_.at(idx);
            const float inv_sigma_sq = keyfrm->inv_level_sigma_sq_.at(undist_keypt.octave);
            const auto sqrt_chi_sq = (keyfrm->camera_->setup_type_ == camera::setup_type_t::Monocular)
                                         ? sqrt_chi_sq_2D
                                         : sqrt_chi_sq_3D;
            auto reproj_edge_wrap = reproj_edge_wrapper(keyfrm, keyfrm_vtx, local_lm, lm_vtx,
                                                        idx, undist_keypt.pt.x, undist_keypt.pt.y, x_right,
                                                        inv_sigma_sq, sqrt_chi_sq);
            reproj_edge_wraps.push_back(reproj_edge_wrap);
            optimizer.addEdge(reproj_edge_wrap.edge_);
        }
    }

    // 5. 1回目の最適化を実行

    if (force_stop_flag) {
        if (*force_stop_flag) {
            return;
        }
    }

    optimizer.initializeOptimization();
    optimizer.optimize(num_first_iter_);

    // 6. アウトライア除去をして2回目の最適化を実行

    bool run_robust_BA = true;

    if (force_stop_flag) {
        if (*force_stop_flag) {
            run_robust_BA = false;
        }
    }

    if (run_robust_BA) {
        for (auto& reproj_edge_wrap : reproj_edge_wraps) {
            auto edge = reproj_edge_wrap.edge_;

            auto local_lm = reproj_edge_wrap.lm_;
            if (local_lm->will_be_erased()) {
                continue;
            }

            if (reproj_edge_wrap.is_monocular_) {
                if (chi_sq_2D < edge->chi2() || !reproj_edge_wrap.depth_is_positive()) {
                    reproj_edge_wrap.set_as_outlier();
                }
            }
            else {
                if (chi_sq_3D < edge->chi2() || !reproj_edge_wrap.depth_is_positive()) {
                    reproj_edge_wrap.set_as_outlier();
                }
            }

            edge->setRobustKernel(nullptr);
        }

        optimizer.initializeOptimization();
        optimizer.optimize(num_second_iter_);
    }

    // 7. アウトライアを集計する

    std::vector<std::pair<data::keyframe*, data::landmark*>> outlier_observations;
    outlier_observations.reserve(reproj_edge_wraps.size());

    for (auto& reproj_edge_wrap : reproj_edge_wraps) {
        auto edge = reproj_edge_wrap.edge_;

        auto local_lm = reproj_edge_wrap.lm_;
        if (local_lm->will_be_erased()) {
            continue;
        }

        if (reproj_edge_wrap.is_monocular_) {
            if (chi_sq_2D < edge->chi2() || !reproj_edge_wrap.depth_is_positive()) {
                outlier_observations.emplace_back(std::make_pair(reproj_edge_wrap.shot_, reproj_edge_wrap.lm_));
            }
        }
        else {
            if (chi_sq_3D < edge->chi2() || !reproj_edge_wrap.depth_is_positive()) {
                outlier_observations.emplace_back(std::make_pair(reproj_edge_wrap.shot_, reproj_edge_wrap.lm_));
            }
        }
    }

    // 8. 情報を更新

    {
        std::lock_guard<std::mutex> lock(data::map_database::mtx_database_);

        if (!outlier_observations.empty()) {
            for (auto& outlier_obs : outlier_observations) {
                auto keyfrm = outlier_obs.first;
                auto lm = outlier_obs.second;
                keyfrm->erase_landmark(lm);
                lm->erase_observation(keyfrm);
            }
        }

        for (auto id_local_keyfrm_pair : local_keyfrms) {
            auto local_keyfrm = id_local_keyfrm_pair.second;

            auto keyfrm_vtx = keyfrm_vtx_container.get_vertex(local_keyfrm);
            local_keyfrm->set_cam_pose(keyfrm_vtx->estimate());
        }




        for (auto id_local_lm_pair : local_lms) {
            auto local_lm = id_local_lm_pair.second;

            auto lm_vtx = lm_vtx_container.get_vertex(local_lm);
            local_lm->set_pos_in_world(lm_vtx->estimate());
            local_lm->update_normal_and_depth();
        }
    }





    // 全体关键帧位姿参与sRt计算 开始 ============================================================

    //从关键帧抽取优化后的3D点
    source_all_points_vo.clear();
    target_all_points_gnss.clear();

    //从关键帧抽取3D点
    API_keyframe_to_vector3dList(all_dbkeyfrms ,source_all_points_vo ,target_all_points_gnss);

    if(source_all_points_vo.size()>4 && source_all_points_vo.size()== target_all_points_gnss.size() ){
        API_ransac_ICP_3D_3D_sRt_inliner_sR(source_all_points_vo,target_all_points_gnss,
                                1000,        //ransac随机抽取验证次数
                                7.815,          // 误差阈值 3         7.815
                                scale, R, t) ;
        T_VO_to_GNSS_ENU.block<3, 3>(0, 0) = scale * R;
        T_VO_to_GNSS_ENU.block<3, 1>(0, 3) = t;
        *T_VO_to_GNSS_ENU_=T_VO_to_GNSS_ENU;
        //std::cout<<"local_bundle_adjuster.cc === *T_VO_to_GNSS_ENU_ 优化后" <<*T_VO_to_GNSS_ENU_ << std::endl;
        use_gnss_optimize = 1;
    }
    else{
        use_gnss_optimize = 0;
    }
   
    if(use_gnss_optimize==1){
        // 1-3全体数据点计算变换后的误差
        for( int i=0;i<source_all_points_vo.size();i++){
            source_all_points_vo[i]= scale*R*source_all_points_vo[i]+t;
        }
        all_rmse=cal_rmse_t_v3s_v3s(target_all_points_gnss,source_all_points_vo);
        cout << "optimize_with_gnss 全局sRt更新后误差 RMSE = " << all_rmse << " 参与计算总点数  " << source_all_points_vo.size() <<  " 尺度 " << scale  <<"  内参fx  "<< cam_info->fx_<<endl;
    }
        
   // 全体关键帧位姿参与sRt计算 结束 ============================================================

    double error_all_th=2;  
    if(all_rmse>error_all_th){
        optimize_all_gnss(force_stop_flag,map_db_);
        cout << all_rmse  << " 误差过大 开启全局优化 ************************* "  << endl;
        //optimize_with_gnss(curr_keyfrm, force_stop_flag, map_db_,2); // 丢帧且不插入关集帧
        //cout << all_rmse  << " 误差过大 开启一级+二级联合优化 ************************* "  << endl;
        
    }


}





void local_bundle_adjuster::optimize_all_gnss(bool* const force_stop_flag,data::map_database* map_db_ ) const {
    // 1. local/fixed keyframes, local landmarksを集計する


    // 1. データを集める

    

    const auto keyfrms = map_db_->get_all_keyframes();
    const auto lms = map_db_->get_all_landmarks();
    std::vector<bool> is_optimized_lm(lms.size(), true);



   // 全体关键帧位姿参与sRt计算 开始 ============================================================
    std::vector< data::keyframe*> all_dbkeyfrms = map_db_->get_all_keyframes();

    auto cam_info = static_cast<camera::perspective*>(all_dbkeyfrms[0]->camera_);
            //  cout <<local_keyfrm->id_ << " 相机内参  " 
            //  << "fx: "<<  cam_info->fx_ 
            //  << "fy: "<<  cam_info->fy_ 
            //  << "cx: "<<  cam_info->cx_ 
            //  << "cy: "<<  cam_info->cy_ 
            //  << endl;

    std::sort(all_dbkeyfrms.begin(), all_dbkeyfrms.end(), [&](data::keyframe *keyfrm_1, data::keyframe *keyfrm_2)
            { return *keyfrm_1 < *keyfrm_2; });

    vector<Eigen::Vector3d> source_points_vo;
    vector<Eigen::Vector3d> target_points_gnss;

    Eigen::Matrix4d T_VO_to_GNSS_ENU = Eigen::Matrix4d::Identity();
    double s;
    Eigen::Matrix3d R;
    Eigen::Vector3d t; 

    bool use_gnss_optimize=0; // GNSS满足参与优化条件标志位 起码有4个点以上
    // 计算sRT 变换关系
    if(all_dbkeyfrms.size()>4){

        for (int i=0;i< all_dbkeyfrms.size();i++) {
            //auto local_keyfrm = id_local_keyfrm_pair.second;
            data::keyframe* local_keyfrm=all_dbkeyfrms[i];

            if (!local_keyfrm) {
                continue;
            }
            if (local_keyfrm->will_be_erased()) {
                continue;
            }



            Eigen::Matrix4d cam_pose_cw_i = local_keyfrm->get_cam_pose();
            const Eigen::Matrix4d cam_pose_wc_i = cam_pose_cw_i.inverse();
            Eigen::Vector3d cam_pose_i_t =  cam_pose_wc_i.block<3, 1>(0, 3);

            gnss_data gnss_data_i = local_keyfrm->Gnss_data;
            bool can_use=local_keyfrm->Gnss_data.can_use;

            if(!can_use){ // skip keyframes without a usable GNSS fix
                continue;           
            }

            Eigen::Vector3d gnss_data_i_xyz(gnss_data_i.x,gnss_data_i.y,gnss_data_i.z);

            //gnss_data_i_xyz[2]=0;// 固定高度

            source_points_vo.push_back(cam_pose_i_t);
            target_points_gnss.push_back(gnss_data_i_xyz);
            //std::cout<<  "cam_pose_i_t : " <<cam_pose_i_t[0] <<  "  " << cam_pose_i_t[1] <<  "  "<<cam_pose_i_t[2] <<  "  "<<std::endl;
            //std::cout<<  "gnss_data_i_xyz : " <<gnss_data_i_xyz[0]<<  "  "  <<gnss_data_i_xyz[1]<<  "  " <<gnss_data_i_xyz[2]<<  "  " << std::endl;
        }
         
        
        if(source_points_vo.size()>4 && source_points_vo.size()== target_points_gnss.size() ){
            //std::cout<<  "SRT计算 总GNSS点数" << source_points_vo.size()<< "  总关键帧: "<<all_dbkeyfrms.size() <<std::endl;
            //Eigen::Matrix4d T_VO_to_GNSS_ENU = Eigen::Matrix4d::Identity();
            // double s;
            // Eigen::Matrix3d R;
            // Eigen::Vector3d t; 
            API_ransac_ICP_3D_3D_sRt_inliner_sR(source_points_vo,target_points_gnss,
                                    1000,        // number of RANSAC iterations
                                    7.815 ,      // inlier error threshold (chi-square 95% quantile, 3 DOF)
                                    s, R, t) ;
            T_VO_to_GNSS_ENU.block<3, 3>(0, 0) = s * R;
            T_VO_to_GNSS_ENU.block<3, 1>(0, 3) = t;
            //std::cout<< T_VO_to_GNSS_ENU << std::endl;

        
            for( int i=0;i<source_points_vo.size();i++){
                source_points_vo[i]= s*R*source_points_vo[i]+t;
            }


            double rmse=cal_rmse_t_v3s_v3s(target_points_gnss,source_points_vo);
            cout << " 全局整体srt优化前 RMSE = " << rmse << " 参与计算总点数  " << source_points_vo.size() <<  " 尺度 " << s << endl;

            use_gnss_optimize=1;

        }

    }
   // 全体关键帧位姿参与sRt计算 结束 ============================================================



    // 2. optimizerを構築

    auto linear_solver = ::g2o::make_unique<::g2o::LinearSolverCSparse<::g2o::BlockSolver_6_3::PoseMatrixType>>();
    auto block_solver = ::g2o::make_unique<::g2o::BlockSolver_6_3>(std::move(linear_solver));
    auto algorithm = new ::g2o::OptimizationAlgorithmLevenberg(std::move(block_solver));

    ::g2o::SparseOptimizer optimizer;
    optimizer.setAlgorithm(algorithm);

    if (force_stop_flag) {
        optimizer.setForceStopFlag(force_stop_flag);
    }

    // 3. keyframeをg2oのvertexに変換してoptimizerにセットする

    // shot vertexのcontainer
    g2o::se3::shot_vertex_container keyfrm_vtx_container(0, keyfrms.size());



   

    // keyframesをoptimizerにセット
    for (const auto keyfrm : keyfrms) {
        if (!keyfrm) {
            continue;
        }
        if (keyfrm->will_be_erased()) {
            continue;
        }
        // 添加图像位姿点
        auto keyfrm_vtx = keyfrm_vtx_container.create_vertex(keyfrm, keyfrm->id_ == 0);
        optimizer.addVertex(keyfrm_vtx);

        // 添加GNSS优化点
        //use_gnss_optimize=1;
        if(use_gnss_optimize){
            
            //keyfrm->Gnss_data.z=0;// 固定高度

            gnss_data gnss_data_i = keyfrm->Gnss_data;

           
            
            Eigen::Vector3d gnss_data_i_xyz(gnss_data_i.x,gnss_data_i.y,gnss_data_i.z);
            if(!gnss_data_i.can_use)
            {
                 continue;       
            }
            //openvslam::optimize::g2o::se3::shot_vertex* vi = static_cast<openvslam::optimize::g2o::se3::shot_vertex*>(optimizer.vertex(keyfrm->id_));
            //Eigen::Vector3d Gnss_enu_i =  gnss_lists[i];
            //Gnss_enu_i[2]=0;// 固定高度
             
            auto vi = keyfrm_vtx;// _id 位姿节点
            
            // Edge: GNSS unary constraint for this keyframe
            openvslam::optimize::g2o::se3::GNSSConstraint_sRt_Edge *edge = new openvslam::optimize::g2o::se3::GNSSConstraint_sRt_Edge(T_VO_to_GNSS_ENU); // pass the current VO -> GNSS ENU similarity transform
            //edge->setId(i);
            edge->setVertex(0, vi); // unary edge: only one pose vertex is attached
            edge->setMeasurement(gnss_data_i_xyz); // measurement: GNSS position in the ENU frame (Vector3d)
            
            // Information matrix: weight of the GNSS residual.
            // The GNSS edge error is in metres while the reprojection edges are in pixels, so both must be
            // brought to a common scale.
            // GNSS error in metres: e1 = GNSS_ENU(X,Y,Z) - sRt * VO_ENU(X,Y,Z),
            // where sRt is the VO_ENU -> GNSS_ENU transform estimated from all 3D point pairs by ICP + RANSAC.
            // Rough conversion to pixels: e2 ≈ scale s * focal length fx * e1.
            // Derivation: https://www.cnblogs.com/gooutlook/p/18349180
            // Total cost: e2^T * I * e2 = e1^T * ((s*fx)^2 * I) * e1, hence the information matrix below.

            double S_gnss3D_to_vo2D = pow(s*cam_info->fx_,2);
            edge->setInformation(Eigen::Matrix3d::Identity()*S_gnss3D_to_vo2D);

            // Robust kernel: keeps outlier GNSS measurements with large errors from dominating the result.
            // Chi-square with 3 DOF: (x-x')^2 + (y-y')^2 + (z-z')^2 < 7.815 covers 95% of inliers.
            // Chi-square with 2 DOF: (x-x')^2 + (y-y')^2 < 5.99 covers 95% of inliers.
            ::g2o::RobustKernelHuber* rk = new ::g2o::RobustKernelHuber;
            rk->setDelta( std::sqrt(7.815));
            edge->setRobustKernel(rk);
            optimizer.addEdge(edge);
            //cout<< "测试 ==========  GNSS优化加入 ==========  总边数 " <<   N <<endl;
        }
    }


 
   // ============================== end of GNSS edge setup ==============================


    // 4. keyframeとlandmarkのvertexをreprojection edgeで接続する

    // landmark vertexのcontainer
    g2o::landmark_vertex_container lm_vtx_container(keyfrm_vtx_container.get_max_vertex_id() + 1, lms.size());

    // reprojection edgeのcontainer
    using reproj_edge_wrapper = g2o::se3::reproj_edge_wrapper<data::keyframe>;
    std::vector<reproj_edge_wrapper> reproj_edge_wraps;
    reproj_edge_wraps.reserve(10 * lms.size());

    // 有意水準5%のカイ2乗値
    // 自由度n=2
    constexpr float chi_sq_2D = 5.99146;
    const float sqrt_chi_sq_2D = std::sqrt(chi_sq_2D);
    // 自由度n=3
    constexpr float chi_sq_3D = 7.81473;
    const float sqrt_chi_sq_3D = std::sqrt(chi_sq_3D); //std::sqrt(7.81473)


    bool use_huber_kernel_=1;

    for (unsigned int i = 0; i < lms.size(); ++i) {
        auto lm = lms.at(i);
        if (!lm) {
            continue;
        }
        if (lm->will_be_erased()) {
            continue;
        }
        // landmarkをg2oのvertexに変換してoptimizerにセットする
        auto lm_vtx = lm_vtx_container.create_vertex(lm, false);
        optimizer.addVertex(lm_vtx);

        unsigned int num_edges = 0;
        const auto observations = lm->get_observations();
        for (const auto& obs : observations) {
            auto keyfrm = obs.first;
            auto idx = obs.second;
            if (!keyfrm) {
                continue;
            }
            if (keyfrm->will_be_erased()) {
                continue;
            }

            if (!keyfrm_vtx_container.contain(keyfrm)) {
                continue;
            }

            const auto keyfrm_vtx = keyfrm_vtx_container.get_vertex(keyfrm);
            const auto& undist_keypt = keyfrm->undist_keypts_.at(idx);
            const float x_right = keyfrm->stereo_x_right_.at(idx);
            const float inv_sigma_sq = keyfrm->inv_level_sigma_sq_.at(undist_keypt.octave);
            const auto sqrt_chi_sq = (keyfrm->camera_->setup_type_ == camera::setup_type_t::Monocular)
                                         ? sqrt_chi_sq_2D
                                         : sqrt_chi_sq_3D;
            auto reproj_edge_wrap = reproj_edge_wrapper(keyfrm, keyfrm_vtx, lm, lm_vtx,
                                                        idx, undist_keypt.pt.x, undist_keypt.pt.y, x_right,
                                                        inv_sigma_sq, sqrt_chi_sq, use_huber_kernel_);
            reproj_edge_wraps.push_back(reproj_edge_wrap);
            optimizer.addEdge(reproj_edge_wrap.edge_);
            ++num_edges;
        }

        if (num_edges == 0) {
            optimizer.removeVertex(lm_vtx);
            is_optimized_lm.at(i) = false;
        }
    }

    // 5. 最適化を実行
    int num_iter_=10;// 优化次数 10

    optimizer.initializeOptimization();
    optimizer.optimize(num_iter_);

    if (force_stop_flag && *force_stop_flag) {
        return;
    }

    // 6. 結果を取り出す

    for (auto keyfrm : keyfrms) {
        if (keyfrm->will_be_erased()) {
            continue;
        }
        auto keyfrm_vtx = keyfrm_vtx_container.get_vertex(keyfrm);
        const auto cam_pose_cw = util::converter::to_eigen_mat(keyfrm_vtx->estimate());
        
        keyfrm->set_cam_pose(cam_pose_cw);
       
    }

    for (unsigned int i = 0; i < lms.size(); ++i) {
        if (!is_optimized_lm.at(i)) {
            continue;
        }

        auto lm = lms.at(i);
        if (!lm) {
            continue;
        }
        if (lm->will_be_erased()) {
            continue;
        }

        auto lm_vtx = lm_vtx_container.get_vertex(lm);
        const Vec3_t pos_w = lm_vtx->estimate();

       
        lm->set_pos_in_world(pos_w);
        lm->update_normal_and_depth();
       
    }

    // 7. update sRt and compute the error after optimization

    if(all_dbkeyfrms.size()>4){

        source_points_vo.clear();
        target_points_gnss.clear();



        for (int i=0;i< all_dbkeyfrms.size();i++) {
            //auto local_keyfrm = id_local_keyfrm_pair.second;
            data::keyframe* local_keyfrm=all_dbkeyfrms[i];

            if (!local_keyfrm) {
                continue;
            }
            if (local_keyfrm->will_be_erased()) {
                continue;
            }


            Eigen::Matrix4d cam_pose_cw_i = local_keyfrm->get_cam_pose();
            const Eigen::Matrix4d cam_pose_wc_i = cam_pose_cw_i.inverse();
            Eigen::Vector3d cam_pose_i_t =  cam_pose_wc_i.block<3, 1>(0, 3);

            gnss_data gnss_data_i = local_keyfrm->Gnss_data;
            bool can_use=local_keyfrm->Gnss_data.can_use;

            if(!can_use){// 如果gnss可用
                continue;           
            }

            Eigen::Vector3d gnss_data_i_xyz(gnss_data_i.x,gnss_data_i.y,gnss_data_i.z);

            //gnss_data_i_xyz[2]=0;// 固定高度

            source_points_vo.push_back(cam_pose_i_t);
            target_points_gnss.push_back(gnss_data_i_xyz);
            //std::cout<<  "cam_pose_i_t : " <<cam_pose_i_t[0] <<  "  " << cam_pose_i_t[1] <<  "  "<<cam_pose_i_t[2] <<  "  "<<std::endl;
            //std::cout<<  "gnss_data_i_xyz : " <<gnss_data_i_xyz[0]<<  "  "  <<gnss_data_i_xyz[1]<<  "  " <<gnss_data_i_xyz[2]<<  "  " << std::endl;
        }
            
        
        if(source_points_vo.size()>4 && source_points_vo.size()== target_points_gnss.size() ){
            //std::cout<<  "SRT计算 总GNSS点数" << source_points_vo.size()<< "  总关键帧: "<<all_dbkeyfrms.size() <<std::endl;
            //Eigen::Matrix4d T_VO_to_GNSS_ENU = Eigen::Matrix4d::Identity();
            double s;
            Eigen::Matrix3d R;
            Eigen::Vector3d t; 
            API_ransac_ICP_3D_3D_sRt_inliner_sR(source_points_vo,target_points_gnss,
                                    1000,        // number of RANSAC iterations
                                    7.81 ,       // inlier error threshold (≈ chi-square 95% quantile for 3 DOF, 7.815)
                                    s, R, t) ;
            T_VO_to_GNSS_ENU.block<3, 3>(0, 0) = s * R;
            T_VO_to_GNSS_ENU.block<3, 1>(0, 3) = t;
            //std::cout<< T_VO_to_GNSS_ENU << std::endl;

            for( int i=0;i<source_points_vo.size();i++){
                source_points_vo[i]= s*R*source_points_vo[i]+t;
            }

            
            double rmse=cal_rmse_t_v3s_v3s(target_points_gnss,source_points_vo);
            cout << " 全局整体srt优化后 RMSE = " << rmse << " 参与计算总点数  " << source_points_vo.size() <<  " 尺度 " << s <<" 内参fx "<<   cam_info->fx_  <<endl;




        }
    
        } // 7. update sRt and compute the error after optimization

  }
    






} // namespace optimize
} // namespace openvslam
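The post-fit check used throughout the code above (apply the estimated sRt to every VO camera position and measure the deviation from the GNSS ENU points) can be reproduced standalone. Below is a minimal Eigen-only sketch; rmse_after_srt mirrors what cal_rmse_t_v3s_v3s is assumed to compute, and the three point pairs are made-up values.

#include <Eigen/Dense>
#include <cmath>
#include <iostream>
#include <vector>

// RMSE between GNSS ENU targets and VO positions after applying the similarity (s, R, t).
double rmse_after_srt(std::vector<Eigen::Vector3d> vo_points,
                      const std::vector<Eigen::Vector3d>& gnss_points,
                      double s, const Eigen::Matrix3d& R, const Eigen::Vector3d& t) {
    double sum_sq = 0.0;
    for (size_t i = 0; i < vo_points.size(); ++i) {
        vo_points[i] = s * R * vo_points[i] + t;                  // map VO point into the GNSS ENU frame
        sum_sq += (gnss_points[i] - vo_points[i]).squaredNorm();  // accumulate squared residual
    }
    return std::sqrt(sum_sq / static_cast<double>(vo_points.size()));
}

int main() {
    // Made-up trajectory: three VO positions and GNSS counterparts generated with s = 2, R = I, t = (10, 0, 0).
    std::vector<Eigen::Vector3d> vo   = {{0, 0, 0}, {1, 0, 0}, {2, 1, 0}};
    std::vector<Eigen::Vector3d> gnss = {{10, 0, 0}, {12, 0, 0}, {14, 2, 0}};
    std::cout << "RMSE = "
              << rmse_after_srt(vo, gnss, 2.0, Eigen::Matrix3d::Identity(), Eigen::Vector3d(10, 0, 0))
              << std::endl; // exact transform -> RMSE = 0
    return 0;
}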

  API_GNSS_ENU_TXT_YAML.h

#ifndef API_GNSS_ENU_YAML_h
#define API_GNSS_ENU_YAML_h


#include <iostream>
#include <fstream>
#include <sstream>
#include <vector>
#include <string>
 
#include<iomanip>

#include <yaml-cpp/yaml.h>
#include <GeographicLib/LocalCartesian.hpp>
 
using namespace std;
 
 
 
class gnss_data
{
 
public:

   bool can_use;
   // 原始数据
   string time_s; // timestamp as a string, used to match the corresponding image by name
   double time;  //时间戳 用于计算时间差
   double lat;
   double lon;
   double high;
     
   // ENU坐标GNSS的原点
   double lat0;
   double lon0;
   double high0;
 
   // 在ENU坐标下
   double x;
   double y;
   double z;
 
   double ecef_x;
   double ecef_y;
   double ecef_z;

public:
   // 初始化 赋予原始数据
   gnss_data(double time_,double lat_,double lon_,double high_):time(time_), lat(lat_),lon(lon_),high(high_) {}
   gnss_data() {
        can_use=0; // 无效GNSS数据 不能用
        time_s="0"; 
        time=0;
        lat=0;
        lon=0;
        high=0;
        lat0=0;
        lon0=0;
        high0=0;
        x=0;
        y=0;
        z=0;
        ecef_x=0;
        ecef_y=0;
        ecef_z=0;
   }
   ~gnss_data() {}
   
   // 设置原始GNSS数据
   void Set_Gnss(double time_,double lat_,double lon_,double high_){
        time=time_;
        lat=lat_;
        lon=lon_;
        high=high_;
   };
   // 设置ENU参考点GNSS
   void Set_orinGnss(double lat0_,double lon0_,double high0_){
     lat0=lat0_;
     lon0=lon0_;
     high0=high0_;
   }
   
   // 设置ENU坐标数据
   void Set_ENU(double x_,double y_,double z_){
     x=x_;
     y=y_;
     z=z_;
   }

      // 设置ECEF坐标数据
   void Set_ECEF(double x_,double y_,double z_){
     ecef_x=x_;
     ecef_y=y_;
     ecef_z=z_;
   }
 
};
 
 
class GNSS_TextFileReader {
        
    public:
        std::string filename;
        char delimiter;
        std::vector<std::vector<std::string>> data; // 二维向量,存储每一行拆分后的数据
        std::vector<gnss_data> gnss_List;
    
    
    
    public:
        GNSS_TextFileReader(const std::string &filename, char delimiter)
            : filename(filename), delimiter(delimiter) {}
    
        bool readFile() {
            std::ifstream file(filename);
            if (!file.is_open()) {
                std::cerr << "Error opening file " << filename << std::endl;
                return false;
            }
    
            std::string line;
            while (std::getline(file, line)) {
                std::stringstream ss(line);
                std::string token;
                std::vector<std::string> row;
    
                while (std::getline(ss, token, delimiter)) { // delimiter 分割
                    row.push_back(token);
                }
                data.push_back(row); // keep the raw string tokens of this line

                gnss_data gnss_data_i( stod(row[0]),stod(row[1]),stod(row[2]),stod(row[3])); // parsed as doubles: time, lat, lon, alt
                gnss_data_i.time_s=row[0]; // timestamp also kept as a string
                gnss_List.push_back(gnss_data_i);
            }
    
            file.close();
    
    
    
            return true;
        }
 
    void printData() {
        // for (const auto &row : data) {
        //     for (const auto &col : row) {
        //         std::cout << col << " ";
        //     }
        //     std::cout << std::endl;
        // }
 
       //for (const auto &gnss_List_i : gnss_List) { 
       for(int i=0;i<gnss_List.size();i++){
        gnss_data gnss_List_i=gnss_List[i];
        cout << fixed << setprecision(10) << "编号 " << i << " 时间戳 " << gnss_List_i.time << " 纬度 " << gnss_List_i.lat << "  经度 " << gnss_List_i.lon << "  高度 " << gnss_List_i.high << endl;
       }
 
    }
 
    const std::vector<std::vector<std::string>>& getData() const {
        return data;
    }
 
    const std::vector<gnss_data>& get_gnss_List() const {
        return gnss_List;
    }
};
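// Expected layout of the GNSS text file parsed by GNSS_TextFileReader::readFile() above
// (one fix per line, fields separated by the delimiter given to the constructor, here a space,
// matching the four stod() calls): "<timestamp> <latitude> <longitude> <altitude>",
// e.g. "100.50 34.0333 108.7667 405.2" (values illustrative only).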

bool Get_GNSS_INTI_YAML( std::string read_path,gnss_data &gnss_data_int0);


std::string Get_YAML(std::string read_path, const char *name);


// 函数用于按照指定分隔符分割字符串
std::vector<std::string> splitString(const std::string &s, char delim) ;

 
// int main() {
 
//     //string path_="/home/r9000k/v2_project/data/NWPU/";
//     string path_="../config/";
//     string path_config=path_+"FHY_config.yaml";// 初始点
//     string path_GNSS=path_+"FHY_gps.txt";// 原始数据
    

//     // 1 获取参考点
//     gnss_data gnss_data_int0(-1,-1,-1,-1);
//     Get_GNSS_INTI_YAML(path_config,gnss_data_int0);
//     std::cout  << fixed << setprecision(10)<< endl;
//     std::cout << "原点 纬度: " << gnss_data_int0.lat << endl;
//     std::cout << "原点 经度: " << gnss_data_int0.lon << endl;
//     std::cout << "原点 高度: " << gnss_data_int0.high << endl;
    
    
//     // 2 当前点的经纬度和高度,作为局部坐标系的原点
//     double origin_latitude =  gnss_data_int0.lat;   // 纬度
//     double origin_longitude = gnss_data_int0.lon; // 经度
//     double origin_height = gnss_data_int0.high;   // 高度
 
//     // 转化为enu,并设置原点
//     GeographicLib::LocalCartesian geoConverter;
//     geoConverter.Reset(origin_latitude, origin_longitude, origin_height);

//     // 转化为ecef,使用WGS84椭球模型
//     GeographicLib::Geocentric wgs84(GeographicLib::Constants::WGS84_a(), GeographicLib::Constants::WGS84_f());//  6378137  298.257223563LL
//     //GeographicLib::Geocentric cgcs2000(6378137.0, 1.0 / 298.257222101);


//     //3 获取数据点
//     GNSS_TextFileReader reader(path_GNSS, ' '); // 读取路径 分隔符号
    
//     std::vector<gnss_data> gnss_Lists ;
//     if (reader.readFile()) {
//         //reader.printData();
//         gnss_Lists =  reader.get_gnss_List();
//         for(int i=0;i<gnss_Lists.size();i++){
            
//             gnss_Lists[i].Set_orinGnss(gnss_data_int0.lat,gnss_data_int0.lon,gnss_data_int0.high);// 设置原始GNSS点

//             double target_latitude = gnss_Lists[i].lat;
//             double target_longitude = gnss_Lists[i].lon;
//             double target_height = gnss_Lists[i].high;


//             // gnss转化为enu
//             double x, y, z;
//             geoConverter.Forward(target_latitude, target_longitude, target_height, x, y, z);

//             gnss_Lists[i].x=x;
//             gnss_Lists[i].y=y;
//             gnss_Lists[i].z=z;


//             //  WGS84 gnss转化为ecef
//             wgs84.Forward(target_latitude, target_longitude, target_height, x, y, z);
//             gnss_Lists[i].ecef_x=x;
//             gnss_Lists[i].ecef_y=y;
//             gnss_Lists[i].ecef_z=z;      

//             gnss_data gnss_List_i=gnss_Lists[i];
            
//             cout << fixed << setprecision(10)<< endl;
//             cout<< "编号 " << i 
//             << " 时间戳 "<< gnss_List_i.time
//             << " \n纬度 " << gnss_List_i.lat 
//             << " 经度 " << gnss_List_i.lon
//             << " 高度 "<<  gnss_List_i.high 
//             << " \nenu-x " << gnss_List_i.x 
//             << " enu-y " << gnss_List_i.y
//             << " enu-z "<<  gnss_List_i.z 
//             << " \necef_x " << gnss_List_i.ecef_x
//             << " ecef_y " << gnss_List_i.ecef_y
//             << " ecef_z "<<  gnss_List_i.ecef_z
//             << endl;
//         }
//     }
 
 
//     return 0;
// }

#endif
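The commented-out main above already walks through the full workflow; below is a condensed, runnable version of the same steps. It assumes the same ../config/FHY_config.yaml and ../config/FHY_gps.txt inputs and that it is linked against API_GNSS_ENU_TXT_YAML.cc, yaml-cpp, and GeographicLib.

#include <iomanip>
#include <iostream>
#include <GeographicLib/LocalCartesian.hpp>
#include "API_GNSS_ENU_TXT_YAML.h"

int main() {
    // 1. Reference point (ENU origin) from the YAML config.
    gnss_data origin;
    if (!Get_GNSS_INTI_YAML("../config/FHY_config.yaml", origin)) return 1;

    // 2. ENU converter anchored at the reference point.
    GeographicLib::LocalCartesian geo;
    geo.Reset(origin.lat, origin.lon, origin.high);

    // 3. Read the raw GNSS fixes and convert each one to ENU.
    GNSS_TextFileReader reader("../config/FHY_gps.txt", ' ');
    if (!reader.readFile()) return 1;
    std::vector<gnss_data> fixes = reader.get_gnss_List();

    std::cout << std::fixed << std::setprecision(6);
    for (auto& g : fixes) {
        double x, y, z;
        geo.Forward(g.lat, g.lon, g.high, x, y, z);
        g.Set_ENU(x, y, z);
        std::cout << g.time << "  E " << x << "  N " << y << "  U " << z << std::endl;
    }
    return 0;
}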

  API_GNSS_ENU_TXT_YAML.cc

#ifndef API_GNSS_ENU_YAML_cpp
#define API_GNSS_ENU_YAML_cpp

#include "API_GNSS_ENU_TXT_YAML.h"

 


 bool Get_GNSS_INTI_YAML( std::string read_path,gnss_data &gnss_data_int0) {
    try {
        // 读取YAML文件
        YAML::Node config = YAML::LoadFile(read_path);
        //std::cout << "读取gnss 初始点位置" << std::endl;
 
        // 访问YAML中的数据
        std::string lat0 = config["Initial.lat"].as<std::string>();
        std::string lon0 = config["Initial.lon"].as<std::string>();
        std::string alt0 = config["Initial.alt"].as<std::string>();
        double time0=0.0;

               // // 打印读取的数据
        
        //std::cout  << fixed << setprecision(10)<< endl;
        // std::cout << "原点 纬度: " << lat0 << std::endl;
        // std::cout << "原点 经度: " << lon0 << std::endl;
        // std::cout << "原点 高度: " << alt0 << std::endl;

        gnss_data_int0.Set_Gnss(time0,stod(lat0),stod(lon0),stod(alt0));
        std::cout << "GNSS原始点加载完毕" << std::endl;

    } catch (const YAML::Exception& e) {
        std::cerr << "YAML Exception: " << e.what() << std::endl;
        return 0;
    }
 
    return 1;
}
 


 std::string Get_YAML(std::string read_path, const char *name) {
    try {
        // 读取YAML文件
        YAML::Node config = YAML::LoadFile(read_path);
 
        // 访问YAML中的数据
        std::string name_value = config[name].as<std::string>();
       
        std::cout << "yaml 读取数据: "<<name <<" " <<  name_value << std::endl;
        return name_value;

    } catch (const YAML::Exception& e) {
        std::cerr << "YAML Exception: " << e.what() << std::endl;
        return "error";
    }
 
  
}
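
A quick calling example: the key name "gps_txt" below is purely hypothetical, and because the parameter is a non-const char*, a writable buffer (or a cast) is needed when starting from a string literal. Callers should also check for the "error" sentinel.

char key[] = "gps_txt";                                   // hypothetical key name
std::string value = Get_YAML("../config/FHY_config.yaml", key);
if (value == "error") {
    std::cerr << "key not found or yaml parse failed" << std::endl;
}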
 
 
// Split a string into tokens on the given delimiter
 std::vector<std::string> splitString(const std::string &s, char delim) {

    std::vector<std::string> tokens;
    std::stringstream ss(s);
    std::string token;
    while (std::getline(ss, token, delim)) {
        tokens.push_back(token);
    }
    return tokens;
}
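
A short sketch of how splitString can be applied to one record of the GNSS text file; the space-separated field order (time lat lon alt) and the values are assumptions for illustration, not the documented format of FHY_gps.txt.

std::string line = "100.0 30.0000000 120.0000000 10.0";   // placeholder record
std::vector<std::string> fields = splitString(line, ' ');
if (fields.size() == 4) {
    double t   = std::stod(fields[0]);                    // timestamp
    double lat = std::stod(fields[1]);                    // latitude  [deg]
    double lon = std::stod(fields[2]);                    // longitude [deg]
    double alt = std::stod(fields[3]);                    // altitude  [m]
    std::cout << t << " " << lat << " " << lon << " " << alt << std::endl;
}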

 
// int main() {
 
//     //string path_="/home/r9000k/v2_project/data/NWPU/";
//     string path_="../config/";
//     string path_config=path_+"FHY_config.yaml";// 初始点
//     string path_GNSS=path_+"FHY_gps.txt";// 原始数据
    

//     // 1 获取参考点
//     gnss_data gnss_data_int0(-1,-1,-1,-1);
//     Get_GNSS_INTI_YAML(path_config,gnss_data_int0);
//     std::cout  << fixed << setprecision(10)<< endl;
//     std::cout << "原点 纬度: " << gnss_data_int0.lat << endl;
//     std::cout << "原点 经度: " << gnss_data_int0.lon << endl;
//     std::cout << "原点 高度: " << gnss_data_int0.high << endl;
    
    
//     // 2 当前点的经纬度和高度,作为局部坐标系的原点
//     double origin_latitude =  gnss_data_int0.lat;   // 纬度
//     double origin_longitude = gnss_data_int0.lon; // 经度
//     double origin_height = gnss_data_int0.high;   // 高度
 
//     // 转化为enu,并设置原点
//     GeographicLib::LocalCartesian geoConverter;
//     geoConverter.Reset(origin_latitude, origin_longitude, origin_height);

//     // 转化为ecef,使用WGS84椭球模型
//     GeographicLib::Geocentric wgs84(GeographicLib::Constants::WGS84_a(), GeographicLib::Constants::WGS84_f());//  6378137  298.257223563LL
//     //GeographicLib::Geocentric cgcs2000(6378137.0, 1.0 / 298.257222101);


//     //3 获取数据点
//     GNSS_TextFileReader reader(path_GNSS, ' '); // 读取路径 分隔符号
    
//     std::vector<gnss_data> gnss_Lists ;
//     if (reader.readFile()) {
//         //reader.printData();
//         gnss_Lists =  reader.get_gnss_List();
//         for(int i=0;i<gnss_Lists.size();i++){
            
//             gnss_Lists[i].Set_orinGnss(gnss_data_int0.lat,gnss_data_int0.lon,gnss_data_int0.high);// 设置原始GNSS点

//             double target_latitude = gnss_Lists[i].lat;
//             double target_longitude = gnss_Lists[i].lon;
//             double target_height = gnss_Lists[i].high;


//             // gnss转化为enu
//             double x, y, z;
//             geoConverter.Forward(target_latitude, target_longitude, target_height, x, y, z);

//             gnss_Lists[i].x=x;
//             gnss_Lists[i].y=y;
//             gnss_Lists[i].z=z;


//             //  WGS84 gnss转化为ecef
//             wgs84.Forward(target_latitude, target_longitude, target_height, x, y, z);
//             gnss_Lists[i].ecef_x=x;
//             gnss_Lists[i].ecef_y=y;
//             gnss_Lists[i].ecef_z=z;      

//             gnss_data gnss_List_i=gnss_Lists[i];
            
//             cout << fixed << setprecision(10)<< endl;
//             cout<< "编号 " << i 
//             << " 时间戳 "<< gnss_List_i.time
//             << " \n纬度 " << gnss_List_i.lat 
//             << " 经度 " << gnss_List_i.lon
//             << " 高度 "<<  gnss_List_i.high 
//             << " \nenu-x " << gnss_List_i.x 
//             << " enu-y " << gnss_List_i.y
//             << " enu-z "<<  gnss_List_i.z 
//             << " \necef_x " << gnss_List_i.ecef_x
//             << " ecef_y " << gnss_List_i.ecef_y
//             << " ecef_z "<<  gnss_List_i.ecef_z
//             << endl;
//         }
//     }
 
 
//     return 0;
// }

#endif
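
Since the test main() above is kept commented out, here is a standalone sketch of just the GeographicLib conversion step it performs: lat/lon/alt to local ENU via LocalCartesian, and to ECEF via Geocentric on the WGS84 ellipsoid. The coordinates are placeholders, and the program must be linked against GeographicLib.

#include <iostream>
#include <GeographicLib/LocalCartesian.hpp>
#include <GeographicLib/Geocentric.hpp>

int main() {
    // Placeholder coordinates (degrees / meters), not real survey data
    double lat0 = 30.0,    lon0 = 120.0,    h0 = 10.0;    // ENU origin
    double lat  = 30.0001, lon  = 120.0001, h  = 12.0;    // point to convert

    // lat/lon/alt -> local ENU; Reset() fixes the origin of the local frame
    GeographicLib::LocalCartesian geoConverter;
    geoConverter.Reset(lat0, lon0, h0);
    double e, n, u;
    geoConverter.Forward(lat, lon, h, e, n, u);

    // lat/lon/alt -> ECEF on the WGS84 ellipsoid
    GeographicLib::Geocentric wgs84(GeographicLib::Constants::WGS84_a(),
                                    GeographicLib::Constants::WGS84_f());
    double X, Y, Z;
    wgs84.Forward(lat, lon, h, X, Y, Z);

    std::cout << "enu : " << e << " " << n << " " << u << "\n"
              << "ecef: " << X << " " << Y << " " << Z << std::endl;
    return 0;
}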

  

 
