#include <opencv2/opencv.hpp>
#include "opencv2/stitching/detail/autocalib.hpp"
#include "opencv2/stitching/detail/blenders.hpp"
#include "opencv2/stitching/detail/camera.hpp"
#include "opencv2/stitching/detail/exposure_compensate.hpp"
#include "opencv2/stitching/detail/matchers.hpp"
#include "opencv2/stitching/detail/motion_estimators.hpp"
#include "opencv2/stitching/detail/seam_finders.hpp"
#include "opencv2/stitching/detail/util.hpp"
#include "opencv2/stitching/detail/warpers.hpp"
#include "opencv2/stitching/warpers.hpp"

#include <algorithm>
#include <fstream>
#include <iomanip>
#include <iostream>
#include <string>
#include <vector>
using namespace cv;
using namespace std;
using namespace detail;
int main(int argc, char** argv)
{
    vector<Mat> imgs;    //输入图像
    vector<String>names;
    glob("./pic/*.jpg", names,false);
    for (int i = 0; i < names.size(); i++)
    {
        imgs.push_back(imread(names[i]));
    }
    
    Ptr<FeaturesFinder> finder;    //特征检测
    finder = new OrbFeaturesFinder();
    vector<ImageFeatures> features(2);
    (*finder)(imgs[0], features[0]);
    (*finder)(imgs[1], features[1]);

    vector<MatchesInfo> pairwise_matches;    //特征匹配
    Ptr<FeaturesMatcher> matcher;
    //BestOf2NearestMatcher matcher(false, 0.3f, 6, 6);

    matcher = makePtr<BestOf2NearestRangeMatcher>(5, false, 0.3);
    (*matcher)(features, pairwise_matches);
    matcher->collectGarbage();
    //HomographyBasedEstimator estimator;    //相机参数评估
    Ptr<Estimator> estimator;
    vector<CameraParams> cameras;
    estimator = makePtr<HomographyBasedEstimator>();

    (*estimator)(features, pairwise_matches, cameras);

    for (size_t i = 0; i < cameras.size(); ++i)
    {
        Mat R;
        cameras[i].R.convertTo(R, CV_32F);
        cameras[i].R = R;
    }

    Ptr<detail::BundleAdjusterBase> adjuster;    //相机参数精确评估
    adjuster = new detail::BundleAdjusterReproj();
    adjuster->setConfThresh(1);
    (*adjuster)(features, pairwise_matches, cameras);

    vector<Mat> rmats;
    for (size_t i = 0; i < cameras.size(); ++i)
        rmats.push_back(cameras[i].R.clone());
    waveCorrect(rmats, WAVE_CORRECT_HORIZ);    //波形校正
    for (size_t i = 0; i < cameras.size(); ++i)
        cameras[i].R = rmats[i];

    //图像映射变换
    vector<Point> corners(2);
    vector<UMat> masks_warped(2);
    vector<UMat> images_warped(2);
    vector<Size> sizes(2);
    vector<Mat> masks(2);
    for (int i = 0; i < 2; ++i)
    {
        masks[i].create(imgs[i].size(), CV_8U);
        masks[i].setTo(Scalar::all(255));
    }
    Ptr<WarperCreator> warper_creator;
    warper_creator = new cv::PlaneWarper();
    Ptr<RotationWarper> warper = warper_creator->create(static_cast<float>(cameras[0].focal));
    for (int i = 0; i < 2; ++i)
    {
        Mat_<float> K;
        cameras[i].K().convertTo(K, CV_32F);
        corners[i] = warper->warp(imgs[i], K, cameras[i].R, INTER_LINEAR, BORDER_REFLECT, images_warped[i]);
        sizes[i] = images_warped[i].size();
        warper->warp(masks[i], K, cameras[i].R, INTER_NEAREST, BORDER_CONSTANT, masks_warped[i]);
    }

    //曝光补偿
    Ptr<ExposureCompensator> compensator =
        ExposureCompensator::createDefault(ExposureCompensator::GAIN);
    compensator->feed(corners, images_warped, masks_warped);
    for (int i = 0; i<2; ++i)
    {
        compensator->apply(i, corners[i], images_warped[i], masks_warped[i]);
    }

    //在后面,我们还需要用到映射变换图的掩码masks_warped,因此这里为该变量添加一个副本masks_seam
    vector<UMat> masks_seam(2);
    for (int i = 0; i<2; i++)
        masks_warped[i].copyTo(masks_seam[i]);

    //寻找接缝线
    Ptr<SeamFinder> seam_finder;
    seam_finder = new GraphCutSeamFinder(GraphCutSeamFinder::COST_COLOR_GRAD);
    vector<UMat> images_warped_f(2);
    for (int i = 0; i < 2; ++i)
        images_warped[i].convertTo(images_warped_f[i], CV_32F);
    seam_finder->find(images_warped_f, corners, masks_seam);

    //图像融合
    Ptr<Blender> blender;    //定义图像融合器

     //blender = Blender::createDefault(Blender::NO, false);    //简单融合方法
     /****羽化融合方法*********
     blender = Blender::createDefault(Blender::FEATHER, false);
    FeatherBlender* fb = dynamic_cast<FeatherBlender*>(static_cast<Blender*>(blender));
    fb->setSharpness(0.005);    //设置羽化锐度
    **************/
    blender = Blender::createDefault(Blender::MULTI_BAND, false);    //多频段融合
    MultiBandBlender* mb = dynamic_cast<MultiBandBlender*>(static_cast<Blender*>(blender));
    //设置频段数,即金字塔层数,原则上频段越多越好
    mb->setNumBands(8);

    blender->prepare(corners, sizes);    //生成全景图像区域

    //在融合的时候,最重要的是在接缝线两侧进行处理,而上一步在寻找接缝线后得到的掩码的边界就是接缝线处,因此我们还需要在接缝线两侧开辟一块区域用于融合处理,这一处理过程对羽化方法尤为关键
    //应用膨胀算法缩小掩码面积
    vector<Mat> dilate_img(2);
    Mat element = getStructuringElement(MORPH_RECT, Size(20, 20));    //定义结构元素
    vector<Mat> images_warped_s(2);
    for (int k = 0; k<2; k++)    //遍历所有图像
    {

        images_warped_f[k].convertTo(images_warped_s[k], CV_16S);    //改变数据类型
        dilate(masks_seam[k], masks_seam[k], element);    //膨胀运算
         //映射变换图的掩码和膨胀后的掩码相“与”,从而使扩展的区域仅仅限于接缝线两侧,其他边界处不受影响
        //masks_seam[k] = masks_seam[k] & masks_warped[k];
        Mat wap1, wap2;
        masks_seam[k].copyTo(wap1);
        masks_warped[k].copyTo(wap2);
        wap1 = wap1&wap2;
        wap1.copyTo(masks_seam[k]);

        blender->feed(images_warped_s[k], masks_seam[k], corners[k]);    //初始化数据
    }

    Mat result, result_mask;
    //完成融合操作,得到全景图像result和它的掩码result_mask
    blender->blend(result, result_mask);
    result.convertTo(result,CV_8UC3);
    imwrite("pano.jpg", result);    //存储全景图像
    return 0;
}