#include <iostream>
#include <fstream>
#include <algorithm>
#include <cmath>
#include <cstdio>
#include <cstdlib>
#include <vector>
#include <opencv2/opencv.hpp>
#include "Tools.h"
#include "ResourceManager.h"
#include "RobustMatcher.h"
using namespace cv;
using namespace std;
#define DEFAULT_IDCARD_WIDTH  655
#define DEFAULT_IDCARD_HEIGHT  413
#define  FIX_IDCARD_SIZE Size(DEFAULT_IDCARD_WIDTH,DEFAULT_IDCARD_HEIGHT)
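// Template images for the printed field labels on the FRONT of the ID card,
// filled from ResourceManager inside findIDCard().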
static Mat image_template_xingming;
static Mat image_template_haoma;
static Mat image_template_xingbie;
static Mat image_template_minzu;
static Mat image_template_chusheng;
static Mat image_template_zhuzhi;
static Mat image_template_gongmin;
static Mat image_template_shenfen;
//////////////////////////////////////////
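// Template images for the BACK of the ID card, filled from ResourceManager
// inside findIDCard_Back().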
static Mat image_template_guohui;
static Mat image_template_zhong;
static Mat image_template_guo;
static Mat image_template_ju;
static Mat image_template_zheng;
static Mat image_template_qianfa;
static Mat image_template_jiguan;
static Mat image_template_youxiao;
static Mat image_template_qixian;
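// Equalizes the luminance of a color image: converts BGR to YCrCb, applies
// histogram equalization to the Y channel only, then converts back to BGR.
// Returns an empty Mat if the input has fewer than three channels.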
Mat equalizeIntensityHist(const Mat &inputImage) {
    if (inputImage.channels() >= 3) {
        Mat ycrcb;
        cvtColor(inputImage, ycrcb, COLOR_BGR2YCrCb);
        vector<Mat> channels;
        split(ycrcb, channels);
        equalizeHist(channels[0], channels[0]);
        Mat result;
        merge(channels, ycrcb);
        cvtColor(ycrcb, result, COLOR_YCrCb2BGR);
        return result;
    }
    return Mat();
}
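// Detects line segments in `gray` with cv::LineSegmentDetector and sorts them into
// four groups: near-horizontal segments in the top/bottom third of the image become
// candidate top/bottom edges, near-vertical segments in the left/right quarter become
// candidate left/right edges. Each group is then trimmed to its longest segments.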
void LSDLines(cv::Mat &image, cv::Mat &gray, vector<vector<cv::Vec4i>> &lines) {
    // Maximum deviation from horizontal/vertical, in radians (40.0 / 180 ≈ 0.22 rad ≈ 12.7°;
    // if 40 degrees was intended, a factor of CV_PI appears to be missing).
    const double THETA = 40.0 / 180;
    const int LINENUM = 8;
    // lines[0] top
    // lines[1] bottom
    // lines[2] left
    // lines[3] right
    lines.clear();
    vector<cv::Vec4i> tmplines;
    cv::Ptr<cv::LineSegmentDetector> detector = cv::createLineSegmentDetector(cv::LSD_REFINE_NONE, 0.8, 0.6, 1.5, 40);
    //cv::Ptr<cv::LineSegmentDetector> detector = cv::createLineSegmentDetector(cv::LSD_REFINE_NONE, 0.8, 0.6, 1.5, 23.5);
    detector->detect(gray, tmplines);
    // Sort the detected segments into four groups (top/bottom/left/right) by orientation and position.
    vector<cv::Vec4i> ups;
    vector<cv::Vec4i> downs;
    vector<cv::Vec4i> lefts;
    vector<cv::Vec4i> rights;
    for (size_t i = 0; i < tmplines.size(); ++i) {
        cv::Vec4i &line = tmplines[i];
//        if (line[0] == 0 || line[1] == 0 || line[2] == 0 || line[3] == 0) {
//            continue;
//        }
        //cv::line(gray, cv::Point(line[0], line[1]), cv::Point(line[2], line[3]), cv::Scalar(255 * (i <= 1), 0, 255 * (i>1)), 1, CV_AA);
        int detaX = abs(line[0] - line[2]);
        int detaY = abs(line[1] - line[3]);
        if (detaX > detaY && atan(1.0 * detaY / detaX) < THETA)  // roughly horizontal
        {
            if (std::max(line[1], line[3]) < gray.rows / 3) {
                ups.push_back(line);
                continue;
            }
            if (std::max(line[1], line[3]) > gray.rows * 2 / 3) {
                downs.push_back(line);
                continue;
            }
        }
        if (detaX < detaY && atan(1.0 * detaX / detaY) < THETA) {  // roughly vertical
            if (std::max(line[0], line[2]) < gray.cols / 4) {
                lefts.push_back(line);
                continue;
            }
            if (std::max(line[0], line[2]) > gray.cols * 3 / 4) {
                rights.push_back(line);
                continue;
            }
        }
    }
    // Filter out short segments: keep only the LINENUM longest segments in each group.
    auto removeshortline = [&LINENUM](vector<cv::Vec4i> &lines) {
        if (lines.size() < static_cast<size_t>(LINENUM)) return;
        vector<cv::Vec4i> tmplines;
        tmplines.swap(lines);
        sort(tmplines.begin(), tmplines.end(), [](const cv::Vec4i &line1, const cv::Vec4i &line2) {
            return (pow(abs(line1[0] - line1[2]), 2) + pow(abs(line1[1] - line1[3]), 2)) >
                   (pow(abs(line2[0] - line2[2]), 2) + pow(abs(line2[1] - line2[3]), 2));
        });
        lines.insert(lines.begin(), tmplines.begin(), tmplines.begin() + LINENUM);
    };
    removeshortline(ups);
    lines.push_back(ups);
    removeshortline(downs);
    lines.push_back(downs);
    removeshortline(lefts);
    lines.push_back(lefts);
    removeshortline(rights);
    lines.push_back(rights);
//    for (auto l:ups) {
//        line(image, Point(l[0], l[1]), Point(l[2], l[3]), Scalar(0, 0, 255));
//    }
//    for (auto l:downs) {
//        line(image, Point(l[0], l[1]), Point(l[2], l[3]), Scalar(0, 0, 255));
//    }
//    for (auto l:lefts) {
//        line(image, Point(l[0], l[1]), Point(l[2], l[3]), Scalar(0, 0, 255));
//    }
//    for (auto l:rights) {
//        line(image, Point(l[0], l[1]), Point(l[2], l[3]), Scalar(0, 0, 255));
//    }
//    cv::imwrite("d:\\test123.jpg", image);
}
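// Collapses each of the four segment groups into a single edge via Tools::MergeLines;
// the third argument (i >= 2) marks the vertical groups (left/right), presumably so the
// helper can merge along the appropriate axis.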
void mergeLines(vector<vector<cv::Vec4i>> &lines, vector<cv::Vec4i> &edges) {
    for (size_t i = 0; i < lines.size(); ++i) {
        cv::Vec4i edge;
        Tools::MergeLines(lines[i], edge, static_cast<int>(i >= 2));
        edges.push_back(edge);
    }
}
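// Finds the four card edges in `image`: converts to grayscale if necessary, runs
// LSDLines to group candidate segments, then merges each group into one edge
// (order: top, bottom, left, right).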
void detect(cv::Mat &image, vector<cv::Vec4i> &edges) {
    edges.clear();
    // edges[0] top
    // edges[1] bottom
    // edges[2] left
    // edges[3] right
    cv::Mat gray;
    //image = equalizeIntensityHist(image);
    //GaussianBlur(image, image, cv::Size(3, 3), 0, 0);
    if (image.channels() != 1) {
        cvtColor(image, gray, COLOR_BGR2GRAY);
    } else {
        gray = image.clone();
    }
    // edge enhancement (the block below is currently disabled)
//    cv::Mat gray_x, gray_y;
//    cv::Sobel(gray, gray_x, CV_16S, 1, 0, 3);
//    gray_x = cv::abs(gray_x);
//    gray_x.convertTo(gray_x, CV_8U);
//    cv::Sobel(gray, gray_y, CV_16S, 0, 1, 3);
//    gray_y = cv::abs(gray_y);
//    gray_y.convertTo(gray_y, CV_8U);
//    cv::add(gray, gray_x, gray);
//    cv::add(gray, gray_y, gray);
//    cv::Mat blurMat, blurMat16;
//    cv::GaussianBlur(gray, blurMat, cv::Size(3, 3), 0, 0);
//    cv::Laplacian(blurMat, blurMat16, CV_16S);
//    blurMat16 = cv::abs(blurMat16);
//    blurMat16.convertTo(blurMat, CV_8U);
//    cv::add(gray, blurMat, gray);
    vector<vector<cv::Vec4i>> lines;
    LSDLines(image, gray, lines);
    if (lines.size() != 4) return;
    mergeLines(lines, edges);
}
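// Intersection of the two segments (treated as infinite lines), each given as
// (x1, y1, x2, y2); returns (-1, -1) when the lines are parallel.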
Point2f computeIntersect(cv::Vec4i a,
                         cv::Vec4i b) {
    int x1 = a[0], y1 = a[1], x2 = a[2], y2 = a[3], x3 = b[0], y3 = b[1], x4 = b[2], y4 = b[3];
    if (float d = ((float) (x1 - x2) * (y3 - y4)) - ((y1 - y2) * (x3 - x4))) {
        cv::Point2f pt;
        pt.x = ((x1 * y2 - y1 * x2) * (x3 - x4) - (x1 - x2) * (x3 * y4 - y3 * x4)) / d;
        pt.y = ((x1 * y2 - y1 * x2) * (y3 - y4) - (y1 - y2) * (x3 * y4 - y3 * x4)) / d;
        return pt;
    } else
        return cv::Point2f(-1, -1);
}
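// Cosine of the angle formed at pt0 by the vectors pt0->pt1 and pt0->pt2;
// used below to check how close a quadrilateral corner is to 90 degrees.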
double angle(cv::Point pt1, cv::Point pt2, cv::Point pt0) {
    double dx1 = pt1.x - pt0.x;
    double dy1 = pt1.y - pt0.y;
    double dx2 = pt2.x - pt0.x;
    double dy2 = pt2.y - pt0.y;
    return (dx1 * dx2 + dy1 * dy2) / sqrt((dx1 * dx1 + dy1 * dy1) * (dx2 * dx2 + dy2 * dy2) + 1e-10);
}
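// Finds large convex quadrilaterals (candidate card outlines), adapted from the
// OpenCV "squares.cpp" sample: sharpen with Sobel/Laplacian, dilate, equalize the
// luminance, then for every color plane run Canny plus a series of thresholds,
// extract contours, approximate them to polygons, and keep convex 4-gons that cover
// more than 10% of the image and whose corner cosines stay below 0.18.
// Note: `blurred` initially shares its pixel buffer with `image`, so the input is
// modified in place.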
void find_squares(Mat &image, vector<vector<Point> > &squares) {
    squares.clear();
    Mat pyr, timg, gray0(image.size(), CV_8U), gray;
    // karlphillip: dilate the image so this technique can detect the white square,
    Mat blurred(image);
    // Edge enhancement: add the absolute Sobel gradients and a Laplacian response to the image.
    cv::Mat gray_x, gray_y;
    cv::Sobel(blurred, gray_x, CV_16S, 1, 0, 3);
    gray_x = cv::abs(gray_x);
    gray_x.convertTo(gray_x, CV_8U);
    cv::Sobel(blurred, gray_y, CV_16S, 0, 1, 3);
    gray_y = cv::abs(gray_y);
    gray_y.convertTo(gray_y, CV_8U);
    cv::add(blurred, gray_x, blurred);
    cv::add(blurred, gray_y, blurred);
    cv::Mat blurMat, blurMat16;
    cv::GaussianBlur(blurred, blurMat, cv::Size(3, 3), 0, 0);
    cv::Laplacian(blurMat, blurMat16, CV_16S);
    blurMat16 = cv::abs(blurMat16);
    blurMat16.convertTo(blurMat, CV_8U);
    cv::add(blurred, blurMat, blurred);
//
    dilate(blurred, blurred, Mat(), Point(-1, -1), 3, 1, 1);
//    // then blur it so that the ocean/sea become one big segment to avoid detecting them as 2 big squares.
    //medianBlur(blurred, blurred, 3);
//    // down-scale and upscale the image to filter out the noise
//    pyrDown(blurred, pyr, Size(blurred.cols / 2, blurred.rows / 2));
//    pyrUp(pyr, timg, blurred.size());
    blurred = equalizeIntensityHist(blurred);
    //MyGammaCorrection(blurred, blurred, 5);
//    Mat imageGamma(blurred.size(), CV_32FC3);
//    for (int i = 0; i < blurred.rows; i++)
//    {
//        for (int j = 0; j < blurred.cols; j++)
//        {
//            imageGamma.at<Vec3f>(i, j)[0] = (blurred.at<Vec3b>(i, j)[0])*(blurred.at<Vec3b>(i, j)[0])*(blurred.at<Vec3b>(i, j)[0]);
//            imageGamma.at<Vec3f>(i, j)[1] = (blurred.at<Vec3b>(i, j)[1])*(blurred.at<Vec3b>(i, j)[1])*(blurred.at<Vec3b>(i, j)[1]);
//            imageGamma.at<Vec3f>(i, j)[2] = (blurred.at<Vec3b>(i, j)[2])*(blurred.at<Vec3b>(i, j)[2])*(blurred.at<Vec3b>(i, j)[2]);
//        }
//    }
//    //归一化到0~255
//    normalize(imageGamma, imageGamma, 0, 255, CV_MINMAX);
//    convertScaleAbs(imageGamma, blurred);
    //cvtColor(blurred, blurred, COLOR_BGR2GRAY);
    //adaptiveThreshold(blurred, blurred, 255, ADAPTIVE_THRESH_MEAN_C, THRESH_BINARY, 39, 3);
    //imwrite("blurred.jpg", blurred);
    vector<vector<Point> > contours;
    // find squares in every color plane of the image
    for (int c = 0; c < 3; c++) {
        int ch[] = {c, 0};
        mixChannels(&blurred, 1, &gray0, 1, ch, 1);
        // try several threshold levels
        const int threshold_level = 60;
        for (int l = 0; l < threshold_level; l++) {
            // Use Canny instead of zero threshold level!
            // Canny helps to catch squares with gradient shading
            if (l == 0) {
                Canny(gray0, gray, 0, 254, 3); //
                // Dilate helps to remove potential holes between edge segments
                dilate(gray, gray, Mat(), Point(-1, -1));
            } else {
                gray = gray0 >= (l + 1) * 255 / threshold_level;
            }
            // Find contours and store them in a list
            findContours(gray, contours, RETR_LIST, CHAIN_APPROX_SIMPLE);
            // Test contours
            vector<Point> approx;
            for (size_t i = 0; i < contours.size(); i++) {
                // approximate contour with accuracy proportional
                // to the contour perimeter
                approxPolyDP(Mat(contours[i]), approx, arcLength(Mat(contours[i]), true) * 0.06, true);
                // Note: absolute value of an area is used because
                // area may be positive or negative - in accordance with the
                // contour orientation
                //isContourConvex(Mat(approx)
                //approx.size() == 4
                //approx.size() >= 4 &&  approx.size() <=6
                if (approx.size() == 4 && isContourConvex(Mat(approx)) &&
                    fabs(contourArea(Mat(approx))) > gray.rows * gray.cols * 0.1) {
                    double maxCosine = 0;
                    //squares.push_back(approx);
                    for (int j = 2; j < 5; j++) {
                        double cosine = fabs(angle(approx[j % 4], approx[j - 2], approx[j - 1]));
                        maxCosine = MAX(maxCosine, cosine);
                    }
                    if (maxCosine < 0.18) {
                        squares.push_back(approx);
                    }
                }
            }
        }
    }
}
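// Debug helper: draws every detected quadrilateral onto `img` in red.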
void draw_squares(Mat &img, const vector<vector<Point> > &squares) {
    for (size_t i = 0; i < squares.size(); i++) {
        for (size_t j = 0; j < squares[i].size(); j++) {
            cv::line(img, squares[i][j], squares[i][(j + 1) % 4], cv::Scalar(0, 0, 255), 1, LINE_AA);
        }
    }
}
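// Absolute slope |dy/dx| of the segment pt0->pt1 (used only by the commented-out
// Hough-line experiments in main).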
double angle(cv::Point pt0, cv::Point pt1) {
    double dx1 = pt1.x - pt0.x;
    double dy1 = pt1.y - pt0.y;
    return abs(dy1 / dx1);
}
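// Simple gray-world white balance: scales each BGR channel so that the three
// channel means become equal.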
Mat whiteBalance(const Mat &source) {
    Mat result;
    vector<Mat> g_vChannels;
    // split the image into its B, G, R channels
    split(source, g_vChannels);
    Mat imageBlueChannel = g_vChannels[0];
    Mat imageGreenChannel = g_vChannels[1];
    Mat imageRedChannel = g_vChannels[2];
    double imageBlueChannelAvg = 0;
    double imageGreenChannelAvg = 0;
    double imageRedChannelAvg = 0;
    // mean value of each channel
    imageBlueChannelAvg = mean(imageBlueChannel)[0];
    imageGreenChannelAvg = mean(imageGreenChannel)[0];
    imageRedChannelAvg = mean(imageRedChannel)[0];
    // compute the gain for each channel (gray-world assumption)
    double K = (imageBlueChannelAvg + imageGreenChannelAvg + imageRedChannelAvg) / 3;
    double Kb = K / imageBlueChannelAvg;
    double Kg = K / imageGreenChannelAvg;
    double Kr = K / imageRedChannelAvg;
    // apply the gains to the B, G, R channels
    imageBlueChannel *= Kb;
    imageGreenChannel *= Kg;
    imageRedChannel *= Kr;
    merge(g_vChannels, result); // merge the channels back together
    return result;
}
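// Verifies that `imageRoi` is the FRONT of an ID card by template-matching the
// printed field labels near their expected positions (from ResourceManager).
// Returns true only if one label from each group ("姓名", "性别"/"民族",
// "出生"/"住址", and "号码"/"公民"/"身份") matches with a normalized correlation
// above 0.7 within 60 px of its reference point. The input is converted to
// grayscale, blurred and resized to FIX_IDCARD_SIZE in place.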
bool findIDCard(Mat &imageRoi) {
    int foundCount1 = 0;
    int foundCount2 = 0;
    int foundCount3 = 0;
    //int foundCount4 = 0;
    int foundCount5 = 0;
    Mat src = imageRoi.clone();
    cvtColor(imageRoi, imageRoi, COLOR_BGR2GRAY);
    cv::GaussianBlur(imageRoi, imageRoi, Size(3, 3), 0);
    resize(imageRoi, imageRoi, FIX_IDCARD_SIZE);
    resize(src, src, FIX_IDCARD_SIZE);
    //resize(image_template_xingming, image_template_xingming, FIX_TEMPLATE_SIZE2);
    image_template_xingming = ResourceManager::getInstance().image_template_xingming;
    image_template_haoma = ResourceManager::getInstance().image_template_haoma;
    image_template_xingbie = ResourceManager::getInstance().image_template_xingbie;
    image_template_minzu = ResourceManager::getInstance().image_template_minzu;
    image_template_chusheng = ResourceManager::getInstance().image_template_chusheng;
    image_template_zhuzhi = ResourceManager::getInstance().image_template_zhuzhi;
    image_template_gongmin = ResourceManager::getInstance().image_template_gongmin;
    image_template_shenfen = ResourceManager::getInstance().image_template_shenfen;
//    image_template_nian = ResourceManager::getInstance().image_template_nian;
//    image_template_yue = ResourceManager::getInstance().image_template_yue;
//    image_template_ri = ResourceManager::getInstance().image_template_ri;
    int xOffsetMax = 60;
    int yOffsetMax = 60;
    //////////// "姓名" (name) /////////////
    Mat image_matched;
    matchTemplate(imageRoi, image_template_xingming, image_matched, TM_CCOEFF_NORMED);
    static Point xingming_point = ResourceManager::getInstance().xingming_point;
    double minVal, maxVal;
    Point minPt, maxPt;
    minMaxLoc(image_matched, &minVal, &maxVal, &minPt, &maxPt);
    double minMatchQuality = 0.7;
    if (maxVal > minMatchQuality && (abs(maxPt.x - xingming_point.x) < xOffsetMax) &&
        (abs(maxPt.y - xingming_point.y) < yOffsetMax)) {
//        rectangle(src,
//                  Rect(maxPt.x, maxPt.y, image_template_xingming.cols,
//                       image_template_xingming.rows),
//                  255);
        foundCount1++;
    }
    if (foundCount1 == 0) {
        return false;
    }
    /////////////////// "性别" (gender) /////////////////
    static Point xingbie_point = ResourceManager::getInstance().xingbie_point;
    matchTemplate(imageRoi, image_template_xingbie, image_matched, TM_CCOEFF_NORMED);
    minMaxLoc(image_matched, &minVal, &maxVal, &minPt, &maxPt);
    if (maxVal > minMatchQuality && (abs(maxPt.x - xingbie_point.x) < xOffsetMax) &&
        (abs(maxPt.y - xingbie_point.y) < yOffsetMax)) {
//        rectangle(src,
//                  Rect(maxPt.x, maxPt.y, image_template_xingbie.cols, image_template_xingbie.rows),
//                  255);
        foundCount2++;
    }
    if (foundCount2 == 0) {
        /////////////////// "民族" (ethnicity) /////////////////
        Point minzu_point = ResourceManager::getInstance().minzu_point;
        matchTemplate(imageRoi, image_template_minzu, image_matched, TM_CCOEFF_NORMED);
        minMaxLoc(image_matched, &minVal, &maxVal, &minPt, &maxPt);
        if (maxVal > minMatchQuality && (abs(maxPt.x - minzu_point.x) < xOffsetMax) &&
            (abs(maxPt.y - minzu_point.y) < yOffsetMax)) {
//            rectangle(src,
//                      Rect(maxPt.x, maxPt.y, image_template_minzu.cols, image_template_minzu.rows),
//                      255);
            foundCount2++;
        }
    }
    if (foundCount2 == 0) {
        return false;
    }
    /////////////////// "出生" (date of birth) /////////////////
    Point chusheng_point = ResourceManager::getInstance().chusheng_point;
    matchTemplate(imageRoi, image_template_chusheng, image_matched, TM_CCOEFF_NORMED);
    minMaxLoc(image_matched, &minVal, &maxVal, &minPt, &maxPt);
    if (maxVal > minMatchQuality && (abs(maxPt.x - chusheng_point.x) < xOffsetMax) &&
        (abs(maxPt.y - chusheng_point.y) < yOffsetMax)) {
//        rectangle(src,
//                  Rect(maxPt.x, maxPt.y, image_template_chusheng.cols,
//                       image_template_chusheng.rows),
//                  255);
        foundCount3++;
    }
    if (foundCount3 == 0) {
        /////////////////// "住址" (address) /////////////////
        Point zhuzhi_point = ResourceManager::getInstance().zhuzhi_point;
        matchTemplate(imageRoi, image_template_zhuzhi, image_matched, TM_CCOEFF_NORMED);
        minMaxLoc(image_matched, &minVal, &maxVal, &minPt, &maxPt);
        if (maxVal > minMatchQuality && (abs(maxPt.x - zhuzhi_point.x) < xOffsetMax) &&
            (abs(maxPt.y - zhuzhi_point.y) < yOffsetMax)) {
//            rectangle(src,
//                      Rect(maxPt.x, maxPt.y, image_template_zhuzhi.cols,
//                           image_template_zhuzhi.rows),
//                      255);
            foundCount3++;
        }
/*        if (foundCount3 == 0) {
            ///////////////////年/////////////////
            Point nian_point = ResourceManager::getInstance().nian_point;
            matchTemplate(imageRoi, image_template_nian, image_matched, TM_CCOEFF_NORMED);
            minMaxLoc(image_matched, &minVal, &maxVal, &minPt, &maxPt);
            if (maxVal > minMatchQuality && (abs(maxPt.x - nian_point.x) < xOffsetMax) &&
                (abs(maxPt.y - nian_point.y) < yOffsetMax)) {
//                rectangle(src,
//                          Rect(maxPt.x, maxPt.y, image_template_nian.cols,
//                               image_template_nian.rows),
//                          255);
                foundCount3++;
            }
            if (foundCount3 == 0) {
                ///////////////////月/////////////////
                Point yue_point = ResourceManager::getInstance().yue_point;
                matchTemplate(imageRoi, image_template_yue, image_matched, TM_CCOEFF_NORMED);
                minMaxLoc(image_matched, &minVal, &maxVal, &minPt, &maxPt);
                if (maxVal > minMatchQuality && (abs(maxPt.x - yue_point.x) < xOffsetMax) &&
                    (abs(maxPt.y - yue_point.y) < yOffsetMax)) {
//                    rectangle(src,
//                              Rect(maxPt.x, maxPt.y, image_template_yue.cols,
//                                   image_template_yue.rows),
//                              255);
                    foundCount3++;
                }
                if (foundCount3 == 0) {
                    ///////////////////日/////////////////
                    Point ri_point = ResourceManager::getInstance().ri_point;
                    matchTemplate(imageRoi, image_template_ri, image_matched, TM_CCOEFF_NORMED);
                    minMaxLoc(image_matched, &minVal, &maxVal, &minPt, &maxPt);
                    if (maxVal > minMatchQuality && (abs(maxPt.x - ri_point.x) < xOffsetMax) &&
                        (abs(maxPt.y - ri_point.y) < yOffsetMax)) {
//                        rectangle(src,
//                                  Rect(maxPt.x, maxPt.y, image_template_ri.cols,
//                                       image_template_ri.rows),
//                                  255);
                        foundCount3++;
                    }
                }
            }
        }*/
    }
    if (foundCount3 == 0) {
        return false;
    }
    /////////////////// "号码" (number) /////////////////
    Point haoma_point = ResourceManager::getInstance().haoma_point;
    matchTemplate(imageRoi, image_template_haoma, image_matched, TM_CCOEFF_NORMED);
    minMaxLoc(image_matched, &minVal, &maxVal, &minPt, &maxPt);
    if (maxVal > minMatchQuality && (abs(maxPt.x - haoma_point.x) < xOffsetMax) &&
        (abs(maxPt.y - haoma_point.y) < yOffsetMax)) {
//        rectangle(src,
//                  Rect(maxPt.x, maxPt.y, image_template_haoma.cols, image_template_haoma.rows),
//                  255);
        foundCount5++;
        // crop out the ID number region
//        Rect rect = Rect(maxPt.x + image_template_haoma.cols + 10, maxPt.y - 5,
//                         imageRoi.cols - (maxPt.x + image_template_haoma.cols) - 20,
//                         image_template_haoma.rows + 10);
//        imwrite("/sdcard/id_num.jpg", src(rect));
    }
    if (foundCount5 == 0) {
        /////////////////// "公民" (citizen) /////////////////
        Point gongmin_point = ResourceManager::getInstance().gongmin_point;
        matchTemplate(imageRoi, image_template_gongmin, image_matched, TM_CCOEFF_NORMED);
        minMaxLoc(image_matched, &minVal, &maxVal, &minPt, &maxPt);
        if (maxVal > minMatchQuality && (abs(maxPt.x - gongmin_point.x) < xOffsetMax) &&
            (abs(maxPt.y - gongmin_point.y) < yOffsetMax)) {
//            rectangle(src,
//                      Rect(maxPt.x, maxPt.y, image_template_gongmin.cols,
//                           image_template_gongmin.rows),
//                      255);
            foundCount5++;
        }
        if (foundCount5 == 0) {
            /////////////////// "身份" (identity) /////////////////
            Point shenfen_point = ResourceManager::getInstance().shenfen_point;
            matchTemplate(imageRoi, image_template_shenfen, image_matched, TM_CCOEFF_NORMED);
            minMaxLoc(image_matched, &minVal, &maxVal, &minPt, &maxPt);
            if (maxVal > minMatchQuality && (abs(maxPt.x - shenfen_point.x) < xOffsetMax) &&
                (abs(maxPt.y - shenfen_point.y) < yOffsetMax)) {
//                rectangle(src,
//                          Rect(maxPt.x, maxPt.y, image_template_shenfen.cols,
//                               image_template_shenfen.rows),
//                          255);
                foundCount5++;
            }
        }
    }
    if (foundCount5 == 0) {
        return false;
    }
    //imwrite(ResourceManager::getInstance().ConfigDirPath + "output_front.jpg", src);
    return true;
}
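// Back-side counterpart of findIDCard: matches the national emblem or the "中"/"居"
// characters on the left, "国"/"证" on the right, and the "签发"/"机关" and
// "期限"/"有效" labels at the bottom (correlation above 0.7, offsets within 25/20 px).
// The frame is rejected if the two bottom matches land on almost the same row.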
bool findIDCard_Back(Mat &imageRoi) {
    int foundBottomCount1 = 0;
    int foundBottomCount2 = 0;
    int foundLeftCount = 0;
    int foundRightCount = 0;
    Mat src = imageRoi.clone();
    cvtColor(imageRoi, imageRoi, COLOR_BGR2GRAY);
    cv::GaussianBlur(imageRoi, imageRoi, Size(3, 3), 0);
//    static Mat image_template_guohui = ResourceManager::getInstance().image_template_guohui;
//    static Mat image_template_zhong = ResourceManager::getInstance().image_template_zhong;
//    static Mat image_template_guo = ResourceManager::getInstance().image_template_guo;
//    static Mat image_template_ju = ResourceManager::getInstance().image_template_ju;
//    static Mat image_template_zheng = ResourceManager::getInstance().image_template_zheng;
//    static Mat image_template_qianfa = ResourceManager::getInstance().image_template_qianfa;
//    static Mat image_template_jiguan = ResourceManager::getInstance().image_template_jiguan;
//    static Mat image_template_youxiao = ResourceManager::getInstance().image_template_youxiao;
//    static Mat image_template_qixian = ResourceManager::getInstance().image_template_qixian;
    image_template_guohui = ResourceManager::getInstance().image_template_guohui;
    image_template_zhong = ResourceManager::getInstance().image_template_zhong;
    image_template_guo = ResourceManager::getInstance().image_template_guo;
    image_template_ju = ResourceManager::getInstance().image_template_ju;
    image_template_zheng = ResourceManager::getInstance().image_template_zheng;
    image_template_qianfa = ResourceManager::getInstance().image_template_qianfa;
    image_template_jiguan = ResourceManager::getInstance().image_template_jiguan;
    image_template_youxiao = ResourceManager::getInstance().image_template_youxiao;
    image_template_qixian = ResourceManager::getInstance().image_template_qixian;
    int xOffsetMax = 25;
    int yOffsetMax = 20;
    resize(imageRoi, imageRoi, FIX_IDCARD_SIZE);
    resize(src, src, FIX_IDCARD_SIZE);
    //////////// national emblem (国徽) /////////////
    Mat image_matched;
    matchTemplate(imageRoi, image_template_guohui, image_matched, TM_CCOEFF_NORMED);
    Point guohui_point = ResourceManager::getInstance().guohui_point;
    double minVal, maxVal;
    Point minPt, maxPt;
    minMaxLoc(image_matched, &minVal, &maxVal, &minPt, &maxPt);
    double minMatchQuality = 0.7;
    if (maxVal > minMatchQuality && (abs(maxPt.x - guohui_point.x) < xOffsetMax) &&
        (abs(maxPt.y - guohui_point.y) < yOffsetMax)) {
//        rectangle(src,
//                  Rect(maxPt.x, maxPt.y, image_template_guohui.cols, image_template_guohui.rows),
//                  255);
        foundLeftCount++;
    }
    if (foundLeftCount == 0) {
        /////////////////// "中" character /////////////////
        Point zhong_point = ResourceManager::getInstance().zhong_point;
        matchTemplate(imageRoi, image_template_zhong, image_matched, TM_CCOEFF_NORMED);
        minMaxLoc(image_matched, &minVal, &maxVal, &minPt, &maxPt);
        if (maxVal > minMatchQuality && (abs(maxPt.x - zhong_point.x) < xOffsetMax) &&
            (abs(maxPt.y - zhong_point.y) < yOffsetMax)) {
//            rectangle(src,
//                      Rect(maxPt.x, maxPt.y, image_template_zhong.cols, image_template_zhong.rows),
//                      255);
            foundLeftCount++;
        }
        if (foundLeftCount == 0) {
            /////////////////// "居" character /////////////////
            Point ju_point = ResourceManager::getInstance().ju_point;
            matchTemplate(imageRoi, image_template_ju, image_matched, TM_CCOEFF_NORMED);
            minMaxLoc(image_matched, &minVal, &maxVal, &minPt, &maxPt);
            if (maxVal > minMatchQuality && (abs(maxPt.x - ju_point.x) < xOffsetMax) &&
                (abs(maxPt.y - ju_point.y) < yOffsetMax)) {
//                rectangle(src,
//                          Rect(maxPt.x, maxPt.y, image_template_ju.cols, image_template_ju.rows),
//                          255);
                foundLeftCount++;
            }
        }
    }
    if (foundLeftCount == 0) {
        return false;
    }
    /////////////////// "国" character /////////////////
    Point guo_point = ResourceManager::getInstance().guo_point;
    matchTemplate(imageRoi, image_template_guo, image_matched, TM_CCOEFF_NORMED);
    minMaxLoc(image_matched, &minVal, &maxVal, &minPt, &maxPt);
    if (maxVal > minMatchQuality && (abs(maxPt.x - guo_point.x) < xOffsetMax) &&
        (abs(maxPt.y - guo_point.y) < yOffsetMax)) {
//        rectangle(src,
//                  Rect(maxPt.x, maxPt.y, image_template_guo.cols, image_template_guo.rows),
//                  255);
        foundRightCount++;
    }
    if (foundRightCount == 0) {
        /////////////////// "证" character /////////////////
        Point zheng_point = ResourceManager::getInstance().zheng_point;
        matchTemplate(imageRoi, image_template_zheng, image_matched, TM_CCOEFF_NORMED);
        minMaxLoc(image_matched, &minVal, &maxVal, &minPt, &maxPt);
        if (maxVal > minMatchQuality && (abs(maxPt.x - zheng_point.x) < xOffsetMax) &&
            (abs(maxPt.y - zheng_point.y) < yOffsetMax)) {
//            rectangle(src,
//                      Rect(maxPt.x, maxPt.y, image_template_zheng.cols, image_template_zheng.rows),
//                      255);
            foundRightCount++;
        }
    }
    if (foundRightCount == 0) {
        return false;
    }
    /////////////////// "签发" (issued by) /////////////////
    Point pointFound1;
    Point qianfa_point = ResourceManager::getInstance().qianfa_point;
    matchTemplate(imageRoi, image_template_qianfa, image_matched, TM_CCOEFF_NORMED);
    minMaxLoc(image_matched, &minVal, &maxVal, &minPt, &maxPt);
    if (maxVal > minMatchQuality && (abs(maxPt.x - qianfa_point.x) < xOffsetMax) &&
        (abs(maxPt.y - qianfa_point.y) < yOffsetMax)) {
//        rectangle(src,
//                  Rect(maxPt.x, maxPt.y, image_template_qianfa.cols, image_template_qianfa.rows),
//                  255);
        foundBottomCount1++;
        pointFound1 = maxPt;
    }
    if (foundBottomCount1 == 0) {
        /////////////////// "机关" (authority) /////////////////
        Point jiguan_point = ResourceManager::getInstance().jiguan_point;
        matchTemplate(imageRoi, image_template_jiguan, image_matched, TM_CCOEFF_NORMED);
        minMaxLoc(image_matched, &minVal, &maxVal, &minPt, &maxPt);
        if (maxVal > minMatchQuality && (abs(maxPt.x - jiguan_point.x) < xOffsetMax) &&
            (abs(maxPt.y - jiguan_point.y) < yOffsetMax)) {
//            rectangle(src,
//                      Rect(maxPt.x, maxPt.y, image_template_jiguan.cols,
//                           image_template_jiguan.rows),
//                      255);
            foundBottomCount1++;
            pointFound1 = maxPt;
        }
    }
    if (foundBottomCount1 == 0) {
        return false;
    }
    /////////////////// "期限" (period) /////////////////
    Point qixian_point = ResourceManager::getInstance().qixian_point;
    matchTemplate(imageRoi, image_template_qixian, image_matched, TM_CCOEFF_NORMED);
    minMaxLoc(image_matched, &minVal, &maxVal, &minPt, &maxPt);
    if (maxVal > minMatchQuality && (abs(maxPt.x - qixian_point.x) < xOffsetMax) &&
        (abs(maxPt.y - qixian_point.y) < yOffsetMax)) {
//        rectangle(src,
//                  Rect(maxPt.x, maxPt.y, image_template_qixian.cols,
//                       image_template_qixian.rows),
//                  255);
        foundBottomCount2++;
    }
    if (foundBottomCount2 == 0) {
        /////////////////// "有效" (valid) /////////////////
        Point youxiao_point = ResourceManager::getInstance().youxiao_point;
        matchTemplate(imageRoi, image_template_youxiao, image_matched, TM_CCOEFF_NORMED);
        minMaxLoc(image_matched, &minVal, &maxVal, &minPt, &maxPt);
        if (maxVal > minMatchQuality && (abs(maxPt.x - youxiao_point.x) < xOffsetMax) &&
            (abs(maxPt.y - youxiao_point.y) < yOffsetMax)) {
//            rectangle(src,
//                      Rect(maxPt.x, maxPt.y, image_template_youxiao.cols,
//                           image_template_youxiao.rows),
//                      255);
            foundBottomCount2++;
        }
    }
    if (foundBottomCount2 == 0) {
        return false;
    } else {
        if (abs(maxPt.y - pointFound1.y) < 10) {
            return false;
        }
    }
    //imwrite(ResourceManager::getInstance().ConfigDirPath + "output_back.jpg", src);
    return true;
}
// Copy this file from opencv/data/haarscascades to target folder
//string face_cascade_name = "haarcascade_eye.xml";
//string face_cascade_name = "haarcascade_frontalface_alt2.xml";
//CascadeClassifier face_cascade;
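// Rotates `mat` by `angle` degrees around its center (with a fixed 0.9 scale factor);
// returns the input unchanged when angle == 0.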
Mat rotateMat(Mat mat, int angle) {
    if (angle == 0) {
        return mat;
    }
    Mat M = getRotationMatrix2D(Point2f(mat.cols / 2, mat.rows / 2), angle, 0.9);
    Mat rot_mat;
    warpAffine(mat, rot_mat, M, Size(mat.cols, mat.rows), INTER_LINEAR);
    return rot_mat;
}
// Function detectAndDisplay
//void detectAndDisplay(Mat frame) {
//    std::vector<Rect> faces;
//    Mat frame_gray;
//    Mat crop;
//
//    cvtColor(frame, frame_gray, COLOR_BGR2GRAY);
//    equalizeHist(frame_gray, frame_gray);
//
//    Mat rotatedMat;
//
//    for (int angle = 0; angle < 180; angle += 30) {
//        rotatedMat = rotateMat(frame_gray, angle);
//
//        // Detect faces
//        face_cascade.detectMultiScale(rotatedMat, faces, 1.1, 4,
//                                      0 |
//                                      CV_HAAR_DO_ROUGH_SEARCH,
//                                      Size(30, 30));
//
//        if (faces.size() < 1) {
//            continue;
//        }
//
//        // Set Region of Interest
//        cv::Rect roi_b;
//        cv::Rect roi_c;
//
//        size_t ic = 0; // ic is index of current element
//        int ac = 0; // ac is area of current element
//
//        size_t ib = 0; // ib is index of biggest element
//        int ab = 0; // ab is area of biggest element
//
//        std::cout << "* " << faces.size() << " faces were found." << angle << std::endl;
//
//        for (ic = 0; ic < faces.size(); ic++) // Iterate through all current elements (detected faces)
//        {
//            roi_c.x = faces[ic].x;
//            roi_c.y = faces[ic].y;
//            roi_c.width = (faces[ic].width);
//            roi_c.height = (faces[ic].height);
//
//            ac = roi_c.width * roi_c.height; // Get the area of current element (detected face)
//
//            roi_b.x = faces[ib].x;
//            roi_b.y = faces[ib].y;
//            roi_b.width = (faces[ib].width);
//            roi_b.height = (faces[ib].height);
//
//            ab = roi_b.width *
//                 roi_b.height; // Get the area of biggest element, at beginning it is same as "current" element
//
//            if (ac > ab) {
//                ib = ic;
//                roi_b.x = faces[ib].x;
//                roi_b.y = faces[ib].y;
//                roi_b.width = (faces[ib].width);
//                roi_b.height = (faces[ib].height);
//            }
//
////        Point pt1(faces[ic].x, faces[ic].y); // Display detected faces on main window - live stream from camera
////        Point pt2((faces[ic].x + faces[ic].height), (faces[ic].y + faces[ic].width));
////        rectangle(frame, pt1, pt2, Scalar(0, 255, 0), 2, 8, 0);
////        imwrite("frame.jpg", frame);
//        }
//
//        if (roi_b.area() > 0) {
//            frame = rotateMat(frame, angle);
//            crop = frame(roi_b);
//            //resize(crop, res, Size(128, 128), 0, 0, INTER_LINEAR); // This will be needed later while saving images
//            //cvtColor(crop, gray, CV_BGR2GRAY); // Convert cropped image to Grayscale
//
//            imwrite("face.jpg", crop);
//            imwrite("frame.jpg", frame);
//            return;
//        }
//    }
//
//    for (int angle = -30; angle > -180; angle -= 30) {
//        rotatedMat = rotateMat(frame_gray, angle);
//
//        // Detect faces
//        face_cascade.detectMultiScale(rotatedMat, faces, 1.1, 4,
//                                      0 |
//                                      CV_HAAR_DO_ROUGH_SEARCH,
//                                      Size(30, 30));
//
//        if (faces.size() < 1) {
//            continue;
//        }
//
//        // Set Region of Interest
//        cv::Rect roi_b;
//        cv::Rect roi_c;
//
//        size_t ic = 0; // ic is index of current element
//        int ac = 0; // ac is area of current element
//
//        size_t ib = 0; // ib is index of biggest element
//        int ab = 0; // ab is area of biggest element
//
//        std::cout << "* " << faces.size() << " faces were found." << angle << std::endl;
//
//        for (ic = 0; ic < faces.size(); ic++) // Iterate through all current elements (detected faces)
//        {
//            roi_c.x = faces[ic].x;
//            roi_c.y = faces[ic].y;
//            roi_c.width = (faces[ic].width);
//            roi_c.height = (faces[ic].height);
//
//            ac = roi_c.width * roi_c.height; // Get the area of current element (detected face)
//
//            roi_b.x = faces[ib].x;
//            roi_b.y = faces[ib].y;
//            roi_b.width = (faces[ib].width);
//            roi_b.height = (faces[ib].height);
//
//            ab = roi_b.width *
//                 roi_b.height; // Get the area of biggest element, at beginning it is same as "current" element
//
//            if (ac > ab) {
//                ib = ic;
//                roi_b.x = faces[ib].x;
//                roi_b.y = faces[ib].y;
//                roi_b.width = (faces[ib].width);
//                roi_b.height = (faces[ib].height);
//            }
//
////        Point pt1(faces[ic].x, faces[ic].y); // Display detected faces on main window - live stream from camera
////        Point pt2((faces[ic].x + faces[ic].height), (faces[ic].y + faces[ic].width));
////        rectangle(frame, pt1, pt2, Scalar(0, 255, 0), 2, 8, 0);
////        imwrite("frame.jpg", frame);
//        }
//
//        if (roi_b.area() > 0) {
//            frame = rotateMat(frame, angle);
//            crop = frame(roi_b);
//            //resize(crop, res, Size(128, 128), 0, 0, INTER_LINEAR); // This will be needed later while saving images
//            //cvtColor(crop, gray, CV_BGR2GRAY); // Convert cropped image to Grayscale
//
//            imwrite("face.jpg", crop);
//            imwrite("frame.jpg", frame);
//            return;
//        }
//    }
//
//}
// Comparator: sort contours by area, descending.
bool biggerSort(const vector<Point> &v1, const vector<Point> &v2) {
    return contourArea(v1) > contourArea(v2);
}
// Comparator: sort contours by area, ascending.
bool smallerSort(const vector<Point> &v1, const vector<Point> &v2) {
    return contourArea(v1) < contourArea(v2);
}
// Keep only the (at most) eight smallest candidate quadrilaterals by area.
auto removeBigRect = [](vector<vector<Point>> &rects) {
    vector<vector<Point>> tmpRects;
    tmpRects.swap(rects);
    std::sort(tmpRects.begin(), tmpRects.end(), smallerSort);
    size_t keep = std::min<size_t>(8, tmpRects.size());
    rects.insert(rects.begin(), tmpRects.begin(), tmpRects.begin() + keep);
};
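// Pipeline: load and downscale the input image; for the back side, feature-match it
// against a hard-coded reference with RobustMatcher first; otherwise segment the image
// with mean-shift filtering plus flood fill. Then find candidate quadrilaterals with
// find_squares, deskew each one via its rotated bounding box, refine the borders with
// detect() and a perspective warp to 640x404, and validate the result with
// findIDCard / findIDCard_Back (also trying a 180-degree rotation). Output images are
// written under ResourceManager::getInstance().ConfigDirPath.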
int main(int argc, char *argv[]) {
    if (argc < 2) {
        return -1;
    }
    double t1 = (double) getTickCount();
    remove((ResourceManager::getInstance().ConfigDirPath + "blurred.jpg").c_str());
    remove((ResourceManager::getInstance().ConfigDirPath + "warpAffine.jpg").c_str());
    remove((ResourceManager::getInstance().ConfigDirPath + "result.jpg").c_str());
    remove((ResourceManager::getInstance().ConfigDirPath + "output_front.jpg").c_str());
    remove((ResourceManager::getInstance().ConfigDirPath + "output_back.jpg").c_str());
    Mat inputSrc = imread(argv[1]);
    if (inputSrc.empty()) {
        return -1;
    }
    int isBackSide = 0;
    if (argc > 2) {
        isBackSide = atoi(argv[2]);
    }
    double fScale = 1.0;
    if (inputSrc.rows > inputSrc.cols) {
        // portrait orientation
        if (inputSrc.cols > 720) {
            fScale = 720.0 / inputSrc.cols;     // scale factor
        }
    } else {
        // landscape orientation
        if (inputSrc.cols > 960) {
            fScale = 960.0 / inputSrc.cols;
        }
    }
    //Mat src;
    resize(inputSrc, inputSrc, Size(inputSrc.cols * fScale, inputSrc.rows * fScale));
    //inputSrc = whiteBalance(src);
    //inputSrc = src;
    Mat inputSrc2 = inputSrc.clone();
    if (isBackSide) {
        RobustMatcher rmatcher;
        cv::Mat img1;
        std::vector<cv::KeyPoint> img1_keypoints;
        cv::Mat img1_descriptors;
        cv::Mat img2;
        std::vector<cv::KeyPoint> img2_keypoints;
        cv::Mat img2_descriptors;
        std::vector<cv::DMatch> matches;
        img1 = cv::imread("d:\\shenfenzheng_back.jpg");
        GaussianBlur(img1, img1, Size(5, 5), 0);
        //img2 = cv::imread("d:\\IMG_20171031_090651.jpg");
        img2 = inputSrc2;
        Mat result;
        rmatcher.match(img1, img2, matches, img1_keypoints, img2_keypoints, result);
        Mat dstForBack = result.clone();
        bool success = false;
        if (findIDCard_Back(dstForBack)) {
            imwrite(ResourceManager::getInstance().ConfigDirPath + "output_back.jpg", result);
            success = true;
        }
        if (success) {
            double t2 = (double) getTickCount();
            double time = (t2 - t1) / getTickFrequency();
            std::cout << "time: " << time << std::endl;
            return 0;
        }
    } else {
        Mat res; // segmented image
        int spatialRad = 10;  // spatial window radius
        int colorRad = 10;    // color window radius
        int maxPyrLevel = 2;  // number of pyramid levels
        pyrMeanShiftFiltering(inputSrc, res, spatialRad, colorRad, maxPyrLevel); // mean-shift color smoothing
        //GaussianBlur(res, res, Size(3, 3), 0);
        Mat closerect = getStructuringElement(MORPH_RECT, Size(11, 11)); // structuring element
        morphologyEx(res, res, MORPH_DILATE, closerect); // morphological dilation
        morphologyEx(res, res, MORPH_OPEN, closerect);   // morphological opening
        //imwrite("res1.jpg", res);
        RNG rng = theRNG();
        Mat mask(res.rows + 2, res.cols + 2, CV_8UC1, Scalar::all(0));  // flood-fill mask
        for (int y = 0; y < res.rows; y++) {
            for (int x = 0; x < res.cols; x++) {
                if (mask.at<uchar>(y + 1, x + 1) == 0)  // non-zero means this pixel was already filled; skip it
                {
                    Scalar newVal(rng(256), rng(256), rng(256));
                    floodFill(res, mask, Point(x, y), newVal, 0, Scalar::all(1), Scalar::all(1)); // flood fill with a random color
                }
            }
        }
        inputSrc = res;
        //imwrite("res.jpg", res);
//        Mat temp = imread("d:\\shenfenzheng.jpg");
//        Mat res2; //分割后图像
//        spatialRad = 10;  //空间窗口大小
//        colorRad = 10;   //色彩窗口大小
//        maxPyrLevel = 2;  //金字塔层数
//        pyrMeanShiftFiltering(temp, res2, spatialRad, colorRad, maxPyrLevel); //色彩聚类平滑滤波
//        Mat mask2(res2.rows + 2, res2.cols + 2, CV_8UC1, Scalar::all(0));  //掩模
//        for (int y = 0; y < res2.rows; y++) {
//            for (int x = 0; x < res2.cols; x++) {
//                if (mask2.at<uchar>(y + 1, x + 1) == 0)  //非0处即为1,表示已经经过填充,不再处理
//                {
//                    Scalar newVal(rng(256), rng(256), rng(256));
//                    floodFill(res2, mask2, Point(x, y), newVal, 0, Scalar::all(5), Scalar::all(5)); //执行漫水填充
//                }
//            }
//        }
//        Mat temp = imread("d:\\shenfenzheng.jpg");
//        pyrMeanShiftFiltering(temp, temp, 25, 10);
//        cvtColor(temp, temp, COLOR_BGR2GRAY);
//        threshold(temp, temp, 100, 255, THRESH_BINARY);
////        cv::Size size(3, 3);
////        cv::GaussianBlur(temp, temp, size, 0);
////        adaptiveThreshold(temp, temp, 255, CV_ADAPTIVE_THRESH_MEAN_C, CV_THRESH_BINARY, 75, 10);
////        cv::bitwise_not(temp, temp);
//        Mat closerect = getStructuringElement(MORPH_RECT, Size(11, 11)); //进行结构算子生成
//        morphologyEx(temp, temp, MORPH_OPEN, closerect);
////        //cv::Mat edges;
////        cv::Canny(temp, temp, 0, 255);
//
//        Mat image = inputSrc2.clone();
//        pyrMeanShiftFiltering(image, image, 25, 10);
////        Mat pyr, timg, gray0(image.size(), CV_8U), gray;
////        // karlphillip: dilate the image so this technique can detect the white square,
////        Mat blurred(image);
////        dilate(blurred, blurred, Mat(), Point(-1, -1), 1, 1, 1);
////
////        //边缘增强
////        cv::Mat gray_x, gray_y;
////        cv::Sobel(image, gray_x, CV_16S, 1, 0, 3);
////        gray_x = cv::abs(gray_x);
////        gray_x.convertTo(gray_x, CV_8U);
////        cv::Sobel(image, gray_y, CV_16S, 0, 1, 3);
////        gray_y = cv::abs(gray_y);
////        gray_y.convertTo(gray_y, CV_8U);
////        cv::add(image, gray_x, image);
////        cv::add(image, gray_y, image);
////        cv::Mat blurMat, blurMat16;
////        cv::GaussianBlur(image, blurMat, cv::Size(3, 3), 0, 0);
////        cv::Laplacian(blurMat, blurMat16, CV_16S);
////        blurMat16 = cv::abs(blurMat16);
////        blurMat16.convertTo(blurMat, CV_8U);
////        cv::add(image, blurMat, image);
////
////        // then blur it so that the ocean/sea become one big segment to avoid detecting them as 2 big squares.
////        medianBlur(blurred, blurred, 3);
////
////        // down-scale and upscale the image to filter out the noise
////        pyrDown(blurred, pyr, Size(blurred.cols / 2, blurred.rows / 2));
////        pyrUp(pyr, timg, blurred.size());
//
//        //blurred = equalizeIntensityHist(blurred);
//        cvtColor(image, image, COLOR_BGR2GRAY);
//        threshold(image, image, 150, 255, THRESH_BINARY);
//        //cv::GaussianBlur(image, image, size, 0);
//        adaptiveThreshold(image, image, 255, CV_ADAPTIVE_THRESH_MEAN_C, CV_THRESH_BINARY, 39, 10);
////        cv::bitwise_not(image, image);
//
//        morphologyEx(image, image, MORPH_OPEN, closerect);//进行形态学开运算
        //threshold(image, image, 120, 255, THRESH_BINARY);
        //adaptiveThreshold(blurred, image, 255, ADAPTIVE_THRESH_GAUSSIAN_C, THRESH_BINARY, 39, 3);
//        cv::Mat edges2;
        //cv::Canny(image, image, 127, 255);
//
//        Mat closerect = getStructuringElement(MORPH_RECT, Size(11, 11)); //进行结构算子生成
//        morphologyEx(temp, temp, MORPH_OPEN, closerect);
//        morphologyEx(image, image, MORPH_OPEN, closerect);//进行形态学开运算
/*        Mat image;
        cvtColor(res2, temp, COLOR_BGR2GRAY);
        adaptiveThreshold(temp, temp, 255, ADAPTIVE_THRESH_MEAN_C, THRESH_BINARY, 3, 3);
        imwrite("res2.jpg", temp);
        cvtColor(res, image, COLOR_BGR2GRAY);
        adaptiveThreshold(image, image, 255, ADAPTIVE_THRESH_MEAN_C, THRESH_BINARY, 3, 3);
        imwrite("res.jpg", image);
        vector<vector<Point>> contours_img, contours_img2, contours_template;//目标图,模版图
        findContours(temp, contours_template, CV_RETR_LIST, CHAIN_APPROX_SIMPLE);//提取轮廓元素
        findContours(image, contours_img, CV_RETR_LIST, CHAIN_APPROX_NONE);
//////////////////////////////////////////////
        vector<vector<Point>> squares;
        vector<Point> approx;
        for (size_t i = 0; i < contours_img.size(); i++) {
            // approximate contour with accuracy proportional
            // to the contour perimeter
            approxPolyDP(Mat(contours_img[i]), approx, arcLength(Mat(contours_img[i]), true) * 0.01, true);
            squares.push_back(approx);
        }
        std::sort(squares.begin(), squares.end(), biggerSort);
        std::sort(contours_template.begin(), contours_template.end(), biggerSort);
        Rect rt;
        double pro = 1;//相似度,越接近0越好
        double min_pro = 999;//对应的最优匹配值
        int min_kk = -1;//对应的最优匹配的下标
        for (int kk = 0; kk < squares.size(); kk++) {
            if (contourArea(squares[kk]) < 1000)//面积阈值筛选
            {
                break;
            }
            contours_img2.push_back(squares[kk]);
            rt = boundingRect(squares[kk]);
//            if (rt.height <= rt.width)//垃圾桶是矩形
//            {
//                continue;
//            }
            pro = matchShapes(squares[kk], contours_template[1], CV_CONTOURS_MATCH_I3, 1.0);//进行轮廓匹配
            if (pro < min_pro) {
                min_pro = pro;
                min_kk = kk;
            }
            cout << kk << "==" << pro << endl;
        }
        Mat test1 = inputSrc2.clone();
        drawContours(test1, contours_img2, -1, Scalar(0, 0, 255), 2);
        imwrite("d:\\666.jpg", test1);
        ////////////
        RotatedRect rr = minAreaRect(squares[min_kk]);
        Mat M, rotated, cropped;
        // get angle and size from the bounding box
        float angle = rr.angle;
        Size2f rect_size = rr.size;
        // thanks to http://felix.abecassis.me/2011/10/opencv-rotation-deskewing/
        if (rr.angle < -45.) {
            angle += 90.0;
            swap(rect_size.width, rect_size.height);
        }
        if (rect_size.width > rect_size.height) {
            rect_size.width += rect_size.width * 0.5;
            rect_size.height += rect_size.height * 1;
        } else {
            rect_size.width += rect_size.width * 0.5;
            rect_size.height += rect_size.height * 1;
        }
        // get the rotation matrix
//        M = getRotationMatrix2D(rr.center, angle, 1.0);
//        // perform the affine transformation
//        warpAffine(inputSrc2, rotated, M, inputSrc2.size(), INTER_CUBIC);
//        // crop the resulting image
//        getRectSubPix(rotated, rect_size, rr.center, cropped);
        cropped = rotateMat(inputSrc2, angle);
        imwrite("d:\\test366.jpg", cropped);
        rt = boundingRect(squares[min_kk]);
        rectangle(inputSrc2, rt, Scalar(0, 0, 255), 2);
        imwrite("d:\\test266.jpg", temp);
        imwrite("d:\\test566.jpg", image);
        imwrite("d:\\test466.jpg", inputSrc2);
        return 0;*/
        // Load the cascade
//        if (!face_cascade.load(face_cascade_name)) {
//            printf("--(!)Error loading\n");
//            return (-1);
//        }
//        detectAndDisplay(inputSrc2);
//        return 0;
    }
    //pyrMeanShiftFiltering(inputSrc, inputSrc, 20, 20);
    vector<vector<Point> > squares;
    Mat forSmAT = inputSrc.clone();
    Mat contour = inputSrc2.clone();
    Mat contour2 = inputSrc2.clone();
    find_squares(forSmAT, squares);
    std::cout << "* " << squares.size() << " squares were found." << std::endl;
//    draw_squares(contour, squares);
//    imwrite("contour.jpg", contour);
    vector<Point> minAreaPoints;
    int minArea = inputSrc.cols * inputSrc.rows;
    Rect minRect;
    vector<vector<Point> > squares2;
    for (auto points: squares) {
        Rect test = minAreaRect(points).boundingRect();
        bool valid = true;
        if (test.x < 0) {
            test.x = 0;
            valid = false;
        }
        if (test.y < 0) {
            test.y = 0;
            valid = false;
        }
        if (test.x + test.width > inputSrc.cols) {
            test.width = inputSrc.cols - test.x;
        }
        if (test.y + test.height > inputSrc.rows) {
            test.height = inputSrc.rows - test.y;
        }
        // filter out candidates with an unreasonable aspect ratio
        if (test.width < test.height) {
            if (test.height > test.width * 1.8) {
                continue;
            }
        } else {
            if (test.width > test.height * 1.8) {
                continue;
            }
        }
        if (test.area() <= minArea) {
            minArea = test.area();
            minAreaPoints = points;
            minRect = test;
        }
        if (valid) {
            squares2.push_back(points);
        }
    }
    if (squares2.size() == 0 && minAreaPoints.size() > 0) {
        squares2.push_back(minAreaPoints);
    }
    std::cout << "* " << squares2.size() << " squares2 were found." << std::endl;
//    draw_squares(contour2, squares2);
//    imwrite("contour2.jpg", contour2);
    if (squares2.size() > 1) {
        removeBigRect(squares2);
    }
    //if (minRect.area() > 0) {
    for (const auto &p : squares2) {
        RotatedRect rr = minAreaRect(p);
        //cout << rr.angle << endl;
        // rect is the RotatedRect
        // matrices we'll use
        Mat M, rotated, roiMat, rotated2;
        // get angle and size from the bounding box
        float angle = rr.angle;
        Size2f rect_size = rr.size;
        // thanks to http://felix.abecassis.me/2011/10/opencv-rotation-deskewing/
        if (rr.angle < -45.) {
            angle += 90.0;
            swap(rect_size.width, rect_size.height);
        }
        if (rect_size.width > rect_size.height) {
            rect_size.width += rect_size.width * 0.2;
            rect_size.height += rect_size.height * 0.2;
        } else {
            rect_size.width += rect_size.width * 0.2;
            rect_size.height += rect_size.height * 0.2;
        }
        // get the rotation matrix
        M = getRotationMatrix2D(rr.center, angle, 1.0);
        // perform the affine transformation
        warpAffine(inputSrc, rotated, M, inputSrc.size(), INTER_CUBIC);
        warpAffine(inputSrc2, rotated2, M, inputSrc2.size(), INTER_CUBIC);
        // crop the resulting image
        getRectSubPix(rotated, rect_size, rr.center, roiMat);
        getRectSubPix(rotated2, rect_size, rr.center, rotated2);
        //imwrite("warpAffine.jpg", rotated2);
        //imwrite("warpAffine2.jpg", cropped);
        if (roiMat.cols < roiMat.rows) {
            rotate(roiMat, roiMat, ROTATE_90_COUNTERCLOCKWISE);
            rotate(rotated2, rotated2, ROTATE_90_COUNTERCLOCKWISE);
        }
        Mat roiMat2 = rotated2.clone();
        vector<cv::Vec4i> edges;
        detect(roiMat2, edges);
        if (edges.size() != 4) {
            return 0;
        }
        vector<cv::Point2f> PTransfrom(4);
        vector<cv::Point2f> PTransto(4);
        PTransfrom[0] = Tools::CrossPoint(edges[0], edges[2]); //upleft
        PTransfrom[1] = Tools::CrossPoint(edges[0], edges[3]); //upright
        PTransfrom[2] = Tools::CrossPoint(edges[1], edges[2]); //downleft
        PTransfrom[3] = Tools::CrossPoint(edges[1], edges[3]); //downright
        PTransto[0].x = 0;
        PTransto[0].y = 0;
        PTransto[1].x = 640;
        PTransto[1].y = 0;
        PTransto[2].x = 0;
        PTransto[2].y = 404;
        PTransto[3].x = 640;
        PTransto[3].y = 404;
        Mat tf = cv::getPerspectiveTransform(PTransfrom, PTransto);
        Mat dst = cv::Mat::zeros(cv::Size(640, 404), rotated2.type());
        warpPerspective(rotated2, dst, tf, dst.size(), cv::INTER_CUBIC);
        dst.convertTo(dst, CV_8UC3);
        imwrite(ResourceManager::getInstance().ConfigDirPath + "result.jpg", dst);
        Mat dstForBack = dst.clone();
        Mat dstForFront = dst.clone();
        if (!isBackSide) {
            if (findIDCard(dstForFront)) {
                imwrite(ResourceManager::getInstance().ConfigDirPath + "output_front.jpg", dst);
                break;
            } else {
                Mat dstForFrontRotated = dst.clone();
                rotate(dstForFrontRotated, dstForFrontRotated, ROTATE_180);
                Mat dstForFrontRotated2 = dstForFrontRotated.clone();
                if (findIDCard(dstForFrontRotated2)) {
                    imwrite(ResourceManager::getInstance().ConfigDirPath + "output_front.jpg", dstForFrontRotated);
                    break;
                }
            }
        } else {
            if (findIDCard_Back(dstForBack)) {
                imwrite(ResourceManager::getInstance().ConfigDirPath + "output_back.jpg", dst);
                break;
            } else {
                Mat dstForBackRotated = dst.clone();
                rotate(dstForBackRotated, dstForBackRotated, ROTATE_180);
                Mat dstForBackRotated2 = dstForBackRotated.clone();
                if (findIDCard_Back(dstForBackRotated2)) {
                    imwrite(ResourceManager::getInstance().ConfigDirPath + "output_back.jpg", dstForBackRotated2);
                    break;
                }
            }
        }
//        Mat hMat = dstMat.clone();
//        Mat blurred(dstMat);
//        dilate(blurred, blurred, Mat(), Point(-1, -1));
//        // then blur it so that the ocean/sea become one big segment to avoid detecting them as 2 big squares.
//        medianBlur(blurred, blurred, 3);
//        blurred = equalizeIntensityHist(blurred);
//        //cvtColor(dstMat, hMat, COLOR_GRAY2BGR);
//        cvtColor(blurred, blurred, COLOR_BGR2GRAY);
//        Canny(blurred, blurred, 0, 200, 3);
//        imwrite("d:\\test366.jpg", blurred);
//
//        vector<Vec4i> lines, v1;
//        HoughLinesP(blurred, v1, 3, 0.1 * CV_PI / 180, 200, 200, 20);
//
//        for(auto l : v1)
//        {
//            double angleValue = angle(Point(l[0], l[1]), Point(l[2], l[3]));
//            if(angleValue > 3 || angleValue < 0.1)
//            {
//                lines.push_back(l);
//            }
//        }
        // Expand the lines
//        for (int i = 0; i < lines.size(); i++) {
//            cv::Vec4i v = lines[i];
//            lines[i][0] = 0;
//            lines[i][1] = ((float) v[1] - v[3]) / (v[0] - v[2]) * -v[0] + v[1];
//            lines[i][2] = blurred.cols;
//            lines[i][3] = ((float) v[1] - v[3]) / (v[0] - v[2]) * (blurred.cols - v[2]) + v[3];
//        }
//        std::vector<cv::Point2f> corners;
//        for (int i = 0; i < lines.size(); i++) {
//            for (int j = i + 1; j < lines.size(); j++) {
//                cv::Point2f pt = computeIntersect(lines[i], lines[j]);
//                if (pt.x >= 0 && pt.y >= 0)
//                    corners.push_back(pt);
//            }
//        }
//
//        for (size_t i = 0; i < lines.size(); i++) {
//            Vec4i l = lines[i];
//            line(hMat, Point(l[0], l[1]), Point(l[2], l[3]), Scalar(0, 0, 255), 3, 2);
//        }
//        imwrite("d:\\test666.jpg", hMat);
//        if(minRect.width < minRect.height)
//        {
//            swap(minRect.width, minRect.height);
//            swap(minRect.x, minRect.y);
//        }
//        PTransfrom[0] = minAreaPoints[0]; //upleft
//        PTransfrom[1] = minAreaPoints[3]; //upright
//        PTransfrom[2] = minAreaPoints[1]; //downleft
//        PTransfrom[3] = minAreaPoints[2]; //downright
/*        vector<cv::Point2f> PTransfrom(4);
        vector<cv::Point2f> PTransto(4);
        RotatedRect rr =  minAreaRect(minAreaPoints);
        cout << rr.angle;
        if (minRect.width < minRect.height) {
            PTransfrom[0] = Point(minRect.x + minRect.width, minRect.y); //upleft
            PTransfrom[1] = minRect.br(); //upright
            PTransfrom[2] = minRect.tl(); //downleft
            PTransfrom[3] = Point(minRect.x, minRect.y + minRect.height); //downright
            //Point2f pt = computeIntersect(lines[i], lines[j]);
        } else {
            PTransfrom[0] = minRect.tl(); //upleft
            PTransfrom[1] = Point(minRect.x + minRect.width, minRect.y); //upright
            PTransfrom[2] = Point(minRect.x, minRect.y + minRect.height); //downleft
            PTransfrom[3] = minRect.br(); //downright
        }
        PTransto[0].x = 0;
        PTransto[0].y = 0;
        PTransto[1].x = 640;
        PTransto[1].y = 0;
        PTransto[2].x = 0;
        PTransto[2].y = 404;
        PTransto[3].x = 640;
        PTransto[3].y = 404;
        Mat tf = cv::getPerspectiveTransform(PTransfrom, PTransto);
        Mat dst = cv::Mat::zeros(cv::Size(640, 404), inputSrc2.type());
        warpPerspective(inputSrc2, dst, tf, dst.size(), cv::INTER_CUBIC);
        dst.convertTo(dst, CV_8UC3);
        imwrite("d:\\test666.jpg", dst);*/
//        Rect temp = minRect;
//        temp.width += temp.width * 0.4;
//        temp.height += temp.height * 0.2;
//        temp.x -= temp.width * 0.2;
//        temp.y -= temp.height * 0.1;
//
//        if(temp.x < 0)
//        {
//            temp.x = 0;
//        }
//        if(temp.y < 0)
//        {
//            temp.y = 0;
//        }
//        if(temp.x + temp.width > inputSrc2.cols)
//        {
//            temp.width = inputSrc2.cols - temp.x;
//        }
//        if(temp.y + temp.height > inputSrc2.rows)
//        {
//            temp.height = inputSrc2.rows - temp.y;
//        }
//
//
    }
//    Mat matROI_IDCard;
//    equalizeHist(inputSrc.clone(), matROI_IDCard);
    //cvtColor(matROI_IDCard, matROI_IDCard, COLOR_BGR2GRAY);
//    Mat threshMat;
//    threshMat = inputSrc;
//    marrEdge(matROI_IDCard, threshMat, 9, 1.6);
//    imwrite("d:\\test166.jpg", threshMat);
//    Canny(matROI_IDCard, threshMat, 50, 150, 3);
////    Mat gray_x, gray_y;
////    Mat abs_gray_x, abs_gray_y;
////    Sobel(matROI_IDCard, gray_x, CV_16S, 1, 0, 3, 1, 1, BORDER_DEFAULT);
////    convertScaleAbs(gray_x, abs_gray_x);
////    Sobel(matROI_IDCard, gray_y, CV_16S, 0, 1, 3, 1, 1, BORDER_DEFAULT);
////    convertScaleAbs(gray_y, abs_gray_y);
////    addWeighted(abs_gray_x, 0.5, abs_gray_y, 0.5, 0, threshMat);
//    imwrite("d:\\test66.jpg", threshMat);
//
//
//    Mat hMat;
//    cvtColor(threshMat, hMat, COLOR_GRAY2BGR);
//
//    vector<Vec4i> lines;
//    HoughLinesP(threshMat, lines, 3, CV_PI / 180, 500, 500, 20);
//
//    // Expand the lines
//    for (int i = 0; i < lines.size(); i++) {
//        cv::Vec4i v = lines[i];
//        lines[i][0] = 0;
//        lines[i][1] = ((float) v[1] - v[3]) / (v[0] - v[2]) * -v[0] + v[1];
//        lines[i][2] = threshMat.cols;
//        lines[i][3] = ((float) v[1] - v[3]) / (v[0] - v[2]) * (threshMat.cols - v[2]) + v[3];
//    }
//
//    std::vector<cv::Point2f> corners;
//    for (int i = 0; i < lines.size(); i++) {
//        for (int j = i + 1; j < lines.size(); j++) {
//            cv::Point2f pt = computeIntersect(lines[i], lines[j]);
//            if (pt.x >= 0 && pt.y >= 0)
//                corners.push_back(pt);
//        }
//    }
//
//    for (size_t i = 0; i < lines.size(); i++) {
//        Vec4i l = lines[i];
//        line(hMat, Point(l[0], l[1]), Point(l[2], l[3]), Scalar(0, 0, 255), 3, 2);
//    }
//
//    imwrite("d:\\test166.jpg", hMat);
//
//    threshold(threshMat, threshMat, 0, 255, THRESH_OTSU + THRESH_BINARY);
//    threshMat = matROI_IDCard;
//    Mat dilated_edges;
////    erode(threshMat, dilated_edges, cv::Mat(), cv::Point(-1, -1), 2, 1, 1); // default 3x3 kernel
//    Mat element = getStructuringElement(MORPH_RECT, Size(3, 3));
//    morphologyEx(threshMat, dilated_edges, MORPH_CLOSE, element);
//    morphologyEx(dilated_edges, dilated_edges, MORPH_CLOSE, element);
//    morphologyEx(dilated_edges, dilated_edges, MORPH_DILATE, element);
//    imwrite("d:\\test166.jpg", dilated_edges);
//    threshMat = dilated_edges;
//    cv::Mat dilated_edges;
//    dilate(threshMat, dilated_edges, cv::Mat(), cv::Point(-1, -1), 4, 1, 1); // default 3x3 kernel
//    imwrite("d:\\test66.jpg", dilated_edges);
//    threshMat = dilated_edges;
/*    Mat threshMat;
    //threshold(matROI_IDCard, threshMat, 0, 255, THRESH_OTSU + THRESH_BINARY);
    adaptiveThreshold(matROI_IDCard, threshMat, 255, ADAPTIVE_THRESH_GAUSSIAN_C, THRESH_BINARY, 51, 15);
    cv::Mat edges;
    //cv::Canny(filtered, edges, thresh, thresh * 2, 3);
    cv::Canny(threshMat, edges, 10, 255);
    imwrite("d:\\test166.jpg", edges);
    cv::Mat dilated_edges;
    dilate(edges, dilated_edges, cv::Mat(), cv::Point(-1, -1), 2, 1, 1); // default 3x3 kernel
//    Mat element = getStructuringElement(MORPH_RECT, Size(3, 3));
//    morphologyEx(threshMat, dilated_edges, MORPH_BLACKHAT, element);
//    morphologyEx(threshMat, dilated_edges, MORPH_ERODE, element);
    //morphologyEx(edges, dilated_edges, MORPH_DILATE, element);
    imwrite("d:\\test66.jpg", dilated_edges);
    threshMat = dilated_edges;
    //adaptiveThreshold(matROI_IDCard, threshMat, 255, ADAPTIVE_THRESH_GAUSSIAN_C, THRESH_BINARY, 39, 15);
    //threshold(matROI_IDCard, threshMat, 100, 255, THRESH_OTSU | THRESH_BINARY );
//    Mat blurEdges;
//    cv::Canny(threshMat, blurEdges, 225, 175);*/
//    std::vector<std::vector<cv::Point> > contours;
//    cv::findContours(threshMat, contours, cv::RETR_LIST, cv::CHAIN_APPROX_SIMPLE);
//
//    std::vector<std::vector<cv::Point> > contours2;
//    for (auto c : contours) {
//
//        Rect temp = cv::boundingRect(c);
//
//        if (temp.area() < threshMat.cols * threshMat.rows * 0.9 &&
//            temp.area() > threshMat.cols * threshMat.rows * 0.1) {
//            contours2.push_back(c);
//            rectangle(inputSrc, temp, Scalar(0, 0, 255), 1);
//        }
//
//    }
//    //cv::drawContours(inputSrc, contours, -1, Scalar(255, 0, 0), 2);
//    imwrite("d:\\test2.jpg", inputSrc);
//
//    for (auto c : contours2) {
//        Rect temp = cv::boundingRect(c);
//        temp.width += 150;
//        temp.height += 100;
//        temp.x -= 75;
//        temp.y -= 50;
//
//
//        vector<cv::Vec4i> edges;
//        Mat roiMat = inputSrc2(temp);
//        Mat roiMat2 = equalizeIntensityHist(roiMat);
//        imwrite("d:\\test6666.jpg", roiMat2);
//        detect(roiMat2, edges);
//
//        if (edges.size() != 4) {
//            return 0;
//        }
//
//        vector<cv::Point2f> PTransfrom(4);
//        vector<cv::Point2f> PTransto(4);
//        PTransfrom[0] = Tools::CrossPoint(edges[0], edges[2]); //upleft
//        PTransfrom[1] = Tools::CrossPoint(edges[0], edges[3]); //upright
//        PTransfrom[2] = Tools::CrossPoint(edges[1], edges[2]); //downleft
//        PTransfrom[3] = Tools::CrossPoint(edges[1], edges[3]); //downright
//        PTransto[0].x = 0;
//        PTransto[0].y = 0;
//        PTransto[1].x = 640;
//        PTransto[1].y = 0;
//        PTransto[2].x = 0;
//        PTransto[2].y = 404;
//        PTransto[3].x = 640;
//        PTransto[3].y = 404;
//        Mat tf = cv::getPerspectiveTransform(PTransfrom, PTransto);
//        Mat dst = cv::Mat::zeros(cv::Size(640, 404), roiMat.type());
//        warpPerspective(roiMat, dst, tf, dst.size(), cv::INTER_CUBIC);
//
//        dst.convertTo(dst, CV_8UC3);
//
//        imwrite("d:\\test666.jpg", dst);
//        break;
//    }
    //cv::drawContours(inputSrc, contours2, -1, Scalar(255, 0, 0), 2);
    double t2 = (double) getTickCount();
    double time = (t2 - t1) / getTickFrequency();   // elapsed time in seconds
    std::cout << "detection time: " << time << " s" << std::endl;
    return 0;
}
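
// ---------------------------------------------------------------------------
// Sketch helpers (illustrative only, not wired into the pipeline above).
// ---------------------------------------------------------------------------

// A minimal sketch of the "try upright, then retry after a 180-degree
// rotation" pattern used in the front/back detection loop above. It assumes
// the detector is any callable taking a Mat& and returning true on success
// (e.g. findIDCard or findIDCard_Back); on success it hands back the
// unmodified frame in the orientation that worked.
//
// Hypothetical usage:
//     Mat oriented;
//     if (detectUprightOrRotated(dst, findIDCard, oriented))
//         imwrite(outputPath, oriented);
template<typename Detector>
static bool detectUprightOrRotated(const Mat &src, Detector detector, Mat &oriented) {
    Mat upright = src.clone();            // detectors may draw on their input, so clone first
    if (detector(upright)) {
        oriented = src.clone();           // keep the untouched frame in its original orientation
        return true;
    }
    Mat rotated;
    rotate(src, rotated, ROTATE_180);     // fall back to the frame flipped by 180 degrees
    Mat rotatedCopy = rotated.clone();
    if (detector(rotatedCopy)) {
        oriented = rotated;               // keep the untouched, rotated frame
        return true;
    }
    return false;
}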
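
// A minimal sketch of the perspective-rectification idea kept in the
// commented-out experiments above: order four corner points of a roughly
// upright card and warp the region onto a fixed-size canvas. The experiments
// use a 640x404 canvas, kept here as the default; FIX_IDCARD_SIZE would work
// the same way. Illustrative only, not the function the pipeline calls.
static Mat warpCardToCanvas(const Mat &src, const vector<Point2f> &corners,
                            Size canvas = Size(640, 404)) {
    // Order the corners: min(x+y) -> top-left, max(x+y) -> bottom-right,
    // min(y-x) -> top-right, max(y-x) -> bottom-left. This assumes the card
    // is not rotated by much more than 45 degrees.
    Point2f tl = corners[0], tr = corners[0], bl = corners[0], br = corners[0];
    for (const Point2f &p : corners) {
        if (p.x + p.y < tl.x + tl.y) tl = p;
        if (p.x + p.y > br.x + br.y) br = p;
        if (p.y - p.x < tr.y - tr.x) tr = p;
        if (p.y - p.x > bl.y - bl.x) bl = p;
    }
    // Same corner order as the commented-out experiment:
    // up-left, up-right, down-left, down-right.
    vector<Point2f> from = {tl, tr, bl, br};
    vector<Point2f> to = {Point2f(0, 0),
                          Point2f((float) canvas.width, 0),
                          Point2f(0, (float) canvas.height),
                          Point2f((float) canvas.width, (float) canvas.height)};
    Mat tf = getPerspectiveTransform(from, to);
    Mat dst = Mat::zeros(canvas, src.type());
    warpPerspective(src, dst, tf, dst.size(), INTER_CUBIC);
    return dst;
}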
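
// A minimal sketch of the two-line intersection step the Hough-line
// experiments above rely on (computeIntersect / Tools::CrossPoint are the
// project's own helpers; their exact behaviour may differ). Each line is a
// Vec4i (x1, y1, x2, y2); (-1, -1) is returned for (nearly) parallel lines,
// matching the "pt.x >= 0 && pt.y >= 0" filter used in those experiments.
static Point2f intersectLines(const Vec4i &a, const Vec4i &b) {
    float x1 = a[0], y1 = a[1], x2 = a[2], y2 = a[3];
    float x3 = b[0], y3 = b[1], x4 = b[2], y4 = b[3];
    float d = (x1 - x2) * (y3 - y4) - (y1 - y2) * (x3 - x4);
    if (fabs(d) < 1e-6f)
        return Point2f(-1.f, -1.f);       // lines are (nearly) parallel
    float px = ((x1 * y2 - y1 * x2) * (x3 - x4) - (x1 - x2) * (x3 * y4 - y3 * x4)) / d;
    float py = ((x1 * y2 - y1 * x2) * (y3 - y4) - (y1 - y2) * (x3 * y4 - y3 * x4)) / d;
    return Point2f(px, py);
}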