Matching now uses cv::FlannBasedMatcher::knnMatch (k=2) with Lowe's ratio test for better accuracy

main
Pavol Debnar 2 years ago
parent 696bd11bc7
commit d535d24082

@ -547,9 +547,15 @@ class PointBase {
//match the descriptors
cv::FlannBasedMatcher matcher;
std::vector<cv::DMatch> matches;
//matcher.match part1
/*std::vector<cv::DMatch> matches;
matcher.match(descriptorsROI,descriptorsImg,matches);
matcher.match(descriptorsROI,descriptorsImg,matches);*/
//trying out knnmatch
std::vector<std::vector<cv::DMatch>> matches;
matcher.knnMatch(descriptorsROI, descriptorsImg, matches, 2); // Find 2 nearest matches
cv::Mat imageMatches;
drawMatches(roi,keypointsROI,imgSmall,keypointsImg,matches,imageMatches);
@ -560,8 +566,8 @@ class PointBase {
//matcher.match part2
/*
double max_dist = 0; double min_dist = 100;
//cout<<"ROWS DESC:"<<descriptorsImg.rows<<"\n";
//cout<<"SIZE DESC:"<<descriptorsImg.size()<<"\n";
@ -585,6 +591,32 @@ class PointBase {
if( matches[i].distance < 3*min_dist ) { good_matches.push_back( matches[i]); }
}
*/
//knnmatcher part2
cout<<"TU BY MALI BYT MATCHE " <<'\n';
std::vector< DMatch > good_matches;
//float ratio = 0.4; // As in Lowe's paper; can be tuned
for (float ratio = 0.4; good_matches.size()<2; ratio+=0.05)
{
good_matches.clear();
cout << "RATIO: " <<ratio <<"\n";
for (int i = 0; i < matches.size(); ++i)
{
if (matches[i][0].distance < ratio * matches[i][1].distance)
{
good_matches.push_back(matches[i][0]);
cout<<"GOOD MATCH DISTANCE " << matches[i][0].distance <<'\n';
}/*
else{
cout<< "BAD MATCH \n";
}*/
}
}
//if there are no good matches, try adaptive thresholding
std::vector< Point2d > obj;
@ -597,11 +629,7 @@ class PointBase {
scene.push_back( static_cast<cv::Point2i>(keypointsROI[ good_matches[i].queryIdx ].pt ));
}
/*cv::Mat imageMatches2;
drawMatches(roi,keypointsROI,imgSmall,keypointsImg,good_matches,imageMatches2);
cv::namedWindow("good_matches");
cv::imshow("good_matches",imageMatches2);
cv::waitKey(0);*/
//find a transformation based on good matches
//we do not need a Homography, since we deal with affine transformations (no viewport transf. are expected)
@ -649,8 +677,16 @@ class PointBase {
Mat roiOverlapAlpha, imgOverlapAlpha, resultRoi;
//blended images of ROI and img, now we need to only add them together
/*
cv::divide(roiMask,(roiMask+imgWarpedMask)/255.0,roiOverlapAlpha);
cv::divide(imgWarpedMask,(roiMask+imgWarpedMask)/255.0,imgOverlapAlpha);
*/
cv::divide(roiDT,(roiDT+imgDT),roiOverlapAlpha);
cv::divide(imgDT,(roiDT+imgDT),imgOverlapAlpha);
normalize(imgOverlapAlpha, imgOverlapAlpha, 0.0, 255.0, NORM_MINMAX);
normalize(roiOverlapAlpha, roiOverlapAlpha, 0.0, 255.0, NORM_MINMAX);
imshow("roiMask", roiMask);
imshow("roiMask+imgWarpedMask", roiMask+imgWarpedMask);
//imshow("roiMask", roiMask);
@ -658,14 +694,17 @@ class PointBase {
roi.convertTo(roi,CV_32FC1);
resultWarp.convertTo(resultWarp,CV_32FC1);
roiOverlapAlpha.convertTo(roiOverlapAlpha,CV_32FC1);
imgOverlapAlpha.convertTo(imgOverlapAlpha,CV_32FC1);
cout<<cv::typeToString(resultWarp.type())<< "resultWarp type \n";
cout<<cv::typeToString(roi.type())<< "roi type \n";
cout<<cv::typeToString(roiOverlapAlpha.type())<< "roiOverlapAlpha type \n";
cout<<"AAAAAAAAAAAAA \n";
multiply(roiOverlapAlpha,roi,roiOverlapAlpha);
multiply(imgOverlapAlpha,resultWarp,imgOverlapAlpha);
normalize(imgOverlapAlpha, imgOverlapAlpha, 0.0, 1.0, NORM_MINMAX);
normalize(roiOverlapAlpha, roiOverlapAlpha, 0.0, 1.0, NORM_MINMAX);
imshow("imgOverlapAlpha", imgOverlapAlpha);
cv::add(roiOverlapAlpha,imgOverlapAlpha, resultRoi);
@ -685,7 +724,7 @@ class PointBase {
cv::cvtColor(resultRoi,resultRoi,COLOR_GRAY2BGR);
resultRoi.copyTo(img(Rect(xLoc, yLoc, xWitdth, yHeight)));
imwrite("AAAAAAA.png", img);
cout<<"AAAAAAAAAAAAA \n";
destroyAllWindows();

Loading…
Cancel
Save