#include "opencv2/highgui.hpp" #include "opencv2/core.hpp" #include "opencv2/imgproc.hpp" #include "opencv2/imgcodecs.hpp" #include "opencv2/stitching.hpp" #include #include #include "opencv2/calib3d/calib3d.hpp" #include #include #include #include #include #include #include #include using namespace cv; using namespace std; #include namespace fs = std::filesystem; //C++17 //sudo apt install libjsoncpp-dev //sudo ln -s /usr/include/jsoncpp/json/ /usr/include/json //struct necessary for comparison between 2 opencv points - when using std::map //used to sort points according to X value struct ComparePoints { bool operator () (const cv::Point& a, const cv::Point& b) const { return (a.x < b.x) || (a.x == b.x && a.y < b.y); } } CP; class PointBase { public: vector points; //all points - points are being removed as adjacencies are found vector doneVector; //points that have been successfully searched for adjacencies vector toDoVector; //points yet to be searched for adjacent points std::map pathMap; //map for file paths - get the path of a file via pathMap[Point2d(x,y)] double imgWidthDeg; double imgHeightDeg; //loaded points in pointbase are sorted according to X value int load(string path) { //std::string path = "../data/"; for (const auto & entry : fs::directory_iterator(path)) { string filePath = fs::canonical(entry.path()); // absolute path without .. //cout << filePath.find(".json") << std::endl; if (filePath.find(".json") != string::npos) { //std::cout << filePath << std::endl; std::ifstream coord_file(filePath, std::ifstream::binary); Json::Value coords; coord_file >> coords; //cout << "x: " << coords["depthPos"]["x"].asDouble() << std::endl; //cout << "y: " << coords["depthPos"]["y"].asDouble() << std::endl; Point2d newPoint = Point2d(coords["depthPos"]["x"].asDouble() , coords["depthPos"]["y"].asDouble()); newPoint.x = fmod(newPoint.x,360.0) ; pathMap[newPoint] = filePath; points.push_back(newPoint); imgWidthDeg = coords["rectangle"]["w"].asDouble(); imgHeightDeg = coords["rectangle"]["h"].asDouble(); } } if (points.empty()) { cout << "No points detected, check path \n"; return -1; } else { sort(points.begin(),points.end(),CP); return 0; } } void printPoints(int which=0) { if (which==1) // prints current points vector { for(int i =0; i getAdjacents(Point2d inputPoint) { vector resultVector; for (int i = 0; i < points.size(); i++) { double yDifference = inputPoint.y-points[i].y; double xDifference = inputPoint.x - points[i].x; /* if(abs(yDifference)<15 && (xDifference<45) && (xDifference>0)) { resultVector.push_back(points[i]); toDoVector.push_back(points[i]); points.erase(points.begin()+i); continue; } if(abs(yDifference)<15 && (xDifference> -45) && (xDifference<0)) { resultVector.push_back(points[i]); toDoVector.push_back(points[i]); points.erase(points.begin()+i); continue; } if(abs(xDifference)<15 && (yDifference<45) && (yDifference>0)) { resultVector.push_back(points[i]); toDoVector.push_back(points[i]); points.erase(points.begin()+i); continue; } if(abs(xDifference)<15 && (yDifference> -45) && (yDifference<0)) { resultVector.push_back(points[i]); toDoVector.push_back(points[i]); points.erase(points.begin()+i); continue; }*/ if(abs(yDifference)<0.9*imgHeightDeg && abs(xDifference)<0.8*imgWidthDeg) { resultVector.push_back(points[i]); toDoVector.push_back(points[i]); points.erase(points.begin()+i); continue; } } return resultVector; } /* stitches images points have to be loaded in the pointBase first uses public toDoVector, points BUG: not all adjacent images are used for stitching - 
    /* stitches images
       points have to be loaded into the point base first
       uses the public toDoVector and points
       BUG: not all adjacent images end up in the stitched result -
       OpenCV evaluates some of them as not appropriate for stitching */
    void stitchImgs()
    {
        int imgIdx = 0;
        //main loop
        while (!points.empty())
        {
            //take the first point from the left and find adjacents to it
            Point2d p = points[0];
            vector<Point2d> stitchQueue;
            stitchQueue.push_back(p);
            points.erase(points.begin());
            vector<Point2d> temp = getAdjacents(p); //find adjacents of the first point - inits toDoVector

            if (toDoVector.empty()) //if the first point has no adjacents, log it as a singleton
            {
                ofstream singletonsFile("singletons.log", ios_base::app);
                if (singletonsFile)
                {
                    singletonsFile << "x: " << p.x << std::endl;
                    singletonsFile << "y: " << p.y << std::endl;
                    singletonsFile << pathMap[Point2d(p.x, p.y)] << std::endl;
                    singletonsFile.close();
                }
                stitchQueue.pop_back();
                continue;
            }

            //find adjacencies of every reachable point
            while (!toDoVector.empty())
            {
                Point2d p2 = toDoVector[0];
                stitchQueue.push_back(p2);
                temp = getAdjacents(p2);
                toDoVector.erase(toDoVector.begin());
            }

            //stitching
            //object to store the stitched image
            Mat pano;
            bool try_use_gpu = true;

            //create a Stitcher object in SCANS mode (affine model, no projection onto a sphere)
            Ptr<Stitcher> stitcher = Stitcher::create(Stitcher::SCANS);
            //stitcher->setPanoConfidenceThresh(0.85);
            cout << stitcher->panoConfidenceThresh() << " CONFIDENCE THRESH \n";
            //Ptr<Feature2D> finder = stitcher->featuresFinder();
            Ptr<Feature2D> finder = SIFT::create(1600);
            stitcher->setFeaturesFinder(finder);
            cout << finder->getDefaultName() << " FINDER \n";

            vector<Mat> imgs;

            //open a stream for logging
            string logFileName = to_string(imgIdx) + ".log";
            ofstream logFile(logFileName);
            for (Point2d coords : stitchQueue)
            {
                //log which images end up in which result file
                logFile << "x: " << coords.x << std::endl;
                logFile << "y: " << coords.y << std::endl;
                logFile << pathMap[Point2d(coords.x, coords.y)] << std::endl;
                //log to stdout
                cout << "x: " << coords.x << std::endl;
                cout << "y: " << coords.y << std::endl;

                //derive the .png path from the .json path
                string jsonPath = pathMap[coords];
                string pngPath = jsonPath.erase(jsonPath.length() - 4);
                pngPath = pngPath + "png";
                cout << "file: " << pngPath << std::endl;

                Mat img = imread(pngPath, IMREAD_COLOR);
                if (!img.empty())
                {
                    imgs.push_back(img); //store the loaded image in the vector of images
                }
                else
                {
                    cout << "Error: Cannot read image" << std::endl;
                }
            }
            logFile.close();

            //stitching
            cout << "Stitching" << std::endl;
            Stitcher::Status status = stitcher->stitch(imgs, pano);
            if (status != Stitcher::OK)
            {
                //status is OK only if the images were stitched successfully
                cout << "Can't stitch images\n";
            }
            else
            {
                //store the stitched image as "result<imgIdx>.jpg" and show it
                string resultName = "result" + to_string(imgIdx) + ".jpg";
                imwrite(resultName, pano);
                imshow(resultName, pano);
                imgIdx++;
                waitKey(0);
            }
        }
    }

    /* returns the minimum and maximum X and Y values from the point base:
       minX, maxX, minY, maxY */
    std::vector<double> getEdgeValues()
    {
        double minX = 999;       //coordinates are in degrees (0..360), so these bounds suffice
        double maxX = 0;
        double minY = 900000;
        double maxY = 0;
        std::vector<double> res(4);
        for (Point2d coords : points)
        {
            if (coords.x < minX) minX = coords.x;
            if (coords.x > maxX) maxX = coords.x;
            if (coords.y < minY) minY = coords.y;
            if (coords.y > maxY) maxY = coords.y;
        }
        res[0] = minX;
        res[1] = maxX;
        res[2] = minY;
        res[3] = maxY;
        return res;
    }

    //pastes the images of each adjacency group onto one large canvas, placing every new
    //image via SIFT matching against the already filled region (template matching on
    //Canny edges is used as a fallback)
    void cumdump(int imgW, int imgH)
    {
        //SIFT takes too long if the images are large, so we shrink them
        int imgWidth = imgW;
        int imgHeight = imgH;
        if (imgWidth > 1000)
        {
            imgWidth = int(0.4 * imgWidth);
            imgHeight = int(0.4 * imgHeight);
        }

        int placeCounter = 0;     //how many images have been pasted onto the canvas
        int roiCounter = 0;       //how many ROIs have been matched against
        int tempMatchCounter = 0; //how many images needed the template-matching fallback

        std::vector<double> edges = getEdgeValues();
        double minX = edges[0];
        double maxX = edges[1];
        double minY = edges[2];
        double maxY = edges[3];

        //canvas large enough for the whole 360-degree strip plus a margin
        int windowHeight = (int)(maxY * (imgHeight / imgHeightDeg)) + imgHeight * 2;
        int windowWidth  = (int)((360 / imgWidthDeg) * imgWidth) + imgWidth;
        Mat img = Mat::zeros(windowHeight, windowWidth, CV_8UC1);
        rectangle(img, Point(3, 3), Point(15, 15), Scalar(128, 128, 0), 2, LINE_8, 0);

        int imgIdx = 0;
        //main loop
        while (!points.empty())
        {
            //take the first point from the left and find adjacents to it
            Point2d p = points[0];
            vector<Point2d> stitchQueue;
            stitchQueue.push_back(p);
            points.erase(points.begin());
            vector<Point2d> temp = getAdjacents(p); //find adjacents of the first point - inits toDoVector

            if (toDoVector.empty()) //if the first point has no adjacents, log it as a singleton
            {
                ofstream singletonsFile("singletons.log", ios_base::app);
                if (singletonsFile)
                {
                    singletonsFile << "x: " << p.x << std::endl;
                    singletonsFile << "y: " << p.y << std::endl;
                    singletonsFile << pathMap[Point2d(p.x, p.y)] << std::endl;
                    singletonsFile.close();
                }
                stitchQueue.pop_back();
                continue;
            }

            //find adjacencies of every reachable point
            while (!toDoVector.empty())
            {
                Point2d p2 = toDoVector[0];
                stitchQueue.push_back(p2);
                temp = getAdjacents(p2);
                toDoVector.erase(toDoVector.begin());
            }

            /* the min/max computation that used to be here was moved into getEdgeValues() */
            int xDiff = (int)round(maxX) - (int)round(minX);
            int yDiff = (int)round(maxY) - (int)round(minY);
            cout << xDiff << " xDiff " << yDiff << " yDiff \n";

            for (Point2d coords : stitchQueue)
            {
                //load the image belonging to this point and shrink it to the working size
                string jsonPath = pathMap[coords];
                string pngPath = jsonPath.erase(jsonPath.length() - 4) + "png";
                Mat imgSmall = imread(pngPath, IMREAD_GRAYSCALE);
                if (imgSmall.empty())
                {
                    cout << "Error: Cannot read image " << pngPath << std::endl;
                    continue;
                }
                resize(imgSmall, imgSmall, Size(imgWidth, imgHeight));

                //boost local contrast so SIFT finds enough keypoints
                Ptr<CLAHE> clahe = cv::createCLAHE(4.0, cv::Size(8, 8));
                clahe->apply(imgSmall, imgSmall);
                cout << cv::typeToString(imgSmall.type()) << "\n";

                //expected position of this image on the canvas, derived from its coordinates in degrees
                int xLoc = (int)(coords.x * (imgWidth / imgWidthDeg));
                int yLoc = (int)(coords.y * (imgHeight / imgHeightDeg));

                if (placeCounter == 0)
                {
                    //the very first image is pasted directly at its expected position
                    imgSmall.copyTo(img(Rect(xLoc, yLoc, imgSmall.cols, imgSmall.rows)));
                    placeCounter++;
                    continue;
                }

                //the ROI around the expected position that the new image is matched against
                int xWidth = imgWidth * 2;
                int yHeight = imgHeight * 2;
                if (xLoc + xWidth > windowWidth) xWidth = windowWidth - xLoc;
                if (yLoc + yHeight > windowHeight) yHeight = windowHeight - yLoc;

                //create the ROI
                cout << "AM DOING ROI \n \n";
                Mat roi;
                roi = img(Rect(xLoc, yLoc, xWidth, yHeight));
                roiCounter++;
                /* imshow("roi", roi); imwrite("temp.jpg", roi); waitKey(0); */

                //detect keypoints in both the ROI and the image we want to add
                cv::Ptr<cv::SIFT> siftPtr = SIFT::create();
                std::vector<cv::KeyPoint> keypointsROI, keypointsImg;
                cv::Ptr<cv::Feature2D> siftExtrPtr; //(unused)
                cv::Mat descriptorsROI, descriptorsImg;
                siftPtr->detect(roi, keypointsROI);
                siftPtr->detect(imgSmall, keypointsImg);

                //compute descriptors from the found keypoints
                cout << " SIZE \n" << keypointsROI.size() << " " << keypointsImg.size() << "\n";
                siftPtr->compute(roi, keypointsROI, descriptorsROI);
                siftPtr->compute(imgSmall, keypointsImg, descriptorsImg);

                //match the descriptors
                cv::FlannBasedMatcher matcher;
                //matcher.match part1 (one-to-one matching) was tried first:
                //std::vector<DMatch> matches; matcher.match(descriptorsROI, descriptorsImg, matches);
                //trying out knnMatch instead
                std::vector<std::vector<cv::DMatch>> matches;
                matcher.knnMatch(descriptorsROI, descriptorsImg, matches, 2); //find the 2 nearest matches

                cv::Mat imageMatches;
                drawMatches(roi, keypointsROI, imgSmall, keypointsImg, matches, imageMatches);
                cv::namedWindow("matches");
                cv::imshow("matches", imageMatches);
                //cv::waitKey(0);

                /* matcher.match "part 2" alternative that was tried here: find the min/max
                   descriptor distances of the one-to-one matches and keep only the matches
                   with distance < 3 * min_dist as the good ones */
                //knnmatcher part2 - keep only the good matches via Lowe's ratio test
                cout << "THE MATCHES SHOULD BE HERE" << '\n';
                std::vector<DMatch> good_matches;
                float ratio = 0.4; //as in Lowe's paper; can be tuned
                /* a gradual loosening of the ratio (0.4, 0.45, ...) until at least 3 good
                   matches were found was also tried here - above 0.4 the results were
                   getting fairly bad though */
                for (size_t i = 0; i < matches.size(); i++)
                {
                    if (matches[i].size() >= 2 && matches[i][0].distance < ratio * matches[i][1].distance)
                    {
                        good_matches.push_back(matches[i][0]);
                    }
                }

                //localize the new image inside the ROI
                std::vector<Point2d> obj;
                std::vector<Point2d> scene;
                for (int i = 0; i < good_matches.size(); i++)
                {
                    //get the keypoints from the good matches
                    obj.push_back(static_cast<Point2d>(keypointsImg[good_matches[i].trainIdx].pt));
                    scene.push_back(static_cast<Point2d>(keypointsROI[good_matches[i].queryIdx].pt));
                }

                //find a transformation based on the good matches
                //we do not need a homography, since only affine transformations are expected
                //(no viewpoint changes): Mat H = findHomography(Mat(obj), Mat(scene), RANSAC);
                Mat H;
                if (good_matches.size() >= 3)
                    H = estimateAffinePartial2D(Mat(obj), Mat(scene), noArray(), RANSAC);

                //error check - either there were not enough good matches OR an affine
                //transformation could not be estimated; fall back to template matching then
                if (good_matches.size() < 3 || H.empty())
                {
                    tempMatchCounter++;

                    //template matching on Canny edge maps
                    Mat workRoi, workImg;
                    workRoi = roi.clone();
                    workImg = imgSmall.clone();
                    GaussianBlur(roi, workRoi, Size(3, 3), 0);
                    GaussianBlur(imgSmall, workImg, Size(3, 3), 0);
                    Canny(workRoi, workRoi, 100, 200, 3, false);
                    Canny(workImg, workImg, 100, 200, 3, false);

                    Mat result;
                    double min_val, max_val;
                    Point min_loc, max_loc;
                    matchTemplate(workRoi, workImg, result, TM_CCOEFF);
                    minMaxLoc(result, &min_val, &max_val, &min_loc, &max_loc);

                    //blend the new image into the ROI using distance-transform weights
                    Mat roiMask, roiDT, imgWarpedMask, imgDT, roiMaskFinal, imgMaskFinal, imgNew;
                    threshold(roi, roiMask, 1, 255, THRESH_BINARY);
                    imgWarpedMask = Mat::zeros(roi.rows, roi.cols, CV_8U);
                    imgWarpedMask(Rect(max_loc.x, max_loc.y, imgSmall.cols, imgSmall.rows)) = 1.0;
                    distanceTransform(roiMask, roiDT, DIST_L2, 3);
                    distanceTransform(imgWarpedMask, imgDT, DIST_L2, 3);
                    normalize(roiDT, roiDT, 0.0, 1.0, NORM_MINMAX);
                    normalize(imgDT, imgDT, 0.0, 1.0, NORM_MINMAX);
                    cv::divide(roiDT, (roiDT + imgDT), roiMaskFinal);
                    cv::divide(imgDT, (roiDT + imgDT), imgMaskFinal);
                    normalize(roiMaskFinal, roiMaskFinal, 0.0, 255.0, NORM_MINMAX);
                    normalize(imgMaskFinal, imgMaskFinal, 0.0, 255.0, NORM_MINMAX);

                    imgNew = Mat::zeros(roi.rows, roi.cols, CV_8U);
                    imgSmall.copyTo(imgNew(Rect(max_loc.x, max_loc.y, imgSmall.cols, imgSmall.rows)));

                    roiMaskFinal.convertTo(roiMaskFinal, CV_8UC1);
                    imgMaskFinal.convertTo(imgMaskFinal, CV_8UC1);
                    roi.convertTo(roi, CV_32FC1);
                    imgNew.convertTo(imgNew, CV_32FC1);
                    roiMaskFinal.convertTo(roiMaskFinal, CV_32FC1);
                    imgMaskFinal.convertTo(imgMaskFinal, CV_32FC1);

                    //weight both layers by their masks, add them and write the blend back to the canvas
                    multiply(roiMaskFinal, roi, roi);
                    multiply(imgMaskFinal, imgNew, imgNew);
                    Mat blended;
                    add(roi, imgNew, blended);
                    blended.convertTo(blended, CV_8UC1, 1.0 / 255.0); //masks are scaled 0..255, rescale back
                    blended.copyTo(img(Rect(xLoc, yLoc, xWidth, yHeight)));
                }
                else
                {
                    //an affine transform was found - warp the new image into ROI coordinates
                    //and paste it in, using the warped image itself as the copy mask
                    Mat warped = Mat::zeros(roi.rows, roi.cols, CV_8UC1);
                    warpAffine(imgSmall, warped, H, roi.size());
                    warped.copyTo(img(Rect(xLoc, yLoc, xWidth, yHeight)), warped);
                }
                placeCounter++;
            }
        }
        imwrite("result0GIOJSDFI.png", img);
    }

    //places images on a large canvas purely via template matching on Canny edge maps
    void patternMatchTry(int imgW, int imgH)
    {
        int imgWidth = imgW;
        int imgHeight = imgH;
        if (imgWidth > 1000)
        {
            //template matching also gets expensive for large images, so shrink them
            imgWidth = int(0.4 * imgWidth);
            imgHeight = int(0.4 * imgHeight);
        }

        std::vector<double> edges = getEdgeValues();
        double minX = edges[0];
        double maxX = edges[1];
        double minY = edges[2];
        double maxY = edges[3];

        //canvas large enough for the whole 360-degree strip plus a margin
        int windowHeight = (int)(maxY * (imgHeight / imgHeightDeg)) + imgHeight * 2;
        int windowWidth  = (int)((360 / imgWidthDeg) * imgWidth) + imgWidth;
        Mat img = Mat::zeros(windowHeight, windowWidth, CV_8UC1);
        rectangle(img, Point(3, 3), Point(15, 15), Scalar(128, 128, 0), 2, LINE_8, 0);

        int imgIdx = 0;
        //main loop
        while (!points.empty())
        {
            //take the first point from the left and find adjacents to it
            Point2d p = points[0];
            vector<Point2d> stitchQueue;
            stitchQueue.push_back(p);
            points.erase(points.begin());
            vector<Point2d> temp = getAdjacents(p); //find adjacents of the first point - inits toDoVector

            if (toDoVector.empty()) //if the first point has no adjacents, log it as a singleton
            {
                ofstream singletonsFile("singletons.log", ios_base::app);
                if (singletonsFile)
                {
                    singletonsFile << "x: " << p.x << std::endl;
                    singletonsFile << "y: " << p.y << std::endl;
                    singletonsFile << pathMap[Point2d(p.x, p.y)] << std::endl;
                    singletonsFile.close();
                }
                stitchQueue.pop_back();
                continue;
            }

            //find adjacencies of every reachable point
            while (!toDoVector.empty())
            {
                Point2d p2 = toDoVector[0];
                stitchQueue.push_back(p2);
                temp = getAdjacents(p2);
                toDoVector.erase(toDoVector.begin());
            }

            /* the min/max computation that used to be here was moved into getEdgeValues() */
            int xDiff = (int)round(maxX) - (int)round(minX);
            int yDiff = (int)round(maxY) - (int)round(minY);
            cout << xDiff << " xDiff " << yDiff << " yDiff \n";

            for (Point2d coords : stitchQueue)
            {
                //load the image belonging to this point and shrink it to the working size
                string jsonPath = pathMap[coords];
                string pngPath = jsonPath.erase(jsonPath.length() - 4) + "png";
                Mat imgSmall = imread(pngPath, IMREAD_GRAYSCALE);
                if (imgSmall.empty())
                {
                    cout << "Error: Cannot read image " << pngPath << std::endl;
                    continue;
                }
                resize(imgSmall, imgSmall, Size(imgWidth, imgHeight));

                //boost local contrast before edge extraction
                Ptr<CLAHE> clahe = cv::createCLAHE(4.0, cv::Size(8, 8));
                clahe->apply(imgSmall, imgSmall);
                cout << cv::typeToString(imgSmall.type()) << "\n";

                //expected position of this image on the canvas, derived from its coordinates in degrees
                int xLoc = (int)(coords.x * (imgWidth / imgWidthDeg));
                int yLoc = (int)(coords.y * (imgHeight / imgHeightDeg));

                //the ROI around the expected position that the new image is matched against
                int xWidth = imgWidth * 2;
                int yHeight = imgHeight * 2;
                if (xLoc + xWidth > windowWidth) xWidth = windowWidth - xLoc;
                if (yLoc + yHeight > windowHeight) yHeight = windowHeight - yLoc;

                //if nothing has been placed in this area yet there is nothing to match
                //against, so paste the image directly at its expected position
                if (countNonZero(img(Rect(xLoc, yLoc, xWidth, yHeight))) == 0)
                {
                    imgSmall.copyTo(img(Rect(xLoc, yLoc, imgSmall.cols, imgSmall.rows)));
                    continue;
                }

                //create the ROI
                //we don't need to apply CLAHE to the ROI, since it consists of images
                //that already had CLAHE applied
                Mat roi;
                roi = img(Rect(xLoc, yLoc, xWidth, yHeight));

                //edge maps used for template matching
                Mat roiEdit, imgSmallEdit;
                roiEdit = roi.clone();
                imgSmallEdit = imgSmall.clone();
                GaussianBlur(roi, roiEdit, Size(3, 3), 0);
                GaussianBlur(imgSmall, imgSmallEdit, Size(3, 3), 0);
                Canny(roiEdit, roiEdit, 100, 200, 3, false);
                Canny(imgSmallEdit, imgSmallEdit, 100, 200, 3, false);

                //rectW/rectH describe the template size used in the debug drawings below
                int rectH = imgSmall.rows, rectW = imgSmall.cols;
                /* an earlier experiment matched several fixed sub-rectangles of the image
                   (at roughly 10%/50%/70% offsets, later a whole grid of them) against the
                   ROI and kept the best-scoring location; it was superseded by matching the
                   whole image below */
                //match the whole (edge-filtered) image against the ROI
                Mat result;
                double min_val, max_val;
                Point min_loc, max_loc;
                matchTemplate(roiEdit, imgSmallEdit, result, TM_CCOEFF);
                minMaxLoc(result, &min_val, &max_val, &min_loc, &max_loc);

                //debug stuff
                Mat roiWithLocation, chosenSmallRoiImg;
                imshow("matchResult", result);
                roiWithLocation = roiEdit.clone();
                chosenSmallRoiImg = imgSmallEdit.clone();
                rectangle(roiWithLocation, max_loc, Point(max_loc.x + rectW, max_loc.y + rectH), Scalar(128, 128, 0), 2, LINE_8, 0);
                rectangle(chosenSmallRoiImg, max_loc, Point(rectW, rectH), Scalar(128, 128, 0), 2, LINE_8, 0);
                imshow("roiWithLocation", roiWithLocation);
                imshow("chosenSmallRoiImg", chosenSmallRoiImg);
                //waitKey(0);

                //paste the new image at the best match position (max_loc is relative to the ROI origin)
                imgSmall.copyTo(img(Rect(xLoc + max_loc.x, yLoc + max_loc.y, imgSmall.cols, imgSmall.rows)));
            }
        }
        imwrite("result0GIOJSDFI.png", img);
    }
};
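/* A minimal usage sketch, not part of the original class: the "../data/" directory (taken
   from the commented-out default in load()) and the 1280x720 frame size are assumptions -
   adjust them to the actual dataset. It shows the intended call order: load the JSON
   metadata, then either let cv::Stitcher combine each group of adjacent images
   (stitchImgs) or place them on one large canvas (patternMatchTry / cumdump). */
int main()
{
    PointBase pb;
    if (pb.load("../data/") != 0)      //directory with the *.json / *.png pairs (assumed)
        return 1;

    pb.printPoints(1);                 //list the loaded points

    pb.stitchImgs();                   //stitch each group of adjacent images with cv::Stitcher
    //pb.patternMatchTry(1280, 720);   //alternative: paste images onto one canvas (assumed frame size)
    //pb.cumdump(1280, 720);           //alternative: SIFT-based placement with template-match fallback
    return 0;
}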