生活随笔
收集整理的這篇文章主要介紹了
学习OpenCV——ORB简化版Location加速版
小編覺得挺不錯的,現在分享給大家,幫大家做個參考.
根據前面surf簡化版的結構,重新把ORB檢測的代碼給簡化以下,發現雖然速度一樣,確實能省好多行代碼,關鍵是有
BruteForceMatcher<HammingLUT>matcher的幫忙,直接省的寫了一個函數;
NB類型:class gpu::BruteForceMatcher_GPU
再加上findHomography,之后perspectiveTransform就可以location,但是這樣速度很慢;
于是改動一下,求matches的keypoints的x與y坐標和的平均值,基本上就是對象中心!!!
以這個點為中心畫與原對象大小相同的矩形框,就可以定位出大概位置,但是肯定不如透視變換準確,而且不具有尺度不變性。
但是魯棒性應該更好,因為,只要能match成功,基本都能定位中心,但是透視變換有時卻因為尺度變換過大等因素,畫出很不靠譜的矩形框!
?
[cpp] view plain
?copy ?print? #include?"opencv2/objdetect/objdetect.hpp"???#include?"opencv2/features2d/features2d.hpp"???#include?"opencv2/highgui/highgui.hpp"???#include?"opencv2/calib3d/calib3d.hpp"???#include?"opencv2/imgproc/imgproc_c.h"???#include?"opencv2/imgproc/imgproc.hpp"???????#include?<string>??#include?<vector>??#include?<iostream>????using?namespace?cv;??using?namespace?std;?????char*?image_filename1?=?"D:/src.jpg";???char*?image_filename2?=?"D:/Demo.jpg";?????int?main()??{??????Mat?img1?=?imread(?image_filename1,?CV_LOAD_IMAGE_GRAYSCALE?);??????Mat?img2?=?imread(?image_filename2,?CV_LOAD_IMAGE_GRAYSCALE?);????????int64?st,et;??????ORB?orb1(30,ORB::CommonParams(1.2,1));??????ORB?orb2(100,ORB::CommonParams(1.2,1));????????vector<KeyPoint>keys1,keys2;??????Mat?descriptor1,descriptor2;??????orb1(img1,Mat(),keys1,descriptor1,false);??????st=getTickCount();??????orb2(img2,Mat(),keys2,descriptor2,false);??????et=getTickCount()-st;??????et=et*1000/(double)getTickFrequency();??????cout<<"extract?time:"<<et<<"ms"<<endl;????????vector<DMatch>?matches;???????????????BruteForceMatcher<HammingLUT>matcher;????????st=getTickCount();??????matcher.match(descriptor1,descriptor2,matches);??????et=getTickCount()-st;??????et=et*1000/getTickFrequency();??????cout<<"match?time:"<<et<<"ms"<<endl;??????????????Mat?img_matches;??????drawMatches(?img1,?keys1,?img2,?keys2,??????????????????matches,?img_matches,?Scalar::all(-1),?Scalar::all(-1),??????????????????vector<char>(),?DrawMatchesFlags::NOT_DRAW_SINGLE_POINTS?);??????imshow("match",img_matches);??????????cout<<"match?size:"<<matches.size()<<endl;??????????waitKey(0);??????????????st=getTickCount();??????vector<Point2f>pt1;??????vector<Point2f>pt2;??????float?x=0,y=0;??????for(size_t?i=0;i<matches.size();i++)??????{??????????pt1.push_back(keys1[matches[i].queryIdx].pt);??????????pt2.push_back(keys2[matches[i].trainIdx].pt);??????????x+=keys2[matches[i].trainIdx].pt.x;??????????y+=keys2[matches[i].trainIdx].pt.y;??????}??????x=x/matches.s
ize();??????y=y/matches.size();????????????Mat?homo;??????homo=findHomography(pt1,pt2,CV_RANSAC);????????????????vector<Point2f>src_cornor(4);??????vector<Point2f>dst_cornor(4);??????src_cornor[0]=cvPoint(0,0);??????src_cornor[1]=cvPoint(img1.cols,0);??????src_cornor[2]=cvPoint(img1.cols,img1.rows);??????src_cornor[3]=cvPoint(0,img1.rows);??????perspectiveTransform(src_cornor,dst_cornor,homo);????????????Mat?img=imread(image_filename2,1);????????????line(img,dst_cornor[0],dst_cornor[1],Scalar(255,0,0),2);??????line(img,dst_cornor[1],dst_cornor[2],Scalar(255,0,0),2);??????line(img,dst_cornor[2],dst_cornor[3],Scalar(255,0,0),2);??????line(img,dst_cornor[3],dst_cornor[0],Scalar(255,0,0),2);????????????circle(img,Point(x,y),10,Scalar(0,0,255),3,CV_FILLED);??????line(img,Point(x-img1.cols/2,y-img1.rows/2),Point(x+img1.cols/2,y-img1.rows/2),Scalar(0,0,255),2);??????line(img,Point(x+img1.cols/2,y-img1.rows/2),Point(x+img1.cols/2,y+img1.rows/2),Scalar(0,0,255),2);??????line(img,Point(x+img1.cols/2,y+img1.rows/2),Point(x-img1.cols/2,y+img1.rows/2),Scalar(0,0,255),2);??????line(img,Point(x-img1.cols/2,y+img1.rows/2),Point(x-img1.cols/2,y-img1.rows/2),Scalar(0,0,255),2);????????imshow("location",img);????????????et=getTickCount()-st;??????et=et*1000/getTickFrequency();??????cout<<"location?time:"<<et<<"ms"<<endl;????????????waitKey(0);??}??
?
?
?
from:?http://blog.csdn.net/yangtrees/article/details/7545820
總結
以上是生活随笔為你收集整理的学习OpenCV——ORB简化版Location加速版的全部內容,希望文章能夠幫你解決所遇到的問題。
如果覺得生活随笔網站內容還不錯,歡迎將生活随笔推薦給好友。