refactored find_obj.py sample:

- handle the bad-matching case (too few matches to estimate a homography)
- use BFMatcher and FlannBasedMatcher (and thus fix a bug: the squared-L2 (L2^2) metric was used for the FLANN path); see the sketch below the commit header
Alexander Mordvintsev 2012-07-13 20:27:54 +00:00
parent 30c611f1d5
commit 7bbc42127e
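
The refactor replaces the hand-rolled match_bruteforce/match_flann helpers with cv2.BFMatcher and cv2.FlannBasedMatcher and applies the same Lowe ratio test to both. Below is a minimal standalone sketch of that matching scheme, assuming the OpenCV 2.4-era Python 2 API this sample targets (cv2.SIFT, print statements) and the box.png test images the sample loads by default; the helper ratio_test_matches and the summary loop are illustrative, not part of the sample itself.

    import numpy as np
    import cv2

    FLANN_INDEX_KDTREE = 1
    flann_params = dict(algorithm = FLANN_INDEX_KDTREE, trees = 5)

    def ratio_test_matches(matcher, kp1, desc1, kp2, desc2, r_threshold = 0.75):
        # two nearest neighbours per query descriptor, then Lowe's ratio test:
        # keep a match only if it is clearly better than the runner-up
        raw_matches = matcher.knnMatch(desc1, trainDescriptors = desc2, k = 2)
        p1, p2 = [], []
        for m in raw_matches:
            if len(m) == 2 and m[0].distance < m[1].distance * r_threshold:
                p1.append( kp1[m[0].queryIdx].pt )
                p2.append( kp2[m[0].trainIdx].pt )
        return np.float32(p1), np.float32(p2)

    if __name__ == '__main__':
        img1 = cv2.imread('../c/box.png', 0)
        img2 = cv2.imread('../c/box_in_scene.png', 0)

        detector = cv2.SIFT()
        kp1, desc1 = detector.detectAndCompute(img1, None)
        kp2, desc2 = detector.detectAndCompute(img2, None)

        # both matchers report plain L2 distances, so one threshold fits both
        matchers = [('bruteforce', cv2.BFMatcher(cv2.NORM_L2)),
                    ('flann',      cv2.FlannBasedMatcher(flann_params, {}))]  # second dict: search params

        for name, matcher in matchers:
            p1, p2 = ratio_test_matches(matcher, kp1, desc1, kp2, desc2)
            if len(p1) >= 4:  # cv2.findHomography needs at least 4 correspondences
                H, status = cv2.findHomography(p1, p2, cv2.RANSAC, 2.0)
                print '%s: %d / %d inliers/matched' % (name, np.sum(status), len(status))
            else:
                print '%s: only %d matches, homography skipped' % (name, len(p1))

The RANSAC reprojection threshold of 2.0 pixels mirrors the value used in the refactored sample.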


@@ -1,35 +1,17 @@
+'''
+Feature-based image matching sample.
+
+USAGE
+  find_obj.py [ <image1> <image2> ]
+'''
+
 import numpy as np
 import cv2
 from common import anorm
-from functools import partial
-
-help_message = '''SURF image match
-
-USAGE: findobj.py [ <image1> <image2> ]
-'''
 
 FLANN_INDEX_KDTREE = 1  # bug: flann enums are missing
 
-flann_params = dict(algorithm = FLANN_INDEX_KDTREE,
-                    trees = 4)
+flann_params = dict(algorithm = FLANN_INDEX_KDTREE, trees = 5)
 
-def match_bruteforce(desc1, desc2, r_threshold = 0.75):
-    res = []
-    for i in xrange(len(desc1)):
-        dist = anorm( desc2 - desc1[i] )
-        n1, n2 = dist.argsort()[:2]
-        r = dist[n1] / dist[n2]
-        if r < r_threshold:
-            res.append((i, n1))
-    return np.array(res)
-
-def match_flann(desc1, desc2, r_threshold = 0.6):
-    flann = cv2.flann_Index(desc2, flann_params)
-    idx2, dist = flann.knnSearch(desc1, 2, params = {})  # bug: need to provide empty dict
-    mask = dist[:,0] / dist[:,1] < r_threshold
-    idx1 = np.arange(len(desc1))
-    pairs = np.int32( zip(idx1, idx2[:,0]) )
-    return pairs[mask]
 
 def draw_match(img1, img2, p1, p2, status = None, H = None):
     h1, w1 = img1.shape[:2]
@@ -65,39 +47,49 @@ def draw_match(img1, img2, p1, p2, status = None, H = None):
 if __name__ == '__main__':
+    print __doc__
+
     import sys
     try: fn1, fn2 = sys.argv[1:3]
     except:
         fn1 = '../c/box.png'
         fn2 = '../c/box_in_scene.png'
-        print help_message
 
     img1 = cv2.imread(fn1, 0)
     img2 = cv2.imread(fn2, 0)
-    surf = cv2.SURF(1000)
-    kp1, desc1 = surf.detectAndCompute(img1, None)
-    kp2, desc2 = surf.detectAndCompute(img2, None)
-    desc1.shape = (-1, surf.descriptorSize())
-    desc2.shape = (-1, surf.descriptorSize())
+
+    detector = cv2.SIFT()
+    kp1, desc1 = detector.detectAndCompute(img1, None)
+    kp2, desc2 = detector.detectAndCompute(img2, None)
     print 'img1 - %d features, img2 - %d features' % (len(kp1), len(kp2))
 
-    def match_and_draw(match, r_threshold):
-        m = match(desc1, desc2, r_threshold)
-        matched_p1 = np.array([kp1[i].pt for i, j in m])
-        matched_p2 = np.array([kp2[j].pt for i, j in m])
-        H, status = cv2.findHomography(matched_p1, matched_p2, cv2.RANSAC, 5.0)
-        print '%d / %d inliers/matched' % (np.sum(status), len(status))
+    bf_matcher = cv2.BFMatcher(cv2.NORM_L2)
+    flann_matcher = cv2.FlannBasedMatcher(flann_params, {})  # bug: need to pass empty dict (#1329)
 
-        vis = draw_match(img1, img2, matched_p1, matched_p2, status, H)
+    def match_and_draw(matcher, r_threshold = 0.75):
+        raw_matches = matcher.knnMatch(desc1, trainDescriptors = desc2, k = 2)
+        p1, p2 = [], []
+        for m in raw_matches:
+            if len(m) == 2 and m[0].distance < m[1].distance * r_threshold:
+                m = m[0]
+                p1.append( kp1[m.queryIdx].pt )
+                p2.append( kp2[m.trainIdx].pt )
+        p1, p2 = np.float32((p1, p2))
+        if len(p1) >= 4:
+            H, status = cv2.findHomography(p1, p2, cv2.RANSAC, 2.0)
+            print '%d / %d inliers/matched' % (np.sum(status), len(status))
+        else:
+            H, status = None, None
+            print '%d matches found, not enough for homography estimation' % len(p1)
+
+        vis = draw_match(img1, img2, p1, p2, status, H)
         return vis
 
     print 'bruteforce match:',
-    vis_brute = match_and_draw( match_bruteforce, 0.75 )
+    vis_brute = match_and_draw( bf_matcher )
     print 'flann match:',
-    vis_flann = match_and_draw( match_flann, 0.6 )  # flann tends to find more distant second
-                                                    # neighbours, so r_threshold is decreased
-    cv2.imshow('find_obj SURF', vis_brute)
-    cv2.imshow('find_obj SURF flann', vis_flann)
+    vis_flann = match_and_draw( flann_matcher )
+    cv2.imshow('find_obj', vis_brute)
+    cv2.imshow('find_obj flann', vis_flann)
     0xFF & cv2.waitKey()
     cv2.destroyAllWindows()
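
A note on the metric bug named in the commit message: cv2.flann_Index reports squared Euclidean (L2^2) distances, while BFMatcher with NORM_L2 reports plain Euclidean distances, so the old code effectively applied the ratio test to squared distances. Since d1^2/d2^2 < r is the same test as d1/d2 < sqrt(r), the old FLANN threshold of 0.6 corresponds to roughly 0.77 on true distances, which is why a single r_threshold = 0.75 now works for both matchers. The small self-contained check below illustrates the equivalence on synthetic distances; the variable names are mine, not part of the sample.

    import numpy as np

    np.random.seed(0)
    # synthetic nearest / second-nearest neighbour distances for 1000 queries
    d1 = np.random.uniform(0.1, 1.0, 1000)       # distance to the best candidate
    d2 = d1 + np.random.uniform(0.0, 1.0, 1000)  # distance to the runner-up (>= d1)

    keep_l2   = d1 / d2 < np.sqrt(0.6)           # ratio test on plain L2 distances
    keep_l2sq = d1**2 / d2**2 < 0.6              # ratio test on squared (FLANN-style) distances

    # thresholding squared distances at r accepts the same matches as
    # thresholding plain distances at sqrt(r)
    print 'tests agree on %d / %d pairs, sqrt(0.6) = %.3f' % (
        np.sum(keep_l2 == keep_l2sq), len(d1), np.sqrt(0.6))

The other listed fix, handling the bad-matching case, is the len(p1) >= 4 guard in the new match_and_draw: cv2.findHomography needs at least four point correspondences to estimate the 8-parameter homography.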