features.py
import cv2 as cv
import numpy as np

np.set_printoptions(suppress=True, precision=6)


class FeatureExtractor:
    def __init__(self):
        # ORB for descriptor computation, brute-force Hamming matcher for binary descriptors
        self.orb = cv.ORB_create()
        self.bf = cv.BFMatcher(cv.NORM_HAMMING)

    def findKeypoints(self, img):
        # Detect Shi-Tomasi corners, then compute ORB descriptors at those locations
        img = cv.cvtColor(img, cv.COLOR_BGR2GRAY)
        features = cv.goodFeaturesToTrack(img, maxCorners=5000, qualityLevel=0.01, minDistance=7)
        kps = [cv.KeyPoint(x=f[0][0], y=f[0][1], size=20) for f in features]
        kps, des = self.orb.compute(img, kps)
        return kps, des

    def matcher(self, frame1, frame2):
        kps1, des1 = frame1.getFeatures()
        kps2, des2 = frame2.getFeatures()
        matches = self.bf.knnMatch(des1, des2, k=2)

        # Filter matches on absolute Hamming distance, pixel displacement, and Lowe's ratio test
        lowe_matches = []
        ratio_thresh = 0.75
        for m, n in matches:
            if m.distance < 32:
                p1x, p1y = kps1[m.queryIdx].pt
                p2x, p2y = kps2[m.trainIdx].pt
                dist = np.sqrt((p1x - p2x) ** 2 + (p1y - p2y) ** 2)
                if dist < 100:
                    if m.distance < ratio_thresh * n.distance:
                        lowe_matches.append(m)
        matches = lowe_matches

        # Keep only the matched keypoints/descriptors, aligned by match index
        kps1 = [kps1[m.queryIdx] for m in matches]
        des1 = [des1[m.queryIdx] for m in matches]
        kps2 = [kps2[m.trainIdx] for m in matches]
        des2 = [des2[m.trainIdx] for m in matches]
        frame1.setFeatures(kps1, des1)
        #frame2.setFeatures(kps2, des2)
        return kps2, des2
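

# A minimal usage sketch, not part of the original file: the Frame class below is a
# hypothetical stand-in for the project's own frame object, assumed only to expose
# the getFeatures()/setFeatures() interface that FeatureExtractor.matcher relies on.
class Frame:
    def __init__(self, img, extractor):
        self.kps, self.des = extractor.findKeypoints(img)

    def getFeatures(self):
        return self.kps, self.des

    def setFeatures(self, kps, des):
        # Convert the filtered descriptor list back to a uint8 array so the frame
        # can be fed to BFMatcher.knnMatch again on a later call.
        self.kps, self.des = kps, np.array(des)


if __name__ == "__main__":
    import sys

    extractor = FeatureExtractor()
    img1 = cv.imread(sys.argv[1])
    img2 = cv.imread(sys.argv[2])
    f1 = Frame(img1, extractor)
    f2 = Frame(img2, extractor)
    kps2, des2 = extractor.matcher(f1, f2)
    print(f"{len(kps2)} matches survived the distance and ratio tests")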