-
Notifications
You must be signed in to change notification settings - Fork 3
/
Copy pathchapter-11-part-1.py
98 lines (84 loc) · 4.34 KB
/
chapter-11-part-1.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
import argparse
import cv2
import numpy as np
# Epipolar geometry demo: estimate the fundamental matrix from matched
# stereo features and draw the corresponding epipolar lines.
def build_arg_parser():
    """Build the command-line parser for the stereo epipolar-line demo.

    Requires paths to the left/right stereo images and the feature
    extractor name ('sift' or 'surf').

    Returns:
        argparse.ArgumentParser: configured parser.
    """
    # NOTE: the original used backslash continuations *inside* the string
    # literals, which embedded runs of indentation spaces into the help
    # text; adjacent-literal concatenation avoids that.
    parser = argparse.ArgumentParser(
        description='Find fundamental matrix using the two input '
                    'stereo images and draw epipolar lines')
    parser.add_argument("--img-left", dest="img_left", required=True,
            help="Image captured from the left view")
    parser.add_argument("--img-right", dest="img_right", required=True,
            help="Image captured from the right view")
    parser.add_argument("--feature-type", dest="feature_type",
            required=True,
            help="Feature extractor that will be used; can be "
                 "either 'sift' or 'surf'")
    return parser
def draw_lines(img_left, img_right, lines, pts_left, pts_right):
    """Draw epipolar lines on the left image and matched points on both.

    Args:
        img_left: grayscale left image (converted to BGR for drawing).
        img_right: grayscale right image (converted to BGR for drawing).
        lines: Nx3 epiline coefficients [a, b, c] with a*x + b*y + c = 0.
        pts_left: Nx2 feature points in the left image.
        pts_right: Nx2 corresponding feature points in the right image.

    Returns:
        Tuple of the two annotated BGR images (left, right).
    """
    h, w = img_left.shape
    img_left = cv2.cvtColor(img_left, cv2.COLOR_GRAY2BGR)
    img_right = cv2.cvtColor(img_right, cv2.COLOR_GRAY2BGR)
    for line, pt_left, pt_right in zip(lines, pts_left, pts_right):
        # Intersect the epiline a*x + b*y + c = 0 with x=0 and x=w to get
        # the two endpoints of the drawable segment.
        x_start, y_start = map(int, [0, -line[2] / line[1]])
        x_end, y_end = map(int, [w, -(line[2] + line[0] * w) / line[1]])
        # Bug fix: a BGR color needs 3 random components, not 2.
        color = tuple(np.random.randint(0, 255, 3).tolist())
        cv2.line(img_left, (x_start, y_start), (x_end, y_end), color, 1)
        # Bug fix: keypoint coordinates are float32; OpenCV drawing
        # functions require integer pixel coordinates.
        cv2.circle(img_left, tuple(map(int, pt_left)), 5, color, -1)
        cv2.circle(img_right, tuple(map(int, pt_right)), 5, color, -1)
    return img_left, img_right
def get_descriptors(gray_image, feature_type):
    """Detect keypoints and compute descriptors on a grayscale image.

    Args:
        gray_image: single-channel input image.
        feature_type: 'sift' or 'surf'.

    Returns:
        Tuple (keypoints, descriptors) from detectAndCompute.

    Raises:
        TypeError: if feature_type is neither 'sift' nor 'surf'.
    """
    if feature_type == 'surf':
        # cv2.SURF() is the legacy OpenCV 2.x constructor; modern builds
        # ship SURF in the xfeatures2d contrib module.
        if hasattr(cv2, 'xfeatures2d'):
            feature_extractor = cv2.xfeatures2d.SURF_create()
        else:
            feature_extractor = cv2.SURF()
    elif feature_type == 'sift':
        # cv2.SIFT() is the legacy OpenCV 2.x constructor; modern builds
        # expose cv2.SIFT_create().
        if hasattr(cv2, 'SIFT_create'):
            feature_extractor = cv2.SIFT_create()
        else:
            feature_extractor = cv2.SIFT()
    else:
        raise TypeError("Invalid feature type; should be either 'surf' "
                        "or 'sift'")
    keypoints, descriptors = feature_extractor.detectAndCompute(gray_image, None)
    return keypoints, descriptors
if __name__ == '__main__':
    args = build_arg_parser().parse_args()

    # Load both views as grayscale (imread flag 0).
    img_left = cv2.imread(args.img_left, 0)    # left image
    img_right = cv2.imread(args.img_right, 0)  # right image
    # Bug fix: cv2.imread returns None (no exception) on a bad path.
    if img_left is None or img_right is None:
        raise IOError('Could not read one or both input images')

    feature_type = args.feature_type
    if feature_type not in ['sift', 'surf']:
        raise TypeError("Invalid feature type; has to be either 'sift' "
                        "or 'surf'")

    scaling_factor = 1.0
    img_left = cv2.resize(img_left, None, fx=scaling_factor,
            fy=scaling_factor, interpolation=cv2.INTER_AREA)
    img_right = cv2.resize(img_right, None, fx=scaling_factor,
            fy=scaling_factor, interpolation=cv2.INTER_AREA)

    kps_left, des_left = get_descriptors(img_left, feature_type)
    kps_right, des_right = get_descriptors(img_right, feature_type)

    # FLANN parameters (KD-tree index over the float descriptors).
    FLANN_INDEX_KDTREE = 0
    index_params = dict(algorithm=FLANN_INDEX_KDTREE, trees=5)
    search_params = dict(checks=50)

    # Get the matches based on the descriptors.
    flann = cv2.FlannBasedMatcher(index_params, search_params)
    matches = flann.knnMatch(des_left, des_right, k=2)

    # Lowe's ratio test to retain only the good matches.
    pts_left_image = []
    pts_right_image = []
    for match_pair in matches:
        # knnMatch can return fewer than k neighbors for some queries;
        # skip those instead of crashing on unpacking.
        if len(match_pair) < 2:
            continue
        m, n = match_pair
        if m.distance < 0.7 * n.distance:
            pts_left_image.append(kps_left[m.queryIdx].pt)
            pts_right_image.append(kps_right[m.trainIdx].pt)

    pts_left_image = np.float32(pts_left_image)
    pts_right_image = np.float32(pts_right_image)

    F, mask = cv2.findFundamentalMat(pts_left_image, pts_right_image,
            cv2.FM_LMEDS)
    # Bug fix: with too few good matches findFundamentalMat returns
    # None, which previously crashed later with an opaque error.
    if F is None or mask is None:
        raise RuntimeError('Not enough good matches to estimate the '
                           'fundamental matrix')

    # Selecting only the inliers.
    pts_left_image = pts_left_image[mask.ravel() == 1]
    pts_right_image = pts_right_image[mask.ravel() == 1]

    # Epilines in the left image (index 2 = lines for right-image points)
    # and the corresponding feature points on the right image.
    lines1 = cv2.computeCorrespondEpilines(pts_right_image.reshape(-1, 1, 2),
            2, F)
    lines1 = lines1.reshape(-1, 3)
    img_left_lines, img_right_pts = draw_lines(img_left, img_right, lines1,
            pts_left_image, pts_right_image)

    # Epilines in the right image (index 1 = lines for left-image points)
    # and the corresponding feature points on the left image.
    lines2 = cv2.computeCorrespondEpilines(pts_left_image.reshape(-1, 1, 2),
            1, F)
    lines2 = lines2.reshape(-1, 3)
    img_right_lines, img_left_pts = draw_lines(img_right, img_left, lines2,
            pts_right_image, pts_left_image)

    cv2.imshow('Epi lines on left image', img_left_lines)
    cv2.imshow('Feature points on right image', img_right_pts)
    cv2.imshow('Epi lines on right image', img_right_lines)
    cv2.imshow('Feature points on left image', img_left_pts)
    cv2.waitKey()
    cv2.destroyAllWindows()