function Image_Features(image_file, Target)
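% IMAGE_FEATURES  Locate a reference object in a cluttered scene using SURF features.
%   Image_Features(image_file, Target) reads the scene image from image_file
%   and the reference (object) image from Target, detects and matches SURF
%   features, and visualizes the intermediate results.
%
%   Example usage (the file names below are hypothetical placeholders):
%     Image_Features('cluttered_scene.jpg', 'box_reference.png');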
% Read the reference image containing the object of interest.
boxImage = imread(Target);
boxImage = rgb2gray(boxImage);
figure;
imshow(boxImage);
title('Image of a Box');
% Read the target image containing a cluttered scene.
sceneImage = imread(image_file);
sceneImage = rgb2gray(sceneImage);
figure;
imshow(sceneImage);
title('Image of a Cluttered Scene');
% Step 2: Detect Feature Points
% Detect feature points in both images.
boxPoints = detectSURFFeatures(boxImage);
scenePoints = detectSURFFeatures(sceneImage);
% Visualize the strongest feature points found in the reference image.
figure;
imshow(boxImage);
title('100 Strongest Feature Points from Box Image');
hold on;
plot(selectStrongest(boxPoints, 100));
% Visualize the strongest feature points found in the target image.
figure;
imshow(sceneImage);
title('300 Strongest Feature Points from Scene Image');
hold on;
plot(selectStrongest(scenePoints, 300));
% Step 3: Extract Feature Descriptors
% Extract feature descriptors at the interest points in both images.
[boxFeatures, boxPoints] = extractFeatures(boxImage, boxPoints);
[sceneFeatures, scenePoints] = extractFeatures(sceneImage, scenePoints);
% Step 4: Find Putative Point Matches
% Match the features using their descriptors.
boxPairs = matchFeatures(boxFeatures, sceneFeatures);
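% Optional tuning (a hedged sketch, not part of the original script):
% matchFeatures also accepts 'MaxRatio' (ratio test threshold) and 'Unique'
% (enforce one-to-one matches), which can reduce false matches in cluttered
% scenes, e.g.:
%   boxPairs = matchFeatures(boxFeatures, sceneFeatures, ...
%       'MaxRatio', 0.6, 'Unique', true);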
% Display putatively matched features.
matchedBoxPoints = boxPoints(boxPairs(:, 1), :);
matchedScenePoints = scenePoints(boxPairs(:, 2), :);
figure;
showMatchedFeatures(boxImage, sceneImage, matchedBoxPoints, ...
    matchedScenePoints, 'montage');
title('Putatively Matched Points (Including Outliers)');
% Step 5: Locate the Object in the Scene Using Putative Matches
% estimateGeometricTransform calculates the transformation relating the
% matched points, while eliminating outliers. This transformation allows
% us to localize the object in the scene.
%[tform, inlierBoxPoints, inlierScenePoints] = ...
%    estimateGeometricTransform(matchedBoxPoints, matchedScenePoints, 'affine');
% Display the matching point pairs with the outliers removed.
%figure;
%showMatchedFeatures(boxImage, sceneImage, inlierBoxPoints, ...
%    inlierScenePoints, 'montage');
%title('Matched Points (Inliers Only)');
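% Note (a hedged alternative, not part of the original script): in newer
% MATLAB releases estimateGeometricTransform is not recommended, and
% estgeotform2d (R2022b+) can be used instead. It returns a logical inlier
% index rather than the inlier points themselves:
%   [tform, inlierIdx] = estgeotform2d(matchedBoxPoints, ...
%       matchedScenePoints, 'affine');
%   inlierBoxPoints = matchedBoxPoints(inlierIdx, :);
%   inlierScenePoints = matchedScenePoints(inlierIdx, :);
% The resulting tform works with transformPointsForward as used below.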
% Get the bounding polygon of the reference image.
%boxPolygon = [1, 1;...                                 % top-left
%              size(boxImage, 2), 1;...                 % top-right
%              size(boxImage, 2), size(boxImage, 1);... % bottom-right
%              1, size(boxImage, 1);...                 % bottom-left
%              1, 1];                                   % top-left again to close the polygon
% Transform the polygon into the coordinate system of the target image.
% The transformed polygon indicates the location of the object in the scene.
%newBoxPolygon = transformPointsForward(tform, boxPolygon);
% Display the detected object.
%figure;
%imshow(sceneImage);
%hold on;
%line(newBoxPolygon(:, 1), newBoxPolygon(:, 2), 'Color', 'y');
%title('Detected Box');