# Python Computer Vision Programming - Chapter 2 Local Image Descriptors

2022-08-06 07:37:06

2.1 Harris角点检测器

2.2 SIFT（Scale-Invariant Feature Transform,尺度不变特征变换）

2.2.1 兴趣点

2.2.2 描述子

2.2.3 检测兴趣点

2.2.4 匹配描述子

2.3 匹配地理标记图像

2.3.1 从Panoramio下载地理标记图像

2.3.2 使用局部描述子匹配

2.3.3 可视化连接的图像

# 2.1 Harris角点检测器

Harris角点检测算法是一个极为简单的角点检测算法.该算法的主要思想是,如果像素周围显示存在多于一个方向的边,我们认为该点为兴趣点.该点就称为角点.

在图像域中的点 $x$ 上,Harris 矩阵 $M_I$ 是一个对称半正定矩阵,定义为 $M_I = \nabla I \, \nabla I^T$,并用高斯核 $G_\sigma$ 加权得到 $\bar{M_I} = G_\sigma * M_I$。取决于区域 $\nabla I$ 的值,Harris 矩阵 $\bar{M_I}$ 的特征值 $\lambda_1$、$\lambda_2$ 有三种情况：

• 如果 $\lambda_1$ 和 $\lambda_2$ 都是很大的正数,则该点 $x$ 为角点;
• 如果 $\lambda_1$ 很大,$\lambda_2 \approx 0$,则该区域内存在一个边,该区域内平均 $\bar{M_I}$ 的特征值不会变化太大;
• 如果 $\lambda_1 \approx \lambda_2 \approx 0$,则该区域为空（没有边和角点）。
# -*- coding: utf-8 -*-
"""Harris corner detection demo: show the response image and the corners
detected at three different threshold values."""
from pylab import *
from PIL import Image
from PCV.localdescriptors import harris

# Read the image and convert to grayscale
im = array(Image.open('8.jpg').convert('L'))

# Compute the Harris corner response at every pixel
harrisim = harris.compute_harris_response(im)

# Invert the response so strong corners show up dark when displayed
harrisim1 = 255 - harrisim

figure()
gray()

# Plot the Harris response image
subplot(141)
imshow(harrisim1)
print(harrisim1.shape)  # original printed the literal string 'harrisim1.shape'
axis('off')
axis('equal')

# Detect corners at three thresholds; a higher threshold keeps fewer,
# stronger corners
threshold = [0.01, 0.05, 0.1]
for i, thres in enumerate(threshold):
    filtered_coords = harris.get_harris_points(harrisim, 6, thres)
    subplot(1, 4, i + 2)
    imshow(im)
    print(im.shape)  # original printed the literal string 'im.shape'
    # points are (row, col); matplotlib wants x = col, y = row
    plot([p[1] for p in filtered_coords],
         [p[0] for p in filtered_coords], '*')
    axis('off')

# The book uses the helper from the PCV harris module instead:
# harris.plot_harris_points(im, filtered_coords)

# plot only 200 strongest
# harris.plot_harris_points(im, filtered_coords[:200])

show()

从图像结果可以直观地看出：增大角点响应阈值,会降低角点检测的灵敏性,减少被检测角点的数量;减小角点响应阈值,会增加角点检测的灵敏性,增加被检测角点的数量。

Harris 角点检测器仅仅能够检测出图像中的兴趣点,但是没有给出通过比较图像间的兴趣点来寻找匹配角点的方法.我们需要在每个点上加入描述子信息,并给出一 个比较这些描述子的方法.

# -*- coding: utf-8 -*-
"""Match Harris corner descriptors between two views of the same scene."""
from pylab import *
from PIL import Image
from PCV.localdescriptors import harris
from PCV.tools.imtools import imresize


def _harris_descriptors(img, width):
    """Detect Harris points in img and return (coords, descriptors)."""
    response = harris.compute_harris_response(img, 5)
    coords = harris.get_harris_points(response, width + 1)
    return coords, harris.get_descriptors(img, coords, width)


im1 = array(Image.open("15.jpg").convert("L"))
im2 = array(Image.open("15.1.jpg").convert("L"))

# (resizing the inputs first would speed up matching)

wid = 5
filtered_coords1, d1 = _harris_descriptors(im1, wid)
filtered_coords2, d2 = _harris_descriptors(im2, wid)

print ('starting matching')
matches = harris.match_twosided(d1, d2)

figure()
gray()
harris.plot_matches(im1, im2, filtered_coords1, filtered_coords2, matches)
show()

算法结果中存在一些不正确的匹配,这可能是因为图像像素块的互相关矩阵描述性较弱。同时,如果图像尺寸较大,处理时间会非常长。

# 2.2 SIFT（Scale-Invariant Feature Transform,尺度不变特征变换）

SIFT特征包括兴趣点检测器和描述子。SIFT描述子具有非常强的稳健性,这在很大程度上是SIFT特征能够成功和流行的主要原因。

## 2.2.1 兴趣点

SIFT特征使用高斯差分函数来定位兴趣点： ## 2.2.3 检测兴趣点

# -*- coding: utf-8 -*-
"""Detect SIFT features and compare them with Harris corners side by side."""
from PIL import Image
from pylab import *
from PCV.localdescriptors import sift
from PCV.localdescriptors import harris

# Chinese font support for matplotlib titles
from matplotlib.font_manager import FontProperties

# NOTE(review): Windows-specific font path — adjust on other platforms
font = FontProperties(fname=r"c:\windows\fonts\SimSun.ttc", size=14)

imname = '8.jpg'
im = array(Image.open(imname).convert('L'))
sift.process_image(imname, 'empire.sift')
# Read back the SIFT locations (l1) and descriptors (d1); the original
# snippet used l1 without ever defining it (NameError)
l1, d1 = sift.read_features_from_file('empire.sift')

figure()
gray()
subplot(131)
sift.plot_features(im, l1, circle=False)
title(u'(a)SIFT特征', fontproperties=font)
subplot(132)
sift.plot_features(im, l1, circle=True)
title(u'(b)用圆圈表示SIFT特征尺度', fontproperties=font)

# Harris corners on the same image, for comparison
harrisim = harris.compute_harris_response(im)

subplot(133)
filtered_coords = harris.get_harris_points(harrisim, 6, 0.1)
imshow(im)
# points are (row, col); matplotlib wants x = col, y = row
plot([p[1] for p in filtered_coords],
     [p[0] for p in filtered_coords], '*')
axis('off')
title(u'(c)Harris角点', fontproperties=font)
show()

从对比结果可以看出,SIFT和Harris两种算法对特征点的选取有很大差别。

## 2.2.4 匹配描述子

"""Match SIFT descriptors between two images and plot the matches.

Image filenames may be given as the first two command-line arguments;
otherwise a default pair is used.
"""
from PIL import Image
from pylab import *
import sys
from PCV.localdescriptors import sift

if len(sys.argv) >= 3:
    # original bound the whole argv list to both names instead of indexing it
    im1f, im2f = sys.argv[1], sys.argv[2]
else:
    im1f = '15.jpg'
    im2f = '15.1.jpg'
im1 = array(Image.open(im1f))
im2 = array(Image.open(im2f))

sift.process_image(im1f, 'out_sift_1.txt')
# read back locations/descriptors; l1/d1 were never defined in the original
l1, d1 = sift.read_features_from_file('out_sift_1.txt')
figure()
gray()
subplot(121)
sift.plot_features(im1, l1, circle=False)

sift.process_image(im2f, 'out_sift_2.txt')
l2, d2 = sift.read_features_from_file('out_sift_2.txt')
subplot(122)
sift.plot_features(im2, l2, circle=False)

# matches = sift.match(d1, d2)
matches = sift.match_twosided(d1, d2)
# nonzero() returns a tuple of index arrays; count matches via the first one
print('{} matches'.format(len(matches.nonzero()[0])))

figure()
gray()
sift.plot_matches(im1, im2, l1, l2, matches, show_below=True)
show()

SIFT算法先找到关键点并计算其位置,再基于特征距离以及最优与次优匹配特征距离的比值进行特征匹配,从而提高了匹配的准确率。

# 2.3 匹配地理标记图像

## 2.3.1 从Panoramio下载地理标记图像

Google提供的照片分享服务 Panoramio（http://www.panoramio.com/）已停止运营,该部分算法暂时搁置。

## 2.3.2 使用局部描述子匹配

Take several pictures at a fixed location for processing

"""Compute a pairwise SIFT match-count matrix over a folder of images."""
import json
import os
import urllib
# import urlparse
# from pylab import *
from PIL import Image
from PCV.localdescriptors import sift
from PCV.tools import imtools
from numpy import *
# import pydot

# Folder of images to compare; imlist was undefined in the original snippet
path = "G:\\picture\\"
imlist = imtools.get_imlist(path)
nbr_images = len(imlist)

# One .sift feature file per image (swap the 3-character extension)
featlist = [imname[:-3] + 'sift' for imname in imlist]
for i, imname in enumerate(imlist):
    sift.process_image(imname, featlist[i])

# matchscores[i, j] = number of two-sided SIFT matches between images i and j
matchscores = zeros((nbr_images, nbr_images))

for i in range(nbr_images):
    for j in range(i, nbr_images):  # only compute upper triangle
        print('comparing ', imlist[i], imlist[j])
        # read stored descriptors; d1/d2 were undefined in the original
        l1, d1 = sift.read_features_from_file(featlist[i])
        l2, d2 = sift.read_features_from_file(featlist[j])
        matches = sift.match_twosided(d1, d2)
        nbr_matches = sum(matches > 0)
        print('number of matches = ', nbr_matches)
        matchscores[i, j] = nbr_matches

# mirror the upper triangle into the lower one (score matrix is symmetric)
for i in range(nbr_images):
    for j in range(i + 1, nbr_images):  # no need to copy diagonal
        matchscores[j, i] = matchscores[i, j]

print(matchscores)  # original printed the undefined name 'mathscores'

得到matchscores矩阵,其中保存了每对图像之间匹配特征的数量。

## 2.3.3 可视化连接的图像

首先通过图像间是否具有匹配的局部描述子来定义图像间的连接,然后可视化这些连接情况。为了完成可视化,我们可以在图中显示这些图像,图的边代表连接。可以使用pydot工具包（http://code.google.com/p/pydot/）,此处省略配置过程。该工具包是功能强大的GraphViz图形库的Python接口。

# -*- coding: utf-8 -*-
"""Visualize image connections: build the pairwise SIFT match matrix, then
render images whose match count exceeds a threshold as a pydot/GraphViz
graph with thumbnail nodes and edges between matching pairs."""
import json
import os
import urllib
# import urlparse
from pylab import *
from PIL import Image
from PCV.localdescriptors import sift
from PCV.tools import imtools
import pydot
from numpy import *
# os.environ["PATH"] += os.pathsep + 'C:/Program Files (x86)/Graphviz2.38/bin/'
path = "G:\\picture\\path2\\"

# imlist was undefined in the original snippet
imlist = imtools.get_imlist(path)
nbr_images = len(imlist)

# One .sift feature file per image (swap the 3-character extension)
featlist = [imname[:-3] + 'sift' for imname in imlist]
for i, imname in enumerate(imlist):
    sift.process_image(imname, featlist[i])

matchscores = zeros((nbr_images, nbr_images))

for i in range(nbr_images):
    for j in range(i, nbr_images):  # only compute upper triangle
        print('comparing ', imlist[i], imlist[j])
        # read stored descriptors; d1/d2 were undefined in the original
        l1, d1 = sift.read_features_from_file(featlist[i])
        l2, d2 = sift.read_features_from_file(featlist[j])
        matches = sift.match_twosided(d1, d2)
        nbr_matches = sum(matches > 0)
        print('number of matches = ', nbr_matches)
        matchscores[i, j] = nbr_matches

# copy values: mirror the upper triangle (matrix is symmetric)
for i in range(nbr_images):
    for j in range(i + 1, nbr_images):  # no need to copy diagonal
        matchscores[j, i] = matchscores[i, j]

# --- visualization ---

threshold = 2  # min number of matches needed to create link

g = pydot.Dot(graph_type='graph')  # don't want the default directed graph

for i in range(nbr_images):
    for j in range(i + 1, nbr_images):
        if matchscores[i, j] > threshold:
            # first image in pair
            im = Image.open(imlist[i])
            im.thumbnail((100, 100))
            filename = path + str(i) + '.jpg'
            im.save(filename)  # need temporary files of the right size
            # the original never added nodes/edges, so the rendered graph
            # contained no thumbnails; register the node with its image
            g.add_node(pydot.Node(str(i), fontcolor='transparent',
                                  shape='rectangle', image=filename))

            # second image in pair
            im = Image.open(imlist[j])
            im.thumbnail((100, 100))
            filename = path + str(j) + '.jpg'
            im.save(filename)  # need temporary files of the right size
            g.add_node(pydot.Node(str(j), fontcolor='transparent',
                                  shape='rectangle', image=filename))

            # connect the two matching images
            g.add_edge(pydot.Edge(str(i), str(j)))

g.write_jpg(r'G:\picture\path2\result.jpg')


配置好环境后可能出现如下错误：

    "dot" not found in path

解决方法：在 pydot 源码中将

    self.prog = 'dot'

修改为

    self.prog = r'D:\Graphviz\bin\dot.exe'

处理后目录中会生成缩略图,连接正常,但结果图中没有缩略图（注意：原代码没有调用 `g.add_node`/`g.add_edge` 把节点和边加入图中,这很可能是结果图为空的原因）,暂时搁置。