我使用 SIFT 特征检测器,需要两个刚性二维变换图像的角度 AND 中心平移。 因此我想直接从变换矩阵中提取信息。
附件是一个最小的工作示例。这个想法是用我从图像配准中获得的角度构建一个旋转矩阵,然后通过矩阵乘法向后提取平移矩阵。我认为它应该有效,但缺少一些小东西。最小工作示例的目标是计算图像配准的初始设置参数(tx,ty,角度)。
import numpy as np
import cv2 as cv, cv2
import matplotlib.pyplot as plt
import math
img=cv2.imread(r"\lena_std.tif",cv2.IMREAD_GRAYSCALE)
tx=0 #parameters to play which should be totally compensated by the code
ty=0
angle=60
rows,cols= img.shape
rot_mat = cv2.getRotationMatrix2D((cols/2,rows/2),angle,1)
rot_mat=np.vstack([rot_mat, np.array([0,0,1])])
trans_mat = np.array([
[1, 0, tx],
[0, 1, ty],
[0, 0, 1]
], dtype=np.float32)
M=trans_mat @ rot_mat
reference_image = img
test_image = cv2.warpAffine(img,M[:2,:3],(cols,rows))
def similarity_sift_flann_affine2D (reference_image, test_image):
img1=test_image.astype(np.uint8)
img2=reference_image.astype(np.uint8)
# Initiate SIFT detector
sift = cv.SIFT_create()
# find the keypoints and descriptors with SIFT
kp1, des1 = sift.detectAndCompute(img1,None)
kp2, des2 = sift.detectAndCompute(img2,None)
FLANN_INDEX_KDTREE = 1
index_params = dict(algorithm = FLANN_INDEX_KDTREE, trees = 5)
search_params = dict(checks = 50)
flann = cv.FlannBasedMatcher(index_params, search_params)
matches = flann.knnMatch(des1,des2,k=2)
# store all the good matches as per Lowe's ratio test.
good = []
for m,n in matches:
if m.distance < 0.7*n.distance: #Lowes ratio test
good.append(m)
MIN_MATCH_COUNT=10
if len(good)>=MIN_MATCH_COUNT:
src_pts = np.float32([ kp1[m.queryIdx].pt for m in good ]).reshape(-1,1,2)
dst_pts = np.float32([ kp2[m.trainIdx].pt for m in good ]).reshape(-1,1,2)
M, mask = cv2.estimateAffinePartial2D(src_pts, dst_pts)
#M,mask = cv.estimateAffine2D(src_pts, dst_pts)
else:
print( "Not enough matches are found - {}/{}".format(len(good), MIN_MATCH_COUNT) )
matchesMask = None
return M
M1=similarity_sift_flann_affine2D (reference_image, test_image)
sizeImg1=img.shape
u, _, vh = np.linalg.svd(M[0:2, 0:2])
R = u @ vh
angle2 = math.atan2(R[1,0], R[0,0])
angle2 =np.rad2deg(angle2)
cor_rot_mat = cv2.getRotationMatrix2D((cols/2,rows/2),-angle2,1)
cor_rot_mat=np.vstack([cor_rot_mat, np.array([0,0,1])])
M1=np.vstack([M1, np.array([0,0,1])])
#cor_trans_mat= np.linalg.inv(cor_rot_mat) @ M1
cor_trans_mat= M1 @ np.linalg.inv(cor_rot_mat)
print("angle diff", str(np.abs(angle2)-np.abs(angle)))
print("tx diff", str(np.abs(tx)-np.abs(cor_trans_mat[0][2])))
print("ty diff", str(np.abs(ty)-np.abs(cor_trans_mat[1][2])))
有人可以帮忙吗?非常感谢!
我找到了解决方案,我的代码背后的理论是正确的。错误是angle2的符号,我注释掉了错误的逆矩阵乘法。
此代码有效:
import numpy as np
import cv2 as cv, cv2
import matplotlib.pyplot as plt
import math
img=cv2.imread(r"\lena_std.tif",cv2.IMREAD_GRAYSCALE)
tx=36 #parameters to play which should be totally compensated by the code
ty=120
angle=30
rows,cols= img.shape
rot_mat = cv2.getRotationMatrix2D((cols/2,rows/2),angle,1)
rot_mat=np.vstack([rot_mat, np.array([0,0,1])])
trans_mat = np.array([
[1, 0, tx],
[0, 1, ty],
[0, 0, 1]
], dtype=np.float32)
M=trans_mat @ rot_mat
reference_image = img
test_image = cv2.warpAffine(img,M[:2,:3],(cols,rows))
def similarity_sift_flann_affine2D (reference_image, test_image):
img1=test_image.astype(np.uint8)
img2=reference_image.astype(np.uint8)
# Initiate SIFT detector
sift = cv.SIFT_create()
# find the keypoints and descriptors with SIFT
kp1, des1 = sift.detectAndCompute(img1,None)
kp2, des2 = sift.detectAndCompute(img2,None)
FLANN_INDEX_KDTREE = 1
index_params = dict(algorithm = FLANN_INDEX_KDTREE, trees = 5)
search_params = dict(checks = 50)
flann = cv.FlannBasedMatcher(index_params, search_params)
matches = flann.knnMatch(des1,des2,k=2)
# store all the good matches as per Lowe's ratio test.
good = []
for m,n in matches:
if m.distance < 0.7*n.distance: #Lowes ratio test
good.append(m)
MIN_MATCH_COUNT=10
if len(good)>=MIN_MATCH_COUNT:
src_pts = np.float32([ kp1[m.queryIdx].pt for m in good ]).reshape(-1,1,2)
dst_pts = np.float32([ kp2[m.trainIdx].pt for m in good ]).reshape(-1,1,2)
M, mask = cv2.estimateAffinePartial2D(src_pts, dst_pts)
#M,mask = cv.estimateAffine2D(src_pts, dst_pts)
else:
print( "Not enough matches are found - {}/{}".format(len(good), MIN_MATCH_COUNT) )
matchesMask = None
return M
M1=similarity_sift_flann_affine2D (reference_image, test_image)
sizeImg1=img.shape
u, _, vh = np.linalg.svd(M[0:2, 0:2])
R = u @ vh
angle2 = math.atan2(R[1,0], R[0,0])
angle2 =np.rad2deg(angle2)
cor_rot_mat = cv2.getRotationMatrix2D((cols/2,rows/2),angle2,1)
cor_rot_mat=np.vstack([cor_rot_mat, np.array([0,0,1])])
M1=np.vstack([M1, np.array([0,0,1])])
cor_trans_mat= np.linalg.inv(cor_rot_mat) @ M1
print("angle diff", str(np.abs(angle2)-np.abs(angle)))
print("tx diff", str(np.abs(tx)-np.abs(cor_trans_mat[0][2])))
print("ty diff", str(np.abs(ty)-np.abs(cor_trans_mat[1][2])))