public class ImageJoint {
    /** Minimum number of ratio-test survivors required before a homography is attempted. */
    private static final int MIN = 2;

    /**
     * Stitches two images together using SIFT feature matching, Lowe's ratio test and a
     * RANSAC-estimated homography, then blends and crops the result.
     *
     * @param imagePath1 path of the first (base) image
     * @param imagePath2 path of the second image, which is warped onto the first
     * @param flag       false -> left/right (horizontal) stitch, true -> top/bottom (vertical) stitch
     * @return the stitched image as a Bitmap, or null when an input cannot be read or
     *         there are not enough good matches
     */
    public static Bitmap JointImage(String imagePath1, String imagePath2, boolean flag) {
        Mat img1 = Imgcodecs.imread(imagePath1);
        Mat img2 = Imgcodecs.imread(imagePath2);
        // Use short-circuit || (original used non-short-circuit |) and the same
        // logging channel as the rest of the method.
        if (img1.empty() || img2.empty()) {
            Log.e("ImageJoint", "Failed to load image");
            return null;
        }
        if (flag) {
            // Rotate both inputs 90° counter-clockwise so a vertical stitch can reuse
            // the horizontal-stitch pipeline below; the result is rotated back at the end.
            Mat rotated1 = new Mat();
            Core.transpose(img1, rotated1);
            Core.flip(rotated1, rotated1, 0); // flipCode 0 -> CCW 90°, 1 -> CW 90°
            img1 = rotated1;
            Mat rotated2 = new Mat();
            Core.transpose(img2, rotated2);
            Core.flip(rotated2, rotated2, 0);
            img2 = rotated2;
        }
        // Resize img2 to img1's size so both feature sets share a common scale.
        Size newSize = new Size(img1.cols(), img1.rows());
        Imgproc.resize(img2, img2, newSize);

        // Detect SIFT keypoints and compute descriptors for both images.
        Feature2D sift = SIFT.create();
        MatOfKeyPoint keypoints1 = new MatOfKeyPoint();
        Mat descriptors1 = new Mat();
        sift.detectAndCompute(img1, new Mat(), keypoints1, descriptors1);
        MatOfKeyPoint keypoints2 = new MatOfKeyPoint();
        Mat descriptors2 = new Mat();
        sift.detectAndCompute(img2, new Mat(), keypoints2, descriptors2);

        // FLANN-based k-nearest-neighbour matching; k = 2 feeds the ratio test below.
        DescriptorMatcher matcher = new FlannBasedMatcher();
        List<MatOfDMatch> matches = new ArrayList<>();
        matcher.knnMatch(descriptors1, descriptors2, matches, 2);

        // Lowe's ratio test: keep a match only when its best neighbour is clearly
        // better than the runner-up.
        final float ratioThresh = 0.65f;
        List<DMatch> goodMatches = new ArrayList<>();
        for (MatOfDMatch knn : matches) {
            DMatch[] dMatches = knn.toArray();
            if (dMatches.length < 2) {
                // knnMatch may return fewer than 2 neighbours; guard against
                // ArrayIndexOutOfBoundsException.
                continue;
            }
            if (dMatches[0].distance < ratioThresh * dMatches[1].distance) {
                goodMatches.add(dMatches[0]);
            }
        }

        // Debug visualization of the surviving matches (drawn but not persisted).
        MatOfDMatch goodMatchesMat = new MatOfDMatch();
        goodMatchesMat.fromList(goodMatches);
        Mat outputImg = new Mat();
        Features2d.drawMatches(img1, keypoints1, img2, keypoints2, goodMatchesMat, outputImg);

        if (goodMatches.size() > MIN) {
            // Collect matched point pairs. toList() is hoisted out of the loop —
            // calling it once per match was accidentally O(n^2).
            List<KeyPoint> kpList1 = keypoints1.toList();
            List<KeyPoint> kpList2 = keypoints2.toList();
            List<Point> srcPts = new ArrayList<>();
            List<Point> tgtPts = new ArrayList<>();
            for (DMatch m : goodMatches) {
                srcPts.add(kpList1.get(m.queryIdx).pt);
                tgtPts.add(kpList2.get(m.trainIdx).pt);
            }
            MatOfPoint2f srcMat = new MatOfPoint2f();
            srcMat.fromList(srcPts);
            MatOfPoint2f tgtMat = new MatOfPoint2f();
            tgtMat.fromList(tgtPts);

            // Homography img1 -> img2 estimated with RANSAC (2 px reprojection threshold).
            // (The original allocated a throwaway Mat here that was immediately overwritten.)
            Mat mask = new Mat();
            Mat homography = Calib3d.findHomography(srcMat, tgtMat, Calib3d.RANSAC, 2, mask);

            // Warp img2 into img1's perspective with the inverse homography, on a canvas
            // wide enough to hold both images side by side.
            Mat warpimg = new Mat();
            Mat invHomography = new Mat();
            Core.invert(homography, invHomography);
            Imgproc.warpPerspective(img2, warpimg, invHomography,
                    new Size(img1.cols() + img2.cols(), img2.rows()));

            // Simple overlay: paste img1 into the top-left corner of the warped canvas.
            Mat direct = new Mat();
            warpimg.copyTo(direct);
            Mat roi = direct.submat(new Rect(0, 0, img1.cols(), img1.rows()));
            img1.copyTo(roi);

            // Locate the overlap region: columns where both img1 and the warped img2
            // contain at least one non-black pixel.
            int left = 0;
            int right = img1.cols();
            // Left boundary: first overlapping column from the left.
            for (int col = 0; col < img1.cols(); col++) {
                boolean img1HasNonZero = false;
                boolean warpimgHasNonZero = false;
                for (int row = 0; row < img1.rows(); row++) {
                    if (img1.get(row, col) != null && !Arrays.equals(img1.get(row, col), new double[]{0, 0, 0})) {
                        img1HasNonZero = true;
                    }
                    if (warpimg.get(row, col) != null && !Arrays.equals(warpimg.get(row, col), new double[]{0, 0, 0})) {
                        warpimgHasNonZero = true;
                    }
                }
                if (img1HasNonZero && warpimgHasNonZero) {
                    left = col;
                    break;
                }
            }
            // Right boundary: first overlapping column from the right.
            for (int col = img1.cols() - 1; col >= 0; col--) {
                boolean img1HasNonZero = false;
                boolean warpimgHasNonZero = false;
                for (int row = 0; row < img1.rows(); row++) {
                    if (img1.get(row, col) != null && !Arrays.equals(img1.get(row, col), new double[]{0, 0, 0})) {
                        img1HasNonZero = true;
                    }
                    if (warpimg.get(row, col) != null && !Arrays.equals(warpimg.get(row, col), new double[]{0, 0, 0})) {
                        warpimgHasNonZero = true;
                    }
                }
                if (img1HasNonZero && warpimgHasNonZero) {
                    right = col;
                    break;
                }
            }

            // Alpha-blend img1 and the warped img2 across the overlap, weighting each
            // pixel by its horizontal distance to the overlap boundaries.
            // NOTE(review): Mat.get(row, col) never returns null for in-bounds indices,
            // so the two null branches below are effectively unreachable — the original
            // probably intended a black-pixel check; confirm before changing.
            // NOTE(review): 'res' is computed but never merged into the returned image
            // ('direct' is cropped and returned instead) — presumably something like
            // res.copyTo(direct.submat(...)) was intended; confirm with the author.
            Mat res = new Mat(img1.rows(), img1.cols(), CvType.CV_8UC3);
            for (int row = 0; row < img1.rows(); row++) {
                for (int col = 0; col < img1.cols(); col++) {
                    if (img1.get(row, col) == null) {
                        res.put(row, col, warpimg.get(row, col));
                    } else if (warpimg.get(row, col) == null) {
                        res.put(row, col, img1.get(row, col));
                    } else {
                        double[] img1Pixel = img1.get(row, col);
                        double[] warpimgPixel = warpimg.get(row, col);
                        double srcImgLen = Math.abs(col - left);
                        double testImgLen = Math.abs(col - right);
                        double alpha = srcImgLen / (srcImgLen + testImgLen);
                        double[] resPixel = new double[3];
                        for (int i = 0; i < 3; i++) {
                            resPixel[i] = img1Pixel[i] * (1 - alpha) + warpimgPixel[i] * alpha;
                        }
                        res.put(row, col, resPixel);
                    }
                }
            }

            // Stitched (overlay) result.
            Bitmap img3 = Bitmap.createBitmap(direct.cols(), direct.rows(), Bitmap.Config.ARGB_8888);
            // Warped-image result.
            Bitmap img4 = Bitmap.createBitmap(warpimg.cols(), warpimg.rows(), Bitmap.Config.ARGB_8888);
            // Crop away the black border of the stitched image.
            Mat resultImg = cropBlackArea(direct);
            if (flag) {
                // Undo the initial CCW rotation for the vertical-stitch case.
                Mat rotated3 = new Mat();
                Core.transpose(resultImg, rotated3);
                Core.flip(rotated3, rotated3, 1);
                resultImg = rotated3;
            }
            Bitmap img5 = Bitmap.createBitmap(resultImg.cols(), resultImg.rows(), Bitmap.Config.ARGB_8888);
            Utils.matToBitmap(direct, img3);
            Utils.matToBitmap(warpimg, img4);
            Utils.matToBitmap(resultImg, img5);
            // Persist intermediate results for debugging.
            Imgcodecs.imwrite((StorageManager.Join(StorageManager.GetStoragePath(), "joint_direct.jpg")), direct);
            Imgcodecs.imwrite((StorageManager.Join(StorageManager.GetStoragePath(), "joint_warpimg.jpg")), warpimg);
            Imgcodecs.imwrite((StorageManager.Join(StorageManager.GetStoragePath(), "joint_cropBlackArea.jpg")), resultImg);
            return img5;
        } else {
            Log.e("MainActivity", "Not enough matches!");
        }
        return null;
    }

    /**
     * Crops the black border of an image by thresholding it and taking the bounding
     * rectangle of the largest external contour.
     *
     * @param img the BGR image to crop
     * @return the cropped region as a Mat view, or null when no contour is found
     */
    public static Mat cropBlackArea(Mat img) {
        // Convert to grayscale, then binarize: any pixel brighter than 1 is "content".
        Mat gray = new Mat();
        Imgproc.cvtColor(img, gray, Imgproc.COLOR_BGR2GRAY);
        Mat binary = new Mat();
        Imgproc.threshold(gray, binary, 1, 255, Imgproc.THRESH_BINARY);
        // Find external contours of the content region.
        List<MatOfPoint> contours = new ArrayList<>();
        Mat hierarchy = new Mat();
        Imgproc.findContours(binary, contours, hierarchy, Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE);
        // Crop to the bounding box of the largest contour.
        if (!contours.isEmpty()) {
            MatOfPoint largestContour = contours.stream()
                    .max((c1, c2) -> Double.compare(Imgproc.contourArea(c1), Imgproc.contourArea(c2)))
                    .orElse(null);
            if (largestContour != null) {
                Rect boundingRect = Imgproc.boundingRect(largestContour);
                return new Mat(img, boundingRect);
            }
        }
        Log.e("ImageJoint", "No contours found");
        return null;
    }
}
// Android image stitching — first published 2024-08-30 15:32:38