add part of opencv

Tang1705
2020-01-27 20:20:56 +08:00
parent 0c4ac1d8bb
commit a71fa47620
6518 changed files with 3122580 additions and 0 deletions


@@ -0,0 +1,163 @@
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.DMatch;
import org.opencv.core.KeyPoint;
import org.opencv.core.Mat;
import org.opencv.core.MatOfDMatch;
import org.opencv.core.MatOfKeyPoint;
import org.opencv.core.Scalar;
import org.opencv.features2d.AKAZE;
import org.opencv.features2d.DescriptorMatcher;
import org.opencv.features2d.Features2d;
import org.opencv.highgui.HighGui;
import org.opencv.imgcodecs.Imgcodecs;
import org.w3c.dom.Document;
import org.xml.sax.SAXException;
class AKAZEMatch {
public void run(String[] args) {
//! [load]
String filename1 = args.length > 2 ? args[0] : "../data/graf1.png";
String filename2 = args.length > 2 ? args[1] : "../data/graf3.png";
String filename3 = args.length > 2 ? args[2] : "../data/H1to3p.xml";
Mat img1 = Imgcodecs.imread(filename1, Imgcodecs.IMREAD_GRAYSCALE);
Mat img2 = Imgcodecs.imread(filename2, Imgcodecs.IMREAD_GRAYSCALE);
if (img1.empty() || img2.empty()) {
System.err.println("Cannot read images!");
System.exit(0);
}
File file = new File(filename3);
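// The homography file stores the nine entries of the ground-truth 3x3 homography as space-separated
// text inside a <data> element (OpenCV's FileStorage XML layout), so a plain DOM parser is enough to read it.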
DocumentBuilderFactory documentBuilderFactory = DocumentBuilderFactory.newInstance();
DocumentBuilder documentBuilder;
Document document;
Mat homography = new Mat(3, 3, CvType.CV_64F);
double[] homographyData = new double[(int) (homography.total()*homography.channels())];
try {
documentBuilder = documentBuilderFactory.newDocumentBuilder();
document = documentBuilder.parse(file);
String homographyStr = document.getElementsByTagName("data").item(0).getTextContent();
String[] splited = homographyStr.split("\\s+");
int idx = 0;
for (String s : splited) {
if (!s.isEmpty()) {
homographyData[idx] = Double.parseDouble(s);
idx++;
}
}
} catch (ParserConfigurationException e) {
e.printStackTrace();
System.exit(0);
} catch (SAXException e) {
e.printStackTrace();
System.exit(0);
} catch (IOException e) {
e.printStackTrace();
System.exit(0);
}
homography.put(0, 0, homographyData);
//! [load]
//! [AKAZE]
AKAZE akaze = AKAZE.create();
MatOfKeyPoint kpts1 = new MatOfKeyPoint(), kpts2 = new MatOfKeyPoint();
Mat desc1 = new Mat(), desc2 = new Mat();
akaze.detectAndCompute(img1, new Mat(), kpts1, desc1);
akaze.detectAndCompute(img2, new Mat(), kpts2, desc2);
//! [AKAZE]
//! [2-nn matching]
DescriptorMatcher matcher = DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE_HAMMING);
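// AKAZE's default descriptor (MLDB) is binary, so the matcher compares descriptors with the Hamming distance.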
List<MatOfDMatch> knnMatches = new ArrayList<>();
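// For every descriptor in desc1, knnMatch returns its two nearest neighbours in desc2 (k = 2),
// which is exactly what the ratio test below needs.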
matcher.knnMatch(desc1, desc2, knnMatches, 2);
//! [2-nn matching]
//! [ratio test filtering]
float ratioThreshold = 0.8f; // Nearest neighbor matching ratio
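// Lowe's ratio test: keep a match only when the best distance is clearly smaller than the
// second-best one; ambiguous matches with two similarly good candidates are discarded.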
List<KeyPoint> listOfMatched1 = new ArrayList<>();
List<KeyPoint> listOfMatched2 = new ArrayList<>();
List<KeyPoint> listOfKeypoints1 = kpts1.toList();
List<KeyPoint> listOfKeypoints2 = kpts2.toList();
for (int i = 0; i < knnMatches.size(); i++) {
DMatch[] matches = knnMatches.get(i).toArray();
// Guard against descriptors that received fewer than two neighbours
if (matches.length > 1) {
float dist1 = matches[0].distance;
float dist2 = matches[1].distance;
if (dist1 < ratioThreshold * dist2) {
listOfMatched1.add(listOfKeypoints1.get(matches[0].queryIdx));
listOfMatched2.add(listOfKeypoints2.get(matches[0].trainIdx));
}
}
}
//! [ratio test filtering]
//! [homography check]
double inlierThreshold = 2.5; // Distance threshold to identify inliers with homography check
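// Each kept keypoint from img1 is mapped into img2 with the ground-truth homography
// (x' = H*x in homogeneous coordinates, followed by division by the third component);
// a match counts as an inlier when the projected point lands within inlierThreshold pixels of its partner.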
List<KeyPoint> listOfInliers1 = new ArrayList<>();
List<KeyPoint> listOfInliers2 = new ArrayList<>();
List<DMatch> listOfGoodMatches = new ArrayList<>();
for (int i = 0; i < listOfMatched1.size(); i++) {
Mat col = new Mat(3, 1, CvType.CV_64F);
double[] colData = new double[(int) (col.total() * col.channels())];
colData[0] = listOfMatched1.get(i).pt.x;
colData[1] = listOfMatched1.get(i).pt.y;
colData[2] = 1.0;
col.put(0, 0, colData);
Mat colRes = new Mat();
Core.gemm(homography, col, 1.0, new Mat(), 0.0, colRes);
colRes.get(0, 0, colData);
Core.multiply(colRes, new Scalar(1.0 / colData[2]), col);
col.get(0, 0, colData);
double dist = Math.sqrt(Math.pow(colData[0] - listOfMatched2.get(i).pt.x, 2) +
Math.pow(colData[1] - listOfMatched2.get(i).pt.y, 2));
if (dist < inlierThreshold) {
listOfGoodMatches.add(new DMatch(listOfInliers1.size(), listOfInliers2.size(), 0));
listOfInliers1.add(listOfMatched1.get(i));
listOfInliers2.add(listOfMatched2.get(i));
}
}
//! [homography check]
//! [draw final matches]
Mat res = new Mat();
MatOfKeyPoint inliers1 = new MatOfKeyPoint(listOfInliers1.toArray(new KeyPoint[listOfInliers1.size()]));
MatOfKeyPoint inliers2 = new MatOfKeyPoint(listOfInliers2.toArray(new KeyPoint[listOfInliers2.size()]));
MatOfDMatch goodMatches = new MatOfDMatch(listOfGoodMatches.toArray(new DMatch[listOfGoodMatches.size()]));
Features2d.drawMatches(img1, inliers1, img2, inliers2, goodMatches, res);
Imgcodecs.imwrite("akaze_result.png", res);
double inlierRatio = listOfInliers1.size() / (double) listOfMatched1.size();
System.out.println("A-KAZE Matching Results");
System.out.println("*******************************");
System.out.println("# Keypoints 1: \t" + listOfKeypoints1.size());
System.out.println("# Keypoints 2: \t" + listOfKeypoints2.size());
System.out.println("# Matches: \t" + listOfMatched1.size());
System.out.println("# Inliers: \t" + listOfInliers1.size());
System.out.println("# Inliers Ratio: \t" + inlierRatio);
HighGui.imshow("result", res);
HighGui.waitKey();
//! [draw final matches]
System.exit(0);
}
}
public class AKAZEMatchDemo {
public static void main(String[] args) {
// Load the native OpenCV library
System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
new AKAZEMatch().run(args);
}
}


@@ -0,0 +1,56 @@
import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.core.MatOfDMatch;
import org.opencv.core.MatOfKeyPoint;
import org.opencv.features2d.DescriptorMatcher;
import org.opencv.features2d.Features2d;
import org.opencv.highgui.HighGui;
import org.opencv.imgcodecs.Imgcodecs;
import org.opencv.xfeatures2d.SURF;
class SURFMatching {
public void run(String[] args) {
String filename1 = args.length > 1 ? args[0] : "../data/box.png";
String filename2 = args.length > 1 ? args[1] : "../data/box_in_scene.png";
Mat img1 = Imgcodecs.imread(filename1, Imgcodecs.IMREAD_GRAYSCALE);
Mat img2 = Imgcodecs.imread(filename2, Imgcodecs.IMREAD_GRAYSCALE);
if (img1.empty() || img2.empty()) {
System.err.println("Cannot read images!");
System.exit(0);
}
//-- Step 1: Detect the keypoints using the SURF detector and compute the descriptors
double hessianThreshold = 400;
int nOctaves = 4, nOctaveLayers = 3;
boolean extended = false, upright = false;
SURF detector = SURF.create(hessianThreshold, nOctaves, nOctaveLayers, extended, upright);
MatOfKeyPoint keypoints1 = new MatOfKeyPoint(), keypoints2 = new MatOfKeyPoint();
Mat descriptors1 = new Mat(), descriptors2 = new Mat();
detector.detectAndCompute(img1, new Mat(), keypoints1, descriptors1);
detector.detectAndCompute(img2, new Mat(), keypoints2, descriptors2);
//-- Step 2: Matching descriptor vectors with a brute force matcher
// Since SURF is a floating-point descriptor, NORM_L2 is used
DescriptorMatcher matcher = DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE);
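// match() keeps only the single nearest neighbour for each descriptor and applies no filtering,
// so the drawn result will usually contain mismatches; the FLANN-based samples below prune them with Lowe's ratio test.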
MatOfDMatch matches = new MatOfDMatch();
matcher.match(descriptors1, descriptors2, matches);
//-- Draw matches
Mat imgMatches = new Mat();
Features2d.drawMatches(img1, keypoints1, img2, keypoints2, matches, imgMatches);
HighGui.imshow("Matches", imgMatches);
HighGui.waitKey(0);
System.exit(0);
}
}
public class SURFMatchingDemo {
public static void main(String[] args) {
// Load the native OpenCV library
System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
new SURFMatching().run(args);
}
}


@@ -0,0 +1,44 @@
import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.core.MatOfKeyPoint;
import org.opencv.features2d.Features2d;
import org.opencv.highgui.HighGui;
import org.opencv.imgcodecs.Imgcodecs;
import org.opencv.xfeatures2d.SURF;
class SURFDetection {
public void run(String[] args) {
String filename = args.length > 0 ? args[0] : "../data/box.png";
Mat src = Imgcodecs.imread(filename, Imgcodecs.IMREAD_GRAYSCALE);
if (src.empty()) {
System.err.println("Cannot read image: " + filename);
System.exit(0);
}
//-- Step 1: Detect the keypoints using the SURF detector
double hessianThreshold = 400;
int nOctaves = 4, nOctaveLayers = 3;
boolean extended = false, upright = false;
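// hessianThreshold discards weak blob responses (a higher value keeps fewer but more stable keypoints);
// extended = false selects the 64-element descriptor and upright = false keeps orientation estimation for rotation invariance.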
SURF detector = SURF.create(hessianThreshold, nOctaves, nOctaveLayers, extended, upright);
MatOfKeyPoint keypoints = new MatOfKeyPoint();
detector.detect(src, keypoints);
//-- Draw keypoints
Features2d.drawKeypoints(src, keypoints, src);
//-- Show detected (drawn) keypoints
HighGui.imshow("SURF Keypoints", src);
HighGui.waitKey(0);
System.exit(0);
}
}
public class SURFDetectionDemo {
public static void main(String[] args) {
// Load the native OpenCV library
System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
new SURFDetection().run(args);
}
}


@@ -0,0 +1,78 @@
import java.util.ArrayList;
import java.util.List;
import org.opencv.core.Core;
import org.opencv.core.DMatch;
import org.opencv.core.Mat;
import org.opencv.core.MatOfByte;
import org.opencv.core.MatOfDMatch;
import org.opencv.core.MatOfKeyPoint;
import org.opencv.core.Scalar;
import org.opencv.features2d.DescriptorMatcher;
import org.opencv.features2d.Features2d;
import org.opencv.highgui.HighGui;
import org.opencv.imgcodecs.Imgcodecs;
import org.opencv.xfeatures2d.SURF;
class SURFFLANNMatching {
public void run(String[] args) {
String filename1 = args.length > 1 ? args[0] : "../data/box.png";
String filename2 = args.length > 1 ? args[1] : "../data/box_in_scene.png";
Mat img1 = Imgcodecs.imread(filename1, Imgcodecs.IMREAD_GRAYSCALE);
Mat img2 = Imgcodecs.imread(filename2, Imgcodecs.IMREAD_GRAYSCALE);
if (img1.empty() || img2.empty()) {
System.err.println("Cannot read images!");
System.exit(0);
}
//-- Step 1: Detect the keypoints using the SURF detector and compute the descriptors
double hessianThreshold = 400;
int nOctaves = 4, nOctaveLayers = 3;
boolean extended = false, upright = false;
SURF detector = SURF.create(hessianThreshold, nOctaves, nOctaveLayers, extended, upright);
MatOfKeyPoint keypoints1 = new MatOfKeyPoint(), keypoints2 = new MatOfKeyPoint();
Mat descriptors1 = new Mat(), descriptors2 = new Mat();
detector.detectAndCompute(img1, new Mat(), keypoints1, descriptors1);
detector.detectAndCompute(img2, new Mat(), keypoints2, descriptors2);
//-- Step 2: Matching descriptor vectors with a FLANN based matcher
// Since SURF is a floating-point descriptor, NORM_L2 is used
DescriptorMatcher matcher = DescriptorMatcher.create(DescriptorMatcher.FLANNBASED);
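// The FLANN-based matcher builds an approximate nearest-neighbour index (typically KD-trees for float descriptors),
// which scales better than brute force on large descriptor sets; it expects CV_32F descriptors such as SURF's.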
List<MatOfDMatch> knnMatches = new ArrayList<>();
matcher.knnMatch(descriptors1, descriptors2, knnMatches, 2);
//-- Filter matches using Lowe's ratio test
float ratioThresh = 0.7f;
List<DMatch> listOfGoodMatches = new ArrayList<>();
for (int i = 0; i < knnMatches.size(); i++) {
if (knnMatches.get(i).rows() > 1) {
DMatch[] matches = knnMatches.get(i).toArray();
if (matches[0].distance < ratioThresh * matches[1].distance) {
listOfGoodMatches.add(matches[0]);
}
}
}
MatOfDMatch goodMatches = new MatOfDMatch();
goodMatches.fromList(listOfGoodMatches);
//-- Draw matches
Mat imgMatches = new Mat();
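// The NOT_DRAW_SINGLE_POINTS flag leaves out keypoints without a surviving match, which keeps the visualization uncluttered.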
Features2d.drawMatches(img1, keypoints1, img2, keypoints2, goodMatches, imgMatches, Scalar.all(-1),
Scalar.all(-1), new MatOfByte(), Features2d.DrawMatchesFlags_NOT_DRAW_SINGLE_POINTS);
//-- Show detected matches
HighGui.imshow("Good Matches", imgMatches);
HighGui.waitKey(0);
System.exit(0);
}
}
public class SURFFLANNMatchingDemo {
public static void main(String[] args) {
// Load the native OpenCV library
System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
new SURFFLANNMatching().run(args);
}
}


@@ -0,0 +1,130 @@
import java.util.ArrayList;
import java.util.List;
import org.opencv.calib3d.Calib3d;
import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.DMatch;
import org.opencv.core.KeyPoint;
import org.opencv.core.Mat;
import org.opencv.core.MatOfByte;
import org.opencv.core.MatOfDMatch;
import org.opencv.core.MatOfKeyPoint;
import org.opencv.core.MatOfPoint2f;
import org.opencv.core.Point;
import org.opencv.core.Scalar;
import org.opencv.features2d.DescriptorMatcher;
import org.opencv.features2d.Features2d;
import org.opencv.highgui.HighGui;
import org.opencv.imgcodecs.Imgcodecs;
import org.opencv.imgproc.Imgproc;
import org.opencv.xfeatures2d.SURF;
class SURFFLANNMatchingHomography {
public void run(String[] args) {
String filenameObject = args.length > 1 ? args[0] : "../data/box.png";
String filenameScene = args.length > 1 ? args[1] : "../data/box_in_scene.png";
Mat imgObject = Imgcodecs.imread(filenameObject, Imgcodecs.IMREAD_GRAYSCALE);
Mat imgScene = Imgcodecs.imread(filenameScene, Imgcodecs.IMREAD_GRAYSCALE);
if (imgObject.empty() || imgScene.empty()) {
System.err.println("Cannot read images!");
System.exit(0);
}
//-- Step 1: Detect the keypoints using the SURF detector and compute the descriptors
double hessianThreshold = 400;
int nOctaves = 4, nOctaveLayers = 3;
boolean extended = false, upright = false;
SURF detector = SURF.create(hessianThreshold, nOctaves, nOctaveLayers, extended, upright);
MatOfKeyPoint keypointsObject = new MatOfKeyPoint(), keypointsScene = new MatOfKeyPoint();
Mat descriptorsObject = new Mat(), descriptorsScene = new Mat();
detector.detectAndCompute(imgObject, new Mat(), keypointsObject, descriptorsObject);
detector.detectAndCompute(imgScene, new Mat(), keypointsScene, descriptorsScene);
//-- Step 2: Matching descriptor vectors with a FLANN based matcher
// Since SURF is a floating-point descriptor, NORM_L2 is used
DescriptorMatcher matcher = DescriptorMatcher.create(DescriptorMatcher.FLANNBASED);
List<MatOfDMatch> knnMatches = new ArrayList<>();
matcher.knnMatch(descriptorsObject, descriptorsScene, knnMatches, 2);
//-- Filter matches using Lowe's ratio test
float ratioThresh = 0.75f;
List<DMatch> listOfGoodMatches = new ArrayList<>();
for (int i = 0; i < knnMatches.size(); i++) {
if (knnMatches.get(i).rows() > 1) {
DMatch[] matches = knnMatches.get(i).toArray();
if (matches[0].distance < ratioThresh * matches[1].distance) {
listOfGoodMatches.add(matches[0]);
}
}
}
MatOfDMatch goodMatches = new MatOfDMatch();
goodMatches.fromList(listOfGoodMatches);
//-- Draw matches
Mat imgMatches = new Mat();
Features2d.drawMatches(imgObject, keypointsObject, imgScene, keypointsScene, goodMatches, imgMatches, Scalar.all(-1),
Scalar.all(-1), new MatOfByte(), Features2d.DrawMatchesFlags_NOT_DRAW_SINGLE_POINTS);
//-- Localize the object
List<Point> obj = new ArrayList<>();
List<Point> scene = new ArrayList<>();
List<KeyPoint> listOfKeypointsObject = keypointsObject.toList();
List<KeyPoint> listOfKeypointsScene = keypointsScene.toList();
for (int i = 0; i < listOfGoodMatches.size(); i++) {
//-- Get the keypoints from the good matches
obj.add(listOfKeypointsObject.get(listOfGoodMatches.get(i).queryIdx).pt);
scene.add(listOfKeypointsScene.get(listOfGoodMatches.get(i).trainIdx).pt);
}
MatOfPoint2f objMat = new MatOfPoint2f(), sceneMat = new MatOfPoint2f();
objMat.fromList(obj);
sceneMat.fromList(scene);
double ransacReprojThreshold = 3.0;
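// RANSAC fits H to random minimal subsets of the correspondences and keeps the hypothesis with the most inliers
// (reprojection error below ransacReprojThreshold pixels), so mismatches that survived the ratio test cannot corrupt the estimate.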
Mat H = Calib3d.findHomography( objMat, sceneMat, Calib3d.RANSAC, ransacReprojThreshold );
//-- Get the corners from image_1 (the object to be "detected")
Mat objCorners = new Mat(4, 1, CvType.CV_32FC2), sceneCorners = new Mat();
float[] objCornersData = new float[(int) (objCorners.total() * objCorners.channels())];
objCorners.get(0, 0, objCornersData);
objCornersData[0] = 0;
objCornersData[1] = 0;
objCornersData[2] = imgObject.cols();
objCornersData[3] = 0;
objCornersData[4] = imgObject.cols();
objCornersData[5] = imgObject.rows();
objCornersData[6] = 0;
objCornersData[7] = imgObject.rows();
objCorners.put(0, 0, objCornersData);
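// perspectiveTransform applies H to each corner in homogeneous coordinates (including the division by the
// projective component), yielding the object's outline in scene coordinates.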
Core.perspectiveTransform(objCorners, sceneCorners, H);
float[] sceneCornersData = new float[(int) (sceneCorners.total() * sceneCorners.channels())];
sceneCorners.get(0, 0, sceneCornersData);
//-- Draw lines between the corners (the mapped object in the scene, image_2)
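// The x coordinates are shifted by imgObject.cols() because drawMatches places the scene image
// to the right of the object image inside imgMatches.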
Imgproc.line(imgMatches, new Point(sceneCornersData[0] + imgObject.cols(), sceneCornersData[1]),
new Point(sceneCornersData[2] + imgObject.cols(), sceneCornersData[3]), new Scalar(0, 255, 0), 4);
Imgproc.line(imgMatches, new Point(sceneCornersData[2] + imgObject.cols(), sceneCornersData[3]),
new Point(sceneCornersData[4] + imgObject.cols(), sceneCornersData[5]), new Scalar(0, 255, 0), 4);
Imgproc.line(imgMatches, new Point(sceneCornersData[4] + imgObject.cols(), sceneCornersData[5]),
new Point(sceneCornersData[6] + imgObject.cols(), sceneCornersData[7]), new Scalar(0, 255, 0), 4);
Imgproc.line(imgMatches, new Point(sceneCornersData[6] + imgObject.cols(), sceneCornersData[7]),
new Point(sceneCornersData[0] + imgObject.cols(), sceneCornersData[1]), new Scalar(0, 255, 0), 4);
//-- Show detected matches
HighGui.imshow("Good Matches & Object detection", imgMatches);
HighGui.waitKey(0);
System.exit(0);
}
}
public class SURFFLANNMatchingHomographyDemo {
public static void main(String[] args) {
// Load the native OpenCV library
System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
new SURFFLANNMatchingHomography().run(args);
}
}