Example Associate Interest Points
Associated features between images using the example code.
A common problem for many computer vision applications is matching features observed in two or more images. Below is an example of how this can be accomplished using interest points and their descriptions. When you run this example, click on the image to select individual points.
Concepts:
- Describe point features
- Associate descriptions
Example Code
/**
* After interest points have been detected in two images the next step is to associate the two
* sets of features so that the relationship between the images can be found. This is done by
* detecting point features inside each image and associating them using their descriptors. A
* high level interface is used so that different feature types can be easily swapped.
*
* @author Peter Abeles
*/
public class ExampleAssociatePoints<T extends ImageSingleBand, FD extends TupleDesc> {
// algorithm used to detect and describe interest points
DetectDescribePoint<T,FD> detDesc;
// Associates descriptions together by minimizing an error metric
GeneralAssociation<FD> associate;
// location of interest points
List<Point2D_F64> pointsA;
List<Point2D_F64> pointsB;
Class<T> imageType;
public ExampleAssociatePoints(DetectDescribePoint<T,FD> detDesc,
GeneralAssociation<FD> associate,
Class<T> imageType) {
this.detDesc = detDesc;
this.associate = associate;
this.imageType = imageType;
}
/**
* Detect and associate point features in the two images. Display the results.
*/
public void associate( BufferedImage imageA , BufferedImage imageB )
{
T inputA = ConvertBufferedImage.convertFromSingle(imageA, null, imageType);
T inputB = ConvertBufferedImage.convertFromSingle(imageB, null, imageType);
// stores the location of detected interest points
pointsA = new ArrayList<Point2D_F64>();
pointsB = new ArrayList<Point2D_F64>();
// stores the description of detected interest points
FastQueue<FD> descA = UtilFeature.createQueue(detDesc,100);
FastQueue<FD> descB = UtilFeature.createQueue(detDesc,100);
// describe each image using interest points
describeImage(inputA,pointsA,descA);
describeImage(inputB,pointsB,descB);
// Associate features between the two images
associate.setSource(descA);
associate.setDestination(descB);
associate.associate();
// display the results
AssociationPanel panel = new AssociationPanel(20);
panel.setAssociation(pointsA,pointsB,associate.getMatches());
panel.setImages(imageA,imageB);
ShowImages.showWindow(panel,"Associated Features");
}
/**
* Detects features inside the image and computes descriptions at those points.
*/
private void describeImage(T input, List<Point2D_F64> points, FastQueue<FD> descs )
{
detDesc.detect(input);
for( int i = 0; i < detDesc.getNumberOfFeatures(); i++ ) {
points.add( detDesc.getLocation(i).copy() );
descs.grow().setTo(detDesc.getDescriptor(i));
}
}
public static void main( String[] args ) {
Class imageType = ImageFloat32.class;
// select which algorithms to use
DetectDescribePoint detDesc = FactoryDetectDescribe.surf(1, 2, 200, 1, 9, 4, 4,true,imageType);
ScoreAssociation scorer = FactoryAssociation.defaultScore(detDesc.getDescriptorType());
GeneralAssociation associate = FactoryAssociation.greedy(scorer, Double.MAX_VALUE, -1, true);
// load and match images
ExampleAssociatePoints app = new ExampleAssociatePoints(detDesc,associate,imageType);
BufferedImage imageA = UtilImageIO.loadImage("../data/evaluation/stitch/kayak_01.jpg");
BufferedImage imageB = UtilImageIO.loadImage("../data/evaluation/stitch/kayak_03.jpg");
app.associate(imageA,imageB);
}
}
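The example displays the matches graphically with AssociationPanel, but the association results can also be consumed directly. Below is a minimal sketch of a helper method that could be added to the class above; it assumes the same API version as the example, where associate.getMatches() returns a queue of AssociatedIndex whose src and dst fields index into pointsA and pointsB. The printMatches name is only illustrative and not part of the original example.
/**
 * Minimal sketch (not part of the original example) showing how the matches could be
 * used directly instead of being displayed. Assumes the same API as the example above.
 */
private void printMatches() {
    FastQueue<AssociatedIndex> matches = associate.getMatches();
    for( int i = 0; i < matches.size; i++ ) {
        AssociatedIndex m = matches.get(i);
        // 'src' indexes into pointsA and 'dst' indexes into pointsB
        Point2D_F64 a = pointsA.get(m.src);
        Point2D_F64 b = pointsB.get(m.dst);
        System.out.printf("%3d: (%6.1f, %6.1f) <-> (%6.1f, %6.1f)  score = %.3f%n",
                i, a.x, a.y, b.x, b.y, m.fitScore);
    }
}
For the default SURF association score the fit is an error metric, so smaller fitScore values indicate better matches.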