Example Associate Interest Points

[Image: Associated features between two images, found using the example code.]

A common problem in many computer vision applications is matching features observed in two or more images. Below is an example of how this can be accomplished using interest points and their descriptions. When you run this example, click on the image to select individual points.
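
At its core the example reduces to three steps: detect interest points and compute a description of each in both images, then associate the two sets of descriptions. A condensed sketch of that flow, using the same interfaces as the full example below (variable names abbreviated):

	// detect interest points and compute their descriptions in each image
	detDesc.detect(inputA);   // locations copied into pointsA, descriptions into descA
	detDesc.detect(inputB);   // locations copied into pointsB, descriptions into descB

	// pair up the two sets of descriptions by minimizing an error metric
	associate.setSource(descA);
	associate.setDestination(descB);
	associate.associate();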

Example Code:

  • ExampleAssociatePoints.java: https://github.com/lessthanoptimal/BoofCV/blob/v0.12/examples/src/boofcv/examples/ExampleAssociatePoints.java

Concepts:

  • Describe point features
  • Associate descriptions

Relevant Applets:

Example Code

// Imports shown for BoofCV v0.12; package locations may differ in other versions.
import boofcv.abst.feature.associate.GeneralAssociation;
import boofcv.abst.feature.associate.ScoreAssociation;
import boofcv.abst.feature.detdesc.DetectDescribePoint;
import boofcv.alg.descriptor.UtilFeature;
import boofcv.core.image.ConvertBufferedImage;
import boofcv.factory.feature.associate.FactoryAssociation;
import boofcv.factory.feature.detdesc.FactoryDetectDescribe;
import boofcv.gui.feature.AssociationPanel;
import boofcv.gui.image.ShowImages;
import boofcv.io.image.UtilImageIO;
import boofcv.struct.FastQueue;
import boofcv.struct.feature.TupleDesc;
import boofcv.struct.image.ImageFloat32;
import boofcv.struct.image.ImageSingleBand;
import georegression.struct.point.Point2D_F64;

import java.awt.image.BufferedImage;
import java.util.ArrayList;
import java.util.List;

/**
 * After interest points have been detected in two images the next step is to associate the
 * two sets of features so that the relationship between the images can be found.  This is
 * done by detecting point features inside each image and associating them using their
 * descriptors.  A high level interface is used so that different features can easily be
 * swapped in.
 *
 * @author Peter Abeles
 */
public class ExampleAssociatePoints<T extends ImageSingleBand, FD extends TupleDesc> {

	// algorithm used to detect and describe interest points
	DetectDescribePoint<T,FD> detDesc;
	// Associates descriptions together by minimizing an error metric
	GeneralAssociation<FD> associate;

	// location of interest points
	List<Point2D_F64> pointsA;
	List<Point2D_F64> pointsB;

	Class<T> imageType;

	public ExampleAssociatePoints(DetectDescribePoint<T,FD> detDesc,
								  GeneralAssociation<FD> associate,
								  Class<T> imageType) {
		this.detDesc = detDesc;
		this.associate = associate;
		this.imageType = imageType;
	}

	/**
	 * Detect and associate point features in the two images.  Display the results.
	 */
	public void associate( BufferedImage imageA , BufferedImage imageB )
	{
		T inputA = ConvertBufferedImage.convertFromSingle(imageA, null, imageType);
		T inputB = ConvertBufferedImage.convertFromSingle(imageB, null, imageType);

		// stores the location of detected interest points
		pointsA = new ArrayList<Point2D_F64>();
		pointsB = new ArrayList<Point2D_F64>();

		// stores the description of detected interest points
		FastQueue<FD> descA = UtilFeature.createQueue(detDesc,100);
		FastQueue<FD> descB = UtilFeature.createQueue(detDesc,100);

		// describe each image using interest points
		describeImage(inputA,pointsA,descA);
		describeImage(inputB,pointsB,descB);

		// Associate features between the two images
		associate.setSource(descA);
		associate.setDestination(descB);
		associate.associate();

		// display the results
		AssociationPanel panel = new AssociationPanel(20);
		panel.setAssociation(pointsA,pointsB,associate.getMatches());
		panel.setImages(imageA,imageB);
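		// clicking on a point in the window selects it and highlights its association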

		ShowImages.showWindow(panel,"Associated Features");
	}

	/**
	 * Detects features inside the two images and computes descriptions at those points.
	 */
	private void describeImage(T input, List<Point2D_F64> points, FastQueue<FD> descs )
	{
		detDesc.detect(input);

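		// save a copy of each feature's location and description; copies are needed
		// because the detector owns, and may recycle, the returned references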
		for( int i = 0; i < detDesc.getNumberOfFeatures(); i++ ) {
			points.add( detDesc.getLocation(i).copy() );
			descs.grow().setTo(detDesc.getDescriptor(i));
		}
	}

	public static void main( String args[] ) {

		Class imageType = ImageFloat32.class;

		// select which algorithms to use
		DetectDescribePoint detDesc = FactoryDetectDescribe.surf(1, 2, 200, 1, 9, 4, 4,true,imageType);

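		// defaultScore() selects an error metric suited to the descriptor type, e.g.
		// Euclidean distance for real-valued descriptors such as SURF.  greedy()
		// matches each source feature to its best destination feature; here with no
		// maximum error (Double.MAX_VALUE), no limit on the number of matches (-1),
		// and backwards validation enabled (true), which keeps only matches that
		// are mutually the best in both directions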
		ScoreAssociation scorer = FactoryAssociation.defaultScore(detDesc.getDescriptorType());
		GeneralAssociation associate = FactoryAssociation.greedy(scorer, Double.MAX_VALUE, -1, true);

		// load and match images
		ExampleAssociatePoints app = new ExampleAssociatePoints(detDesc,associate,imageType);

		BufferedImage imageA = UtilImageIO.loadImage("../data/evaluation/stitch/kayak_01.jpg");
		BufferedImage imageB = UtilImageIO.loadImage("../data/evaluation/stitch/kayak_03.jpg");

		app.associate(imageA,imageB);
	}
}
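
After associate() runs, the matches can also be consumed directly rather than only displayed. Below is a minimal sketch, assuming the same v0.12 API as the example above; it could be appended to main() after app.associate(imageA,imageB). Each AssociatedIndex (in boofcv.struct.feature) returned by getMatches() stores the matched feature's index in the source list (src), its index in the destination list (dst), and the association score (fitScore):

	// print every associated pair of points along with its fit score
	FastQueue<AssociatedIndex> matches = associate.getMatches();
	for( int i = 0; i < matches.size; i++ ) {
		AssociatedIndex m = matches.get(i);
		Point2D_F64 a = app.pointsA.get(m.src);
		Point2D_F64 b = app.pointsB.get(m.dst);
		System.out.println(a+" <--> "+b+"  score = "+m.fitScore);
	}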