Example Associate Interest Points

[Image: features associated between the two images by the example code]

To determine the motion between two frames, parts of each frame need to be associated with each other. The standard approach when using interest points is to first detect the interest points, compute a description of each one, and then associate the features together. Once associated, other algorithms can be used to extract the relationship between the two images.
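
For instance, once association has run, the matched indexes can be read back and paired into point correspondences for a geometric solver. A minimal sketch, assuming the v0.3-era API (AssociatedIndex's src/dst fields are an assumption here; pointsA, pointsB, and associate come from the example code further down):

	// Illustrative: pair up matched feature locations for later geometry estimation
	FastQueue<AssociatedIndex> matches = associate.getMatches();
	for( int i = 0; i < matches.size(); i++ ) {
		AssociatedIndex m = matches.get(i);
		Point2D_F64 a = pointsA.get(m.src);  // feature location in image A
		Point2D_F64 b = pointsB.get(m.dst);  // matched location in image B
		// each (a,b) pair can be fed to, e.g., a homography or fundamental matrix estimator
	}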

This example code shows how to describe and associate point features using the DescribeRegionPoint and GeneralAssociation interfaces, respectively. Note that not all feature descriptors perform well through these generic interfaces; BRIEF, for example, is much slower when used through this abstraction.
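
To make the scoring half concrete: the greedy associater used in this example ranks candidate pairs with a score object, where a lower score is a better match. A minimal sketch, assuming the v0.3 score(a,b) signature (descA and descB are the description queues from the example below):

	// Squared Euclidean distance between two SURF descriptions; lower is a better match
	ScoreAssociateEuclideanSq scorer = new ScoreAssociateEuclideanSq();
	double fit = scorer.score(descA.get(0), descB.get(0));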

Example Code:

  • ExampleAssociatePoints.java

Concepts:

  • Describe point features
  • Associate descriptions

Example Code

// Import list (omitted on the wiki page). Package locations below are a best
// guess for the v0.3 API and may differ in other releases:
import boofcv.abst.feature.associate.GeneralAssociation;
import boofcv.abst.feature.describe.DescribeRegionPoint;
import boofcv.abst.feature.detect.interest.InterestPointDetector;
import boofcv.alg.feature.associate.ScoreAssociateEuclideanSq;
import boofcv.core.image.ConvertBufferedImage;
import boofcv.factory.feature.associate.FactoryAssociation;
import boofcv.factory.feature.describe.FactoryDescribeRegionPoint;
import boofcv.factory.feature.detect.interest.FactoryInterestPoint;
import boofcv.gui.feature.AssociationPanel;
import boofcv.gui.image.ShowImages;
import boofcv.io.image.UtilImageIO;
import boofcv.struct.FastQueue;
import boofcv.struct.feature.TupleDescQueue;
import boofcv.struct.feature.TupleDesc_F64;
import boofcv.struct.image.ImageFloat32;
import boofcv.struct.image.ImageSingleBand;
import georegression.struct.point.Point2D_F64;

import java.awt.image.BufferedImage;
import java.util.ArrayList;
import java.util.List;

public class ExampleAssociatePoints<T extends ImageSingleBand> {

	// algorithm used to detect interest points
	InterestPointDetector<T> detector;
	// algorithm used to describe each interest point based on local pixels
	DescribeRegionPoint<T> describe;
	// Associates descriptions together by minimizing an error metric
	GeneralAssociation<TupleDesc_F64> associate;

	Class<T> imageType;

	public ExampleAssociatePoints(InterestPointDetector<T> detector,
								  DescribeRegionPoint<T> describe,
								  GeneralAssociation<TupleDesc_F64> associate,
								  Class<T> imageType) {
		this.detector = detector;
		this.describe = describe;
		this.associate = associate;
		this.imageType = imageType;
	}

	/**
	 * Detect and associate point features in the two images.  Display the results.
	 */
	public void associate( BufferedImage imageA , BufferedImage imageB )
	{
		T inputA = ConvertBufferedImage.convertFromSingle(imageA, null, imageType);
		T inputB = ConvertBufferedImage.convertFromSingle(imageB, null, imageType);

		// stores the location of detected interest points
		List<Point2D_F64> pointsA = new ArrayList<Point2D_F64>();
		List<Point2D_F64> pointsB = new ArrayList<Point2D_F64>();

		// stores the description of detected interest points
		FastQueue<TupleDesc_F64> descA = new TupleDescQueue(describe.getDescriptionLength(),true);
		FastQueue<TupleDesc_F64> descB = new TupleDescQueue(describe.getDescriptionLength(),true);

		// describe each image using interest points
		describeImage(inputA,pointsA,descA);
		describeImage(inputB,pointsB,descB);

		// Associate features between the two images
		associate.associate(descA,descB);

		// display the results
		AssociationPanel panel = new AssociationPanel(20);
		panel.setAssociation(pointsA,pointsB,associate.getMatches());
		panel.setImages(imageA,imageB);

		ShowImages.showWindow(panel,"Associated Features");
	}

	/**
	 * Detects features inside the input image and computes a description at each detected point.
	 */
	private void describeImage(T input, List<Point2D_F64> points, FastQueue<TupleDesc_F64> descs )
	{
		detector.detect(input);
		describe.setImage(input);

		descs.reset();
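		// grab an instance to write the next description into; if the point is rejected,
		// the same instance is reused on the next pass through the loop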
		TupleDesc_F64 desc = descs.pop();
		for( int i = 0; i < detector.getNumberOfFeatures(); i++ ) {
			// get the feature location info
			Point2D_F64 p = detector.getLocation(i);
			double yaw = detector.getOrientation(i);
			double scale = detector.getScale(i);

			// compute the description; process() returns null if the description could not
			// be computed (e.g., the feature is too close to the image border)
			if( describe.process(p.x,p.y,yaw,scale,desc) != null ) {
				points.add(p.copy());
				desc = descs.pop();
			}
		}
		// remove the last element from the queue, which has not been used.
		descs.removeTail();
	}

	public static void main( String args[] ) {

		Class imageType = ImageFloat32.class;

		// select which algorithms to use
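		// Fast Hessian (SURF) detector; the argument order is believed to be: detection
		// threshold, extraction radius, max features per scale, initial sample size,
		// initial feature size, scales per octave, number of octaves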
		InterestPointDetector detector = FactoryInterestPoint.fastHessian(1, 2, 300, 1, 9, 4, 4);
		DescribeRegionPoint describe = FactoryDescribeRegionPoint.surf(true, imageType);
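		// greedy association (assumed v0.3 argument order): score metric, max fit error,
		// max matches (-1 = unlimited), backwards validation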
		GeneralAssociation<TupleDesc_F64> associate = FactoryAssociation.greedy(new ScoreAssociateEuclideanSq(), 2, -1, true);

		// load and match images
		ExampleAssociatePoints app = new ExampleAssociatePoints(detector,describe,associate,imageType);

		BufferedImage imageA = UtilImageIO.loadImage("../data/evaluation/stitch/kayak_01.jpg");
		BufferedImage imageB = UtilImageIO.loadImage("../data/evaluation/stitch/kayak_03.jpg");

		app.associate(imageA,imageB);
	}
}
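
When run, the example opens an "Associated Features" window showing the two kayak images with lines connecting the matched interest points.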