Difference between revisions of "Example Associate Interest Points"
From BoofCV
Jump to navigationJump to searchm |
m |
||
Line 27: | Line 27: | ||
<syntaxhighlight lang="java"> | <syntaxhighlight lang="java"> | ||
public class ExampleAssociatePoints<T extends ImageSingleBand> { | public class ExampleAssociatePoints<T extends ImageSingleBand, FD extends TupleDesc> { | ||
// algorithm used to detect interest points | // algorithm used to detect interest points | ||
InterestPointDetector<T> detector; | InterestPointDetector<T> detector; | ||
// algorithm used to describe each interest point based on local pixels | // algorithm used to describe each interest point based on local pixels | ||
DescribeRegionPoint<T> describe; | DescribeRegionPoint<T, FD> describe; | ||
// Associated descriptions together by minimizing an error metric | // Associated descriptions together by minimizing an error metric | ||
GeneralAssociation< | GeneralAssociation<FD> associate; | ||
// location of interest points | // location of interest points | ||
Line 43: | Line 43: | ||
public ExampleAssociatePoints(InterestPointDetector<T> detector, | public ExampleAssociatePoints(InterestPointDetector<T> detector, | ||
DescribeRegionPoint<T> describe, | DescribeRegionPoint<T, FD> describe, | ||
GeneralAssociation< | GeneralAssociation<FD> associate, | ||
Class<T> imageType) { | Class<T> imageType) { | ||
this.detector = detector; | this.detector = detector; | ||
Line 65: | Line 65: | ||
// stores the description of detected interest points | // stores the description of detected interest points | ||
FastQueue< | FastQueue<FD> descA = new TupleDescQueue<FD>(describe,true); | ||
FastQueue< | FastQueue<FD> descB = new TupleDescQueue<FD>(describe,true); | ||
// describe each image using interest points | // describe each image using interest points | ||
Line 86: | Line 86: | ||
* Detects features inside the two images and computes descriptions at those points. | * Detects features inside the two images and computes descriptions at those points. | ||
*/ | */ | ||
private void describeImage(T input, List<Point2D_F64> points, FastQueue< | private void describeImage(T input, List<Point2D_F64> points, FastQueue<FD> descs ) | ||
{ | { | ||
detector.detect(input); | detector.detect(input); | ||
Line 92: | Line 92: | ||
descs.reset(); | descs.reset(); | ||
for( int i = 0; i < detector.getNumberOfFeatures(); i++ ) { | for( int i = 0; i < detector.getNumberOfFeatures(); i++ ) { | ||
// get the feature location info | // get the feature location info | ||
Line 100: | Line 100: | ||
// extract the description and save the results into the provided description | // extract the description and save the results into the provided description | ||
if( describe.process(p.x,p.y,yaw,scale, | if( describe.isInBounds(p.x,p.y,yaw,scale)) { | ||
describe.process(p.x, p.y, yaw, scale, descs.pop()); | |||
points.add(p.copy()); | points.add(p.copy()); | ||
} | } | ||
} | } | ||
} | } | ||
Line 116: | Line 114: | ||
InterestPointDetector detector = FactoryInterestPoint.fastHessian(1, 2, 200, 1, 9, 4, 4); | InterestPointDetector detector = FactoryInterestPoint.fastHessian(1, 2, 200, 1, 9, 4, 4); | ||
DescribeRegionPoint describe = FactoryDescribeRegionPoint.surf(true, imageType); | DescribeRegionPoint describe = FactoryDescribeRegionPoint.surf(true, imageType); | ||
GeneralAssociation | // DescribeRegionPoint describe = FactoryDescribeRegionPoint.brief(16,512,-1,4,true, imageType); | ||
ScoreAssociation scorer = FactoryAssociation.defaultScore(describe.getDescriptorType()); | |||
GeneralAssociation associate = FactoryAssociation.greedy(scorer, Double.MAX_VALUE, -1, true); | |||
// load and match images | // load and match images |
Revision as of 08:21, 18 September 2012
Detect Interest Point Example
Associated features between images using example code. |
---|
To determine the motion between two frames, parts of each frame need to be associated with each other. The standard approach when using interest points is to first detect the interest points, compute descriptions of them, then associate the features together. Once associated, other algorithms can be used to extract the relationship between each image.
This example code shows how to describe and associate point features using the DescribeRegionPoint and GeneralAssociation interfaces, respectively. Note that not all feature descriptors perform well when using this interface. For example, BRIEF is much slower when using this abstraction.
Example Code:
Concepts:
- Describe point features
- Associate descriptions
Relevant Applets:
Example Code
/**
 * Example of how to detect interest points in two images, compute a descriptor for each point,
 * and associate the descriptions between the two images by minimizing an error metric.
 * The matched pairs are then shown in a GUI window.
 *
 * @param <T>  input image type
 * @param <FD> feature descriptor type
 */
public class ExampleAssociatePoints<T extends ImageSingleBand, FD extends TupleDesc> {

	// algorithm used to detect interest points
	InterestPointDetector<T> detector;
	// algorithm used to describe each interest point based on local pixels
	DescribeRegionPoint<T, FD> describe;
	// associates descriptions together by minimizing an error metric
	GeneralAssociation<FD> associate;

	// location of interest points found in each image
	List<Point2D_F64> pointsA;
	List<Point2D_F64> pointsB;

	// type of the input images, needed for image conversion
	Class<T> imageType;

	/**
	 * Configures the example with the detection, description, and association algorithms.
	 *
	 * @param detector  detects interest points
	 * @param describe  computes a descriptor at each interest point
	 * @param associate matches descriptors between the two images
	 * @param imageType type of the input images
	 */
	public ExampleAssociatePoints(InterestPointDetector<T> detector,
								  DescribeRegionPoint<T, FD> describe,
								  GeneralAssociation<FD> associate,
								  Class<T> imageType) {
		this.detector = detector;
		this.describe = describe;
		this.associate = associate;
		this.imageType = imageType;
	}

	/**
	 * Detect and associate point features in the two images. Display the results.
	 *
	 * @param imageA first input image
	 * @param imageB second input image
	 */
	public void associate( BufferedImage imageA , BufferedImage imageB )
	{
		T inputA = ConvertBufferedImage.convertFromSingle(imageA, null, imageType);
		T inputB = ConvertBufferedImage.convertFromSingle(imageB, null, imageType);

		// stores the location of detected interest points
		pointsA = new ArrayList<Point2D_F64>();
		pointsB = new ArrayList<Point2D_F64>();

		// stores the description of detected interest points
		FastQueue<FD> descA = new TupleDescQueue<FD>(describe,true);
		FastQueue<FD> descB = new TupleDescQueue<FD>(describe,true);

		// describe each image using interest points
		describeImage(inputA,pointsA,descA);
		describeImage(inputB,pointsB,descB);

		// Associate features between the two images
		associate.associate(descA,descB);

		// display the results
		AssociationPanel panel = new AssociationPanel(20);
		panel.setAssociation(pointsA,pointsB,associate.getMatches());
		panel.setImages(imageA,imageB);

		ShowImages.showWindow(panel,"Associated Features");
	}

	/**
	 * Detects features inside the image and computes descriptions at those points.
	 *
	 * @param input  image being processed
	 * @param points output: location of each described feature
	 * @param descs  output: descriptor for each feature in points
	 */
	private void describeImage(T input, List<Point2D_F64> points, FastQueue<FD> descs )
	{
		detector.detect(input);
		describe.setImage(input);

		descs.reset();

		for( int i = 0; i < detector.getNumberOfFeatures(); i++ ) {
			// get the feature location info
			Point2D_F64 p = detector.getLocation(i);
			double yaw = detector.getOrientation(i);
			double scale = detector.getScale(i);

			// extract the description and save the results into the provided description,
			// skipping points whose description region extends outside the image
			if( describe.isInBounds(p.x,p.y,yaw,scale)) {
				describe.process(p.x, p.y, yaw, scale, descs.pop());
				points.add(p.copy());
			}
		}
	}

	public static void main( String[] args ) {

		Class imageType = ImageFloat32.class;

		// select which algorithms to use
		InterestPointDetector detector = FactoryInterestPoint.fastHessian(1, 2, 200, 1, 9, 4, 4);
		DescribeRegionPoint describe = FactoryDescribeRegionPoint.surf(true, imageType);
//		DescribeRegionPoint describe = FactoryDescribeRegionPoint.brief(16,512,-1,4,true, imageType);

		ScoreAssociation scorer = FactoryAssociation.defaultScore(describe.getDescriptorType());
		GeneralAssociation associate = FactoryAssociation.greedy(scorer, Double.MAX_VALUE, -1, true);

		// load and match images
		ExampleAssociatePoints app = new ExampleAssociatePoints(detector,describe,associate,imageType);

		BufferedImage imageA = UtilImageIO.loadImage("../data/evaluation/stitch/kayak_01.jpg");
		BufferedImage imageB = UtilImageIO.loadImage("../data/evaluation/stitch/kayak_03.jpg");

		app.associate(imageA,imageB);
	}
}