Example SURF Feature
Speeded Up Robust Feature (SURF) is a region descriptor and interest point detector. Two different ways of using SURF are demonstrated in this example. The easy way uses a high-level interface that is simple to work with but sacrifices flexibility. The harder way creates the SURF classes directly; it is more complex and requires a better understanding of how the code works.
Example File: ExampleFeatureSurf.java
Concepts:
- SURF
- Region Descriptor
- Interest Point
Example Code
/**
 * Example of how to use SURF detector and descriptors in BoofCV.
 *
 * @author Peter Abeles
 */
public class ExampleFeatureSurf {
    /**
     * Use generalized interfaces for working with SURF. This removes much of the drudgery, but also reduces flexibility
     * and slightly increases memory and computational requirements.
     *
     * @param image Input image type. DOES NOT NEED TO BE GrayF32, GrayU8 works too
     */
    public static void easy( GrayF32 image ) {
        // create the detector and descriptors
        ConfigFastHessian configDetector = new ConfigFastHessian();
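        // ConfigExtract(radius, threshold, ignoreBorder, useStrictRule): non-max suppression radius 2,
        // detection threshold 0, ignore a 5 pixel image border, use the strict maximum rule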
        configDetector.extract = new ConfigExtract(2, 0, 5, true);
        configDetector.maxFeaturesPerScale = 200;
        configDetector.initialSampleStep = 2;

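        // create the combined detector/descriptor; the null descriptor and orientation configs select default settings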
        DetectDescribePoint<GrayF32, TupleDesc_F64> surf = FactoryDetectDescribe.
                surfStable(configDetector, null, null, GrayF32.class);

        // specify the image to process
        surf.detect(image);

        System.out.println("Found Features: " + surf.getNumberOfFeatures());
        System.out.println("First descriptor's first value: " + surf.getDescription(0).value[0]);
    }
    /**
     * Configured exactly the same as the easy example above, but requires a lot more code and a more in-depth
     * understanding of how SURF works and is configured. Each sub-problem which composes "SURF" is now explicitly
     * created and configured independently. This allows an advanced user to tune it for a specific problem.
     *
     * @param image Input image type. DOES NOT NEED TO BE GrayF32, GrayU8 works too
     */
    public static <II extends ImageGray<II>> void harder( GrayF32 image ) {
        // SURF works off of integral images
        Class<II> integralType = GIntegralImageOps.getIntegralType(GrayF32.class);

        // define the feature detection algorithm
        ConfigFastHessian config = new ConfigFastHessian();
        config.extract = new ConfigExtract(2, 0, 5, true);
        config.maxFeaturesPerScale = 200;
        config.initialSampleStep = 2;
        FastHessianFeatureDetector<II> detector = FactoryInterestPointAlgs.fastHessian(config);

        // estimate orientation
        OrientationIntegral<II> orientation = FactoryOrientationAlgs.sliding_ii(null, integralType);

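        // computes the SURF descriptor for the region around each interest point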
        DescribePointSurf<II> descriptor = FactoryDescribePointAlgs.surfStability(null, integralType);

        // compute the integral image of 'image'
        II integral = GeneralizedImageOps.createSingleBand(integralType, image.width, image.height);
        GIntegralImageOps.transform(image, integral);

        // detect fast hessian features
        detector.detect(integral);

        // tell algorithms which image to process
        orientation.setImage(integral);
        descriptor.setImage(integral);

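        // retrieve the location and scale of each detected interest point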
        List<ScalePoint> points = detector.getFoundFeatures();

        List<TupleDesc_F64> descriptions = new ArrayList<>();
        for( ScalePoint p : points ) {
            // estimate orientation
            orientation.setObjectRadius(p.scale * BoofDefaults.SURF_SCALE_TO_RADIUS);
            double angle = orientation.compute(p.pixel.x, p.pixel.y);

            // extract the SURF description for this region
            TupleDesc_F64 desc = descriptor.createDescription();
            descriptor.describe(p.pixel.x, p.pixel.y, angle, p.scale, true, desc);

            // save everything for processing later on
            descriptions.add(desc);
        }

        System.out.println("Found Features: " + points.size());
        System.out.println("First descriptor's first value: " + descriptions.get(0).value[0]);
    }
    public static void main( String[] args ) {
        // Need to turn off concurrency since the order in which features are returned
        // is not deterministic if turned on
        BoofConcurrency.USE_CONCURRENT = false;

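        // load the example image and convert it into a single band 32-bit float image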
        GrayF32 image = UtilImageIO.loadImage(UtilIO.pathExample("particles01.jpg"), GrayF32.class);

        // run each example
        ExampleFeatureSurf.easy(image);
        ExampleFeatureSurf.harder(image);

        System.out.println("Done!");
    }
}