Difference between revisions of "Example Track Point Features"

From BoofCV
m (Improved comments and tweaked code)
m
 
(13 intermediate revisions by the same user not shown)
Line 1: Line 1:
= Tracking Point Features =
 
 
 
<center>
 
<center>
 
{| style="width:370pt;"
 
{| style="width:370pt;"
Line 14: Line 12:
  
 
Example Code:
 
Example Code:
* [https://github.com/lessthanoptimal/BoofCV/blob/master/examples/src/boofcv/examples/ExamplePointFeatureTracker.java ExamplePointFeatureTracker.java]
+
* [https://github.com/lessthanoptimal/BoofCV/blob/v0.27/examples/src/boofcv/examples/tracking/ExamplePointFeatureTracker.java ExamplePointFeatureTracker.java]
  
 
Concepts:
 
Concepts:
Line 20: Line 18:
 
* Tracking point features abstractly
 
* Tracking point features abstractly
 
* Displaying the location of point features
 
* Displaying the location of point features
 
Relevant Applets:
 
* [[Applet_Feature_Tracking| Point Tracker Applet]]
 
  
 
= Example Code =
 
= Example Code =
  
 
<syntaxhighlight lang="java">
 
<syntaxhighlight lang="java">
public class ExamplePointFeatureTracker< T extends ImageBase , D extends ImageBase >
+
/**
 +
* <p>
 +
* Example of how to use the {@link boofcv.abst.feature.tracker.PointTracker} to track different types of point features.
 +
* ImagePointTracker hides much of the complexity involved in tracking point features and masks
 +
* the very different underlying structures used by these different trackers.  The default trackers
 +
* provided in BoofCV are general purpose trackers, that might not be the best tracker or utility
 +
* the underlying image features the best in all situations.
 +
* </p>
 +
*
 +
* @author Peter Abeles
 +
*/
 +
public class ExamplePointFeatureTracker< T extends ImageGray<T>, D extends ImageGray<D>>
 
{
 
{
 
// type of input image
 
// type of input image
Line 34: Line 40:
  
 
// tracks point features inside the image
 
// tracks point features inside the image
ImagePointTracker<T> tracker;
+
PointTracker<T> tracker;
  
 
// displays the video sequence and tracked features
 
// displays the video sequence and tracked features
 
ImagePanel gui = new ImagePanel();
 
ImagePanel gui = new ImagePanel();
  
public ExamplePointFeatureTracker(Class<T> imageType) {
+
int pause;
 +
 
 +
public ExamplePointFeatureTracker(Class<T> imageType , int pause ) {
 
this.imageType = imageType;
 
this.imageType = imageType;
 
this.derivType = GImageDerivativeOps.getDerivativeType(imageType);
 
this.derivType = GImageDerivativeOps.getDerivativeType(imageType);
 +
this.pause = pause;
 
}
 
}
  
Line 52: Line 61:
 
T frame = sequence.next();
 
T frame = sequence.next();
 
gui.setPreferredSize(new Dimension(frame.getWidth(),frame.getHeight()));
 
gui.setPreferredSize(new Dimension(frame.getWidth(),frame.getHeight()));
ShowImages.showWindow(gui,"KTL Tracker");
+
ShowImages.showWindow(gui,"KTL Tracker", true);
  
 
// process each frame in the image sequence
 
// process each frame in the image sequence
Line 62: Line 71:
  
 
// if there are too few tracks spawn more
 
// if there are too few tracks spawn more
if( tracker.getActiveTracks().size() < 100 )
+
if( tracker.getActiveTracks(null).size() < 130 )
 
tracker.spawnTracks();
 
tracker.spawnTracks();
  
Line 69: Line 78:
  
 
// wait for a fraction of a second so it doesn't process to fast
 
// wait for a fraction of a second so it doesn't process to fast
BoofMiscOps.pause(100);
+
BoofMiscOps.pause(pause);
 
}
 
}
 
}
 
}
Line 80: Line 89:
 
Graphics2D g2 = orig.createGraphics();
 
Graphics2D g2 = orig.createGraphics();
  
// draw active tracks as blue dots
+
// draw tracks with semi-unique colors so you can track individual points with your eyes
for( AssociatedPair p : tracker.getActiveTracks() ) {
+
for( PointTrack p : tracker.getActiveTracks(null) ) {
int x = (int)p.currLoc.x;
+
int red = (int)(2.5*(p.featureId%100));
int y = (int)p.currLoc.y;
+
int green = (int)((255.0/150.0)*(p.featureId%150));
 
+
int blue = (int)(p.featureId%255);
VisualizeFeatures.drawPoint(g2, x, y, Color.blue);
+
VisualizeFeatures.drawPoint(g2, (int)p.x, (int)p.y, new Color(red,green,blue));
 
}
 
}
  
 
// draw tracks which have just been spawned green
 
// draw tracks which have just been spawned green
for( AssociatedPair p : tracker.getNewTracks() ) {
+
for( PointTrack p : tracker.getNewTracks(null) ) {
int x = (int)p.currLoc.x;
+
VisualizeFeatures.drawPoint(g2, (int)p.x, (int)p.y, Color.green);
int y = (int)p.currLoc.y;
 
 
 
VisualizeFeatures.drawPoint(g2, x, y, Color.green);
 
 
}
 
}
  
 
// tell the GUI to update
 
// tell the GUI to update
gui.setBufferedImage(orig);
+
gui.setImage(orig);
 
gui.repaint();
 
gui.repaint();
 
}
 
}
Line 105: Line 111:
 
*/
 
*/
 
public void createKLT() {
 
public void createKLT() {
PkltManagerConfig<T, D> config = PkltManagerConfig.createDefault(imageType,derivType);
+
PkltConfig config = new PkltConfig();
config.maxFeatures = 200;
+
config.templateRadius = 3;
config.featureRadius = 3;
 
 
config.pyramidScaling = new int[]{1,2,4,8};
 
config.pyramidScaling = new int[]{1,2,4,8};
  
tracker = FactoryPointSequentialTracker.klt(config);
+
tracker = FactoryPointTracker.klt(config, new ConfigGeneralDetector(600, 6, 1),
 +
imageType, derivType);
 
}
 
}
  
Line 117: Line 123:
 
*/
 
*/
 
public void createSURF() {
 
public void createSURF() {
tracker = FactoryPointSequentialTracker.surf(200,80,3,imageType);
+
ConfigFastHessian configDetector = new ConfigFastHessian();
 +
configDetector.maxFeaturesPerScale = 250;
 +
configDetector.extractRadius = 3;
 +
configDetector.initialSampleSize = 2;
 +
tracker = FactoryPointTracker.dda_FH_SURF_Fast(configDetector, null, null, imageType);
 
}
 
}
+
 
 
public static void main( String args[] ) throws FileNotFoundException {
 
public static void main( String args[] ) throws FileNotFoundException {
  
Class imageType = ImageFloat32.class;
+
Class imageType = GrayF32.class;
 +
 
 +
MediaManager media = DefaultMediaManager.INSTANCE;
  
// loads an MJPEG video sequence
+
int pause;
VideoMjpegCodec codec = new VideoMjpegCodec();
+
SimpleImageSequence sequence =
List<byte[]> data = codec.read(new FileInputStream("../applet/data/zoom.mjpeg"));
+
media.openVideo(UtilIO.pathExample("zoom.mjpeg"), ImageType.single(imageType)); pause=100;
SimpleImageSequence sequence = new JpegByteImageSequence(imageType,data,true);
+
// media.openCamera(null,640,480,ImageType.single(imageType)); pause = 5;
 +
sequence.setLoop(true);
  
ExamplePointFeatureTracker app = new ExamplePointFeatureTracker(imageType);
+
ExamplePointFeatureTracker app = new ExamplePointFeatureTracker(imageType,pause);
  
 
// Comment or un-comment to change the type of tracker being used
 
// Comment or un-comment to change the type of tracker being used

Latest revision as of 09:21, 17 August 2017

Tracked point features in an image sequence.
Tracked point features in an image sequence. Blue dots are older tracks and green dots are newly spawned tracks.

Tracking how point features move inside an image is used to extract the geometric structure and apparent motion of the scene. There are many different ways in which point features are tracked. BoofCV provides a basic tracker that hides much of this complexity and allows a variety of different trackers to be used without modifying any of the code.

The example code below shows how to use the PointTracker interface to process images and get a list of detected points. Which tracker is used can be changed by toggling comments in the main function.

Example Code:

Concepts:

  • Loading image sequences
  • Tracking point features abstractly
  • Displaying the location of point features

Example Code

/**
 * <p>
 * Example of how to use the {@link boofcv.abst.feature.tracker.PointTracker} to track different types of point features.
 * PointTracker hides much of the complexity involved in tracking point features and masks
 * the very different underlying structures used by these different trackers.  The default trackers
 * provided in BoofCV are general purpose trackers, that might not be the best tracker or utilize
 * the underlying image features the best in all situations.
 * </p>
 *
 * @author Peter Abeles
 */
public class ExamplePointFeatureTracker< T extends ImageGray<T>, D extends ImageGray<D>>
{
	// type of input image
	Class<T> imageType;
	// type of image used to store the input image's derivative (needed by KLT)
	Class<D> derivType;

	// tracks point features inside the image
	PointTracker<T> tracker;

	// displays the video sequence and tracked features
	ImagePanel gui = new ImagePanel();

	// how long (in milliseconds) to wait between frames so playback isn't too fast
	int pause;

	/**
	 * @param imageType Type of gray-scale image the tracker will process.
	 * @param pause Delay in milliseconds between displayed frames.
	 */
	public ExamplePointFeatureTracker(Class<T> imageType , int pause ) {
		this.imageType = imageType;
		this.derivType = GImageDerivativeOps.getDerivativeType(imageType);
		this.pause = pause;
	}

	/**
	 * Processes the sequence of images and displays the tracked features in a window
	 */
	public void process(SimpleImageSequence<T> sequence) {

		// Figure out how large the GUI window should be
		T frame = sequence.next();
		gui.setPreferredSize(new Dimension(frame.getWidth(),frame.getHeight()));
		// fixed typo: window title previously read "KTL Tracker"
		ShowImages.showWindow(gui,"KLT Tracker", true);

		// process each frame in the image sequence
		while( sequence.hasNext() ) {
			frame = sequence.next();

			// tell the tracker to process the frame
			tracker.process(frame);

			// if there are too few tracks spawn more
			if( tracker.getActiveTracks(null).size() < 130 )
				tracker.spawnTracks();

			// visualize tracking results
			updateGUI(sequence);

			// wait for a fraction of a second so it doesn't process too fast
			BoofMiscOps.pause(pause);
		}
	}

	/**
	 * Draws active tracks using semi-unique colors derived from each track's feature ID,
	 * and draws newly spawned tracks in green.
	 */
	private void updateGUI(SimpleImageSequence<T> sequence) {
		BufferedImage orig = sequence.getGuiImage();
		Graphics2D g2 = orig.createGraphics();

		// draw tracks with semi-unique colors so you can track individual points with your eyes
		for( PointTrack p : tracker.getActiveTracks(null) ) {
			int red = (int)(2.5*(p.featureId%100));
			int green = (int)((255.0/150.0)*(p.featureId%150));
			int blue = (int)(p.featureId%255);
			VisualizeFeatures.drawPoint(g2, (int)p.x, (int)p.y, new Color(red,green,blue));
		}

		// draw tracks which have just been spawned green
		for( PointTrack p : tracker.getNewTracks(null) ) {
			VisualizeFeatures.drawPoint(g2, (int)p.x, (int)p.y, Color.green);
		}

		// tell the GUI to update
		gui.setImage(orig);
		gui.repaint();
	}

	/**
	 * A simple way to create a Kanade-Lucas-Tomasi (KLT) tracker.
	 */
	public void createKLT() {
		PkltConfig config = new PkltConfig();
		config.templateRadius = 3;
		config.pyramidScaling = new int[]{1,2,4,8};

		tracker = FactoryPointTracker.klt(config, new ConfigGeneralDetector(600, 6, 1),
				imageType, derivType);
	}

	/**
	 * Creates a SURF feature tracker.
	 */
	public void createSURF() {
		ConfigFastHessian configDetector = new ConfigFastHessian();
		configDetector.maxFeaturesPerScale = 250;
		configDetector.extractRadius = 3;
		configDetector.initialSampleSize = 2;
		tracker = FactoryPointTracker.dda_FH_SURF_Fast(configDetector, null, null, imageType);
	}

	public static void main( String[] args ) throws FileNotFoundException {

		// GrayF32 is used as the input type; its image derivative type is also GrayF32
		Class<GrayF32> imageType = GrayF32.class;

		MediaManager media = DefaultMediaManager.INSTANCE;

		int pause;
		SimpleImageSequence<GrayF32> sequence =
				media.openVideo(UtilIO.pathExample("zoom.mjpeg"), ImageType.single(imageType)); pause=100;
//				media.openCamera(null,640,480,ImageType.single(imageType)); pause = 5;
		sequence.setLoop(true);

		ExamplePointFeatureTracker<GrayF32,GrayF32> app =
				new ExamplePointFeatureTracker<>(imageType,pause);

		// Comment or un-comment to change the type of tracker being used
		app.createKLT();
//		app.createSURF();

		app.process(sequence);
	}
}