Example Video Stabilization
Video stabilization attempts to overlay more recent images on top of a keyframe. This makes it easier to identify objects which are moving relative to the background.
Example File: ExampleVideoStabilization.java (https://github.com/lessthanoptimal/BoofCV/blob/v0.38/examples/src/main/java/boofcv/examples/geometry/ExampleVideoStabilization.java)
Concepts:
- Image Stitching
- Image Stabilization
- 2D image motion
Related Examples:
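The keyframe is simply the first frame handed to the motion estimator: every later frame is registered against it, and the frame is then rendered through the inverse of that motion so the static background stays put. The sketch below illustrates this idea using the ImageMotion2D interface from the example in the next section; it is an outline under that assumption (imports and video loading omitted), not a drop-in replacement for the full example.

// Conceptual sketch: the first processed frame becomes the keyframe, and
// getFirstToCurrent() is the estimated motion from that keyframe to the newest
// frame. StitchingFromMotion2D, used in the full example, renders the newest
// frame through the inverse of this transform so it lands on top of the keyframe.
static void printCameraShake( ImageMotion2D<GrayF32, Homography2D_F64> motion2D,
                              SimpleImageSequence<GrayF32> video ) {
    while (video.hasNext()) {
        if (!motion2D.process(video.next()))
            continue; // registration failed on this frame; skip it
        Homography2D_F64 keyToCurr = motion2D.getFirstToCurrent();
        // For small camera shake the homography is close to a pure translation,
        // so a13/a23 give a rough feel for how far the view has drifted
        System.out.printf("keyframe -> current: dx=%6.1f dy=%6.1f%n", keyToCurr.a13, keyToCurr.a23);
    }
}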
Example Code
/**
* Example of how to stabilize a video sequence using StitchingFromMotion2D. Video stabilization is almost
* the same as creating a video mosaic and the code in this example is a tweaked version of the mosaic example.
* The differences are that the output size is the same as the input image size and that the origin is never changed.
*
* @author Peter Abeles
*/
public class ExampleVideoStabilization {
    public static void main( String[] args ) {
        // Configure the feature detector
        ConfigPointDetector configDetector = new ConfigPointDetector();
        configDetector.type = PointDetectorTypes.SHI_TOMASI;
        configDetector.general.maxFeatures = 300;
        configDetector.general.threshold = 10;
        configDetector.general.radius = 2;

        // Use a KLT tracker
        PointTracker<GrayF32> tracker = FactoryPointTracker.klt(4, configDetector, 3,
                GrayF32.class, GrayF32.class);

        // This estimates the 2D image motion
        // An Affine2D_F64 model also works quite well.
        ImageMotion2D<GrayF32, Homography2D_F64> motion2D =
                FactoryMotion2D.createMotion2D(200, 3, 2, 30, 0.6, 0.5, false, tracker, new Homography2D_F64());

        // wrap it so it outputs color images while estimating motion from gray
        ImageMotion2D<Planar<GrayF32>, Homography2D_F64> motion2DColor =
                new PlToGrayMotion2D<>(motion2D, GrayF32.class);

        // This fuses the images together
        StitchingFromMotion2D<Planar<GrayF32>, Homography2D_F64>
                stabilize = FactoryMotion2D.createVideoStitch(0.5, motion2DColor, ImageType.pl(3, GrayF32.class));

        // Load an image sequence
        MediaManager media = DefaultMediaManager.INSTANCE;
        String fileName = UtilIO.pathExample("shake.mjpeg");
        SimpleImageSequence<Planar<GrayF32>> video =
                media.openVideo(fileName, ImageType.pl(3, GrayF32.class));

        Planar<GrayF32> frame = video.next();

        // The output image size is the same as the input image size
        stabilize.configure(frame.width, frame.height, null);
        // process the first frame
        stabilize.process(frame);

        // Create the GUI for displaying the results + input image
        ImageGridPanel gui = new ImageGridPanel(1, 2);
        gui.setImage(0, 0, new BufferedImage(frame.width, frame.height, BufferedImage.TYPE_INT_RGB));
        gui.setImage(0, 1, new BufferedImage(frame.width, frame.height, BufferedImage.TYPE_INT_RGB));
        gui.autoSetPreferredSize();

        ShowImages.showWindow(gui, "Example Stabilization", true);

        // process the video sequence one frame at a time
        while (video.hasNext()) {
            if (!stabilize.process(video.next()))
                throw new RuntimeException("Don't forget to handle failures!");

            // display the stabilized image
            ConvertBufferedImage.convertTo(frame, gui.getImage(0, 0), true);
            ConvertBufferedImage.convertTo(stabilize.getStitchedImage(), gui.getImage(0, 1), true);
            gui.repaint();

            // throttle the speed just in case it's on a fast computer
            BoofMiscOps.pause(50);
        }
    }
}
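The comment next to the motion estimator notes that an Affine2D_F64 model also works well. Under that assumption, swapping models only changes the generic type parameter and the model instance passed to the factory calls; this sketch continues from the setup above (tracker as declared there):

// Same pipeline with an affine motion model instead of a homography. Only the
// model type changes; the factory calls and numeric arguments are the ones used
// in the example above.
ImageMotion2D<GrayF32, Affine2D_F64> motionAffine =
        FactoryMotion2D.createMotion2D(200, 3, 2, 30, 0.6, 0.5, false, tracker, new Affine2D_F64());
ImageMotion2D<Planar<GrayF32>, Affine2D_F64> motionAffineColor =
        new PlToGrayMotion2D<>(motionAffine, GrayF32.class);
StitchingFromMotion2D<Planar<GrayF32>, Affine2D_F64> stabilizeAffine =
        FactoryMotion2D.createVideoStitch(0.5, motionAffineColor, ImageType.pl(3, GrayF32.class));

The example simply throws a RuntimeException when process() returns false, which is fine for a demo but not for real footage. A more forgiving loop, using only calls that already appear above, falls back to showing the raw frame whenever motion estimation fails:

// Alternative main loop: on failure, display the unstabilized input and keep
// going instead of throwing. Every call here is taken from the example above.
while (video.hasNext()) {
    Planar<GrayF32> input = video.next();
    ConvertBufferedImage.convertTo(input, gui.getImage(0, 0), true);
    if (stabilize.process(input)) {
        ConvertBufferedImage.convertTo(stabilize.getStitchedImage(), gui.getImage(0, 1), true);
    } else {
        // estimation failed on this frame; show it unstabilized
        ConvertBufferedImage.convertTo(input, gui.getImage(0, 1), true);
    }
    gui.repaint();
    BoofMiscOps.pause(50);
}

Note that, unlike the related mosaic example, the stitching origin is never moved: the first frame remains the keyframe for the whole sequence, which is what keeps the background fixed in the output.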