Example Background Stationary Camera

Example of background modeling/motion detection from a stationary camera. Moving objects are detected inside the video based on their difference from a background model. These techniques can run very fast (the basic model runs at over 2,000 FPS) and can be very effective inside tracking algorithms.
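
The essential usage pattern is short: create a background model with FactoryBackgroundModel, then hand it one frame at a time through updateBackground(), which both refines the model and writes out a binary motion mask. The fragment below is a condensed sketch of the full example further down this page; imports, GUI code, and timing are omitted, and no new API is introduced.

// Basic stationary background model; the parameters match the full example below
ImageType imageType = ImageType.single(GrayF32.class);
BackgroundModelStationary background =
		FactoryBackgroundModel.stationaryBasic(new ConfigBackgroundBasic(35, 0.005f), imageType);

// Open a frame source; here a video from the example data set
String fileName = UtilIO.pathExample("background/street_intersection.mp4");
SimpleImageSequence video = DefaultMediaManager.INSTANCE.openVideo(fileName, background.getImageType());

// Motion mask: 1 = moving foreground, 0 = background
GrayU8 segmented = new GrayU8(video.getWidth(), video.getHeight());

while (video.hasNext()) {
	background.updateBackground(video.next(), segmented);
	// 'segmented' now holds the motion mask for the current frame
}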

Example File:

  • ExampleBackgroundRemovalStationary.java (https://github.com/lessthanoptimal/BoofCV/blob/v0.38/examples/src/main/java/boofcv/examples/tracking/ExampleBackgroundRemovalStationary.java)

Concepts:

  • Motion Detection
  • 2D Image Stitching

Related Examples:

Example Code

/**
 * Example showing how to perform background modeling when the camera is assumed to be stationary. This scenario
 * can be computed much faster than the moving camera case and, depending on the background model, can sometimes produce
 * reasonable results when the camera has a little bit of jitter.
 *
 * @author Peter Abeles
 */
public class ExampleBackgroundRemovalStationary {
	public static void main( String[] args ) {
		String fileName = UtilIO.pathExample("background/street_intersection.mp4");
//		String fileName = UtilIO.pathExample("background/rubixfire.mp4"); // dynamic background
//		String fileName = UtilIO.pathExample("background/horse_jitter.mp4"); // degraded performance because of jitter
//		String fileName = UtilIO.pathExample("tracking/chipmunk.mjpeg"); // Camera moves. Stationary will fail here

		// Comment/Uncomment to switch input image type
		ImageType imageType = ImageType.single(GrayF32.class);
//		ImageType imageType = ImageType.il(3, InterleavedF32.class);
//		ImageType imageType = ImageType.il(3, InterleavedU8.class);

//		ConfigBackgroundGmm configGmm = new ConfigBackgroundGmm();

		// Comment/Uncomment to switch algorithms
		BackgroundModelStationary background =
				FactoryBackgroundModel.stationaryBasic(new ConfigBackgroundBasic(35, 0.005f), imageType);
//				FactoryBackgroundModel.stationaryGmm(configGmm, imageType);

		// Open the input video (a webcam can be used instead, see the commented-out line below)
		MediaManager media = DefaultMediaManager.INSTANCE;
		SimpleImageSequence video =
				media.openVideo(fileName, background.getImageType());
//				media.openCamera(null,640,480,background.getImageType());

		// Declare storage for segmented image. 1 = moving foreground and 0 = background
		GrayU8 segmented = new GrayU8(video.getWidth(), video.getHeight());

		// Output image for rendering the binary mask, shown next to the input video in a grid panel
		var visualized = new BufferedImage(segmented.width, segmented.height, BufferedImage.TYPE_INT_RGB);
		var gui = new ImageGridPanel(1, 2);
		gui.setImages(visualized, visualized);

		ShowImages.showWindow(gui, "Static Scene: Background Segmentation", true);

		double fps = 0;
		double alpha = 0.01; // smoothing factor for FPS

		while (video.hasNext()) {
			ImageBase input = video.next();

			long before = System.nanoTime();
			background.updateBackground(input, segmented);
			long after = System.nanoTime();

			// Exponentially weighted moving average of processing speed in frames per second
			fps = (1.0 - alpha)*fps + alpha*(1.0/((after - before)/1e9));

			VisualizeBinaryData.renderBinary(segmented, false, visualized);
			gui.setImage(0, 0, (BufferedImage)video.getGuiImage());
			gui.setImage(0, 1, visualized);
			gui.repaint();
			System.out.println("FPS = " + fps);

			BoofMiscOps.sleep(5);
		}
		System.out.println("done!");
	}
}
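
The commented-out lines in the example show how to switch from the basic model to the Gaussian mixture (GMM) background model, which maintains a small mixture of Gaussians per pixel and tends to cope better with dynamic backgrounds such as the rubixfire.mp4 clip. As a sketch, assuming default GMM settings, only the configuration and factory call change; the rest of the example stays the same:

// Gaussian mixture model background instead of the basic model
ConfigBackgroundGmm configGmm = new ConfigBackgroundGmm();
BackgroundModelStationary background =
		FactoryBackgroundModel.stationaryGmm(configGmm, imageType);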