Example Visual Odometry Monocular Plane


This example demonstrates how to estimate the camera's ego-motion using a single camera and a known plane. Since the plane's location relative to the camera is known, there is no scale ambiguity, unlike the more general single-camera solution.
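
Geometrically, the known plane enters through a plane-induced homography: for two views related by rotation R and translation t, points on a plane with unit normal n at distance d from the first camera satisfy x2 ~ (R - t*n^T/d)*x1. Because d is supplied by the calibration, the translation recovered from tracked plane points carries metric scale. The snippet below is a minimal illustrative sketch of composing that homography with EJML; it is not part of the BoofCV example and does not reflect the internals of the algorithm used there.

import org.ejml.data.DMatrixRMaj;
import org.ejml.dense.row.CommonOps_DDRM;

/**
 * Illustrative sketch: composes the homography H = R - t*n^T/d induced by a plane with
 * unit normal n at distance d from the first camera (convention n^T X = d for points on
 * the plane). Since d is known, the translation t recovered from H has metric scale.
 */
public class PlaneHomographySketch {
	public static DMatrixRMaj planeInducedHomography( DMatrixRMaj R, DMatrixRMaj t,
													  DMatrixRMaj n, double d ) {
		// outer = t*n^T / d
		var outer = new DMatrixRMaj(3, 3);
		CommonOps_DDRM.multTransB(t, n, outer);
		CommonOps_DDRM.scale(1.0/d, outer);

		// H = R - t*n^T/d
		var H = new DMatrixRMaj(3, 3);
		CommonOps_DDRM.subtract(R, outer, H);
		return H;
	}

	public static void main( String[] args ) {
		var R = CommonOps_DDRM.identity(3);              // no rotation between the two views
		var t = new DMatrixRMaj(3, 1, true, 0.1, 0, 0);  // 0.1 m sideways motion
		var n = new DMatrixRMaj(3, 1, true, 0, 0, 1);    // plane normal along the optical axis
		double d = 2.0;                                  // plane is 2 m in front of the camera

		planeInducedHomography(R, t, n, d).print();
	}
}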

Concepts:

  • Plane/Homography

Example Code

// NOTE: the package declaration, imports, and the trackStats() print helper are not shown in
// this listing; see the complete example sources in the BoofCV repository. A sketch of
// trackStats() is provided after the listing.
/**
 * Bare bones example showing how to estimate the camera's ego-motion using a single camera and a known
 * plane. Additional information on the scene can be optionally extracted from the algorithm,
 * if it implements AccessPointTracks3D.
 *
 * @author Peter Abeles
 */
public class ExampleVisualOdometryMonocularPlane {
	public static void main( String[] args ) {
		MediaManager media = DefaultMediaManager.INSTANCE;

		String directory = UtilIO.pathExample("vo/drc/");

		// load camera description and the video sequence
		MonoPlaneParameters calibration = CalibrationIO.load(
				media.openFile(new File(directory, "mono_plane.yaml").getPath()));
		SimpleImageSequence<GrayU8> video = media.openVideo(
				new File(directory, "left.mjpeg").getPath(), ImageType.single(GrayU8.class));

		var config = new ConfigPlanarTrackPnP();

		// Specify how image features are detected (Shi-Tomasi corners) and tracked (pyramidal KLT)
		config.tracker.typeTracker = ConfigPointTracker.TrackerType.KLT;
		config.tracker.klt.pyramidLevels = ConfigDiscreteLevels.levels(4);
		config.tracker.klt.templateRadius = 3;

		config.tracker.detDesc.detectPoint.type = PointDetectorTypes.SHI_TOMASI;
		config.tracker.detDesc.detectPoint.general.maxFeatures = 600;
		config.tracker.detDesc.detectPoint.general.radius = 3;
		config.tracker.detDesc.detectPoint.general.threshold = 1;

		// Configure how visual odometry works: when tracks are spawned or dropped,
		// and the RANSAC-based robust motion estimation
		config.thresholdAdd = 75;
		config.thresholdRetire = 2;
		config.ransac.iterations = 200;
		config.ransac.inlierThreshold = 1.5;

		// Declare the monocular plane visual odometry algorithm
		MonocularPlaneVisualOdometry<GrayU8> visualOdometry = FactoryVisualOdometry.monoPlaneInfinity(config, GrayU8.class);

		// Pass in intrinsic/extrinsic calibration. This can be changed in the future.
		visualOdometry.setCalibration(calibration);

		// Process the video sequence and output the location plus number of inliers
		long startTime = System.nanoTime();
		while (video.hasNext()) {
			GrayU8 image = video.next();

			if (!visualOdometry.process(image)) {
				System.out.println("Fault!");
				visualOdometry.reset();
			}

			Se3_F64 cameraToWorld = visualOdometry.getCameraToWorld();
			Vector3D_F64 T = cameraToWorld.getT();

			System.out.printf("Location %8.2f %8.2f %8.2f, %s\n", T.x, T.y, T.z, trackStats(visualOdometry));
		}
		System.out.printf("FPS %4.2f\n", video.getFrameNumber()/((System.nanoTime() - startTime)*1e-9));
	}
}
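
The listing above uses a trackStats() helper that is not shown here. Below is a hypothetical stand-in that illustrates what it reports; it assumes the odometry object implements AccessPointTracks3D (as the class javadoc notes) and that the interface exposes getTotalTracks() and isTrackInlier(), names which may differ between BoofCV versions.

import boofcv.abst.sfm.AccessPointTracks3D;

public class TrackStatsSketch {
	/**
	 * Hypothetical stand-in for the trackStats() helper used in the example. If the visual
	 * odometry implementation exposes its tracks through AccessPointTracks3D, count how many
	 * tracks are currently maintained and how many were inliers in the last motion estimate.
	 */
	public static String trackStats( Object visualOdometry ) {
		if (!(visualOdometry instanceof AccessPointTracks3D))
			return "";

		AccessPointTracks3D access = (AccessPointTracks3D)visualOdometry;

		int total = access.getTotalTracks();
		int inliers = 0;
		for (int i = 0; i < total; i++) {
			if (access.isTrackInlier(i))
				inliers++;
		}
		return String.format("inliers %4d / tracks %4d", inliers, total);
	}
}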