Example Visual Odometry Depth
This example demonstrates how to estimate the camera's ego motion using an RGB-D sensor, such as the Kinect, which contains visual and depth information.
Example Code:
- ExampleVisualOdometryDepth.java: https://github.com/lessthanoptimal/BoofCV/blob/v0.39/examples/src/main/java/boofcv/examples/sfm/ExampleVisualOdometryDepth.java
Concepts:
- RGB-D
 
Relevant Videos:
- YouTube: http://www.youtube.com/watch?v=kOGXsf1tP3A
Related Examples:
Example Code
/**
 * Bare bones example showing how to estimate the camera's ego-motion using a depth camera system, e.g. Kinect.
 * Additional information on the scene can be optionally extracted from the algorithm if it implements AccessPointTracks3D.
 *
 * @author Peter Abeles
 */
public class ExampleVisualOdometryDepth {
	public static void main( String[] args ) {
		MediaManager media = DefaultMediaManager.INSTANCE;
		String directory = UtilIO.pathExample("kinect/straight");
		// load camera description and the video sequence
		VisualDepthParameters param = CalibrationIO.load(
				media.openFile(new File(directory, "visualdepth.yaml").getPath()));
		// specify how the image features are going to be tracked
		ConfigRgbDepthTrackPnP config = new ConfigRgbDepthTrackPnP();
		config.depthScale = 1e-3; // convert depth image distance units to meters
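		// track features with a KLT tracker seeded by Shi-Tomasi corner detections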
		config.tracker.typeTracker = ConfigPointTracker.TrackerType.KLT;
		config.tracker.detDesc.detectPoint.type = PointDetectorTypes.SHI_TOMASI;
		config.tracker.detDesc.detectPoint.shiTomasi.radius = 3;
		config.tracker.detDesc.detectPoint.general.maxFeatures = 600;
		config.tracker.detDesc.detectPoint.general.radius = 3;
		config.tracker.detDesc.detectPoint.general.threshold = 1;
		// declare the visual odometry algorithm, which tracks image features, looks up their depth,
		// and estimates frame-to-frame motion by solving the PnP problem
		DepthVisualOdometry<GrayU8, GrayU16> visualOdometry =
				FactoryVisualOdometry.rgbDepthPnP(config, GrayU8.class, GrayU16.class);
		// Pass in intrinsic/extrinsic calibration. This can be changed in the future.
		visualOdometry.setCalibration(param.visualParam, new DoNothing2Transform2_F32());
		// Process the video sequence and output the location plus number of inliers
		SimpleImageSequence<GrayU8> videoVisual = media.openVideo(
				new File(directory, "rgb.mjpeg").getPath(), ImageType.single(GrayU8.class));
		SimpleImageSequence<GrayU16> videoDepth = media.openVideo(
				new File(directory, "depth.mpng").getPath(), ImageType.single(GrayU16.class));
		long startTime = System.nanoTime();
		while (videoVisual.hasNext()) {
			GrayU8 visual = videoVisual.next();
			GrayU16 depth = videoDepth.next();
			if (!visualOdometry.process(visual, depth)) {
				throw new RuntimeException("VO Failed!");
			}
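			// retrieve the estimated camera pose (camera-to-world transform) and print its translation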
			Se3_F64 leftToWorld = visualOdometry.getCameraToWorld();
			Vector3D_F64 T = leftToWorld.getT();
			System.out.printf("Location %8.2f %8.2f %8.2f, %s\n", T.x, T.y, T.z, trackStats(visualOdometry));
		}
		System.out.printf("FPS %4.2f\n", videoVisual.getFrameNumber()/((System.nanoTime() - startTime)*1e-9));
	}
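	/**
	 * The trackStats() helper called above is defined elsewhere in the full example file. A minimal
	 * sketch is given below; it assumes (as suggested by the class javadoc) that an algorithm
	 * implementing AccessPointTracks3D exposes getTotalTracks() and isTrackInlier(), and it simply
	 * counts how many of the current tracks were inliers in the last motion estimate.
	 */
	public static String trackStats( Object alg ) {
		// only algorithms that implement AccessPointTracks3D expose per-track information
		if (!(alg instanceof AccessPointTracks3D))
			return "";

		AccessPointTracks3D access = (AccessPointTracks3D)alg;

		// count how many of the currently active tracks were inliers in the motion estimate
		int count = 0;
		int total = access.getTotalTracks();
		for (int i = 0; i < total; i++) {
			if (access.isTrackInlier(i))
				count++;
		}

		return "inliers: " + count + "/" + total;
	}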
}