Example Stereo Disparity 3D

<center>
{| class="wikitable" width="400pt"
| [[file:Example_stereo_disparity3d_pointcloud.jpg|400px]] || {{#ev:youtube|https://www.youtube.com/watch?v=8pn9Ebw90uk|400|center|||start=672}}
|-
| 3D point cloud generated from disparity image. || Stereo Update Video
|}
</center>

An additional example for stereo disparity. This one shows you how to properly resize input images and convert the disparity image into a 3D point cloud.

Example Code:
* [https://github.com/lessthanoptimal/BoofCV/blob/v0.41/examples/src/main/java/boofcv/examples/stereo/ExampleStereoDisparity3D.java ExampleStereoDisparity3D.java]

Concepts:
* Stereo disparity
* Point clouds

Related Videos
* [https://www.youtube.com/watch?v=8pn9Ebw90uk&t=672s Stereo Update 2020]

Related Examples:
* [[Example_Stereo_Disparity| Stereo Disparity]]
* [[Example_Stereo_Mesh| Disparity to Mesh]]

= Example Code =
<syntaxhighlight lang="java">
/**
 * Expanding upon ExampleStereoDisparity, this example demonstrates how to rescale an image for stereo processing and
 * then compute its 3D point cloud. Images are often rescaled to improve speed and sometimes quality. Creating
 * 3D point clouds from disparity images is easy and well documented in the literature, but there are some nuances
 * to it.
 *
 * @author Peter Abeles
 */
public class ExampleStereoDisparity3D {

	// Specifies the disparity values which will be considered
	private static final int disparityMin = 15;
	private static final int disparityRange = 60;

	/**
	 * Given already computed rectified images and known stereo parameters, create a 3D cloud and visualize it
	 */
	public static JComponent computeAndShowCloud( StereoParameters param,
												  GrayU8 rectLeft,
												  RectifyCalibrated rectAlg, GrayF32 disparity ) {
		// The point cloud will be in the left camera's reference frame
		DMatrixRMaj rectK = rectAlg.getCalibrationMatrix();
		DMatrixRMaj rectR = rectAlg.getRectifiedRotation();

		// Put all the disparity parameters into one data structure
		var disparityParameters = new DisparityParameters();
		disparityParameters.baseline = param.getBaseline();
		disparityParameters.disparityMin = disparityMin;
		disparityParameters.disparityRange = disparityRange;
		disparityParameters.rotateToRectified.setTo(rectR);
		PerspectiveOps.matrixToPinhole(rectK, rectLeft.width, rectLeft.height, disparityParameters.pinhole);
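		// matrixToPinhole() copies fx, fy, skew, cx, cy out of the 3x3 rectified calibration
		// matrix into the pinhole model that disparityToCloud() uses below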

		// Iterate through each pixel in disparity image and compute its 3D coordinate
		PointCloudViewer pcv = VisualizeData.createPointCloudViewer();
		pcv.setTranslationStep(param.getBaseline()*0.1);

		// Next create the 3D point cloud. The function below will handle conversion from disparity into
		// XYZ, then transform the points from the rectified into the original left camera coordinate system. Feel free to glance at the
		// source code to understand exactly what it's doing
		MultiViewStereoOps.disparityToCloud(disparity, disparityParameters, null,
				( pixX, pixY, x, y, z ) -> {
					// look up the gray value. Then convert it into RGB
					int v = rectLeft.unsafe_get(pixX, pixY);
					pcv.addPoint(x, y, z, v << 16 | v << 8 | v);
				});
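
		// Sketch of the math performed by disparityToCloud() above: using the rectified
		// intrinsics (fx, fy, cx, cy) and baseline B, a pixel (pixX, pixY) with stored value d'
		// has disparity d = disparityMin + d', and
		//   z = fx*B/d,  x = z*(pixX - cx)/fx,  y = z*(pixY - cy)/fy
		// The point is then rotated from the rectified frame back into the original left camera
		// frame using the inverse of the rectification rotation rectR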

		// Configure the display
//		pcv.setFog(true);
//		pcv.setClipDistance(baseline*45);
//		PeriodicColorizer colorizer = new TwoAxisRgbPlane.Z_XY(4.0);
//		colorizer.setPeriod(baseline*5);
//		pcv.setColorizer(colorizer); // sometimes pseudo color can be easier to view
		pcv.setDotSize(1);
		pcv.setCameraHFov(PerspectiveOps.computeHFov(param.left));
//		pcv.setCameraToWorld(cameraToWorld);
		JComponent viewer = pcv.getComponent();
		viewer.setPreferredSize(new Dimension(600, 600*param.left.height/param.left.width));
		return viewer;
	}

	public static void main( String[] args ) {
		// ------------- Compute Stereo Correspondence

		// Load camera images and stereo camera parameters
		String calibDir = UtilIO.pathExample("calibration/stereo/Bumblebee2_Chess/");
		String imageDir = UtilIO.pathExample("stereo/");

		StereoParameters param = CalibrationIO.load(new File(calibDir, "stereo.yaml"));
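		// stereo.yaml holds the intrinsics of both cameras plus the extrinsic transform from the
		// right to the left camera; the baseline used when building the point cloud comes from it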

		// load and convert images into a BoofCV format
		BufferedImage origLeft = UtilImageIO.loadImage(imageDir, "chair01_left.jpg");
		BufferedImage origRight = UtilImageIO.loadImage(imageDir, "chair01_right.jpg");

		GrayU8 distLeft = ConvertBufferedImage.convertFrom(origLeft, (GrayU8)null);
		GrayU8 distRight = ConvertBufferedImage.convertFrom(origRight, (GrayU8)null);

		// rectify images and compute disparity
		GrayU8 rectLeft = distLeft.createSameShape();
		GrayU8 rectRight = distRight.createSameShape();

		RectifyCalibrated rectAlg = ExampleStereoDisparity.rectify(distLeft, distRight, param, rectLeft, rectRight);
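		// Rectification warps both images so that corresponding points lie on the same image
		// row, which is what the block matching disparity computation below assumes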

//		GrayU8 disparity = ExampleStereoDisparity.denseDisparity(rectLeft, rectRight, 3,disparityMin, disparityRange);
		GrayF32 disparity = ExampleStereoDisparity.denseDisparitySubpixel(
				rectLeft, rectRight, 5, disparityMin, disparityRange);
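
		// Note: values in the disparity image are relative to disparityMin, and pixels where no
		// correspondence was found are marked with a value at or above disparityRange so that
		// they can be skipped when the point cloud is constructed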

		// ------------- Convert disparity image into a 3D point cloud

		JComponent viewer = computeAndShowCloud(param, rectLeft, rectAlg, disparity);

		// display the results. Click and drag to change point cloud camera
		BufferedImage visualized = VisualizeImageData.disparity(disparity, null, disparityRange, 0);
		ShowImages.showWindow(visualized, "Disparity", true);
		ShowImages.showWindow(viewer, "Point Cloud", true);
	}
}
</syntaxhighlight>
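
The class comment mentions rescaling the input images, but the listing above no longer does so. For reference, below is a minimal sketch of how an earlier revision of this example rescaled the images before rectification. It is written with that revision's older API and variable names (ImageUInt8, TypeInterpolate, DistortImageOps.scale, PerspectiveOps.scaleIntrinsic), so it will not compile against the listing above as-is; treat it as a sketch of the steps involved. The key point is that the intrinsic parameters must be scaled together with the images.

<syntaxhighlight lang="java">
// Sketch based on an earlier revision of this example (older BoofCV API); names may have changed
double scale = 0.5;

// allocate the down-scaled images
ImageUInt8 scaledLeft = new ImageUInt8((int)(distLeft.width*scale), (int)(distLeft.height*scale));
ImageUInt8 scaledRight = new ImageUInt8((int)(distRight.width*scale), (int)(distRight.height*scale));

// resample the input images
DistortImageOps.scale(distLeft, scaledLeft, TypeInterpolate.BILINEAR);
DistortImageOps.scale(distRight, scaledRight, TypeInterpolate.BILINEAR);

// Don't forget to adjust the camera parameters for the change in scale!
PerspectiveOps.scaleIntrinsic(param.left, scale);
PerspectiveOps.scaleIntrinsic(param.right, scale);
</syntaxhighlight>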