Example Stereo Disparity 3D

From BoofCV


[Figure: Example_stereo_disparity3d_pointcloud.jpg — 3D point cloud generated from disparity image. Accompanying video: Stereo Update Video (https://www.youtube.com/watch?v=8pn9Ebw90uk&t=672s)]

An additional example of stereo disparity. This one shows how to properly resize the input images and convert the disparity image into a 3D point cloud.
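The conversion itself relies on the standard rectified stereo relationship: depth is inversely proportional to disparity, and the X and Y coordinates follow from the pinhole model. The sketch below is only a minimal illustration of that math, which the library call used in the example performs internally; the method name and its parameters (fx, fy, cx, cy, baseline, disparityMin) are illustrative and would come from the rectified calibration matrix and the stereo parameters.

// A minimal sketch of the per-pixel disparity-to-3D math. Parameter names are illustrative:
// fx, fy, cx, cy come from the rectified calibration matrix, baseline from the stereo parameters,
// and disparityMin is the same constant used in the example below.
public static double[] disparityToPoint( double pixelX, double pixelY, double disparityValue,
										  double fx, double fy, double cx, double cy,
										  double baseline, double disparityMin ) {
	// recover the true disparity in pixels
	double d = disparityValue + disparityMin;
	// depth is inversely proportional to disparity
	double z = baseline*fx/d;
	// back-project the pixel through the pinhole model
	double x = z*(pixelX - cx)/fx;
	double y = z*(pixelY - cy)/fy;
	// point in the rectified left camera frame; the example then rotates it into the original left camera frame
	return new double[]{ x, y, z };
}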

Example Code:

  • ExampleStereoDisparity3D.java (https://github.com/lessthanoptimal/BoofCV/blob/v0.36/examples/src/main/java/boofcv/examples/stereo/ExampleStereoDisparity3D.java)

Concepts:

  • Stereo disparity
  • Point clouds

Related Videos

Related Examples:

Example Code

/**
 * Expanding upon ExampleStereoDisparity, this example demonstrates how to rescale an image for stereo processing and
 * then compute its 3D point cloud.  Images are often rescaled to improve speed and sometimes quality.  Creating
 * 3D point clouds from disparity images is easy and well documented in the literature, but there are some nuances
 * to it.
 *
 * @author Peter Abeles
 */
public class ExampleStereoDisparity3D {

	// Specifies the disparity values which will be considered
	private static final int disparityMin = 15;
	private static final int disparityRange = 60;

	/**
	 * Given already computed rectified images and known stereo parameters, create a 3D cloud and visualize it
	 */
	public static JComponent computeAndShowCloud( StereoParameters param,
												  GrayU8 rectLeft,
												  RectifyCalibrated rectAlg, GrayF32 disparity ) {
		// The point cloud will be in the left camera's reference frame
		DMatrixRMaj rectK = rectAlg.getCalibrationMatrix();
		DMatrixRMaj rectR = rectAlg.getRectifiedRotation();

		// Put all the disparity parameters into one data structure
		var disparityParameters = new DisparityParameters();
		disparityParameters.baseline = param.getBaseline();
		disparityParameters.disparityMin = disparityMin;
		disparityParameters.disparityRange = disparityRange;
		disparityParameters.rotateToRectified.set(rectR);
		PerspectiveOps.matrixToPinhole(rectK, rectLeft.width, rectLeft.height, disparityParameters.pinhole);

		// Iterate through each pixel in disparity image and compute its 3D coordinate
		PointCloudViewer pcv = VisualizeData.createPointCloudViewer();
		pcv.setTranslationStep(param.getBaseline()*0.1);

		// Next create the 3D point cloud. The function below will handle conversion from disparity into
		// XYZ, then transform from the rectified into the normal camera coordinate system. Feel free to glance at the
		// source code to understand exactly what it's doing
		MultiViewStereoOps.disparityToCloud(disparity, disparityParameters, null,
				( pixX, pixY, x, y, z ) -> {
					// look up the gray value. Then convert it into RGB
					int v = rectLeft.unsafe_get(pixX, pixY);
					pcv.addPoint(x, y, z, v << 16 | v << 8 | v);
				});

		// Configure the display
//		pcv.setFog(true);
//		pcv.setClipDistance(baseline*45);
//		PeriodicColorizer colorizer = new TwoAxisRgbPlane.Z_XY(4.0);
//		colorizer.setPeriod(baseline*5);
//		pcv.setColorizer(colorizer); // sometimes pseudo color can be easier to view
		pcv.setDotSize(1);
		pcv.setCameraHFov(PerspectiveOps.computeHFov(param.left));
//		pcv.setCameraToWorld(cameraToWorld);
		JComponent viewer = pcv.getComponent();
		viewer.setPreferredSize(new Dimension(600, 600*param.left.height/param.left.width));
		return viewer;
	}

	public static void main( String[] args ) {
		// ------------- Compute Stereo Correspondence

		// Load camera images and stereo camera parameters
		String calibDir = UtilIO.pathExample("calibration/stereo/Bumblebee2_Chess/");
		String imageDir = UtilIO.pathExample("stereo/");

		StereoParameters param = CalibrationIO.load(new File(calibDir, "stereo.yaml"));

		// load and convert images into a BoofCV format
		BufferedImage origLeft = UtilImageIO.loadImage(imageDir, "chair01_left.jpg");
		BufferedImage origRight = UtilImageIO.loadImage(imageDir, "chair01_right.jpg");

		GrayU8 distLeft = ConvertBufferedImage.convertFrom(origLeft, (GrayU8)null);
		GrayU8 distRight = ConvertBufferedImage.convertFrom(origRight, (GrayU8)null);

		// rectify images and compute disparity
		GrayU8 rectLeft = distLeft.createSameShape();
		GrayU8 rectRight = distRight.createSameShape();

		RectifyCalibrated rectAlg = ExampleStereoDisparity.rectify(distLeft, distRight, param, rectLeft, rectRight);

//		GrayU8 disparity = ExampleStereoDisparity.denseDisparity(rectLeft, rectRight, 3,disparityMin, disparityRange);
		GrayF32 disparity = ExampleStereoDisparity.denseDisparitySubpixel(
				rectLeft, rectRight, 5, disparityMin, disparityRange);

		// ------------- Convert disparity image into a 3D point cloud

		JComponent viewer = computeAndShowCloud(param, rectLeft, rectAlg, disparity);

		// display the results.  Click and drag to change point cloud camera
		BufferedImage visualized = VisualizeImageData.disparity(disparity, null, disparityRange, 0);
		ShowImages.showWindow(visualized, "Disparity", true);
		ShowImages.showWindow(viewer, "Point Cloud", true);
	}
}
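
If the point cloud is needed as data rather than only for display, the same MultiViewStereoOps.disparityToCloud call from the example can be pointed at a list instead of the viewer. This is a minimal sketch under the assumption that disparity and disparityParameters have already been prepared exactly as in computeAndShowCloud above; the helper name disparityToList is made up for illustration, and imports are the same as in the full example.

// Collect the 3D points into a list instead of feeding them to the PointCloudViewer.
// 'disparity' and 'disparityParameters' are assumed to be set up as in computeAndShowCloud above.
public static List<Point3D_F64> disparityToList( GrayF32 disparity, DisparityParameters disparityParameters ) {
	List<Point3D_F64> cloud = new ArrayList<>();
	MultiViewStereoOps.disparityToCloud(disparity, disparityParameters, null,
			( pixX, pixY, x, y, z ) -> cloud.add(new Point3D_F64(x, y, z)));
	return cloud;
}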