Example Stereo Disparity
Shows how to compute dense disparity between two rectified stereo images. BoofCV provides two different rectangular region-based algorithms and noise reduction techniques targeted at real-time processing. Stereo vision can be difficult to get right, so please read all JavaDoc and cited papers. Dense stereo disparity is computationally expensive and is likely to require a reduction in image size to achieve truly real-time performance.
For visualization purposes the disparity is encoded using a color map. Hotter colors indicate closer objects while cooler colors indicate objects that are farther away. Cameras must be accurately calibrated or an error of a few pixels will drastically degrade performance. A common preprocessing step is to run a Laplacian of Gaussian (LoG) edge detector across the image to provide invariance to lighting conditions. That was not done below because the cameras have their gain synchronized.
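For reference, the fragment below is a rough sketch, not part of the original example, of what that LoG-style preprocessing could look like. It assumes BoofCV's ConvertImage, BlurImageOps, and LaplacianEdge classes provide the signatures used here; check the JavaDoc of your BoofCV version before relying on it.

public static ImageFloat32 logPreprocess( ImageUInt8 rectified ) {
    ImageFloat32 gray = new ImageFloat32(rectified.width,rectified.height);
    ImageFloat32 blurred = new ImageFloat32(rectified.width,rectified.height);
    ImageFloat32 edge = new ImageFloat32(rectified.width,rectified.height);

    // convert to floating point, then smooth with a Gaussian kernel
    ConvertImage.convert(rectified, gray);
    BlurImageOps.gaussian(gray, blurred, 1.0, 2, null);

    // apply the Laplacian to the smoothed image, approximating a LoG filter
    LaplacianEdge.process(blurred, edge);
    return edge;
}

The resulting ImageFloat32 images would then be passed to a floating point variant of the disparity calculation, which the example notes is also supported.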
Example File: ExampleStereoDisparity.java (https://github.com/lessthanoptimal/BoofCV/blob/v0.14/examples/src/boofcv/examples/ExampleStereoDisparity.java)
Concepts:
- Stereo Vision
- Disparity
- Rectification
Related Applets:
Related Examples:
Example Code
/**
* The disparity between two stereo images is used to estimate the range of objects inside
* the camera's view. Disparity is the difference in position between the viewed location
* of a point in the left and right stereo images. Because input images are rectified,
* corresponding points can be found by only searching along image rows.
*
* Values in the disparity image specify how different the two images are. A value of X indicates
* that the corresponding point in the right image from the left is at "x' = x - X - minDisparity",
* where x' and x are the locations in the right and left images respectively. An invalid value
* with no correspondence is set to a value more than (max - min) disparity.
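 * For example (hypothetical values), with minDisparity = 10, a disparity value of X = 5 at x = 75 in
 * the left image means the corresponding pixel in the right image is at x' = 75 - 5 - 10 = 60 on the same row.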
*
* @author Peter Abeles
*/
public class ExampleStereoDisparity {
/**
 * Computes the dense disparity between two stereo images. The input images
* must be rectified with lens distortion removed to work! Floating point images
* are also supported.
*
* @param rectLeft Rectified left camera image
* @param rectRight Rectified right camera image
* @param regionSize Radius of region being matched
* @param minDisparity Minimum disparity that is considered
* @param maxDisparity Maximum disparity that is considered
* @return Disparity image
*/
public static ImageUInt8 denseDisparity( ImageUInt8 rectLeft , ImageUInt8 rectRight ,
int regionSize,
int minDisparity , int maxDisparity )
{
// A slower but more accurate algorithm is selected
// All of these parameters should be tuned
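// The trailing arguments are the region radius (x,y), maximum per-pixel error, right-to-left
// validation tolerance, and texture threshold; see the FactoryStereoDisparity JavaDoc for details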
StereoDisparity<ImageUInt8,ImageUInt8> disparityAlg =
FactoryStereoDisparity.regionWta(DisparityAlgorithms.RECT_FIVE,
minDisparity, maxDisparity, regionSize, regionSize, 25, 1, 0.2, ImageUInt8.class);
// process and return the results
disparityAlg.process(rectLeft,rectRight);
return disparityAlg.getDisparity();
}
/**
* Same as above, but compute disparity to within sub-pixel accuracy. The difference between the
* two is more apparent when a 3D point cloud is computed.
*/
public static ImageFloat32 denseDisparitySubpixel( ImageUInt8 rectLeft , ImageUInt8 rectRight ,
int regionSize ,
int minDisparity , int maxDisparity )
{
// A slower but more accurate algorithm is selected
// All of these parameters should be tuned
StereoDisparity<ImageUInt8,ImageFloat32> disparityAlg =
FactoryStereoDisparity.regionSubpixelWta(DisparityAlgorithms.RECT_FIVE,
minDisparity, maxDisparity, regionSize, regionSize, 25, 1, 0.2, ImageUInt8.class);
// process and return the results
disparityAlg.process(rectLeft,rectRight);
return disparityAlg.getDisparity();
}
/**
 * Rectifies the input images using known calibration.
*/
public static RectifyCalibrated rectify( ImageUInt8 origLeft , ImageUInt8 origRight ,
StereoParameters param ,
ImageUInt8 rectLeft , ImageUInt8 rectRight )
{
// Compute rectification
RectifyCalibrated rectifyAlg = RectifyImageOps.createCalibrated();
Se3_F64 leftToRight = param.getRightToLeft().invert(null);
// original camera calibration matrices
DenseMatrix64F K1 = PerspectiveOps.calibrationMatrix(param.getLeft(), null);
DenseMatrix64F K2 = PerspectiveOps.calibrationMatrix(param.getRight(), null);
rectifyAlg.process(K1,new Se3_F64(),K2,leftToRight);
// rectification matrix for each image
DenseMatrix64F rect1 = rectifyAlg.getRect1();
DenseMatrix64F rect2 = rectifyAlg.getRect2();
// New calibration matrix after rectification
DenseMatrix64F rectK = rectifyAlg.getCalibrationMatrix();
// Adjust the rectification to make the view area more useful
RectifyImageOps.allInsideLeft(param.left, rect1, rect2, rectK);
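// (RectifyImageOps.fullViewLeft is an alternative that keeps the entire original left view visible)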
// undistort and rectify the images
ImageDistort<ImageUInt8> imageDistortLeft =
RectifyImageOps.rectifyImage(param.getLeft(), rect1, ImageUInt8.class);
ImageDistort<ImageUInt8> imageDistortRight =
RectifyImageOps.rectifyImage(param.getRight(), rect2, ImageUInt8.class);
imageDistortLeft.apply(origLeft, rectLeft);
imageDistortRight.apply(origRight, rectRight);
return rectifyAlg;
}
public static void main( String args[] ) {
String calibDir = "../data/applet/calibration/stereo/Bumblebee2_Chess/";
String imageDir = "../data/applet/stereo/";
StereoParameters param = BoofMiscOps.loadXML(calibDir + "stereo.xml");
// load and convert images into a BoofCV format
BufferedImage origLeft = UtilImageIO.loadImage(imageDir + "chair01_left.jpg");
BufferedImage origRight = UtilImageIO.loadImage(imageDir + "chair01_right.jpg");
ImageUInt8 distLeft = ConvertBufferedImage.convertFrom(origLeft,(ImageUInt8)null);
ImageUInt8 distRight = ConvertBufferedImage.convertFrom(origRight,(ImageUInt8)null);
// rectify images
ImageUInt8 rectLeft = new ImageUInt8(distLeft.width,distLeft.height);
ImageUInt8 rectRight = new ImageUInt8(distRight.width,distRight.height);
rectify(distLeft,distRight,param,rectLeft,rectRight);
// compute disparity
ImageUInt8 disparity = denseDisparity(rectLeft,rectRight,5,10,60);
// ImageFloat32 disparity = denseDisparitySubpixel(rectLeft,rectRight,5,10,60);
// show results
BufferedImage visualized = VisualizeImageData.disparity(disparity, null,10,60,0);
ShowImages.showWindow(rectLeft,"Rectified");
ShowImages.showWindow(visualized,"Disparity");
}
}
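The disparity image can also be converted into metric depth. The helper below is a minimal sketch, not part of the original example, of the standard pin-hole stereo relationship depth = f*B/d, where f is the focal length of the rectified cameras (taken from the calibration matrix returned by RectifyCalibrated.getCalibrationMatrix()), B is the baseline between the two cameras, and d is the disparity in pixels. The method name and its arguments are hypothetical.

/**
 * Sketch only: converts a single disparity value into depth along the optical axis.
 *
 * @param disparityValue Value read from the disparity image; invalid values (larger than
 *                       maxDisparity - minDisparity) should be skipped by the caller
 * @param minDisparity Minimum disparity used when the disparity image was computed
 * @param rectK Rectified calibration matrix from RectifyCalibrated.getCalibrationMatrix()
 * @param param Stereo calibration parameters
 */
public static double disparityToDepth( double disparityValue , int minDisparity ,
                                        DenseMatrix64F rectK , StereoParameters param )
{
    // focal length (in pixels) of the rectified cameras
    double focalLength = rectK.get(0,0);
    // baseline = distance between the two camera centers
    double baseline = param.getRightToLeft().getT().norm();
    // total disparity in pixels is the stored value plus the minimum disparity
    double d = disparityValue + minDisparity;
    // pin-hole stereo: depth = f*B/d
    return focalLength*baseline/d;
}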