Dense Stereo from Single Camera Example

[Figure: Associated inlier features between two views]
[Figure: Stereo disparity image]
[Figure: 3D point cloud]

A dense point cloud of an environment can be created from a single camera using two views. If the distance between the two views is not known, then the scale of the found point cloud will be arbitrary; a sketch after the list below shows how to fix the scale when the true baseline is known. In this example, natural image features are used to determine the geometric relationship between the two views. The algorithm can be summarized as follows:

  • Load camera calibration and two images
  • Detect, describe, and associate image features
  • Compute camera motion (Essential matrix)
  • Rectify image pair
  • Compute dense stereo disparity
  • Convert into 3D point cloud
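
Because the translation recovered from the essential matrix is only known up to scale, the point cloud can be expressed in real-world units only if the true baseline is supplied from an outside source. Below is a minimal sketch of that rescaling step, applied to the Se3_F64 returned by estimateCameraMotion() in the example; knownBaselineMeters is a hypothetical input, not something the example computes:

	// Hypothetical: the true distance between the two camera positions, e.g. measured by hand
	double knownBaselineMeters = 0.30;

	// The estimated translation has arbitrary scale; rescale it so its norm equals the
	// known baseline.  Points triangulated afterwards are then in the same units.
	Vector3D_F64 T = leftToRight.getT();
	double scale = knownBaselineMeters / T.norm();
	T.set(T.x * scale, T.y * scale, T.z * scale);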

Example File: ExampleStereoTwoViewsOneCamera.java

Concepts:

  • Point feature association
  • Epipolar geometry
  • Rectification
  • Dense stereo processing

Relevant Applets:

Related Tutorials/Example Code:

Example Code

/**
 * Example demonstrating how to use two images taken from a single calibrated camera to create a stereo disparity image,
 * from which a dense 3D point cloud of the scene can be computed.  For this technique to work the camera's motion
 * needs to be approximately tangential to the direction the camera is pointing.  The code below assumes that the first
 * image is to the left of the second image.
 *
 * @author Peter Abeles
 */
public class ExampleStereoTwoViewsOneCamera {

	// Disparity calculation parameters
	private static int minDisparity = 30;
	private static int maxDisparity = 160;

	public static void main(String[] args) {
		// specify location of images and calibration
		String calibDir = "../data/applet/calibration/mono/Sony_DSC-HX5V_Chess/";
		String imageDir = "../data/applet/stereo/";

		// Camera parameters
		IntrinsicParameters intrinsic = BoofMiscOps.loadXML(calibDir + "intrinsic.xml");

		// Input images from the camera moving left to right
		BufferedImage origLeft = UtilImageIO.loadImage(imageDir + "mono_wall_01.jpg");
//		BufferedImage origRight = UtilImageIO.loadImage(imageDir+"mono_wall_02.jpg");
		BufferedImage origRight = UtilImageIO.loadImage(imageDir + "mono_wall_03.jpg");

		// Input images with lens distortion
		ImageUInt8 distortedLeft = ConvertBufferedImage.convertFrom(origLeft, (ImageUInt8) null);
		ImageUInt8 distortedRight = ConvertBufferedImage.convertFrom(origRight, (ImageUInt8) null);

		// matched features between the two images
		List<AssociatedPair> matchedFeatures = ExampleFundamentalMatrix.computeMatches(origLeft, origRight);

		// convert from pixel coordinates into normalized image coordinates
		List<AssociatedPair> matchedCalibrated = convertToNormalizedCoordinates(matchedFeatures, intrinsic);

		// Robustly estimate camera motion
		List<AssociatedPair> inliers = new ArrayList<AssociatedPair>();
		Se3_F64 leftToRight = estimateCameraMotion(intrinsic, matchedCalibrated, inliers);

		drawInliers(origLeft, origRight, intrinsic, inliers);

		// Rectify and remove lens distortion for stereo processing
		DenseMatrix64F rectifiedK = new DenseMatrix64F(3, 3);
		ImageUInt8 rectifiedLeft = new ImageUInt8(distortedLeft.width, distortedLeft.height);
		ImageUInt8 rectifiedRight = new ImageUInt8(distortedLeft.width, distortedLeft.height);

		rectifyImages(distortedLeft, distortedRight, leftToRight, intrinsic, rectifiedLeft, rectifiedRight, rectifiedK);

		// compute disparity
		StereoDisparity<ImageUInt8, ImageFloat32> disparityAlg =
				FactoryStereoDisparity.regionSubpixelWta(DisparityAlgorithms.RECT_FIVE,
						minDisparity, maxDisparity, 5, 5, 20, 1, 0.1, ImageUInt8.class);

		// process and return the results
		disparityAlg.process(rectifiedLeft, rectifiedRight);
		ImageFloat32 disparity = disparityAlg.getDisparity();

		// show results
		BufferedImage visualized = VisualizeImageData.disparity(disparity, null, minDisparity, maxDisparity, 0);

		BufferedImage outLeft = ConvertBufferedImage.convertTo(rectifiedLeft, null);
		BufferedImage outRight = ConvertBufferedImage.convertTo(rectifiedRight, null);

		ShowImages.showWindow(new RectifiedPairPanel(true, outLeft, outRight), "Rectification");
		ShowImages.showWindow(visualized, "Disparity");

		showPointCloud(disparity, outLeft, leftToRight, rectifiedK, minDisparity, maxDisparity);

		System.out.println("Total found " + matchedCalibrated.size());
		System.out.println("Total Inliers " + inliers.size());
	}

	/**
	 * Estimates the camera motion robustly using RANSAC and a set of associated points.
	 *
	 * @param intrinsic   Intrinsic camera parameters
	 * @param matchedNorm set of matched point features in normalized image coordinates
	 * @param inliers     OUTPUT: Set of inlier features from RANSAC
	 * @return Found camera motion.  Note translation has an arbitrary scale
	 */
	public static Se3_F64 estimateCameraMotion(IntrinsicParameters intrinsic,
											   List<AssociatedPair> matchedNorm, List<AssociatedPair> inliers)
	{
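		// Nister's 5-point algorithm estimates the essential matrix from associated points
		// in normalized image coordinates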
		Estimate1ofEpipolar essentialAlg = FactoryMultiView.computeFundamental_1(EnumEpipolar.ESSENTIAL_5_NISTER, 5);
		TriangulateTwoViewsCalibrated triangulate = FactoryTriangulate.twoGeometric();
		ModelGenerator<Se3_F64, AssociatedPair> generateEpipolarMotion =
				new Se3FromEssentialGenerator(essentialAlg, triangulate);

		DistanceFromModel<Se3_F64, AssociatedPair> distanceSe3 =
				new DistanceSe3SymmetricSq(triangulate,
						intrinsic.fx, intrinsic.fy, intrinsic.skew,
						intrinsic.fx, intrinsic.fy, intrinsic.skew);

		// 1/2 a pixel tolerance for RANSAC inliers.  The distance metric is the symmetric
		// squared error summed over both views, hence 2*(0.5)^2
		double ransacTOL = 0.5 * 0.5 * 2.0;

		ModelMatcher<Se3_F64, AssociatedPair> epipolarMotion =
				new Ransac<Se3_F64, AssociatedPair>(2323, generateEpipolarMotion, distanceSe3,
						200, ransacTOL);

		if (!epipolarMotion.process(matchedNorm))
			throw new RuntimeException("Motion estimation failed");

		// save inlier set for debugging purposes
		inliers.addAll(epipolarMotion.getMatchSet());

		return epipolarMotion.getModel();
	}

	/**
	 * Convert a set of associated point features from pixel coordinates into normalized image coordinates.
	 */
	public static List<AssociatedPair> convertToNormalizedCoordinates(List<AssociatedPair> matchedFeatures, IntrinsicParameters intrinsic) {

		PointTransform_F64 tran = LensDistortionOps.transformRadialToNorm_F64(intrinsic);

		List<AssociatedPair> calibratedFeatures = new ArrayList<AssociatedPair>();

		for (AssociatedPair p : matchedFeatures) {
			AssociatedPair c = new AssociatedPair();

			tran.compute(p.p1.x, p.p1.y, c.p1);
			tran.compute(p.p2.x, p.p2.y, c.p2);

			calibratedFeatures.add(c);
		}

		return calibratedFeatures;
	}

	/**
	 * Remove lens distortion and rectify stereo images
	 *
	 * @param distortedLeft  Input distorted image from left camera.
	 * @param distortedRight Input distorted image from right camera.
	 * @param leftToRight    Camera motion from left to right
	 * @param intrinsic      Intrinsic camera parameters
	 * @param rectifiedLeft  Output rectified image for left camera.
	 * @param rectifiedRight Output rectified image for right camera.
	 * @param rectifiedK     Output camera calibration matrix for rectified camera
	 */
	public static void rectifyImages(ImageUInt8 distortedLeft,
									 ImageUInt8 distortedRight,
									 Se3_F64 leftToRight,
									 IntrinsicParameters intrinsic,
									 ImageUInt8 rectifiedLeft,
									 ImageUInt8 rectifiedRight,
									 DenseMatrix64F rectifiedK) {
		RectifyCalibrated rectifyAlg = RectifyImageOps.createCalibrated();

		// original camera calibration matrices
		DenseMatrix64F K = PerspectiveOps.calibrationMatrix(intrinsic, null);

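		// Both views come from the same physical camera, so the same K is used for each, and
		// the left view is placed at the world origin (identity transform)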
		rectifyAlg.process(K, new Se3_F64(), K, leftToRight);

		// rectification matrix for each image
		DenseMatrix64F rect1 = rectifyAlg.getRect1();
		DenseMatrix64F rect2 = rectifyAlg.getRect2();

		// New calibration matrix for the rectified views
		rectifiedK.set(rectifyAlg.getCalibrationMatrix());

		// Adjust the rectification to make the view area more useful
		RectifyImageOps.allInsideLeft(intrinsic, rect1, rect2, rectifiedK);

		// undistort and rectify the images
		ImageDistort<ImageUInt8> distortLeft =
				RectifyImageOps.rectifyImage(intrinsic, rect1, ImageUInt8.class);
		ImageDistort<ImageUInt8> distortRight =
				RectifyImageOps.rectifyImage(intrinsic, rect2, ImageUInt8.class);

		distortLeft.apply(distortedLeft, rectifiedLeft);
		distortRight.apply(distortedRight, rectifiedRight);
	}

	/**
	 * Draw inliers for debugging purposes.  Need to convert from normalized to pixel coordinates.
	 */
	public static void drawInliers(BufferedImage left, BufferedImage right, IntrinsicParameters intrinsic,
								   List<AssociatedPair> normalized) {
		PointTransform_F64 tran = LensDistortionOps.transformNormToRadial_F64(intrinsic);

		List<AssociatedPair> pixels = new ArrayList<AssociatedPair>();

		for (AssociatedPair n : normalized) {
			AssociatedPair p = new AssociatedPair();

			tran.compute(n.p1.x, n.p1.y, p.p1);
			tran.compute(n.p2.x, n.p2.y, p.p2);

			pixels.add(p);
		}

		// display the results
		AssociationPanel panel = new AssociationPanel(20);
		panel.setAssociation(pixels);
		panel.setImages(left, right);

		ShowImages.showWindow(panel, "Inlier Features");
	}

	/**
	 * Show results as a point cloud
	 */
	public static void showPointCloud(ImageSingleBand disparity, BufferedImage left,
									  Se3_F64 motion, DenseMatrix64F rectR ,
									  int minDisparity, int maxDisparity) {
		PointCloudTiltPanel gui = new PointCloudTiltPanel();

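		// norm of the estimated translation serves as the baseline; its units are arbitrary
		// unless the true camera motion is known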
		double baseline = motion.getT().norm();

		gui.configure(baseline, rectR, minDisparity, maxDisparity);
		gui.process(disparity, left);
		gui.setPreferredSize(new Dimension(left.getWidth(), left.getHeight()));

		ShowImages.showWindow(gui, "Point Cloud");
	}
}
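
The last step in the list above, converting the disparity image into a 3D point cloud, is delegated to PointCloudTiltPanel. For reference, the underlying geometry is the standard rectified-stereo relation z = f*B/d: depth equals focal length times baseline divided by disparity, and the pixel is then back-projected through the rectified calibration matrix. The sketch below illustrates this; printCloud is a hypothetical helper, and the invalid-pixel check reflects an assumption that pixels without a valid match are marked with values at or beyond the disparity range:

	public static void printCloud(ImageFloat32 disparity, DenseMatrix64F rectifiedK,
								  double baseline, int minDisparity, int maxDisparity) {
		// focal lengths and principal point from the rectified calibration matrix
		double fx = rectifiedK.get(0, 0);
		double fy = rectifiedK.get(1, 1);
		double cx = rectifiedK.get(0, 2);
		double cy = rectifiedK.get(1, 2);

		for (int y = 0; y < disparity.height; y++) {
			for (int x = 0; x < disparity.width; x++) {
				double d = disparity.get(x, y);

				// assumed convention: values at or beyond the range mean no valid match
				if (d >= maxDisparity - minDisparity)
					continue;

				// disparity values are stored relative to minDisparity
				d += minDisparity;
				if (d <= 0)
					continue;

				// depth from disparity, then back-project the pixel into 3D
				double Z = baseline * fx / d;
				double X = Z * (x - cx) / fx;
				double Y = Z * (y - cy) / fy;
				System.out.printf("%f %f %f%n", X, Y, Z);
			}
		}
	}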