Example Stereo Single Camera

[Image: Associated inlier features between two views]
[Image: Stereo disparity image]
[Image: 3D point cloud]

A dense point cloud of an environment can be created from a single camera using two views. If the distance between the two views is not known, the scale of the resulting point cloud is arbitrary; the essential matrix recovered from image features determines the translation between the views only up to scale. In this example, natural image features are used to determine the geometric relationship between the two views. The algorithm can be summarized as follows (a condensed sketch of the data flow appears after the list):

  • Load camera calibration and two images
  • Detect, describe, and associate image features
  • Compute camera motion (Essential matrix)
  • Rectify image pair
  • Compute dense stereo disparity
  • Convert into 3D point cloud
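
The complete program is given under Example Code below. As a condensed sketch of the data flow, reusing the helper methods from the full example (variable declarations, image loading, and visualization elided):

	// camera motion from sparse features; translation is recovered only up to scale
	List<AssociatedPair> matched = ExampleFundamentalMatrix.computeMatches(origLeft, origRight);
	List<AssociatedPair> matchedNorm = convertToNormalizedCoordinates(matched, intrinsic);
	Se3_F64 leftToRight = estimateCameraMotion(intrinsic, matchedNorm, new ArrayList<>());

	// rectify the pair, compute dense disparity, then lift it into a 3D point cloud
	rectifyImages(distortedLeft, distortedRight, leftToRight, intrinsic, intrinsic,
			rectifiedLeft, rectifiedRight, rectifiedMask, rectifiedK, rectifiedR);
	disparityAlg.process(derivLeft, derivRight); // derivatives of the rectified images
	showPointCloud(disparityAlg.getDisparity(), outLeft, leftToRight, rectifiedK, rectifiedR, minDisparity, maxDisparity);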

Example File:

  • ExampleStereoTwoViewsOneCamera.java (https://github.com/lessthanoptimal/BoofCV/blob/v0.32/examples/src/main/java/boofcv/examples/stereo/ExampleStereoTwoViewsOneCamera.java)

Concepts:

  • Point feature association
  • Epipolar geometry
  • Rectification
  • Dense stereo processing

Related Tutorials/Example Code:

Example Code

/**
 * Example demonstrating how to use two images taken from a single calibrated camera to create a stereo disparity image,
 * from which a dense 3D point cloud of the scene can be computed.  For this technique to work, the camera's motion
 * needs to be approximately tangential to the direction the camera is pointing.  The code below assumes that the first
 * image is to the left of the second image.
 *
 * @author Peter Abeles
 */
public class ExampleStereoTwoViewsOneCamera {

	// Disparity calculation parameters
	private static final int minDisparity = 15;
	private static final int maxDisparity = 100;

	public static void main(String[] args) {
		// specify location of images and calibration
		String calibDir = UtilIO.pathExample("calibration/mono/Sony_DSC-HX5V_Chess/");
		String imageDir = UtilIO.pathExample("stereo/");

		// Camera parameters
		CameraPinholeRadial intrinsic = CalibrationIO.load(new File(calibDir, "intrinsic.yaml"));

		// Input images from the camera moving left to right
		BufferedImage origLeft = UtilImageIO.loadImage(imageDir, "mono_wall_01.jpg");
		BufferedImage origRight = UtilImageIO.loadImage(imageDir, "mono_wall_02.jpg");

		// Input images with lens distortion
		GrayU8 distortedLeft = ConvertBufferedImage.convertFrom(origLeft, (GrayU8) null);
		GrayU8 distortedRight = ConvertBufferedImage.convertFrom(origRight, (GrayU8) null);

		// matched features between the two images
		List<AssociatedPair> matchedFeatures = ExampleFundamentalMatrix.computeMatches(origLeft, origRight);

		// convert from pixel coordinates into normalized image coordinates
		List<AssociatedPair> matchedCalibrated = convertToNormalizedCoordinates(matchedFeatures, intrinsic);

		// Robustly estimate camera motion
		List<AssociatedPair> inliers = new ArrayList<>();
		Se3_F64 leftToRight = estimateCameraMotion(intrinsic, matchedCalibrated, inliers);
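		// NOTE: the translation in leftToRight has an arbitrary scale since it comes from an essential matrix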

		drawInliers(origLeft, origRight, intrinsic, inliers);

		// Rectify and remove lens distortion for stereo processing
		DMatrixRMaj rectifiedK = new DMatrixRMaj(3, 3);
		DMatrixRMaj rectifiedR = new DMatrixRMaj(3, 3);
		GrayU8 rectifiedLeft = distortedLeft.createSameShape();
		GrayU8 rectifiedRight = distortedRight.createSameShape();
		GrayU8 rectifiedMask = distortedLeft.createSameShape();

		rectifyImages(distortedLeft, distortedRight, leftToRight, intrinsic, intrinsic,
				rectifiedLeft, rectifiedRight, rectifiedMask, rectifiedK, rectifiedR);

		// compute disparity
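		// RECT_FIVE: winner-take-all over five overlapping rectangular regions with subpixel interpolation.
		// The arguments after the algorithm are min/max disparity, region radius x/y, max per-pixel error,
		// right-to-left validation tolerance, and a texture threshold for rejecting featureless regions.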
		StereoDisparity<GrayS16, GrayF32> disparityAlg =
				FactoryStereoDisparity.regionSubpixelWta(DisparityAlgorithms.RECT_FIVE,
						minDisparity, maxDisparity, 5, 5, 20, 1, 0.1, GrayS16.class);

		// Apply the Laplacian across the image to add extra resistance to changes in lighting or camera gain
		GrayS16 derivLeft = new GrayS16(rectifiedLeft.width,rectifiedLeft.height);
		GrayS16 derivRight = new GrayS16(rectifiedLeft.width,rectifiedLeft.height);
		LaplacianEdge.process(rectifiedLeft, derivLeft);
		LaplacianEdge.process(rectifiedRight,derivRight);

		// process and return the results
		disparityAlg.process(derivLeft, derivRight);
		GrayF32 disparity = disparityAlg.getDisparity();
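		// invalidate disparity values computed from border pixels which lie outside the original images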
		RectifyImageOps.applyMask(disparity, rectifiedMask, 0);

		// show results
		BufferedImage visualized = VisualizeImageData.disparity(disparity, null, minDisparity, maxDisparity, 0);

		BufferedImage outLeft = ConvertBufferedImage.convertTo(rectifiedLeft, null);
		BufferedImage outRight = ConvertBufferedImage.convertTo(rectifiedRight, null);

		ShowImages.showWindow(new RectifiedPairPanel(true, outLeft, outRight), "Rectification", true);
		ShowImages.showWindow(visualized, "Disparity", true);

		showPointCloud(disparity, outLeft, leftToRight, rectifiedK, rectifiedR, minDisparity, maxDisparity);

		System.out.println("Total found " + matchedCalibrated.size());
		System.out.println("Total Inliers " + inliers.size());
	}

	/**
	 * Estimates the camera motion robustly using RANSAC and a set of associated points.
	 *
	 * @param intrinsic   Intrinsic camera parameters
	 * @param matchedNorm set of matched point features in normalized image coordinates
	 * @param inliers     OUTPUT: Set of inlier features from RANSAC
	 * @return Found camera motion.  Note translation has an arbitrary scale
	 */
	public static Se3_F64 estimateCameraMotion(CameraPinholeRadial intrinsic,
											   List<AssociatedPair> matchedNorm, List<AssociatedPair> inliers)
	{
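		// estimate the essential matrix robustly: at most 200 RANSAC iterations with a 0.5 pixel inlier tolerance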
		ModelMatcherMultiview<Se3_F64, AssociatedPair> epipolarMotion =
				FactoryMultiViewRobust.baselineRansac(new ConfigEssential(), new ConfigRansac(200, 0.5));
		epipolarMotion.setIntrinsic(0, intrinsic);
		epipolarMotion.setIntrinsic(1, intrinsic);

		if (!epipolarMotion.process(matchedNorm))
			throw new RuntimeException("Motion estimation failed");

		// save inlier set for debugging purposes
		inliers.addAll(epipolarMotion.getMatchSet());

		return epipolarMotion.getModelParameters();
	}

	/**
	 * Convert a set of associated point features from pixel coordinates into normalized image coordinates.
	 */
	public static List<AssociatedPair> convertToNormalizedCoordinates(List<AssociatedPair> matchedFeatures, CameraPinholeRadial intrinsic) {

		// (true, false): input is in distorted pixel coordinates, output is in normalized image coordinates
		Point2Transform2_F64 p_to_n = LensDistortionFactory.narrow(intrinsic).undistort_F64(true, false);

		List<AssociatedPair> calibratedFeatures = new ArrayList<>();

		for (AssociatedPair p : matchedFeatures) {
			AssociatedPair c = new AssociatedPair();

			p_to_n.compute(p.p1.x, p.p1.y, c.p1);
			p_to_n.compute(p.p2.x, p.p2.y, c.p2);

			calibratedFeatures.add(c);
		}

		return calibratedFeatures;
	}

	/**
	 * Remove lens distortion and rectify stereo images
	 *
	 * @param distortedLeft  Input distorted image from left camera.
	 * @param distortedRight Input distorted image from right camera.
	 * @param leftToRight    Camera motion from left to right
	 * @param intrinsicLeft  Intrinsic camera parameters for the left camera
	 * @param intrinsicRight Intrinsic camera parameters for the right camera (the same camera in this example)
	 * @param rectifiedLeft  Output rectified image for left camera.
	 * @param rectifiedRight Output rectified image for right camera.
	 * @param rectifiedMask  Mask that indicates invalid pixels in rectified image. 1 = valid, 0 = invalid
	 * @param rectifiedK     Output camera calibration matrix for rectified camera
	 * @param rectifiedR     Output rotation matrix applied by rectification
	 */
	public static <T extends ImageBase<T>>
	void rectifyImages(T distortedLeft,
					   T distortedRight,
					   Se3_F64 leftToRight,
					   CameraPinholeRadial intrinsicLeft,
					   CameraPinholeRadial intrinsicRight,
					   T rectifiedLeft,
					   T rectifiedRight,
					   GrayU8 rectifiedMask,
					   DMatrixRMaj rectifiedK,
					   DMatrixRMaj rectifiedR) {
		RectifyCalibrated rectifyAlg = RectifyImageOps.createCalibrated();

		// original camera calibration matrices
		DMatrixRMaj K1 = PerspectiveOps.pinholeToMatrix(intrinsicLeft, (DMatrixRMaj)null);
		DMatrixRMaj K2 = PerspectiveOps.pinholeToMatrix(intrinsicRight, (DMatrixRMaj)null);

		rectifyAlg.process(K1, new Se3_F64(), K2, leftToRight);

		// rectification matrix for each image
		DMatrixRMaj rect1 = rectifyAlg.getRect1();
		DMatrixRMaj rect2 = rectifyAlg.getRect2();
		rectifiedR.set(rectifyAlg.getRectifiedRotation());
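		// the rectified rotation is needed later to place the point cloud in the original view's reference frame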

		// New calibration matrix
		rectifiedK.set(rectifyAlg.getCalibrationMatrix());

		// Adjust the rectification to make the view area more useful
		RectifyImageOps.fullViewLeft(intrinsicLeft, rect1, rect2, rectifiedK);

		// undistort and rectify the images
		FMatrixRMaj rect1_F32 = new FMatrixRMaj(3,3);
		FMatrixRMaj rect2_F32 = new FMatrixRMaj(3,3);
		ConvertMatrixData.convert(rect1, rect1_F32);
		ConvertMatrixData.convert(rect2, rect2_F32);

		// Extending the image border prevents a harsh edge, reducing false matches at the image border
		// SKIP is another option; it is possibly a tiny bit faster, but leaves a harsh edge that will need to be filtered out
		ImageDistort<T,T> distortLeft =
				RectifyImageOps.rectifyImage(intrinsicLeft, rect1_F32, BorderType.EXTENDED, distortedLeft.getImageType());
		ImageDistort<T,T> distortRight =
				RectifyImageOps.rectifyImage(intrinsicRight, rect2_F32, BorderType.EXTENDED, distortedRight.getImageType());

		distortLeft.apply(distortedLeft, rectifiedLeft, rectifiedMask);
		distortRight.apply(distortedRight, rectifiedRight);
	}

	/**
	 * Draw inliers for debugging purposes.  Need to convert from normalized to pixel coordinates.
	 */
	public static void drawInliers(BufferedImage left, BufferedImage right, CameraPinholeRadial intrinsic,
								   List<AssociatedPair> normalized) {
		// (false, true): input is in normalized image coordinates, output is in distorted pixel coordinates
		Point2Transform2_F64 n_to_p = LensDistortionFactory.narrow(intrinsic).distort_F64(false, true);

		List<AssociatedPair> pixels = new ArrayList<>();

		for (AssociatedPair n : normalized) {
			AssociatedPair p = new AssociatedPair();

			n_to_p.compute(n.p1.x, n.p1.y, p.p1);
			n_to_p.compute(n.p2.x, n.p2.y, p.p2);

			pixels.add(p);
		}

		// display the results
		AssociationPanel panel = new AssociationPanel(20);
		panel.setAssociation(pixels);
		panel.setImages(left, right);

		ShowImages.showWindow(panel, "Inlier Features", true);
	}

	/**
	 * Show results as a point cloud
	 */
	public static void showPointCloud(ImageGray disparity, BufferedImage left,
									  Se3_F64 motion, DMatrixRMaj rectifiedK , DMatrixRMaj rectifiedR,
									  int minDisparity, int maxDisparity)
	{
		DisparityToColorPointCloud d2c = new DisparityToColorPointCloud();
		double baseline = motion.getT().norm();
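		// a do-nothing transform is used since the color image passed in below is already rectified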
		d2c.configure(baseline, rectifiedK, rectifiedR, new DoNothing2Transform2_F64(), minDisparity, maxDisparity);
		d2c.process(disparity, left);

		CameraPinhole rectifiedPinhole = PerspectiveOps.matrixToPinhole(rectifiedK,disparity.width,disparity.height,null);

		// skew the view to make the structure easier to see
		Se3_F64 cameraToWorld = SpecialEuclideanOps_F64.eulerXyz(-baseline*5,0,0,0,0.2,0,null);

		PointCloudViewer pcv = VisualizeData.createPointCloudViewer();
		pcv.setCameraHFov(PerspectiveOps.computeHFov(rectifiedPinhole));
		pcv.setCameraToWorld(cameraToWorld);
		pcv.addCloud(d2c.getCloud(),d2c.getCloudColor());
		pcv.setDotSize(1);
		pcv.setTranslationStep(baseline/10);

		pcv.getComponent().setPreferredSize(new Dimension(left.getWidth(), left.getHeight()));
		ShowImages.showWindow(pcv.getComponent(), "Point Cloud", true);
	}
}
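
When run, the example opens windows showing the associated inlier features, the rectified image pair, the disparity image, and an interactive 3D point cloud, and prints the number of matched features and inliers to the console.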