Example Multiview Reconstruction Dense
[Video: resulting point cloud. Red squares represent camera view locations.]
After the sparse reconstruction has been applied and the extrinsic and intrinsic parameters of the scene are known, the next step is to compute a dense reconstruction. Internally, key frames are selected to perform multi-baseline stereo on, and the resulting point clouds are combined into a single cloud.
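At its core the dense step boils down to three calls: configure, construct via a factory, and process the sparse scene. The sketch below is condensed from the full listing further down; scene, viewToId, and imageLookup are set up exactly as in that example.

 // Condensed sketch of the dense pipeline; see the full example below for the surrounding setup
 var config = new ConfigSparseToDenseCloud();
 SparseSceneToDenseCloud<GrayU8> sparseToDense =
 		FactorySceneReconstruction.sparseSceneToDenseCloud(config, ImageType.SB_U8);
 if (!sparseToDense.process(scene, viewToId, imageLookup))
 	throw new RuntimeException("Dense reconstruction failed!");
 List<Point3D_F64> cloud = sparseToDense.getCloud(); // colors are available via getColorRgb()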
Example Code:
* [https://github.com/lessthanoptimal/BoofCV/blob/v0.41/examples/src/main/java/boofcv/examples/reconstruction/ExampleMultiViewDenseReconstruction.java ExampleMultiViewDenseReconstruction.java]
Concepts:
* Structure from Motion
* [[Example_Sparse_Bundle_Adjustment|Sparse Bundle Adjustment]]
* [[Example_Multi_Baseline_Stereo|Multi Baseline Stereo]]
* [[Example_Multiview_Uncalibrated_Reconstruction_Sparse|Uncalibrated Sparse Reconstruction]]

Videos:
* [https://youtu.be/BbTPQ9mIoQU?t=6 Improvements in v0.38]

Tutorials:
* [[3D_Reconstruction_on_Desktop_Tutorial|Photogrammetry / 3D Reconstruction on Desktop]]
== Example Code ==
/**
 * A dense point cloud is created using a previously computed sparse reconstruction and a basic implementation of
 * multiview stereo (MVS). This approach to MVS works by identifying "center" views which have the best set of
 * neighbors for stereo computations using a heuristic. Then a global point cloud is created from the "center" view
 * disparity images while taking care to avoid adding duplicate points.
 *
 * @author Peter Abeles
 */
public class ExampleMultiViewDenseReconstruction {
	public static void main( String[] args ) {
		var example = new ExampleMultiViewSparseReconstruction();
//		example.compute("tree_snow_01.mp4", true);
//		example.compute("ditch_02.mp4", true);
//		example.compute("holiday_display_01.mp4", true);
		example.compute("log_building_02.mp4", true);
//		example.compute("drone_park_01.mp4", false);
//		example.compute("stone_sign.mp4", true);
		// Looks up images based on their index in the file list
		var imageLookup = new LookUpImageFilesByIndex(example.imageFiles);
		// We will use a high level algorithm that does almost all the work for us. It is highly configurable
		// and just about every parameter can be tweaked using its Config. Internal algorithms can be accessed
		// and customized directly if needed. Specifics of how it works are beyond this example, but the code
		// is easily accessible.
		// Let's do some custom configuration for this scenario
		var config = new ConfigSparseToDenseCloud();
		config.disparity.approach = ConfigDisparity.Approach.SGM;
		ConfigDisparitySGM configSgm = config.disparity.approachSGM;
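		// validateRtoL = 0 forces left-to-right and right-to-left disparities to agree exactly, the texture
		// check discards ambiguous matches in low texture regions, disparityRange sets how many disparity
		// values are searched (a larger range sees closer objects but costs more), and P4 aggregates costs
		// along 4 paths instead of 8, trading a little accuracy for speed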
		configSgm.validateRtoL = 0;
		configSgm.texture = 0.75;
		configSgm.disparityRange = 250;
		configSgm.paths = ConfigDisparitySGM.Paths.P4;
		configSgm.configBlockMatch.radiusX = 3;
		configSgm.configBlockMatch.radiusY = 3;
		// Create the sparse to dense reconstruction using a factory
		SparseSceneToDenseCloud<GrayU8> sparseToDense =
				FactorySceneReconstruction.sparseSceneToDenseCloud(config, ImageType.SB_U8);
		// Dense reconstruction takes a minute or two, so print verbose progress to watch while we wait
		sparseToDense.getMultiViewStereo().setVerbose(
				System.out, BoofMiscOps.hashSet(BoofVerbose.RECURSIVE, BoofVerbose.RUNTIME));
		// To visualize intermediate results we will add a listener. This will show fused disparity images
		sparseToDense.getMultiViewStereo().setListener(new MultiViewStereoFromKnownSceneStructure.Listener<>() {
			@Override
			public void handlePairDisparity( String left, String right, GrayU8 rect0, GrayU8 rect1,
											 GrayF32 disparity, DisparityParameters parameters ) {
				// Uncomment to display individual stereo pairs. Commented out by default because it generates
				// a LOT of windows
//				BufferedImage outLeft = ConvertBufferedImage.convertTo(rect0, null);
//				BufferedImage outRight = ConvertBufferedImage.convertTo(rect1, null);
//
//				ShowImages.showWindow(new RectifiedPairPanel(true, outLeft, outRight), "Rectification: "+left+" "+right);
//				BufferedImage colorized = VisualizeImageData.disparity(disparity, null, parameters.disparityRange, 0);
//				ShowImages.showWindow(colorized, "Disparity " + left + " " + right);
			}
			@Override
			public void handleFused( String name, GrayF32 inverseDepth ) {
				// You can also apply custom filtering to the inverse depth image in this function. If the
				// line below is uncommented, pixels whose inverse depth is above 1.0 (points closer than
				// one unit) will be marked as invalid
//				PixelMath.operator1(inverseDepth, ( v ) -> v <= 1.0f ? v : Float.NaN, inverseDepth);
				// Display the fused inverse depth image for each center view
				BufferedImage colorized = VisualizeImageData.inverseDepth(inverseDepth, null, 0f, 0);
				ShowImages.showWindow(colorized, "Center " + name);
			}
		});
		// The sparse-to-dense process needs a lookup table to go from SBA view index to image name so that
		// it can load images on demand when computing stereo disparity
		var viewToId = new TIntObjectHashMap<String>();
		BoofMiscOps.forIdx(example.working.listViews, ( workIdxI, wv ) -> viewToId.put(wv.index, wv.pview.id));
		if (!sparseToDense.process(example.scene, viewToId, imageLookup))
			throw new RuntimeException("Dense reconstruction failed!");
		saveCloudToDisk(sparseToDense);
		// Display the dense cloud
		visualizeInPointCloud(sparseToDense.getCloud(), sparseToDense.getColorRgb(), example.scene);
	}
	private static void saveCloudToDisk( SparseSceneToDenseCloud<GrayU8> sparseToDense ) {
		// Save the dense point cloud to disk in PLY format
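		// The resulting .ply file can be opened in third party tools such as MeshLab or CloudCompare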
		try (FileOutputStream out = new FileOutputStream("saved_cloud.ply")) {
			// Filter points which are far away to make it easier to view in 3rd party viewers that auto scale
			// You might need to adjust the threshold for your application if too many points are cut
			double distanceThreshold = 50.0;
			List<Point3D_F64> cloud = sparseToDense.getCloud();
			DogArray_I32 colorsRgb = sparseToDense.getColorRgb();
			DogArray<Point3dRgbI_F64> filtered = PointCloudUtils_F64.filter(
					( idx, p ) -> p.setTo(cloud.get(idx)), colorsRgb::get, cloud.size(),
					( idx ) -> cloud.get(idx).norm() <= distanceThreshold, null);
			PointCloudIO.save3D(PointCloudIO.Format.PLY, PointCloudReader.wrapF64RGB(filtered.toList()), true, out);
		} catch (IOException e) {
			e.printStackTrace();
		}
	}
	public static void visualizeInPointCloud( List<Point3D_F64> cloud, DogArray_I32 colorsRgb,
											  SceneStructureMetric structure ) {
		PointCloudViewer viewer = VisualizeData.createPointCloudViewer();
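		// Fog fades out distant points, reducing clutter from noisy far away geometry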
		viewer.setFog(true);
		viewer.setDotSize(1);
		viewer.setTranslationStep(0.15);
		viewer.addCloud(( idx, p ) -> p.setTo(cloud.get(idx)), colorsRgb::get, cloud.size());
		viewer.setCameraHFov(UtilAngle.radian(60));
		SwingUtilities.invokeLater(() -> {
			// Show where the cameras are
			BoofSwingUtil.visualizeCameras(structure, viewer);
			// Display the point cloud
			viewer.getComponent().setPreferredSize(new Dimension(600, 600));
			ShowImages.showWindow(viewer.getComponent(), "Dense Reconstruction Cloud", true);
		});
	}
}
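SGM was chosen above because it produces dense, high quality disparity images. If runtime matters more than quality, the same Config can select standard block matching instead. A minimal sketch, assuming the ConfigDisparityBM fields and DisparityError enum names shown below; check the Javadoc for your BoofCV version:

 // Hypothetical variant: block matching instead of SGM, trading some quality for speed
 var config = new ConfigSparseToDenseCloud();
 config.disparity.approach = ConfigDisparity.Approach.BLOCK_MATCH;
 ConfigDisparityBM configBM = config.disparity.approachBM;
 configBM.errorType = DisparityError.CENSUS; // census is robust to lighting differences between views
 configBM.disparityRange = 250;              // same search range as the SGM configuration above
 configBM.regionRadiusX = configBM.regionRadiusY = 4;

The rest of the pipeline is unchanged: pass the config to FactorySceneReconstruction.sparseSceneToDenseCloud() exactly as before.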