Difference between revisions of "Example Multiview Uncalibrated Reconstruction Sparse"

From BoofCV
Jump to navigationJump to search
(Created page with "<center> {| | border|thumb|400px|start=0 |- | Pseudo color sparse reconstruction from uncalibrated input images. |} </cen...")
 
m
 
(7 intermediate revisions by the same user not shown)
Line 1: Line 1:
<center>
<center>
{|
{|
| [[file:Snow_tree.jpg|border|thumb|x400px]]
| [[file:Multi_view_uncalibrated_sparse_snowtree.mp4|border|thumb|400px|start=0]]
| [[file:Multi_view_uncalibrated_sparse_snowtree.mp4|border|thumb|400px|start=0]]
|-
|-
| Scene being reconstructed
| Pseudo color sparse reconstruction from uncalibrated input images.
| Pseudo color sparse reconstruction from uncalibrated input images.
|}  
|}  
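At a high level the example reduces to two calls: compute() performs feature association, pairwise graph construction, metric reconstruction, and bundle adjustment, while visualizeSparseCloud() displays the resulting cloud. A minimal usage sketch, taken from the example's own main function below:

<syntaxhighlight lang="java">
var example = new ExampleMultiViewSparseReconstruction();
// 'true' tells compute() the frames form an ordered video sequence, enabling the faster tracker-based matcher
example.compute("tree_snow_01.mp4", true);
example.visualizeSparseCloud();
</syntaxhighlight>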
Example Code:
* [https://github.com/lessthanoptimal/BoofCV/blob/v0.40/examples/src/main/java/boofcv/examples/reconstruction/ExampleMultiViewSparseReconstruction.java ExampleMultiViewSparseReconstruction.java]

Concepts:
* Structure from Motion
* Multiple View Stereo
* [[Example_Sparse_Bundle_Adjustment|Sparse Bundle Adjustment]]
* [[Example_Multi_Baseline_Stereo|Multi Baseline Stereo]]
* [[Example_Multiview_Reconstruction_Dense|Dense Multiview Reconstruction]]

Videos:
* [https://youtu.be/BbTPQ9mIoQU?t=6 Improvements in v0.38]

Tutorials:
* [[3D_Reconstruction_on_Desktop_Tutorial|Photogrammetry / 3D Reconstruction on Desktop]]

= Example Code =
<syntaxhighlight lang="java">
/**
 * Estimate scene parameters using a sparse set of features across uncalibrated images. In this example, a KLT
 * feature tracker will be used due to its speed and simplicity, even though there are some disadvantages
 * mentioned below. After image features have been tracked across the sequence we will first determine 3D
 * connectivity through two-view geometry, followed by a metric elevation, and then a final refinement
 * using bundle adjustment.
 *
 * This is unusual in that it will estimate intrinsic parameters from scratch with very few assumptions.
 * Most MVS software uses a database of known camera parameters to provide an initial seed, as this can simplify
 * the problem and make it more stable.
 *
 * @author Peter Abeles
 */
public class ExampleMultiViewSparseReconstruction {
	String workDirectory;
	List<String> imageFiles = new ArrayList<>();

	PairwiseImageGraph pairwise = null;
	LookUpSimilarImages dbSimilar;
	LookUpCameraInfo dbCams = new LookUpCameraInfo();
	SceneWorkingGraph working = null;
	SceneStructureMetric scene = null;

	boolean rebuild = false;

	public static void main( String[] args ) {
		var example = new ExampleMultiViewSparseReconstruction();
		example.compute("tree_snow_01.mp4", true);
//		example.compute("ditch_02.mp4", true);
//		example.compute("holiday_display_01.mp4", true);
//		example.compute("log_building_02.mp4", true);
//		example.compute("drone_park_01.mp4", false);
//		example.compute("stone_sign.mp4", true);
		example.visualizeSparseCloud();

		System.out.println("done");
	}

	public void compute( String videoName, boolean sequential ) {
		// Turn on threaded code for bundle adjustment
		DDoglegConcurrency.USE_CONCURRENT = true;

		// Create a directory to store the work space
		String path = UtilIO.pathExample("mvs/" + videoName);
		workDirectory = "mvs_work/" + FilenameUtils.getBaseName(videoName);

		// Attempt to reload intermediate results if previously computed
		if (!rebuild) {
			try {
				pairwise = MultiViewIO.load(new File(workDirectory, "pairwise.yaml").getPath(), (PairwiseImageGraph)null);
			} catch (UncheckedIOException ignore) {}

			try {
				working = MultiViewIO.load(new File(workDirectory, "working.yaml").getPath(), pairwise, null);
			} catch (UncheckedIOException ignore) {}

			try {
				scene = MultiViewIO.load(new File(workDirectory, "structure.yaml").getPath(), (SceneStructureMetric)null);
			} catch (UncheckedIOException ignore) {}
		}

		// Convert the video into an image sequence. Later on we will need to access the images in random order
		var imageDirectory = new File(workDirectory, "images");

		if (imageDirectory.exists()) {
			imageFiles = UtilIO.listSmart(String.format("glob:%s/images/*.png", workDirectory), true, ( f ) -> true);
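			// (the boolean argument requests sorted output, so frames keep their original temporal order)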
		} else {
			checkTrue(imageDirectory.mkdirs(), "Failed to image directory");
			SimpleImageSequence<InterleavedU8> sequence = DefaultMediaManager.INSTANCE.openVideo(path, ImageType.IL_U8);
			System.out.println("----------------------------------------------------------------------------");
			System.out.println("### Decoding Video");
			BoofMiscOps.profile(() -> {
				int frame = 0;
				while (sequence.hasNext()) {
					InterleavedU8 image = sequence.next();
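					// Zero pad the frame number so that sorting file names preserves temporal order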
					File imageFile = new File(imageDirectory, String.format("frame%04d.png", frame++));
					imageFiles.add(imageFile.getPath());
					// This is commented out for what appears to be a JRE bug.
					// V  [libjvm.so+0xdc4059]  SWPointer::SWPointer(MemNode*, SuperWord*, Node_Stack*, bool)
					UtilImageIO.saveImage(image, imageFile.getPath());
				}
			}, "Video Decoding");
		}

		// Only determine the visual relationship between images if needed
		if (pairwise == null || working == null) {
			if (sequential) {
				similarImagesFromSequence();
			} else {
				similarImagesFromUnsorted();
			}
		}

		if (pairwise == null)
			computePairwiseGraph();
		if (working == null)
			metricFromPairwise();
		if (scene == null)
			bundleAdjustmentRefine();

		var rod = new Rodrigues_F64();
		System.out.println("----------------------------------------------------------------------------");
		for (PairwiseImageGraph.View pv : pairwise.nodes.toList()) {
			if (!working.containsView(pv.id))
				continue;
			SceneWorkingGraph.View wv = working.lookupView(pv.id);
			int order = working.listViews.indexOf(wv);
			ConvertRotation3D_F64.matrixToRodrigues(wv.world_to_view.R, rod);
			BundlePinholeSimplified intrinsics = working.getViewCamera(wv).intrinsic;
			System.out.printf("view[%2d]='%2s' f=%6.1f k1=%6.3f k2=%6.3f T={%5.1f,%5.1f,%5.1f} R=%4.2f\n",
					order, wv.pview.id, intrinsics.f, intrinsics.k1, intrinsics.k2,
					wv.world_to_view.T.x, wv.world_to_view.T.y, wv.world_to_view.T.z, rod.theta);
		}
		System.out.println("   Views used: " + scene.views.size + " / " + pairwise.nodes.size);
	}

	/**
	 * For a pairwise graph to be constructed, image feature relationships between frames are needed. For a video
	 * sequence, KLT is an easy and fast way to do this. However, KLT will not "close the loop", and it will
	 * not realize you're back at the initial location. Typically this results in a noticeable misalignment.
	 */
	private void similarImagesFromSequence() {
		System.out.println("----------------------------------------------------------------------------");
		System.out.println("### Creating Similar Images from an ordered set of images");

		// Configure the KLT tracker
		ConfigPointTracker configTracker = FactorySceneRecognition.createDefaultTrackerConfig();

		PointTracker<GrayU8> tracker = FactoryPointTracker.tracker(configTracker, GrayU8.class, null);
		var activeTracks = new ArrayList<PointTrack>();

		var config = new ConfigSimilarImagesTrackThenMatch();
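		// TrackThenMatch runs a frame-to-frame tracker first, then uses scene recognition to connect
		// non-adjacent frames, giving the video sequence a degree of loop closure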

		final var dbSimilar = FactorySceneReconstruction.createTrackThenMatch(config, ImageType.SB_U8);
		dbSimilar.setVerbose(System.out, BoofMiscOps.hashSet(BoofVerbose.RECURSIVE));

		// Track features across the entire sequence and save the results
		BoofMiscOps.profile(() -> {
			boolean first = true;
			for (int frameId = 0; frameId < imageFiles.size(); frameId++) {
				String filePath = imageFiles.get(frameId);
				GrayU8 frame = UtilImageIO.loadImage(filePath, GrayU8.class);
				Objects.requireNonNull(frame, "Failed to load image");
				if (first) {
					first = false;
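					// Initialize the database and assume every frame comes from the same physical
					// camera, seeded with a canonical intrinsic guess from a 60 degree horizontal FOV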
					dbSimilar.initialize(frame.width, frame.height);
					dbCams.addCameraCanonical(frame.width, frame.height, 60.0);
				}

				tracker.process(frame);
				int activeCount = tracker.getTotalActive();
				int droppedCount = tracker.getDroppedTracks(null).size();
				tracker.spawnTracks();
				tracker.getActiveTracks(activeTracks);
				dbSimilar.processFrame(frame, activeTracks, tracker.getFrameID());
				String id = frameId + "";
				System.out.println("frame id = " + id + " active=" + activeCount + " dropped=" + droppedCount);

				// Everything maps to the same camera
				dbCams.addView(id, 0);
			}

			dbSimilar.finishedTracking();
		}, "Finding Similar");

		this.dbSimilar = dbSimilar;
	}

	/**
	 * Assumes that the images are completely unsorted
	 */
	private void similarImagesFromUnsorted() {
		System.out.println("----------------------------------------------------------------------------");
		System.out.println("### Creating Similar Images from unordered images");

		var config = new ConfigSimilarImagesSceneRecognition();
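		// Scene recognition matches images by appearance alone, so no ordering is assumed;
		// this costs more computation than sequential tracking but handles unordered photo sets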

		final var similarImages = FactorySceneReconstruction.createSimilarImages(config, ImageType.SB_U8);
		similarImages.setVerbose(System.out, BoofMiscOps.hashSet(BoofVerbose.RECURSIVE));

		// Describe every image and save the results in the similar image database
		BoofMiscOps.profile(() -> {
			for (int frameId = 0; frameId < imageFiles.size(); frameId++) {
				String filePath = imageFiles.get(frameId);
				GrayU8 frame = UtilImageIO.loadImage(filePath, GrayU8.class);
				Objects.requireNonNull(frame, "Failed to load image");

				String viewID = frameId + "";

				similarImages.addImage(viewID, frame);
				// Everything maps to the same camera
				if (frameId == 0)
					dbCams.addCameraCanonical(frame.width, frame.height, 60.0);
				dbCams.addView(viewID, 0);
			}

			similarImages.fixate();
		}, "Finding Similar");

		this.dbSimilar = similarImages;
	}

	/**
	 * This step attempts to determine which views have a 3D (not homographic) relationship with each other and which
	 * features are real and not fake.
	 */
	public void computePairwiseGraph() {
		System.out.println("----------------------------------------------------------------------------");
		System.out.println("### Creating Pairwise");
		var config = new ConfigGeneratePairwiseImageGraph();
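		// For each pair of connected images, this estimates epipolar geometry and decides if the motion is
		// truly 3D or degenerate, e.g. pure rotation or a planar scene better described by a homography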
		GeneratePairwiseImageGraph generatePairwise = FactorySceneReconstruction.generatePairwise(config);
		BoofMiscOps.profile(() -> {
			generatePairwise.setVerbose(System.out, BoofMiscOps.hashSet(BoofVerbose.RECURSIVE));
			generatePairwise.process(dbSimilar, dbCams);
		}, "Created Pairwise graph");
		pairwise = generatePairwise.getGraph();

		var savePath = new File(workDirectory, "pairwise.yaml");
		MultiViewIO.save(pairwise, savePath.getPath());
		System.out.println("  nodes.size=" + pairwise.nodes.size);
		System.out.println("  edges.size=" + pairwise.edges.size);
	}

	/**
	 * Next a metric reconstruction is attempted using views with a 3D relationship. This is a tricky step
	 * and works by finding clusters of views which are likely to have numerically stable results, then expanding
	 * the sparse metric reconstruction.
	 */
	public void metricFromPairwise() {
		System.out.println("----------------------------------------------------------------------------");
		System.out.println("### Metric Reconstruction");

		var metric = new MetricFromUncalibratedPairwiseGraph();
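		// Intrinsics are unknown, so a projective reconstruction is found first for a stable seed set of
		// views and then upgraded to metric using self calibration before the scene is expanded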
		metric.setVerbose(System.out, BoofMiscOps.hashSet(BoofVerbose.RECURSIVE));
		BoofMiscOps.profile(() -> {
			if (!metric.process(dbSimilar, dbCams, pairwise)) {
				System.err.println("Reconstruction failed");
				System.exit(0);
			}
		}, "Metric Reconstruction");

		working = metric.getLargestScene();

		var savePath = new File(workDirectory, "working.yaml");
		MultiViewIO.save(working, savePath.getPath());
	}

	/**
	 * Here the initial estimate found in the metric reconstruction is refined using Bundle Adjustment, which just
	 * means all parameters (camera, view pose, point location) are optimized all at once.
	 */
	public void bundleAdjustmentRefine() {
		System.out.println("----------------------------------------------------------------------------");
		System.out.println("Refining the scene");

		var refine = new RefineMetricWorkingGraph();
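		// Bundle adjustment jointly optimizes all camera intrinsics, view poses, and 3D point locations
		// by minimizing the reprojection error across every observation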
		BoofMiscOps.profile(() -> {
			// Bundle adjustment is run twice, with the worst 5% of points discarded in an attempt to reduce noise
			refine.metricSba.keepFraction = 0.95;
			refine.metricSba.getSba().setVerbose(System.out, null);
			if (!refine.process(dbSimilar, working)) {
				System.out.println("SBA REFINE FAILED");
			}
		}, "Bundle Adjustment refine");
		scene = refine.metricSba.structure;

		var savePath = new File(workDirectory, "structure.yaml");
		MultiViewIO.save(scene, savePath.getPath());
	}

	/**
	 * To visualize the results we will render a sparse point cloud along with the location of each camera in the
	 * scene.
	 */
	public void visualizeSparseCloud() {
		checkTrue(scene.isHomogenous());
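		// The scene stores points in homogeneous coordinates (x, y, z, w) so points at or near infinity
		// can still be represented; w is divided out below to get regular 3D points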
		List<Point3D_F64> cloudXyz = new ArrayList<>();
		Point4D_F64 world = new Point4D_F64();

		// NOTE: By default the colors found below are not used. Look below to see why and how to turn them on.
		//
		// Colorize the cloud by reprojecting the images. The math is straightforward, but there's a lot of
		// bookkeeping to do because of the scene data structure. A class is provided to make this process easy
		var imageLookup = new LookUpImageFilesByIndex(imageFiles);
		var colorize = new ColorizeMultiViewStereoResults<>(new LookUpColorRgbFormats.PL_U8(), imageLookup);

		DogArray_I32 rgb = new DogArray_I32();
		rgb.resize(scene.points.size);
		colorize.processScenePoints(scene,
				( viewIdx ) -> viewIdx + "", // String encodes the image's index
				( pointIdx, r, g, b ) -> rgb.set(pointIdx, (r << 16) | (g << 8) | b)); // Assign the RGB color

		// Convert the structure from homogeneous coordinates into regular 3D points
		for (int i = 0; i < scene.points.size; i++) {
			scene.points.get(i).get(world);
			// If the point is at infinity it's not clear what to do. Skipping it would be best, but then the
			// color array would be out of sync. Instead, just throw it far, far away.
			if (world.w == 0.0)
				cloudXyz.add(new Point3D_F64(0, 0, Double.MAX_VALUE));
			else
				cloudXyz.add(new Point3D_F64(world.x/world.w, world.y/world.w, world.z/world.w));
		}

		PointCloudViewer viewer = VisualizeData.createPointCloudViewer();
		viewer.setFog(true);
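		// Fog fades out distant points, making the nearby structure easier to see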
		// We just did a bunch of work to look up the true color of points. However, for sparse data it's easier
		// to see the structure with pseudo color. Comment out the line below to see the true color.
		viewer.setColorizer(new TwoAxisRgbPlane.Z_XY(1.0).fperiod(40));
		viewer.setDotSize(1);
		viewer.setTranslationStep(0.15);
		viewer.addCloud(( idx, p ) -> p.setTo(cloudXyz.get(idx)), rgb::get, rgb.size);
		viewer.setCameraHFov(UtilAngle.radian(60));

		SwingUtilities.invokeLater(() -> {
			// Show where the cameras are
			BoofSwingUtil.visualizeCameras(scene, viewer);

			// Size the window and show it to the user
			viewer.getComponent().setPreferredSize(new Dimension(600, 600));
			ShowImages.showWindow(viewer.getComponent(), "Refined Scene", true);

			var copy = new DogArray<>(Point3dRgbI_F64::new);
			viewer.copyCloud(copy);
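			// Save the displayed cloud to disk as a PLY file so it can be opened in other point cloud tools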

			try (var out = new FileOutputStream("saved_cloud.ply")) {
				PointCloudIO.save3D(PointCloudIO.Format.PLY, PointCloudReader.wrapF64RGB(copy.toList()), true, out);
			} catch (IOException e) {
				e.printStackTrace();
			}
		});
	}
}
</syntaxhighlight>