Example Stereo Single Camera

In this scenario we wish to compute a dense point cloud from two views taken with the same calibrated camera. Because the intrinsic parameters are known, it is easier to converge towards a valid solution than in the entirely uncalibrated scenario.

Because the extrinsic parameters (translation and rotation) between the two views are unknown, the scene's structure can only be recovered up to a scale factor. In this example natural features are used to determine the geometric relationship between the two views. The algorithm can be summarized as follows:

Example Code

/** * Example demonstrating how to use to images taken from a single calibrated camera to create a stereo disparity image, * from which a dense 3D point cloud of the scene can be computed. For this technique to work the camera's motion * needs to be approximately tangential to the direction the camera is pointing. The code below assumes that the first * image is to the left of the second image. * * @author Peter Abeles */publicclassExampleStereoTwoViewsOneCamera{// Disparity calculation parametersprivatestaticfinalintminDisparity=15;privatestaticfinalintmaxDisparity=100;publicstaticvoidmain(Stringargs[]){// specify location of images and calibrationStringcalibDir=UtilIO.pathExample("calibration/mono/Sony_DSC-HX5V_Chess/");StringimageDir=UtilIO.pathExample("stereo/");// Camera parametersCameraPinholeRadialintrinsic=CalibrationIO.load(newFile(calibDir,"intrinsic.yaml"));// Input images from the camera moving left to rightBufferedImageorigLeft=UtilImageIO.loadImage(imageDir,"mono_wall_01.jpg");BufferedImageorigRight=UtilImageIO.loadImage(imageDir,"mono_wall_02.jpg");// Input images with lens distortionGrayU8distortedLeft=ConvertBufferedImage.convertFrom(origLeft,(GrayU8)null);GrayU8distortedRight=ConvertBufferedImage.convertFrom(origRight,(GrayU8)null);// matched features between the two imagesList<AssociatedPair>matchedFeatures=ExampleFundamentalMatrix.computeMatches(origLeft,origRight);// convert from pixel coordinates into normalized image coordinatesList<AssociatedPair>matchedCalibrated=convertToNormalizedCoordinates(matchedFeatures,intrinsic);// Robustly estimate camera motionList<AssociatedPair>inliers=newArrayList<>();Se3_F64leftToRight=estimateCameraMotion(intrinsic,matchedCalibrated,inliers);drawInliers(origLeft,origRight,intrinsic,inliers);// Rectify and remove lens distortion for stereo 
processingDMatrixRMajrectifiedK=newDMatrixRMaj(3,3);DMatrixRMajrectifiedR=newDMatrixRMaj(3,3);GrayU8rectifiedLeft=distortedLeft.createSameShape();GrayU8rectifiedRight=distortedRight.createSameShape();GrayU8rectifiedMask=distortedLeft.createSameShape();rectifyImages(distortedLeft,distortedRight,leftToRight,intrinsic,intrinsic,rectifiedLeft,rectifiedRight,rectifiedMask,rectifiedK,rectifiedR);// compute disparityStereoDisparity<GrayS16,GrayF32>disparityAlg=FactoryStereoDisparity.regionSubpixelWta(DisparityAlgorithms.RECT_FIVE,minDisparity,maxDisparity,5,5,20,1,0.1,GrayS16.class);// Apply the Laplacian across the image to add extra resistance to changes in lighting or camera gainGrayS16derivLeft=newGrayS16(rectifiedLeft.width,rectifiedLeft.height);GrayS16derivRight=newGrayS16(rectifiedLeft.width,rectifiedLeft.height);LaplacianEdge.process(rectifiedLeft,derivLeft);LaplacianEdge.process(rectifiedRight,derivRight);// process and return the resultsdisparityAlg.process(derivLeft,derivRight);GrayF32disparity=disparityAlg.getDisparity();RectifyImageOps.applyMask(disparity,rectifiedMask,0);// show resultsBufferedImagevisualized=VisualizeImageData.disparity(disparity,null,minDisparity,maxDisparity,0);BufferedImageoutLeft=ConvertBufferedImage.convertTo(rectifiedLeft,null);BufferedImageoutRight=ConvertBufferedImage.convertTo(rectifiedRight,null);ShowImages.showWindow(newRectifiedPairPanel(true,outLeft,outRight),"Rectification",true);ShowImages.showWindow(visualized,"Disparity",true);showPointCloud(disparity,outLeft,leftToRight,rectifiedK,rectifiedR,minDisparity,maxDisparity);System.out.println("Total found "+matchedCalibrated.size());System.out.println("Total Inliers "+inliers.size());}/** * Estimates the camera motion robustly using RANSAC and a set of associated points. 
* * @param intrinsic Intrinsic camera parameters * @param matchedNorm set of matched point features in normalized image coordinates * @param inliers OUTPUT: Set of inlier features from RANSAC * @return Found camera motion. Note translation has an arbitrary scale */publicstaticSe3_F64estimateCameraMotion(CameraPinholeRadialintrinsic,List<AssociatedPair>matchedNorm,List<AssociatedPair>inliers){ModelMatcherMultiview<Se3_F64,AssociatedPair>epipolarMotion=FactoryMultiViewRobust.baselineRansac(newConfigEssential(),newConfigRansac(200,0.5));epipolarMotion.setIntrinsic(0,intrinsic);epipolarMotion.setIntrinsic(1,intrinsic);if(!epipolarMotion.process(matchedNorm))thrownewRuntimeException("Motion estimation failed");// save inlier set for debugging purposesinliers.addAll(epipolarMotion.getMatchSet());returnepipolarMotion.getModelParameters();}/** * Convert a set of associated point features from pixel coordinates into normalized image coordinates. */publicstaticList<AssociatedPair>convertToNormalizedCoordinates(List<AssociatedPair>matchedFeatures,CameraPinholeRadialintrinsic){Point2Transform2_F64p_to_n=LensDistortionFactory.narrow(intrinsic).undistort_F64(true,false);List<AssociatedPair>calibratedFeatures=newArrayList<>();for(AssociatedPairp:matchedFeatures){AssociatedPairc=newAssociatedPair();p_to_n.compute(p.p1.x,p.p1.y,c.p1);p_to_n.compute(p.p2.x,p.p2.y,c.p2);calibratedFeatures.add(c);}returncalibratedFeatures;}/** * Remove lens distortion and rectify stereo images * * @param distortedLeft Input distorted image from left camera. * @param distortedRight Input distorted image from right camera. * @param leftToRight Camera motion from left to right * @param intrinsicLeft Intrinsic camera parameters * @param rectifiedLeft Output rectified image for left camera. * @param rectifiedRight Output rectified image for right camera. * @param rectifiedMask Mask that indicates invalid pixels in rectified image. 
1 = valid, 0 = invalid * @param rectifiedK Output camera calibration matrix for rectified camera */publicstatic<TextendsImageBase<T>>voidrectifyImages(TdistortedLeft,TdistortedRight,Se3_F64leftToRight,CameraPinholeRadialintrinsicLeft,CameraPinholeRadialintrinsicRight,TrectifiedLeft,TrectifiedRight,GrayU8rectifiedMask,DMatrixRMajrectifiedK,DMatrixRMajrectifiedR){RectifyCalibratedrectifyAlg=RectifyImageOps.createCalibrated();// original camera calibration matricesDMatrixRMajK1=PerspectiveOps.pinholeToMatrix(intrinsicLeft,(DMatrixRMaj)null);DMatrixRMajK2=PerspectiveOps.pinholeToMatrix(intrinsicRight,(DMatrixRMaj)null);rectifyAlg.process(K1,newSe3_F64(),K2,leftToRight);// rectification matrix for each imageDMatrixRMajrect1=rectifyAlg.getRect1();DMatrixRMajrect2=rectifyAlg.getRect2();rectifiedR.set(rectifyAlg.getRectifiedRotation());// New calibration matrix,rectifiedK.set(rectifyAlg.getCalibrationMatrix());// Adjust the rectification to make the view area more usefulRectifyImageOps.fullViewLeft(intrinsicLeft,rect1,rect2,rectifiedK);// undistorted and rectify imagesFMatrixRMajrect1_F32=newFMatrixRMaj(3,3);FMatrixRMajrect2_F32=newFMatrixRMaj(3,3);ConvertMatrixData.convert(rect1,rect1_F32);ConvertMatrixData.convert(rect2,rect2_F32);// Extending the image prevents a harsh edge reducing false matches at the image border// SKIP is another option, possibly a tinny bit faster, but has a harsh edge which will need to be filteredImageDistort<T,T>distortLeft=RectifyImageOps.rectifyImage(intrinsicLeft,rect1_F32,BorderType.EXTENDED,distortedLeft.getImageType());ImageDistort<T,T>distortRight=RectifyImageOps.rectifyImage(intrinsicRight,rect2_F32,BorderType.EXTENDED,distortedRight.getImageType());distortLeft.apply(distortedLeft,rectifiedLeft,rectifiedMask);distortRight.apply(distortedRight,rectifiedRight);}/** * Draw inliers for debugging purposes. Need to convert from normalized to pixel coordinates. 
*/publicstaticvoiddrawInliers(BufferedImageleft,BufferedImageright,CameraPinholeRadialintrinsic,List<AssociatedPair>normalized){Point2Transform2_F64n_to_p=LensDistortionFactory.narrow(intrinsic).distort_F64(false,true);List<AssociatedPair>pixels=newArrayList<>();for(AssociatedPairn:normalized){AssociatedPairp=newAssociatedPair();n_to_p.compute(n.p1.x,n.p1.y,p.p1);n_to_p.compute(n.p2.x,n.p2.y,p.p2);pixels.add(p);}// display the resultsAssociationPanelpanel=newAssociationPanel(20);panel.setAssociation(pixels);panel.setImages(left,right);ShowImages.showWindow(panel,"Inlier Features",true);}/** * Show results as a point cloud */publicstaticvoidshowPointCloud(ImageGraydisparity,BufferedImageleft,Se3_F64motion,DMatrixRMajrectifiedK,DMatrixRMajrectifiedR,intminDisparity,intmaxDisparity){DisparityToColorPointCloudd2c=newDisparityToColorPointCloud();doublebaseline=motion.getT().norm();d2c.configure(baseline,rectifiedK,rectifiedR,newDoNothing2Transform2_F64(),minDisparity,maxDisparity);d2c.process(disparity,left);CameraPinholerectifiedPinhole=PerspectiveOps.matrixToPinhole(rectifiedK,disparity.width,disparity.height,null);// skew the view to make the structure easier to seeSe3_F64cameraToWorld=SpecialEuclideanOps_F64.eulerXyz(-baseline*5,0,0,0,0.2,0,null);PointCloudViewerpcv=VisualizeData.createPointCloudViewer();pcv.setCameraHFov(PerspectiveOps.computeHFov(rectifiedPinhole));pcv.setCameraToWorld(cameraToWorld);pcv.setTranslationStep(baseline/3);pcv.addCloud(d2c.getCloud(),d2c.getCloudColor());pcv.setDotSize(1);pcv.setTranslationStep(baseline/10);pcv.getComponent().setPreferredSize(newDimension(left.getWidth(),left.getHeight()));ShowImages.showWindow(pcv.getComponent(),"Point Cloud",true);}}