Merge branch 'svn/trunk'

Conflicts:
	gtsam/navigation/tests/testInertialNavFactor_GlobalVelocity.cpp
	gtsam/nonlinear/ISAM2.cpp
	gtsam/nonlinear/ISAM2.h
	gtsam/slam/tests/testBetweenFactor.cpp
	gtsam_unstable/nonlinear/ConcurrentBatchFilter.cpp
	gtsam_unstable/nonlinear/tests/testConcurrentBatchFilter.cpp
	gtsam_unstable/nonlinear/tests/testConcurrentBatchSmoother.cpp
	gtsam_unstable/slam/SmartProjectionFactor.h
release/4.3a0
Richard Roberts 2013-08-11 18:16:56 +00:00
commit 4585fd1caa
49 changed files with 55213 additions and 2333 deletions

File diff suppressed because it is too large.

View File

@@ -0,0 +1,2 @@
BodyPtx BodyPty BodyPtz BodyPrx BodyPry BodyPrz AccelerometerSigma GyroscopeSigma IntegrationSigma AccelerometerBiasSigma GyroscopeBiasSigma AverageDeltaT
0 0 0 0 0 0 0.01 0.000175 0 0.000167 2.91e-006 0.0100395199348279

examples/Data/KittiGps.txt (new file, 468 lines)
View File

@@ -0,0 +1,468 @@
Time Latitude Longitude Altitude PositionSigma
46534.47837579 48.982530923568 8.3903459539796 116.43446350098 0.072138755187486
46536.397971133 48.982637737646 8.3904395756801 116.39415740967 0.077077882690173
46537.40790448 48.982638403644 8.3904400660267 116.39430236816 0.077077882690173
46538.417854613 48.982639054039 8.3904406077418 116.39440917969 0.077077882690173
46539.427814058 48.982639701333 8.3904411470687 116.39470672607 0.077077882690173
46540.437704157 48.982640368632 8.3904416416362 116.39482116699 0.077077882690173
46541.447571055 48.982641020649 8.3904421826205 116.39489746094 0.077077882690173
46542.45750414 48.982641670789 8.3904427238241 116.39487457275 0.077077882690173
46543.467420571 48.98264233992 8.3904432196704 116.39485168457 0.077077882690173
46544.477186399 48.98264299402 8.3904437631157 116.39492034912 0.077077882690173
46545.487146318 48.982643646028 8.3904443016455 116.39477539062 0.077077882690173
46546.497019056 48.982644318256 8.390444792625 116.39532470703 0.077077882690173
46547.506781063 48.982644974973 8.3904453372047 116.39517974854 0.077077882690173
46548.516637843 48.9826456288 8.3904458799874 116.39517211914 0.077077882690173
46549.52655747 48.982646302123 8.3904463742274 116.39556884766 0.077077882690173
46550.536380144 48.982646960728 8.3904469172003 116.39527130127 0.077077882690173
46551.546297813 48.982647617358 8.3904474633594 116.39515686035 0.077077882690173
46552.556134492 48.982648291715 8.390447951888 116.39591217041 0.077077882690173
46553.556620892 48.982648952976 8.3904484941506 116.39554595947 0.077077882690173
46554.565888081 48.98264961178 8.3904490383148 116.39540100098 0.077077882690173
46555.575787206 48.982650288455 8.3904495289266 116.39614868164 0.077077882690173
46556.585675987 48.982650952186 8.3904500721994 116.39587402344 0.077077882690173
46557.595615531 48.982651613635 8.3904506146677 116.39552307129 0.077077882690173
46558.605522807 48.982652292496 8.3904511069163 116.39621734619 0.077077882690173
46559.615373617 48.982652958406 8.3904516515705 116.39595794678 0.077077882690173
46560.625244993 48.98265362289 8.3904521942027 116.39569091797 0.077077882690173
46561.635143034 48.982654303591 8.3904526887944 116.3963470459 0.077077882690173
46562.644985218 48.982654971673 8.3904532335878 116.39611053467 0.077077882690173
46563.654853868 48.982655638509 8.3904537785253 116.39585113525 0.077794601355107
46564.664840181 48.982656320994 8.3904542803316 116.39681243896 0.077794601355107
46565.67472556 48.982656991926 8.3904548257033 116.39672088623 0.077794601355107
46566.684600982 48.982657660454 8.3904553709421 116.39641571045 0.077794601355107
46567.69445871 48.982658345315 8.3904558745861 116.39734649658 0.077794601355107
46568.694752963 48.98265901787 8.3904564227676 116.39722442627 0.077794601355107
46569.704205622 48.982659689239 8.3904569673181 116.39707946777 0.077794601355107
46570.714115042 48.982660375331 8.3904574734588 116.39790344238 0.077794601355107
46571.7240302097 48.982661049731 8.3904580208474 116.3977355957 0.077794601355107
46572.733803953 48.98266172296 8.3904585678131 116.39752197266 0.077794601355107
46573.743778622 48.982662410871 8.390459075055 116.39833068848 0.077794601355107
46574.75366829 48.982663088101 8.39045962353 116.39832305908 0.077794601355107
46575.763502718 48.982663762721 8.3904601698486 116.39807891846 0.077794601355107
46576.773425433 48.982664457595 8.3904606793942 116.39806365967 0.077794601355107
46577.783263421 48.982665137122 8.3904612276202 116.39820098877 0.077794601355107
46578.793129246 48.982665814413 8.3904617778371 116.39828491211 0.077794601355107
46579.803019797 48.982666511107 8.3904622895775 116.39846801758 0.077794601355107
46580.812971397 48.982667192196 8.3904628384972 116.39878845215 0.077794601355107
46581.812973661 48.98266787212 8.3904633886502 116.39900970459 0.077794601355107
46582.822724791 48.982668570161 8.390463902026 116.3994140625 0.077794601355107
46583.834990833 48.982669253451 8.3904644522753 116.40001678467 0.077794601355107
46584.842505808 48.982669935469 8.390465001081 116.40031433105 0.077794601355107
46585.852380614 48.982670635089 8.3904655169598 116.40087127686 0.077794601355107
46586.862289581 48.982671320324 8.3904660691134 116.40154266357 0.077794601355107
46587.872126551 48.982672004277 8.3904666182046 116.40216064453 0.077794601355107
46588.881994553 48.982672705124 8.3904671356213 116.40279388428 0.077794601355107
46589.891872729 48.982673391568 8.3904676893295 116.40348052979 0.077794601355107
46590.901792241 48.982674077002 8.3904682416209 116.40419006348 0.077794601355107
46591.911956265 48.982674778546 8.3904687629936 116.40480804443 0.077794601355107
46592.921705223 48.98267546617 8.3904693180609 116.40550231934 0.077794601355107
46593.931564271 48.982676152869 8.3904698731146 116.40628814697 0.077794601355107
46594.941532522 48.982676854847 8.3904703986159 116.40700531006 0.077794601355107
46595.951309096 48.982677544021 8.3904709557016 116.40765380859 0.077794601355107
46596.961135064 48.982678232159 8.390471510579 116.408203125 0.077794601355107
46597.96117884 48.982678935435 8.390472038199 116.40875244141 0.077794601355107
46598.970937122 48.982679625896 8.3904725992791 116.40939331055 0.077794601355107
46599.980772432 48.982680315502 8.390473152759 116.4098815918 0.077794601355107
46600.990674182 48.982681019694 8.3904736808386 116.41033172607 0.077794601355107
46602.000546794 48.982681712314 8.3904742392497 116.41079711914 0.077794601355107
46603.010428562 48.98268240283 8.3904747981971 116.41142272949 0.077794601355107
46604.010461353 48.982683108505 8.390475326022 116.4118270874 0.077794601355107
46605.020209853 48.982683803362 8.3904758829458 116.41233062744 0.077794601355107
46606.030071765 48.982684495722 8.3904764410565 116.41277313232 0.077794601355107
46607.039984219 48.982685203059 8.3904769692928 116.41313934326 0.077794601355107
46608.049829571 48.982685899049 8.390477526392 116.41352081299 0.077794601355107
46609.059760065 48.982686593859 8.3904780844487 116.41412353516 0.077794601355107
46610.069659515 48.982687302187 8.3904786145822 116.41454315186 0.077794601355107
46611.079473921 48.982687999556 8.390479170931 116.41506958008 0.077794601355107
46612.089363448 48.98268869621 8.3904797292032 116.41542816162 0.077794601355107
46613.099313545 48.982689405787 8.3904802605124 116.41582489014 0.077794601355107
46614.102154941 48.982690104595 8.3904808185335 116.41622924805 0.077794601355107
46615.109145674 48.982690801524 8.3904813772106 116.4167175293 0.077794601355107
46616.118929637 48.982691516842 8.3904819149795 116.41648864746 0.077794601355107
46617.128767067 48.982692216175 8.390482472939 116.41668701172 0.077794601355107
46618.128793577 48.982692914656 8.3904830330916 116.41716003418 0.077794601355107
46619.138653093 48.982693630101 8.3904835718059 116.41682434082 0.077794601355107
46620.148433572 48.982694329952 8.39048413237 116.41715240479 0.077794601355107
46621.158409799 48.982695029829 8.3904846892525 116.4174118042 0.077794601355107
46622.168254522 48.982695746041 8.3904852284736 116.41710662842 0.077794601355107
46623.178126988 48.982696447894 8.3904857887985 116.41732025146 0.077794601355107
46624.18797132 48.982697147566 8.3904863495611 116.4176940918 0.077794601355107
46625.19785557 48.982697862274 8.3904868923042 116.41771697998 0.077794601355107
46626.207807668 48.982698564572 8.3904874531129 116.41793060303 0.077794601355107
46627.217644285 48.982699266509 8.3904880136621 116.41813659668 0.077794601355107
46628.22749807 48.982699982365 8.3904885568117 116.41814422607 0.077794601355107
46629.237868029 48.982700686765 8.3904891177477 116.41827392578 0.077794601355107
46630.247291053 48.982701388413 8.3904896796788 116.41846466064 0.077794601355107
46631.257256005 48.982702104738 8.390490224735 116.41840362549 0.077794601355107
46632.267067061 48.982702810016 8.3904907878194 116.4185256958 0.077794601355107
46633.27788599 48.982703514427 8.3904913485087 116.41865539551 0.077794601355107
46634.28681835 48.982704232022 8.390491893636 116.41865539551 0.077794601355107
46635.296706212 48.982704938802 8.3904924550809 116.41886901855 0.077794601355107
46636.306687485 48.982705643692 8.3904930190657 116.4189453125 0.077794601355107
46637.306702181 48.98270635393 8.3904935580737 116.41944885254 0.077794601355107
46638.316398116 48.982707062371 8.3904941215543 116.41974639893 0.077794601355107
46639.326233059 48.982707768731 8.3904946831525 116.41985321045 0.077794601355107
46640.336191046 48.982708480501 8.3904952233969 116.42046356201 0.077794601355107
46641.346018681 48.98270918972 8.3904957886068 116.42091369629 0.077794601355107
46642.35589624 48.982709898293 8.3904963515336 116.42092132568 0.077794601355107
46643.365804099 48.982710611396 8.3904968929765 116.42154693604 0.077794601355107
46644.375727679 48.982711323247 8.3904974571766 116.42179870605 0.077794601355107
46645.38556534 48.982712031755 8.3904980234355 116.42211914062 0.077794601355107
46646.395462191 48.982712746212 8.3904985774143 116.42224121094 0.077794601355107
46647.405516799 48.98271345917 8.3904991421973 116.422706604 0.077794601355107
46648.415648639 48.982714170634 8.3904997058756 116.42281341553 0.077794601355107
46649.425187386 48.982714887069 8.3905002574485 116.42311096191 0.077794601355107
46650.435206854 48.98271560178 8.3905008217311 116.42374420166 0.077794601355107
46651.445116298 48.98271631494 8.3905013857879 116.42406463623 0.077794601355107
46652.455663295 48.982717033231 8.3905019384399 116.42446136475 0.077794601355107
46653.464935523 48.982717750267 8.3905025026417 116.42520141602 0.077794601355107
46654.474778807 48.98271846498 8.3905030658218 116.42575073242 0.077794601355107
46655.484473355 48.982719184701 8.3905036182469 116.42624664307 0.077794601355107
46656.49435535 48.982719902697 8.3905041826247 116.42699432373 0.077794601355107
46657.504209941 48.98272061995 8.3905047464476 116.4275894165 0.077794601355107
46658.514078193 48.982721342179 8.3905052991025 116.42905426025 0.077794601355107
46659.52399239 48.982722061747 8.3905058640669 116.42966461182 0.077794601355107
46660.533888423 48.982722779964 8.3905064287348 116.43035888672 0.077794601355107
46661.54378329 48.982723503072 8.3905069832569 116.43170166016 0.077794601355107
46662.553619734 48.982724223218 8.3905075505968 116.43222045898 0.077794601355107
46663.56379414 48.9827249431 8.3905081163512 116.43286895752 0.077794601355107
46664.573380882 48.982725667174 8.3905086725439 116.43411254883 0.080622577482985
46665.583266361 48.982726388557 8.390509240528 116.43452453613 0.080622577482985
46666.593140868 48.982727108761 8.390509810321 116.43518066406 0.080622577482985
46667.603103764 48.982727830071 8.3905103611481 116.43701171875 0.080622577482985
46668.612934287 48.982728552569 8.3905109291148 116.43733215332 0.080622577482985
46669.613003869 48.982729274304 8.3905114990437 116.4377822876 0.080622577482985
46670.622748418 48.982729996671 8.390512052271 116.43941497803 0.080622577482985
46671.632607071 48.982730719421 8.3905126248211 116.43965911865 0.080622577482985
46672.642467511 48.982731442801 8.3905131910452 116.43985748291 0.080622577482985
46673.652359638 48.982732166213 8.3905137453647 116.44117736816 0.080622577482985
46674.662247872 48.982732890397 8.3905143198525 116.44119262695 0.080622577482985
46675.672122831 48.982733613499 8.3905148908076 116.44144439697 0.080622577482985
46676.682018227 48.982734344095 8.390515453764 116.44216918945 0.080622577482985
46677.691919221 48.982735069854 8.3905160275404 116.44203186035 0.080622577482985
46678.701803419 48.982735794025 8.3905166032058 116.4421081543 0.080622577482985
46679.711725398 48.982736525492 8.3905171684714 116.44271087646 0.080622577482985
46680.721551119 48.982737252224 8.390517743962 116.44248199463 0.080622577482985
46681.731437022 48.982737977929 8.3905183197274 116.44247436523 0.080622577482985
46682.731523463 48.982738710292 8.3905188872024 116.44300842285 0.080622577482985
46683.741223296 48.982739438336 8.3905194629077 116.44267272949 0.080622577482985
46684.751092799 48.982740164973 8.3905200409068 116.44255065918 0.080622577482985
46685.760970043 48.982740898293 8.3905206104841 116.44291687012 0.080622577482985
46686.771997213 48.982741627001 8.39052118928 116.44258117676 0.080622577482985
46687.780750598 48.98274235533 8.3905217648773 116.4421081543 0.080622577482985
46688.790608805 48.982743080328 8.3905223225781 116.44178771973 0.080622577482985
46689.800647325 48.982743810502 8.3905229031653 116.44135284424 0.080622577482985
46690.811736869 48.982744539367 8.3905234810354 116.44080352783 0.080622577482985
46691.820296146 48.98274526574 8.3905240413504 116.44033813477 0.080622577482985
46692.830162569 48.982745997378 8.3905246227375 116.43975830078 0.080622577482985
46693.840049869 48.9827467273 8.3905252025325 116.43918609619 0.080622577482985
46694.849967565 48.982747454663 8.3905257645901 116.43859863281 0.080622577482985
46695.859846428 48.98274818697 8.3905263473977 116.43801879883 0.080622577482985
46696.869725717 48.982748918708 8.3905269278921 116.4372177124 0.080622577482985
46697.879597785 48.98274964756 8.3905274914763 116.43655395508 0.080622577482985
46698.88948373 48.982750381739 8.3905280749159 116.43585205078 0.080622577482985
46699.899595957 48.982751113725 8.390528657902 116.43515777588 0.080622577482985
46700.909405283 48.982751844104 8.3905292237077 116.43448638916 0.080622577482985
46701.919305179 48.982752579636 8.3905298073703 116.43374633789 0.080622577482985
46702.929154081 48.982753313249 8.3905303911038 116.43297576904 0.080622577482985
46703.939099211 48.9827540451 8.3905309576531 116.43218994141 0.080622577482985
46704.948851642 48.982754781208 8.3905315422152 116.43134307861 0.080622577482985
46705.958749245 48.98275551619 8.3905321274067 116.43042755127 0.080622577482985
46706.968621846 48.982756254793 8.390532702215 116.43008422852 0.080622577482985
46707.978487585 48.982756992215 8.3905332883045 116.42916870117 0.080622577482985
46708.988386354 48.982757727889 8.3905338743741 116.4281463623 0.080622577482985
46709.98885519 48.982758467237 8.3905344514471 116.42772674561 0.080622577482985
46710.998316553 48.982759205108 8.3905350400643 116.42688751221 0.080622577482985
46712.008057874 48.982759942394 8.3905356257311 116.42567443848 0.080622577482985
46713.017859623 48.982760682746 8.3905362038632 116.4252166748 0.080622577482985
46714.027834517 48.982761421614 8.390536795143 116.42449188232 0.080622577482985
46715.037669162 48.982762158986 8.3905373825201 116.42335510254 0.080622577482985
46716.047525746 48.982762899914 8.3905379621873 116.42309570312 0.080622577482985
46717.057423688 48.982763639767 8.3905385529117 116.42258453369 0.080622577482985
46718.067294693 48.982764378796 8.3905391439387 116.42183685303 0.080622577482985
46719.077221407 48.982765120405 8.3905397167105 116.42156982422 0.080622577482985
46720.087086385 48.982765861944 8.3905403089978 116.42120361328 0.080622577482985
46721.096971349 48.982766601925 8.3905408998404 116.42071533203 0.080622577482985
46722.106850907 48.982767344795 8.3905414751178 116.42057037354 0.080622577482985
46723.116788501 48.982768087474 8.3905420703077 116.42043304443 0.080622577482985
46724.126656325 48.982768829317 8.3905426637012 116.42024230957 0.080622577482985
46725.136521327 48.982769573788 8.3905432417753 116.42042541504 0.080622577482985
46726.146444588 48.982770317331 8.3905438374788 116.42044067383 0.080622577482985
46727.15631358 48.982771059963 8.390544436255 116.4204864502 0.080622577482985
46728.166175031 48.982771804898 8.3905450183055 116.42080688477 0.080622577482985
46729.176043595 48.982772549197 8.3905456176303 116.42095947266 0.080622577482985
46730.185902329 48.982773293658 8.3905462144165 116.42107391357 0.080622577482985
46731.195981882 48.982774039986 8.3905467983712 116.42158508301 0.080622577482985
46732.205726467 48.982774785338 8.3905473988558 116.42194366455 0.080622577482985
46733.215597349 48.982775530703 8.3905480001049 116.42219543457 0.080622577482985
46734.2254705084 48.982776278075 8.3905485866776 116.42295074463 0.080622577482985
46735.235344035 48.982777025389 8.3905491888619 116.4235534668 0.080622577482985
46736.245271197 48.982777771861 8.3905497891934 116.42386627197 0.080622577482985
46737.255117766 48.982778516832 8.3905503657002 116.4239730835 0.080622577482985
46738.2650876519 48.982779264985 8.3905509706213 116.42463684082 0.080622577482985
46739.274940029 48.982780013031 8.3905515715727 116.42515563965 0.080622577482985
46740.284789218 48.982780759009 8.3905521500366 116.4253692627 0.080622577482985
46741.294848806 48.982781509121 8.3905527535196 116.42593383789 0.080622577482985
46742.304675941 48.982782258002 8.3905533576055 116.42655181885 0.080622577482985
46743.314413093 48.98278300548 8.3905539370032 116.42666625977 0.080622577482985
46744.32430281 48.982783755614 8.3905545427999 116.42716217041 0.080622577482985
46745.334199889 48.982784506718 8.3905551465813 116.42783355713 0.080622577482985
46746.344135909 48.982785254644 8.3905557296408 116.4280166626 0.080622577482985
46747.353994077 48.98278600651 8.3905563333539 116.42870330811 0.080622577482985
46748.363884985 48.982786757263 8.3905569395728 116.42921447754 0.080622577482985
46749.374031768 48.982787507124 8.3905575218474 116.42951202393 0.080622577482985
46750.383688991 48.982788259217 8.3905581282131 116.43003845215 0.080622577482985
46751.393653253 48.982789011669 8.3905587318394 116.43068695068 0.080622577482985
46752.403473971 48.982789760939 8.390559317251 116.43074035645 0.080622577482985
46753.403602427 48.982790510813 8.3905599249955 116.43103027344 0.080622577482985
46754.413188468 48.982791263546 8.3905605333162 116.43153381348 0.080622577482985
46755.423061958 48.982792012038 8.3905611229085 116.43146514893 0.080622577482985
46756.423204203 48.982792764413 8.3905617318241 116.43164825439 0.080622577482985
46757.432983107 48.982793513558 8.390562340213 116.43164825439 0.080622577482985
46758.4429261 48.982794263593 8.3905629296594 116.43125152588 0.080622577482985
46759.452664919 48.982795014317 8.3905635432651 116.43118286133 0.080622577482985
46760.462763171 48.982795765668 8.3905641497346 116.43118286133 0.080622577482985
46761.472614316 48.982796514489 8.3905647410515 116.43064117432 0.080622577482985
46762.482352886 48.982797267491 8.3905653514117 116.43017578125 0.080622577482985
46763.492344408 48.982798016499 8.3905659661196 116.4301071167 0.080622577482985
46764.502100082 48.9827987666 8.3905665590305 116.42923736572 0.080622577482985
46765.511960054 48.982799519896 8.3905671714197 116.42854309082 0.082036577207975
46766.521836002 48.9828002724 8.3905677820126 116.42807006836 0.082036577207975
46767.531690085 48.982801023669 8.3905683748596 116.42696380615 0.082036577207975
46768.54155003 48.982801777889 8.3905689851418 116.4260559082 0.082036577207975
46769.551447192 48.982802530802 8.3905695985423 116.42537689209 0.082036577207975
46770.56136825 48.982803283213 8.3905701908568 116.42419433594 0.082036577207975
46771.5712437513 48.982804038583 8.3905707999903 116.42325592041 0.082036577207975
46772.581105489 48.982804793329 8.3905714110531 116.42241668701 0.082036577207975
46773.590988261 48.982805548339 8.3905720025241 116.42127990723 0.082036577207975
46774.590998862 48.982806307235 8.3905726081534 116.42029571533 0.082036577207975
46775.601510578 48.982807062474 8.3905732182174 116.41941833496 0.082036577207975
46776.610638865 48.982807819364 8.3905738073227 116.41822052002 0.082036577207975
46777.620520041 48.982808578608 8.3905744129125 116.41734313965 0.082036577207975
46778.630385861 48.982809337892 8.3905750184521 116.41647338867 0.082036577207975
46779.640309875 48.982810096311 8.3905756068121 116.41547393799 0.082036577207975
46780.650185468 48.982810857417 8.3905762113437 116.41484832764 0.082036577207975
46781.660044356 48.982811616118 8.3905768176856 116.41408538818 0.082036577207975
46782.66994556 48.982812374742 8.3905774081454 116.41355895996 0.082036577207975
46783.679818911 48.982813139676 8.390578011424 116.41313934326 0.082036577207975
46784.689851967 48.982813900691 8.3905786160058 116.41234588623 0.082036577207975
46785.69960609 48.982814661447 8.3905792069628 116.41174316406 0.082036577207975
46786.709503172 48.982815424186 8.390579818954 116.41158294678 0.082036577207975
46787.71939825 48.982816193451 8.390580415035 116.41095733643 0.082036577207975
46788.729256235 48.982816957308 8.3905810051983 116.4103012085 0.082036577207975
46789.739177859 48.982817720312 8.3905816117547 116.41031646729 0.082036577207975
46790.749042094 48.982818486234 8.390582224189 116.41033172607 0.082036577207975
46791.758898514 48.982819250474 8.3905828171536 116.40995025635 0.082036577207975
46792.768806224 48.982820017421 8.3905834214398 116.40942382812 0.082036577207975
46793.778675536 48.982820781437 8.3905840350413 116.41022491455 0.082036577207975
46794.788603495 48.982821683775 8.3905847362337 116.41995239258 0.082036577207975
46795.798501681 48.98282245199 8.3905853420887 116.41973114014 0.082036577207975
46796.808396108 48.982823218967 8.3905859504922 116.41987609863 0.082036577207975
46797.818215132 48.982824114563 8.3905866468519 116.42946624756 0.082036577207975
46798.818308841 48.982824883413 8.3905872550729 116.42949676514 0.082036577207975
46799.827987497 48.982825652685 8.3905878609011 116.42971801758 0.082036577207975
46800.837876469 48.982826545301 8.3905885508369 116.43894958496 0.082036577207975
46801.847773501 48.982827318719 8.3905891563965 116.43909454346 0.082036577207975
46802.857826602 48.982828086178 8.3905897659379 116.4393081665 0.082036577207975
46803.867533794 48.98282897243 8.3905904527253 116.4481048584 0.082036577207975
46804.877459869 48.982829744407 8.3905910609147 116.44842529297 0.082036577207975
46805.887474934 48.98283051894 8.3905916639599 116.44847106934 0.082036577207975
46806.897418464 48.982831402135 8.3905923435632 116.45683288574 0.082036577207975
46807.907157586 48.982832177739 8.3905929478529 116.45709991455 0.082036577207975
46808.90717593 48.982832949218 8.3905935556382 116.45735168457 0.082036577207975
46809.907869815 48.982833827302 8.390594232297 116.46536254883 0.082036577207975
46810.91699441 48.982834601363 8.3905948400989 116.46605682373 0.082036577207975
46811.926819439 48.982835377671 8.3905954415824 116.46617126465 0.082036577207975
46812.936591528 48.982836251578 8.3905961137986 116.47401428223 0.082036577207975
46813.9464878608 48.982837028567 8.3905967178607 116.47451019287 0.082036577207975
46814.9562604769 48.982837801725 8.3905973252199 116.47496795654 0.082036577207975
46815.966211682 48.98283867111 8.3905979941926 116.48223114014 0.082036577207975
46816.976103058 48.98283944856 8.390598598945 116.48239135742 0.082036577207975
46817.986151499 48.98284022461 8.3905992015345 116.48271179199 0.082036577207975
46818.995867047 48.982841089189 8.3905998658641 116.48942565918 0.082036577207975
46820.005725162 48.982841864474 8.3906004728921 116.48993682861 0.082036577207975
46821.015592639 48.982842641977 8.3906010738912 116.48977661133 0.082036577207975
46822.025463197 48.982843502182 8.3906017340953 116.49605560303 0.082036577207975
46823.035411626 48.982844279508 8.3906023395073 116.49614715576 0.082036577207975
46824.045259766 48.982845053976 8.3906029461288 116.49662017822 0.082036577207975
46825.055180106 48.982845910342 8.3906036063315 116.50254821777 0.082036577207975
46826.065022328 48.982846685889 8.3906042124802 116.50285339355 0.082036577207975
46827.07499507 48.982847462609 8.3906048189716 116.50286102295 0.082036577207975
46828.084843058 48.982848313451 8.3906054770162 116.50853729248 0.082036577207975
46829.085060376 48.982849088622 8.3906060849486 116.50869750977 0.082036577207975
46830.094557513 48.982849864478 8.3906066910434 116.5089263916 0.082036577207975
46831.104428817 48.98285071254 8.3906073455063 116.5142364502 0.082036577207975
46832.1143589 48.98285148857 8.3906079521653 116.51425933838 0.082036577207975
46833.12420917 48.982852264213 8.3906085603972 116.51451873779 0.082036577207975
46834.134202648 48.982853109148 8.390609210975 116.51948547363 0.082036577207975
46835.143965301 48.982853885196 8.3906098133192 116.51908111572 0.082036577207975
46836.153847916 48.982854660168 8.3906104256572 116.51945495605 0.082036577207975
46837.163757531 48.9828555 8.3906110763584 116.52411651611 0.082036577207975
46838.173681359 48.982856277426 8.3906116787851 116.52375030518 0.082036577207975
46839.183652268 48.982857052609 8.3906122820438 116.52353668213 0.082036577207975
46840.1934873715 48.982857889966 8.390612927567 116.52803039551 0.082036577207975
46841.2033212864 48.982858664365 8.3906135370946 116.528465271 0.082036577207975
46842.213171913 48.982859443718 8.3906141327327 116.52767181396 0.082036577207975
46843.2230505753 48.982860278347 8.3906147760637 116.53214263916 0.082036577207975
46844.233021359 48.982861056432 8.3906153813738 116.53266906738 0.082036577207975
46845.242827709 48.982861831067 8.390615987121 116.53270721436 0.082036577207975
46846.252707418 48.982862666806 8.390616631132 116.5368347168 0.082036577207975
46847.262581497 48.982863444114 8.3906172347912 116.53695678711 0.082036577207975
46848.263775179 48.982864222396 8.3906178400113 116.53759002686 0.082036577207975
46849.272421171 48.982865060887 8.390618472133 116.54067993164 0.082036577207975
46850.282387429 48.982865838632 8.3906190702191 116.54047393799 0.082036577207975
46851.292143649 48.982866617193 8.3906196734496 116.54051208496 0.082036577207975
46852.302018107 48.982867453848 8.3906202999398 116.54296875 0.082036577207975
46853.311916991 48.982868230241 8.3906209019735 116.54236602783 0.082036577207975
46854.321779125 48.982869009414 8.3906214966155 116.54215240479 0.082036577207975
46855.331680705 48.982869841349 8.3906221216796 116.54433441162 0.082036577207975
46856.341607069 48.982870617754 8.3906227181505 116.54376220703 0.082036577207975
46857.351482383 48.982871395325 8.390623319339 116.54332733154 0.082036577207975
46858.36137011 48.982872225359 8.3906239405806 116.54535675049 0.082036577207975
46859.371230785 48.982873003951 8.3906245372592 116.54467010498 0.082036577207975
46860.38128337 48.982873779954 8.3906251339677 116.54420471191 0.082036577207975
46861.391097064 48.982874608784 8.3906257519197 116.54597473145 0.082036577207975
46862.400959735 48.982875384727 8.3906263484643 116.54532623291 0.082036577207975
46863.401052759 48.982876162561 8.3906269461274 116.54484558105 0.082036577207975
46864.410741881 48.982876986259 8.3906275638686 116.54658508301 0.082036577207975
46865.420591932 48.98287776098 8.3906281614249 116.54601287842 0.082036577207975
46866.42062093 48.982878537151 8.3906287584467 116.54550170898 0.051107729356723
46867.430552878 48.982879357927 8.3906293769422 116.54716491699 0.051107729356723
46868.440466026 48.982880131379 8.390629978178 116.54672241211 0.051107729356723
46869.450253478 48.982880906417 8.3906305748197 116.54621887207 0.051107729356723
46870.46005645 48.982881722937 8.3906311931494 116.54778289795 0.051107729356723
46871.469953376 48.982882495414 8.3906317945865 116.54707336426 0.051107729356723
46872.479820573 48.98288326942 8.3906323951183 116.54679107666 0.051107729356723
46873.489660312 48.982884083298 8.3906330123468 116.54796600342 0.051107729356723
46874.499567335 48.982884854689 8.3906336132853 116.54708099365 0.051107729356723
46875.509389077 48.982885629144 8.3906342116196 116.5464553833 0.051107729356723
46876.519260651 48.982886441349 8.390634824916 116.54724121094 0.051107729356723
46877.529171242 48.982887213032 8.3906354211233 116.54600524902 0.051107729356723
46878.53905883 48.982887986134 8.3906360224312 116.54542541504 0.051107729356723
46879.548911895 48.982888796636 8.3906366324532 116.54594421387 0.051107729356723
46880.558793117 48.982889568954 8.3906372251448 116.54445648193 0.051107729356723
46881.568737272 48.982890342191 8.390637823128 116.54376983643 0.051107729356723
46882.57860618 48.982891151555 8.3906384299057 116.54433441162 0.051107729356723
46883.588504241 48.982891923664 8.3906390190492 116.54323577881 0.051107729356723
46884.598350895 48.982892696528 8.3906396147454 116.54219055176 0.051107729356723
46885.608245086 48.982893502987 8.3906402195885 116.54307556152 0.051107729356723
46886.608962596 48.982894275782 8.3906408091756 116.54232025146 0.051107729356723
46887.618000059 48.982895047893 8.3906413990678 116.54134368896 0.051107729356723
46888.627911017 48.982895853122 8.3906420014676 116.54220581055 0.051107729356723
46889.63778101 48.982896624338 8.3906425945668 116.54165649414 0.051107729356723
46890.647729544 48.98289739711 8.3906431830809 116.54082489014 0.051107729356723
46891.657573441 48.982898187537 8.3906437891796 116.54415893555 0.051107729356723
46892.667424349 48.982898958013 8.3906443826796 116.54379272461 0.051107729356723
46893.677449523 48.982899728351 8.3906449761701 116.54307556152 0.051107729356723
46894.687816687 48.982900516959 8.3906455824252 116.5463104248 0.051107729356723
46895.697074962 48.982901287142 8.390646176352 116.54584503174 0.051107729356723
46896.706958146 48.982902056827 8.3906467708509 116.54550170898 0.051107729356723
46897.716839862 48.982902844001 8.3906473788662 116.54862976074 0.051107729356723
46898.72674478 48.982903612441 8.3906479753366 116.54859161377 0.051107729356723
46899.73665572 48.982904382145 8.3906485681821 116.54795837402 0.051107729356723
46900.746541701 48.982905167443 8.3906491746946 116.55098724365 0.051107729356723
46901.756401271 48.982905936067 8.3906497741015 116.5509185791 0.051107729356723
46902.766336295 48.982906703654 8.3906503685681 116.55074310303 0.051107729356723
46903.776295577 48.982907488848 8.3906509747703 116.55358886719 0.051107729356723
46904.786063798 48.982908258745 8.3906515694027 116.55320739746 0.051107729356723
46905.795921386 48.982909025279 8.3906521701153 116.55303955078 0.051107729356723
46906.805820038 48.982909807752 8.3906527770638 116.55569458008 0.051107729356723
46907.81569973 48.982910573984 8.3906533739064 116.55563354492 0.051107729356723
46908.825612749 48.982911345127 8.3906539675577 116.55488586426 0.051107729356723
46909.835528661 48.982912128347 8.3906545698534 116.55732727051 0.051107729356723
46910.845371598 48.982912896961 8.3906551646956 116.55702972412 0.051107729356723
46911.855258862 48.982913662591 8.3906557612821 116.55693054199 0.051107729356723
46912.865207373 48.982914444259 8.3906563621292 116.55935668945 0.051107729356723
46913.875217009 48.982915211568 8.3906569523297 116.55922698975 0.051107729356723
46914.885120318 48.982915979685 8.3906575480123 116.55889892578 0.051107729356723
46915.894922607 48.98291675962 8.3906581462098 116.56122589111 0.051107729356723
46916.90474484 48.982917526374 8.3906587357421 116.56113433838 0.051107729356723
46917.904794336 48.982918293947 8.3906593238035 116.56090545654 0.051107729356723
46918.914687072 48.982919074075 8.3906599162375 116.5630645752 0.051107729356723
46919.924363772 48.982919840744 8.3906605018862 116.5629119873 0.051107729356723
46920.934238628 48.982920607274 8.3906610897463 116.56280517578 0.051107729356723
46921.944148956 48.982921385183 8.3906616804399 116.5648651123 0.051107729356723
46922.954041151 48.982922149771 8.3906622646921 116.56478881836 0.051107729356723
46923.963896322 48.982922916584 8.3906628498759 116.56467437744 0.051107729356723
46924.963914916 48.982923693477 8.3906634381239 116.56662750244 0.051107729356723
46925.973907879 48.982924458805 8.3906640185973 116.56633758545 0.051107729356723
46926.98360197 48.982925222803 8.3906646047301 116.56642913818 0.051107729356723
46927.993410546 48.982925999121 8.3906651916631 116.56811523438 0.051107729356723
46929.00332416 48.982926763011 8.3906657719421 116.5676651001 0.051107729356723
46930.013212187 48.982927528198 8.3906663498705 116.567237854 0.051107729356723
46931.023073382 48.982928303727 8.3906669308441 116.56831359863 0.051107729356723
46932.032955131 48.982929068023 8.3906675097025 116.56716156006 0.051107729356723
46933.042933769 48.982929829753 8.3906680890171 116.56691741943 0.051107729356723
46934.052725383 48.982930602284 8.3906686719863 116.56777191162 0.051107729356723
46935.062646001 48.982931366342 8.3906692442368 116.56658172607 0.051107729356723
46936.072520176 48.982932129351 8.390669826704 116.5657043457 0.051107729356723
46937.082411589 48.982932901609 8.3906704077574 116.56665802002 0.051107729356723
46938.092271378 48.982933662859 8.3906709841994 116.56603240967 0.051107729356723
46939.102148658 48.982934427132 8.3906715581898 116.56452178955 0.051107729356723
46940.112289119 48.982935197978 8.3906721384476 116.56531524658 0.051107729356723
46941.121952338 48.982935958395 8.3906727173536 116.56481933594 0.051107729356723
46942.131875512 48.982936719487 8.3906732932631 116.56387329102 0.051107729356723
46943.141680875 48.98293748497 8.390673887836 116.5650100708 0.051107729356723
46944.151564554 48.982938244467 8.3906744652488 116.56477355957 0.051107729356723
46945.161504412 48.982939004431 8.3906750442956 116.56379699707 0.051107729356723
46946.171454778 48.982939769483 8.390675637866 116.56485748291 0.051107729356723
46947.181257826 48.982940530955 8.3906762123217 116.56439971924 0.051107729356723
46948.191114611 48.982941288677 8.3906767919343 116.56402587891 0.051107729356723
46949.20102244 48.982942052746 8.3906773866092 116.56523132324 0.051107729356723
46950.210877036 48.982942812279 8.3906779630193 116.56516265869 0.051107729356723
46951.220789111 48.982943572185 8.3906785378907 116.56446838379 0.051107729356723
46952.230758043 48.982944335186 8.3906791304873 116.56582641602 0.051107729356723
46953.24055257 48.982945093931 8.3906797106438 116.56645965576 0.051107729356723
46954.250444413 48.98294585271 8.3906802836285 116.56587219238 0.051107729356723
46955.260295656 48.982946615954 8.3906808743532 116.5673828125 0.051107729356723
46956.270232748 48.982947374619 8.3906814556262 116.56803894043 0.051107729356723
46957.280117293 48.982948132675 8.3906820322656 116.56848144531 0.051107729356723
46958.289976632 48.982948894621 8.3906826232925 116.57034301758 0.051107729356723
46959.299886859 48.982949652104 8.3906831989033 116.57125091553 0.051107729356723
46960.309765589 48.982950410373 8.3906837808762 116.57192230225 0.051107729356723
46961.319655022 48.982951171478 8.3906843699989 116.57392120361 0.051107729356723
46962.329479781 48.982951929485 8.3906849447623 116.57461547852 0.051107729356723
46963.339424565 48.982952686833 8.3906855229563 116.57586669922 0.051107729356723
46964.349275317 48.982953447369 8.3906861125618 116.5778427124 0.051107729356723
46965.359168118 48.982954201875 8.3906866891254 116.57861328125 0.051107729356723
46966.369233692 48.982954960951 8.3906872643341 116.57935333252 0.051107729356723
46967.379188594 48.982955719952 8.390687853023 116.58097839355 0.033600595232823
46968.388966176 48.982956474316 8.3906884312723 116.58137512207 0.033600595232823
46969.398909085 48.982957229088 8.3906890059952 116.58237457275 0.033600595232823
46970.408716186 48.982957987257 8.3906895936359 116.58386993408 0.033600595232823
46971.418726445 48.982958742188 8.390690164902 116.58389282227 0.033600595232823
46972.428590244 48.982959496062 8.3906907446756 116.58442687988 0.033600595232823
46973.438313836 48.982960252931 8.3906913311975 116.58554077148 0.033600595232823
46974.438340286 48.982961004717 8.3906919069974 116.5856552124 0.033600595232823
46975.448185205 48.982961760777 8.3906924757347 116.58567810059 0.033600595232823
46976.45797584 48.982962517438 8.390693058565 116.58670043945 0.033600595232823
46977.467846198 48.982963270234 8.3906936304815 116.58670043945 0.033600595232823
46978.467898342 48.982964021779 8.3906942050281 116.58692932129 0.033600595232823
46979.477596612 48.982964776881 8.3906947869431 116.58779144287 0.033600595232823
46980.487476114 48.982965528223 8.39069535983 116.58764648438 0.033600595232823
46981.497362615 48.982966280339 8.390695930908 116.58778381348 0.033600595232823
46982.507202152 48.982967033776 8.390696512084 116.58861541748 0.033600595232823
46983.517118638 48.982967785002 8.3906970814261 116.58840942383 0.033600595232823
46984.526948992 48.982968535949 8.390697654384 116.58837127686 0.033600595232823
46985.536857947 48.982969289053 8.3906982332511 116.58909606934 0.033600595232823
46986.546737498 48.982970038133 8.3906988038223 116.58887481689 0.033600595232823
46987.556657323 48.982970790436 8.3906993715514 116.58875274658 0.033600595232823
46988.566715759 48.982971542455 8.3906999499718 116.58930206299 0.033600595232823
46989.576409117 48.982972290445 8.3907005207007 116.58917999268 0.033600595232823
46990.58628031 48.982973040192 8.39070109028 116.5890045166 0.033600595232823
46991.596233106 48.982973791475 8.3907016679585 116.58950805664 0.033600595232823
46992.60605425 48.982974539908 8.3907022362086 116.58917999268 0.033600595232823
46993.616026659 48.982975287272 8.3907028082783 116.5890045166 0.033600595232823
46994.625821268 48.982976035033 8.3907034039939 116.58915710449 0.033600595232823
46995.635751899 48.982976780843 8.3907039749871 116.58876800537 0.033600595232823
46996.645591573 48.982977528466 8.3907045449079 116.58850860596 0.033600595232823
46997.655484934 48.982978274477 8.3907051410204 116.58868408203 0.033600595232823
46998.665530916 48.982979020186 8.3907057102071 116.58827972412 0.033600595232823
46999.675265207 48.982979765191 8.3907062832539 116.58805847168 0.033600595232823
47000.685218422 48.982980510851 8.3907068771266 116.5881729126 0.033600595232823
47001.695029865 48.982981256295 8.3907074448941 116.58788299561 0.033600595232823
47002.704901935 48.982982001613 8.3907080148377 116.58746337891 0.033600595232823
47003.715449642 48.982982746538 8.390708606401 116.58760070801 0.033600595232823
47004.724710736 48.982983491137 8.3907091727507 116.58738708496 0.033600595232823
47005.734604637 48.982984236533 8.3907097406197 116.58688354492 0.033600595232823
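
The columns in this data file follow the header on its first line (Time Latitude Longitude Altitude PositionSigma). For reference, a minimal C++ sketch for reading the file; it is illustrative only and not part of the commit:

// Illustrative sketch: read examples/Data/KittiGps.txt row by row.
#include <fstream>
#include <iostream>
#include <string>

int main() {
  std::ifstream fin("examples/Data/KittiGps.txt");
  std::string header;
  std::getline(fin, header);  // skip the column-name header line
  double time, lat, lon, alt, sigma;
  while (fin >> time >> lat >> lon >> alt >> sigma) {
    std::cout << time << ": " << lat << ", " << lon << ", " << alt
              << " (sigma " << sigma << ")" << std::endl;
  }
  return 0;
}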

View File

@@ -0,0 +1,2 @@
BodyPtx BodyPty BodyPtz BodyPrx BodyPry BodyPrz UpdatePeriod
-0.0147725597895258 7.22876903268996e-006 -1.49304056640669e-005 -0.809818318663195 0.32628214806349 -0.797376731139142 1

View File

@@ -2124,7 +2124,7 @@ class NonlinearISAM {
#include <gtsam/slam/PriorFactor.h>
template<T = {gtsam::LieScalar, gtsam::LieVector, gtsam::LieMatrix, gtsam::Point2, gtsam::StereoPoint2, gtsam::Point3, gtsam::Rot2, gtsam::Rot3, gtsam::Pose2, gtsam::Pose3, gtsam::Cal3_S2, gtsam::CalibratedCamera, gtsam::SimpleCamera, gtsam::imuBias::ConstantBias}>
virtual class PriorFactor : gtsam::NonlinearFactor {
virtual class PriorFactor : gtsam::NoiseModelFactor {
PriorFactor(size_t key, const T& prior, const gtsam::noiseModel::Base* noiseModel);
T prior() const;
@@ -2135,7 +2135,7 @@ virtual class PriorFactor : gtsam::NonlinearFactor {
#include <gtsam/slam/BetweenFactor.h>
template<T = {gtsam::LieScalar, gtsam::LieVector, gtsam::LieMatrix, gtsam::Point2, gtsam::Point3, gtsam::Rot2, gtsam::Rot3, gtsam::Pose2, gtsam::Pose3, gtsam::imuBias::ConstantBias}>
virtual class BetweenFactor : gtsam::NonlinearFactor {
virtual class BetweenFactor : gtsam::NoiseModelFactor {
BetweenFactor(size_t key1, size_t key2, const T& relativePose, const gtsam::noiseModel::Base* noiseModel);
T measured() const;
@@ -2146,7 +2146,7 @@ virtual class BetweenFactor : gtsam::NonlinearFactor {
#include <gtsam/nonlinear/NonlinearEquality.h>
template<T = {gtsam::LieScalar, gtsam::LieVector, gtsam::LieMatrix, gtsam::Point2, gtsam::StereoPoint2, gtsam::Point3, gtsam::Rot2, gtsam::Rot3, gtsam::Pose2, gtsam::Pose3, gtsam::Cal3_S2, gtsam::CalibratedCamera, gtsam::SimpleCamera, gtsam::imuBias::ConstantBias}>
virtual class NonlinearEquality : gtsam::NonlinearFactor {
virtual class NonlinearEquality : gtsam::NoiseModelFactor {
// Constructor - forces exact evaluation
NonlinearEquality(size_t j, const T& feasible);
// Constructor - allows inexact evaluation
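
The three hunks above appear to change the declared base class of the wrapped PriorFactor, BetweenFactor, and NonlinearEquality templates from gtsam::NonlinearFactor to gtsam::NoiseModelFactor (the paired old/new lines are shown here without +/- markers). For context, a minimal C++ sketch, not part of this commit, of the relationship the wrapper declarations now mirror:

// Sketch only: in C++, PriorFactor already derives from NoiseModelFactor,
// so the noise model is reachable through the base-class interface.
#include <gtsam/slam/PriorFactor.h>
#include <gtsam/geometry/Pose3.h>

using namespace gtsam;

int main() {
  PriorFactor<Pose3> prior(1, Pose3(), noiseModel::Isotropic::Sigma(6, 0.1));
  const NoiseModelFactor& base = prior;          // valid upcast
  base.noiseModel()->print("prior noise model: ");
  return 0;
}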

View File

@@ -524,6 +524,25 @@ Matrix stack(size_t nrMatrices, ...)
return A;
}
/* ************************************************************************* */
Matrix stack(const std::vector<Matrix>& blocks) {
if (blocks.size() == 1) return blocks.at(0);
int nrows = 0, ncols = blocks.at(0).cols();
BOOST_FOREACH(const Matrix& mat, blocks) {
nrows += mat.rows();
if (ncols != mat.cols())
throw invalid_argument("Matrix::stack(): column size mismatch!");
}
Matrix result(nrows, ncols);
int cur_row = 0;
BOOST_FOREACH(const Matrix& mat, blocks) {
result.middleRows(cur_row, mat.rows()) = mat;
cur_row += mat.rows();
}
return result;
}
/* ************************************************************************* */
Matrix collect(const std::vector<const Matrix *>& matrices, size_t m, size_t n)
{

View File

@@ -404,6 +404,7 @@ GTSAM_EXPORT Vector backSubstituteLower(const Matrix& L, const Vector& b, bool u
* @return combined matrix [A1; A2; A3]
*/
GTSAM_EXPORT Matrix stack(size_t nrMatrices, ...);
GTSAM_EXPORT Matrix stack(const std::vector<Matrix>& blocks);
/**
* create a matrix by concatenating

View File

@@ -149,6 +149,12 @@ TEST( matrix, stack )
C(i + 2, j) = B(i, j);
EQUALITY(C,AB);
std::vector<gtsam::Matrix> matrices;
matrices.push_back(A);
matrices.push_back(B);
Matrix AB2 = stack(matrices);
EQUALITY(C,AB2);
}
/* ************************************************************************* */

View File

@@ -96,7 +96,7 @@ double Point3::norm() const {
/* ************************************************************************* */
ostream &operator<<(ostream &os, const Point3& p) {
os << '(' << p.x() << ", " << p.y() << ", " << p.z() << ')';
os << '[' << p.x() << ", " << p.y() << ", " << p.z() << "]\';";
return os;
}

View File

@@ -181,7 +181,7 @@ namespace gtsam {
/// @{
/** print */
void print(const std::string& s="R") const { gtsam::print((Matrix)matrix(), s);}
void print(const std::string& s="R") const;
/** equals with an tolerance */
bool equals(const Rot3& p, double tol = 1e-9) const;

View File

@@ -69,6 +69,11 @@ Rot3::Rot3(const Matrix& R) {
/* ************************************************************************* */
Rot3::Rot3(const Quaternion& q) : rot_(q.toRotationMatrix()) {}
/* ************************************************************************* */
void Rot3::print(const std::string& s) const {
gtsam::print((Matrix)matrix(), s);
}
/* ************************************************************************* */
Rot3 Rot3::Rx(double t) {
double st = sin(t), ct = cos(t);

View File

@@ -77,7 +77,7 @@ TEST( Point3, stream)
Point3 p(1,2, -3);
std::ostringstream os;
os << p;
EXPECT(os.str() == "(1, 2, -3)");
EXPECT(os.str() == "[1, 2, -3]';");
}
/* ************************************************************************* */

View File

@@ -747,7 +747,7 @@ TEST( Pose3, stream)
Pose3 T;
std::ostringstream os;
os << T;
EXPECT(os.str() == "\n|1, 0, 0|\n|0, 1, 0|\n|0, 0, 1|\n\n(0, 0, 0)\n");
EXPECT(os.str() == "\n|1, 0, 0|\n|0, 1, 0|\n|0, 0, 1|\n\n[0, 0, 0]';\n");
}
/* ************************************************************************* */

View File

@ -19,3 +19,8 @@ set (navigation_excluded_tests "")
if (GTSAM_BUILD_TESTS)
gtsam_add_subdir_tests(navigation "${navigation_local_libs}" "${gtsam-default}" "${navigation_excluded_tests}")
endif()
# Build timing scripts
if (GTSAM_BUILD_TIMING)
gtsam_add_subdir_timing(navigation "${navigation_local_libs}" "${gtsam-default}" "${navigation_excluded_files}")
endif(GTSAM_BUILD_TIMING)

View File

@@ -678,74 +678,6 @@ TEST (InertialNavFactor_GlobalVelocity, JacobianWithTransform ) {
CHECK( gtsam::assert_equal(H5_expectedVel, H5_actualVel, 1e-5));
}
#include <gtsam/linear/GaussianFactorGraph.h>
/* ************************************************************************* */
TEST( InertialNavFactor_GlobalVelocity, LinearizeTiming)
{
gtsam::Key PoseKey1(11);
gtsam::Key PoseKey2(12);
gtsam::Key VelKey1(21);
gtsam::Key VelKey2(22);
gtsam::Key BiasKey1(31);
double measurement_dt(0.1);
Vector world_g(Vector_(3, 0.0, 0.0, 9.81));
Vector world_rho(Vector_(3, 0.0, -1.5724e-05, 0.0)); // NED system
gtsam::Vector ECEF_omega_earth(Vector_(3, 0.0, 0.0, 7.292115e-5));
gtsam::Vector world_omega_earth(world_R_ECEF.matrix() * ECEF_omega_earth);
SharedGaussian model(noiseModel::Isotropic::Sigma(9, 0.1));
// Second test: zero angular motion, some acceleration - generated in matlab
Vector measurement_acc(Vector_(3, 6.501390843381716, -6.763926150509185, -2.300389940090343));
Vector measurement_gyro(Vector_(3, 0.1, 0.2, 0.3));
InertialNavFactor_GlobalVelocity<Pose3, LieVector, imuBias::ConstantBias> f(PoseKey1, VelKey1, BiasKey1, PoseKey2, VelKey2, measurement_acc, measurement_gyro, measurement_dt, world_g, world_rho, world_omega_earth, model);
Rot3 R1(0.487316618, 0.125253866, 0.86419557,
0.580273724, 0.693095498, -0.427669306,
-0.652537293, 0.709880342, 0.265075427);
Point3 t1(2.0,1.0,3.0);
Pose3 Pose1(R1, t1);
LieVector Vel1(3,0.5,-0.5,0.4);
Rot3 R2(0.473618898, 0.119523052, 0.872582019,
0.609241153, 0.67099888, -0.422594037,
-0.636011287, 0.731761397, 0.244979388);
Point3 t2 = t1.compose( Point3(Vel1*measurement_dt) );
Pose3 Pose2(R2, t2);
Vector dv = measurement_dt * (R1.matrix() * measurement_acc + world_g);
LieVector Vel2 = Vel1.compose( dv );
imuBias::ConstantBias Bias1;
Values values;
values.insert(PoseKey1, Pose1);
values.insert(PoseKey2, Pose2);
values.insert(VelKey1, Vel1);
values.insert(VelKey2, Vel2);
values.insert(BiasKey1, Bias1);
Ordering ordering;
ordering.push_back(PoseKey1);
ordering.push_back(VelKey1);
ordering.push_back(BiasKey1);
ordering.push_back(PoseKey2);
ordering.push_back(VelKey2);
GaussianFactorGraph graph;
gttic_(LinearizeTiming);
for(size_t i = 0; i < 100000; ++i) {
GaussianFactor::shared_ptr g = f.linearize(values);
graph.push_back(g);
}
gttoc_(LinearizeTiming);
tictoc_finishedIteration_();
tictoc_print_();
}
/* ************************************************************************* */
int main() { TestResult tr; return TestRegistry::runAllTests(tr);}
/* ************************************************************************* */

View File

@@ -0,0 +1,110 @@
/* ----------------------------------------------------------------------------
* GTSAM Copyright 2010, Georgia Tech Research Corporation,
* Atlanta, Georgia 30332-0415
* All Rights Reserved
* Authors: Frank Dellaert, et al. (see THANKS for the full author list)
* See LICENSE for the license information
* -------------------------------------------------------------------------- */
/**
* @file testInertialNavFactor_GlobalVelocity.cpp
* @brief Unit test for the InertialNavFactor_GlobalVelocity
* @author Vadim Indelman, Stephen Williams
*/
#include <iostream>
#include <gtsam/navigation/ImuBias.h>
#include <gtsam/navigation/InertialNavFactor_GlobalVelocity.h>
#include <gtsam/geometry/Pose3.h>
#include <gtsam/nonlinear/Values.h>
#include <gtsam/nonlinear/Key.h>
#include <gtsam/base/numericalDerivative.h>
#include <gtsam/base/LieVector.h>
using namespace std;
using namespace gtsam;
gtsam::Rot3 world_R_ECEF(
0.31686, 0.51505, 0.79645,
0.85173, -0.52399, 0,
0.41733, 0.67835, -0.60471);
gtsam::Vector ECEF_omega_earth(Vector_(3, 0.0, 0.0, 7.292115e-5));
gtsam::Vector world_omega_earth(world_R_ECEF.matrix() * ECEF_omega_earth);
/* ************************************************************************* */
gtsam::Pose3 predictionErrorPose(const Pose3& p1, const LieVector& v1, const imuBias::ConstantBias& b1, const Pose3& p2, const LieVector& v2, const InertialNavFactor_GlobalVelocity<Pose3, LieVector, imuBias::ConstantBias>& factor) {
return Pose3::Expmap(factor.evaluateError(p1, v1, b1, p2, v2).head(6));
}
gtsam::LieVector predictionErrorVel(const Pose3& p1, const LieVector& v1, const imuBias::ConstantBias& b1, const Pose3& p2, const LieVector& v2, const InertialNavFactor_GlobalVelocity<Pose3, LieVector, imuBias::ConstantBias>& factor) {
return LieVector::Expmap(factor.evaluateError(p1, v1, b1, p2, v2).tail(3));
}
#include <gtsam/linear/GaussianFactorGraph.h>
/* ************************************************************************* */
int main() {
gtsam::Key PoseKey1(11);
gtsam::Key PoseKey2(12);
gtsam::Key VelKey1(21);
gtsam::Key VelKey2(22);
gtsam::Key BiasKey1(31);
double measurement_dt(0.1);
Vector world_g(Vector_(3, 0.0, 0.0, 9.81));
Vector world_rho(Vector_(3, 0.0, -1.5724e-05, 0.0)); // NED system
gtsam::Vector ECEF_omega_earth(Vector_(3, 0.0, 0.0, 7.292115e-5));
gtsam::Vector world_omega_earth(world_R_ECEF.matrix() * ECEF_omega_earth);
SharedGaussian model(noiseModel::Isotropic::Sigma(9, 0.1));
// Second test: zero angular motion, some acceleration - generated in matlab
Vector measurement_acc(Vector_(3, 6.501390843381716, -6.763926150509185, -2.300389940090343));
Vector measurement_gyro(Vector_(3, 0.1, 0.2, 0.3));
InertialNavFactor_GlobalVelocity<Pose3, LieVector, imuBias::ConstantBias> f(PoseKey1, VelKey1, BiasKey1, PoseKey2, VelKey2, measurement_acc, measurement_gyro, measurement_dt, world_g, world_rho, world_omega_earth, model);
Rot3 R1(0.487316618, 0.125253866, 0.86419557,
0.580273724, 0.693095498, -0.427669306,
-0.652537293, 0.709880342, 0.265075427);
Point3 t1(2.0,1.0,3.0);
Pose3 Pose1(R1, t1);
LieVector Vel1(3,0.5,-0.5,0.4);
Rot3 R2(0.473618898, 0.119523052, 0.872582019,
0.609241153, 0.67099888, -0.422594037,
-0.636011287, 0.731761397, 0.244979388);
Point3 t2 = t1.compose( Point3(Vel1*measurement_dt) );
Pose3 Pose2(R2, t2);
Vector dv = measurement_dt * (R1.matrix() * measurement_acc + world_g);
LieVector Vel2 = Vel1.compose( dv );
imuBias::ConstantBias Bias1;
Values values;
values.insert(PoseKey1, Pose1);
values.insert(PoseKey2, Pose2);
values.insert(VelKey1, Vel1);
values.insert(VelKey2, Vel2);
values.insert(BiasKey1, Bias1);
Ordering ordering;
ordering.push_back(PoseKey1);
ordering.push_back(VelKey1);
ordering.push_back(BiasKey1);
ordering.push_back(PoseKey2);
ordering.push_back(VelKey2);
GaussianFactorGraph graph;
gttic_(LinearizeTiming);
for(size_t i = 0; i < 100000; ++i) {
GaussianFactor::shared_ptr g = f.linearize(values, ordering);
graph.push_back(g);
}
gttoc_(LinearizeTiming);
tictoc_finishedIteration_();
tictoc_print_();
}
/* ************************************************************************* */

View File

@@ -515,8 +515,14 @@ public:
* requested to be marginalized. Marginalization leaves a linear
* approximation of the marginal in the system, and the linearization points
* of any variables involved in this linear marginal become fixed. The set
* fixed variables will include any involved with the marginalized variables
* fixed variables will include any key involved with the marginalized variables
* in the original factors, and possibly additional ones due to fill-in.
*
* If provided, 'marginalFactorsIndices' will be augmented with the factor graph
* indices of the marginal factors added during the 'marginalizeLeaves' call
*
* If provided, 'deletedFactorsIndices' will be augmented with the factor graph
* indices of any factor that was removed during the 'marginalizeLeaves' call
*/
void marginalizeLeaves(const FastList<Key>& leafKeys);
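
A minimal sketch of how the optional index outputs described in the comment above might be used; it assumes an overload of marginalizeLeaves that accepts the two output vectors, which the comment implies but this hunk does not show:

// Sketch only: 'marginalFactorsIndices' and 'deletedFactorsIndices' are assumed
// to be accepted as optional output arguments, per the documentation above.
#include <gtsam/nonlinear/ISAM2.h>
#include <vector>

void marginalizeOldKeys(gtsam::ISAM2& isam, const gtsam::FastList<gtsam::Key>& leafKeys) {
  std::vector<size_t> marginalFactorsIndices;  // filled with indices of marginal factors added
  std::vector<size_t> deletedFactorsIndices;   // filled with indices of factors removed
  isam.marginalizeLeaves(leafKeys, marginalFactorsIndices, deletedFactorsIndices);
}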
@@ -580,6 +586,9 @@ public:
/** Access the nonlinear variable index */
const VariableIndex& getVariableIndex() const { return variableIndex_; }
/** Access the set of variables whose linearization points are fixed */
const FastSet<Key>& getFixedVariables() const { return fixedVariables_; }
size_t lastAffectedVariableCount;
size_t lastAffectedFactorCount;
size_t lastAffectedCliqueCount;

View File

@@ -58,7 +58,7 @@ TEST( testLabeledSymbol, KeyLabeledSymbolEncoding ) {
string str = "xA5";
EXPECT_LONGS_EQUAL(key, (Key)symbol);
EXPECT(assert_equal(str, DefaultKeyFormatter(symbol)));
EXPECT(assert_equal(str, MultiRobotKeyFormatter(symbol)));
EXPECT(assert_equal(symbol, LabeledSymbol(key)));
}
}

View File

@@ -15,7 +15,9 @@ using namespace gtsam;
using namespace gtsam::symbol_shorthand;
using namespace gtsam::noiseModel;
/* ************************************************************************* */
/**
* This TEST should fail. If you want it to pass, change noise to 0.
*/
TEST(BetweenFactor, Rot3) {
Rot3 R1 = Rot3::rodriguez(0.1, 0.2, 0.3);
Rot3 R2 = Rot3::rodriguez(0.4, 0.5, 0.6);
@@ -33,13 +35,13 @@ TEST(BetweenFactor, Rot3) {
boost::function<Vector(const Rot3&, const Rot3&)>(boost::bind(
&BetweenFactor<Rot3>::evaluateError, factor, _1, _2, boost::none,
boost::none)), R1, R2, 1e-5);
EXPECT(assert_equal(numericalH1,actualH1, 1e-5));
EXPECT(assert_equal(numericalH1,actualH1));
Matrix numericalH2 = numericalDerivative22(
boost::function<Vector(const Rot3&, const Rot3&)>(boost::bind(
&BetweenFactor<Rot3>::evaluateError, factor, _1, _2, boost::none,
boost::none)), R1, R2, 1e-5);
EXPECT(assert_equal(numericalH2,actualH2, 1e-5));
EXPECT(assert_equal(numericalH2,actualH2));
}
/* ************************************************************************* */

View File

@@ -0,0 +1,301 @@
/* ----------------------------------------------------------------------------
* GTSAM Copyright 2010, Georgia Tech Research Corporation,
* Atlanta, Georgia 30332-0415
* All Rights Reserved
* Authors: Frank Dellaert, et al. (see THANKS for the full author list)
* See LICENSE for the license information
* -------------------------------------------------------------------------- */
/**
* @file SmartProjectionFactorExample_kitti.cpp
* @brief Example usage of SmartProjectionFactor using real dataset
* @date August, 2013
* @author Zsolt Kira
*/
// Camera poses will be stored as Pose3 objects, and landmarks as Point3 objects
#include <gtsam/geometry/Pose3.h>
// Each variable in the system (poses and landmarks) must be identified with a unique key.
// We can either use simple integer keys (1, 2, 3, ...) or symbols (X1, X2, L1).
// Here we will use Symbols
#include <gtsam/nonlinear/Symbol.h>
// We want to use iSAM2 to solve the SLAM problem incrementally
#include <gtsam/nonlinear/ISAM2.h>
// iSAM2 requires as input a set of new factors to be added, stored in a factor graph,
// and initial guesses for any new variables used in the added factors
#include <gtsam/nonlinear/NonlinearFactorGraph.h>
#include <gtsam/nonlinear/Values.h>
// We will use a nonlinear solver to batch-initialize from the first 150 frames
#include <gtsam/nonlinear/LevenbergMarquardtOptimizer.h>
// In GTSAM, measurement functions are represented as 'factors'. Several common factors
// have been provided with the library for solving robotics SLAM problems.
#include <gtsam/slam/PriorFactor.h>
#include <gtsam/slam/ProjectionFactor.h>
#include <gtsam_unstable/slam/SmartProjectionFactor.h>
// Standard headers, added last, so we know headers above work on their own
#include <boost/foreach.hpp>
#include <fstream>
#include <iostream>
using namespace std;
using namespace gtsam;
namespace NM = gtsam::noiseModel;
using symbol_shorthand::X;
using symbol_shorthand::L;
typedef PriorFactor<Pose3> Pose3Prior;
//// Helper functions taken from VO code
// Load all pose values from the file into a Values container
Values::shared_ptr loadPoseValues(const string& filename) {
Values::shared_ptr values(new Values());
bool addNoise = false;
Pose3 noise_pose = Pose3(Rot3::ypr(-M_PI/10, 0., -M_PI/10), gtsam::Point3(0.5,0.1,0.3));
// read in camera poses
string full_filename = filename;
ifstream fin;
fin.open(full_filename.c_str());
int pose_id;
while (fin >> pose_id) {
double pose_matrix[16];
for (int i = 0; i < 16; i++) {
fin >> pose_matrix[i];
}
if (addNoise) {
values->insert(Symbol('x',pose_id), Pose3(Matrix_(4, 4, pose_matrix)).compose(noise_pose));
} else {
values->insert(Symbol('x',pose_id), Pose3(Matrix_(4, 4, pose_matrix)));
}
}
fin.close();
return values;
}
// Load only the pose values whose keys appear in the given key list
Values::shared_ptr loadPoseValues(const string& filename, list<Key> keys) {
Values::shared_ptr values(new Values());
std::list<Key>::iterator kit;
// read in camera poses
string full_filename = filename;
ifstream fin;
fin.open(full_filename.c_str());
int pose_id;
while (fin >> pose_id) {
double pose_matrix[16];
for (int i = 0; i < 16; i++) {
fin >> pose_matrix[i];
}
kit = find (keys.begin(), keys.end(), X(pose_id));
if (kit != keys.end()) {
cout << " Adding " << X(pose_id) << endl;
values->insert(Symbol('x',pose_id), Pose3(Matrix_(4, 4, pose_matrix)));
}
}
fin.close();
return values;
}
// Load calibration info
Cal3_S2::shared_ptr loadCalibration(const string& filename) {
string full_filename = filename;
ifstream fin;
fin.open(full_filename.c_str());
// abort if the calibration file cannot be opened
if(!fin) {
cerr << "Could not load " << full_filename;
exit(1);
}
double fx, fy, s, u, v, b;
fin >> fx >> fy >> s >> u >> v >> b;
fin.close();
Cal3_S2::shared_ptr K(new Cal3_S2(fx, fy, s, u, v));
return K;
}
// main
int main(int argc, char** argv) {
bool debug = false;
// Set to true to use SmartProjectionFactor. Otherwise GenericProjectionFactor will be used
bool useSmartProjectionFactor = true;
// Minimum number of views of a landmark before it is added to the graph (SmartProjectionFactor case only)
unsigned int minimumNumViews = 1;
string HOME = getenv("HOME");
//string input_dir = HOME + "/data/kitti/loop_closures_merged/";
string input_dir = HOME + "/data/KITTI_00_200/";
typedef SmartProjectionFactor<Pose3, Point3, Cal3_S2> SmartFactor;
typedef GenericProjectionFactor<Pose3, Point3, Cal3_S2> ProjectionFactor;
static SharedNoiseModel pixel_sigma(noiseModel::Unit::Create(2));
static SharedNoiseModel prior_model(noiseModel::Diagonal::Sigmas(Vector_(6, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01)));
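// pixel_sigma: unit (1-pixel) isotropic noise on the 2D image measurements;
// prior_model: 0.01 sigmas on all 6 pose components, used to anchor the first two poses below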
NonlinearFactorGraph graph;
// Load calibration
//Cal3_S2::shared_ptr K(new Cal3_S2(718.856, 718.856, 0.0, 607.1928, 185.2157));
boost::shared_ptr<Cal3_S2> K = loadCalibration(input_dir+"calibration.txt");
K->print("Calibration");
// Load values from VO camera poses output
gtsam::Values::shared_ptr loaded_values = loadPoseValues(input_dir+"camera_poses.txt");
graph.add(Pose3Prior(X(0),loaded_values->at<Pose3>(X(0)), prior_model));
graph.add(Pose3Prior(X(1),loaded_values->at<Pose3>(X(1)), prior_model));
//graph.print("thegraph");
// Read in kitti dataset
ifstream fin;
fin.open((input_dir+"stereo_factors.txt").c_str());
if(!fin) {
cerr << "Could not open stereo_factors.txt" << endl;
exit(1);
}
// read all measurements tracked by VO stereo
cout << "Loading stereo_factors.txt" << endl;
int count = 0;
Key currentLandmark = 0;
int numLandmarks = 0;
Key r, l;
double uL, uR, v, x, y, z;
std::list<Key> allViews;
std::vector<Key> views;
std::vector<Point2> measurements;
Values values;
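// Each line of stereo_factors.txt is expected to contain:
//   pose_id landmark_id uL uR v x y z
// where (uL,v)/(uR,v) are the left/right image measurements and (x,y,z) is the
// landmark expressed in the camera frame of pose_id (see transform_from below)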
while (fin >> r >> l >> uL >> uR >> v >> x >> y >> z) {
if (debug) cout << "CurrentLandmark " << currentLandmark << " Landmark " << l << std::endl;
if (useSmartProjectionFactor == false) {
if (loaded_values->exists<Point3>(L(l)) == boost::none) {
Pose3 camera = loaded_values->at<Pose3>(X(r));
Point3 worldPoint = camera.transform_from(Point3(x, y, z));
loaded_values->insert(L(l), worldPoint); // add point;
}
ProjectionFactor::shared_ptr projectionFactor(new ProjectionFactor(Point2(uL,v), pixel_sigma, X(r), L(l), K));
graph.push_back(projectionFactor);
}
if (currentLandmark != l && views.size() < minimumNumViews) {
// New landmark. Not enough views for previous landmark so move on.
if (debug) cout << "New landmark " << l << " with not enough view for previous one" << std::endl;
currentLandmark = l;
views.clear();
measurements.clear();
} else if (currentLandmark != l) {
// New landmark. Add previous landmark and associated views to new factor
if (debug) cout << "New landmark " << l << " with "<< views.size() << " views for previous landmark " << currentLandmark << std::endl;
if (debug) cout << "Keys ";
BOOST_FOREACH(const Key& k, views) {
allViews.push_back(k);
if (debug) cout << k << " ";
}
if (debug) cout << endl;
if (debug) {
cout << "Measurements ";
BOOST_FOREACH(const Point2& p, measurements) {
cout << p << " ";
}
cout << endl;
}
if (useSmartProjectionFactor) {
SmartFactor::shared_ptr smartFactor(new SmartFactor(measurements, pixel_sigma, views, K));
graph.push_back(smartFactor);
}
numLandmarks++;
currentLandmark = l;
views.clear();
measurements.clear();
} else {
// We have a new view for the current landmark; it is added to the lists below
if (debug) cout << "New view for landmark " << l << " (" << views.size() << " total)" << std::endl;
}
// Record the current view and measurement for this landmark
views.push_back(X(r));
measurements.push_back(Point2(uL,v));
count++;
if(count==100000) {
cout << "Loading graph... " << graph.size() << endl;
count=0;
}
}
cout << "Graph size: " << graph.size() << endl;
/*
// If using only subset of graph, only read in values for keys that are used
// Get all view in the graph and populate poses from VO output
// TODO: Handle loop closures properly
cout << "All Keys (" << allViews.size() << ")" << endl;
allViews.unique();
cout << "All Keys (" << allViews.size() << ")" << endl;
values = *loadPoseValues(input_dir+"camera_poses.txt", allViews);
BOOST_FOREACH(const Key& k, allViews) {
if (debug) cout << k << " ";
}
cout << endl;
*/
cout << "Optimizing... " << endl;
// Optimize!
LevenbergMarquardtParams params;
params.verbosityLM = LevenbergMarquardtParams::TRYLAMBDA;
params.verbosity = NonlinearOptimizerParams::ERROR;
params.lambdaInitial = 1;
params.lambdaFactor = 10;
params.maxIterations = 100;
params.relativeErrorTol = 1e-5;
params.absoluteErrorTol = 1.0;
params.linearSolverType = SuccessiveLinearizationParams::MULTIFRONTAL_CHOLESKY;
LevenbergMarquardtOptimizer optimizer(graph, *loaded_values, params);
Values result;
gttic_(SmartProjectionFactorExample_kitti);
result = optimizer.optimize();
gttoc_(SmartProjectionFactorExample_kitti);
tictoc_finishedIteration_();
cout << "===================================================" << endl;
loaded_values->print("before optimization ");
result.print("results of kitti optimization ");
tictoc_print_();
cout << "===================================================" << endl;
exit(0);
}
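For quick reference, the following is a minimal, self-contained sketch of the same SmartProjectionFactor usage on synthetic data. It assumes the 2013-era API shown in the example above (a constructor taking measurements, a noise model, view keys, and a calibration); the poses and pixel coordinates are made up purely for illustration and are not part of the KITTI example.
#include <gtsam/geometry/Pose3.h>
#include <gtsam/geometry/Cal3_S2.h>
#include <gtsam/nonlinear/Symbol.h>
#include <gtsam/nonlinear/NonlinearFactorGraph.h>
#include <gtsam/nonlinear/Values.h>
#include <gtsam/nonlinear/LevenbergMarquardtOptimizer.h>
#include <gtsam/slam/PriorFactor.h>
#include <gtsam_unstable/slam/SmartProjectionFactor.h>
#include <vector>
int main() {
using namespace gtsam;
using symbol_shorthand::X;
typedef SmartProjectionFactor<Pose3, Point3, Cal3_S2> SmartFactor;
// Calibration and noise models, mirroring the example above
Cal3_S2::shared_ptr K(new Cal3_S2(718.856, 718.856, 0.0, 607.1928, 185.2157));
SharedNoiseModel pixel_sigma = noiseModel::Unit::Create(2);
SharedNoiseModel prior_model = noiseModel::Diagonal::Sigmas(Vector_(6, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01));
// One landmark observed from two poses (pixel values are made up for illustration)
std::vector<Key> views;
std::vector<Point2> measurements;
views.push_back(X(0)); measurements.push_back(Point2(320.0, 240.0));
views.push_back(X(1)); measurements.push_back(Point2(300.0, 238.0));
// Build the graph: priors on both poses plus a single smart projection factor
NonlinearFactorGraph graph;
graph.add(PriorFactor<Pose3>(X(0), Pose3(), prior_model));
graph.add(PriorFactor<Pose3>(X(1), Pose3(Rot3(), Point3(0.5, 0.0, 0.0)), prior_model));
graph.push_back(SmartFactor::shared_ptr(new SmartFactor(measurements, pixel_sigma, views, K)));
// Initial estimates for the two poses only; the landmark is handled internally by the smart factor
Values initial;
initial.insert(X(0), Pose3());
initial.insert(X(1), Pose3(Rot3(), Point3(0.5, 0.0, 0.0)));
Values result = LevenbergMarquardtOptimizer(graph, initial).optimize();
result.print("sketch result ");
return 0;
}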

View File

@ -13,6 +13,7 @@ virtual class gtsam::Point3;
virtual class gtsam::Rot3;
virtual class gtsam::Pose3;
virtual class gtsam::noiseModel::Base;
virtual class gtsam::noiseModel::Gaussian;
virtual class gtsam::imuBias::ConstantBias;
virtual class gtsam::NonlinearFactor;
virtual class gtsam::GaussianFactor;
@ -308,6 +309,19 @@ virtual class BetweenFactor : gtsam::NonlinearFactor {
void serializable() const; // enabling serialization functionality
};
#include <gtsam_unstable/slam/BetweenFactorEM.h>
template<T = {gtsam::Pose2}>
virtual class BetweenFactorEM : gtsam::NonlinearFactor {
BetweenFactorEM(size_t key1, size_t key2, const T& relativePose,
const gtsam::noiseModel::Gaussian* model_inlier, const gtsam::noiseModel::Gaussian* model_outlier,
double prior_inlier, double prior_outlier);
Vector whitenedError(const gtsam::Values& x);
Vector unwhitenedError(const gtsam::Values& x);
Vector calcIndicatorProb(const gtsam::Values& x);
void serializable() const; // enabling serialization functionality
};
#include <gtsam/slam/RangeFactor.h>
template<POSE, POINT>
@ -632,11 +646,10 @@ class ImuFactorPreintegratedMeasurements {
virtual class ImuFactor : gtsam::NonlinearFactor {
ImuFactor(size_t pose_i, size_t vel_i, size_t pose_j, size_t vel_j, size_t bias,
const gtsam::ImuFactorPreintegratedMeasurements& preintegratedMeasurements, Vector gravity, Vector omegaCoriolis,
const gtsam::noiseModel::Base* model);
const gtsam::ImuFactorPreintegratedMeasurements& preintegratedMeasurements, Vector gravity, Vector omegaCoriolis);
ImuFactor(size_t pose_i, size_t vel_i, size_t pose_j, size_t vel_j, size_t bias,
const gtsam::ImuFactorPreintegratedMeasurements& preintegratedMeasurements, Vector gravity, Vector omegaCoriolis,
const gtsam::noiseModel::Base* model, const gtsam::Pose3& body_P_sensor);
const gtsam::Pose3& body_P_sensor);
// Standard Interface
gtsam::ImuFactorPreintegratedMeasurements preintegratedMeasurements() const;

View File

@ -50,27 +50,25 @@ FixedLagSmoother::Result BatchFixedLagSmoother::update(const NonlinearFactorGrap
std::cout << "BatchFixedLagSmoother::update() START" << std::endl;
}
// Add the new factors
insertFactors(newFactors);
// Add the new variables
// Update all of the internal variables with the new information
gttic(augment_system);
// Add the new variables to theta
theta_.insert(newTheta);
// Add new variables to the end of the ordering
BOOST_FOREACH(const Values::ConstKeyValuePair& key_value, newTheta) {
ordering_.push_back(key_value.key);
}
// Augment Delta
std::vector<size_t> dims;
dims.reserve(newTheta.size());
BOOST_FOREACH(const Values::ConstKeyValuePair& key_value, newTheta) {
ordering_.push_back(key_value.key);
dims.push_back(key_value.value.dim());
}
// Augment Delta
delta_.append(dims);
for(size_t i = delta_.size() - dims.size(); i < delta_.size(); ++i) {
delta_[i].setZero();
}
// Add the new factors to the graph, updating the variable index
insertFactors(newFactors);
gttoc(augment_system);
// Update the Timestamps associated with the factor keys
updateKeyTimestampMap(timestamps);
@ -90,18 +88,24 @@ FixedLagSmoother::Result BatchFixedLagSmoother::update(const NonlinearFactorGrap
}
// Reorder
gttic(reorder);
reorder(marginalizableKeys);
gttoc(reorder);
// Optimize
gttic(optimize);
Result result;
if(theta_.size() > 0) {
if(factors_.size() > 0) {
result = optimize();
}
gttoc(optimize);
// Marginalize out old variables.
gttic(marginalize);
if(marginalizableKeys.size() > 0) {
marginalize(marginalizableKeys);
}
gttoc(marginalize);
if(debug) {
std::cout << "BatchFixedLagSmoother::update() FINISH" << std::endl;
@ -188,6 +192,20 @@ void BatchFixedLagSmoother::eraseKeys(const std::set<Key>& keys) {
/* ************************************************************************* */
void BatchFixedLagSmoother::reorder(const std::set<Key>& marginalizeKeys) {
const bool debug = ISDEBUG("BatchFixedLagSmoother reorder");
if(debug) {
std::cout << "BatchFixedLagSmoother::reorder() START" << std::endl;
}
if(debug) {
std::cout << "Marginalizable Keys: ";
BOOST_FOREACH(Key key, marginalizeKeys) {
std::cout << DefaultKeyFormatter(key) << " ";
}
std::cout << std::endl;
}
// Calculate a variable index
VariableIndex variableIndex(*factors_.symbolic(ordering_), ordering_.size());
@ -211,10 +229,25 @@ void BatchFixedLagSmoother::reorder(const std::set<Key>& marginalizeKeys) {
// Permute the ordering, variable index, and deltas
ordering_.permuteInPlace(forwardPermutation);
delta_.permuteInPlace(forwardPermutation);
if(debug) {
ordering_.print("New Ordering: ");
}
if(debug) {
std::cout << "BatchFixedLagSmoother::reorder() FINISH" << std::endl;
}
}
/* ************************************************************************* */
FixedLagSmoother::Result BatchFixedLagSmoother::optimize() {
const bool debug = ISDEBUG("BatchFixedLagSmoother optimize");
if(debug) {
std::cout << "BatchFixedLagSmoother::optimize() START" << std::endl;
}
// Create output result structure
Result result;
result.nonlinearVariables = theta_.size() - linearKeys_.size();
@ -224,7 +257,7 @@ FixedLagSmoother::Result BatchFixedLagSmoother::optimize() {
double lambda = parameters_.lambdaInitial;
double lambdaFactor = parameters_.lambdaFactor;
double lambdaUpperBound = parameters_.lambdaUpperBound;
double lambdaLowerBound = 0.5 / parameters_.lambdaUpperBound;
double lambdaLowerBound = 1.0e-10;
size_t maxIterations = parameters_.maxIterations;
double relativeErrorTol = parameters_.relativeErrorTol;
double absoluteErrorTol = parameters_.absoluteErrorTol;
@ -234,6 +267,17 @@ FixedLagSmoother::Result BatchFixedLagSmoother::optimize() {
Values evalpoint = theta_.retract(delta_, ordering_);
result.error = factors_.error(evalpoint);
// check if we're already close enough
if(result.error <= errorTol) {
if(debug) { std::cout << "BatchFixedLagSmoother::optimize Exiting, as error = " << result.error << " < " << errorTol << std::endl; }
return result;
}
if(debug) {
std::cout << "BatchFixedLagSmoother::optimize linearValues: " << linearKeys_.size() << std::endl;
std::cout << "BatchFixedLagSmoother::optimize Initial error: " << result.error << std::endl;
}
// Use a custom optimization loop so the linearization points can be controlled
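// (Presumably: variables with cached linear/marginal factors (linearKeys_) must be held at their
//  original linearization points so those cached factors stay valid; a stock optimizer would
//  re-linearize everything, hence the hand-rolled loop.)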
double previousError;
VectorValues newDelta;
@ -248,6 +292,9 @@ FixedLagSmoother::Result BatchFixedLagSmoother::optimize() {
// Keep increasing lambda until we make progress
while(true) {
if(debug) { std::cout << "BatchFixedLagSmoother::optimize trying lambda = " << lambda << std::endl; }
// Add prior factors at the current solution
gttic(damp);
GaussianFactorGraph dampedFactorGraph(linearFactorGraph);
@ -279,6 +326,11 @@ FixedLagSmoother::Result BatchFixedLagSmoother::optimize() {
double error = factors_.error(evalpoint);
gttoc(compute_error);
if(debug) {
std::cout << "BatchFixedLagSmoother::optimize linear delta norm = " << newDelta.norm() << std::endl;
std::cout << "BatchFixedLagSmoother::optimize next error = " << error << std::endl;
}
if(error < result.error) {
// Keep this change
// Update the error value
@ -304,22 +356,31 @@ FixedLagSmoother::Result BatchFixedLagSmoother::optimize() {
break;
} else {
// Reject this change
// Increase lambda and continue searching
lambda *= lambdaFactor;
if(lambda > lambdaUpperBound) {
if(lambda >= lambdaUpperBound) {
// The maximum lambda has been used. Print a warning and end the search.
std::cout << "Warning: Levenberg-Marquardt giving up because cannot decrease error with maximum lambda" << std::endl;
break;
} else {
// Increase lambda and continue searching
lambda *= lambdaFactor;
}
}
} // end while
}
gttoc(optimizer_iteration);
if(debug) { std::cout << "BatchFixedLagSmoother::optimize using lambda = " << lambda << std::endl; }
result.iterations++;
} while(result.iterations < maxIterations &&
!checkConvergence(relativeErrorTol, absoluteErrorTol, errorTol, previousError, result.error, NonlinearOptimizerParams::SILENT));
if(debug) { std::cout << "BatchFixedLagSmoother::optimize newError: " << result.error << std::endl; }
if(debug) {
std::cout << "BatchFixedLagSmoother::optimize() FINISH" << std::endl;
}
return result;
}
@ -331,59 +392,63 @@ void BatchFixedLagSmoother::marginalize(const std::set<Key>& marginalizeKeys) {
// from the result of a partial elimination. This function removes the marginalized factors and
// adds the linearized factors back in.
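// For illustration (hypothetical keys): with factors { f(x0), f(x0,x1) } and marginalizeKeys = { x0 },
// both factors are removed and replaced by a single LinearContainerFactor on x1 -- the Gaussian
// marginal obtained by linearizing the removed factors and eliminating x0.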
// Calculate marginal factors on the remaining variables (after marginalizing 'marginalizeKeys')
// Note: It is assumed the ordering already has these keys first
// Create the linear factor graph
GaussianFactorGraph linearFactorGraph = *factors_.linearize(theta_, ordering_);
const bool debug = ISDEBUG("BatchFixedLagSmoother marginalize");
// Create a variable index
VariableIndex variableIndex(linearFactorGraph, ordering_.size());
if(debug) std::cout << "BatchFixedLagSmoother::marginalize Begin" << std::endl;
// Use the variable Index to mark the factors that will be marginalized
if(debug) {
std::cout << "BatchFixedLagSmoother::marginalize Marginalize Keys: ";
BOOST_FOREACH(Key key, marginalizeKeys) {
std::cout << DefaultKeyFormatter(key) << " ";
}
std::cout << std::endl;
}
// Identify all of the factors involving any marginalized variable. These must be removed.
std::set<size_t> removedFactorSlots;
VariableIndex variableIndex(*factors_.symbolic(ordering_), theta_.size());
BOOST_FOREACH(Key key, marginalizeKeys) {
const FastList<size_t>& slots = variableIndex[ordering_.at(key)];
removedFactorSlots.insert(slots.begin(), slots.end());
BOOST_FOREACH(size_t slot, slots) {
removedFactorSlots.insert(slot);
}
}
// Construct an elimination tree to perform sparse elimination
std::vector<EliminationForest::shared_ptr> forest( EliminationForest::Create(linearFactorGraph, variableIndex) );
// This is a tree. Only the top-most nodes/indices need to be eliminated; all of the children will be eliminated automatically
// Find the subset of nodes/keys that must be eliminated
std::set<Index> indicesToEliminate;
BOOST_FOREACH(Key key, marginalizeKeys) {
indicesToEliminate.insert(ordering_.at(key));
if(debug) {
std::cout << "BatchFixedLagSmoother::marginalize Removed Factor Slots: ";
BOOST_FOREACH(size_t slot, removedFactorSlots) {
std::cout << slot << " ";
}
BOOST_FOREACH(Key key, marginalizeKeys) {
EliminationForest::removeChildrenIndices(indicesToEliminate, forest.at(ordering_.at(key)));
std::cout << std::endl;
}
// Eliminate each top-most key, returning a Gaussian Factor on some of the remaining variables
// Convert the marginal factors into Linear Container Factors
// Add the marginal factor variables to the separator
NonlinearFactorGraph marginalFactors;
BOOST_FOREACH(Index index, indicesToEliminate) {
GaussianFactor::shared_ptr gaussianFactor = forest.at(index)->eliminateRecursive(parameters_.getEliminationFunction());
if(gaussianFactor->size() > 0) {
LinearContainerFactor::shared_ptr marginalFactor(new LinearContainerFactor(gaussianFactor, ordering_, theta_));
marginalFactors.push_back(marginalFactor);
// Add the keys associated with the marginal factor to the separator values
BOOST_FOREACH(Key key, *marginalFactor) {
if(!linearKeys_.exists(key)) {
linearKeys_.insert(key, theta_.at(key));
// Add the removed factors to a factor graph
NonlinearFactorGraph removedFactors;
BOOST_FOREACH(size_t slot, removedFactorSlots) {
if(factors_.at(slot)) {
removedFactors.push_back(factors_.at(slot));
}
}
}
}
insertFactors(marginalFactors);
// Remove the marginalized variables and factors from the filter
if(debug) {
PrintSymbolicGraph(removedFactors, "BatchFixedLagSmoother::marginalize Removed Factors: ");
}
// Calculate marginal factors on the remaining keys
NonlinearFactorGraph marginalFactors = calculateMarginalFactors(removedFactors, theta_, marginalizeKeys, parameters_.getEliminationFunction());
if(debug) {
PrintSymbolicGraph(removedFactors, "BatchFixedLagSmoother::marginalize Marginal Factors: ");
}
// Remove marginalized factors from the factor graph
removeFactors(removedFactorSlots);
// Remove marginalized keys from the system
eraseKeys(marginalizeKeys);
// Insert the new marginal factors
insertFactors(marginalFactors);
}
/* ************************************************************************* */
@ -395,6 +460,15 @@ void BatchFixedLagSmoother::PrintKeySet(const std::set<Key>& keys, const std::st
std::cout << std::endl;
}
/* ************************************************************************* */
void BatchFixedLagSmoother::PrintKeySet(const gtsam::FastSet<Key>& keys, const std::string& label) {
std::cout << label;
BOOST_FOREACH(gtsam::Key key, keys) {
std::cout << " " << gtsam::DefaultKeyFormatter(key);
}
std::cout << std::endl;
}
/* ************************************************************************* */
void BatchFixedLagSmoother::PrintSymbolicFactor(const NonlinearFactor::shared_ptr& factor) {
std::cout << "f(";
@ -523,5 +597,102 @@ void BatchFixedLagSmoother::EliminationForest::removeChildrenIndices(std::set<In
}
}
/* ************************************************************************* */
NonlinearFactorGraph BatchFixedLagSmoother::calculateMarginalFactors(const NonlinearFactorGraph& graph, const Values& theta,
const std::set<Key>& marginalizeKeys, const GaussianFactorGraph::Eliminate& eliminateFunction) {
const bool debug = ISDEBUG("BatchFixedLagSmoother calculateMarginalFactors");
if(debug) std::cout << "BatchFixedLagSmoother::calculateMarginalFactors START" << std::endl;
if(debug) PrintKeySet(marginalizeKeys, "BatchFixedLagSmoother::calculateMarginalFactors Marginalize Keys: ");
// Get the set of all keys involved in the factor graph
FastSet<Key> allKeys(graph.keys());
if(debug) PrintKeySet(allKeys, "BatchFixedLagSmoother::calculateMarginalFactors All Keys: ");
// Calculate the set of RemainingKeys = AllKeys \ MarginalizeKeys (set difference)
FastSet<Key> remainingKeys;
std::set_difference(allKeys.begin(), allKeys.end(), marginalizeKeys.begin(), marginalizeKeys.end(), std::inserter(remainingKeys, remainingKeys.end()));
if(debug) PrintKeySet(remainingKeys, "BatchFixedLagSmoother::calculateMarginalFactors Remaining Keys: ");
if(marginalizeKeys.size() == 0) {
// There are no keys to marginalize. Simply return the input factors
if(debug) std::cout << "BatchFixedLagSmoother::calculateMarginalFactors FINISH" << std::endl;
return graph;
} else {
// Create a subset of theta that only contains the required keys
Values values;
BOOST_FOREACH(Key key, allKeys) {
values.insert(key, theta.at(key));
}
// Calculate a constrained ordering: [MarginalizedKeys RemainingKeys]
std::map<Key, int> constraints;
BOOST_FOREACH(Key key, marginalizeKeys) {
constraints[key] = 0;
}
BOOST_FOREACH(Key key, remainingKeys) {
constraints[key] = 1;
}
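// With constrained COLAMD, variables in lower-numbered groups are eliminated first, so group 0
// (the keys being marginalized) is ordered ahead of group 1 (the keys that remain)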
Ordering ordering = *graph.orderingCOLAMDConstrained(values, constraints);
// Create the linear factor graph
GaussianFactorGraph linearFactorGraph = *graph.linearize(values, ordering);
// Construct a variable index
VariableIndex variableIndex(linearFactorGraph, ordering.size());
// Construct an elimination tree to perform sparse elimination
std::vector<EliminationForest::shared_ptr> forest( BatchFixedLagSmoother::EliminationForest::Create(linearFactorGraph, variableIndex) );
// This is a forest. Only the top-most node/index of each tree needs to be eliminated; all of the children will be eliminated automatically
// Find the subset of nodes/keys that must be eliminated
std::set<Index> indicesToEliminate;
BOOST_FOREACH(Key key, marginalizeKeys) {
indicesToEliminate.insert(ordering.at(key));
}
BOOST_FOREACH(Key key, marginalizeKeys) {
EliminationForest::removeChildrenIndices(indicesToEliminate, forest.at(ordering.at(key)));
}
if(debug) PrintKeySet(indicesToEliminate, "BatchFixedLagSmoother::calculateMarginalFactors Indices To Eliminate: ");
// Eliminate each top-most key, returning a Gaussian Factor on some of the remaining variables
// Convert the marginal factors into Linear Container Factors
NonlinearFactorGraph marginalFactors;
BOOST_FOREACH(Index index, indicesToEliminate) {
GaussianFactor::shared_ptr gaussianFactor = forest.at(index)->eliminateRecursive(eliminateFunction);
if(gaussianFactor->size() > 0) {
LinearContainerFactor::shared_ptr marginalFactor(new LinearContainerFactor(gaussianFactor, ordering, values));
marginalFactors.push_back(marginalFactor);
if(debug) {
std::cout << "BatchFixedLagSmoother::calculateMarginalFactors Marginal Factor: ";
PrintSymbolicFactor(marginalFactor);
}
}
}
// Also add any remaining factors that were unaffected by marginalizing out the selected variables.
// These are part of the marginal on the remaining variables as well.
BOOST_FOREACH(Key key, remainingKeys) {
BOOST_FOREACH(const GaussianFactor::shared_ptr& gaussianFactor, forest.at(ordering.at(key))->factors()) {
LinearContainerFactor::shared_ptr marginalFactor(new LinearContainerFactor(gaussianFactor, ordering, values));
marginalFactors.push_back(marginalFactor);
if(debug) {
std::cout << "BatchFixedLagSmoother::calculateMarginalFactors Remaining Factor: ";
PrintSymbolicFactor(marginalFactor);
}
}
}
if(debug) PrintSymbolicGraph(marginalFactors, "BatchFixedLagSmoother::calculateMarginalFactors All Marginal Factors: ");
if(debug) std::cout << "BatchFixedLagSmoother::calculateMarginalFactors FINISH" << std::endl;
return marginalFactors;
}
}
/* ************************************************************************* */
} /// namespace gtsam

View File

@ -205,9 +205,13 @@ protected:
static void removeChildrenIndices(std::set<Index>& indices, const EliminationForest::shared_ptr& tree);
};
static NonlinearFactorGraph calculateMarginalFactors(const NonlinearFactorGraph& graph, const Values& theta,
const std::set<Key>& marginalizeKeys, const GaussianFactorGraph::Eliminate& eliminateFunction);
private:
/** Private methods for printing debug information */
static void PrintKeySet(const std::set<Key>& keys, const std::string& label);
static void PrintKeySet(const gtsam::FastSet<Key>& keys, const std::string& label);
static void PrintSymbolicFactor(const NonlinearFactor::shared_ptr& factor);
static void PrintSymbolicFactor(const GaussianFactor::shared_ptr& factor, const Ordering& ordering);
static void PrintSymbolicGraph(const NonlinearFactorGraph& graph, const std::string& label);

View File

@ -15,7 +15,6 @@
* Concurrent Filtering and Smoothing interface.
* @author Stephen Williams
*/
#include <gtsam_unstable/nonlinear/ConcurrentBatchFilter.h>
#include <gtsam/nonlinear/LinearContainerFactor.h>
#include <gtsam/linear/GaussianJunctionTree.h>
@ -24,14 +23,96 @@
namespace gtsam {
/* ************************************************************************* */
void ConcurrentBatchFilter::PrintNonlinearFactor(const NonlinearFactor::shared_ptr& factor,
const std::string& indent, const KeyFormatter& keyFormatter) {
std::cout << indent;
if(factor) {
if(boost::dynamic_pointer_cast<LinearContainerFactor>(factor)) {
std::cout << "l( ";
} else {
std::cout << "f( ";
}
BOOST_FOREACH(Key key, *factor) {
std::cout << keyFormatter(key) << " ";
}
std::cout << ")" << std::endl;
} else {
std::cout << "{ NULL }" << std::endl;
}
}
/* ************************************************************************* */
void ConcurrentBatchFilter::PrintNonlinearFactorGraph(const NonlinearFactorGraph& factors,
const std::string& indent, const std::string& title, const KeyFormatter& keyFormatter) {
std::cout << indent << title << std::endl;
BOOST_FOREACH(const NonlinearFactor::shared_ptr& factor, factors) {
PrintNonlinearFactor(factor, indent + " ", keyFormatter);
}
}
/* ************************************************************************* */
void ConcurrentBatchFilter::PrintNonlinearFactorGraph(const NonlinearFactorGraph& factors, const std::vector<size_t>& slots,
const std::string& indent, const std::string& title, const KeyFormatter& keyFormatter) {
std::cout << indent << title << std::endl;
BOOST_FOREACH(size_t slot, slots) {
PrintNonlinearFactor(factors.at(slot), indent + " ", keyFormatter);
}
}
/* ************************************************************************* */
void ConcurrentBatchFilter::PrintLinearFactor(const GaussianFactor::shared_ptr& factor, const Ordering& ordering,
const std::string& indent, const KeyFormatter& keyFormatter) {
std::cout << indent;
if(factor) {
JacobianFactor::shared_ptr jf = boost::dynamic_pointer_cast<JacobianFactor>(factor);
HessianFactor::shared_ptr hf = boost::dynamic_pointer_cast<HessianFactor>(factor);
if(jf) {
std::cout << "j( ";
} else if(hf) {
std::cout << "h( ";
} else {
std::cout << "g( ";
}
BOOST_FOREACH(Index index, *factor) {
std::cout << keyFormatter(ordering.key(index)) << " ";
}
std::cout << ")" << std::endl;
} else {
std::cout << "{ NULL }" << std::endl;
}
}
/* ************************************************************************* */
void ConcurrentBatchFilter::PrintLinearFactorGraph(const GaussianFactorGraph& factors, const Ordering& ordering,
const std::string& indent, const std::string& title, const KeyFormatter& keyFormatter) {
std::cout << indent << title << std::endl;
BOOST_FOREACH(const GaussianFactor::shared_ptr& factor, factors) {
PrintLinearFactor(factor, ordering, indent + " ", keyFormatter);
}
}
/* ************************************************************************* */
template<>
void ConcurrentBatchFilter::PrintKeys<Values>(const Values& values, const std::string& indent, const std::string& title, const KeyFormatter& keyFormatter) {
FastList<Key> keys = values.keys();
PrintKeys(keys, indent, title, keyFormatter);
}
/* ************************************************************************* */
template<>
void ConcurrentBatchFilter::PrintKeys<NonlinearFactorGraph>(const NonlinearFactorGraph& graph, const std::string& indent, const std::string& title, const KeyFormatter& keyFormatter) {
FastSet<Key> keys = graph.keys();
PrintKeys(keys, indent, title, keyFormatter);
}
/* ************************************************************************* */
void ConcurrentBatchFilter::print(const std::string& s, const KeyFormatter& keyFormatter) const {
std::cout << s;
std::cout << " Factors:" << std::endl;
BOOST_FOREACH(const NonlinearFactor::shared_ptr& factor, factors_) {
PrintNonlinearFactor(factor, " ", keyFormatter);
}
theta_.print("Values:\n");
PrintNonlinearFactorGraph(factors_, " ", "Factors:");
PrintKeys(theta_, " ", "Values:");
PrintNonlinearFactorGraph(smootherFactors_, " ", "Cached Smoother Factors:");
PrintKeys(smootherValues_, " ", "Cached Smoother Values:");
}
/* ************************************************************************* */
@ -42,17 +123,30 @@ bool ConcurrentBatchFilter::equals(const ConcurrentFilter& rhs, double tol) cons
&& theta_.equals(filter->theta_)
&& ordering_.equals(filter->ordering_)
&& delta_.equals(filter->delta_)
&& separatorValues_.equals(filter->separatorValues_);
&& separatorValues_.equals(filter->separatorValues_)
&& smootherSummarization_.equals(filter->smootherSummarization_)
&& smootherShortcut_.equals(filter->smootherShortcut_)
&& filterSummarization_.equals(filter->filterSummarization_)
&& smootherFactors_.equals(filter->smootherFactors_)
&& smootherValues_.equals(filter->smootherValues_);
}
/* ************************************************************************* */
ConcurrentBatchFilter::Result ConcurrentBatchFilter::update(const NonlinearFactorGraph& newFactors, const Values& newTheta, const boost::optional<FastList<Key> >& keysToMove) {
ConcurrentBatchFilter::Result ConcurrentBatchFilter::update(const NonlinearFactorGraph& newFactors, const Values& newTheta,
const boost::optional<FastList<Key> >& keysToMove, const boost::optional< std::vector<size_t> >& removeFactorIndices) {
gttic(update);
// const bool debug = ISDEBUG("ConcurrentBatchFilter update");
const bool debug = false;
if(debug) std::cout << "ConcurrentBatchFilter::update Begin" << std::endl;
// Create the return result meta-data
Result result;
if(debug) std::cout << "ConcurrentBatchFilter::update Augmenting System ..." << std::endl;
// Update all of the internal variables with the new information
gttic(augment_system);
// Add the new variables to theta
@ -70,26 +164,37 @@ ConcurrentBatchFilter::Result ConcurrentBatchFilter::update(const NonlinearFacto
delta_[i].setZero();
}
// Add the new factors to the graph, updating the variable index
insertFactors(newFactors);
result.newFactorsIndices = insertFactors(newFactors);
// Remove any user-specified factors from the graph
if(removeFactorIndices)
removeFactors(*removeFactorIndices);
gttoc(augment_system);
if(debug) std::cout << "ConcurrentBatchFilter::update Reordering System ..." << std::endl;
// Reorder the system to ensure efficient optimization (and marginalization) performance
gttic(reorder);
reorder(keysToMove);
gttoc(reorder);
if(debug) std::cout << "ConcurrentBatchFilter::update Optimizing System ..." << std::endl;
// Optimize the factors using a modified version of L-M
gttic(optimize);
if(factors_.size() > 0) {
result = optimize(factors_, theta_, ordering_, delta_, separatorValues_, parameters_);
optimize(factors_, theta_, ordering_, delta_, separatorValues_, parameters_, result);
}
gttoc(optimize);
gttic(marginalize);
if(debug) std::cout << "ConcurrentBatchFilter::update Moving Separator ..." << std::endl;
gttic(move_separator);
if(keysToMove && keysToMove->size() > 0){
marginalize(*keysToMove);
moveSeparator(*keysToMove);
}
gttoc(marginalize);
gttoc(move_separator);
if(debug) std::cout << "ConcurrentBatchFilter::update End" << std::endl;
gttoc(update);
@ -105,115 +210,92 @@ void ConcurrentBatchFilter::presync() {
}
/* ************************************************************************* */
void ConcurrentBatchFilter::synchronize(const NonlinearFactorGraph& summarizedFactors, const Values& separatorValues) {
void ConcurrentBatchFilter::synchronize(const NonlinearFactorGraph& smootherSummarization, const Values& smootherSummarizationValues) {
gttic(synchronize);
// Remove the previous smoother summarization
removeFactors(smootherSummarizationSlots_);
// const bool debug = ISDEBUG("ConcurrentBatchFilter synchronize");
const bool debug = false;
// Create a factor graph containing the new smoother summarization, the factors to be sent to the smoother,
// and all of the filter factors. This is the set of factors on the filter side since the smoother started
// its previous update cycle.
if(debug) std::cout << "ConcurrentBatchFilter::synchronize Begin" << std::endl;
if(debug) { PrintNonlinearFactorGraph(smootherSummarization_, "ConcurrentBatchFilter::synchronize ", "Previous Smoother Summarization:", DefaultKeyFormatter); }
#ifndef NDEBUG
std::set<Key> oldKeys = smootherSummarization_.keys();
std::set<Key> newKeys = smootherSummarization.keys();
assert(oldKeys.size() == newKeys.size());
assert(std::equal(oldKeys.begin(), oldKeys.end(), newKeys.begin()));
#endif
// Update the smoother summarization on the old separator
smootherSummarization_ = smootherSummarization;
if(debug) { PrintNonlinearFactorGraph(smootherSummarization_, "ConcurrentBatchFilter::synchronize ", "Updated Smoother Summarization:", DefaultKeyFormatter); }
// Find the set of new separator keys
FastSet<Key> newSeparatorKeys;
BOOST_FOREACH(const Values::ConstKeyValuePair& key_value, separatorValues_) {
newSeparatorKeys.insert(key_value.key);
}
if(debug) { PrintKeys(newSeparatorKeys, "ConcurrentBatchFilter::synchronize ", "Current Separator Keys:"); }
// Use the shortcut to calculate an updated marginal on the current separator
{
// Combine just the shortcut and the previousSmootherSummarization
NonlinearFactorGraph graph;
graph.push_back(factors_);
graph.push_back(smootherFactors_);
graph.push_back(summarizedFactors);
graph.push_back(smootherSummarization_);
graph.push_back(smootherShortcut_);
Values values;
values.insert(theta_);
values.insert(smootherValues_);
values.update(separatorValues); // ensure the smoother summarized factors are linearized around the values in the smoother
if(factors_.size() > 0) {
// Perform an optional optimization on the to-be-sent-to-the-smoother factors
if(relin_) {
// Create ordering and delta
Ordering ordering = graph.orderingCOLAMD();
VectorValues delta = values.zeroVectors();
// Optimize this graph using a modified version of L-M
optimize(graph, values, ordering, delta, separatorValues, parameters_);
// Update filter theta and delta
BOOST_FOREACH(const Values::ConstKeyValuePair& key_value, theta_) {
theta_.update(key_value.key, values.at(key_value.key));
delta_.at(ordering_.at(key_value.key)) = delta.at(ordering.at(key_value.key));
}
// Update the fixed linearization points (since they just changed)
values.insert(smootherSummarizationValues);
BOOST_FOREACH(const Values::ConstKeyValuePair& key_value, separatorValues_) {
separatorValues_.update(key_value.key, values.at(key_value.key));
if(!values.exists(key_value.key)) {
values.insert(key_value.key, key_value.value);
}
}
// Calculate the summarized factor on just the new separator keys
smootherSummarization_ = internal::calculateMarginalFactors(graph, values, newSeparatorKeys, parameters_.getEliminationFunction());
// Remove the old factors on the separator and insert new
removeFactors(separatorSummarizationSlots_);
separatorSummarizationSlots_ = insertFactors(smootherSummarization_);
// Clear the smoother shortcut
smootherShortcut_.resize(0);
}
// Create separate ordering constraints that force either the filter keys or the smoother keys to the front
typedef std::map<Key, int> OrderingConstraints;
OrderingConstraints filterConstraints;
OrderingConstraints smootherConstraints;
BOOST_FOREACH(const Values::ConstKeyValuePair& key_value, theta_) { /// the filter keys
filterConstraints[key_value.key] = 0;
smootherConstraints[key_value.key] = 1;
if(debug) { PrintNonlinearFactorGraph(factors_, separatorSummarizationSlots_, "ConcurrentBatchFilter::synchronize ", "Current Separator Summarization:", DefaultKeyFormatter); }
// Calculate the marginal on the new separator from the filter factors
// Note: This could also be done during each filter update so it would simply be available
{
// Calculate the summarized factor on just the new separator keys (from the filter side)
// Copy all of the factors from the filter, not including the smoother summarization
NonlinearFactorGraph factors;
for(size_t slot = 0; slot < factors_.size(); ++slot) {
if(factors_.at(slot) && std::find(separatorSummarizationSlots_.begin(), separatorSummarizationSlots_.end(), slot) == separatorSummarizationSlots_.end()) {
factors.push_back(factors_.at(slot));
}
BOOST_FOREACH(const Values::ConstKeyValuePair& key_value, smootherValues_) { /// the smoother keys
filterConstraints[key_value.key] = 1;
smootherConstraints[key_value.key] = 0;
}
BOOST_FOREACH(const Values::ConstKeyValuePair& key_value, separatorValues_) { /// the *new* separator keys
filterConstraints[key_value.key] = 2;
smootherConstraints[key_value.key] = 2;
filterSummarization_ = internal::calculateMarginalFactors(factors, theta_, newSeparatorKeys, parameters_.getEliminationFunction());
}
// Generate separate orderings that place the filter keys or the smoother keys first
// TODO: This is convenient, but it recalculates the variable index each time
Ordering filterOrdering = graph.orderingCOLAMDConstrained(filterConstraints);
Ordering smootherOrdering = graph.orderingCOLAMDConstrained(smootherConstraints);
if(debug) { PrintNonlinearFactorGraph(filterSummarization_, "ConcurrentBatchFilter::synchronize ", "Updated Filter Summarization:", DefaultKeyFormatter); }
// Extract the set of filter keys and smoother keys
std::set<Key> filterKeys;
std::set<Key> separatorKeys;
std::set<Key> smootherKeys;
BOOST_FOREACH(const Values::ConstKeyValuePair& key_value, theta_) {
filterKeys.insert(key_value.key);
}
BOOST_FOREACH(const Values::ConstKeyValuePair& key_value, separatorValues_) {
separatorKeys.insert(key_value.key);
filterKeys.erase(key_value.key);
}
BOOST_FOREACH(const Values::ConstKeyValuePair& key_value, smootherValues_) {
smootherKeys.insert(key_value.key);
}
if(debug) std::cout << "ConcurrentBatchFilter::synchronize End" << std::endl;
// Calculate the marginal on the new separator from the filter factors. This is performed by marginalizing out
// all of the filter variables that are not part of the new separator. This filter summarization will then be
// sent to the smoother.
filterSummarization_ = marginalize(graph, values, filterOrdering, filterKeys, parameters_.getEliminationFunction());
// The filter summarization should also include any nonlinear factors that involve only the separator variables.
// Otherwise the smoother will be missing this information
BOOST_FOREACH(const NonlinearFactor::shared_ptr& factor, factors_) {
if(factor) {
NonlinearFactor::const_iterator key = factor->begin();
while((key != factor->end()) && (std::binary_search(separatorKeys.begin(), separatorKeys.end(), *key))) {
++key;
}
if(key == factor->end()) {
filterSummarization_.push_back(factor);
}
}
}
// Calculate the marginal on the new separator from the smoother factors. This is performed by marginalizing out
// all of the smoother variables that are not part of the new separator. This smoother summarization will be
// stored locally for use in the filter
smootherSummarizationSlots_ = insertFactors( marginalize(graph, values, smootherOrdering, smootherKeys, parameters_.getEliminationFunction()) );
}
gttoc(synchronize);
}
/* ************************************************************************* */
void ConcurrentBatchFilter::getSummarizedFactors(NonlinearFactorGraph& summarizedFactors, Values& separatorValues) {
void ConcurrentBatchFilter::getSummarizedFactors(NonlinearFactorGraph& filterSummarization, Values& filterSummarizationValues) {
gttic(get_summarized_factors);
// Copy the previously calculated filter summarization factors into the output
summarizedFactors.push_back(filterSummarization_);
separatorValues.insert(separatorValues_);
filterSummarization.push_back(filterSummarization_);
filterSummarizationValues.insert(separatorValues_);
gttoc(get_summarized_factors);
}
@ -320,11 +402,16 @@ void ConcurrentBatchFilter::reorder(const boost::optional<FastList<Key> >& keysT
}
/* ************************************************************************* */
ConcurrentBatchFilter::Result ConcurrentBatchFilter::optimize(const NonlinearFactorGraph& factors, Values& theta, const Ordering& ordering,
VectorValues& delta, const Values& linearValues, const LevenbergMarquardtParams& parameters) {
void ConcurrentBatchFilter::optimize(const NonlinearFactorGraph& factors, Values& theta, const Ordering& ordering,
VectorValues& delta, const Values& linearValues, const LevenbergMarquardtParams& parameters,
ConcurrentBatchFilter::Result& result) {
// const bool debug = ISDEBUG("ConcurrentBatchFilter optimize");
const bool debug = false;
if(debug) std::cout << "ConcurrentBatchFilter::optimize Begin" << std::endl;
// Create output result structure
Result result;
result.nonlinearVariables = theta.size() - linearValues.size();
result.linearVariables = linearValues.size();
@ -332,16 +419,26 @@ ConcurrentBatchFilter::Result ConcurrentBatchFilter::optimize(const NonlinearFac
double lambda = parameters.lambdaInitial;
double lambdaFactor = parameters.lambdaFactor;
double lambdaUpperBound = parameters.lambdaUpperBound;
double lambdaLowerBound = 0.5 / parameters.lambdaUpperBound;
double lambdaLowerBound = 1.0e-10;
size_t maxIterations = parameters.maxIterations;
double relativeErrorTol = parameters.relativeErrorTol;
double absoluteErrorTol = parameters.absoluteErrorTol;
double errorTol = parameters.errorTol;
// Create a Values that holds the current evaluation point
Values evalpoint = theta.retract(delta);
Values evalpoint = theta.retract(delta, ordering);
result.error = factors.error(evalpoint);
// check if we're already close enough
if(result.error <= errorTol) {
if(debug) { std::cout << "Exiting, as error = " << result.error << " < " << errorTol << std::endl; }
}
if(debug) {
std::cout << "linearValues: " << linearValues.size() << std::endl;
std::cout << "Initial error: " << result.error << std::endl;
}
// Use a custom optimization loop so the linearization points can be controlled
double previousError;
VectorValues newDelta;
@ -352,20 +449,24 @@ ConcurrentBatchFilter::Result ConcurrentBatchFilter::optimize(const NonlinearFac
gttic(optimizer_iteration);
{
// Linearize graph around the linearization point
GaussianFactorGraph linearFactorGraph = *factors.linearize(theta);
GaussianFactorGraph linearFactorGraph = *factors.linearize(theta, ordering);
// Keep increasing lambda until we make progress
while(true) {
if(debug) { std::cout << "trying lambda = " << lambda << std::endl; }
// Add prior factors at the current solution
gttic(damp);
GaussianFactorGraph dampedFactorGraph(linearFactorGraph);
dampedFactorGraph.reserve(linearFactorGraph.size() + delta.size());
double sigma = 1.0 / std::sqrt(lambda);
{
// for each of the variables, add a prior at the current solution
for(size_t j=0; j<delta.size(); ++j) {
Matrix A = lambda * eye(delta[j].size());
Vector b = lambda * delta[j];
SharedDiagonal model = noiseModel::Unit::Create(delta[j].size());
Matrix A = eye(delta[j].size());
Vector b = delta[j];
SharedDiagonal model = noiseModel::Isotropic::Sigma(delta[j].size(), sigma);
GaussianFactor::shared_ptr prior(new JacobianFactor(j, A, b, model));
dampedFactorGraph.push_back(prior);
}
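// The damping priors above implement standard Levenberg-Marquardt damping: each prior has
// A = I, b = delta_j and sigma = 1/sqrt(lambda), so its whitened error is
//   0.5 * lambda * || x_j - delta_j ||^2,
// which adds lambda * I to the Hessian diagonal while keeping the minimizer at the current delta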
@ -377,7 +478,7 @@ ConcurrentBatchFilter::Result ConcurrentBatchFilter::optimize(const NonlinearFac
// Solve Damped Gaussian Factor Graph
newDelta = GaussianJunctionTree(dampedFactorGraph).optimize(parameters.getEliminationFunction());
// update the evalpoint with the new delta
evalpoint = theta.retract(newDelta);
evalpoint = theta.retract(newDelta, ordering);
gttoc(solve);
// Evaluate the new nonlinear error
@ -385,6 +486,11 @@ ConcurrentBatchFilter::Result ConcurrentBatchFilter::optimize(const NonlinearFac
double error = factors.error(evalpoint);
gttoc(compute_error);
if(debug) {
std::cout << "linear delta norm = " << newDelta.norm() << std::endl;
std::cout << "next error = " << error << std::endl;
}
if(error < result.error) {
// Keep this change
// Update the error value
@ -410,119 +516,167 @@ ConcurrentBatchFilter::Result ConcurrentBatchFilter::optimize(const NonlinearFac
break;
} else {
// Reject this change
// Increase lambda and continue searching
lambda *= lambdaFactor;
if(lambda > lambdaUpperBound) {
if(lambda >= lambdaUpperBound) {
// The maximum lambda has been used. Print a warning and end the search.
std::cout << "Warning: Levenberg-Marquardt giving up because cannot decrease error with maximum lambda" << std::endl;
break;
} else {
// Increase lambda and continue searching
lambda *= lambdaFactor;
}
}
} // end while
}
gttoc(optimizer_iteration);
if(debug) { std::cout << "using lambda = " << lambda << std::endl; }
result.iterations++;
} while(result.iterations < maxIterations &&
!checkConvergence(relativeErrorTol, absoluteErrorTol, errorTol, previousError, result.error, NonlinearOptimizerParams::SILENT));
return result;
if(debug) { std::cout << "newError: " << result.error << std::endl; }
if(debug) std::cout << "ConcurrentBatchFilter::optimize End" << std::endl;
}
/* ************************************************************************* */
void ConcurrentBatchFilter::marginalize(const FastList<Key>& keysToMove) {
// In order to marginalize out the selected variables, the factors involved in those variables
// must be identified and removed. Also, the effect of those removed factors on the
// remaining variables needs to be accounted for. This will be done with linear container factors
// from the result of a partial elimination. This function removes the marginalized factors and
// adds the linearized factors back in.
void ConcurrentBatchFilter::moveSeparator(const FastList<Key>& keysToMove) {
// In order to move the separator, we need to calculate the marginal information on the new
// separator from all of the factors on the smoother side (both the factors actually in the
// smoother and the ones to be transitioned to the smoother but stored in the filter).
// This is exactly the same operation that is performed by a fixed-lag smoother, calculating
// a marginal factor from the variables outside the smoother window.
//
// However, for the concurrent system, we would like to calculate this marginal in a particular
// way, such that an intermediate term is produced that provides a "shortcut" between the old
// separator (as defined in the smoother) and the new separator. This will allow us to quickly
// update the new separator with changes at the old separator (from the smoother)
// TODO: This is currently not very efficient: multiple calls to graph.keys(), etc.
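// For illustration (hypothetical keys): if the current separator is {x2}, keysToMove = {x3}, and the
// factors being shipped to the smoother also involve x4, then the new separator becomes
// {x2, x4} \ keysToMove. The shortcut is a summarized factor over old-separator and new-separator
// keys; combined with the smoother summarization on the old separator, it yields the marginal on
// the new separator without re-eliminating the variables that were moved.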
// Calculate marginal factors on the remaining variables (after marginalizing 'keyToMove')
// Note: It is assumed the ordering already has these keys first
// const bool debug = ISDEBUG("ConcurrentBatchFilter moveSeparator");
const bool debug = false;
// Create the linear factor graph
GaussianFactorGraph linearFactorGraph = *factors_.linearize(theta_);
if(debug) std::cout << "ConcurrentBatchFilter::moveSeparator Begin" << std::endl;
// Calculate the variable index
VariableIndex variableIndex(linearFactorGraph, ordering_.size());
if(debug) { PrintKeys(keysToMove, "ConcurrentBatchFilter::moveSeparator ", "Keys To Move:", DefaultKeyFormatter); }
// Use the variable Index to mark the factors that will be marginalized
std::set<size_t> removedFactorSlots;
// Identify all of the new factors to be sent to the smoother (any factor involving keysToMove)
std::vector<size_t> removedFactorSlots;
VariableIndex variableIndex(*factors_.symbolic(ordering_), theta_.size());
BOOST_FOREACH(Key key, keysToMove) {
const FastList<size_t>& slots = variableIndex[ordering_.at(key)];
removedFactorSlots.insert(slots.begin(), slots.end());
removedFactorSlots.insert(removedFactorSlots.end(), slots.begin(), slots.end());
}
// Sort and remove duplicates
std::sort(removedFactorSlots.begin(), removedFactorSlots.end());
removedFactorSlots.erase(std::unique(removedFactorSlots.begin(), removedFactorSlots.end()), removedFactorSlots.end());
// Remove any linear/marginal factor that made it into the set
BOOST_FOREACH(size_t index, separatorSummarizationSlots_) {
removedFactorSlots.erase(std::remove(removedFactorSlots.begin(), removedFactorSlots.end(), index), removedFactorSlots.end());
}
// Construct an elimination tree to perform sparse elimination
std::vector<EliminationForest::shared_ptr> forest( EliminationForest::Create(linearFactorGraph, variableIndex) );
// TODO: Make this compact
if(debug) {
std::cout << "ConcurrentBatchFilter::moveSeparator Removed Factor Slots: ";
BOOST_FOREACH(size_t slot, removedFactorSlots) {
std::cout << slot << " ";
}
std::cout << std::endl;
}
// This is a tree. Only the top-most nodes/indices need to be eliminated; all of the children will be eliminated automatically
// Find the subset of nodes/keys that must be eliminated
std::set<Index> indicesToEliminate;
BOOST_FOREACH(Key key, keysToMove) {
indicesToEliminate.insert(ordering_.at(key));
// Add these factors to a factor graph
NonlinearFactorGraph removedFactors;
BOOST_FOREACH(size_t slot, removedFactorSlots) {
if(factors_.at(slot)) {
removedFactors.push_back(factors_.at(slot));
}
}
if(debug) {
PrintNonlinearFactorGraph(removedFactors, "ConcurrentBatchFilter::moveSeparator ", "Removed Factors:", DefaultKeyFormatter);
PrintNonlinearFactorGraph(smootherShortcut_, "ConcurrentBatchFilter::moveSeparator ", "Old Shortcut:", DefaultKeyFormatter);
PrintKeys(smootherShortcut_, "ConcurrentBatchFilter::moveSeparator ", "Old Shortcut Keys:", DefaultKeyFormatter);
PrintKeys(separatorValues_, "ConcurrentBatchFilter::moveSeparator ", "Previous Separator Keys:", DefaultKeyFormatter);
}
// Calculate the set of new separator keys: AffectedKeys + PreviousSeparatorKeys - KeysToMove
FastSet<Key> newSeparatorKeys = removedFactors.keys();
BOOST_FOREACH(const Values::ConstKeyValuePair& key_value, separatorValues_) {
newSeparatorKeys.insert(key_value.key);
}
BOOST_FOREACH(Key key, keysToMove) {
EliminationForest::removeChildrenIndices(indicesToEliminate, forest.at(ordering_.at(key)));
newSeparatorKeys.erase(key);
}
// Eliminate each top-most key, returning a Gaussian Factor on some of the remaining variables
// Convert the marginal factors into Linear Container Factors
// Add the marginal factor variables to the separator
NonlinearFactorGraph marginalFactors;
BOOST_FOREACH(Index index, indicesToEliminate) {
GaussianFactor::shared_ptr gaussianFactor = forest.at(index)->eliminateRecursive(parameters_.getEliminationFunction());
if(gaussianFactor->size() > 0) {
LinearContainerFactor::shared_ptr marginalFactor(new LinearContainerFactor(gaussianFactor, theta_));
marginalFactors.push_back(marginalFactor);
// Add the keys associated with the marginal factor to the separator values
BOOST_FOREACH(Key key, *marginalFactor) {
if(!separatorValues_.exists(key)) {
if(debug) { PrintKeys(newSeparatorKeys, "ConcurrentBatchFilter::moveSeparator ", "New Separator Keys:", DefaultKeyFormatter); }
// Calculate the set of shortcut keys: NewSeparatorKeys + OldSeparatorKeys
FastSet<Key> shortcutKeys = newSeparatorKeys;
BOOST_FOREACH(Key key, smootherSummarization_.keys()) {
shortcutKeys.insert(key);
}
if(debug) { PrintKeys(shortcutKeys, "ConcurrentBatchFilter::moveSeparator ", "Shortcut Keys:", DefaultKeyFormatter); }
// Calculate a new shortcut
{
// Combine the previous shortcut factor with all of the new factors being sent to the smoother
NonlinearFactorGraph graph;
graph.push_back(removedFactors);
graph.push_back(smootherShortcut_);
Values values;
values.insert(smootherValues_);
values.insert(theta_);
// Calculate the summarized factor on the shortcut keys
smootherShortcut_ = internal::calculateMarginalFactors(graph, values, shortcutKeys, parameters_.getEliminationFunction());
}
if(debug) {
PrintNonlinearFactorGraph(smootherShortcut_, "ConcurrentBatchFilter::moveSeparator ", "New Shortcut:", DefaultKeyFormatter);
PrintNonlinearFactorGraph(smootherSummarization_, "ConcurrentBatchFilter::moveSeparator ", "Smoother Summarization:", DefaultKeyFormatter);
PrintNonlinearFactorGraph(factors_, separatorSummarizationSlots_, "ConcurrentBatchFilter::moveSeparator ", "Current Separator Summarization:", DefaultKeyFormatter);
}
// Calculate the new smoother summarization on the new separator using the shortcut
NonlinearFactorGraph separatorSummarization;
{
// Combine just the shortcut and the previousSmootherSummarization
NonlinearFactorGraph graph;
graph.push_back(smootherSummarization_);
graph.push_back(smootherShortcut_);
Values values;
values.insert(smootherValues_);
values.insert(theta_);
// Calculate the summarized factor on just the new separator
separatorSummarization = internal::calculateMarginalFactors(graph, values, newSeparatorKeys, parameters_.getEliminationFunction());
}
// Remove the previous marginal factors and insert the new marginal factors
removeFactors(separatorSummarizationSlots_);
separatorSummarizationSlots_ = insertFactors(separatorSummarization);
if(debug) { PrintNonlinearFactorGraph(factors_, separatorSummarizationSlots_, "ConcurrentBatchFilter::moveSeparator ", "New Separator Summarization:", DefaultKeyFormatter); }
// Update the separatorValues object (should only contain the new separator keys)
separatorValues_.clear();
BOOST_FOREACH(Key key, separatorSummarization.keys()) {
separatorValues_.insert(key, theta_.at(key));
}
}
}
}
std::vector<size_t> marginalSlots = insertFactors(marginalFactors);
// Cache marginalized variables and factors for later transmission to the smoother
{
// Add the new marginal factors to the list of smootherSeparatorFactors. In essence, we have just moved the separator
smootherSummarizationSlots_.insert(smootherSummarizationSlots_.end(), marginalSlots.begin(), marginalSlots.end());
// Move the marginalized factors from the filter to the smoother (holding area)
// Note: Be careful to only move nonlinear factors and not any marginals that may also need to be removed
BOOST_FOREACH(size_t slot, removedFactorSlots) {
std::vector<size_t>::iterator iter = std::find(smootherSummarizationSlots_.begin(), smootherSummarizationSlots_.end(), slot);
if(iter == smootherSummarizationSlots_.end()) {
// This is a real nonlinear factor. Add it to the smoother factor cache.
smootherFactors_.push_back(factors_.at(slot));
} else {
// This is a marginal factor that was removed and replaced by a new marginal factor. Remove this slot from the separator factor list.
smootherSummarizationSlots_.erase(iter);
}
}
// Remove the marginalized factors and add them to the smoother cache
smootherFactors_.push_back(removedFactors);
removeFactors(removedFactorSlots);
// Add the linearization point of the moved variables to the smoother cache
BOOST_FOREACH(Key key, keysToMove) {
smootherValues_.insert(key, theta_.at(key));
}
}
// Remove the marginalized variables and factors from the filter
{
// Remove marginalized factors from the factor graph
std::vector<size_t> slots(removedFactorSlots.begin(), removedFactorSlots.end());
removeFactors(slots);
// Remove marginalized keys from values (and separator)
BOOST_FOREACH(Key key, keysToMove) {
theta_.erase(key);
if(separatorValues_.exists(key)) {
separatorValues_.erase(key);
}
}
// Permute the ordering such that the removed keys are at the end.
@ -540,174 +694,9 @@ void ConcurrentBatchFilter::marginalize(const FastList<Key>& keysToMove) {
ordering_.pop_back();
delta_.pop_back();
}
}
if(debug) std::cout << "ConcurrentBatchFilter::moveSeparator End" << std::endl;
}
/* ************************************************************************* */
NonlinearFactorGraph ConcurrentBatchFilter::marginalize(const NonlinearFactorGraph& graph, const Values& values,
const Ordering& ordering, const std::set<Key>& marginalizeKeys, const GaussianFactorGraph::Eliminate& function) {
// Calculate marginal factors on the remaining variables (after marginalizing 'marginalizeKeys')
// Note: It is assumed the ordering already has these keys first
// Create the linear factor graph
GaussianFactorGraph linearFactorGraph = *graph.linearize(values);
// Construct a variable index
VariableIndex variableIndex(linearFactorGraph, ordering.size());
// Construct an elimination tree to perform sparse elimination
std::vector<EliminationForest::shared_ptr> forest( EliminationForest::Create(linearFactorGraph, variableIndex) );
// This is a forest. Only the top-most node/index of each tree needs to be eliminated; all of the children will be eliminated automatically
// Find the subset of nodes/keys that must be eliminated
std::set<Index> indicesToEliminate;
BOOST_FOREACH(Key key, marginalizeKeys) {
indicesToEliminate.insert(ordering.at(key));
}
BOOST_FOREACH(Key key, marginalizeKeys) {
EliminationForest::removeChildrenIndices(indicesToEliminate, forest.at(ordering.at(key)));
}
// Eliminate each top-most key, returning a Gaussian Factor on some of the remaining variables
// Convert the marginal factors into Linear Container Factors
// Add the marginal factor variables to the separator
NonlinearFactorGraph marginalFactors;
BOOST_FOREACH(Index index, indicesToEliminate) {
GaussianFactor::shared_ptr gaussianFactor = forest.at(index)->eliminateRecursive(function);
if(gaussianFactor->size() > 0) {
LinearContainerFactor::shared_ptr marginalFactor(new LinearContainerFactor(gaussianFactor, values));
marginalFactors.push_back(marginalFactor);
}
}
return marginalFactors;
}
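// [Editor's sketch, not part of the original source] A hypothetical call to the static
// marginalize() helper above, made from inside the filter. It assumes the filter currently
// holds keys 0..3, that keys 0 and 1 are to be marginalized out, and that ordering_ already
// lists those two keys before keys 2 and 3, as the note above requires.
//
//   std::set<Key> marginalizeKeys;
//   marginalizeKeys.insert(0);
//   marginalizeKeys.insert(1);
//   NonlinearFactorGraph marginals =
//       marginalize(factors_, theta_, ordering_, marginalizeKeys, EliminateQR);
//   // 'marginals' now holds LinearContainerFactors that touch only the remaining keys 2 and 3.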
/* ************************************************************************* */
void ConcurrentBatchFilter::PrintNonlinearFactor(const NonlinearFactor::shared_ptr& factor,
const std::string& indent, const KeyFormatter& keyFormatter) {
std::cout << indent;
if(factor) {
if(boost::dynamic_pointer_cast<LinearContainerFactor>(factor)) {
std::cout << "l( ";
} else {
std::cout << "f( ";
}
BOOST_FOREACH(Key key, *factor) {
std::cout << keyFormatter(key) << " ";
}
std::cout << ")" << std::endl;
} else {
std::cout << "{ NULL }" << std::endl;
}
}
/* ************************************************************************* */
void ConcurrentBatchFilter::PrintLinearFactor(const GaussianFactor::shared_ptr& factor,
const std::string& indent, const KeyFormatter& keyFormatter) {
std::cout << indent;
if(factor) {
std::cout << "g( ";
BOOST_FOREACH(Index key, *factor) {
std::cout << keyFormatter(key) << " ";
}
std::cout << ")" << std::endl;
} else {
std::cout << "{ NULL }" << std::endl;
}
}
/* ************************************************************************* */
std::vector<Index> ConcurrentBatchFilter::EliminationForest::ComputeParents(const VariableIndex& structure) {
// Number of factors and variables
const size_t m = structure.nFactors();
const size_t n = structure.size();
static const Index none = std::numeric_limits<Index>::max();
// Allocate result parent vector and vector of last factor columns
std::vector<Index> parents(n, none);
std::vector<Index> prevCol(m, none);
// for column j \in 1 to n do
for (Index j = 0; j < n; j++) {
// for row i \in Struct[A*j] do
BOOST_FOREACH(const size_t i, structure[j]) {
if (prevCol[i] != none) {
Index k = prevCol[i];
// find root r of the current tree that contains k
Index r = k;
while (parents[r] != none)
r = parents[r];
if (r != j) parents[r] = j;
}
prevCol[i] = j;
}
}
return parents;
}
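// [Editor's note, not part of the original source] A tiny worked example of ComputeParents,
// assuming three variables {0,1,2} and two factors f0(0,1) and f1(1,2). At column 0, f0 is
// seen for the first time (prevCol[f0] = 0). At column 1, f0 was last seen at column 0 whose
// tree root is 0, so parents[0] = 1; f1 is then recorded (prevCol[f1] = 1). At column 2, f1
// was last seen at column 1 whose root is 1, so parents[1] = 2. The result is
// parents = [1, 2, none]: the elimination forest is the single chain 0 -> 1 -> 2 rooted at 2.
//
//   GaussianFactorGraph gfg;                       // hypothetical graph with f0(0,1), f1(1,2)
//   VariableIndex structure(gfg, 3);
//   std::vector<Index> parents = ComputeParents(structure);   // -> [1, 2, none]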
/* ************************************************************************* */
std::vector<ConcurrentBatchFilter::EliminationForest::shared_ptr> ConcurrentBatchFilter::EliminationForest::Create(const GaussianFactorGraph& factorGraph, const VariableIndex& structure) {
// Compute the tree structure
std::vector<Index> parents(ComputeParents(structure));
// Number of variables
const size_t n = structure.size();
static const Index none = std::numeric_limits<Index>::max();
// Create tree structure
std::vector<shared_ptr> trees(n);
for (Index k = 1; k <= n; k++) {
Index j = n - k; // Start at the last variable and loop down to 0
trees[j].reset(new EliminationForest(j)); // Create a new node on this variable
if (parents[j] != none) // If this node has a parent, add it to the parent's children
trees[parents[j]]->add(trees[j]);
}
// Hang factors in right places
BOOST_FOREACH(const GaussianFactor::shared_ptr& factor, factorGraph) {
if(factor && factor->size() > 0) {
Index j = *std::min_element(factor->begin(), factor->end());
if(j < structure.size())
trees[j]->add(factor);
}
}
return trees;
}
/* ************************************************************************* */
GaussianFactor::shared_ptr ConcurrentBatchFilter::EliminationForest::eliminateRecursive(GaussianFactorGraph::Eliminate function) {
// Create the list of factors to be eliminated, initially empty, and reserve space
GaussianFactorGraph factors;
factors.reserve(this->factors_.size() + this->subTrees_.size());
// Add all factors associated with the current node
factors.push_back(this->factors_.begin(), this->factors_.end());
// For each subtree, recursively eliminate it into a separator factor and add that factor to [factors]
BOOST_FOREACH(const shared_ptr& child, subTrees_)
factors.push_back(child->eliminateRecursive(function));
// Combine all factors (from this node and from subtrees) into a joint factor
GaussianFactorGraph::EliminationResult eliminated(function(factors, 1));
return eliminated.second;
}
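// [Editor's note, not part of the original source] Continuing the chain example above
// (forest 0 -> 1 -> 2, with f0(0,1) hung on node 0 and f1(1,2) on node 1): calling
// eliminateRecursive on node 1 first eliminates variable 0 from {f0}, producing a separator
// factor on variable 1; node 1 then eliminates variable 1 from {f1, that separator factor}
// and returns a factor on variable 2 only, i.e. the marginal used by the callers above.
// Each call to function(factors, 1) eliminates exactly one frontal variable, and a root
// whose combined factor involves no remaining variables returns an empty factor, which is
// why callers check gaussianFactor->size() > 0 before wrapping the result.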
/* ************************************************************************* */
void ConcurrentBatchFilter::EliminationForest::removeChildrenIndices(std::set<Index>& indices, const ConcurrentBatchFilter::EliminationForest::shared_ptr& tree) {
BOOST_FOREACH(const EliminationForest::shared_ptr& child, tree->children()) {
indices.erase(child->key());
removeChildrenIndices(indices, child);
}
}
/* ************************************************************************* */
}/// namespace gtsam

View File

@ -40,6 +40,7 @@ public:
size_t lambdas; ///< The number of different L-M lambda factors that were tried during optimization
size_t nonlinearVariables; ///< The number of variables that can be relinearized
size_t linearVariables; ///< The number of variables that must keep a constant linearization point
std::vector<size_t> newFactorsIndices; ///< The indices of the newly-added factors, in 1-to-1 correspondence with the factors passed in
double error; ///< The final factor graph error
/// Constructor
@ -54,16 +55,16 @@ public:
};
/** Default constructor */
ConcurrentBatchFilter(const LevenbergMarquardtParams& parameters = LevenbergMarquardtParams(), bool relin = true) : parameters_(parameters), relin_(relin) {};
ConcurrentBatchFilter(const LevenbergMarquardtParams& parameters = LevenbergMarquardtParams()) : parameters_(parameters) {};
/** Default destructor */
virtual ~ConcurrentBatchFilter() {};
/** Implement a GTSAM standard 'print' function */
void print(const std::string& s = "Concurrent Batch Filter:\n", const KeyFormatter& keyFormatter = DefaultKeyFormatter) const;
virtual void print(const std::string& s = "Concurrent Batch Filter:\n", const KeyFormatter& keyFormatter = DefaultKeyFormatter) const;
/** Check if two Concurrent Filters are equal */
bool equals(const ConcurrentFilter& rhs, double tol = 1e-9) const;
virtual bool equals(const ConcurrentFilter& rhs, double tol = 1e-9) const;
/** Access the current set of factors */
const NonlinearFactorGraph& getFactors() const {
@ -113,27 +114,11 @@ public:
* @param newTheta Initialization points for new variables to be added to the filter
* You must include here all new variables occurring in newFactors that were not already
* in the filter.
* @param keysToMove An optional set of keys to remove from the filter and
* @param keysToMove An optional set of keys to move from the filter to the smoother
* @param removeFactorIndices An optional set of indices corresponding to the factors you want to remove from the graph
*/
Result update(const NonlinearFactorGraph& newFactors = NonlinearFactorGraph(), const Values& newTheta = Values(),
const boost::optional<FastList<Key> >& keysToMove = boost::none);
protected:
LevenbergMarquardtParams parameters_; ///< LM parameters
bool relin_;
NonlinearFactorGraph factors_; ///< The set of all factors currently in the filter
Values theta_; ///< Current linearization point of all variables in the filter
Ordering ordering_; ///< The current ordering used to calculate the linear deltas
VectorValues delta_; ///< The current set of linear deltas from the linearization point
std::queue<size_t> availableSlots_; ///< The set of available factor graph slots caused by deleting factors
Values separatorValues_; ///< The linearization points of the separator variables. These should not be updated during optimization.
std::vector<size_t> smootherSummarizationSlots_; ///< The slots in factor graph that correspond to the current smoother summarization factors
// Storage for information to be sent to the smoother
NonlinearFactorGraph filterSummarization_; ///< A temporary holding place for calculated filter summarization factors to be sent to the smoother
NonlinearFactorGraph smootherFactors_; ///< A temporary holding place for the set of full nonlinear factors being sent to the smoother
Values smootherValues_; ///< A temporary holding place for the linearization points of all keys being sent to the smoother
virtual Result update(const NonlinearFactorGraph& newFactors = NonlinearFactorGraph(), const Values& newTheta = Values(),
const boost::optional<FastList<Key> >& keysToMove = boost::none, const boost::optional< std::vector<size_t> >& removeFactorIndices = boost::none);
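// [Editor's sketch, not part of the original documentation] A hypothetical call sequence;
// the key and factor names are illustrative only.
//
//   ConcurrentBatchFilter filter;
//   ConcurrentBatchFilter::Result r = filter.update(newFactors, newValues);
//   // ... later, move the oldest pose to the smoother and remove a previously added factor,
//   // identified through the indices reported in Result::newFactorsIndices
//   FastList<Key> keysToMove;
//   keysToMove.push_back(oldestPoseKey);
//   std::vector<size_t> factorsToRemove;
//   factorsToRemove.push_back(r.newFactorsIndices.front());
//   filter.update(NonlinearFactorGraph(), Values(), keysToMove, factorsToRemove);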
/**
* Perform any required operations before the synchronization process starts.
@ -148,7 +133,7 @@ protected:
* @param filterSummarization The summarized factors for the filter branch
* @param filterSummarizationValues The linearization points of the root clique (separator) variables
*/
virtual void getSummarizedFactors(NonlinearFactorGraph& summarizedFactors, Values& separatorValues);
virtual void getSummarizedFactors(NonlinearFactorGraph& filterSummarization, Values& filterSummarizationValues);
/**
* Populate the provided containers with factors being sent to the smoother from the filter. These
@ -165,7 +150,7 @@ protected:
*
* @param smootherSummarization An updated version of the smoother branch summarized factors
* @param smootherSummarizationValues The linearization points of the variables involved in the smoother summarization
*/
virtual void synchronize(const NonlinearFactorGraph& summarizedFactors, const Values& separatorValues);
virtual void synchronize(const NonlinearFactorGraph& smootherSummarization, const Values& smootherSummarizationValues);
/**
* Perform any required operations after the synchronization process finishes.
@ -173,6 +158,25 @@ protected:
*/
virtual void postsync();
protected:
LevenbergMarquardtParams parameters_; ///< LM parameters
NonlinearFactorGraph factors_; ///< The set of all factors currently in the filter
Values theta_; ///< Current linearization point of all variables in the filter
Ordering ordering_; ///< The current ordering used to calculate the linear deltas
VectorValues delta_; ///< The current set of linear deltas from the linearization point
std::queue<size_t> availableSlots_; ///< The set of available factor graph slots caused by deleting factors
Values separatorValues_; ///< The linearization points of the separator variables. These should not be updated during optimization.
std::vector<size_t> separatorSummarizationSlots_; ///< The slots in factor graph that correspond to the current smoother summarization on the current separator
// Storage for information from the Smoother
NonlinearFactorGraph smootherSummarization_; ///< The smoother summarization on the old separator sent by the smoother during the last synchronization
NonlinearFactorGraph smootherShortcut_; ///< A set of conditional factors from the old separator to the current separator (recursively calculated during each filter update)
// Storage for information to be sent to the smoother
NonlinearFactorGraph filterSummarization_; ///< A temporary holding place for calculated filter summarization factors to be sent to the smoother
NonlinearFactorGraph smootherFactors_; ///< A temporary holding place for the set of full nonlinear factors being sent to the smoother
Values smootherValues_; ///< A temporary holding place for the linearization points of all keys being sent to the smoother
private:
@ -192,81 +196,55 @@ private:
/** Use colamd to update into an efficient ordering */
void reorder(const boost::optional<FastList<Key> >& keysToMove = boost::none);
/** Marginalize out the set of requested variables from the filter, caching them for the smoother
* This effectively moves the separator.
*
* @param keysToMove The set of keys to move from the filter to the smoother
*/
void moveSeparator(const FastList<Key>& keysToMove);
/** Use a modified version of L-M to update the linearization point and delta */
static Result optimize(const NonlinearFactorGraph& factors, Values& theta, const Ordering& ordering,
VectorValues& delta, const Values& linearValues, const LevenbergMarquardtParams& parameters);
/** Marginalize out the set of requested variables from the filter, caching them for the smoother
* This effectively moves the separator.
*/
void marginalize(const FastList<Key>& keysToMove);
/** Marginalize out the set of requested variables from the filter, caching them for the smoother
* This effectively moves the separator.
*/
static NonlinearFactorGraph marginalize(const NonlinearFactorGraph& graph, const Values& values,
const Ordering& ordering, const std::set<Key>& marginalizeKeys, const GaussianFactorGraph::Eliminate& function = EliminateQR);
static void optimize(const NonlinearFactorGraph& factors, Values& theta, const Ordering& ordering,
VectorValues& delta, const Values& linearValues, const LevenbergMarquardtParams& parameters,
Result& result);
/** Print just the nonlinear keys in a nonlinear factor */
static void PrintNonlinearFactor(const NonlinearFactor::shared_ptr& factor,
const std::string& indent = "", const KeyFormatter& keyFormatter = DefaultKeyFormatter);
/** Print just the nonlinear keys in each factor for a whole Nonlinear Factor Graph */
static void PrintNonlinearFactorGraph(const NonlinearFactorGraph& factors,
const std::string& indent = "", const std::string& title = "", const KeyFormatter& keyFormatter = DefaultKeyFormatter);
/** Print just the nonlinear keys of specific factors in a Nonlinear Factor Graph */
static void PrintNonlinearFactorGraph(const NonlinearFactorGraph& factors, const std::vector<size_t>& slots,
const std::string& indent = "", const std::string& title = "", const KeyFormatter& keyFormatter = DefaultKeyFormatter);
/** Print just the nonlinear keys in a linear factor */
static void PrintLinearFactor(const GaussianFactor::shared_ptr& factor,
const std::string& indent = "", const KeyFormatter& keyFormatter = DefaultKeyFormatter);
// A custom elimination tree that supports forests and partial elimination
class EliminationForest {
public:
typedef boost::shared_ptr<EliminationForest> shared_ptr; ///< Shared pointer to this class
/** Print just the nonlinear keys in each linear factor for a whole Gaussian Factor Graph */
static void PrintLinearFactorGraph(const GaussianFactorGraph& factors, const Ordering& ordering,
const std::string& indent = "", const std::string& title = "", const KeyFormatter& keyFormatter = DefaultKeyFormatter);
private:
typedef FastList<GaussianFactor::shared_ptr> Factors;
typedef FastList<shared_ptr> SubTrees;
typedef std::vector<GaussianConditional::shared_ptr> Conditionals;
Index key_; ///< index associated with root
Factors factors_; ///< factors associated with root
SubTrees subTrees_; ///< sub-trees
/** default constructor, private, as you should use Create below */
EliminationForest(Index key = 0) : key_(key) {}
/**
* Static internal function to build a vector of parent pointers using the
* algorithm of Gilbert et al., 2001, BIT.
*/
static std::vector<Index> ComputeParents(const VariableIndex& structure);
/** add a factor, for Create use only */
void add(const GaussianFactor::shared_ptr& factor) { factors_.push_back(factor); }
/** add a subtree, for Create use only */
void add(const shared_ptr& child) { subTrees_.push_back(child); }
public:
/** return the key associated with this tree node */
Index key() const { return key_; }
/** return the const reference of children */
const SubTrees& children() const { return subTrees_; }
/** return the const reference to the factors */
const Factors& factors() const { return factors_; }
/** Create an elimination tree from a factor graph */
static std::vector<shared_ptr> Create(const GaussianFactorGraph& factorGraph, const VariableIndex& structure);
/** Recursive routine that eliminates the factors arranged in an elimination tree */
GaussianFactor::shared_ptr eliminateRecursive(GaussianFactorGraph::Eliminate function);
/** Recursive function that helps find the top of each tree */
static void removeChildrenIndices(std::set<Index>& indices, const EliminationForest::shared_ptr& tree);
};
/** Print just the nonlinear keys contained inside a container */
template<class Container>
static void PrintKeys(const Container& keys, const std::string& indent = "",
const std::string& title = "", const KeyFormatter& keyFormatter = DefaultKeyFormatter);
}; // ConcurrentBatchFilter
/// Implementation of PrintKeys
template<class Container>
void ConcurrentBatchFilter::PrintKeys(const Container& keys, const std::string& indent, const std::string& title, const KeyFormatter& keyFormatter) {
std::cout << indent << title;
BOOST_FOREACH(Key key, keys) {
std::cout << " " << keyFormatter(key);
}
std::cout << std::endl;
}
/// Typedef for Matlab wrapping
typedef ConcurrentBatchFilter::Result ConcurrentBatchFilterResult;

View File

@ -265,19 +265,17 @@ ConcurrentBatchSmoother::Result ConcurrentBatchSmoother::optimize() {
result.nonlinearVariables = theta_.size() - separatorValues_.size();
result.linearVariables = separatorValues_.size();
// Set optimization parameters
// Pull out parameters we'll use
const NonlinearOptimizerParams::Verbosity nloVerbosity = parameters_.verbosity;
const LevenbergMarquardtParams::VerbosityLM lmVerbosity = parameters_.verbosityLM;
double lambda = parameters_.lambdaInitial;
double lambdaFactor = parameters_.lambdaFactor;
double lambdaUpperBound = parameters_.lambdaUpperBound;
double lambdaLowerBound = 0.5 / parameters_.lambdaUpperBound;
size_t maxIterations = parameters_.maxIterations;
double relativeErrorTol = parameters_.relativeErrorTol;
double absoluteErrorTol = parameters_.absoluteErrorTol;
double errorTol = parameters_.errorTol;
// Create a Values that holds the current evaluation point
Values evalpoint = theta_.retract(delta_, ordering_);
result.error = factors_.error(evalpoint);
if(result.error < parameters_.errorTol) {
return result;
}
// Use a custom optimization loop so the linearization points can be controlled
double previousError;
@ -293,6 +291,9 @@ ConcurrentBatchSmoother::Result ConcurrentBatchSmoother::optimize() {
// Keep increasing lambda until we make progress
while(true) {
if (lmVerbosity >= LevenbergMarquardtParams::TRYLAMBDA)
std::cout << "trying lambda = " << lambda << std::endl;
// Add prior factors at the current solution
gttic(damp);
GaussianFactorGraph dampedFactorGraph(linearFactorGraph);
@ -300,14 +301,16 @@ ConcurrentBatchSmoother::Result ConcurrentBatchSmoother::optimize() {
{
// for each of the variables, add a prior at the current solution
for(size_t j=0; j<delta_.size(); ++j) {
Matrix A = lambda * eye(delta_[j].size());
Vector b = lambda * delta_[j];
SharedDiagonal model = noiseModel::Unit::Create(delta_[j].size());
Matrix A = eye(delta_[j].size());
Vector b = delta_[j];
SharedDiagonal model = noiseModel::Isotropic::Sigma(delta_[j].size(), 1.0 / std::sqrt(lambda));
GaussianFactor::shared_ptr prior(new JacobianFactor(j, A, b, model));
dampedFactorGraph.push_back(prior);
}
}
gttoc(damp);
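// [Editor's note, not part of the original source] Each damping prior above has
// sigma = 1/sqrt(lambda), so it contributes 0.5 * lambda * || newDelta_j - delta_j ||^2 to
// the damped objective -- the standard Levenberg-Marquardt term pulling the new linear step
// toward the current delta_. (The earlier formulation, A = lambda*I and b = lambda*delta
// with a unit noise model, effectively weighted the same term by lambda^2 instead.)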
if (lmVerbosity >= LevenbergMarquardtParams::DAMPED)
dampedFactorGraph.print("damped");
result.lambdas++;
gttic(solve);
@ -317,11 +320,19 @@ ConcurrentBatchSmoother::Result ConcurrentBatchSmoother::optimize() {
evalpoint = theta_.retract(newDelta, ordering_);
gttoc(solve);
if (lmVerbosity >= LevenbergMarquardtParams::TRYLAMBDA)
std::cout << "linear delta norm = " << newDelta.norm() << std::endl;
if (lmVerbosity >= LevenbergMarquardtParams::TRYDELTA)
newDelta.print("delta");
// Evaluate the new error
gttic(compute_error);
double error = factors_.error(evalpoint);
gttoc(compute_error);
if (lmVerbosity >= LevenbergMarquardtParams::TRYLAMBDA)
std::cout << "next error = " << error << std::endl;
if(error < result.error) {
// Keep this change
// Update the error value
@ -339,29 +350,30 @@ ConcurrentBatchSmoother::Result ConcurrentBatchSmoother::optimize() {
}
}
// Decrease lambda for next time
lambda /= lambdaFactor;
if(lambda < lambdaLowerBound) {
lambda = lambdaLowerBound;
}
lambda /= parameters_.lambdaFactor;
// End this lambda search iteration
break;
} else {
// Reject this change
// Increase lambda and continue searching
lambda *= lambdaFactor;
if(lambda > lambdaUpperBound) {
if(lambda >= parameters_.lambdaUpperBound) {
// The maximum lambda has been used. Print a warning and end the search.
std::cout << "Warning: Levenberg-Marquardt giving up because cannot decrease error with maximum lambda" << std::endl;
break;
} else {
// Increase lambda and continue searching
lambda *= parameters_.lambdaFactor;
}
}
} // end while
}
gttoc(optimizer_iteration);
if (lmVerbosity >= LevenbergMarquardtParams::LAMBDA)
std::cout << "using lambda = " << lambda << std::endl;
result.iterations++;
} while(result.iterations < maxIterations &&
!checkConvergence(relativeErrorTol, absoluteErrorTol, errorTol, previousError, result.error, NonlinearOptimizerParams::SILENT));
} while(result.iterations < parameters_.maxIterations &&
!checkConvergence(parameters_.relativeErrorTol, parameters_.absoluteErrorTol, parameters_.errorTol, previousError, result.error, NonlinearOptimizerParams::SILENT));
return result;
}
@ -374,43 +386,20 @@ void ConcurrentBatchSmoother::updateSmootherSummarization() {
// These marginal factors will be cached for later transmission to the filter using
// linear container factors
// Clear out any existing smoother summarized factors
smootherSummarization_.resize(0);
// Reorder the system so that the separator keys are eliminated last
// TODO: This is currently being done twice: here and in 'update'. Fix it.
reorder();
// Create the linear factor graph
GaussianFactorGraph linearFactorGraph = *factors_.linearize(theta_, ordering_);
// Construct an elimination tree to perform sparse elimination
std::vector<EliminationForest::shared_ptr> forest( EliminationForest::Create(linearFactorGraph, variableIndex_) );
// This is a forest. Only the top-most node/index of each tree needs to be eliminated; all of the children will be eliminated automatically
// Find the subset of nodes/keys that must be eliminated
std::set<Index> indicesToEliminate;
BOOST_FOREACH(const Values::ConstKeyValuePair& key_value, theta_) {
indicesToEliminate.insert(ordering_.at(key_value.key));
// Create a nonlinear factor graph without the filter summarization factors
NonlinearFactorGraph graph(factors_);
BOOST_FOREACH(size_t slot, filterSummarizationSlots_) {
graph.remove(slot);
}
// Get the set of separator keys
gtsam::FastSet<Key> separatorKeys;
BOOST_FOREACH(const Values::ConstKeyValuePair& key_value, separatorValues_) {
indicesToEliminate.erase(ordering_.at(key_value.key));
}
std::vector<Index> indices(indicesToEliminate.begin(), indicesToEliminate.end());
BOOST_FOREACH(Index index, indices) {
EliminationForest::removeChildrenIndices(indicesToEliminate, forest.at(index));
}
// Eliminate each top-most key, returning a Gaussian Factor on some of the remaining variables
// Convert the marginal factors into Linear Container Factors and store
BOOST_FOREACH(Index index, indicesToEliminate) {
GaussianFactor::shared_ptr gaussianFactor = forest.at(index)->eliminateRecursive(parameters_.getEliminationFunction());
if(gaussianFactor->size() > 0) {
LinearContainerFactor::shared_ptr marginalFactor(new LinearContainerFactor(gaussianFactor, ordering_, theta_));
smootherSummarization_.push_back(marginalFactor);
}
separatorKeys.insert(key_value.key);
}
// Calculate the marginal factors on the separator
smootherSummarization_ = internal::calculateMarginalFactors(graph, theta_, separatorKeys, parameters_.getEliminationFunction());
}
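// [Editor's note, not part of the original source] Conceptually, the summarization above
// answers: "what do the smoother's own factors say about the separator?" A hypothetical
// miniature case, with key names chosen purely for illustration:
//
//   // smoother owns x0 and x1; the separator shared with the filter is {x2}
//   // factors_ : prior(x0), between(x0,x1), between(x1,x2), plus a filter summarization factor on x2
//   // graph    : the same factors with the filter summarization factor removed
//   // result   : smootherSummarization_ = marginal of 'graph' on {x2}, stored as LinearContainerFactors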
/* ************************************************************************* */
@ -446,95 +435,4 @@ void ConcurrentBatchSmoother::PrintLinearFactor(const GaussianFactor::shared_ptr
}
/* ************************************************************************* */
std::vector<Index> ConcurrentBatchSmoother::EliminationForest::ComputeParents(const VariableIndex& structure) {
// Number of factors and variables
const size_t m = structure.nFactors();
const size_t n = structure.size();
static const Index none = std::numeric_limits<Index>::max();
// Allocate result parent vector and vector of last factor columns
std::vector<Index> parents(n, none);
std::vector<Index> prevCol(m, none);
// for column j \in 1 to n do
for (Index j = 0; j < n; j++) {
// for row i \in Struct[A*j] do
BOOST_FOREACH(const size_t i, structure[j]) {
if (prevCol[i] != none) {
Index k = prevCol[i];
// find root r of the current tree that contains k
Index r = k;
while (parents[r] != none)
r = parents[r];
if (r != j) parents[r] = j;
}
prevCol[i] = j;
}
}
return parents;
}
/* ************************************************************************* */
std::vector<ConcurrentBatchSmoother::EliminationForest::shared_ptr> ConcurrentBatchSmoother::EliminationForest::Create(const GaussianFactorGraph& factorGraph, const VariableIndex& structure) {
// Compute the tree structure
std::vector<Index> parents(ComputeParents(structure));
// Number of variables
const size_t n = structure.size();
static const Index none = std::numeric_limits<Index>::max();
// Create tree structure
std::vector<shared_ptr> trees(n);
for (Index k = 1; k <= n; k++) {
Index j = n - k; // Start at the last variable and loop down to 0
trees[j].reset(new EliminationForest(j)); // Create a new node on this variable
if (parents[j] != none) // If this node has a parent, add it to the parent's children
trees[parents[j]]->add(trees[j]);
}
// Hang factors in right places
BOOST_FOREACH(const GaussianFactor::shared_ptr& factor, factorGraph) {
if(factor && factor->size() > 0) {
Index j = *std::min_element(factor->begin(), factor->end());
if(j < structure.size())
trees[j]->add(factor);
}
}
return trees;
}
/* ************************************************************************* */
GaussianFactor::shared_ptr ConcurrentBatchSmoother::EliminationForest::eliminateRecursive(GaussianFactorGraph::Eliminate function) {
// Create the list of factors to be eliminated, initially empty, and reserve space
GaussianFactorGraph factors;
factors.reserve(this->factors_.size() + this->subTrees_.size());
// Add all factors associated with the current node
factors.push_back(this->factors_.begin(), this->factors_.end());
// For each subtree, recursively eliminate it into a separator factor and add that factor to [factors]
BOOST_FOREACH(const shared_ptr& child, subTrees_)
factors.push_back(child->eliminateRecursive(function));
// Combine all factors (from this node and from subtrees) into a joint factor
GaussianFactorGraph::EliminationResult eliminated(function(factors, 1));
return eliminated.second;
}
/* ************************************************************************* */
void ConcurrentBatchSmoother::EliminationForest::removeChildrenIndices(std::set<Index>& indices, const ConcurrentBatchSmoother::EliminationForest::shared_ptr& tree) {
BOOST_FOREACH(const EliminationForest::shared_ptr& child, tree->children()) {
indices.erase(child->key());
removeChildrenIndices(indices, child);
}
}
/* ************************************************************************* */
}/// namespace gtsam

View File

@ -60,10 +60,10 @@ public:
virtual ~ConcurrentBatchSmoother() {};
/** Implement a GTSAM standard 'print' function */
void print(const std::string& s = "Concurrent Batch Smoother:\n", const KeyFormatter& keyFormatter = DefaultKeyFormatter) const;
virtual void print(const std::string& s = "Concurrent Batch Smoother:\n", const KeyFormatter& keyFormatter = DefaultKeyFormatter) const;
/** Check if two Concurrent Smoothers are equal */
bool equals(const ConcurrentSmoother& rhs, double tol = 1e-9) const;
virtual bool equals(const ConcurrentSmoother& rhs, double tol = 1e-9) const;
/** Access the current set of factors */
const NonlinearFactorGraph& getFactors() const {
@ -118,23 +118,7 @@ public:
* in the smoother). There must not be any variables here that do not occur in newFactors,
* and additionally, variables that were already in the system must not be included here.
*/
Result update(const NonlinearFactorGraph& newFactors = NonlinearFactorGraph(), const Values& newTheta = Values());
protected:
LevenbergMarquardtParams parameters_; ///< LM parameters
NonlinearFactorGraph factors_; ///< The set of all factors currently in the smoother
Values theta_; ///< Current linearization point of all variables in the smoother
Ordering ordering_; ///< The current ordering used to calculate the linear deltas
VectorValues delta_; ///< The current set of linear deltas from the linearization point
VariableIndex variableIndex_; ///< The current variable index, which allows efficient factor lookup by variable
std::queue<size_t> availableSlots_; ///< The set of available factor graph slots caused by deleting factors
Values separatorValues_; ///< The linearization points of the separator variables. These should not be updated during optimization.
std::vector<size_t> filterSummarizationSlots_; ///< The slots in factor graph that correspond to the current filter summarization factors
// Storage for information to be sent to the filter
NonlinearFactorGraph smootherSummarization_; ///< A temporary holding place for calculated smoother summarization
virtual Result update(const NonlinearFactorGraph& newFactors = NonlinearFactorGraph(), const Values& newTheta = Values());
/**
* Perform any required operations before the synchronization process starts.
@ -168,6 +152,21 @@ protected:
*/
virtual void postsync();
protected:
LevenbergMarquardtParams parameters_; ///< LM parameters
NonlinearFactorGraph factors_; ///< The set of all factors currently in the smoother
Values theta_; ///< Current linearization point of all variables in the smoother
Ordering ordering_; ///< The current ordering used to calculate the linear deltas
VectorValues delta_; ///< The current set of linear deltas from the linearization point
VariableIndex variableIndex_; ///< The current variable index, which allows efficient factor lookup by variable
std::queue<size_t> availableSlots_; ///< The set of available factor graph slots caused by deleting factors
Values separatorValues_; ///< The linearization points of the separator variables. These should not be updated during optimization.
std::vector<size_t> filterSummarizationSlots_; ///< The slots in factor graph that correspond to the current filter summarization factors
// Storage for information to be sent to the filter
NonlinearFactorGraph smootherSummarization_; ///< A temporary holding place for calculated smoother summarization
private:
/** Augment the graph with new factors
@ -200,56 +199,6 @@ private:
static void PrintLinearFactor(const GaussianFactor::shared_ptr& factor, const Ordering& ordering,
const std::string& indent = "", const KeyFormatter& keyFormatter = DefaultKeyFormatter);
// A custom elimination tree that supports forests and partial elimination
class EliminationForest {
public:
typedef boost::shared_ptr<EliminationForest> shared_ptr; ///< Shared pointer to this class
private:
typedef FastList<GaussianFactor::shared_ptr> Factors;
typedef FastList<shared_ptr> SubTrees;
typedef std::vector<GaussianConditional::shared_ptr> Conditionals;
Index key_; ///< index associated with root
Factors factors_; ///< factors associated with root
SubTrees subTrees_; ///< sub-trees
/** default constructor, private, as you should use Create below */
EliminationForest(Index key = 0) : key_(key) {}
/**
* Static internal function to build a vector of parent pointers using the
* algorithm of Gilbert et al., 2001, BIT.
*/
static std::vector<Index> ComputeParents(const VariableIndex& structure);
/** add a factor, for Create use only */
void add(const GaussianFactor::shared_ptr& factor) { factors_.push_back(factor); }
/** add a subtree, for Create use only */
void add(const shared_ptr& child) { subTrees_.push_back(child); }
public:
/** return the key associated with this tree node */
Index key() const { return key_; }
/** return the const reference of children */
const SubTrees& children() const { return subTrees_; }
/** return the const reference to the factors */
const Factors& factors() const { return factors_; }
/** Create an elimination tree from a factor graph */
static std::vector<shared_ptr> Create(const GaussianFactorGraph& factorGraph, const VariableIndex& structure);
/** Recursive routine that eliminates the factors arranged in an elimination tree */
GaussianFactor::shared_ptr eliminateRecursive(GaussianFactorGraph::Eliminate function);
/** Recursive function that helps find the top of each tree */
static void removeChildrenIndices(std::set<Index>& indices, const EliminationForest::shared_ptr& tree);
};
}; // ConcurrentBatchSmoother
/// Typedef for Matlab wrapping

View File

@ -19,10 +19,12 @@
// \callgraph
#include "ConcurrentFilteringAndSmoothing.h"
#include <gtsam_unstable/nonlinear/ConcurrentFilteringAndSmoothing.h>
#include <gtsam/nonlinear/LinearContainerFactor.h>
namespace gtsam {
/* ************************************************************************* */
void synchronize(ConcurrentFilter& filter, ConcurrentSmoother& smoother) {
NonlinearFactorGraph smootherFactors, filterSumarization, smootherSummarization;
@ -46,4 +48,227 @@ void synchronize(ConcurrentFilter& filter, ConcurrentSmoother& smoother) {
smoother.postsync();
}
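// [Editor's sketch, not part of the original source] Typical use of the synchronize() free
// function above with a concrete filter/smoother pair; all names are illustrative. The
// filter is updated at high rate, the smoother at low rate, and synchronize() exchanges the
// summarized factors whenever both are between updates.
//
//   ConcurrentBatchFilter filter;
//   ConcurrentBatchSmoother smoother;
//   filter.update(newFilterFactors, newFilterValues, keysToMove);   // fast loop
//   smoother.update();                                              // slow loop
//   synchronize(filter, smoother);                                  // exchange summaries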
namespace internal {
// TODO: Remove this and replace with the standard Elimination Tree once GTSAM 3.0 is released and supports forests
// A custom elimination tree that supports forests and partial elimination
class EliminationForest {
public:
typedef boost::shared_ptr<EliminationForest> shared_ptr; ///< Shared pointer to this class
private:
typedef FastList<GaussianFactor::shared_ptr> Factors;
typedef FastList<shared_ptr> SubTrees;
typedef std::vector<GaussianConditional::shared_ptr> Conditionals;
Index key_; ///< index associated with root
Factors factors_; ///< factors associated with root
SubTrees subTrees_; ///< sub-trees
/** default constructor, private, as you should use Create below */
EliminationForest(Index key = 0) : key_(key) {}
/**
* Static internal function to build a vector of parent pointers using the
* algorithm of Gilbert et al., 2001, BIT.
*/
static std::vector<Index> ComputeParents(const VariableIndex& structure);
/** add a factor, for Create use only */
void add(const GaussianFactor::shared_ptr& factor) { factors_.push_back(factor); }
/** add a subtree, for Create use only */
void add(const shared_ptr& child) { subTrees_.push_back(child); }
public:
/** return the key associated with this tree node */
Index key() const { return key_; }
/** return the const reference of children */
const SubTrees& children() const { return subTrees_; }
/** return the const reference to the factors */
const Factors& factors() const { return factors_; }
/** Create an elimination tree from a factor graph */
static std::vector<shared_ptr> Create(const GaussianFactorGraph& factorGraph, const VariableIndex& structure);
/** Recursive routine that eliminates the factors arranged in an elimination tree */
GaussianFactor::shared_ptr eliminateRecursive(GaussianFactorGraph::Eliminate function);
/** Recursive function that helps find the top of each tree */
static void removeChildrenIndices(std::set<Index>& indices, const EliminationForest::shared_ptr& tree);
};
/* ************************************************************************* */
std::vector<Index> EliminationForest::ComputeParents(const VariableIndex& structure) {
// Number of factors and variables
const size_t m = structure.nFactors();
const size_t n = structure.size();
static const Index none = std::numeric_limits<Index>::max();
// Allocate result parent vector and vector of last factor columns
std::vector<Index> parents(n, none);
std::vector<Index> prevCol(m, none);
// for column j \in 1 to n do
for (Index j = 0; j < n; j++) {
// for row i \in Struct[A*j] do
BOOST_FOREACH(const size_t i, structure[j]) {
if (prevCol[i] != none) {
Index k = prevCol[i];
// find root r of the current tree that contains k
Index r = k;
while (parents[r] != none)
r = parents[r];
if (r != j) parents[r] = j;
}
prevCol[i] = j;
}
}
return parents;
}
/* ************************************************************************* */
std::vector<EliminationForest::shared_ptr> EliminationForest::Create(const GaussianFactorGraph& factorGraph, const VariableIndex& structure) {
// Compute the tree structure
std::vector<Index> parents(ComputeParents(structure));
// Number of variables
const size_t n = structure.size();
static const Index none = std::numeric_limits<Index>::max();
// Create tree structure
std::vector<shared_ptr> trees(n);
for (Index k = 1; k <= n; k++) {
Index j = n - k; // Start at the last variable and loop down to 0
trees[j].reset(new EliminationForest(j)); // Create a new node on this variable
if (parents[j] != none) // If this node has a parent, add it to the parent's children
trees[parents[j]]->add(trees[j]);
}
// Hang factors in right places
BOOST_FOREACH(const GaussianFactor::shared_ptr& factor, factorGraph) {
if(factor && factor->size() > 0) {
Index j = *std::min_element(factor->begin(), factor->end());
if(j < structure.size())
trees[j]->add(factor);
}
}
return trees;
}
/* ************************************************************************* */
GaussianFactor::shared_ptr EliminationForest::eliminateRecursive(GaussianFactorGraph::Eliminate function) {
// Create the list of factors to be eliminated, initially empty, and reserve space
GaussianFactorGraph factors;
factors.reserve(this->factors_.size() + this->subTrees_.size());
// Add all factors associated with the current node
factors.push_back(this->factors_.begin(), this->factors_.end());
// For each subtree, recursively eliminate it into a separator factor and add that factor to [factors]
BOOST_FOREACH(const shared_ptr& child, subTrees_)
factors.push_back(child->eliminateRecursive(function));
// Combine all factors (from this node and from subtrees) into a joint factor
GaussianFactorGraph::EliminationResult eliminated(function(factors, 1));
return eliminated.second;
}
/* ************************************************************************* */
void EliminationForest::removeChildrenIndices(std::set<Index>& indices, const EliminationForest::shared_ptr& tree) {
BOOST_FOREACH(const EliminationForest::shared_ptr& child, tree->children()) {
indices.erase(child->key());
removeChildrenIndices(indices, child);
}
}
/* ************************************************************************* */
NonlinearFactorGraph calculateMarginalFactors(const NonlinearFactorGraph& graph, const Values& theta,
const FastSet<Key>& remainingKeys, const GaussianFactorGraph::Eliminate& eliminateFunction) {
// Calculate the set of RootKeys = AllKeys \Intersect RemainingKeys
FastSet<Key> rootKeys;
FastSet<Key> allKeys(graph.keys());
std::set_intersection(allKeys.begin(), allKeys.end(), remainingKeys.begin(), remainingKeys.end(), std::inserter(rootKeys, rootKeys.end()));
// Calculate the set of MarginalizeKeys = AllKeys - RemainingKeys
FastSet<Key> marginalizeKeys;
std::set_difference(allKeys.begin(), allKeys.end(), remainingKeys.begin(), remainingKeys.end(), std::inserter(marginalizeKeys, marginalizeKeys.end()));
if(marginalizeKeys.size() == 0) {
// There are no keys to marginalize. Simply return the input factors
return graph;
} else {
// Create a subset of theta that only contains the required keys
Values values;
BOOST_FOREACH(Key key, allKeys) {
values.insert(key, theta.at(key));
}
// Calculate the ordering: [Others Root]
std::map<Key, int> constraints;
BOOST_FOREACH(Key key, marginalizeKeys) {
constraints[key] = 0;
}
BOOST_FOREACH(Key key, rootKeys) {
constraints[key] = 1;
}
Ordering ordering = *graph.orderingCOLAMDConstrained(values, constraints);
// Create the linear factor graph
GaussianFactorGraph linearFactorGraph = *graph.linearize(values, ordering);
// Construct a variable index
VariableIndex variableIndex(linearFactorGraph, ordering.size());
// Construct an elimination tree to perform sparse elimination
std::vector<EliminationForest::shared_ptr> forest( EliminationForest::Create(linearFactorGraph, variableIndex) );
// This is a forest. Only the top-most node/index of each tree needs to be eliminated; all of the children will be eliminated automatically
// Find the subset of nodes/keys that must be eliminated
std::set<Index> indicesToEliminate;
BOOST_FOREACH(Key key, marginalizeKeys) {
indicesToEliminate.insert(ordering.at(key));
}
BOOST_FOREACH(Key key, marginalizeKeys) {
EliminationForest::removeChildrenIndices(indicesToEliminate, forest.at(ordering.at(key)));
}
// Eliminate each top-most key, returning a Gaussian Factor on some of the remaining variables
// Convert the marginal factors into Linear Container Factors
NonlinearFactorGraph marginalFactors;
BOOST_FOREACH(Index index, indicesToEliminate) {
GaussianFactor::shared_ptr gaussianFactor = forest.at(index)->eliminateRecursive(eliminateFunction);
if(gaussianFactor->size() > 0) {
LinearContainerFactor::shared_ptr marginalFactor(new LinearContainerFactor(gaussianFactor, ordering, values));
marginalFactors.push_back(marginalFactor);
}
}
// Also add any remaining factors that were unaffected by marginalizing out the selected variables.
// These are part of the marginal on the remaining variables as well.
BOOST_FOREACH(Key key, rootKeys) {
BOOST_FOREACH(const GaussianFactor::shared_ptr& gaussianFactor, forest.at(ordering.at(key))->factors()) {
LinearContainerFactor::shared_ptr marginalFactor(new LinearContainerFactor(gaussianFactor, ordering, values));
marginalFactors.push_back(marginalFactor);
}
}
return marginalFactors;
}
}
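// [Editor's sketch, not part of the original source] A minimal hypothetical call to the
// helper above; the keys are illustrative. The function is stateless, so it can be applied
// to any (possibly disconnected) nonlinear graph with matching linearization points.
//
//   FastSet<Key> remainingKeys;
//   remainingKeys.insert(2);
//   remainingKeys.insert(3);
//   NonlinearFactorGraph marginals =
//       internal::calculateMarginalFactors(graph, theta, remainingKeys, EliminateQR);
//   // 'marginals' contains only LinearContainerFactors whose keys are a subset of {2, 3}.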
/* ************************************************************************* */
}/// namespace internal
/* ************************************************************************* */
}/// namespace gtsam

View File

@ -39,22 +39,18 @@ class GTSAM_UNSTABLE_EXPORT ConcurrentFilter {
public:
typedef boost::shared_ptr<ConcurrentFilter> shared_ptr;
/** Implement a standard 'print' function */
virtual void print(const std::string& s = "Concurrent Filter:\n", const KeyFormatter& keyFormatter = DefaultKeyFormatter) const = 0;
/** Check if two Concurrent Smoothers are equal */
virtual bool equals(const ConcurrentFilter& rhs, double tol = 1e-9) const = 0;
protected:
friend void GTSAM_UNSTABLE_EXPORT synchronize(ConcurrentFilter& filter, ConcurrentSmoother& smoother);
/** Default constructor */
ConcurrentFilter() {};
/** Default destructor */
virtual ~ConcurrentFilter() {};
/** Implement a standard 'print' function */
virtual void print(const std::string& s = "Concurrent Filter:\n", const KeyFormatter& keyFormatter = DefaultKeyFormatter) const = 0;
/** Check if two Concurrent Smoothers are equal */
virtual bool equals(const ConcurrentFilter& rhs, double tol = 1e-9) const = 0;
/**
* Perform any required operations before the synchronization process starts.
* Called by 'synchronize'
@ -103,22 +99,18 @@ class GTSAM_UNSTABLE_EXPORT ConcurrentSmoother {
public:
typedef boost::shared_ptr<ConcurrentSmoother> shared_ptr;
/** Implement a standard 'print' function */
virtual void print(const std::string& s = "Concurrent Smoother:\n", const KeyFormatter& keyFormatter = DefaultKeyFormatter) const = 0;
/** Check if two Concurrent Smoothers are equal */
virtual bool equals(const ConcurrentSmoother& rhs, double tol = 1e-9) const = 0;
protected:
GTSAM_UNSTABLE_EXPORT friend void synchronize(ConcurrentFilter& filter, ConcurrentSmoother& smoother);
/** Default constructor */
ConcurrentSmoother() {};
/** Default destructor */
virtual ~ConcurrentSmoother() {};
/** Implement a standard 'print' function */
virtual void print(const std::string& s = "Concurrent Smoother:\n", const KeyFormatter& keyFormatter = DefaultKeyFormatter) const = 0;
/** Check if two Concurrent Smoothers are equal */
virtual bool equals(const ConcurrentSmoother& rhs, double tol = 1e-9) const = 0;
/**
* Perform any required operations before the synchronization process starts.
* Called by 'synchronize'
@ -154,4 +146,13 @@ protected:
}; // ConcurrentSmoother
namespace internal {
/** Calculate the marginal on the specified keys, returning a set of LinearContainerFactors.
* Unlike other GTSAM functions with similar purposes, this version can operate on disconnected graphs. */
NonlinearFactorGraph calculateMarginalFactors(const NonlinearFactorGraph& graph, const Values& theta,
const FastSet<Key>& remainingKeys, const GaussianFactorGraph::Eliminate& eliminateFunction);
}
}/// namespace gtsam

View File

@ -0,0 +1,415 @@
/* ----------------------------------------------------------------------------
* GTSAM Copyright 2010, Georgia Tech Research Corporation,
* Atlanta, Georgia 30332-0415
* All Rights Reserved
* Authors: Frank Dellaert, et al. (see THANKS for the full author list)
* See LICENSE for the license information
* -------------------------------------------------------------------------- */
/**
* @file ConcurrentIncrementalFilter.cpp
* @brief An iSAM2-based Filter that implements the
* Concurrent Filtering and Smoothing interface.
* @author Stephen Williams
*/
#include <gtsam_unstable/nonlinear/ConcurrentIncrementalFilter.h>
#include <gtsam/nonlinear/LinearContainerFactor.h>
#include <gtsam/base/timing.h>
#include <gtsam/base/debug.h>
namespace gtsam {
/* ************************************************************************* */
void ConcurrentIncrementalFilter::print(const std::string& s, const KeyFormatter& keyFormatter) const {
std::cout << s;
isam2_.print("");
}
/* ************************************************************************* */
bool ConcurrentIncrementalFilter::equals(const ConcurrentFilter& rhs, double tol) const {
const ConcurrentIncrementalFilter* filter = dynamic_cast<const ConcurrentIncrementalFilter*>(&rhs);
return filter
&& isam2_.equals(filter->isam2_)
&& (currentSmootherSummarizationSlots_.size() == filter->currentSmootherSummarizationSlots_.size())
&& std::equal(currentSmootherSummarizationSlots_.begin(), currentSmootherSummarizationSlots_.end(), filter->currentSmootherSummarizationSlots_.begin())
&& previousSmootherSummarization_.equals(filter->previousSmootherSummarization_)
&& smootherShortcut_.equals(filter->smootherShortcut_)
&& smootherFactors_.equals(filter->smootherFactors_)
&& smootherValues_.equals(filter->smootherValues_);
}
/* ************************************************************************* */
ConcurrentIncrementalFilter::Result ConcurrentIncrementalFilter::update(const NonlinearFactorGraph& newFactors, const Values& newTheta,
const boost::optional<FastList<Key> >& keysToMove, const boost::optional< std::vector<size_t> >& removeFactorIndices) {
gttic(update);
// const bool debug = ISDEBUG("ConcurrentIncrementalFilter update");
const bool debug = false;
if(debug) std::cout << "ConcurrentIncrementalFilter::update Begin" << std::endl;
// Create the return result meta-data
Result result;
// Remove any user-provided factors from iSAM2
gtsam::FastVector<size_t> removedFactors;
if(removeFactorIndices){
removedFactors.insert(removedFactors.end(), removeFactorIndices->begin(), removeFactorIndices->end());
}
// Generate ordering constraints that force the 'keys to move' to the end
boost::optional<gtsam::FastMap<gtsam::Key,int> > orderingConstraints = boost::none;
if(keysToMove && keysToMove->size() > 0) {
orderingConstraints = gtsam::FastMap<gtsam::Key,int>();
int group = 1;
// Set all existing variables to Group1
if(isam2_.getLinearizationPoint().size() > 0) {
BOOST_FOREACH(const gtsam::Values::ConstKeyValuePair& key_value, isam2_.getLinearizationPoint()) {
orderingConstraints->operator[](key_value.key) = group;
}
++group;
}
// Assign new variables to the root
BOOST_FOREACH(const gtsam::Values::ConstKeyValuePair& key_value, newTheta) {
orderingConstraints->operator[](key_value.key) = group;
}
// Set marginalizable variables to Group0
BOOST_FOREACH(gtsam::Key key, *keysToMove){
orderingConstraints->operator[](key) = 0;
}
}
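// [Editor's note, not part of the original source] Example of the constraint map built above
// for a hypothetical filter holding {x0, x1, x2}, a new variable x3, and keysToMove = {x0}:
// existing variables get group 1, new variables group 2, and the marginalizable variable is
// forced into group 0 so that constrained COLAMD eliminates it first:
//   { x0 -> 0, x1 -> 1, x2 -> 1, x3 -> 2 }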
// Create the set of linear keys that iSAM2 should hold constant
// iSAM2 takes care of this for us; no need to specify additional noRelin keys
boost::optional<gtsam::FastList<gtsam::Key> > noRelinKeys = boost::none;
// Mark additional keys between the 'keys to move' and the leaves
boost::optional<FastList<Key> > additionalKeys = boost::none;
if(keysToMove && keysToMove->size() > 0) {
std::set<Key> markedKeys;
BOOST_FOREACH(gtsam::Key key, *keysToMove) {
if(isam2_.getLinearizationPoint().exists(key)) {
Index index = isam2_.getOrdering().at(key);
ISAM2Clique::shared_ptr clique = isam2_[index];
GaussianConditional::const_iterator index_iter = clique->conditional()->begin();
while(*index_iter != index) {
markedKeys.insert(isam2_.getOrdering().key(*index_iter));
++index_iter;
}
BOOST_FOREACH(const gtsam::ISAM2Clique::shared_ptr& child, clique->children()) {
RecursiveMarkAffectedKeys(index, child, isam2_.getOrdering(), markedKeys);
}
}
}
additionalKeys = FastList<Key>(markedKeys.begin(), markedKeys.end());
}
// Update the system using iSAM2
gttic(isam2);
gtsam::ISAM2Result isam2Result = isam2_.update(newFactors, newTheta, removedFactors, orderingConstraints, noRelinKeys, additionalKeys);
gttoc(isam2);
if(keysToMove && keysToMove->size() > 0) {
gttic(cache_smoother_factors);
// Find the set of factors that will be removed
std::vector<size_t> removedFactorSlots = FindAdjacentFactors(isam2_, *keysToMove, currentSmootherSummarizationSlots_);
// Cache these factors for later transmission to the smoother
NonlinearFactorGraph removedFactors;
BOOST_FOREACH(size_t slot, removedFactorSlots) {
const NonlinearFactor::shared_ptr& factor = isam2_.getFactorsUnsafe().at(slot);
if(factor) {
smootherFactors_.push_back(factor);
removedFactors.push_back(factor);
}
}
// Cache removed values for later transmission to the smoother
BOOST_FOREACH(Key key, *keysToMove) {
smootherValues_.insert(key, isam2_.getLinearizationPoint().at(key));
}
gttoc(cache_smoother_factors);
gttic(marginalize);
std::vector<size_t> marginalFactorsIndices;
std::vector<size_t> deletedFactorsIndices;
isam2_.marginalizeLeaves(*keysToMove, marginalFactorsIndices, deletedFactorsIndices);
currentSmootherSummarizationSlots_.insert(currentSmootherSummarizationSlots_.end(), marginalFactorsIndices.begin(), marginalFactorsIndices.end());
BOOST_FOREACH(size_t index, deletedFactorsIndices) {
currentSmootherSummarizationSlots_.erase(std::remove(currentSmootherSummarizationSlots_.begin(), currentSmootherSummarizationSlots_.end(), index), currentSmootherSummarizationSlots_.end());
}
gttoc(marginalize);
// Calculate a new shortcut
updateShortcut(removedFactors);
}
// Extract the ConcurrentIncrementalFilter::Result information
result.iterations = 1;
result.linearVariables = isam2_.getFixedVariables().size();
result.nonlinearVariables = isam2_.getLinearizationPoint().size() - result.linearVariables;
result.newFactorsIndices = isam2Result.newFactorsIndices;
// result.error = isam2_.getFactorsUnsafe().error(isam2_.calculateEstimate());
if(debug) std::cout << "ConcurrentIncrementalFilter::update End" << std::endl;
gttoc(update);
return result;
}
/* ************************************************************************* */
void ConcurrentIncrementalFilter::presync() {
gttic(presync);
gttoc(presync);
}
/* ************************************************************************* */
void ConcurrentIncrementalFilter::synchronize(const NonlinearFactorGraph& smootherSummarization, const Values& smootherSummarizationValues) {
gttic(synchronize);
// const bool debug = ISDEBUG("ConcurrentIncrementalFilter synchronize");
const bool debug = false;
if(debug) std::cout << "ConcurrentIncrementalFilter::synchronize Begin" << std::endl;
// Update the smoother summarization on the old separator
previousSmootherSummarization_ = smootherSummarization;
// Find the set of new separator keys
const FastSet<Key>& newSeparatorKeys = isam2_.getFixedVariables();
// Use the shortcut to calculate an updated marginal on the current separator
// Combine just the shortcut and the previousSmootherSummarization
NonlinearFactorGraph graph;
graph.push_back(previousSmootherSummarization_);
graph.push_back(smootherShortcut_);
Values values;
values.insert(smootherSummarizationValues);
BOOST_FOREACH(Key key, newSeparatorKeys) {
if(!values.exists(key)) {
values.insert(key, isam2_.getLinearizationPoint().at(key));
}
}
// Force iSAM2 not to relinearize anything during this iteration
FastList<Key> noRelinKeys;
BOOST_FOREACH(const Values::ConstKeyValuePair& key_value, isam2_.getLinearizationPoint()) {
noRelinKeys.push_back(key_value.key);
}
// Calculate the summarized factor on just the new separator keys
NonlinearFactorGraph currentSmootherSummarization = internal::calculateMarginalFactors(graph, values, newSeparatorKeys,
isam2_.params().factorization == ISAM2Params::CHOLESKY ? EliminateCholesky : EliminateQR);
// Remove the old factors on the separator and insert the new ones
FastVector<size_t> removeFactors(currentSmootherSummarizationSlots_.begin(), currentSmootherSummarizationSlots_.end());
ISAM2Result result = isam2_.update(currentSmootherSummarization, Values(), removeFactors, boost::none, noRelinKeys, boost::none, false);
currentSmootherSummarizationSlots_ = result.newFactorsIndices;
// Set the previous smoother summarization to the current smoother summarization and clear the smoother shortcut
previousSmootherSummarization_ = currentSmootherSummarization;
smootherShortcut_.resize(0);
if(debug) std::cout << "ConcurrentIncrementalFilter::synchronize End" << std::endl;
gttoc(synchronize);
}
/* ************************************************************************* */
void ConcurrentIncrementalFilter::getSummarizedFactors(NonlinearFactorGraph& filterSummarization, Values& filterSummarizationValues) {
gttic(get_summarized_factors);
// Calculate the current filter summarization
filterSummarization = calculateFilterSummarization();
// Copy the current separator values into the output
BOOST_FOREACH(Key key, isam2_.getFixedVariables()) {
filterSummarizationValues.insert(key, isam2_.getLinearizationPoint().at(key));
}
gttoc(get_summarized_factors);
}
/* ************************************************************************* */
void ConcurrentIncrementalFilter::getSmootherFactors(NonlinearFactorGraph& smootherFactors, Values& smootherValues) {
gttic(get_smoother_factors);
// Copy the cached factors and values destined for the smoother into the output
smootherFactors.push_back(smootherFactors_);
smootherValues.insert(smootherValues_);
gttoc(get_smoother_factors);
}
/* ************************************************************************* */
void ConcurrentIncrementalFilter::postsync() {
gttic(postsync);
// Clear out the factors and values that were just sent to the smoother
smootherFactors_.resize(0);
smootherValues_.clear();
gttoc(postsync);
}
/* ************************************************************************* */
void ConcurrentIncrementalFilter::RecursiveMarkAffectedKeys(Index index, const ISAM2Clique::shared_ptr& clique, const Ordering& ordering, std::set<Key>& additionalKeys) {
// Check if the separator keys of the current clique contain the specified key
if(std::find(clique->conditional()->beginParents(), clique->conditional()->endParents(), index) != clique->conditional()->endParents()) {
// Mark the frontal keys of the current clique
BOOST_FOREACH(Index idx, clique->conditional()->frontals()) {
additionalKeys.insert(ordering.key(idx));
}
// Recursively mark all of the children
BOOST_FOREACH(const ISAM2Clique::shared_ptr& child, clique->children()) {
RecursiveMarkAffectedKeys(index, child, ordering, additionalKeys);
}
}
// If the key was not found in the separator/parents, then none of its children can have it either
}
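// [Editor's note, not part of the original source] Illustration of the recursion above on a
// hypothetical Bayes tree with root clique [x2 x3], child clique [x1 | x2] and grandchild
// clique [x0 | x1]. Marking the keys affected by index(x2) visits [x1 | x2] (x2 appears among
// its parents, so the frontal x1 is marked) and then tries [x0 | x1], whose parent is x1
// rather than x2, so the recursion stops there and x0 is left unmarked.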
/* ************************************************************************* */
std::vector<size_t> ConcurrentIncrementalFilter::FindAdjacentFactors(const ISAM2& isam2, const FastList<Key>& keys, const std::vector<size_t>& factorsToIgnore) {
// Identify any new factors to be sent to the smoother (i.e. any factor involving keysToMove)
std::vector<size_t> removedFactorSlots;
const VariableIndex& variableIndex = isam2.getVariableIndex();
BOOST_FOREACH(Key key, keys) {
const FastList<size_t>& slots = variableIndex[isam2.getOrdering().at(key)];
removedFactorSlots.insert(removedFactorSlots.end(), slots.begin(), slots.end());
}
// Sort and remove duplicates
std::sort(removedFactorSlots.begin(), removedFactorSlots.end());
removedFactorSlots.erase(std::unique(removedFactorSlots.begin(), removedFactorSlots.end()), removedFactorSlots.end());
// Remove any linear/marginal factor that made it into the set
BOOST_FOREACH(size_t index, factorsToIgnore) {
removedFactorSlots.erase(std::remove(removedFactorSlots.begin(), removedFactorSlots.end(), index), removedFactorSlots.end());
}
return removedFactorSlots;
}
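// [Editor's note, not part of the original source] A hypothetical outcome of the helper
// above: with keys = {x0}, where x0 appears in factor slots {0, 2, 5}, and slot 5 listed in
// factorsToIgnore as an existing marginal factor, the function returns {0, 2}. Factors that
// touch several of the requested keys appear only once, because the slots are sorted and
// deduplicated before the ignore list is applied.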
/* ************************************************************************* */
// TODO: Make this a static function
void ConcurrentIncrementalFilter::updateShortcut(const NonlinearFactorGraph& removedFactors) {
// Calculate the set of shortcut keys: NewSeparatorKeys + OldSeparatorKeys
FastSet<Key> shortcutKeys;
BOOST_FOREACH(size_t slot, currentSmootherSummarizationSlots_) {
const NonlinearFactor::shared_ptr& factor = isam2_.getFactorsUnsafe().at(slot);
if(factor) {
shortcutKeys.insert(factor->begin(), factor->end());
}
}
BOOST_FOREACH(Key key, previousSmootherSummarization_.keys()) {
shortcutKeys.insert(key);
}
// Combine the previous shortcut factor with all of the new factors being sent to the smoother
NonlinearFactorGraph graph;
graph.push_back(removedFactors);
graph.push_back(smootherShortcut_);
Values values;
values.insert(smootherValues_);
values.insert(isam2_.getLinearizationPoint());
// Calculate the summarized factor on the shortcut keys
smootherShortcut_ = internal::calculateMarginalFactors(graph, values, shortcutKeys,
isam2_.params().factorization == ISAM2Params::CHOLESKY ? EliminateCholesky : EliminateQR);
}
/* ************************************************************************* */
// TODO: Make this a static function
NonlinearFactorGraph ConcurrentIncrementalFilter::calculateFilterSummarization() const {
// The filter summarization factors are the resulting marginal factors on the separator
// variables that result from marginalizing out all of the other variables
// Find the set of current separator keys
const FastSet<Key>& separatorKeys = isam2_.getFixedVariables();
// Find all cliques that contain any separator variables
std::set<ISAM2Clique::shared_ptr> separatorCliques;
BOOST_FOREACH(Key key, separatorKeys) {
Index index = isam2_.getOrdering().at(key);
ISAM2Clique::shared_ptr clique = isam2_[index];
separatorCliques.insert( clique );
}
// Create the set of clique keys
std::vector<Index> cliqueIndices;
std::vector<Key> cliqueKeys;
BOOST_FOREACH(const ISAM2Clique::shared_ptr& clique, separatorCliques) {
BOOST_FOREACH(Index index, clique->conditional()->frontals()) {
cliqueIndices.push_back(index);
cliqueKeys.push_back(isam2_.getOrdering().key(index));
}
}
std::sort(cliqueIndices.begin(), cliqueIndices.end());
std::sort(cliqueKeys.begin(), cliqueKeys.end());
// Gather all factors that involve only clique keys
std::set<size_t> cliqueFactorSlots;
BOOST_FOREACH(Index index, cliqueIndices) {
BOOST_FOREACH(size_t slot, isam2_.getVariableIndex()[index]) {
const NonlinearFactor::shared_ptr& factor = isam2_.getFactorsUnsafe().at(slot);
if(factor) {
std::set<Key> factorKeys(factor->begin(), factor->end());
if(std::includes(cliqueKeys.begin(), cliqueKeys.end(), factorKeys.begin(), factorKeys.end())) {
cliqueFactorSlots.insert(slot);
}
}
}
}
// Remove any factor included in the current smoother summarization
BOOST_FOREACH(size_t slot, currentSmootherSummarizationSlots_) {
cliqueFactorSlots.erase(slot);
}
// Create a factor graph from the identified factors
NonlinearFactorGraph graph;
BOOST_FOREACH(size_t slot, cliqueFactorSlots) {
graph.push_back(isam2_.getFactorsUnsafe().at(slot));
}
// Find the set of children of the separator cliques
std::set<ISAM2Clique::shared_ptr> childCliques;
// Add all of the children
BOOST_FOREACH(const ISAM2Clique::shared_ptr& clique, separatorCliques) {
childCliques.insert(clique->children().begin(), clique->children().end());
}
// Remove any separator cliques that were added because they were children of other separator cliques
BOOST_FOREACH(const ISAM2Clique::shared_ptr& clique, separatorCliques) {
childCliques.erase(clique);
}
// Augment the factor graph with cached factors from the children
BOOST_FOREACH(const ISAM2Clique::shared_ptr& clique, childCliques) {
LinearContainerFactor::shared_ptr factor(new LinearContainerFactor(clique->cachedFactor(), isam2_.getOrdering(), isam2_.getLinearizationPoint()));
graph.push_back( factor );
}
// Calculate the marginal factors on the separator
NonlinearFactorGraph filterSummarization = internal::calculateMarginalFactors(graph, isam2_.getLinearizationPoint(), separatorKeys,
isam2_.params().factorization == ISAM2Params::CHOLESKY ? EliminateCholesky : EliminateQR);
return filterSummarization;
}
/* ************************************************************************* */
}/// namespace gtsam

View File

@ -0,0 +1,192 @@
/* ----------------------------------------------------------------------------
* GTSAM Copyright 2010, Georgia Tech Research Corporation,
* Atlanta, Georgia 30332-0415
* All Rights Reserved
* Authors: Frank Dellaert, et al. (see THANKS for the full author list)
* See LICENSE for the license information
* -------------------------------------------------------------------------- */
/**
* @file ConcurrentIncrementalFilter.h
* @brief An iSAM2-based Filter that implements the
* Concurrent Filtering and Smoothing interface.
* @author Stephen Williams
*/
// \callgraph
#pragma once
#include <gtsam_unstable/nonlinear/ConcurrentFilteringAndSmoothing.h>
#include <gtsam/nonlinear/ISAM2.h>
namespace gtsam {
/**
* An iSAM2-based incremental filter that implements the Concurrent Filtering and Smoothing interface.
*/
class GTSAM_UNSTABLE_EXPORT ConcurrentIncrementalFilter : public virtual ConcurrentFilter {
public:
typedef boost::shared_ptr<ConcurrentIncrementalFilter> shared_ptr;
typedef ConcurrentFilter Base; ///< typedef for base class
/** Meta information returned about the update */
struct Result {
size_t iterations; ///< The number of optimizer iterations performed
size_t nonlinearVariables; ///< The number of variables that can be relinearized
size_t linearVariables; ///< The number of variables that must keep a constant linearization point
std::vector<size_t> newFactorsIndices; ///< The indices of the newly-added factors, in 1-to-1 correspondence with the factors passed in
double error; ///< The final factor graph error
/// Constructor
Result() : iterations(0), nonlinearVariables(0), linearVariables(0), error(0) {};
/// Getter methods
size_t getIterations() const { return iterations; }
size_t getNonlinearVariables() const { return nonlinearVariables; }
size_t getLinearVariables() const { return linearVariables; }
double getError() const { return error; }
};
/** Default constructor */
ConcurrentIncrementalFilter(const ISAM2Params& parameters = ISAM2Params()) : isam2_(parameters) {};
/** Default destructor */
virtual ~ConcurrentIncrementalFilter() {};
/** Implement a GTSAM standard 'print' function */
virtual void print(const std::string& s = "Concurrent Incremental Filter:\n", const KeyFormatter& keyFormatter = DefaultKeyFormatter) const;
/** Check if two Concurrent Filters are equal */
virtual bool equals(const ConcurrentFilter& rhs, double tol = 1e-9) const;
/** Access the current set of factors */
const NonlinearFactorGraph& getFactors() const {
return isam2_.getFactorsUnsafe();
}
/** Access the current linearization point */
const Values& getLinearizationPoint() const {
return isam2_.getLinearizationPoint();
}
/** Access the current ordering */
const Ordering& getOrdering() const {
return isam2_.getOrdering();
}
/** Access the current set of deltas to the linearization point */
const VectorValues& getDelta() const {
return isam2_.getDelta();
}
/** Compute the current best estimate of all variables and return a full Values structure.
* If only a single variable is needed, it may be faster to call calculateEstimate(const KEY&).
*/
Values calculateEstimate() const {
return isam2_.calculateEstimate();
}
/** Compute the current best estimate of a single variable. This is generally faster than
* calling the no-argument version of calculateEstimate if only specific variables are needed.
* @param key
* @return
*/
template<class VALUE>
VALUE calculateEstimate(Key key) const {
return isam2_.calculateEstimate<VALUE>(key);
}
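// Illustrative usage sketch (added for exposition, not part of the original header):
// querying estimates from the filter; 'filter', the key, and the Pose3 type are placeholders.
//
//   Values fullEstimate = filter.calculateEstimate();
//   Pose3 currentPose = filter.calculateEstimate<Pose3>(1);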
/**
* Add new factors and variables to the filter.
*
* Add new measurements, and optionally new variables, to the filter.
* This runs a full update step of the derived filter algorithm
*
* @param newFactors The new factors to be added to the smoother
* @param newTheta Initialization points for new variables to be added to the filter
* You must include here all new variables occurring in newFactors that were not already
* in the filter.
* @param keysToMove An optional set of keys to move from the filter to the smoother
* @param removeFactorIndices An optional set of indices corresponding to the factors you want to remove from the graph
*/
Result update(const NonlinearFactorGraph& newFactors = NonlinearFactorGraph(), const Values& newTheta = Values(),
const boost::optional<FastList<Key> >& keysToMove = boost::none,
const boost::optional< std::vector<size_t> >& removeFactorIndices = boost::none);
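// Illustrative usage sketch (added for exposition, not part of the original header): a single
// filter update, mirroring the calls used in the unit tests. The key, factor, noise model,
// and the 'filter' instance are placeholders.
//
//   NonlinearFactorGraph newFactors;
//   newFactors.add(PriorFactor<Pose3>(1, Pose3(), noiseModel::Isotropic::Sigma(6, 0.10)));
//   Values newTheta;
//   newTheta.insert(1, Pose3());
//   FastList<Key> keysToMove;   // optionally list older keys to hand off to the smoother
//   ConcurrentIncrementalFilter::Result result = filter.update(newFactors, newTheta, keysToMove);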
/**
* Perform any required operations before the synchronization process starts.
* Called by 'synchronize'
*/
virtual void presync();
/**
* Populate the provided containers with factors that constitute the filter branch summarization
* needed by the smoother. Also, linearization points for the new root clique must be provided.
*
* @param filterSummarization The summarized factors for the filter branch
* @param filterSummarizationValues The linearization points of the root clique (separator) variables
*/
virtual void getSummarizedFactors(NonlinearFactorGraph& filterSummarization, Values& filterSummarizationValues);
/**
* Populate the provided containers with factors being sent to the smoother from the filter. These
* may be original nonlinear factors, or factors encoding a summarization of the filter information.
* The specifics will be implementation-specific for a given filter.
*
* @param smootherFactors The new factors to be added to the smoother
* @param smootherValues The linearization points of any new variables
*/
virtual void getSmootherFactors(NonlinearFactorGraph& smootherFactors, Values& smootherValues);
/**
* Apply the updated version of the smoother branch summarized factors.
*
* @param smootherSummarization An updated version of the smoother branch summarized factors
* @param smootherSummarizationValues The linearization points of the smoother summarization variables
*/
virtual void synchronize(const NonlinearFactorGraph& smootherSummarization, const Values& smootherSummarizationValues);
/**
* Perform any required operations after the synchronization process finishes.
* Called by 'synchronize'
*/
virtual void postsync();
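// Illustrative synchronization sketch (added for exposition). The orchestration is normally
// performed by the concurrent filtering-and-smoothing driver, which is not shown in this file,
// so the call order below is an assumption pieced together from the unit tests:
//
//   NonlinearFactorGraph smootherFactors, filterSummarization, smootherSummarization;
//   Values smootherValues, filterSummarizationValues, smootherSummarizationValues;
//   filter.presync();    smoother.presync();
//   smoother.getSummarizedFactors(smootherSummarization, smootherSummarizationValues);
//   filter.getSummarizedFactors(filterSummarization, filterSummarizationValues);
//   filter.getSmootherFactors(smootherFactors, smootherValues);
//   filter.synchronize(smootherSummarization, smootherSummarizationValues);
//   smoother.synchronize(smootherFactors, smootherValues, filterSummarization, filterSummarizationValues);
//   filter.postsync();   smoother.postsync();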
protected:
ISAM2 isam2_; ///< The iSAM2 inference engine
// Bookkeeping for the synchronization information exchanged with the smoother
NonlinearFactorGraph previousSmootherSummarization_; ///< The smoother summarization on the old separator sent by the smoother during the last synchronization
std::vector<size_t> currentSmootherSummarizationSlots_; ///< The slots in factor graph that correspond to the current smoother summarization on the current separator
NonlinearFactorGraph smootherShortcut_; ///< A set of conditional factors from the old separator to the current separator (recursively calculated during each filter update)
// Storage for information to be sent to the smoother
NonlinearFactorGraph smootherFactors_; ///< A temporary holding place for the set of full nonlinear factors being sent to the smoother
Values smootherValues_; ///< A temporary holding place for the linearization points of all keys being sent to the smoother
private:
/** Traverse the iSAM2 Bayes Tree, inserting all descendants of the provided index/key into 'additionalKeys' */
static void RecursiveMarkAffectedKeys(Index index, const ISAM2Clique::shared_ptr& clique, const Ordering& ordering, std::set<Key>& additionalKeys);
/** Find the set of iSAM2 factors adjacent to 'keys' */
static std::vector<size_t> FindAdjacentFactors(const ISAM2& isam2, const FastList<Key>& keys, const std::vector<size_t>& factorsToIgnore);
/** Update the shortcut marginal between the current separator keys and the previous separator keys */
// TODO: Make this a static function
void updateShortcut(const NonlinearFactorGraph& removedFactors);
/** Calculate marginal factors on the current separator variables using just the information in the filter */
// TODO: Make this a static function
NonlinearFactorGraph calculateFilterSummarization() const;
}; // ConcurrentIncrementalFilter
/// Typedef for Matlab wrapping
typedef ConcurrentIncrementalFilter::Result ConcurrentIncrementalFilterResult;
}/// namespace gtsam

View File

@ -0,0 +1,253 @@
/* ----------------------------------------------------------------------------
* GTSAM Copyright 2010, Georgia Tech Research Corporation,
* Atlanta, Georgia 30332-0415
* All Rights Reserved
* Authors: Frank Dellaert, et al. (see THANKS for the full author list)
* See LICENSE for the license information
* -------------------------------------------------------------------------- */
/**
* @file ConcurrentIncrementalSmoother.cpp
* @brief An iSAM2-based Smoother that implements the
* Concurrent Filtering and Smoothing interface.
* @author Stephen Williams
*/
#include <gtsam_unstable/nonlinear/ConcurrentIncrementalSmoother.h>
#include <gtsam/nonlinear/LinearContainerFactor.h>
#include <gtsam/base/timing.h>
#include <gtsam/base/debug.h>
namespace gtsam {
/* ************************************************************************* */
void ConcurrentIncrementalSmoother::print(const std::string& s, const KeyFormatter& keyFormatter) const {
std::cout << s;
isam2_.print("");
}
/* ************************************************************************* */
bool ConcurrentIncrementalSmoother::equals(const ConcurrentSmoother& rhs, double tol) const {
const ConcurrentIncrementalSmoother* smoother = dynamic_cast<const ConcurrentIncrementalSmoother*>(&rhs);
return smoother
&& isam2_.equals(smoother->isam2_)
&& smootherFactors_.equals(smoother->smootherFactors_)
&& smootherValues_.equals(smoother->smootherValues_)
&& filterSummarizationFactors_.equals(smoother->filterSummarizationFactors_)
&& separatorValues_.equals(smoother->separatorValues_)
&& (filterSummarizationSlots_.size() == smoother->filterSummarizationSlots_.size())
&& std::equal(filterSummarizationSlots_.begin(), filterSummarizationSlots_.end(), smoother->filterSummarizationSlots_.begin())
&& smootherSummarization_.equals(smoother->smootherSummarization_);
}
/* ************************************************************************* */
ConcurrentIncrementalSmoother::Result ConcurrentIncrementalSmoother::update(const NonlinearFactorGraph& newFactors, const Values& newTheta) {
gttic(update);
// Create the return result meta-data
Result result;
// Constrain the separator keys to remain in the root
// Also, mark the separator keys as fixed linearization points
FastMap<Key,int> constrainedKeys;
FastList<Key> noRelinKeys;
BOOST_FOREACH(const Values::ConstKeyValuePair& key_value, separatorValues_) {
constrainedKeys[key_value.key] = 1;
noRelinKeys.push_back(key_value.key);
}
// Use iSAM2 to perform an update
gtsam::ISAM2Result isam2Result;
if(isam2_.getFactorsUnsafe().size() + newFactors.size() + smootherFactors_.size() + filterSummarizationFactors_.size() > 0) {
if(synchronizationUpdatesAvailable_) {
// Augment any new factors/values with the cached data from the last synchronization
NonlinearFactorGraph graph(newFactors);
graph.push_back(smootherFactors_);
graph.push_back(filterSummarizationFactors_);
Values values(newTheta);
// Unfortunately, we must be careful here, as some of the smoother values
// and/or separator values may have been added previously
BOOST_FOREACH(const Values::ConstKeyValuePair& key_value, smootherValues_) {
if(!isam2_.getLinearizationPoint().exists(key_value.key)) {
values.insert(key_value.key, smootherValues_.at(key_value.key));
}
}
BOOST_FOREACH(const Values::ConstKeyValuePair& key_value, separatorValues_) {
if(!isam2_.getLinearizationPoint().exists(key_value.key)) {
values.insert(key_value.key, separatorValues_.at(key_value.key));
}
}
// Update the system using iSAM2
isam2Result = isam2_.update(graph, values, filterSummarizationSlots_, constrainedKeys, noRelinKeys);
// Clear out the cache and update the filter summarization slots
smootherFactors_.resize(0);
smootherValues_.clear();
filterSummarizationSlots_.clear();
filterSummarizationSlots_.insert(filterSummarizationSlots_.end(),
isam2Result.newFactorsIndices.end()-filterSummarizationFactors_.size(), isam2Result.newFactorsIndices.end());
filterSummarizationFactors_.resize(0);
synchronizationUpdatesAvailable_ = false;
} else {
// Update the system using iSAM2
isam2Result = isam2_.update(newFactors, newTheta, FastVector<size_t>(), constrainedKeys, noRelinKeys);
}
}
// Extract the ConcurrentIncrementalSmoother::Result information
result.iterations = 1;
result.linearVariables = separatorValues_.size();
result.nonlinearVariables = isam2_.getLinearizationPoint().size() - separatorValues_.size();
result.error = isam2_.getFactorsUnsafe().error(isam2_.calculateEstimate());
// Calculate the marginal on the separator from the smoother factors
if(separatorValues_.size() > 0) {
gttic(presync);
updateSmootherSummarization();
gttoc(presync);
}
gttoc(update);
return result;
}
/* ************************************************************************* */
void ConcurrentIncrementalSmoother::presync() {
gttic(presync);
gttoc(presync);
}
/* ************************************************************************* */
void ConcurrentIncrementalSmoother::getSummarizedFactors(NonlinearFactorGraph& summarizedFactors, Values& separatorValues) {
gttic(get_summarized_factors);
// Copy the previous calculated smoother summarization factors into the output
summarizedFactors.push_back(smootherSummarization_);
// Copy the separator values into the output
separatorValues.insert(separatorValues_);
gttoc(get_summarized_factors);
}
/* ************************************************************************* */
void ConcurrentIncrementalSmoother::synchronize(const NonlinearFactorGraph& smootherFactors, const Values& smootherValues,
const NonlinearFactorGraph& summarizedFactors, const Values& separatorValues) {
gttic(synchronize);
// Store the new smoother factors and values for addition during the next update call
smootherFactors_ = smootherFactors;
smootherValues_ = smootherValues;
// Store the new filter summarization and separator, to be replaced during the next update call
filterSummarizationFactors_ = summarizedFactors;
separatorValues_ = separatorValues;
// Flag the next smoother update to include the synchronization data
synchronizationUpdatesAvailable_ = true;
gttoc(synchronize);
}
/* ************************************************************************* */
void ConcurrentIncrementalSmoother::postsync() {
gttic(postsync);
gttoc(postsync);
}
/* ************************************************************************* */
void ConcurrentIncrementalSmoother::updateSmootherSummarization() {
// The smoother summarization factors are the resulting marginal factors on the separator
// variables that result from marginalizing out all of the other variables
// These marginal factors will be cached for later transmission to the filter using
// linear container factors
// Find all cliques that contain any separator variables
std::set<ISAM2Clique::shared_ptr> separatorCliques;
BOOST_FOREACH(const Values::ConstKeyValuePair& key_value, separatorValues_) {
Index index = isam2_.getOrdering().at(key_value.key);
ISAM2Clique::shared_ptr clique = isam2_[index];
separatorCliques.insert( clique );
}
// Create the set of clique keys
std::vector<Index> cliqueIndices;
std::vector<Key> cliqueKeys;
BOOST_FOREACH(const ISAM2Clique::shared_ptr& clique, separatorCliques) {
BOOST_FOREACH(Index index, clique->conditional()->frontals()) {
cliqueIndices.push_back(index);
cliqueKeys.push_back(isam2_.getOrdering().key(index));
}
}
std::sort(cliqueIndices.begin(), cliqueIndices.end());
std::sort(cliqueKeys.begin(), cliqueKeys.end());
// Gather all factors that involve only clique keys
std::set<size_t> cliqueFactorSlots;
BOOST_FOREACH(Index index, cliqueIndices) {
BOOST_FOREACH(size_t slot, isam2_.getVariableIndex()[index]) {
const NonlinearFactor::shared_ptr& factor = isam2_.getFactorsUnsafe().at(slot);
if(factor) {
std::set<Key> factorKeys(factor->begin(), factor->end());
if(std::includes(cliqueKeys.begin(), cliqueKeys.end(), factorKeys.begin(), factorKeys.end())) {
cliqueFactorSlots.insert(slot);
}
}
}
}
// Remove any factor included in the filter summarization
BOOST_FOREACH(size_t slot, filterSummarizationSlots_) {
cliqueFactorSlots.erase(slot);
}
// Create a factor graph from the identified factors
NonlinearFactorGraph graph;
BOOST_FOREACH(size_t slot, cliqueFactorSlots) {
graph.push_back(isam2_.getFactorsUnsafe().at(slot));
}
// Find the set of children of the separator cliques
std::set<ISAM2Clique::shared_ptr> childCliques;
// Add all of the children
BOOST_FOREACH(const ISAM2Clique::shared_ptr& clique, separatorCliques) {
childCliques.insert(clique->children().begin(), clique->children().end());
}
// Remove any separator cliques that were added because they were children of other separator cliques
BOOST_FOREACH(const ISAM2Clique::shared_ptr& clique, separatorCliques) {
childCliques.erase(clique);
}
// Augment the factor graph with cached factors from the children
BOOST_FOREACH(const ISAM2Clique::shared_ptr& clique, childCliques) {
LinearContainerFactor::shared_ptr factor(new LinearContainerFactor(clique->cachedFactor(), isam2_.getOrdering(), isam2_.getLinearizationPoint()));
graph.push_back( factor );
}
// Get the set of separator keys
gtsam::FastSet<Key> separatorKeys;
BOOST_FOREACH(const Values::ConstKeyValuePair& key_value, separatorValues_) {
separatorKeys.insert(key_value.key);
}
// Calculate the marginal factors on the separator
smootherSummarization_ = internal::calculateMarginalFactors(graph, isam2_.getLinearizationPoint(), separatorKeys,
isam2_.params().factorization == ISAM2Params::CHOLESKY ? EliminateCholesky : EliminateQR);
}
/* ************************************************************************* */
}/// namespace gtsam

View File

@ -0,0 +1,175 @@
/* ----------------------------------------------------------------------------
* GTSAM Copyright 2010, Georgia Tech Research Corporation,
* Atlanta, Georgia 30332-0415
* All Rights Reserved
* Authors: Frank Dellaert, et al. (see THANKS for the full author list)
* See LICENSE for the license information
* -------------------------------------------------------------------------- */
/**
* @file ConcurrentIncrementalSmoother.h
* @brief An iSAM2-based Smoother that implements the
* Concurrent Filtering and Smoothing interface.
* @author Stephen Williams
*/
// \callgraph
#pragma once
#include <gtsam_unstable/nonlinear/ConcurrentFilteringAndSmoothing.h>
#include <gtsam/nonlinear/ISAM2.h>
namespace gtsam {
/**
* An iSAM2-based incremental smoother that implements the Concurrent Filtering and Smoothing interface.
*/
class GTSAM_UNSTABLE_EXPORT ConcurrentIncrementalSmoother : public virtual ConcurrentSmoother {
public:
typedef boost::shared_ptr<ConcurrentIncrementalSmoother> shared_ptr;
typedef ConcurrentSmoother Base; ///< typedef for base class
/** Meta information returned about the update */
struct Result {
size_t iterations; ///< The number of optimizer iterations performed
size_t nonlinearVariables; ///< The number of variables that can be relinearized
size_t linearVariables; ///< The number of variables that must keep a constant linearization point
double error; ///< The final factor graph error
/// Constructor
Result() : iterations(0), nonlinearVariables(0), linearVariables(0), error(0) {};
/// Getter methods
size_t getIterations() const { return iterations; }
size_t getNonlinearVariables() const { return nonlinearVariables; }
size_t getLinearVariables() const { return linearVariables; }
double getError() const { return error; }
};
/** Default constructor */
ConcurrentIncrementalSmoother(const ISAM2Params& parameters = ISAM2Params()) : isam2_(parameters) {};
/** Default destructor */
virtual ~ConcurrentIncrementalSmoother() {};
/** Implement a GTSAM standard 'print' function */
virtual void print(const std::string& s = "Concurrent Incremental Smoother:\n", const KeyFormatter& keyFormatter = DefaultKeyFormatter) const;
/** Check if two Concurrent Smoothers are equal */
virtual bool equals(const ConcurrentSmoother& rhs, double tol = 1e-9) const;
/** Access the current set of factors */
const NonlinearFactorGraph& getFactors() const {
return isam2_.getFactorsUnsafe();
}
/** Access the current linearization point */
const Values& getLinearizationPoint() const {
return isam2_.getLinearizationPoint();
}
/** Access the current ordering */
const Ordering& getOrdering() const {
return isam2_.getOrdering();
}
/** Access the current set of deltas to the linearization point */
const VectorValues& getDelta() const {
return isam2_.getDelta();
}
/** Compute the current best estimate of all variables and return a full Values structure.
* If only a single variable is needed, it may be faster to call calculateEstimate(const KEY&).
*/
Values calculateEstimate() const {
return isam2_.calculateEstimate();
}
/** Compute the current best estimate of a single variable. This is generally faster than
* calling the no-argument version of calculateEstimate if only specific variables are needed.
* @param key
* @return
*/
template<class VALUE>
VALUE calculateEstimate(Key key) const {
return isam2_.calculateEstimate<VALUE>(key);
}
/**
* Add new factors and variables to the smoother.
*
* Add new measurements, and optionally new variables, to the smoother.
* This runs a full step of the ISAM2 algorithm, relinearizing and updating
* the solution as needed, according to the wildfire and relinearize
* thresholds.
*
* @param newFactors The new factors to be added to the smoother
* @param newTheta Initialization points for new variables to be added to the smoother
* You must include here all new variables occurring in newFactors (which were not already
* in the smoother). There must not be any variables here that do not occur in newFactors,
* and additionally, variables that were already in the system must not be included here.
*/
Result update(const NonlinearFactorGraph& newFactors = NonlinearFactorGraph(), const Values& newTheta = Values());
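// Illustrative usage sketch (added for exposition, not part of the original header): appending
// a new odometry factor to the smoother, as in the unit tests below. The keys, 'poseOdometry',
// 'noiseOdometry', 'previousPose', and 'smoother' are placeholders.
//
//   NonlinearFactorGraph newFactors;
//   newFactors.add(BetweenFactor<Pose3>(2, 3, poseOdometry, noiseOdometry));
//   Values newTheta;
//   newTheta.insert(3, previousPose.compose(poseOdometry));
//   ConcurrentIncrementalSmoother::Result result = smoother.update(newFactors, newTheta);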
/**
* Perform any required operations before the synchronization process starts.
* Called by 'synchronize'
*/
virtual void presync();
/**
* Populate the provided containers with factors that constitute the smoother branch summarization
* needed by the filter.
*
* @param summarizedFactors The summarized factors for the smoother branch
* @param separatorValues The linearization points of the separator variables
*/
virtual void getSummarizedFactors(NonlinearFactorGraph& summarizedFactors, Values& separatorValues);
/**
* Apply the new smoother factors sent by the filter, and the updated version of the filter
* branch summarized factors.
*
* @param smootherFactors A set of new factors added to the smoother from the filter
* @param smootherValues Linearization points for any new variables
* @param summarizedFactors An updated version of the filter branch summarized factors
* @param separatorValues The linearization points of the separator variables
*/
virtual void synchronize(const NonlinearFactorGraph& smootherFactors, const Values& smootherValues,
const NonlinearFactorGraph& summarizedFactors, const Values& separatorValues);
/**
* Perform any required operations after the synchronization process finishes.
* Called by 'synchronize'
*/
virtual void postsync();
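// Illustrative smoother-side synchronization sketch (added for exposition), following the call
// order exercised in the unit tests; the input containers are produced by the paired filter.
//
//   smoother.presync();
//   smoother.getSummarizedFactors(smootherSummarization, smootherSeparatorValues);
//   smoother.synchronize(smootherFactors, smootherValues, filterSummarization, filterSeparatorValues);
//   smoother.postsync();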
protected:
ISAM2 isam2_; ///< iSAM2 inference engine
// Storage for information received from the filter during the last synchronization
NonlinearFactorGraph smootherFactors_; ///< New factors to be added to the smoother during the next update
Values smootherValues_; ///< New variables to be added to the smoother during the next update
NonlinearFactorGraph filterSummarizationFactors_; ///< New filter summarization factors to replace the existing filter summarization during the next update
Values separatorValues_; ///< The linearization points of the separator variables. These should not be changed during optimization.
FastVector<size_t> filterSummarizationSlots_; ///< The slots in factor graph that correspond to the current filter summarization factors
bool synchronizationUpdatesAvailable_; ///< Flag indicating the currently stored synchronization updates have not been applied yet
// Storage for information to be sent to the filter
NonlinearFactorGraph smootherSummarization_; ///< A temporary holding place for calculated smoother summarization
private:
/** Calculate the smoother marginal factors on the separator variables */
void updateSmootherSummarization();
}; // ConcurrentIncrementalSmoother
/// Typedef for Matlab wrapping
typedef ConcurrentIncrementalSmoother::Result ConcurrentIncrementalSmootherResult;
}/// namespace gtsam

View File

@ -79,6 +79,26 @@ public:
return isam_.params();
}
/** Access the current set of factors */
const NonlinearFactorGraph& getFactors() const {
return isam_.getFactorsUnsafe();
}
/** Access the current linearization point */
const Values& getLinearizationPoint() const {
return isam_.getLinearizationPoint();
}
/** Access the current ordering */
const Ordering& getOrdering() const {
return isam_.getOrdering();
}
/** Access the current set of deltas to the linearization point */
const VectorValues& getDelta() const {
return isam_.getDelta();
}
protected:
/** An iSAM2 object used to perform inference. The smoother lag is controlled
* by what factors are removed each iteration */

View File

@ -52,7 +52,11 @@ TEST( BatchFixedLagSmoother, Example )
// the BatchFixedLagSmoother should be identical (even with the linearized approximations at
// the end of the smoothing lag)
SETDEBUG("BatchFixedLagSmoother update", false);
// SETDEBUG("BatchFixedLagSmoother update", true);
// SETDEBUG("BatchFixedLagSmoother reorder", true);
// SETDEBUG("BatchFixedLagSmoother optimize", true);
// SETDEBUG("BatchFixedLagSmoother marginalize", true);
// SETDEBUG("BatchFixedLagSmoother calculateMarginalFactors", true);
// Set up parameters
SharedDiagonal odometerNoise = noiseModel::Diagonal::Sigmas(Vector_(2, 0.1, 0.1));

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

View File

@ -0,0 +1,614 @@
/* ----------------------------------------------------------------------------
* GTSAM Copyright 2010, Georgia Tech Research Corporation,
* Atlanta, Georgia 30332-0415
* All Rights Reserved
* Authors: Frank Dellaert, et al. (see THANKS for the full author list)
* See LICENSE for the license information
* -------------------------------------------------------------------------- */
/**
* @file testConcurrentIncrementalSmoother.cpp
* @brief Unit tests for the Concurrent Incremental Smoother
* @author Stephen Williams (swilliams8@gatech.edu)
* @date Jan 5, 2013
*/
#include <gtsam_unstable/nonlinear/ConcurrentIncrementalSmoother.h>
#include <gtsam/slam/PriorFactor.h>
#include <gtsam/slam/BetweenFactor.h>
#include <gtsam/nonlinear/ISAM2.h>
#include <gtsam/nonlinear/LevenbergMarquardtOptimizer.h>
#include <gtsam/nonlinear/NonlinearFactorGraph.h>
#include <gtsam/nonlinear/LinearContainerFactor.h>
#include <gtsam/nonlinear/Ordering.h>
#include <gtsam/nonlinear/Values.h>
#include <gtsam/nonlinear/Symbol.h>
#include <gtsam/nonlinear/Key.h>
#include <gtsam/inference/JunctionTree.h>
#include <gtsam/geometry/Pose3.h>
#include <gtsam/base/TestableAssertions.h>
#include <CppUnitLite/TestHarness.h>
using namespace std;
using namespace gtsam;
namespace {
// Set up initial pose, odometry difference, loop closure difference, and initialization errors
const Pose3 poseInitial;
const Pose3 poseOdometry( Rot3::RzRyRx(Vector_(3, 0.05, 0.10, -0.75)), Point3(1.0, -0.25, 0.10) );
const Pose3 poseError( Rot3::RzRyRx(Vector_(3, 0.01, 0.02, -0.1)), Point3(0.05, -0.05, 0.02) );
// Set up noise models for the factors
const SharedDiagonal noisePrior = noiseModel::Isotropic::Sigma(6, 0.10);
const SharedDiagonal noiseOdometery = noiseModel::Diagonal::Sigmas(Vector_(6, 0.1, 0.1, 0.1, 0.5, 0.5, 0.5));
const SharedDiagonal noiseLoop = noiseModel::Diagonal::Sigmas(Vector_(6, 0.25, 0.25, 0.25, 1.0, 1.0, 1.0));
/* ************************************************************************* */
Values BatchOptimize(const NonlinearFactorGraph& graph, const Values& theta, int maxIter = 100) {
// Optimize the full problem with a fresh iSAM2 instance to obtain a batch reference solution
ISAM2Params parameters;
parameters.optimizationParams = ISAM2DoglegParams();
// parameters.maxIterations = maxIter;
// parameters.lambdaUpperBound = 1;
// parameters.lambdaInitial = 1;
// parameters.verbosity = NonlinearOptimizerParams::ERROR;
// parameters.verbosityLM = ISAM2Params::DAMPED;
// parameters.linearSolverType = SuccessiveLinearizationParams::MULTIFRONTAL_QR;
ISAM2 optimizer(parameters);
optimizer.update( graph, theta );
Values result = optimizer.calculateEstimate();
return result;
}
} // end namespace
/* ************************************************************************* */
TEST( ConcurrentIncrementalSmoother, equals )
{
// TODO: Test 'equals' more vigorously
// Create a Concurrent Incremental Smoother
ISAM2Params parameters1;
parameters1.optimizationParams = ISAM2DoglegParams();
ConcurrentIncrementalSmoother smoother1(parameters1);
// Create an identical Concurrent Incremental Smoother
ISAM2Params parameters2;
parameters2.optimizationParams = ISAM2DoglegParams();
ConcurrentIncrementalSmoother smoother2(parameters2);
// Create a different Concurrent Incremental Smoother
ISAM2Params parameters3;
parameters3.optimizationParams = ISAM2DoglegParams();
// parameters3.maxIterations = 1;
ConcurrentIncrementalSmoother smoother3(parameters3);
CHECK(assert_equal(smoother1, smoother1));
CHECK(assert_equal(smoother1, smoother2));
// CHECK(assert_inequal(smoother1, smoother3));
}
/* ************************************************************************* */
TEST( ConcurrentIncrementalSmoother, getFactors )
{
// Create a Concurrent Incremental Smoother
ISAM2Params parameters;
parameters.optimizationParams = ISAM2DoglegParams();
ConcurrentIncrementalSmoother smoother(parameters);
// Expected graph is empty
NonlinearFactorGraph expected1;
// Get actual graph
NonlinearFactorGraph actual1 = smoother.getFactors();
// Check
CHECK(assert_equal(expected1, actual1));
// Add some factors to the smoother
NonlinearFactorGraph newFactors1;
newFactors1.add(PriorFactor<Pose3>(1, poseInitial, noisePrior));
newFactors1.add(BetweenFactor<Pose3>(1, 2, poseOdometry, noiseOdometery));
Values newValues1;
newValues1.insert(1, Pose3());
newValues1.insert(2, newValues1.at<Pose3>(1).compose(poseOdometry));
smoother.update(newFactors1, newValues1);
// Expected graph
NonlinearFactorGraph expected2;
expected2.push_back(newFactors1);
// Get actual graph
NonlinearFactorGraph actual2 = smoother.getFactors();
// Check
CHECK(assert_equal(expected2, actual2));
// Add some more factors to the smoother
NonlinearFactorGraph newFactors2;
newFactors2.add(BetweenFactor<Pose3>(2, 3, poseOdometry, noiseOdometery));
newFactors2.add(BetweenFactor<Pose3>(3, 4, poseOdometry, noiseOdometery));
Values newValues2;
newValues2.insert(3, newValues1.at<Pose3>(2).compose(poseOdometry));
newValues2.insert(4, newValues2.at<Pose3>(3).compose(poseOdometry));
smoother.update(newFactors2, newValues2);
// Expected graph
NonlinearFactorGraph expected3;
expected3.push_back(newFactors1);
expected3.push_back(newFactors2);
// Get actual graph
NonlinearFactorGraph actual3 = smoother.getFactors();
// Check
CHECK(assert_equal(expected3, actual3));
}
/* ************************************************************************* */
TEST( ConcurrentIncrementalSmoother, getLinearizationPoint )
{
// Create a Concurrent Incremental Smoother
ISAM2Params parameters;
parameters.optimizationParams = ISAM2DoglegParams();
ConcurrentIncrementalSmoother smoother(parameters);
// Expected values is empty
Values expected1;
// Get Linearization Point
Values actual1 = smoother.getLinearizationPoint();
// Check
CHECK(assert_equal(expected1, actual1));
// Add some factors to the smoother
NonlinearFactorGraph newFactors1;
newFactors1.add(PriorFactor<Pose3>(1, poseInitial, noisePrior));
newFactors1.add(BetweenFactor<Pose3>(1, 2, poseOdometry, noiseOdometery));
Values newValues1;
newValues1.insert(1, Pose3());
newValues1.insert(2, newValues1.at<Pose3>(1).compose(poseOdometry));
smoother.update(newFactors1, newValues1);
// Expected values is equivalent to the provided values only because the provided linearization points were optimal
Values expected2;
expected2.insert(newValues1);
// Get actual linearization point
Values actual2 = smoother.getLinearizationPoint();
// Check
CHECK(assert_equal(expected2, actual2));
// Add some more factors to the smoother
NonlinearFactorGraph newFactors2;
newFactors2.add(BetweenFactor<Pose3>(2, 3, poseOdometry, noiseOdometery));
newFactors2.add(BetweenFactor<Pose3>(3, 4, poseOdometry, noiseOdometery));
Values newValues2;
newValues2.insert(3, newValues1.at<Pose3>(2).compose(poseOdometry));
newValues2.insert(4, newValues2.at<Pose3>(3).compose(poseOdometry));
smoother.update(newFactors2, newValues2);
// Expected values is equivalent to the provided values only because the provided linearization points were optimal
Values expected3;
expected3.insert(newValues1);
expected3.insert(newValues2);
// Get actual linearization point
Values actual3 = smoother.getLinearizationPoint();
// Check
CHECK(assert_equal(expected3, actual3));
}
/* ************************************************************************* */
TEST( ConcurrentIncrementalSmoother, getOrdering )
{
// TODO: Think about how to check ordering...
}
/* ************************************************************************* */
TEST( ConcurrentIncrementalSmoother, getDelta )
{
// TODO: Think about how to check ordering...
}
/* ************************************************************************* */
TEST( ConcurrentIncrementalSmoother, calculateEstimate )
{
// Create a Concurrent Incremental Smoother
ISAM2Params parameters;
parameters.optimizationParams = ISAM2DoglegParams();
ConcurrentIncrementalSmoother smoother(parameters);
// Expected values is empty
Values expected1;
// Get the initial estimate
Values actual1 = smoother.calculateEstimate();
// Check
CHECK(assert_equal(expected1, actual1));
// Add some factors to the smoother
NonlinearFactorGraph newFactors2;
newFactors2.add(PriorFactor<Pose3>(1, poseInitial, noisePrior));
newFactors2.add(BetweenFactor<Pose3>(1, 2, poseOdometry, noiseOdometery));
Values newValues2;
newValues2.insert(1, Pose3().compose(poseError));
newValues2.insert(2, newValues2.at<Pose3>(1).compose(poseOdometry).compose(poseError));
smoother.update(newFactors2, newValues2);
// Expected values from full batch optimization
NonlinearFactorGraph allFactors2;
allFactors2.push_back(newFactors2);
Values allValues2;
allValues2.insert(newValues2);
Values expected2 = BatchOptimize(allFactors2, allValues2);
// Get the actual estimate
Values actual2 = smoother.calculateEstimate();
// Check
CHECK(assert_equal(expected2, actual2, 1e-6));
// Add some more factors to the smoother
NonlinearFactorGraph newFactors3;
newFactors3.add(BetweenFactor<Pose3>(2, 3, poseOdometry, noiseOdometery));
newFactors3.add(BetweenFactor<Pose3>(3, 4, poseOdometry, noiseOdometery));
Values newValues3;
newValues3.insert(3, newValues2.at<Pose3>(2).compose(poseOdometry).compose(poseError));
newValues3.insert(4, newValues3.at<Pose3>(3).compose(poseOdometry).compose(poseError));
smoother.update(newFactors3, newValues3);
// Expected values from full batch optimization
NonlinearFactorGraph allFactors3;
allFactors3.push_back(newFactors2);
allFactors3.push_back(newFactors3);
Values allValues3;
allValues3.insert(newValues2);
allValues3.insert(newValues3);
Values expected3 = BatchOptimize(allFactors3, allValues3);
// Get the actual estimate
Values actual3 = smoother.calculateEstimate();
// Check
CHECK(assert_equal(expected3, actual3, 1e-6));
// Also check the single-variable version
Pose3 expectedPose1 = expected3.at<Pose3>(1);
Pose3 expectedPose2 = expected3.at<Pose3>(2);
Pose3 expectedPose3 = expected3.at<Pose3>(3);
Pose3 expectedPose4 = expected3.at<Pose3>(4);
// Also check the single-variable version
Pose3 actualPose1 = smoother.calculateEstimate<Pose3>(1);
Pose3 actualPose2 = smoother.calculateEstimate<Pose3>(2);
Pose3 actualPose3 = smoother.calculateEstimate<Pose3>(3);
Pose3 actualPose4 = smoother.calculateEstimate<Pose3>(4);
// Check
CHECK(assert_equal(expectedPose1, actualPose1, 1e-6));
CHECK(assert_equal(expectedPose2, actualPose2, 1e-6));
CHECK(assert_equal(expectedPose3, actualPose3, 1e-6));
CHECK(assert_equal(expectedPose4, actualPose4, 1e-6));
}
/* ************************************************************************* */
TEST( ConcurrentIncrementalSmoother, update_empty )
{
// Create a set of optimizer parameters
ISAM2Params parameters;
parameters.optimizationParams = ISAM2DoglegParams();
// Create a Concurrent Incremental Smoother
ConcurrentIncrementalSmoother smoother(parameters);
// Call update
smoother.update();
}
/* ************************************************************************* */
TEST( ConcurrentIncrementalSmoother, update_multiple )
{
// Create a Concurrent Incremental Smoother
ISAM2Params parameters;
parameters.optimizationParams = ISAM2DoglegParams();
ConcurrentIncrementalSmoother smoother(parameters);
// Expected values is empty
Values expected1;
// Get the initial estimate
Values actual1 = smoother.calculateEstimate();
// Check
CHECK(assert_equal(expected1, actual1));
// Add some factors to the smoother
NonlinearFactorGraph newFactors2;
newFactors2.add(PriorFactor<Pose3>(1, poseInitial, noisePrior));
newFactors2.add(BetweenFactor<Pose3>(1, 2, poseOdometry, noiseOdometery));
Values newValues2;
newValues2.insert(1, Pose3().compose(poseError));
newValues2.insert(2, newValues2.at<Pose3>(1).compose(poseOdometry).compose(poseError));
smoother.update(newFactors2, newValues2);
// Expected values from full batch optimization
NonlinearFactorGraph allFactors2;
allFactors2.push_back(newFactors2);
Values allValues2;
allValues2.insert(newValues2);
Values expected2 = BatchOptimize(allFactors2, allValues2);
// Get the actual estimate
Values actual2 = smoother.calculateEstimate();
// Check
CHECK(assert_equal(expected2, actual2, 1e-6));
// Add some more factors to the smoother
NonlinearFactorGraph newFactors3;
newFactors3.add(BetweenFactor<Pose3>(2, 3, poseOdometry, noiseOdometery));
newFactors3.add(BetweenFactor<Pose3>(3, 4, poseOdometry, noiseOdometery));
Values newValues3;
newValues3.insert(3, newValues2.at<Pose3>(2).compose(poseOdometry).compose(poseError));
newValues3.insert(4, newValues3.at<Pose3>(3).compose(poseOdometry).compose(poseError));
smoother.update(newFactors3, newValues3);
// Expected values from full batch optimization
NonlinearFactorGraph allFactors3;
allFactors3.push_back(newFactors2);
allFactors3.push_back(newFactors3);
Values allValues3;
allValues3.insert(newValues2);
allValues3.insert(newValues3);
Values expected3 = BatchOptimize(allFactors3, allValues3);
// Get the actual estimate
Values actual3 = smoother.calculateEstimate();
// Check
CHECK(assert_equal(expected3, actual3, 1e-6));
}
/* ************************************************************************* */
TEST( ConcurrentIncrementalSmoother, synchronize_empty )
{
// Create a set of optimizer parameters
ISAM2Params parameters;
parameters.optimizationParams = ISAM2DoglegParams();
// Create a Concurrent Incremental Smoother
ConcurrentIncrementalSmoother smoother(parameters);
// Create empty containers *from* the filter
NonlinearFactorGraph smootherFactors, filterSumarization;
Values smootherValues, filterSeparatorValues;
// Create expected values: these will be empty for this case
NonlinearFactorGraph expectedSmootherSummarization;
Values expectedSmootherSeparatorValues;
// Synchronize
NonlinearFactorGraph actualSmootherSummarization;
Values actualSmootherSeparatorValues;
smoother.presync();
smoother.getSummarizedFactors(actualSmootherSummarization, actualSmootherSeparatorValues);
smoother.synchronize(smootherFactors, smootherValues, filterSumarization, filterSeparatorValues);
smoother.postsync();
// Check
CHECK(assert_equal(expectedSmootherSummarization, actualSmootherSummarization, 1e-6));
CHECK(assert_equal(expectedSmootherSeparatorValues, actualSmootherSeparatorValues, 1e-6));
}
/* ************************************************************************* */
TEST( ConcurrentIncrementalSmoother, synchronize_1 )
{
// Create a set of optimizer parameters
ISAM2Params parameters;
parameters.optimizationParams = ISAM2DoglegParams();
// parameters.maxIterations = 1;
// Create a Concurrent Incremental Smoother
ConcurrentIncrementalSmoother smoother(parameters);
// Create a simple separator *from* the filter
NonlinearFactorGraph smootherFactors, filterSumarization;
Values smootherValues, filterSeparatorValues;
filterSeparatorValues.insert(1, Pose3().compose(poseError));
filterSeparatorValues.insert(2, filterSeparatorValues.at<Pose3>(1).compose(poseOdometry).compose(poseError));
Ordering ordering;
ordering.push_back(1);
ordering.push_back(2);
filterSumarization.add(LinearContainerFactor(PriorFactor<Pose3>(1, poseInitial, noisePrior).linearize(filterSeparatorValues, ordering), ordering, filterSeparatorValues));
filterSumarization.add(LinearContainerFactor(BetweenFactor<Pose3>(1, 2, poseOdometry, noiseOdometery).linearize(filterSeparatorValues, ordering), ordering, filterSeparatorValues));
// Create expected values: the smoother output will be empty for this case
NonlinearFactorGraph expectedSmootherSummarization;
Values expectedSmootherSeparatorValues;
NonlinearFactorGraph actualSmootherSummarization;
Values actualSmootherSeparatorValues;
smoother.presync();
smoother.getSummarizedFactors(actualSmootherSummarization, actualSmootherSeparatorValues);
smoother.synchronize(smootherFactors, smootherValues, filterSumarization, filterSeparatorValues);
smoother.postsync();
// Check
CHECK(assert_equal(expectedSmootherSummarization, actualSmootherSummarization, 1e-6));
CHECK(assert_equal(expectedSmootherSeparatorValues, actualSmootherSeparatorValues, 1e-6));
// Update the smoother
smoother.update();
// Check the factor graph. It should contain only the filter-provided factors
NonlinearFactorGraph expectedFactorGraph = filterSumarization;
NonlinearFactorGraph actualFactorGraph = smoother.getFactors();
CHECK(assert_equal(expectedFactorGraph, actualFactorGraph, 1e-6));
// Check the optimized value of smoother state
NonlinearFactorGraph allFactors;
allFactors.push_back(filterSumarization);
Values allValues;
allValues.insert(filterSeparatorValues);
Values expectedValues = BatchOptimize(allFactors, allValues,1);
Values actualValues = smoother.calculateEstimate();
CHECK(assert_equal(expectedValues, actualValues, 1e-6));
// Check the linearization point. The separator should remain identical to the filter provided values
Values expectedLinearizationPoint = filterSeparatorValues;
Values actualLinearizationPoint = smoother.getLinearizationPoint();
CHECK(assert_equal(expectedLinearizationPoint, actualLinearizationPoint, 1e-6));
}
/* ************************************************************************* */
TEST( ConcurrentIncrementalSmoother, synchronize_2 )
{
// Create a set of optimizer parameters
ISAM2Params parameters;
parameters.optimizationParams = ISAM2DoglegParams();
// parameters.maxIterations = 1;
// parameters.lambdaUpperBound = 1;
// parameters.lambdaInitial = 1;
// parameters.verbosity = NonlinearOptimizerParams::ERROR;
// parameters.verbosityLM = ISAM2Params::DAMPED;
// Create a Concurrent Incremental Smoother
ConcurrentIncrementalSmoother smoother(parameters);
// Create a separator and cached smoother factors *from* the filter
NonlinearFactorGraph smootherFactors, filterSumarization;
Values smootherValues, filterSeparatorValues;
Ordering ordering;
ordering.push_back(1);
ordering.push_back(2);
filterSeparatorValues.insert(1, Pose3().compose(poseError));
filterSeparatorValues.insert(2, filterSeparatorValues.at<Pose3>(1).compose(poseOdometry).compose(poseError));
filterSumarization.add(LinearContainerFactor(PriorFactor<Pose3>(1, poseInitial, noisePrior).linearize(filterSeparatorValues, ordering), ordering, filterSeparatorValues));
filterSumarization.add(LinearContainerFactor(BetweenFactor<Pose3>(1, 2, poseOdometry, noiseOdometery).linearize(filterSeparatorValues, ordering), ordering, filterSeparatorValues));
smootherFactors.add(BetweenFactor<Pose3>(2, 3, poseOdometry, noiseOdometery));
smootherFactors.add(BetweenFactor<Pose3>(3, 4, poseOdometry, noiseOdometery));
smootherValues.insert(3, filterSeparatorValues.at<Pose3>(2).compose(poseOdometry).compose(poseError));
smootherValues.insert(4, smootherValues.at<Pose3>(3).compose(poseOdometry).compose(poseError));
// Create expected values: the smoother output will be empty for this case
NonlinearFactorGraph expectedSmootherSummarization;
Values expectedSmootherSeparatorValues;
NonlinearFactorGraph actualSmootherSummarization;
Values actualSmootherSeparatorValues;
smoother.presync();
smoother.getSummarizedFactors(actualSmootherSummarization, actualSmootherSeparatorValues);
smoother.synchronize(smootherFactors, smootherValues, filterSumarization, filterSeparatorValues);
smoother.postsync();
// Check
CHECK(assert_equal(expectedSmootherSummarization, actualSmootherSummarization, 1e-6));
CHECK(assert_equal(expectedSmootherSeparatorValues, actualSmootherSeparatorValues, 1e-6));
// Update the smoother
smoother.update();
// Check the factor graph. It should contain only the filter-provided factors
NonlinearFactorGraph expectedFactorGraph;
expectedFactorGraph.push_back(smootherFactors);
expectedFactorGraph.push_back(filterSumarization);
NonlinearFactorGraph actualFactorGraph = smoother.getFactors();
CHECK(assert_equal(expectedFactorGraph, actualFactorGraph, 1e-6));
// Check the optimized value of smoother state
NonlinearFactorGraph allFactors;
allFactors.push_back(filterSumarization);
allFactors.push_back(smootherFactors);
Values allValues;
allValues.insert(filterSeparatorValues);
allValues.insert(smootherValues);
// allValues.print("Batch LinPoint:\n");
Values expectedValues = BatchOptimize(allFactors, allValues, 1);
Values actualValues = smoother.calculateEstimate();
CHECK(assert_equal(expectedValues, actualValues, 1e-6));
// Check the linearization point. The separator should remain identical to the filter provided values, but the others should be the optimal values
// Isam2 is changing internally the linearization points, so the following check is done only on the separator variables
// Values expectedLinearizationPoint = BatchOptimize(allFactors, allValues, 1);
Values expectedLinearizationPoint = filterSeparatorValues;
Values actualLinearizationPoint;
BOOST_FOREACH(const Values::ConstKeyValuePair& key_value, filterSeparatorValues) {
actualLinearizationPoint.insert(key_value.key, smoother.getLinearizationPoint().at(key_value.key));
}
CHECK(assert_equal(expectedLinearizationPoint, actualLinearizationPoint, 1e-6));
}
/* ************************************************************************* */
TEST( ConcurrentIncrementalSmoother, synchronize_3 )
{
// Create a set of optimizer parameters
ISAM2Params parameters;
parameters.optimizationParams = ISAM2DoglegParams();
// parameters.maxIterations = 1;
// parameters.lambdaUpperBound = 1;
// parameters.lambdaInitial = 1;
// parameters.verbosity = NonlinearOptimizerParams::ERROR;
// parameters.verbosityLM = ISAM2Params::DAMPED;
// Create a Concurrent Incremental Smoother
ConcurrentIncrementalSmoother smoother(parameters);
// Create a separator and cached smoother factors *from* the filter
NonlinearFactorGraph smootherFactors, filterSumarization;
Values smootherValues, filterSeparatorValues;
Ordering ordering;
ordering.push_back(1);
ordering.push_back(2);
filterSeparatorValues.insert(1, Pose3().compose(poseError));
filterSeparatorValues.insert(2, filterSeparatorValues.at<Pose3>(1).compose(poseOdometry).compose(poseError));
filterSumarization.add(LinearContainerFactor(PriorFactor<Pose3>(1, poseInitial, noisePrior).linearize(filterSeparatorValues, ordering), ordering, filterSeparatorValues));
filterSumarization.add(LinearContainerFactor(BetweenFactor<Pose3>(1, 2, poseOdometry, noiseOdometery).linearize(filterSeparatorValues, ordering), ordering, filterSeparatorValues));
smootherFactors.add(BetweenFactor<Pose3>(2, 3, poseOdometry, noiseOdometery));
smootherFactors.add(BetweenFactor<Pose3>(3, 4, poseOdometry, noiseOdometery));
smootherFactors.add(PriorFactor<Pose3>(4, poseInitial, noisePrior));
smootherValues.insert(3, filterSeparatorValues.at<Pose3>(2).compose(poseOdometry).compose(poseError));
smootherValues.insert(4, smootherValues.at<Pose3>(3).compose(poseOdometry).compose(poseError));
// Create expected values: the smoother output will be empty for this case
NonlinearFactorGraph expectedSmootherSummarization;
Values expectedSmootherSeparatorValues;
NonlinearFactorGraph actualSmootherSummarization;
Values actualSmootherSeparatorValues;
smoother.presync();
smoother.getSummarizedFactors(actualSmootherSummarization, actualSmootherSeparatorValues);
smoother.synchronize(smootherFactors, smootherValues, filterSumarization, filterSeparatorValues);
smoother.postsync();
// Check
CHECK(assert_equal(expectedSmootherSummarization, actualSmootherSummarization, 1e-6));
CHECK(assert_equal(expectedSmootherSeparatorValues, actualSmootherSeparatorValues, 1e-6));
// Update the smoother
smoother.update();
smoother.presync();
smoother.getSummarizedFactors(actualSmootherSummarization, actualSmootherSeparatorValues);
// Check the optimized value of smoother state
NonlinearFactorGraph allFactors = smootherFactors;
Values allValues = smoother.getLinearizationPoint();
ordering = smoother.getOrdering(); // I'm really hoping this is an acceptable ordering...
GaussianFactorGraph::shared_ptr LinFactorGraph = allFactors.linearize(allValues, ordering);
// GaussianSequentialSolver GSS = GaussianSequentialSolver(*LinFactorGraph);
// GaussianBayesNet::shared_ptr GBNsptr = GSS.eliminate();
FastSet<Index> allkeys = LinFactorGraph->keys();
BOOST_FOREACH(const Values::ConstKeyValuePair& key_value, filterSeparatorValues) {
Index index = ordering.at(key_value.key);
allkeys.erase(index);
}
std::vector<Index> variables(allkeys.begin(), allkeys.end());
std::pair<GaussianConditional::shared_ptr, GaussianFactorGraph> result = LinFactorGraph->eliminate(variables, EliminateCholesky);
expectedSmootherSummarization.resize(0);
BOOST_FOREACH(const GaussianFactor::shared_ptr& factor, result.second) {
expectedSmootherSummarization.add(LinearContainerFactor(factor, ordering, allValues));
}
CHECK(assert_equal(expectedSmootherSummarization, actualSmootherSummarization, 1e-6));
}
/* ************************************************************************* */
int main() { TestResult tr; return TestRegistry::runAllTests(tr);}
/* ************************************************************************* */

View File

@ -0,0 +1,614 @@
/* ----------------------------------------------------------------------------
* GTSAM Copyright 2010, Georgia Tech Research Corporation,
* Atlanta, Georgia 30332-0415
* All Rights Reserved
* Authors: Frank Dellaert, et al. (see THANKS for the full author list)
* See LICENSE for the license information
* -------------------------------------------------------------------------- */
/**
* @file testConcurrentIncrementalSmoother.cpp
* @brief Unit tests for the Concurrent Incremental Smoother
* @author Stephen Williams (swilliams8@gatech.edu)
* @date Jan 5, 2013
*/
#include <gtsam_unstable/nonlinear/ConcurrentIncrementalSmoother.h>
#include <gtsam/slam/PriorFactor.h>
#include <gtsam/slam/BetweenFactor.h>
#include <gtsam/nonlinear/ISAM2.h>
#include <gtsam/nonlinear/LevenbergMarquardtOptimizer.h>
#include <gtsam/nonlinear/NonlinearFactorGraph.h>
#include <gtsam/nonlinear/LinearContainerFactor.h>
#include <gtsam/nonlinear/Ordering.h>
#include <gtsam/nonlinear/Values.h>
#include <gtsam/nonlinear/Symbol.h>
#include <gtsam/nonlinear/Key.h>
#include <gtsam/inference/JunctionTree.h>
#include <gtsam/geometry/Pose3.h>
#include <gtsam/base/TestableAssertions.h>
#include <CppUnitLite/TestHarness.h>
using namespace std;
using namespace gtsam;
namespace {
// Set up initial pose, odometry difference, loop closure difference, and initialization errors
const Pose3 poseInitial;
const Pose3 poseOdometry( Rot3::RzRyRx(Vector_(3, 0.05, 0.10, -0.75)), Point3(1.0, -0.25, 0.10) );
const Pose3 poseError( Rot3::RzRyRx(Vector_(3, 0.01, 0.02, -0.1)), Point3(0.05, -0.05, 0.02) );
// Set up noise models for the factors
const SharedDiagonal noisePrior = noiseModel::Isotropic::Sigma(6, 0.10);
const SharedDiagonal noiseOdometery = noiseModel::Diagonal::Sigmas(Vector_(6, 0.1, 0.1, 0.1, 0.5, 0.5, 0.5));
const SharedDiagonal noiseLoop = noiseModel::Diagonal::Sigmas(Vector_(6, 0.25, 0.25, 0.25, 1.0, 1.0, 1.0));
/* ************************************************************************* */
Values BatchOptimize(const NonlinearFactorGraph& graph, const Values& theta, int maxIter = 100) {
// Optimize the full problem with a fresh iSAM2 instance to obtain a batch reference solution
ISAM2Params parameters;
parameters.optimizationParams = ISAM2GaussNewtonParams();
// parameters.maxIterations = maxIter;
// parameters.lambdaUpperBound = 1;
// parameters.lambdaInitial = 1;
// parameters.verbosity = NonlinearOptimizerParams::ERROR;
// parameters.verbosityLM = ISAM2Params::DAMPED;
// parameters.linearSolverType = SuccessiveLinearizationParams::MULTIFRONTAL_QR;
ISAM2 optimizer(parameters);
optimizer.update( graph, theta );
Values result = optimizer.calculateEstimate();
return result;
}
} // end namespace
/* ************************************************************************* */
TEST( ConcurrentIncrementalSmoother, equals )
{
// TODO: Test 'equals' more rigorously
// Create a Concurrent Incremental Smoother
ISAM2Params parameters1;
parameters1.optimizationParams = ISAM2GaussNewtonParams();
ConcurrentIncrementalSmoother smoother1(parameters1);
// Create an identical Concurrent Incremental Smoother
ISAM2Params parameters2;
parameters2.optimizationParams = ISAM2GaussNewtonParams();
ConcurrentIncrementalSmoother smoother2(parameters2);
// Create a different Concurrent Incremental Smoother
ISAM2Params parameters3;
parameters3.optimizationParams = ISAM2GaussNewtonParams();
// parameters3.maxIterations = 1;
ConcurrentIncrementalSmoother smoother3(parameters3);
CHECK(assert_equal(smoother1, smoother1));
CHECK(assert_equal(smoother1, smoother2));
// CHECK(assert_inequal(smoother1, smoother3));
}
/* ************************************************************************* */
TEST( ConcurrentIncrementalSmoother, getFactors )
{
// Create a Concurrent Incremental Smoother
ISAM2Params parameters;
parameters.optimizationParams = ISAM2GaussNewtonParams();
ConcurrentIncrementalSmoother smoother(parameters);
// Expected graph is empty
NonlinearFactorGraph expected1;
// Get actual graph
NonlinearFactorGraph actual1 = smoother.getFactors();
// Check
CHECK(assert_equal(expected1, actual1));
// Add some factors to the smoother
NonlinearFactorGraph newFactors1;
newFactors1.add(PriorFactor<Pose3>(1, poseInitial, noisePrior));
newFactors1.add(BetweenFactor<Pose3>(1, 2, poseOdometry, noiseOdometery));
Values newValues1;
newValues1.insert(1, Pose3());
newValues1.insert(2, newValues1.at<Pose3>(1).compose(poseOdometry));
smoother.update(newFactors1, newValues1);
// Expected graph
NonlinearFactorGraph expected2;
expected2.push_back(newFactors1);
// Get actual graph
NonlinearFactorGraph actual2 = smoother.getFactors();
// Check
CHECK(assert_equal(expected2, actual2));
// Add some more factors to the smoother
NonlinearFactorGraph newFactors2;
newFactors2.add(BetweenFactor<Pose3>(2, 3, poseOdometry, noiseOdometery));
newFactors2.add(BetweenFactor<Pose3>(3, 4, poseOdometry, noiseOdometery));
Values newValues2;
newValues2.insert(3, newValues1.at<Pose3>(2).compose(poseOdometry));
newValues2.insert(4, newValues2.at<Pose3>(3).compose(poseOdometry));
smoother.update(newFactors2, newValues2);
// Expected graph
NonlinearFactorGraph expected3;
expected3.push_back(newFactors1);
expected3.push_back(newFactors2);
// Get actual graph
NonlinearFactorGraph actual3 = smoother.getFactors();
// Check
CHECK(assert_equal(expected3, actual3));
}
/* ************************************************************************* */
TEST( ConcurrentIncrementalSmoother, getLinearizationPoint )
{
// Create a Concurrent Incremental Smoother
ISAM2Params parameters;
parameters.optimizationParams = ISAM2GaussNewtonParams();
ConcurrentIncrementalSmoother smoother(parameters);
// Expected values is empty
Values expected1;
// Get Linearization Point
Values actual1 = smoother.getLinearizationPoint();
// Check
CHECK(assert_equal(expected1, actual1));
// Add some factors to the smoother
NonlinearFactorGraph newFactors1;
newFactors1.add(PriorFactor<Pose3>(1, poseInitial, noisePrior));
newFactors1.add(BetweenFactor<Pose3>(1, 2, poseOdometry, noiseOdometery));
Values newValues1;
newValues1.insert(1, Pose3());
newValues1.insert(2, newValues1.at<Pose3>(1).compose(poseOdometry));
smoother.update(newFactors1, newValues1);
// Expected values is equivalent to the provided values only because the provided linearization points were optimal
Values expected2;
expected2.insert(newValues1);
// Get actual linearization point
Values actual2 = smoother.getLinearizationPoint();
// Check
CHECK(assert_equal(expected2, actual2));
// Add some more factors to the smoother
NonlinearFactorGraph newFactors2;
newFactors2.add(BetweenFactor<Pose3>(2, 3, poseOdometry, noiseOdometery));
newFactors2.add(BetweenFactor<Pose3>(3, 4, poseOdometry, noiseOdometery));
Values newValues2;
newValues2.insert(3, newValues1.at<Pose3>(2).compose(poseOdometry));
newValues2.insert(4, newValues2.at<Pose3>(3).compose(poseOdometry));
smoother.update(newFactors2, newValues2);
// Expected values is equivalent to the provided values only because the provided linearization points were optimal
Values expected3;
expected3.insert(newValues1);
expected3.insert(newValues2);
// Get actual linearization point
Values actual3 = smoother.getLinearizationPoint();
// Check
CHECK(assert_equal(expected3, actual3));
}
/* ************************************************************************* */
TEST( ConcurrentIncrementalSmoother, getOrdering )
{
// TODO: Think about how to check ordering...
}
/* ************************************************************************* */
TEST( ConcurrentIncrementalSmoother, getDelta )
{
// TODO: Think about how to check the delta...
}
/* ************************************************************************* */
TEST( ConcurrentIncrementalSmoother, calculateEstimate )
{
// Create a Concurrent Incremental Smoother
ISAM2Params parameters;
parameters.optimizationParams = ISAM2GaussNewtonParams();
ConcurrentIncrementalSmoother smoother(parameters);
// Expected values is empty
Values expected1;
// Get the current estimate
Values actual1 = smoother.calculateEstimate();
// Check
CHECK(assert_equal(expected1, actual1));
// Add some factors to the smoother
NonlinearFactorGraph newFactors2;
newFactors2.add(PriorFactor<Pose3>(1, poseInitial, noisePrior));
newFactors2.add(BetweenFactor<Pose3>(1, 2, poseOdometry, noiseOdometery));
Values newValues2;
newValues2.insert(1, Pose3().compose(poseError));
newValues2.insert(2, newValues2.at<Pose3>(1).compose(poseOdometry).compose(poseError));
smoother.update(newFactors2, newValues2);
// Expected values from full batch optimization
NonlinearFactorGraph allFactors2;
allFactors2.push_back(newFactors2);
Values allValues2;
allValues2.insert(newValues2);
Values expected2 = BatchOptimize(allFactors2, allValues2);
// Get the actual estimate
Values actual2 = smoother.calculateEstimate();
// Check
CHECK(assert_equal(expected2, actual2, 1e-6));
// Add some more factors to the smoother
NonlinearFactorGraph newFactors3;
newFactors3.add(BetweenFactor<Pose3>(2, 3, poseOdometry, noiseOdometery));
newFactors3.add(BetweenFactor<Pose3>(3, 4, poseOdometry, noiseOdometery));
Values newValues3;
newValues3.insert(3, newValues2.at<Pose3>(2).compose(poseOdometry).compose(poseError));
newValues3.insert(4, newValues3.at<Pose3>(3).compose(poseOdometry).compose(poseError));
smoother.update(newFactors3, newValues3);
// Expected values from full batch optimization
NonlinearFactorGraph allFactors3;
allFactors3.push_back(newFactors2);
allFactors3.push_back(newFactors3);
Values allValues3;
allValues3.insert(newValues2);
allValues3.insert(newValues3);
Values expected3 = BatchOptimize(allFactors3, allValues3);
// Get the actual estimate
Values actual3 = smoother.calculateEstimate();
// Check
CHECK(assert_equal(expected3, actual3, 1e-6));
// Also check the single-variable version
Pose3 expectedPose1 = expected3.at<Pose3>(1);
Pose3 expectedPose2 = expected3.at<Pose3>(2);
Pose3 expectedPose3 = expected3.at<Pose3>(3);
Pose3 expectedPose4 = expected3.at<Pose3>(4);
// Also check the single-variable version
Pose3 actualPose1 = smoother.calculateEstimate<Pose3>(1);
Pose3 actualPose2 = smoother.calculateEstimate<Pose3>(2);
Pose3 actualPose3 = smoother.calculateEstimate<Pose3>(3);
Pose3 actualPose4 = smoother.calculateEstimate<Pose3>(4);
// Check
CHECK(assert_equal(expectedPose1, actualPose1, 1e-6));
CHECK(assert_equal(expectedPose2, actualPose2, 1e-6));
CHECK(assert_equal(expectedPose3, actualPose3, 1e-6));
CHECK(assert_equal(expectedPose4, actualPose4, 1e-6));
}
/* ************************************************************************* */
TEST( ConcurrentIncrementalSmoother, update_empty )
{
// Create a set of optimizer parameters
ISAM2Params parameters;
parameters.optimizationParams = ISAM2GaussNewtonParams();
// Create a Concurrent Incremental Smoother
ConcurrentIncrementalSmoother smoother(parameters);
// Call update
smoother.update();
}
/* ************************************************************************* */
TEST( ConcurrentIncrementalSmoother, update_multiple )
{
// Create a Concurrent Incremental Smoother
ISAM2Params parameters;
parameters.optimizationParams = ISAM2GaussNewtonParams();
ConcurrentIncrementalSmoother smoother(parameters);
// Expected values is empty
Values expected1;
// Get the current estimate
Values actual1 = smoother.calculateEstimate();
// Check
CHECK(assert_equal(expected1, actual1));
// Add some factors to the smoother
NonlinearFactorGraph newFactors2;
newFactors2.add(PriorFactor<Pose3>(1, poseInitial, noisePrior));
newFactors2.add(BetweenFactor<Pose3>(1, 2, poseOdometry, noiseOdometery));
Values newValues2;
newValues2.insert(1, Pose3().compose(poseError));
newValues2.insert(2, newValues2.at<Pose3>(1).compose(poseOdometry).compose(poseError));
smoother.update(newFactors2, newValues2);
// Expected values from full batch optimization
NonlinearFactorGraph allFactors2;
allFactors2.push_back(newFactors2);
Values allValues2;
allValues2.insert(newValues2);
Values expected2 = BatchOptimize(allFactors2, allValues2);
// Get the actual estimate
Values actual2 = smoother.calculateEstimate();
// Check
CHECK(assert_equal(expected2, actual2, 1e-6));
// Add some more factors to the smoother
NonlinearFactorGraph newFactors3;
newFactors3.add(BetweenFactor<Pose3>(2, 3, poseOdometry, noiseOdometery));
newFactors3.add(BetweenFactor<Pose3>(3, 4, poseOdometry, noiseOdometery));
Values newValues3;
newValues3.insert(3, newValues2.at<Pose3>(2).compose(poseOdometry).compose(poseError));
newValues3.insert(4, newValues3.at<Pose3>(3).compose(poseOdometry).compose(poseError));
smoother.update(newFactors3, newValues3);
// Expected values from full batch optimization
NonlinearFactorGraph allFactors3;
allFactors3.push_back(newFactors2);
allFactors3.push_back(newFactors3);
Values allValues3;
allValues3.insert(newValues2);
allValues3.insert(newValues3);
Values expected3 = BatchOptimize(allFactors3, allValues3);
// Get the actual estimate
Values actual3 = smoother.calculateEstimate();
// Check
CHECK(assert_equal(expected3, actual3, 1e-6));
}
/* ************************************************************************* */
TEST( ConcurrentIncrementalSmoother, synchronize_empty )
{
// Create a set of optimizer parameters
ISAM2Params parameters;
parameters.optimizationParams = ISAM2GaussNewtonParams();
// Create a Concurrent Incremental Smoother
ConcurrentIncrementalSmoother smoother(parameters);
// Create empty containers *from* the filter
NonlinearFactorGraph smootherFactors, filterSumarization;
Values smootherValues, filterSeparatorValues;
// Create expected values: these will be empty for this case
NonlinearFactorGraph expectedSmootherSummarization;
Values expectedSmootherSeparatorValues;
// Synchronize
NonlinearFactorGraph actualSmootherSummarization;
Values actualSmootherSeparatorValues;
smoother.presync();
smoother.getSummarizedFactors(actualSmootherSummarization, actualSmootherSeparatorValues);
smoother.synchronize(smootherFactors, smootherValues, filterSumarization, filterSeparatorValues);
smoother.postsync();
// Check
CHECK(assert_equal(expectedSmootherSummarization, actualSmootherSummarization, 1e-6));
CHECK(assert_equal(expectedSmootherSeparatorValues, actualSmootherSeparatorValues, 1e-6));
}
/* ************************************************************************* */
TEST( ConcurrentIncrementalSmoother, synchronize_1 )
{
// Create a set of optimizer parameters
ISAM2Params parameters;
parameters.optimizationParams = ISAM2GaussNewtonParams();
// parameters.maxIterations = 1;
// Create a Concurrent Incremental Smoother
ConcurrentIncrementalSmoother smoother(parameters);
// Create a simple separator *from* the filter
NonlinearFactorGraph smootherFactors, filterSumarization;
Values smootherValues, filterSeparatorValues;
filterSeparatorValues.insert(1, Pose3().compose(poseError));
filterSeparatorValues.insert(2, filterSeparatorValues.at<Pose3>(1).compose(poseOdometry).compose(poseError));
Ordering ordering;
ordering.push_back(1);
ordering.push_back(2);
filterSumarization.add(LinearContainerFactor(PriorFactor<Pose3>(1, poseInitial, noisePrior).linearize(filterSeparatorValues, ordering), ordering, filterSeparatorValues));
filterSumarization.add(LinearContainerFactor(BetweenFactor<Pose3>(1, 2, poseOdometry, noiseOdometery).linearize(filterSeparatorValues, ordering), ordering, filterSeparatorValues));
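// The filter hands over its summarized branch as linearized factors wrapped in
// LinearContainerFactor objects, anchored at the separator's linearization point.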
// Create expected values: the smoother output will be empty for this case
NonlinearFactorGraph expectedSmootherSummarization;
Values expectedSmootherSeparatorValues;
NonlinearFactorGraph actualSmootherSummarization;
Values actualSmootherSeparatorValues;
smoother.presync();
smoother.getSummarizedFactors(actualSmootherSummarization, actualSmootherSeparatorValues);
smoother.synchronize(smootherFactors, smootherValues, filterSumarization, filterSeparatorValues);
smoother.postsync();
// Check
CHECK(assert_equal(expectedSmootherSummarization, actualSmootherSummarization, 1e-6));
CHECK(assert_equal(expectedSmootherSeparatorValues, actualSmootherSeparatorValues, 1e-6));
// Update the smoother
smoother.update();
// Check the factor graph. It should contain only the filter-provided factors
NonlinearFactorGraph expectedFactorGraph = filterSumarization;
NonlinearFactorGraph actualFactorGraph = smoother.getFactors();
CHECK(assert_equal(expectedFactorGraph, actualFactorGraph, 1e-6));
// Check the optimized value of smoother state
NonlinearFactorGraph allFactors;
allFactors.push_back(filterSumarization);
Values allValues;
allValues.insert(filterSeparatorValues);
Values expectedValues = BatchOptimize(allFactors, allValues,1);
Values actualValues = smoother.calculateEstimate();
CHECK(assert_equal(expectedValues, actualValues, 1e-6));
// Check the linearization point. The separator should remain identical to the filter provided values
Values expectedLinearizationPoint = filterSeparatorValues;
Values actualLinearizationPoint = smoother.getLinearizationPoint();
CHECK(assert_equal(expectedLinearizationPoint, actualLinearizationPoint, 1e-6));
}
/* ************************************************************************* */
TEST( ConcurrentIncrementalSmoother, synchronize_2 )
{
// Create a set of optimizer parameters
ISAM2Params parameters;
parameters.optimizationParams = ISAM2GaussNewtonParams();
// parameters.maxIterations = 1;
// parameters.lambdaUpperBound = 1;
// parameters.lambdaInitial = 1;
// parameters.verbosity = NonlinearOptimizerParams::ERROR;
// parameters.verbosityLM = ISAM2Params::DAMPED;
// Create a Concurrent Incremental Smoother
ConcurrentIncrementalSmoother smoother(parameters);
// Create a separator and cached smoother factors *from* the filter
NonlinearFactorGraph smootherFactors, filterSumarization;
Values smootherValues, filterSeparatorValues;
Ordering ordering;
ordering.push_back(1);
ordering.push_back(2);
filterSeparatorValues.insert(1, Pose3().compose(poseError));
filterSeparatorValues.insert(2, filterSeparatorValues.at<Pose3>(1).compose(poseOdometry).compose(poseError));
filterSumarization.add(LinearContainerFactor(PriorFactor<Pose3>(1, poseInitial, noisePrior).linearize(filterSeparatorValues, ordering), ordering, filterSeparatorValues));
filterSumarization.add(LinearContainerFactor(BetweenFactor<Pose3>(1, 2, poseOdometry, noiseOdometery).linearize(filterSeparatorValues, ordering), ordering, filterSeparatorValues));
smootherFactors.add(BetweenFactor<Pose3>(2, 3, poseOdometry, noiseOdometery));
smootherFactors.add(BetweenFactor<Pose3>(3, 4, poseOdometry, noiseOdometery));
smootherValues.insert(3, filterSeparatorValues.at<Pose3>(2).compose(poseOdometry).compose(poseError));
smootherValues.insert(4, smootherValues.at<Pose3>(3).compose(poseOdometry).compose(poseError));
// Create expected values: the smoother output will be empty for this case
NonlinearFactorGraph expectedSmootherSummarization;
Values expectedSmootherSeparatorValues;
NonlinearFactorGraph actualSmootherSummarization;
Values actualSmootherSeparatorValues;
smoother.presync();
smoother.getSummarizedFactors(actualSmootherSummarization, actualSmootherSeparatorValues);
smoother.synchronize(smootherFactors, smootherValues, filterSumarization, filterSeparatorValues);
smoother.postsync();
// Check
CHECK(assert_equal(expectedSmootherSummarization, actualSmootherSummarization, 1e-6));
CHECK(assert_equal(expectedSmootherSeparatorValues, actualSmootherSeparatorValues, 1e-6));
// Update the smoother
smoother.update();
// Check the factor graph. It should contain only the filter-provided factors
NonlinearFactorGraph expectedFactorGraph;
expectedFactorGraph.push_back(smootherFactors);
expectedFactorGraph.push_back(filterSumarization);
NonlinearFactorGraph actualFactorGraph = smoother.getFactors();
CHECK(assert_equal(expectedFactorGraph, actualFactorGraph, 1e-6));
// Check the optimized value of smoother state
NonlinearFactorGraph allFactors;
allFactors.push_back(filterSumarization);
allFactors.push_back(smootherFactors);
Values allValues;
allValues.insert(filterSeparatorValues);
allValues.insert(smootherValues);
// allValues.print("Batch LinPoint:\n");
Values expectedValues = BatchOptimize(allFactors, allValues, 1);
Values actualValues = smoother.calculateEstimate();
CHECK(assert_equal(expectedValues, actualValues, 1e-6));
// Check the linearization point. The separator should remain identical to the filter provided values, but the others should be the optimal values
// Isam2 is changing internally the linearization points, so the following check is done only on the separator variables
// Values expectedLinearizationPoint = BatchOptimize(allFactors, allValues, 1);
Values expectedLinearizationPoint = filterSeparatorValues;
Values actualLinearizationPoint;
BOOST_FOREACH(const Values::ConstKeyValuePair& key_value, filterSeparatorValues) {
actualLinearizationPoint.insert(key_value.key, smoother.getLinearizationPoint().at(key_value.key));
}
CHECK(assert_equal(expectedLinearizationPoint, actualLinearizationPoint, 1e-6));
}
/* ************************************************************************* */
TEST( ConcurrentIncrementalSmoother, synchronize_3 )
{
// Create a set of optimizer parameters
ISAM2Params parameters;
parameters.optimizationParams = ISAM2GaussNewtonParams();
// parameters.maxIterations = 1;
// parameters.lambdaUpperBound = 1;
// parameters.lambdaInitial = 1;
// parameters.verbosity = NonlinearOptimizerParams::ERROR;
// parameters.verbosityLM = ISAM2Params::DAMPED;
// Create a Concurrent Incremental Smoother
ConcurrentIncrementalSmoother smoother(parameters);
// Create a separator and cached smoother factors *from* the filter
NonlinearFactorGraph smootherFactors, filterSumarization;
Values smootherValues, filterSeparatorValues;
Ordering ordering;
ordering.push_back(1);
ordering.push_back(2);
filterSeparatorValues.insert(1, Pose3().compose(poseError));
filterSeparatorValues.insert(2, filterSeparatorValues.at<Pose3>(1).compose(poseOdometry).compose(poseError));
filterSumarization.add(LinearContainerFactor(PriorFactor<Pose3>(1, poseInitial, noisePrior).linearize(filterSeparatorValues, ordering), ordering, filterSeparatorValues));
filterSumarization.add(LinearContainerFactor(BetweenFactor<Pose3>(1, 2, poseOdometry, noiseOdometery).linearize(filterSeparatorValues, ordering), ordering, filterSeparatorValues));
smootherFactors.add(BetweenFactor<Pose3>(2, 3, poseOdometry, noiseOdometery));
smootherFactors.add(BetweenFactor<Pose3>(3, 4, poseOdometry, noiseOdometery));
smootherFactors.add(PriorFactor<Pose3>(4, poseInitial, noisePrior));
smootherValues.insert(3, filterSeparatorValues.at<Pose3>(2).compose(poseOdometry).compose(poseError));
smootherValues.insert(4, smootherValues.at<Pose3>(3).compose(poseOdometry).compose(poseError));
// Create expected values: the smoother output will be empty for this case
NonlinearFactorGraph expectedSmootherSummarization;
Values expectedSmootherSeparatorValues;
NonlinearFactorGraph actualSmootherSummarization;
Values actualSmootherSeparatorValues;
smoother.presync();
smoother.getSummarizedFactors(actualSmootherSummarization, actualSmootherSeparatorValues);
smoother.synchronize(smootherFactors, smootherValues, filterSumarization, filterSeparatorValues);
smoother.postsync();
// Check
CHECK(assert_equal(expectedSmootherSummarization, actualSmootherSummarization, 1e-6));
CHECK(assert_equal(expectedSmootherSeparatorValues, actualSmootherSeparatorValues, 1e-6));
// Update the smoother
smoother.update();
smoother.presync();
smoother.getSummarizedFactors(actualSmootherSummarization, actualSmootherSeparatorValues);
// Compute the expected smoother summarization by marginalizing out the smoother-only variables
NonlinearFactorGraph allFactors = smootherFactors;
Values allValues = smoother.getLinearizationPoint();
ordering = smoother.getOrdering(); // I'm really hoping this is an acceptable ordering...
GaussianFactorGraph::shared_ptr LinFactorGraph = allFactors.linearize(allValues, ordering);
// GaussianSequentialSolver GSS = GaussianSequentialSolver(*LinFactorGraph);
// GaussianBayesNet::shared_ptr GBNsptr = GSS.eliminate();
FastSet<Index> allkeys = LinFactorGraph->keys();
BOOST_FOREACH(const Values::ConstKeyValuePair& key_value, filterSeparatorValues) {
Index index = ordering.at(key_value.key);
allkeys.erase(index);
}
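// What remains in 'allkeys' are the smoother-only variables. Eliminating them from the
// linearized graph yields the marginal factors on the separator, i.e. the summarization
// that getSummarizedFactors() is expected to produce.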
std::vector<Index> variables(allkeys.begin(), allkeys.end());
std::pair<GaussianConditional::shared_ptr, GaussianFactorGraph> result = LinFactorGraph->eliminate(variables, EliminateCholesky);
expectedSmootherSummarization.resize(0);
BOOST_FOREACH(const GaussianFactor::shared_ptr& factor, result.second) {
expectedSmootherSummarization.add(LinearContainerFactor(factor, ordering, allValues));
}
CHECK(assert_equal(expectedSmootherSummarization, actualSmootherSummarization, 1e-6));
}
/* ************************************************************************* */
int main() { TestResult tr; return TestRegistry::runAllTests(tr);}
/* ************************************************************************* */

View File

@ -0,0 +1,299 @@
/* ----------------------------------------------------------------------------
* GTSAM Copyright 2010, Georgia Tech Research Corporation,
* Atlanta, Georgia 30332-0415
* All Rights Reserved
* Authors: Frank Dellaert, et al. (see THANKS for the full author list)
* See LICENSE for the license information
* -------------------------------------------------------------------------- */
/**
* @file BetweenFactorEM.h
* @author Vadim Indelman
**/
#pragma once
#include <ostream>
#include <gtsam/base/Testable.h>
#include <gtsam/base/Lie.h>
#include <gtsam/nonlinear/NonlinearFactor.h>
#include <gtsam/linear/GaussianFactor.h>
namespace gtsam {
/**
* A class for a measurement predicted by "between(config[key1],config[key2])"
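*
* The measurement noise is modeled as a two-component mixture: an inlier and an outlier
* Gaussian model with fixed prior weights. At every evaluation the factor computes the
* posterior indicator probabilities and weights the whitened inlier/outlier residuals by
* their square roots, in the spirit of an EM treatment of outlier rejection.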
* @tparam VALUE the Value type
* @addtogroup SLAM
*/
template<class VALUE>
class BetweenFactorEM: public NonlinearFactor {
public:
typedef VALUE T;
private:
typedef BetweenFactorEM<VALUE> This;
typedef gtsam::NonlinearFactor Base;
gtsam::Key key1_;
gtsam::Key key2_;
VALUE measured_; /** The measurement */
SharedGaussian model_inlier_;
SharedGaussian model_outlier_;
double prior_inlier_;
double prior_outlier_;
/** concept check by type */
GTSAM_CONCEPT_LIE_TYPE(T)
GTSAM_CONCEPT_TESTABLE_TYPE(T)
public:
// shorthand for a smart pointer to a factor
typedef typename boost::shared_ptr<BetweenFactorEM> shared_ptr;
/** default constructor - only use for serialization */
BetweenFactorEM() {}
/** Constructor */
BetweenFactorEM(Key key1, Key key2, const VALUE& measured,
const SharedGaussian& model_inlier, const SharedGaussian& model_outlier,
const double prior_inlier, const double prior_outlier) :
Base(key1, key2), key1_(key1), key2_(key2), measured_(measured),
model_inlier_(model_inlier), model_outlier_(model_outlier),
prior_inlier_(prior_inlier), prior_outlier_(prior_outlier){
}
virtual ~BetweenFactorEM() {}
/** implement functions needed for Testable */
/** print */
virtual void print(const std::string& s, const KeyFormatter& keyFormatter = DefaultKeyFormatter) const {
std::cout << s << "BetweenFactorEM("
<< keyFormatter(key1_) << ","
<< keyFormatter(key2_) << ")\n";
measured_.print(" measured: ");
model_inlier_->print(" noise model inlier: ");
model_outlier_->print(" noise model outlier: ");
std::cout << "(prior_inlier, prior_outlier_) = ("
<< prior_inlier_ << ","
<< prior_outlier_ << ")\n";
// Base::print(s, keyFormatter);
}
/** equals */
virtual bool equals(const NonlinearFactor& f, double tol=1e-9) const {
const This *t = dynamic_cast<const This*> (&f);
if(t && Base::equals(f))
return key1_ == t->key1_ && key2_ == t->key2_ &&
// model_inlier_->equals(t->model_inlier_ ) && // TODO: fix here
// model_outlier_->equals(t->model_outlier_ ) &&
prior_outlier_ == t->prior_outlier_ && prior_inlier_ == t->prior_inlier_ && measured_.equals(t->measured_);
else
return false;
}
/** implement functions needed to derive from Factor */
/* ************************************************************************* */
virtual double error(const gtsam::Values& x) const {
return whitenedError(x).squaredNorm();
}
/* ************************************************************************* */
/**
* Linearize a non-linearFactorN to get a gtsam::GaussianFactor,
* \f$ Ax-b \approx h(x+\delta x)-z = h(x) + A \delta x - z \f$
* Hence \f$ b = z - h(x) = - \mathtt{error\_vector}(x) \f$
*/
/* This version of linearize recalculates the noise model each time */
virtual boost::shared_ptr<gtsam::GaussianFactor> linearize(const gtsam::Values& x, const gtsam::Ordering& ordering) const {
// Only linearize if the factor is active
if (!this->active(x))
return boost::shared_ptr<gtsam::JacobianFactor>();
//std::cout<<"About to linearize"<<std::endl;
gtsam::Matrix A1, A2;
std::vector<gtsam::Matrix> A(this->size());
gtsam::Vector b = -whitenedError(x, A);
A1 = A[0];
A2 = A[1];
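// The residual and Jacobians returned by whitenedError() are already whitened by the
// probability-weighted noise models, so the JacobianFactor below carries a unit noise model.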
return gtsam::GaussianFactor::shared_ptr(
new gtsam::JacobianFactor(ordering[key1_], A1, ordering[key2_], A2, b, gtsam::noiseModel::Unit::Create(b.size())));
}
/* ************************************************************************* */
gtsam::Vector whitenedError(const gtsam::Values& x,
boost::optional<std::vector<gtsam::Matrix>&> H = boost::none) const {
bool debug = true;
const T& p1 = x.at<T>(key1_);
const T& p2 = x.at<T>(key2_);
Matrix H1, H2;
T hx = p1.between(p2, H1, H2); // h(x)
// manifold equivalent of h(x)-z -> log(z,h(x))
Vector err = measured_.localCoordinates(hx);
// Calculate indicator probabilities (inlier and outlier)
Vector p_inlier_outlier = calcIndicatorProb(x);
double p_inlier = p_inlier_outlier[0];
double p_outlier = p_inlier_outlier[1];
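// The returned residual stacks both hypotheses, [ sqrt(p_inlier)*e_wh_inlier ; sqrt(p_outlier)*e_wh_outlier ],
// so that error(x) = ||residual||^2 = p_inlier*||e_wh_inlier||^2 + p_outlier*||e_wh_outlier||^2.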
Vector err_wh_inlier = model_inlier_->whiten(err);
Vector err_wh_outlier = model_outlier_->whiten(err);
Matrix invCov_inlier = model_inlier_->R().transpose() * model_inlier_->R();
Matrix invCov_outlier = model_outlier_->R().transpose() * model_outlier_->R();
Vector err_wh_eq;
err_wh_eq.resize(err_wh_inlier.rows()*2);
err_wh_eq << sqrt(p_inlier) * err_wh_inlier.array() , sqrt(p_outlier) * err_wh_outlier.array();
if (H){
// stack the Jacobians of the two indicators for each of the keys
Matrix H1_inlier = sqrt(p_inlier)*model_inlier_->Whiten(H1);
Matrix H1_outlier = sqrt(p_outlier)*model_outlier_->Whiten(H1);
Matrix H1_aug = gtsam::stack(2, &H1_inlier, &H1_outlier);
Matrix H2_inlier = sqrt(p_inlier)*model_inlier_->Whiten(H2);
Matrix H2_outlier = sqrt(p_outlier)*model_outlier_->Whiten(H2);
Matrix H2_aug = gtsam::stack(2, &H2_inlier, &H2_outlier);
(*H)[0].resize(H1_aug.rows(),H1_aug.cols());
(*H)[1].resize(H2_aug.rows(),H2_aug.cols());
(*H)[0] = H1_aug;
(*H)[1] = H2_aug;
}
if (debug){
// std::cout<<"unwhitened error: "<<err[0]<<" "<<err[1]<<" "<<err[2]<<std::endl;
// std::cout<<"err_wh_inlier: "<<err_wh_inlier[0]<<" "<<err_wh_inlier[1]<<" "<<err_wh_inlier[2]<<std::endl;
// std::cout<<"err_wh_outlier: "<<err_wh_outlier[0]<<" "<<err_wh_outlier[1]<<" "<<err_wh_outlier[2]<<std::endl;
//
// std::cout<<"p_inlier, p_outlier, sumP: "<<p_inlier<<" "<<p_outlier<<" " << sumP << std::endl;
//
// std::cout<<"prior_inlier_, prior_outlier_: "<<prior_inlier_<<" "<<prior_outlier_<< std::endl;
//
// double s_inl = -0.5 * err_wh_inlier.dot(err_wh_inlier);
// double s_outl = -0.5 * err_wh_outlier.dot(err_wh_outlier);
// std::cout<<"s_inl, s_outl: "<<s_inl<<" "<<s_outl<<std::endl;
//
// std::cout<<"norm of invCov_inlier, invCov_outlier: "<<invCov_inlier.norm()<<" "<<invCov_outlier.norm()<<std::endl;
// double q_inl = invCov_inlier.norm() * exp( -0.5 * err_wh_inlier.dot(err_wh_inlier) );
// double q_outl = invCov_outlier.norm() * exp( -0.5 * err_wh_outlier.dot(err_wh_outlier) );
// std::cout<<"q_inl, q_outl: "<<q_inl<<" "<<q_outl<<std::endl;
// Matrix Cov_inlier = invCov_inlier.inverse();
// Matrix Cov_outlier = invCov_outlier.inverse();
// std::cout<<"Cov_inlier: "<<std::endl<<
// Cov_inlier(0,0) << " " << Cov_inlier(0,1) << " " << Cov_inlier(0,2) <<std::endl<<
// Cov_inlier(1,0) << " " << Cov_inlier(1,1) << " " << Cov_inlier(1,2) <<std::endl<<
// Cov_inlier(2,0) << " " << Cov_inlier(2,1) << " " << Cov_inlier(2,2) <<std::endl;
// std::cout<<"Cov_outlier: "<<std::endl<<
// Cov_outlier(0,0) << " " << Cov_outlier(0,1) << " " << Cov_outlier(0,2) <<std::endl<<
// Cov_outlier(1,0) << " " << Cov_outlier(1,1) << " " << Cov_outlier(1,2) <<std::endl<<
// Cov_outlier(2,0) << " " << Cov_outlier(2,1) << " " << Cov_outlier(2,2) <<std::endl;
// std::cout<<"===="<<std::endl;
}
return err_wh_eq;
}
/* ************************************************************************* */
gtsam::Vector calcIndicatorProb(const gtsam::Values& x) const {
Vector err = unwhitenedError(x);
// Calculate indicator probabilities (inlier and outlier)
Vector err_wh_inlier = model_inlier_->whiten(err);
Vector err_wh_outlier = model_outlier_->whiten(err);
Matrix invCov_inlier = model_inlier_->R().transpose() * model_inlier_->R();
Matrix invCov_outlier = model_outlier_->R().transpose() * model_outlier_->R();
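// Each hypothesis is scored as prior * ||R'R|| * exp(-0.5 * ||whitened error||^2), with the
// Frobenius norm of the information matrix standing in for the usual determinant-based
// normalizer; the two scores are then normalized to sum to one.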
double p_inlier = prior_inlier_ * invCov_inlier.norm() * exp( -0.5 * err_wh_inlier.dot(err_wh_inlier) );
double p_outlier = prior_outlier_ * invCov_outlier.norm() * exp( -0.5 * err_wh_outlier.dot(err_wh_outlier) );
double sumP = p_inlier + p_outlier;
p_inlier /= sumP;
p_outlier /= sumP;
// Bump up near-zero probabilities (as in linearFlow.h)
double minP = 0.05; // == 0.1 / (2 indicator variables)
if (p_inlier < minP || p_outlier < minP){
if (p_inlier < minP)
p_inlier = minP;
if (p_outlier < minP)
p_outlier = minP;
sumP = p_inlier + p_outlier;
p_inlier /= sumP;
p_outlier /= sumP;
}
return Vector_(2, p_inlier, p_outlier);
}
/* ************************************************************************* */
gtsam::Vector unwhitenedError(const gtsam::Values& x) const {
bool debug = true;
const T& p1 = x.at<T>(key1_);
const T& p2 = x.at<T>(key2_);
Matrix H1, H2;
T hx = p1.between(p2, H1, H2); // h(x)
return measured_.localCoordinates(hx);
}
/* ************************************************************************* */
/** return the measured */
const VALUE& measured() const {
return measured_;
}
/** number of variables attached to this factor */
std::size_t size() const {
return 2;
}
virtual size_t dim() const {
return model_inlier_->R().rows() + model_inlier_->R().cols();
}
private:
/** Serialization function */
friend class boost::serialization::access;
template<class ARCHIVE>
void serialize(ARCHIVE & ar, const unsigned int version) {
ar & boost::serialization::make_nvp("NonlinearFactor",
boost::serialization::base_object<Base>(*this));
ar & BOOST_SERIALIZATION_NVP(measured_);
}
}; // \class BetweenFactorEM
} /// namespace gtsam

View File

@ -312,8 +312,8 @@ namespace gtsam {
/** Constructor */
ImuFactor(Key pose_i, Key vel_i, Key pose_j, Key vel_j, Key bias,
const PreintegratedMeasurements& preintegratedMeasurements, const Vector3& gravity, const Vector3& omegaCoriolis,
const SharedNoiseModel& model, boost::optional<const Pose3&> body_P_sensor = boost::none) :
Base(model, pose_i, vel_i, pose_j, vel_j, bias),
boost::optional<const Pose3&> body_P_sensor = boost::none) :
Base(noiseModel::Gaussian::Covariance(preintegratedMeasurements.PreintMeasCov), pose_i, vel_i, pose_j, vel_j, bias),
preintegratedMeasurements_(preintegratedMeasurements),
gravity_(gravity),
omegaCoriolis_(omegaCoriolis),

View File

@ -44,8 +44,6 @@ namespace gtsam {
const SharedNoiseModel noise_; ///< noise model used
boost::optional<POSE> body_P_sensor_; ///< The pose of the sensor in the body frame
// verbosity handling for Cheirality Exceptions
bool throwCheirality_; ///< If true, rethrows Cheirality exceptions (default: false)
bool verboseCheirality_; ///< If true, prints text for Cheirality exceptions (default: false)
@ -140,17 +138,16 @@ namespace gtsam {
&& ((!body_P_sensor_ && !e->body_P_sensor_) || (body_P_sensor_ && e->body_P_sensor_ && body_P_sensor_->equals(*e->body_P_sensor_)));
}
/// get the dimension of the factor (number of rows on linearization)
virtual size_t dim() const {
return 6*keys_.size();
}
/// linearize returns a HessianFactor that is an approximation of error(p)
virtual boost::shared_ptr<GaussianFactor> linearize(const Values& values) const {
// std::cout.precision(20);
virtual boost::shared_ptr<GaussianFactor> linearize(const Values& values, const Ordering& ordering) const {
bool debug = false;
bool blockwise = true;
// Collect all poses (Cameras)
std::vector<Pose3> cameraPoses;
@ -164,17 +161,33 @@ namespace gtsam {
// We triangulate the 3D position of the landmark
boost::optional<Point3> point = triangulatePoint3(cameraPoses, measured_, *K_);
if (!point)
return HessianFactor::shared_ptr(new HessianFactor());
if (debug) {
std::cout << "point " << *point << std::endl;
}
std::vector<Index> js;
std::vector<Matrix> Gs(keys_.size()*(keys_.size()+1)/2);
std::vector<Vector> gs(keys_.size());
double f=0;
bool blockwise = false;
// fill in the keys
BOOST_FOREACH(const Key& k, keys_) {
js += ordering[k];
}
// {
// point is behind one of the cameras, turn factor off by setting everything to 0
if (!point) {
BOOST_FOREACH(gtsam::Matrix& m, Gs) m = zeros(6,6);
BOOST_FOREACH(Vector& v, gs) v = zero(6);
return HessianFactor::shared_ptr(new HessianFactor(js, Gs, gs, f));
}
// For debug only
std::vector<Matrix> Gs1;
std::vector<Vector> gs1;
if (blockwise || debug){
// ==========================================================================================================
std::vector<Matrix> Hx(keys_.size());
std::vector<Matrix> Hl(keys_.size());
@ -185,7 +198,8 @@ namespace gtsam {
std::cout << "pose " << pose << std::endl;
PinholeCamera<CALIBRATION> camera(pose, *K_);
b.at(i) = ( camera.project(*point,Hx.at(i),Hl.at(i)) - measured_.at(i) ).vector();
// std::cout << "b.at(i) " << b.at(i) << std::endl;
noise_-> WhitenSystem(Hx.at(i), Hl.at(i), b.at(i));
f += b.at(i).squaredNorm();
}
// Schur complement trick
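// Eliminating the landmark from the stacked system [Hx Hl | b] gives the reduced Hessian
// blocks G = Hx'Hx - Hx'Hl (Hl'Hl)^{-1} Hl'Hx (and gradient g = Hx'b - Hx'Hl (Hl'Hl)^{-1} Hl'b);
// the code below assembles these block-wise, with C = (Hl'Hl)^{-1}.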
@ -198,17 +212,16 @@ namespace gtsam {
for(size_t i1 = 0; i1 < keys_.size(); i1++) {
C += Hl.at(i1).transpose() * Hl.at(i1);
}
// std::cout << "Cnoinv"<< "=[" << Ctemp << "];" << std::endl;
C = C.inverse().eval(); // this is very important: without eval, because of eigen aliasing the results will be incorrect
// Calculate sub blocks
for(size_t i1 = 0; i1 < keys_.size(); i1++) {
for(size_t i2 = 0; i2 < keys_.size(); i2++) {
// we only need the upper triangular entries
Hxl[i1][i2] = Hx.at(i1).transpose() * Hl.at(i1) * C * Hl.at(i2).transpose();
if (i1==0 & i2==0){
if (i1==0 && i2==0){
if (debug) {
std::cout << "Hoff"<< i1 << i2 << "=[" << Hx.at(i1).transpose() * Hl.at(i1) * C * Hl.at(i2).transpose() << "];" << std::endl;
std::cout << "Hxoff"<< "=[" << Hx.at(i1) << "];" << std::endl;
std::cout << "Hloff"<< "=[" << Hl.at(i1) << "];" << std::endl;
@ -217,6 +230,7 @@ namespace gtsam {
}
}
}
}
// Populate Gs and gs
int GsCount = 0;
for(size_t i1 = 0; i1 < keys_.size(); i1++) {
@ -227,29 +241,39 @@ namespace gtsam {
if (i2 == i1){
Gs.at(GsCount) = Hx.at(i1).transpose() * Hx.at(i1) - Hxl[i1][i2] * Hx.at(i2);
if (debug) {
std::cout << "HxlH"<< GsCount << "=[" << Hxl[i1][i2] * Hx.at(i2) << "];" << std::endl;
std::cout << "Hx2_"<< GsCount << "=[" << Hx.at(i2) << "];" << std::endl;
std::cout << "H"<< GsCount << "=[" << Gs.at(GsCount) << "];" << std::endl;
}
GsCount++;
}
if (i2 > i1) {
Gs.at(GsCount) = - Hxl[i1][i2] * Hx.at(i2);
if (debug) {
std::cout << "HxlH"<< GsCount << "=[" << Hxl[i1][i2] * Hx.at(i2) << "];" << std::endl;
std::cout << "Hx2_"<< GsCount << "=[" << Hx.at(i2) << "];" << std::endl;
std::cout << "H"<< GsCount << "=[" << Gs.at(GsCount) << "];" << std::endl;
}
GsCount++;
}
}
}
if (debug) {
// Copy result for later comparison
BOOST_FOREACH(const Matrix& m, Gs) {
Gs1.push_back(m);
}
// Copy result for later comparison
BOOST_FOREACH(const Matrix& m, gs) {
gs1.push_back(m);
}
}
}
// std::cout << "GsCount " << GsCount << std::endl;
// }
// debug only
std::vector<Matrix> Gs2(keys_.size()*(keys_.size()+1)/2);
std::vector<Vector> gs2(keys_.size());
// { // version with full matrix multiplication
if (blockwise == false || debug){ // version with full matrix multiplication
// ==========================================================================================================
Matrix Hx2 = zeros(2*keys_.size(), 6*keys_.size());
Matrix Hl2 = zeros(2*keys_.size(), 3);
@ -260,20 +284,20 @@ namespace gtsam {
PinholeCamera<CALIBRATION> camera(pose, *K_);
Matrix Hxi, Hli;
Vector bi = ( camera.project(*point,Hxi,Hli) - measured_.at(i) ).vector();
noise_-> WhitenSystem(Hxi, Hli, bi);
f += bi.squaredNorm();
Hx2.block( 2*i, 6*i, 2, 6 ) = Hxi;
Hl2.block( 2*i, 0, 2, 3 ) = Hli;
// std::cout << "Hxi= \n" << Hxi << std::endl;
// std::cout << "Hxi.transpose() * Hxi= \n" << Hxi.transpose() * Hxi << std::endl;
// std::cout << "Hxl.transpose() * Hxl= \n" << Hli.transpose() * Hli << std::endl;
if (debug) {
std::cout << "Hxi= \n" << Hxi << std::endl;
std::cout << "Hxi.transpose() * Hxi= \n" << Hxi.transpose() * Hxi << std::endl;
std::cout << "Hxl.transpose() * Hxl= \n" << Hli.transpose() * Hli << std::endl;
}
subInsert(b2,bi,2*i);
// std::cout << "================= measurement " << i << std::endl;
// std::cout << "Hx " << Hx2 << std::endl;
// std::cout << "Hl " << Hl2 << std::endl;
// std::cout << "b " << b2.transpose() << std::endl;
// std::cout << "b.at(i) " << b.at(i) << std::endl;
// std::cout << "Hxi - Hx.at(i) " << Hxi - Hx.at(i) << std::endl;
// std::cout << "Hli - Hl.at(i) " << Hli - Hl.at(i) << std::endl;
}
// Schur complement trick
@ -281,57 +305,57 @@ namespace gtsam {
Matrix3 C2 = (Hl2.transpose() * Hl2).inverse();
H = Hx2.transpose() * Hx2 - Hx2.transpose() * Hl2 * C2 * Hl2.transpose() * Hx2;
if (debug) {
std::cout << "Hx2" << "=[" << Hx2 << "];" << std::endl;
std::cout << "Hl2" << "=[" << Hl2 << "];" << std::endl;
std::cout << "H" << "=[" << H << "];" << std::endl;
std::cout << "Cnoinv2"<< "=[" << Hl2.transpose() * Hl2 << "];" << std::endl;
std::cout << "C2"<< "=[" << C2 << "];" << std::endl;
// std::cout << "Hx2= \n" << Hx2 << std::endl;
// std::cout << "Hx2.transpose() * Hx2= \n" << Hx2.transpose() * Hx2 << std::endl;
Vector gs2_vector = Hx2.transpose() * b2 - Hx2.transpose() * Hl2 * C2 * Hl2.transpose() * b2;
std::cout << "================================================================================" << std::endl;
}
Vector gs_vector = Hx2.transpose() * b2 - Hx2.transpose() * Hl2 * C2 * Hl2.transpose() * b2;
// Populate Gs and gs
int GsCount2 = 0;
for(size_t i1 = 0; i1 < keys_.size(); i1++) {
gs2.at(i1) = sub(gs2_vector, 6*i1, 6*i1 + 6);
gs.at(i1) = sub(gs_vector, 6*i1, 6*i1 + 6);
for(size_t i2 = 0; i2 < keys_.size(); i2++) {
if (i2 >= i1) {
Gs2.at(GsCount2) = H.block(6*i1, 6*i2, 6, 6);
Gs.at(GsCount2) = H.block(6*i1, 6*i2, 6, 6);
GsCount2++;
}
}
}
// }
//
}
if (debug) {
// Compare blockwise and full version
bool gs2_equal_gs = true;
bool gs1_equal_gs = true;
for(size_t i = 0; i < measured_.size(); i++) {
std::cout << "gs.at(i) " << gs.at(i).transpose() << std::endl;
std::cout << "gs2.at(i) " << gs2.at(i).transpose() << std::endl;
std::cout << "gs.error " << (gs.at(i)- gs2.at(i)).transpose() << std::endl;
if( !equal(gs.at(i), gs2.at(i)), 1e-7) {
gs2_equal_gs = false;
std::cout << "gs1.at(i) " << gs1.at(i).transpose() << std::endl;
std::cout << "gs.error " << (gs.at(i)- gs1.at(i)).transpose() << std::endl;
if( !equal(gs.at(i), gs1.at(i)), 1e-7) {
gs1_equal_gs = false;
}
}
std::cout << "gs2_equal_gs " << gs2_equal_gs << std::endl;
std::cout << "gs1_equal_gs " << gs1_equal_gs << std::endl;
for(size_t i = 0; i < keys_.size()*(keys_.size()+1)/2; i++) {
std::cout << "Gs.at(i) " << Gs.at(i).transpose() << std::endl;
std::cout << "Gs2.at(i) " << Gs2.at(i).transpose() << std::endl;
std::cout << "Gs.error " << (Gs.at(i)- Gs2.at(i)).transpose() << std::endl;
std::cout << "Gs1.at(i) " << Gs1.at(i).transpose() << std::endl;
std::cout << "Gs.error " << (Gs.at(i)- Gs1.at(i)).transpose() << std::endl;
}
std::cout << "Gs1_equal_Gs " << gs1_equal_gs << std::endl;
}
std::cout << "Gs2_equal_Gs " << gs2_equal_gs << std::endl;
// ==========================================================================================================
return HessianFactor::shared_ptr(new HessianFactor(keys_, Gs, gs, f));
return HessianFactor::shared_ptr(new HessianFactor(js, Gs, gs, f));
}
/**
@ -360,19 +384,14 @@ namespace gtsam {
if(point)
{ // triangulation produced a good estimate of landmark position
// std::cout << "point " << *point << std::endl;
for(size_t i = 0; i < measured_.size(); i++) {
Pose3 pose = cameraPoses.at(i);
PinholeCamera<CALIBRATION> camera(pose, *K_);
// std::cout << "pose.compose(*body_P_sensor_) " << pose << std::endl;
Point2 reprojectionError(camera.project(*point) - measured_.at(i));
// std::cout << "reprojectionError " << reprojectionError << std::endl;
overallError += noise_->distance( reprojectionError.vector() );
// std::cout << "noise_->distance( reprojectionError.vector() ) " << noise_->distance( reprojectionError.vector() ) << std::endl;
}
return sqrt(overallError);
return std::sqrt(overallError);
}else{ // triangulation failed: we deactivate the factor, then the error should not contribute to the overall error
return 0.0;
}

View File

@ -0,0 +1,477 @@
/**
* @file testBetweenFactorEM.cpp
* @brief Unit test for the BetweenFactorEM
* @author Vadim Indelman
*/
#include <CppUnitLite/TestHarness.h>
#include <gtsam_unstable/slam/BetweenFactorEM.h>
#include <gtsam/geometry/Pose2.h>
#include <gtsam/nonlinear/Values.h>
#include <gtsam/base/LieVector.h>
#include <gtsam/base/numericalDerivative.h>
#include <gtsam/slam/BetweenFactor.h>
//#include <gtsam/nonlinear/NonlinearOptimizer.h>
//#include <gtsam/nonlinear/NonlinearFactorGraph.h>
//#include <gtsam/linear/GaussianSequentialSolver.h>
using namespace std;
using namespace gtsam;
/* ************************************************************************* */
LieVector predictionError(const Pose2& p1, const Pose2& p2, const gtsam::Key& key1, const gtsam::Key& key2, const BetweenFactorEM<gtsam::Pose2>& factor){
gtsam::Values values;
values.insert(key1, p1);
values.insert(key2, p2);
// LieVector err = factor.whitenedError(values);
// return err;
return LieVector::Expmap(factor.whitenedError(values));
}
/* ************************************************************************* */
LieVector predictionError_standard(const Pose2& p1, const Pose2& p2, const gtsam::Key& key1, const gtsam::Key& key2, const BetweenFactor<gtsam::Pose2>& factor){
gtsam::Values values;
values.insert(key1, p1);
values.insert(key2, p2);
// LieVector err = factor.whitenedError(values);
// return err;
return LieVector::Expmap(factor.whitenedError(values));
}
/* ************************************************************************* */
TEST( BetweenFactorEM, ConstructorAndEquals)
{
gtsam::Key key1(1);
gtsam::Key key2(2);
gtsam::Pose2 p1(10.0, 15.0, 0.1);
gtsam::Pose2 p2(15.0, 15.0, 0.3);
gtsam::Pose2 noise(0.5, 0.4, 0.01);
gtsam::Pose2 rel_pose_ideal = p1.between(p2);
gtsam::Pose2 rel_pose_msr = rel_pose_ideal.compose(noise);
SharedGaussian model_inlier(noiseModel::Diagonal::Sigmas(gtsam::Vector_(3, 0.5, 0.5, 0.05)));
SharedGaussian model_outlier(noiseModel::Diagonal::Sigmas(gtsam::Vector_(3, 5, 5, 1.0)));
double prior_outlier = 0.5;
double prior_inlier = 0.5;
// Constructor
BetweenFactorEM<gtsam::Pose2> f(key1, key2, rel_pose_msr, model_inlier, model_outlier,
prior_inlier, prior_outlier);
BetweenFactorEM<gtsam::Pose2> g(key1, key2, rel_pose_msr, model_inlier, model_outlier,
prior_inlier, prior_outlier);
// Equals
CHECK(assert_equal(f, g, 1e-5));
}
/* ************************************************************************* */
TEST( BetweenFactorEM, EvaluateError)
{
gtsam::Key key1(1);
gtsam::Key key2(2);
// Inlier test
gtsam::Pose2 p1(10.0, 15.0, 0.1);
gtsam::Pose2 p2(15.0, 15.0, 0.3);
gtsam::Pose2 noise(0.5, 0.4, 0.01);
gtsam::Pose2 rel_pose_ideal = p1.between(p2);
gtsam::Pose2 rel_pose_msr = rel_pose_ideal.compose(noise);
SharedGaussian model_inlier(noiseModel::Diagonal::Sigmas(gtsam::Vector_(3, 0.5, 0.5, 0.05)));
SharedGaussian model_outlier(noiseModel::Diagonal::Sigmas(gtsam::Vector_(3, 50.0, 50.0, 10.0)));
gtsam::Values values;
values.insert(key1, p1);
values.insert(key2, p2);
double prior_outlier = 0.5;
double prior_inlier = 0.5;
BetweenFactorEM<gtsam::Pose2> f(key1, key2, rel_pose_msr, model_inlier, model_outlier,
prior_inlier, prior_outlier);
Vector actual_err_wh = f.whitenedError(values);
Vector actual_err_wh_inlier = Vector_(3, actual_err_wh[0], actual_err_wh[1], actual_err_wh[2]);
Vector actual_err_wh_outlier = Vector_(3, actual_err_wh[3], actual_err_wh[4], actual_err_wh[5]);
// in case of an inlier, the inlier-mode whitened error should be dominant
assert(actual_err_wh_inlier.norm() > 1000.0*actual_err_wh_outlier.norm());
cout << "Inlier test. norm of actual_err_wh_inlier, actual_err_wh_outlier: "<<actual_err_wh_inlier.norm()<<","<<actual_err_wh_outlier.norm()<<endl;
cout<<actual_err_wh[0]<<" "<<actual_err_wh[1]<<" "<<actual_err_wh[2]<<" "<<actual_err_wh[3]<<" "<<actual_err_wh[4]<<" "<<actual_err_wh[5]<<endl;
// Outlier test
noise = gtsam::Pose2(10.5, 20.4, 2.01);
gtsam::Pose2 rel_pose_msr_test2 = rel_pose_ideal.compose(noise);
BetweenFactorEM<gtsam::Pose2> g(key1, key2, rel_pose_msr_test2, model_inlier, model_outlier,
prior_inlier, prior_outlier);
actual_err_wh = g.whitenedError(values);
actual_err_wh_inlier = Vector_(3, actual_err_wh[0], actual_err_wh[1], actual_err_wh[2]);
actual_err_wh_outlier = Vector_(3, actual_err_wh[3], actual_err_wh[4], actual_err_wh[5]);
// in case of an outlier, the outlier-mode whitened error should be dominant
assert(actual_err_wh_inlier.norm() < 1000.0*actual_err_wh_outlier.norm());
cout << "Outlier test. norm of actual_err_wh_inlier, actual_err_wh_outlier: "<<actual_err_wh_inlier.norm()<<","<<actual_err_wh_outlier.norm()<<endl;
cout<<actual_err_wh[0]<<" "<<actual_err_wh[1]<<" "<<actual_err_wh[2]<<" "<<actual_err_wh[3]<<" "<<actual_err_wh[4]<<" "<<actual_err_wh[5]<<endl;
// Compare with standard between factor for the inlier case
prior_outlier = 0.0;
prior_inlier = 1.0;
BetweenFactorEM<gtsam::Pose2> h_EM(key1, key2, rel_pose_msr, model_inlier, model_outlier,
prior_inlier, prior_outlier);
actual_err_wh = h_EM.whitenedError(values);
actual_err_wh_inlier = Vector_(3, actual_err_wh[0], actual_err_wh[1], actual_err_wh[2]);
BetweenFactor<gtsam::Pose2> h(key1, key2, rel_pose_msr, model_inlier );
Vector actual_err_wh_stnd = h.whitenedError(values);
cout<<"actual_err_wh: "<<actual_err_wh_inlier[0]<<", "<<actual_err_wh_inlier[1]<<", "<<actual_err_wh_inlier[2]<<endl;
cout<<"actual_err_wh_stnd: "<<actual_err_wh_stnd[0]<<", "<<actual_err_wh_stnd[1]<<", "<<actual_err_wh_stnd[2]<<endl;
CHECK( assert_equal(actual_err_wh_inlier, actual_err_wh_stnd, 1e-8));
}
///* ************************************************************************** */
TEST (BetweenFactorEM, jacobian ) {
gtsam::Key key1(1);
gtsam::Key key2(2);
// Inlier test
gtsam::Pose2 p1(10.0, 15.0, 0.1);
gtsam::Pose2 p2(15.0, 15.0, 0.3);
gtsam::Pose2 noise(0.5, 0.4, 0.01);
gtsam::Pose2 rel_pose_ideal = p1.between(p2);
gtsam::Pose2 rel_pose_msr = rel_pose_ideal.compose(noise);
SharedGaussian model_inlier(noiseModel::Diagonal::Sigmas(gtsam::Vector_(3, 0.5, 0.5, 0.05)));
SharedGaussian model_outlier(noiseModel::Diagonal::Sigmas(gtsam::Vector_(3, 50.0, 50.0, 10.0)));
gtsam::Values values;
values.insert(key1, p1);
values.insert(key2, p2);
double prior_outlier = 0.0;
double prior_inlier = 1.0;
BetweenFactorEM<gtsam::Pose2> f(key1, key2, rel_pose_msr, model_inlier, model_outlier,
prior_inlier, prior_outlier);
std::vector<gtsam::Matrix> H_actual(2);
Vector actual_err_wh = f.whitenedError(values, H_actual);
Matrix H1_actual = H_actual[0];
Matrix H2_actual = H_actual[1];
// compare to standard between factor
BetweenFactor<gtsam::Pose2> h(key1, key2, rel_pose_msr, model_inlier );
Vector actual_err_wh_stnd = h.whitenedError(values);
Vector actual_err_wh_inlier = Vector_(3, actual_err_wh[0], actual_err_wh[1], actual_err_wh[2]);
CHECK( assert_equal(actual_err_wh_stnd, actual_err_wh_inlier, 1e-8));
std::vector<gtsam::Matrix> H_actual_stnd_unwh(2);
(void)h.unwhitenedError(values, H_actual_stnd_unwh);
Matrix H1_actual_stnd_unwh = H_actual_stnd_unwh[0];
Matrix H2_actual_stnd_unwh = H_actual_stnd_unwh[1];
Matrix H1_actual_stnd = model_inlier->Whiten(H1_actual_stnd_unwh);
Matrix H2_actual_stnd = model_inlier->Whiten(H2_actual_stnd_unwh);
// CHECK( assert_equal(H1_actual_stnd, H1_actual, 1e-8));
// CHECK( assert_equal(H2_actual_stnd, H2_actual, 1e-8));
double stepsize = 1.0e-9;
Matrix H1_expected = gtsam::numericalDerivative11<LieVector, Pose2>(boost::bind(&predictionError, _1, p2, key1, key2, f), p1, stepsize);
Matrix H2_expected = gtsam::numericalDerivative11<LieVector, Pose2>(boost::bind(&predictionError, p1, _1, key1, key2, f), p2, stepsize);
// try to check numerical derivatives of a standard between factor
Matrix H1_expected_stnd = gtsam::numericalDerivative11<LieVector, Pose2>(boost::bind(&predictionError_standard, _1, p2, key1, key2, h), p1, stepsize);
CHECK( assert_equal(H1_expected_stnd, H1_actual_stnd, 1e-5));
CHECK( assert_equal(H1_expected, H1_actual, 1e-8));
CHECK( assert_equal(H2_expected, H2_actual, 1e-8));
}
/* ************************************************************************* */
TEST( InertialNavFactor, Equals)
{
// gtsam::Key Pose1(11);
// gtsam::Key Pose2(12);
// gtsam::Key Vel1(21);
// gtsam::Key Vel2(22);
// gtsam::Key Bias1(31);
//
// Vector measurement_acc(Vector_(3,0.1,0.2,0.4));
// Vector measurement_gyro(Vector_(3, -0.2, 0.5, 0.03));
//
// double measurement_dt(0.1);
// Vector world_g(Vector_(3, 0.0, 0.0, 9.81));
// Vector world_rho(Vector_(3, 0.0, -1.5724e-05, 0.0)); // NED system
// gtsam::Vector ECEF_omega_earth(Vector_(3, 0.0, 0.0, 7.292115e-5));
// gtsam::Vector world_omega_earth(world_R_ECEF.matrix() * ECEF_omega_earth);
//
// SharedGaussian model(noiseModel::Isotropic::Sigma(9, 0.1));
//
// InertialNavFactor<Pose3, LieVector, imuBias::ConstantBias> f(Pose1, Vel1, Bias1, Pose2, Vel2, measurement_acc, measurement_gyro, measurement_dt, world_g, world_rho, world_omega_earth, model);
// InertialNavFactor<Pose3, LieVector, imuBias::ConstantBias> g(Pose1, Vel1, Bias1, Pose2, Vel2, measurement_acc, measurement_gyro, measurement_dt, world_g, world_rho, world_omega_earth, model);
// CHECK(assert_equal(f, g, 1e-5));
}
/* ************************************************************************* */
TEST( InertialNavFactor, Predict)
{
// gtsam::Key PoseKey1(11);
// gtsam::Key PoseKey2(12);
// gtsam::Key VelKey1(21);
// gtsam::Key VelKey2(22);
// gtsam::Key BiasKey1(31);
//
// double measurement_dt(0.1);
// Vector world_g(Vector_(3, 0.0, 0.0, 9.81));
// Vector world_rho(Vector_(3, 0.0, -1.5724e-05, 0.0)); // NED system
// gtsam::Vector ECEF_omega_earth(Vector_(3, 0.0, 0.0, 7.292115e-5));
// gtsam::Vector world_omega_earth(world_R_ECEF.matrix() * ECEF_omega_earth);
//
// SharedGaussian model(noiseModel::Isotropic::Sigma(9, 0.1));
//
//
// // First test: zero angular motion, some acceleration
// Vector measurement_acc(Vector_(3,0.1,0.2,0.3-9.81));
// Vector measurement_gyro(Vector_(3, 0.0, 0.0, 0.0));
//
// InertialNavFactor<Pose3, LieVector, imuBias::ConstantBias> f(PoseKey1, VelKey1, BiasKey1, PoseKey2, VelKey2, measurement_acc, measurement_gyro, measurement_dt, world_g, world_rho, world_omega_earth, model);
//
// Pose3 Pose1(Rot3(), Point3(2.00, 1.00, 3.00));
// LieVector Vel1(3, 0.50, -0.50, 0.40);
// imuBias::ConstantBias Bias1;
// Pose3 expectedPose2(Rot3(), Point3(2.05, 0.95, 3.04));
// LieVector expectedVel2(3, 0.51, -0.48, 0.43);
// Pose3 actualPose2;
// LieVector actualVel2;
// f.predict(Pose1, Vel1, Bias1, actualPose2, actualVel2);
//
// CHECK(assert_equal(expectedPose2, actualPose2, 1e-5));
// CHECK(assert_equal(expectedVel2, actualVel2, 1e-5));
}
/* ************************************************************************* */
TEST( InertialNavFactor, ErrorPosVel)
{
// gtsam::Key PoseKey1(11);
// gtsam::Key PoseKey2(12);
// gtsam::Key VelKey1(21);
// gtsam::Key VelKey2(22);
// gtsam::Key BiasKey1(31);
//
// double measurement_dt(0.1);
// Vector world_g(Vector_(3, 0.0, 0.0, 9.81));
// Vector world_rho(Vector_(3, 0.0, -1.5724e-05, 0.0)); // NED system
// gtsam::Vector ECEF_omega_earth(Vector_(3, 0.0, 0.0, 7.292115e-5));
// gtsam::Vector world_omega_earth(world_R_ECEF.matrix() * ECEF_omega_earth);
//
// SharedGaussian model(noiseModel::Isotropic::Sigma(9, 0.1));
//
//
// // First test: zero angular motion, some acceleration
// Vector measurement_acc(Vector_(3,0.1,0.2,0.3-9.81));
// Vector measurement_gyro(Vector_(3, 0.0, 0.0, 0.0));
//
// InertialNavFactor<Pose3, LieVector, imuBias::ConstantBias> f(PoseKey1, VelKey1, BiasKey1, PoseKey2, VelKey2, measurement_acc, measurement_gyro, measurement_dt, world_g, world_rho, world_omega_earth, model);
//
// Pose3 Pose1(Rot3(), Point3(2.00, 1.00, 3.00));
// Pose3 Pose2(Rot3(), Point3(2.05, 0.95, 3.04));
// LieVector Vel1(3, 0.50, -0.50, 0.40);
// LieVector Vel2(3, 0.51, -0.48, 0.43);
// imuBias::ConstantBias Bias1;
//
// Vector ActualErr(f.evaluateError(Pose1, Vel1, Bias1, Pose2, Vel2));
// Vector ExpectedErr(zero(9));
//
// CHECK(assert_equal(ExpectedErr, ActualErr, 1e-5));
}
/* ************************************************************************* */
TEST( InertialNavFactor, ErrorRot)
{
// gtsam::Key PoseKey1(11);
// gtsam::Key PoseKey2(12);
// gtsam::Key VelKey1(21);
// gtsam::Key VelKey2(22);
// gtsam::Key BiasKey1(31);
//
// double measurement_dt(0.1);
// Vector world_g(Vector_(3, 0.0, 0.0, 9.81));
// Vector world_rho(Vector_(3, 0.0, -1.5724e-05, 0.0)); // NED system
// gtsam::Vector ECEF_omega_earth(Vector_(3, 0.0, 0.0, 7.292115e-5));
// gtsam::Vector world_omega_earth(world_R_ECEF.matrix() * ECEF_omega_earth);
//
// SharedGaussian model(noiseModel::Isotropic::Sigma(9, 0.1));
//
// // Second test: zero angular motion, some acceleration
// Vector measurement_acc(Vector_(3,0.0,0.0,0.0-9.81));
// Vector measurement_gyro(Vector_(3, 0.1, 0.2, 0.3));
//
// InertialNavFactor<Pose3, LieVector, imuBias::ConstantBias> f(PoseKey1, VelKey1, BiasKey1, PoseKey2, VelKey2, measurement_acc, measurement_gyro, measurement_dt, world_g, world_rho, world_omega_earth, model);
//
// Pose3 Pose1(Rot3(), Point3(2.0,1.0,3.0));
// Pose3 Pose2(Rot3::Expmap(measurement_gyro*measurement_dt), Point3(2.0,1.0,3.0));
// LieVector Vel1(3,0.0,0.0,0.0);
// LieVector Vel2(3,0.0,0.0,0.0);
// imuBias::ConstantBias Bias1;
//
// Vector ActualErr(f.evaluateError(Pose1, Vel1, Bias1, Pose2, Vel2));
// Vector ExpectedErr(zero(9));
//
// CHECK(assert_equal(ExpectedErr, ActualErr, 1e-5));
}
/* ************************************************************************* */
TEST( InertialNavFactor, ErrorRotPosVel)
{
// gtsam::Key PoseKey1(11);
// gtsam::Key PoseKey2(12);
// gtsam::Key VelKey1(21);
// gtsam::Key VelKey2(22);
// gtsam::Key BiasKey1(31);
//
// double measurement_dt(0.1);
// Vector world_g(Vector_(3, 0.0, 0.0, 9.81));
// Vector world_rho(Vector_(3, 0.0, -1.5724e-05, 0.0)); // NED system
// gtsam::Vector ECEF_omega_earth(Vector_(3, 0.0, 0.0, 7.292115e-5));
// gtsam::Vector world_omega_earth(world_R_ECEF.matrix() * ECEF_omega_earth);
//
// SharedGaussian model(noiseModel::Isotropic::Sigma(9, 0.1));
//
// // Second test: zero angular motion, some acceleration - generated in matlab
// Vector measurement_acc(Vector_(3, 6.501390843381716, -6.763926150509185, -2.300389940090343));
// Vector measurement_gyro(Vector_(3, 0.1, 0.2, 0.3));
//
// InertialNavFactor<Pose3, LieVector, imuBias::ConstantBias> f(PoseKey1, VelKey1, BiasKey1, PoseKey2, VelKey2, measurement_acc, measurement_gyro, measurement_dt, world_g, world_rho, world_omega_earth, model);
//
// Rot3 R1(0.487316618, 0.125253866, 0.86419557,
// 0.580273724, 0.693095498, -0.427669306,
// -0.652537293, 0.709880342, 0.265075427);
// Point3 t1(2.0,1.0,3.0);
// Pose3 Pose1(R1, t1);
// LieVector Vel1(3,0.5,-0.5,0.4);
// Rot3 R2(0.473618898, 0.119523052, 0.872582019,
// 0.609241153, 0.67099888, -0.422594037,
// -0.636011287, 0.731761397, 0.244979388);
// Point3 t2(2.052670960415706, 0.977252139079380, 2.942482135362800);
// Pose3 Pose2(R2, t2);
// LieVector Vel2(3,0.510000000000000, -0.480000000000000, 0.430000000000000);
// imuBias::ConstantBias Bias1;
//
// Vector ActualErr(f.evaluateError(Pose1, Vel1, Bias1, Pose2, Vel2));
// Vector ExpectedErr(zero(9));
//
// CHECK(assert_equal(ExpectedErr, ActualErr, 1e-5));
}
/* ************************************************************************* */
TEST (InertialNavFactor, Jacobian ) {
// gtsam::Key PoseKey1(11);
// gtsam::Key PoseKey2(12);
// gtsam::Key VelKey1(21);
// gtsam::Key VelKey2(22);
// gtsam::Key BiasKey1(31);
//
// double measurement_dt(0.01);
// Vector world_g(Vector_(3, 0.0, 0.0, 9.81));
// Vector world_rho(Vector_(3, 0.0, -1.5724e-05, 0.0)); // NED system
// gtsam::Vector ECEF_omega_earth(Vector_(3, 0.0, 0.0, 7.292115e-5));
// gtsam::Vector world_omega_earth(world_R_ECEF.matrix() * ECEF_omega_earth);
//
// SharedGaussian model(noiseModel::Isotropic::Sigma(9, 0.1));
//
// Vector measurement_acc(Vector_(3, 6.501390843381716, -6.763926150509185, -2.300389940090343));
// Vector measurement_gyro(Vector_(3, 3.14, 3.14/2, -3.14));
//
// InertialNavFactor<Pose3, LieVector, imuBias::ConstantBias> factor(PoseKey1, VelKey1, BiasKey1, PoseKey2, VelKey2, measurement_acc, measurement_gyro, measurement_dt, world_g, world_rho, world_omega_earth, model);
//
// Rot3 R1(0.487316618, 0.125253866, 0.86419557,
// 0.580273724, 0.693095498, -0.427669306,
// -0.652537293, 0.709880342, 0.265075427);
// Point3 t1(2.0,1.0,3.0);
// Pose3 Pose1(R1, t1);
// LieVector Vel1(3,0.5,-0.5,0.4);
// Rot3 R2(0.473618898, 0.119523052, 0.872582019,
// 0.609241153, 0.67099888, -0.422594037,
// -0.636011287, 0.731761397, 0.244979388);
// Point3 t2(2.052670960415706, 0.977252139079380, 2.942482135362800);
// Pose3 Pose2(R2, t2);
// LieVector Vel2(3,0.510000000000000, -0.480000000000000, 0.430000000000000);
// imuBias::ConstantBias Bias1;
//
// Matrix H1_actual, H2_actual, H3_actual, H4_actual, H5_actual;
//
// Vector ActualErr(factor.evaluateError(Pose1, Vel1, Bias1, Pose2, Vel2, H1_actual, H2_actual, H3_actual, H4_actual, H5_actual));
//
// // Checking for Pose part in the jacobians
// // ******
// Matrix H1_actualPose(H1_actual.block(0,0,6,H1_actual.cols()));
// Matrix H2_actualPose(H2_actual.block(0,0,6,H2_actual.cols()));
// Matrix H3_actualPose(H3_actual.block(0,0,6,H3_actual.cols()));
// Matrix H4_actualPose(H4_actual.block(0,0,6,H4_actual.cols()));
// Matrix H5_actualPose(H5_actual.block(0,0,6,H5_actual.cols()));
//
// // Calculate the Jacobian matrices H1 until H5 using the numerical derivative function
// gtsam::Matrix H1_expectedPose, H2_expectedPose, H3_expectedPose, H4_expectedPose, H5_expectedPose;
// H1_expectedPose = gtsam::numericalDerivative11<Pose3, Pose3>(boost::bind(&predictionErrorPose, _1, Vel1, Bias1, Pose2, Vel2, factor), Pose1);
// H2_expectedPose = gtsam::numericalDerivative11<Pose3, LieVector>(boost::bind(&predictionErrorPose, Pose1, _1, Bias1, Pose2, Vel2, factor), Vel1);
// H3_expectedPose = gtsam::numericalDerivative11<Pose3, imuBias::ConstantBias>(boost::bind(&predictionErrorPose, Pose1, Vel1, _1, Pose2, Vel2, factor), Bias1);
// H4_expectedPose = gtsam::numericalDerivative11<Pose3, Pose3>(boost::bind(&predictionErrorPose, Pose1, Vel1, Bias1, _1, Vel2, factor), Pose2);
// H5_expectedPose = gtsam::numericalDerivative11<Pose3, LieVector>(boost::bind(&predictionErrorPose, Pose1, Vel1, Bias1, Pose2, _1, factor), Vel2);
//
// // Verify they are equal for this choice of state
// CHECK( gtsam::assert_equal(H1_expectedPose, H1_actualPose, 1e-6));
// CHECK( gtsam::assert_equal(H2_expectedPose, H2_actualPose, 1e-6));
// CHECK( gtsam::assert_equal(H3_expectedPose, H3_actualPose, 1e-6));
// CHECK( gtsam::assert_equal(H4_expectedPose, H4_actualPose, 1e-6));
// CHECK( gtsam::assert_equal(H5_expectedPose, H5_actualPose, 1e-6));
//
// // Checking for Vel part in the jacobians
// // ******
// Matrix H1_actualVel(H1_actual.block(6,0,3,H1_actual.cols()));
// Matrix H2_actualVel(H2_actual.block(6,0,3,H2_actual.cols()));
// Matrix H3_actualVel(H3_actual.block(6,0,3,H3_actual.cols()));
// Matrix H4_actualVel(H4_actual.block(6,0,3,H4_actual.cols()));
// Matrix H5_actualVel(H5_actual.block(6,0,3,H5_actual.cols()));
//
// // Calculate the Jacobian matrices H1 until H5 using the numerical derivative function
// gtsam::Matrix H1_expectedVel, H2_expectedVel, H3_expectedVel, H4_expectedVel, H5_expectedVel;
// H1_expectedVel = gtsam::numericalDerivative11<LieVector, Pose3>(boost::bind(&predictionErrorVel, _1, Vel1, Bias1, Pose2, Vel2, factor), Pose1);
// H2_expectedVel = gtsam::numericalDerivative11<LieVector, LieVector>(boost::bind(&predictionErrorVel, Pose1, _1, Bias1, Pose2, Vel2, factor), Vel1);
// H3_expectedVel = gtsam::numericalDerivative11<LieVector, imuBias::ConstantBias>(boost::bind(&predictionErrorVel, Pose1, Vel1, _1, Pose2, Vel2, factor), Bias1);
// H4_expectedVel = gtsam::numericalDerivative11<LieVector, Pose3>(boost::bind(&predictionErrorVel, Pose1, Vel1, Bias1, _1, Vel2, factor), Pose2);
// H5_expectedVel = gtsam::numericalDerivative11<LieVector, LieVector>(boost::bind(&predictionErrorVel, Pose1, Vel1, Bias1, Pose2, _1, factor), Vel2);
//
// // Verify they are equal for this choice of state
// CHECK( gtsam::assert_equal(H1_expectedVel, H1_actualVel, 1e-6));
// CHECK( gtsam::assert_equal(H2_expectedVel, H2_actualVel, 1e-6));
// CHECK( gtsam::assert_equal(H3_expectedVel, H3_actualVel, 1e-6));
// CHECK( gtsam::assert_equal(H4_expectedVel, H4_actualVel, 1e-6));
// CHECK( gtsam::assert_equal(H5_expectedVel, H5_actualVel, 1e-6));
}
/* ************************************************************************* */
int main() { TestResult tr; return TestRegistry::runAllTests(tr);}
/* ************************************************************************* */

View File

@ -206,8 +206,7 @@ TEST( CombinedImuFactor, ErrorWithBiases )
// Create factor
noiseModel::Gaussian::shared_ptr model = noiseModel::Gaussian::Covariance(pre_int_data.PreintMeasCov);
ImuFactor factor(X(1), V(1), X(2), V(2), B(1), pre_int_data, gravity, omegaCoriolis, model);
ImuFactor factor(X(1), V(1), X(2), V(2), B(1), pre_int_data, gravity, omegaCoriolis);
noiseModel::Gaussian::shared_ptr Combinedmodel = noiseModel::Gaussian::Covariance(Combined_pre_int_data.PreintMeasCov);
CombinedImuFactor Combinedfactor(X(1), V(1), X(2), V(2), B(1), B(2), Combined_pre_int_data, gravity, omegaCoriolis, Combinedmodel);

View File

@ -182,8 +182,7 @@ TEST( ImuFactor, Error )
pre_int_data.integrateMeasurement(measuredAcc, measuredOmega, deltaT);
// Create factor
noiseModel::Diagonal::shared_ptr model = noiseModel::Diagonal::Sigmas(Vector_(9, 0.15, 0.15, 0.15, 1.5, 1.5, 1.5, 0.5, 0.5, 0.5));
ImuFactor factor(X(1), V(1), X(2), V(2), B(1), pre_int_data, gravity, omegaCoriolis, model);
ImuFactor factor(X(1), V(1), X(2), V(2), B(1), pre_int_data, gravity, omegaCoriolis);
Vector errorActual = factor.evaluateError(x1, v1, x2, v2, bias);
@ -265,8 +264,7 @@ TEST( ImuFactor, ErrorWithBiases )
// pre_int_data.integrateMeasurement(measuredAcc, measuredOmega, deltaT);
// Create factor
noiseModel::Diagonal::shared_ptr model = noiseModel::Diagonal::Sigmas(Vector_(9, 0.15, 0.15, 0.15, 1.5, 1.5, 1.5, 0.5, 0.5, 0.5));
ImuFactor factor(X(1), V(1), X(2), V(2), B(1), pre_int_data, gravity, omegaCoriolis, model);
ImuFactor factor(X(1), V(1), X(2), V(2), B(1), pre_int_data, gravity, omegaCoriolis);
SETDEBUG("ImuFactor evaluateError", false);
Vector errorActual = factor.evaluateError(x1, v1, x2, v2, bias);
@ -320,7 +318,7 @@ TEST( ImuFactor, PartialDerivativeExpmap )
// Compute numerical derivatives
Matrix expectedDelRdelBiasOmega = numericalDerivative11<Rot3, LieVector>(boost::bind(
&evaluateRotation, measuredOmega, _1, deltaT), biasOmega);
&evaluateRotation, measuredOmega, _1, deltaT), LieVector(biasOmega));
const Matrix3 Jr = ImuFactor::rightJacobianExpMapSO3((measuredOmega - biasOmega) * deltaT);
@ -343,7 +341,7 @@ TEST( ImuFactor, PartialDerivativeLogmap )
// Compute numerical derivatives
Matrix expectedDelFdeltheta = numericalDerivative11<LieVector>(boost::bind(
&evaluateLogRotation, thetahat, _1), deltatheta);
&evaluateLogRotation, thetahat, _1), LieVector(deltatheta));
const Vector3 x = thetahat; // parametrization of so(3)
const Matrix3 X = skewSymmetric(x); // element of Lie algebra so(3): X = x^
@ -529,8 +527,7 @@ TEST( ImuFactor, ErrorWithBiasesAndSensorBodyDisplacement )
pre_int_data.integrateMeasurement(measuredAcc, measuredOmega, deltaT);
// Create factor
noiseModel::Diagonal::shared_ptr model = noiseModel::Diagonal::Sigmas(Vector_(9, 0.15, 0.15, 0.15, 1.5, 1.5, 1.5, 0.5, 0.5, 0.5));
ImuFactor factor(X(1), V(1), X(2), V(2), B(1), pre_int_data, gravity, omegaCoriolis, model);
ImuFactor factor(X(1), V(1), X(2), V(2), B(1), pre_int_data, gravity, omegaCoriolis);
// Expected Jacobians
Matrix H1e = numericalDerivative11<Pose3>(

View File

@ -10,7 +10,7 @@
* -------------------------------------------------------------------------- */
/**
* @file testProjectionFactor.cpp
* @file TestSmartProjectionFactor.cpp
* @brief Unit tests for ProjectionFactor Class
* @author Frank Dellaert
* @date Nov 2009
@ -55,63 +55,71 @@ static SharedNoiseModel model(noiseModel::Unit::Create(2));
using symbol_shorthand::X;
using symbol_shorthand::L;
//typedef GenericProjectionFactor<Pose3, Point3> TestProjectionFactor;
typedef SmartProjectionFactor<Pose3, Point3> TestSmartProjectionFactor;
/* ************************************************************************* *
TEST( MultiProjectionFactor, noiseless ){
cout << " ************************ MultiProjectionFactor: noiseless ****************************" << endl;
Values theta;
NonlinearFactorGraph graph;
Symbol x1('X', 1);
Symbol x2('X', 2);
// Symbol x3('X', 3);
const SharedDiagonal noiseProjection = noiseModel::Isotropic::Sigma(2, 1);
/* ************************************************************************* */
TEST( SmartProjectionFactor, Constructor) {
Key poseKey(X(1));
std::vector<Key> views;
views += x1, x2; //, x3;
views += poseKey;
Cal3_S2::shared_ptr K(new Cal3_S2(1500, 1200, 0, 640, 480));
// create first camera. Looking along X-axis, 1 meter above ground plane (x-y)
Pose3 level_pose = Pose3(Rot3::ypr(-M_PI/2, 0., -M_PI/2), gtsam::Point3(0,0,1));
SimpleCamera level_camera(level_pose, *K);
std::vector<Point2> measurements;
measurements.push_back(Point2(323.0, 240.0));
// create second camera 1 meter to the right of first camera
Pose3 level_pose_right = level_pose * Pose3(Rot3(), Point3(1,0,0));
SimpleCamera level_camera_right(level_pose_right, *K);
// landmark ~5 meters in front of camera
Point3 landmark(5, 0.5, 1.2);
// 1. Project two landmarks into two cameras and triangulate
Point2 level_uv = level_camera.project(landmark);
Point2 level_uv_right = level_camera_right.project(landmark);
Values value;
value.insert(x1, level_pose);
value.insert(x2, level_pose_right);
// poses += level_pose, level_pose_right;
vector<Point2> measurements;
measurements += level_uv, level_uv_right;
SmartProjectionFactor<Pose3, Point3, Cal3_S2> smartFactor(measurements, noiseProjection, views, K);
double actualError = smartFactor.error(value);
double expectedError = 0.0;
DOUBLES_EQUAL(expectedError, actualError, 1e-7);
TestSmartProjectionFactor factor(measurements, model, views, K);
}
/* ************************************************************************* *
TEST( MultiProjectionFactor, noisy ){
/* ************************************************************************* */
TEST( SmartProjectionFactor, ConstructorWithTransform) {
Key poseKey(X(1));
std::vector<Key> views;
views += poseKey;
std::vector<Point2> measurements;
measurements.push_back(Point2(323.0, 240.0));
Pose3 body_P_sensor(Rot3::RzRyRx(-M_PI_2, 0.0, -M_PI_2), Point3(0.25, -0.10, 1.0));
TestSmartProjectionFactor factor(measurements, model, views, K, body_P_sensor);
}
/* ************************************************************************* */
TEST( SmartProjectionFactor, Equals ) {
// Create two identical factors and make sure they're equal
std::vector<Point2> measurements;
measurements.push_back(Point2(323.0, 240.0));
std::vector<Key> views;
views += X(1);
TestSmartProjectionFactor factor1(measurements, model, views, K);
TestSmartProjectionFactor factor2(measurements, model, views, K);
CHECK(assert_equal(factor1, factor2));
}
/* ************************************************************************* */
TEST( SmartProjectionFactor, EqualsWithTransform ) {
// Create two identical factors and make sure they're equal
std::vector<Point2> measurements;
measurements.push_back(Point2(323.0, 240.0));
Pose3 body_P_sensor(Rot3::RzRyRx(-M_PI_2, 0.0, -M_PI_2), Point3(0.25, -0.10, 1.0));
std::vector<Key> views;
views += X(1);
TestSmartProjectionFactor factor1(measurements, model, views, K, body_P_sensor);
TestSmartProjectionFactor factor2(measurements, model, views, K, body_P_sensor);
CHECK(assert_equal(factor1, factor2));
}
/* ************************************************************************* */
TEST( SmartProjectionFactor, noisy ){
cout << " ************************ MultiProjectionFactor: noisy ****************************" << endl;
Symbol x1('X', 1);
Symbol x2('X', 2);
// Symbol x3('X', 3);
const SharedDiagonal noiseProjection = noiseModel::Isotropic::Sigma(2, 1);
@ -119,6 +127,7 @@ TEST( MultiProjectionFactor, noisy ){
views += x1, x2; //, x3;
Cal3_S2::shared_ptr K(new Cal3_S2(1500, 1200, 0, 640, 480));
// create first camera. Looking along X-axis, 1 meter above ground plane (x-y)
Pose3 level_pose = Pose3(Rot3::ypr(-M_PI/2, 0., -M_PI/2), gtsam::Point3(0,0,1));
SimpleCamera level_camera(level_pose, *K);
@ -140,7 +149,6 @@ TEST( MultiProjectionFactor, noisy ){
Pose3 noise_pose = Pose3(Rot3::ypr(-M_PI/10, 0., -M_PI/10), gtsam::Point3(0.5,0.1,0.3));
values.insert(x2, level_pose_right.compose(noise_pose));
// poses += level_pose, level_pose_right;
vector<Point2> measurements;
measurements += level_uv, level_uv_right;
@ -148,7 +156,7 @@ TEST( MultiProjectionFactor, noisy ){
smartFactor(new SmartProjectionFactor<Pose3, Point3, Cal3_S2>(measurements, noiseProjection, views, K));
double actualError = smartFactor->error(values);
double expectedError = sqrt(0.08);
std::cout << "Error: " << actualError << std::endl;
// we do not expect to be able to predict the error, since the error on the pixel will change
// the triangulation of the landmark which is internal to the factor.
@ -157,7 +165,7 @@ TEST( MultiProjectionFactor, noisy ){
/* ************************************************************************* */
TEST( MultiProjectionFactor, 3poses ){
TEST( SmartProjectionFactor, 3poses ){
cout << " ************************ MultiProjectionFactor: 3 cams + 3 landmarks **********************" << endl;
Symbol x1('X', 1);
@ -170,6 +178,7 @@ TEST( MultiProjectionFactor, 3poses ){
views += x1, x2, x3;
Cal3_S2::shared_ptr K(new Cal3_S2(1500, 1200, 0, 640, 480));
// create first camera. Looking along X-axis, 1 meter above ground plane (x-y)
Pose3 pose1 = Pose3(Rot3::ypr(-M_PI/2, 0., -M_PI/2), gtsam::Point3(0,0,1));
SimpleCamera cam1(pose1, *K);
@ -213,9 +222,6 @@ TEST( MultiProjectionFactor, 3poses ){
SmartFactor::shared_ptr smartFactor2(new SmartFactor(measurements_cam2, noiseProjection, views, K));
SmartFactor::shared_ptr smartFactor3(new SmartFactor(measurements_cam3, noiseProjection, views, K));
// double actualError = smartFactor->error(values);
// double expectedError = sqrt(0.08);
const SharedDiagonal noisePrior = noiseModel::Isotropic::Sigma(6, 0.10);
NonlinearFactorGraph graph;
@ -225,9 +231,6 @@ TEST( MultiProjectionFactor, 3poses ){
graph.push_back(PriorFactor<Pose3>(x1, pose1, noisePrior));
graph.push_back(PriorFactor<Pose3>(x2, pose2, noisePrior));
// smartFactor1->print("smartFactor1");
Pose3 noise_pose = Pose3(Rot3::ypr(-M_PI/100, 0., -M_PI/100), gtsam::Point3(0.1,0.1,0.1));
Values values;
values.insert(x1, pose1);
@ -237,16 +240,23 @@ TEST( MultiProjectionFactor, 3poses ){
LevenbergMarquardtParams params;
params.verbosityLM = LevenbergMarquardtParams::TRYLAMBDA;
params.verbosity = NonlinearOptimizerParams::ERROR;
Values result;
gttic_(SmartProjectionFactor);
LevenbergMarquardtOptimizer optimizer(graph, values, params);
Values result = optimizer.optimize();
result = optimizer.optimize();
gttoc_(SmartProjectionFactor);
tictoc_finishedIteration_();
result.print("results of 3 camera, 3 landmark optimization \n");
tictoc_print_();
}
/* *************************************************************************
TEST( MultiProjectionFactor, 3poses_projection_factor ){
/* ************************************************************************* */
TEST( SmartProjectionFactor, 3poses_projection_factor ){
cout << " ************************ Normal ProjectionFactor: 3 cams + 3 landmarks **********************" << endl;
Symbol x1('X', 1);
@ -315,6 +325,63 @@ TEST( MultiProjectionFactor, 3poses_projection_factor ){
result.print("Regular Projection Factor: results of 3 camera, 3 landmark optimization \n");
}
/* ************************************************************************* */
TEST( SmartProjectionFactor, Hessian ){
cout << " ************************ Normal ProjectionFactor: Hessian **********************" << endl;
Symbol x1('X', 1);
Symbol x2('X', 2);
const SharedDiagonal noiseProjection = noiseModel::Isotropic::Sigma(2, 1);
std::vector<Key> views;
views += x1, x2;
Cal3_S2::shared_ptr K(new Cal3_S2(1500, 1200, 0, 640, 480));
// create first camera. Looking along X-axis, 1 meter above ground plane (x-y)
Pose3 pose1 = Pose3(Rot3::ypr(-M_PI/2, 0., -M_PI/2), gtsam::Point3(0,0,1));
SimpleCamera cam1(pose1, *K);
// create second camera 1 meter to the right of first camera
Pose3 pose2 = pose1 * Pose3(Rot3(), Point3(1,0,0));
SimpleCamera cam2(pose2, *K);
// landmark ~5 meters in front of the cameras
Point3 landmark1(5, 0.5, 1.2);
// 1. Project the landmark into both cameras
Point2 cam1_uv1 = cam1.project(landmark1);
Point2 cam2_uv1 = cam2.project(landmark1);
vector<Point2> measurements_cam1;
measurements_cam1 += cam1_uv1, cam2_uv1;
SmartProjectionFactor<Pose3, Point3, Cal3_S2> smartFactor(measurements_cam1, noiseProjection, views, K);
Pose3 noise_pose = Pose3(Rot3::ypr(-M_PI/10, 0., -M_PI/10), gtsam::Point3(0.5,0.1,0.3));
Values values;
values.insert(x1, pose1);
values.insert(x2, pose2);
// values.insert(L(1), landmark1);
Ordering ordering;
ordering.push_back(x1);
ordering.push_back(x2);
boost::shared_ptr<GaussianFactor> hessianFactor = smartFactor.linearize(values, ordering);
hessianFactor->print("Hessian factor \n");
// compute triangulation from linearization point
// compute reprojection errors (sum squared)
// compare with hessianFactor.info(): the bottom right element is the squared sum of the reprojection errors (normalized by the covariance)
// check that it is correctly scaled when using noiseProjection = [1/4 0; 0 1/4]
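// A possible concrete form of these checks (an editorial sketch, hedged: it
// assumes the linearized factor exposes its augmented information matrix,
// e.g. via augmentedInformation() or info()). The bottom-right element is the
// constant term of the quadratic, which should equal twice the nonlinear
// factor error at the linearization point:
//   Matrix augInfo = hessianFactor->augmentedInformation();
//   double constantTerm = augInfo(augInfo.rows() - 1, augInfo.cols() - 1);
//   DOUBLES_EQUAL(2.0 * smartFactor.error(values), constantTerm, 1e-6);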
}

View File

@ -0,0 +1,126 @@
%close all
%clc
import gtsam.*;
%% Read data
IMU_metadata = importdata(gtsam.findExampleDataFile('KittiEquivBiasedImu_metadata.txt'));
IMU_data = importdata(gtsam.findExampleDataFile('KittiEquivBiasedImu.txt'));
% Make text file column headers into struct fields
IMU_metadata = cell2struct(num2cell(IMU_metadata.data), IMU_metadata.colheaders, 2);
IMU_data = cell2struct(num2cell(IMU_data.data), IMU_data.colheaders, 2);
GPS_metadata = importdata(gtsam.findExampleDataFile('KittiGps_metadata.txt'));
GPS_data = importdata(gtsam.findExampleDataFile('KittiGps.txt'));
% Make text file column headers into struct fields
GPS_metadata = cell2struct(num2cell(GPS_metadata.data), GPS_metadata.colheaders, 2);
GPS_data = cell2struct(num2cell(GPS_data.data), GPS_data.colheaders, 2);
%% Convert GPS from lat/long to meters
[ x, y, ~ ] = deg2utm( [GPS_data.Latitude], [GPS_data.Longitude] );
for i = 1:numel(x)
GPS_data(i).Position = gtsam.Point3(x(i), y(i), GPS_data(i).Altitude);
end
% % Calculate GPS sigma in meters
% [ xSig, ySig, ~ ] = deg2utm( [GPS_data.Latitude] + [GPS_data.PositionSigma], ...
% [GPS_data.Longitude] + [GPS_data.PositionSigma]);
% xSig = xSig - x;
% ySig = ySig - y;
%% Start at time of first GPS measurement
firstGPSPose = 2;
%% Get initial conditions for the estimated trajectory
currentPoseGlobal = Pose3(Rot3, GPS_data(firstGPSPose).Position); % initial pose is the reference frame (navigation frame)
currentVelocityGlobal = LieVector([0;0;0]); % the vehicle is stationary at the beginning
currentBias = imuBias.ConstantBias(zeros(3,1), zeros(3,1));
%% Solver object
isamParams = ISAM2Params;
isamParams.setFactorization('QR');
isamParams.setRelinearizeSkip(1);
isam = gtsam.ISAM2(isamParams);
newFactors = NonlinearFactorGraph;
newValues = Values;
%% Create initial estimate and prior on initial pose, velocity, and biases
newValues.insert(symbol('x',firstGPSPose), currentPoseGlobal);
newValues.insert(symbol('v',firstGPSPose), currentVelocityGlobal);
newValues.insert(symbol('b',1), currentBias);
sigma_init_x = noiseModel.Diagonal.Precisions([0;0;0; 1;1;1]);
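% (the Pose3 tangent ordering is [rotation; translation], so zero precision on
% the first three entries leaves the initial orientation unconstrained while
% the initial position is held with unit precision)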
sigma_init_v = noiseModel.Isotropic.Sigma(3, 1000.0);
sigma_init_b = noiseModel.Isotropic.Sigma(6, 0.01);
newFactors.add(PriorFactorPose3(symbol('x',firstGPSPose), currentPoseGlobal, sigma_init_x));
newFactors.add(PriorFactorLieVector(symbol('v',firstGPSPose), currentVelocityGlobal, sigma_init_v));
newFactors.add(PriorFactorConstantBias(symbol('b',1), currentBias, sigma_init_b));
%% Main loop:
% (1) we read the measurements
% (2) we create the corresponding factors in the graph
% (3) we solve the graph to obtain an optimal estimate of the robot trajectory
for poseIndex = firstGPSPose:length(GPS_data)
% At each non-IMU measurement we initialize a new node in the graph
currentPoseKey = symbol('x',poseIndex);
currentVelKey = symbol('v',poseIndex);
currentBiasKey = symbol('b',1);
if poseIndex > firstGPSPose
% Summarize IMU data between the previous GPS measurement and now
IMUindices = find([IMU_data.Time] > GPS_data(poseIndex-1).Time ...
& [IMU_data.Time] <= GPS_data(poseIndex).Time);
currentSummarizedMeasurement = gtsam.ImuFactorPreintegratedMeasurements( ...
currentBias, IMU_metadata.AccelerometerSigma.^2 * eye(3), ...
IMU_metadata.GyroscopeSigma.^2 * eye(3), IMU_metadata.IntegrationSigma.^2 * eye(3));
for imuIndex = IMUindices
accMeas = [ IMU_data(imuIndex).accelX; IMU_data(imuIndex).accelY; IMU_data(imuIndex).accelZ ];
omegaMeas = [ IMU_data(imuIndex).omegaX; IMU_data(imuIndex).omegaY; IMU_data(imuIndex).omegaZ ];
deltaT = IMU_data(imuIndex).dt;
currentSummarizedMeasurement.integrateMeasurement(accMeas, omegaMeas, deltaT);
end
% Create IMU factor
newFactors.add(ImuFactor( ...
currentPoseKey-1, currentVelKey-1, ...
currentPoseKey, currentVelKey, ...
currentBiasKey, currentSummarizedMeasurement, [0;0;-9.8], [0;0;0]));
% Create GPS factor
newFactors.add(PriorFactorPose3(currentPoseKey, Pose3(currentPoseGlobal.rotation, GPS_data(poseIndex).Position), ...
noiseModel.Diagonal.Precisions([ zeros(3,1); 1./(GPS_data(poseIndex).PositionSigma).^2*ones(3,1) ])));
% Add initial value
newValues.insert(currentPoseKey, Pose3(currentPoseGlobal.rotation, GPS_data(poseIndex).Position));
newValues.insert(currentVelKey, currentVelocityGlobal);
%newValues.insert(currentBiasKey, currentBias);
% Update solver
% =======================================================================
isam.update(newFactors, newValues);
newFactors = NonlinearFactorGraph;
newValues = Values;
cla;
plot3DTrajectory(isam.calculateEstimate, 'g-');
drawnow;
% =======================================================================
currentPoseGlobal = isam.calculateEstimate(currentPoseKey);
currentVelocityGlobal = isam.calculateEstimate(currentVelKey);
currentBias = isam.calculateEstimate(currentBiasKey);
end
end
disp('TODO: display results')
% figure(1)
% hold on;
% plot(positions(1,:), positions(2,:), '-b');
% plot3DTrajectory(isam.calculateEstimate, 'g-');
% axis equal;
% legend('true trajectory', 'traj integrated in body', 'traj integrated in nav')
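% One possible way to fill in the TODO above, reusing only calls already made
% earlier in this script: re-draw the final ISAM2 estimate once the loop is done.
figure(1); clf; hold on;
plot3DTrajectory(isam.calculateEstimate, 'g-');
axis equal;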

View File

@ -0,0 +1,133 @@
function [x,y,utmzone] = deg2utm(Lat,Lon)
% -------------------------------------------------------------------------
% [x,y,utmzone] = deg2utm(Lat,Lon)
%
% Description: Function to convert lat/lon vectors into UTM coordinates (WGS84).
% Some code has been extracted from UTM.m function by Gabriel Ruiz Martinez.
%
% Inputs:
% Lat: Latitude vector. Degrees. +ddd.ddddd WGS84
% Lon: Longitude vector. Degrees. +ddd.ddddd WGS84
%
% Outputs:
% x, y , utmzone. See example
%
% Example 1:
% Lat=[40.3154333; 46.283900; 37.577833; 28.645650; 38.855550; 25.061783];
% Lon=[-3.4857166; 7.8012333; -119.95525; -17.759533; -94.7990166; 121.640266];
% [x,y,utmzone] = deg2utm(Lat,Lon);
% fprintf('%7.0f ',x)
% 458731 407653 239027 230253 343898 362850
% fprintf('%7.0f ',y)
% 4462881 5126290 4163083 3171843 4302285 2772478
% utmzone =
% 30 T
% 32 T
% 11 S
% 28 R
% 15 S
% 51 R
%
% Example 2: If you have Lat/Lon coordinates in Degrees, Minutes and Seconds
% LatDMS=[40 18 55.56; 46 17 2.04];
% LonDMS=[-3 29 8.58; 7 48 4.44];
% Lat=dms2deg(mat2dms(LatDMS)); %convert into degrees
% Lon=dms2deg(mat2dms(LonDMS)); %convert into degrees
% [x,y,utmzone] = deg2utm(Lat,Lon)
%
% Author:
% Rafael Palacios
% Universidad Pontificia Comillas
% Madrid, Spain
% Version: Apr/06, Jun/06, Aug/06, Aug/06
% Aug/06: fixed a problem (found by Rodolphe Dewarrat) related to southern
% hemisphere coordinates.
% Aug/06: corrected m-Lint warnings
%-------------------------------------------------------------------------
% Argument checking
%
error(nargchk(2, 2, nargin)); %2 arguments required
n1=length(Lat);
n2=length(Lon);
if (n1~=n2)
error('Lat and Lon vectors should have the same length');
end
% Memory pre-allocation
%
x=zeros(n1,1);
y=zeros(n1,1);
utmzone(n1,:)='60 X';
% Main Loop
%
for i=1:n1
la=Lat(i);
lo=Lon(i);
sa = 6378137.000000 ; sb = 6356752.314245;
%e = ( ( ( sa ^ 2 ) - ( sb ^ 2 ) ) ^ 0.5 ) / sa;
e2 = ( ( ( sa ^ 2 ) - ( sb ^ 2 ) ) ^ 0.5 ) / sb;
e2cuadrada = e2 ^ 2;
c = ( sa ^ 2 ) / sb;
%alpha = ( sa - sb ) / sa; %f
%ablandamiento = 1 / alpha; % 1/f
lat = la * ( pi / 180 );
lon = lo * ( pi / 180 );
Huso = fix( ( lo / 6 ) + 31);
S = ( ( Huso * 6 ) - 183 );
deltaS = lon - ( S * ( pi / 180 ) );
if (la<-72), Letra='C';
elseif (la<-64), Letra='D';
elseif (la<-56), Letra='E';
elseif (la<-48), Letra='F';
elseif (la<-40), Letra='G';
elseif (la<-32), Letra='H';
elseif (la<-24), Letra='J';
elseif (la<-16), Letra='K';
elseif (la<-8), Letra='L';
elseif (la<0), Letra='M';
elseif (la<8), Letra='N';
elseif (la<16), Letra='P';
elseif (la<24), Letra='Q';
elseif (la<32), Letra='R';
elseif (la<40), Letra='S';
elseif (la<48), Letra='T';
elseif (la<56), Letra='U';
elseif (la<64), Letra='V';
elseif (la<72), Letra='W';
else Letra='X';
end
a = cos(lat) * sin(deltaS);
epsilon = 0.5 * log( ( 1 + a) / ( 1 - a ) );
nu = atan( tan(lat) / cos(deltaS) ) - lat;
v = ( c / ( ( 1 + ( e2cuadrada * ( cos(lat) ) ^ 2 ) ) ) ^ 0.5 ) * 0.9996;
ta = ( e2cuadrada / 2 ) * epsilon ^ 2 * ( cos(lat) ) ^ 2;
a1 = sin( 2 * lat );
a2 = a1 * ( cos(lat) ) ^ 2;
j2 = lat + ( a1 / 2 );
j4 = ( ( 3 * j2 ) + a2 ) / 4;
j6 = ( ( 5 * j4 ) + ( a2 * ( cos(lat) ) ^ 2) ) / 3;
alfa = ( 3 / 4 ) * e2cuadrada;
beta = ( 5 / 3 ) * alfa ^ 2;
gama = ( 35 / 27 ) * alfa ^ 3;
Bm = 0.9996 * c * ( lat - alfa * j2 + beta * j4 - gama * j6 );
xx = epsilon * v * ( 1 + ( ta / 3 ) ) + 500000;
yy = nu * v * ( 1 + ta ) + Bm;
if (yy<0)
yy=9999999+yy;
end
x(i)=xx;
y(i)=yy;
utmzone(i,:)=sprintf('%02d %c',Huso,Letra);
end