kindlytrees/orbslam3-with-comments
Tracking.cc 149.09 KB
kindlytrees committed on 2025-02-21 10:16 +08:00: update comments
/**
* This file is part of ORB-SLAM3
*
* Copyright (C) 2017-2021 Carlos Campos, Richard Elvira, Juan J. Gómez Rodríguez, José M.M. Montiel and Juan D. Tardós, University of Zaragoza.
* Copyright (C) 2014-2016 Raúl Mur-Artal, José M.M. Montiel and Juan D. Tardós, University of Zaragoza.
*
* ORB-SLAM3 is free software: you can redistribute it and/or modify it under the terms of the GNU General Public
* License as published by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* ORB-SLAM3 is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even
* the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along with ORB-SLAM3.
* If not, see <http://www.gnu.org/licenses/>.
*/
#include "Tracking.h"
#include "ORBmatcher.h"
#include "FrameDrawer.h"
#include "Converter.h"
#include "G2oTypes.h"
#include "Optimizer.h"
#include "Pinhole.h"
#include "KannalaBrandt8.h"
#include "MLPnPsolver.h"
#include "GeometricTools.h"
#include <iostream>
#include <mutex>
#include <chrono>
using namespace std;
namespace ORB_SLAM3
{
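// Tracking constructor: wires the tracking thread to the rest of the system
// (System, ORB vocabulary, frame/map drawers, Atlas, keyframe database) and
// loads the camera, ORB-extractor and IMU configuration, either from an
// already-parsed Settings object or by parsing the YAML file at strSettingPath
// (see ParseCamParamFile / ParseORBParamFile / ParseIMUParamFile below).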
Tracking::Tracking(System *pSys, ORBVocabulary* pVoc, FrameDrawer *pFrameDrawer, MapDrawer *pMapDrawer, Atlas *pAtlas, KeyFrameDatabase* pKFDB, const string &strSettingPath, const int sensor, Settings* settings, const string &_nameSeq):
mState(NO_IMAGES_YET), mSensor(sensor), mTrackedFr(0), mbStep(false),
mbOnlyTracking(false), mbMapUpdated(false), mbVO(false), mpORBVocabulary(pVoc), mpKeyFrameDB(pKFDB),
mbReadyToInitializate(false), mpSystem(pSys), mpViewer(NULL), bStepByStep(false),
mpFrameDrawer(pFrameDrawer), mpMapDrawer(pMapDrawer), mpAtlas(pAtlas), mnLastRelocFrameId(0), time_recently_lost(5.0),
mnInitialFrameId(0), mbCreatedMap(false), mnFirstFrameId(0), mpCamera2(nullptr), mpLastKeyFrame(static_cast<KeyFrame*>(NULL))
{
// Load camera parameters from settings file
if(settings){
newParameterLoader(settings);
}
else{
cv::FileStorage fSettings(strSettingPath, cv::FileStorage::READ);
bool b_parse_cam = ParseCamParamFile(fSettings);
if(!b_parse_cam)
{
std::cout << "*Error with the camera parameters in the config file*" << std::endl;
}
// Load ORB parameters
bool b_parse_orb = ParseORBParamFile(fSettings);
if(!b_parse_orb)
{
std::cout << "*Error with the ORB parameters in the config file*" << std::endl;
}
bool b_parse_imu = true;
if(sensor==System::IMU_MONOCULAR || sensor==System::IMU_STEREO || sensor==System::IMU_RGBD)
{
b_parse_imu = ParseIMUParamFile(fSettings);
if(!b_parse_imu)
{
std::cout << "*Error with the IMU parameters in the config file*" << std::endl;
}
mnFramesToResetIMU = mMaxFrames;
}
if(!b_parse_cam || !b_parse_orb || !b_parse_imu)
{
std::cerr << "**ERROR in the config file, the format is not correct**" << std::endl;
// Note: the thrown int is not caught by catch(exception&), so if the config
// file is malformed std::terminate is called and the process aborts here.
try
{
throw -1;
}
catch(exception &e)
{
}
}
}
initID = 0; lastID = 0;
mbInitWith3KFs = false;
mnNumDataset = 0;
vector<GeometricCamera*> vpCams = mpAtlas->GetAllCameras();
std::cout << "There are " << vpCams.size() << " cameras in the atlas" << std::endl;
for(GeometricCamera* pCam : vpCams)
{
std::cout << "Camera " << pCam->GetId();
if(pCam->GetType() == GeometricCamera::CAM_PINHOLE)
{
std::cout << " is pinhole" << std::endl;
}
else if(pCam->GetType() == GeometricCamera::CAM_FISHEYE)
{
std::cout << " is fisheye" << std::endl;
}
else
{
std::cout << " is unknown" << std::endl;
}
}
#ifdef REGISTER_TIMES
vdRectStereo_ms.clear();
vdResizeImage_ms.clear();
vdORBExtract_ms.clear();
vdStereoMatch_ms.clear();
vdIMUInteg_ms.clear();
vdPosePred_ms.clear();
vdLMTrack_ms.clear();
vdNewKF_ms.clear();
vdTrackTotal_ms.clear();
#endif
}
#ifdef REGISTER_TIMES
double calcAverage(vector<double> v_times)
{
double accum = 0;
for(double value : v_times)
{
accum += value;
}
return accum / v_times.size();
}
double calcDeviation(vector<double> v_times, double average)
{
double accum = 0;
for(double value : v_times)
{
accum += pow(value - average, 2);
}
return sqrt(accum / v_times.size());
}
double calcAverage(vector<int> v_values)
{
double accum = 0;
int total = 0;
for(double value : v_values)
{
if(value == 0)
continue;
accum += value;
total++;
}
return accum / total;
}
double calcDeviation(vector<int> v_values, double average)
{
double accum = 0;
int total = 0;
for(double value : v_values)
{
if(value == 0)
continue;
accum += pow(value - average, 2);
total++;
}
return sqrt(accum / total);
}
void Tracking::LocalMapStats2File()
{
ofstream f;
f.open("LocalMapTimeStats.txt");
f << fixed << setprecision(6);
f << "#Stereo rect[ms], MP culling[ms], MP creation[ms], LBA[ms], KF culling[ms], Total[ms]" << endl;
for(int i=0; i<mpLocalMapper->vdLMTotal_ms.size(); ++i)
{
f << mpLocalMapper->vdKFInsert_ms[i] << "," << mpLocalMapper->vdMPCulling_ms[i] << ","
<< mpLocalMapper->vdMPCreation_ms[i] << "," << mpLocalMapper->vdLBASync_ms[i] << ","
<< mpLocalMapper->vdKFCullingSync_ms[i] << "," << mpLocalMapper->vdLMTotal_ms[i] << endl;
}
f.close();
f.open("LBA_Stats.txt");
f << fixed << setprecision(6);
f << "#LBA time[ms], KF opt[#], KF fixed[#], MP[#], Edges[#]" << endl;
for(int i=0; i<mpLocalMapper->vdLBASync_ms.size(); ++i)
{
f << mpLocalMapper->vdLBASync_ms[i] << "," << mpLocalMapper->vnLBA_KFopt[i] << ","
<< mpLocalMapper->vnLBA_KFfixed[i] << "," << mpLocalMapper->vnLBA_MPs[i] << ","
<< mpLocalMapper->vnLBA_edges[i] << endl;
}
f.close();
}
void Tracking::TrackStats2File()
{
ofstream f;
f.open("SessionInfo.txt");
f << fixed;
f << "Number of KFs: " << mpAtlas->GetAllKeyFrames().size() << endl;
f << "Number of MPs: " << mpAtlas->GetAllMapPoints().size() << endl;
f << "OpenCV version: " << CV_VERSION << endl;
f.close();
f.open("TrackingTimeStats.txt");
f << fixed << setprecision(6);
f << "#Image Rect[ms], Image Resize[ms], ORB ext[ms], Stereo match[ms], IMU preint[ms], Pose pred[ms], LM track[ms], KF dec[ms], Total[ms]" << endl;
for(int i=0; i<vdTrackTotal_ms.size(); ++i)
{
double stereo_rect = 0.0;
if(!vdRectStereo_ms.empty())
{
stereo_rect = vdRectStereo_ms[i];
}
double resize_image = 0.0;
if(!vdResizeImage_ms.empty())
{
resize_image = vdResizeImage_ms[i];
}
double stereo_match = 0.0;
if(!vdStereoMatch_ms.empty())
{
stereo_match = vdStereoMatch_ms[i];
}
double imu_preint = 0.0;
if(!vdIMUInteg_ms.empty())
{
imu_preint = vdIMUInteg_ms[i];
}
f << stereo_rect << "," << resize_image << "," << vdORBExtract_ms[i] << "," << stereo_match << "," << imu_preint << ","
<< vdPosePred_ms[i] << "," << vdLMTrack_ms[i] << "," << vdNewKF_ms[i] << "," << vdTrackTotal_ms[i] << endl;
}
f.close();
}
void Tracking::PrintTimeStats()
{
// Save data in files
TrackStats2File();
LocalMapStats2File();
ofstream f;
f.open("ExecMean.txt");
f << fixed;
//Report the mean and std of each one
std::cout << std::endl << " TIME STATS in ms (mean$\\pm$std)" << std::endl;
f << " TIME STATS in ms (mean$\\pm$std)" << std::endl;
cout << "OpenCV version: " << CV_VERSION << endl;
f << "OpenCV version: " << CV_VERSION << endl;
std::cout << "---------------------------" << std::endl;
std::cout << "Tracking" << std::setprecision(5) << std::endl << std::endl;
f << "---------------------------" << std::endl;
f << "Tracking" << std::setprecision(5) << std::endl << std::endl;
double average, deviation;
if(!vdRectStereo_ms.empty())
{
average = calcAverage(vdRectStereo_ms);
deviation = calcDeviation(vdRectStereo_ms, average);
std::cout << "Stereo Rectification: " << average << "$\\pm$" << deviation << std::endl;
f << "Stereo Rectification: " << average << "$\\pm$" << deviation << std::endl;
}
if(!vdResizeImage_ms.empty())
{
average = calcAverage(vdResizeImage_ms);
deviation = calcDeviation(vdResizeImage_ms, average);
std::cout << "Image Resize: " << average << "$\\pm$" << deviation << std::endl;
f << "Image Resize: " << average << "$\\pm$" << deviation << std::endl;
}
average = calcAverage(vdORBExtract_ms);
deviation = calcDeviation(vdORBExtract_ms, average);
std::cout << "ORB Extraction: " << average << "$\\pm$" << deviation << std::endl;
f << "ORB Extraction: " << average << "$\\pm$" << deviation << std::endl;
if(!vdStereoMatch_ms.empty())
{
average = calcAverage(vdStereoMatch_ms);
deviation = calcDeviation(vdStereoMatch_ms, average);
std::cout << "Stereo Matching: " << average << "$\\pm$" << deviation << std::endl;
f << "Stereo Matching: " << average << "$\\pm$" << deviation << std::endl;
}
if(!vdIMUInteg_ms.empty())
{
average = calcAverage(vdIMUInteg_ms);
deviation = calcDeviation(vdIMUInteg_ms, average);
std::cout << "IMU Preintegration: " << average << "$\\pm$" << deviation << std::endl;
f << "IMU Preintegration: " << average << "$\\pm$" << deviation << std::endl;
}
average = calcAverage(vdPosePred_ms);
deviation = calcDeviation(vdPosePred_ms, average);
std::cout << "Pose Prediction: " << average << "$\\pm$" << deviation << std::endl;
f << "Pose Prediction: " << average << "$\\pm$" << deviation << std::endl;
average = calcAverage(vdLMTrack_ms);
deviation = calcDeviation(vdLMTrack_ms, average);
std::cout << "LM Track: " << average << "$\\pm$" << deviation << std::endl;
f << "LM Track: " << average << "$\\pm$" << deviation << std::endl;
average = calcAverage(vdNewKF_ms);
deviation = calcDeviation(vdNewKF_ms, average);
std::cout << "New KF decision: " << average << "$\\pm$" << deviation << std::endl;
f << "New KF decision: " << average << "$\\pm$" << deviation << std::endl;
average = calcAverage(vdTrackTotal_ms);
deviation = calcDeviation(vdTrackTotal_ms, average);
std::cout << "Total Tracking: " << average << "$\\pm$" << deviation << std::endl;
f << "Total Tracking: " << average << "$\\pm$" << deviation << std::endl;
// Local Mapping time stats
std::cout << std::endl << std::endl << std::endl;
std::cout << "Local Mapping" << std::endl << std::endl;
f << std::endl << "Local Mapping" << std::endl << std::endl;
average = calcAverage(mpLocalMapper->vdKFInsert_ms);
deviation = calcDeviation(mpLocalMapper->vdKFInsert_ms, average);
std::cout << "KF Insertion: " << average << "$\\pm$" << deviation << std::endl;
f << "KF Insertion: " << average << "$\\pm$" << deviation << std::endl;
average = calcAverage(mpLocalMapper->vdMPCulling_ms);
deviation = calcDeviation(mpLocalMapper->vdMPCulling_ms, average);
std::cout << "MP Culling: " << average << "$\\pm$" << deviation << std::endl;
f << "MP Culling: " << average << "$\\pm$" << deviation << std::endl;
average = calcAverage(mpLocalMapper->vdMPCreation_ms);
deviation = calcDeviation(mpLocalMapper->vdMPCreation_ms, average);
std::cout << "MP Creation: " << average << "$\\pm$" << deviation << std::endl;
f << "MP Creation: " << average << "$\\pm$" << deviation << std::endl;
average = calcAverage(mpLocalMapper->vdLBA_ms);
deviation = calcDeviation(mpLocalMapper->vdLBA_ms, average);
std::cout << "LBA: " << average << "$\\pm$" << deviation << std::endl;
f << "LBA: " << average << "$\\pm$" << deviation << std::endl;
average = calcAverage(mpLocalMapper->vdKFCulling_ms);
deviation = calcDeviation(mpLocalMapper->vdKFCulling_ms, average);
std::cout << "KF Culling: " << average << "$\\pm$" << deviation << std::endl;
f << "KF Culling: " << average << "$\\pm$" << deviation << std::endl;
average = calcAverage(mpLocalMapper->vdLMTotal_ms);
deviation = calcDeviation(mpLocalMapper->vdLMTotal_ms, average);
std::cout << "Total Local Mapping: " << average << "$\\pm$" << deviation << std::endl;
f << "Total Local Mapping: " << average << "$\\pm$" << deviation << std::endl;
// Local Mapping LBA complexity
std::cout << "---------------------------" << std::endl;
std::cout << std::endl << "LBA complexity (mean$\\pm$std)" << std::endl;
f << "---------------------------" << std::endl;
f << std::endl << "LBA complexity (mean$\\pm$std)" << std::endl;
average = calcAverage(mpLocalMapper->vnLBA_edges);
deviation = calcDeviation(mpLocalMapper->vnLBA_edges, average);
std::cout << "LBA Edges: " << average << "$\\pm$" << deviation << std::endl;
f << "LBA Edges: " << average << "$\\pm$" << deviation << std::endl;
average = calcAverage(mpLocalMapper->vnLBA_KFopt);
deviation = calcDeviation(mpLocalMapper->vnLBA_KFopt, average);
std::cout << "LBA KF optimized: " << average << "$\\pm$" << deviation << std::endl;
f << "LBA KF optimized: " << average << "$\\pm$" << deviation << std::endl;
average = calcAverage(mpLocalMapper->vnLBA_KFfixed);
deviation = calcDeviation(mpLocalMapper->vnLBA_KFfixed, average);
std::cout << "LBA KF fixed: " << average << "$\\pm$" << deviation << std::endl;
f << "LBA KF fixed: " << average << "$\\pm$" << deviation << std::endl;
average = calcAverage(mpLocalMapper->vnLBA_MPs);
deviation = calcDeviation(mpLocalMapper->vnLBA_MPs, average);
std::cout << "LBA MP: " << average << "$\\pm$" << deviation << std::endl << std::endl;
f << "LBA MP: " << average << "$\\pm$" << deviation << std::endl << std::endl;
std::cout << "LBA executions: " << mpLocalMapper->nLBA_exec << std::endl;
std::cout << "LBA aborts: " << mpLocalMapper->nLBA_abort << std::endl;
f << "LBA executions: " << mpLocalMapper->nLBA_exec << std::endl;
f << "LBA aborts: " << mpLocalMapper->nLBA_abort << std::endl;
// Map complexity
std::cout << "---------------------------" << std::endl;
std::cout << std::endl << "Map complexity" << std::endl;
std::cout << "KFs in map: " << mpAtlas->GetAllKeyFrames().size() << std::endl;
std::cout << "MPs in map: " << mpAtlas->GetAllMapPoints().size() << std::endl;
f << "---------------------------" << std::endl;
f << std::endl << "Map complexity" << std::endl;
vector<Map*> vpMaps = mpAtlas->GetAllMaps();
Map* pBestMap = vpMaps[0];
for(int i=1; i<vpMaps.size(); ++i)
{
if(pBestMap->GetAllKeyFrames().size() < vpMaps[i]->GetAllKeyFrames().size())
{
pBestMap = vpMaps[i];
}
}
f << "KFs in map: " << pBestMap->GetAllKeyFrames().size() << std::endl;
f << "MPs in map: " << pBestMap->GetAllMapPoints().size() << std::endl;
f << "---------------------------" << std::endl;
f << std::endl << "Place Recognition (mean$\\pm$std)" << std::endl;
std::cout << "---------------------------" << std::endl;
std::cout << std::endl << "Place Recognition (mean$\\pm$std)" << std::endl;
average = calcAverage(mpLoopClosing->vdDataQuery_ms);
deviation = calcDeviation(mpLoopClosing->vdDataQuery_ms, average);
f << "Database Query: " << average << "$\\pm$" << deviation << std::endl;
std::cout << "Database Query: " << average << "$\\pm$" << deviation << std::endl;
average = calcAverage(mpLoopClosing->vdEstSim3_ms);
deviation = calcDeviation(mpLoopClosing->vdEstSim3_ms, average);
f << "SE3 estimation: " << average << "$\\pm$" << deviation << std::endl;
std::cout << "SE3 estimation: " << average << "$\\pm$" << deviation << std::endl;
average = calcAverage(mpLoopClosing->vdPRTotal_ms);
deviation = calcDeviation(mpLoopClosing->vdPRTotal_ms, average);
f << "Total Place Recognition: " << average << "$\\pm$" << deviation << std::endl << std::endl;
std::cout << "Total Place Recognition: " << average << "$\\pm$" << deviation << std::endl << std::endl;
f << std::endl << "Loop Closing (mean$\\pm$std)" << std::endl;
std::cout << std::endl << "Loop Closing (mean$\\pm$std)" << std::endl;
average = calcAverage(mpLoopClosing->vdLoopFusion_ms);
deviation = calcDeviation(mpLoopClosing->vdLoopFusion_ms, average);
f << "Loop Fusion: " << average << "$\\pm$" << deviation << std::endl;
std::cout << "Loop Fusion: " << average << "$\\pm$" << deviation << std::endl;
average = calcAverage(mpLoopClosing->vdLoopOptEss_ms);
deviation = calcDeviation(mpLoopClosing->vdLoopOptEss_ms, average);
f << "Essential Graph: " << average << "$\\pm$" << deviation << std::endl;
std::cout << "Essential Graph: " << average << "$\\pm$" << deviation << std::endl;
average = calcAverage(mpLoopClosing->vdLoopTotal_ms);
deviation = calcDeviation(mpLoopClosing->vdLoopTotal_ms, average);
f << "Total Loop Closing: " << average << "$\\pm$" << deviation << std::endl << std::endl;
std::cout << "Total Loop Closing: " << average << "$\\pm$" << deviation << std::endl << std::endl;
f << "Numb exec: " << mpLoopClosing->nLoop << std::endl;
std::cout << "Num exec: " << mpLoopClosing->nLoop << std::endl;
average = calcAverage(mpLoopClosing->vnLoopKFs);
deviation = calcDeviation(mpLoopClosing->vnLoopKFs, average);
f << "Number of KFs: " << average << "$\\pm$" << deviation << std::endl;
std::cout << "Number of KFs: " << average << "$\\pm$" << deviation << std::endl;
f << std::endl << "Map Merging (mean$\\pm$std)" << std::endl;
std::cout << std::endl << "Map Merging (mean$\\pm$std)" << std::endl;
average = calcAverage(mpLoopClosing->vdMergeMaps_ms);
deviation = calcDeviation(mpLoopClosing->vdMergeMaps_ms, average);
f << "Merge Maps: " << average << "$\\pm$" << deviation << std::endl;
std::cout << "Merge Maps: " << average << "$\\pm$" << deviation << std::endl;
average = calcAverage(mpLoopClosing->vdWeldingBA_ms);
deviation = calcDeviation(mpLoopClosing->vdWeldingBA_ms, average);
f << "Welding BA: " << average << "$\\pm$" << deviation << std::endl;
std::cout << "Welding BA: " << average << "$\\pm$" << deviation << std::endl;
average = calcAverage(mpLoopClosing->vdMergeOptEss_ms);
deviation = calcDeviation(mpLoopClosing->vdMergeOptEss_ms, average);
f << "Optimization Ess.: " << average << "$\\pm$" << deviation << std::endl;
std::cout << "Optimization Ess.: " << average << "$\\pm$" << deviation << std::endl;
average = calcAverage(mpLoopClosing->vdMergeTotal_ms);
deviation = calcDeviation(mpLoopClosing->vdMergeTotal_ms, average);
f << "Total Map Merging: " << average << "$\\pm$" << deviation << std::endl << std::endl;
std::cout << "Total Map Merging: " << average << "$\\pm$" << deviation << std::endl << std::endl;
f << "Numb exec: " << mpLoopClosing->nMerges << std::endl;
std::cout << "Num exec: " << mpLoopClosing->nMerges << std::endl;
average = calcAverage(mpLoopClosing->vnMergeKFs);
deviation = calcDeviation(mpLoopClosing->vnMergeKFs, average);
f << "Number of KFs: " << average << "$\\pm$" << deviation << std::endl;
std::cout << "Number of KFs: " << average << "$\\pm$" << deviation << std::endl;
average = calcAverage(mpLoopClosing->vnMergeMPs);
deviation = calcDeviation(mpLoopClosing->vnMergeMPs, average);
f << "Number of MPs: " << average << "$\\pm$" << deviation << std::endl;
std::cout << "Number of MPs: " << average << "$\\pm$" << deviation << std::endl;
f << std::endl << "Full GBA (mean$\\pm$std)" << std::endl;
std::cout << std::endl << "Full GBA (mean$\\pm$std)" << std::endl;
average = calcAverage(mpLoopClosing->vdGBA_ms);
deviation = calcDeviation(mpLoopClosing->vdGBA_ms, average);
f << "GBA: " << average << "$\\pm$" << deviation << std::endl;
std::cout << "GBA: " << average << "$\\pm$" << deviation << std::endl;
average = calcAverage(mpLoopClosing->vdUpdateMap_ms);
deviation = calcDeviation(mpLoopClosing->vdUpdateMap_ms, average);
f << "Map Update: " << average << "$\\pm$" << deviation << std::endl;
std::cout << "Map Update: " << average << "$\\pm$" << deviation << std::endl;
average = calcAverage(mpLoopClosing->vdFGBATotal_ms);
deviation = calcDeviation(mpLoopClosing->vdFGBATotal_ms, average);
f << "Total Full GBA: " << average << "$\\pm$" << deviation << std::endl << std::endl;
std::cout << "Total Full GBA: " << average << "$\\pm$" << deviation << std::endl << std::endl;
f << "Numb exec: " << mpLoopClosing->nFGBA_exec << std::endl;
std::cout << "Num exec: " << mpLoopClosing->nFGBA_exec << std::endl;
f << "Numb abort: " << mpLoopClosing->nFGBA_abort << std::endl;
std::cout << "Num abort: " << mpLoopClosing->nFGBA_abort << std::endl;
average = calcAverage(mpLoopClosing->vnGBAKFs);
deviation = calcDeviation(mpLoopClosing->vnGBAKFs, average);
f << "Number of KFs: " << average << "$\\pm$" << deviation << std::endl;
std::cout << "Number of KFs: " << average << "$\\pm$" << deviation << std::endl;
average = calcAverage(mpLoopClosing->vnGBAMPs);
deviation = calcDeviation(mpLoopClosing->vnGBAMPs, average);
f << "Number of MPs: " << average << "$\\pm$" << deviation << std::endl;
std::cout << "Number of MPs: " << average << "$\\pm$" << deviation << std::endl;
f.close();
}
#endif
Tracking::~Tracking()
{
//f_track_stats.close();
}
void Tracking::newParameterLoader(Settings *settings) {
mpCamera = settings->camera1();
mpCamera = mpAtlas->AddCamera(mpCamera);
if(settings->needToUndistort()){
mDistCoef = settings->camera1DistortionCoef();
}
else{
mDistCoef = cv::Mat::zeros(4,1,CV_32F);
}
//TODO: missing image scaling and rectification
mImageScale = 1.0f;
mK = cv::Mat::eye(3,3,CV_32F);
mK.at<float>(0,0) = mpCamera->getParameter(0);
mK.at<float>(1,1) = mpCamera->getParameter(1);
mK.at<float>(0,2) = mpCamera->getParameter(2);
mK.at<float>(1,2) = mpCamera->getParameter(3);
mK_.setIdentity();
mK_(0,0) = mpCamera->getParameter(0);
mK_(1,1) = mpCamera->getParameter(1);
mK_(0,2) = mpCamera->getParameter(2);
mK_(1,2) = mpCamera->getParameter(3);
if((mSensor==System::STEREO || mSensor==System::IMU_STEREO || mSensor==System::IMU_RGBD) &&
settings->cameraType() == Settings::KannalaBrandt){
mpCamera2 = settings->camera2();
mpCamera2 = mpAtlas->AddCamera(mpCamera2);
mTlr = settings->Tlr();
mpFrameDrawer->both = true;
}
if(mSensor==System::STEREO || mSensor==System::RGBD || mSensor==System::IMU_STEREO || mSensor==System::IMU_RGBD ){
mbf = settings->bf();
mThDepth = settings->b() * settings->thDepth();
}
if(mSensor==System::RGBD || mSensor==System::IMU_RGBD){
mDepthMapFactor = settings->depthMapFactor();
if(fabs(mDepthMapFactor)<1e-5)
mDepthMapFactor=1;
else
mDepthMapFactor = 1.0f/mDepthMapFactor;
}
mMinFrames = 0;
mMaxFrames = settings->fps();
mbRGB = settings->rgb();
//ORB parameters
int nFeatures = settings->nFeatures();
int nLevels = settings->nLevels();
int fIniThFAST = settings->initThFAST();
int fMinThFAST = settings->minThFAST();
float fScaleFactor = settings->scaleFactor();
mpORBextractorLeft = new ORBextractor(nFeatures,fScaleFactor,nLevels,fIniThFAST,fMinThFAST);
if(mSensor==System::STEREO || mSensor==System::IMU_STEREO)
mpORBextractorRight = new ORBextractor(nFeatures,fScaleFactor,nLevels,fIniThFAST,fMinThFAST);
if(mSensor==System::MONOCULAR || mSensor==System::IMU_MONOCULAR)
mpIniORBextractor = new ORBextractor(5*nFeatures,fScaleFactor,nLevels,fIniThFAST,fMinThFAST);
//IMU parameters
Sophus::SE3f Tbc = settings->Tbc();
mInsertKFsLost = settings->insertKFsWhenLost();
mImuFreq = settings->imuFrequency();
mImuPer = 0.001; //1.0 / (double) mImuFreq; //TODO: is this right?
float Ng = settings->noiseGyro();
float Na = settings->noiseAcc();
float Ngw = settings->gyroWalk();
float Naw = settings->accWalk();
const float sf = sqrt(mImuFreq);
mpImuCalib = new IMU::Calib(Tbc,Ng*sf,Na*sf,Ngw/sf,Naw/sf);
mpImuPreintegratedFromLastKF = new IMU::Preintegrated(IMU::Bias(),*mpImuCalib);
}
bool Tracking::ParseCamParamFile(cv::FileStorage &fSettings)
{
mDistCoef = cv::Mat::zeros(4,1,CV_32F);
cout << endl << "Camera Parameters: " << endl;
bool b_miss_params = false;
string sCameraName = fSettings["Camera.type"];
if(sCameraName == "PinHole")
{
float fx, fy, cx, cy;
mImageScale = 1.f;
// Camera calibration parameters
cv::FileNode node = fSettings["Camera.fx"];
if(!node.empty() && node.isReal())
{
fx = node.real();
}
else
{
std::cerr << "*Camera.fx parameter doesn't exist or is not a real number*" << std::endl;
b_miss_params = true;
}
node = fSettings["Camera.fy"];
if(!node.empty() && node.isReal())
{
fy = node.real();
}
else
{
std::cerr << "*Camera.fy parameter doesn't exist or is not a real number*" << std::endl;
b_miss_params = true;
}
node = fSettings["Camera.cx"];
if(!node.empty() && node.isReal())
{
cx = node.real();
}
else
{
std::cerr << "*Camera.cx parameter doesn't exist or is not a real number*" << std::endl;
b_miss_params = true;
}
node = fSettings["Camera.cy"];
if(!node.empty() && node.isReal())
{
cy = node.real();
}
else
{
std::cerr << "*Camera.cy parameter doesn't exist or is not a real number*" << std::endl;
b_miss_params = true;
}
// Distortion parameters
node = fSettings["Camera.k1"];
if(!node.empty() && node.isReal())
{
mDistCoef.at<float>(0) = node.real();
}
else
{
std::cerr << "*Camera.k1 parameter doesn't exist or is not a real number*" << std::endl;
b_miss_params = true;
}
node = fSettings["Camera.k2"];
if(!node.empty() && node.isReal())
{
mDistCoef.at<float>(1) = node.real();
}
else
{
std::cerr << "*Camera.k2 parameter doesn't exist or is not a real number*" << std::endl;
b_miss_params = true;
}
node = fSettings["Camera.p1"];
if(!node.empty() && node.isReal())
{
mDistCoef.at<float>(2) = node.real();
}
else
{
std::cerr << "*Camera.p1 parameter doesn't exist or is not a real number*" << std::endl;
b_miss_params = true;
}
node = fSettings["Camera.p2"];
if(!node.empty() && node.isReal())
{
mDistCoef.at<float>(3) = node.real();
}
else
{
std::cerr << "*Camera.p2 parameter doesn't exist or is not a real number*" << std::endl;
b_miss_params = true;
}
node = fSettings["Camera.k3"];
if(!node.empty() && node.isReal())
{
mDistCoef.resize(5);
mDistCoef.at<float>(4) = node.real();
}
node = fSettings["Camera.imageScale"];
if(!node.empty() && node.isReal())
{
mImageScale = node.real();
}
if(b_miss_params)
{
return false;
}
if(mImageScale != 1.f)
{
// K matrix parameters must be scaled.
fx = fx * mImageScale;
fy = fy * mImageScale;
cx = cx * mImageScale;
cy = cy * mImageScale;
}
vector<float> vCamCalib{fx,fy,cx,cy};
mpCamera = new Pinhole(vCamCalib);
mpCamera = mpAtlas->AddCamera(mpCamera);
std::cout << "- Camera: Pinhole" << std::endl;
std::cout << "- Image scale: " << mImageScale << std::endl;
std::cout << "- fx: " << fx << std::endl;
std::cout << "- fy: " << fy << std::endl;
std::cout << "- cx: " << cx << std::endl;
std::cout << "- cy: " << cy << std::endl;
std::cout << "- k1: " << mDistCoef.at<float>(0) << std::endl;
std::cout << "- k2: " << mDistCoef.at<float>(1) << std::endl;
std::cout << "- p1: " << mDistCoef.at<float>(2) << std::endl;
std::cout << "- p2: " << mDistCoef.at<float>(3) << std::endl;
if(mDistCoef.rows==5)
std::cout << "- k3: " << mDistCoef.at<float>(4) << std::endl;
mK = cv::Mat::eye(3,3,CV_32F);
mK.at<float>(0,0) = fx;
mK.at<float>(1,1) = fy;
mK.at<float>(0,2) = cx;
mK.at<float>(1,2) = cy;
mK_.setIdentity();
mK_(0,0) = fx;
mK_(1,1) = fy;
mK_(0,2) = cx;
mK_(1,2) = cy;
}
else if(sCameraName == "KannalaBrandt8")
{
float fx, fy, cx, cy;
float k1, k2, k3, k4;
mImageScale = 1.f;
// Camera calibration parameters
cv::FileNode node = fSettings["Camera.fx"];
if(!node.empty() && node.isReal())
{
fx = node.real();
}
else
{
std::cerr << "*Camera.fx parameter doesn't exist or is not a real number*" << std::endl;
b_miss_params = true;
}
node = fSettings["Camera.fy"];
if(!node.empty() && node.isReal())
{
fy = node.real();
}
else
{
std::cerr << "*Camera.fy parameter doesn't exist or is not a real number*" << std::endl;
b_miss_params = true;
}
node = fSettings["Camera.cx"];
if(!node.empty() && node.isReal())
{
cx = node.real();
}
else
{
std::cerr << "*Camera.cx parameter doesn't exist or is not a real number*" << std::endl;
b_miss_params = true;
}
node = fSettings["Camera.cy"];
if(!node.empty() && node.isReal())
{
cy = node.real();
}
else
{
std::cerr << "*Camera.cy parameter doesn't exist or is not a real number*" << std::endl;
b_miss_params = true;
}
// Distortion parameters
node = fSettings["Camera.k1"];
if(!node.empty() && node.isReal())
{
k1 = node.real();
}
else
{
std::cerr << "*Camera.k1 parameter doesn't exist or is not a real number*" << std::endl;
b_miss_params = true;
}
node = fSettings["Camera.k2"];
if(!node.empty() && node.isReal())
{
k2 = node.real();
}
else
{
std::cerr << "*Camera.k2 parameter doesn't exist or is not a real number*" << std::endl;
b_miss_params = true;
}
node = fSettings["Camera.k3"];
if(!node.empty() && node.isReal())
{
k3 = node.real();
}
else
{
std::cerr << "*Camera.k3 parameter doesn't exist or is not a real number*" << std::endl;
b_miss_params = true;
}
node = fSettings["Camera.k4"];
if(!node.empty() && node.isReal())
{
k4 = node.real();
}
else
{
std::cerr << "*Camera.k4 parameter doesn't exist or is not a real number*" << std::endl;
b_miss_params = true;
}
node = fSettings["Camera.imageScale"];
if(!node.empty() && node.isReal())
{
mImageScale = node.real();
}
if(!b_miss_params)
{
if(mImageScale != 1.f)
{
// K matrix parameters must be scaled.
fx = fx * mImageScale;
fy = fy * mImageScale;
cx = cx * mImageScale;
cy = cy * mImageScale;
}
vector<float> vCamCalib{fx,fy,cx,cy,k1,k2,k3,k4};
mpCamera = new KannalaBrandt8(vCamCalib);
mpCamera = mpAtlas->AddCamera(mpCamera);
std::cout << "- Camera: Fisheye" << std::endl;
std::cout << "- Image scale: " << mImageScale << std::endl;
std::cout << "- fx: " << fx << std::endl;
std::cout << "- fy: " << fy << std::endl;
std::cout << "- cx: " << cx << std::endl;
std::cout << "- cy: " << cy << std::endl;
std::cout << "- k1: " << k1 << std::endl;
std::cout << "- k2: " << k2 << std::endl;
std::cout << "- k3: " << k3 << std::endl;
std::cout << "- k4: " << k4 << std::endl;
mK = cv::Mat::eye(3,3,CV_32F);
mK.at<float>(0,0) = fx;
mK.at<float>(1,1) = fy;
mK.at<float>(0,2) = cx;
mK.at<float>(1,2) = cy;
mK_.setIdentity();
mK_(0,0) = fx;
mK_(1,1) = fy;
mK_(0,2) = cx;
mK_(1,2) = cy;
}
if(mSensor==System::STEREO || mSensor==System::IMU_STEREO || mSensor==System::IMU_RGBD){
// Right camera
// Camera calibration parameters
cv::FileNode node = fSettings["Camera2.fx"];
if(!node.empty() && node.isReal())
{
fx = node.real();
}
else
{
std::cerr << "*Camera2.fx parameter doesn't exist or is not a real number*" << std::endl;
b_miss_params = true;
}
node = fSettings["Camera2.fy"];
if(!node.empty() && node.isReal())
{
fy = node.real();
}
else
{
std::cerr << "*Camera2.fy parameter doesn't exist or is not a real number*" << std::endl;
b_miss_params = true;
}
node = fSettings["Camera2.cx"];
if(!node.empty() && node.isReal())
{
cx = node.real();
}
else
{
std::cerr << "*Camera2.cx parameter doesn't exist or is not a real number*" << std::endl;
b_miss_params = true;
}
node = fSettings["Camera2.cy"];
if(!node.empty() && node.isReal())
{
cy = node.real();
}
else
{
std::cerr << "*Camera2.cy parameter doesn't exist or is not a real number*" << std::endl;
b_miss_params = true;
}
// Distortion parameters
node = fSettings["Camera2.k1"];
if(!node.empty() && node.isReal())
{
k1 = node.real();
}
else
{
std::cerr << "*Camera2.k1 parameter doesn't exist or is not a real number*" << std::endl;
b_miss_params = true;
}
node = fSettings["Camera2.k2"];
if(!node.empty() && node.isReal())
{
k2 = node.real();
}
else
{
std::cerr << "*Camera2.k2 parameter doesn't exist or is not a real number*" << std::endl;
b_miss_params = true;
}
node = fSettings["Camera2.k3"];
if(!node.empty() && node.isReal())
{
k3 = node.real();
}
else
{
std::cerr << "*Camera2.k3 parameter doesn't exist or is not a real number*" << std::endl;
b_miss_params = true;
}
node = fSettings["Camera2.k4"];
if(!node.empty() && node.isReal())
{
k4 = node.real();
}
else
{
std::cerr << "*Camera2.k4 parameter doesn't exist or is not a real number*" << std::endl;
b_miss_params = true;
}
int leftLappingBegin = -1;
int leftLappingEnd = -1;
int rightLappingBegin = -1;
int rightLappingEnd = -1;
node = fSettings["Camera.lappingBegin"];
if(!node.empty() && node.isInt())
{
leftLappingBegin = node.operator int();
}
else
{
std::cout << "WARNING: Camera.lappingBegin not correctly defined" << std::endl;
}
node = fSettings["Camera.lappingEnd"];
if(!node.empty() && node.isInt())
{
leftLappingEnd = node.operator int();
}
else
{
std::cout << "WARNING: Camera.lappingEnd not correctly defined" << std::endl;
}
node = fSettings["Camera2.lappingBegin"];
if(!node.empty() && node.isInt())
{
rightLappingBegin = node.operator int();
}
else
{
std::cout << "WARNING: Camera2.lappingBegin not correctly defined" << std::endl;
}
node = fSettings["Camera2.lappingEnd"];
if(!node.empty() && node.isInt())
{
rightLappingEnd = node.operator int();
}
else
{
std::cout << "WARNING: Camera2.lappingEnd not correctly defined" << std::endl;
}
node = fSettings["Tlr"];
cv::Mat cvTlr;
if(!node.empty())
{
cvTlr = node.mat();
if(cvTlr.rows != 3 || cvTlr.cols != 4)
{
std::cerr << "*Tlr matrix has to be a 3x4 transformation matrix*" << std::endl;
b_miss_params = true;
}
}
else
{
std::cerr << "*Tlr matrix doesn't exist*" << std::endl;
b_miss_params = true;
}
if(!b_miss_params)
{
if(mImageScale != 1.f)
{
// K matrix parameters must be scaled.
fx = fx * mImageScale;
fy = fy * mImageScale;
cx = cx * mImageScale;
cy = cy * mImageScale;
leftLappingBegin = leftLappingBegin * mImageScale;
leftLappingEnd = leftLappingEnd * mImageScale;
rightLappingBegin = rightLappingBegin * mImageScale;
rightLappingEnd = rightLappingEnd * mImageScale;
}
static_cast<KannalaBrandt8*>(mpCamera)->mvLappingArea[0] = leftLappingBegin;
static_cast<KannalaBrandt8*>(mpCamera)->mvLappingArea[1] = leftLappingEnd;
mpFrameDrawer->both = true;
vector<float> vCamCalib2{fx,fy,cx,cy,k1,k2,k3,k4};
mpCamera2 = new KannalaBrandt8(vCamCalib2);
mpCamera2 = mpAtlas->AddCamera(mpCamera2);
mTlr = Converter::toSophus(cvTlr);
static_cast<KannalaBrandt8*>(mpCamera2)->mvLappingArea[0] = rightLappingBegin;
static_cast<KannalaBrandt8*>(mpCamera2)->mvLappingArea[1] = rightLappingEnd;
std::cout << "- Camera1 Lapping: " << leftLappingBegin << ", " << leftLappingEnd << std::endl;
std::cout << std::endl << "Camera2 Parameters:" << std::endl;
std::cout << "- Camera: Fisheye" << std::endl;
std::cout << "- Image scale: " << mImageScale << std::endl;
std::cout << "- fx: " << fx << std::endl;
std::cout << "- fy: " << fy << std::endl;
std::cout << "- cx: " << cx << std::endl;
std::cout << "- cy: " << cy << std::endl;
std::cout << "- k1: " << k1 << std::endl;
std::cout << "- k2: " << k2 << std::endl;
std::cout << "- k3: " << k3 << std::endl;
std::cout << "- k4: " << k4 << std::endl;
std::cout << "- mTlr: \n" << cvTlr << std::endl;
std::cout << "- Camera2 Lapping: " << rightLappingBegin << ", " << rightLappingEnd << std::endl;
}
}
if(b_miss_params)
{
return false;
}
}
else
{
std::cerr << "*Not Supported Camera Sensor*" << std::endl;
std::cerr << "Check an example configuration file with the desired sensor" << std::endl;
}
if(mSensor==System::STEREO || mSensor==System::RGBD || mSensor==System::IMU_STEREO || mSensor==System::IMU_RGBD )
{
cv::FileNode node = fSettings["Camera.bf"];
if(!node.empty() && node.isReal())
{
mbf = node.real();
if(mImageScale != 1.f)
{
mbf *= mImageScale;
}
}
else
{
std::cerr << "*Camera.bf parameter doesn't exist or is not a real number*" << std::endl;
b_miss_params = true;
}
}
float fps = fSettings["Camera.fps"];
if(fps==0)
fps=30;
// Max/Min Frames to insert keyframes and to check relocalisation
mMinFrames = 0;
mMaxFrames = fps;
cout << "- fps: " << fps << endl;
int nRGB = fSettings["Camera.RGB"];
mbRGB = nRGB;
if(mbRGB)
cout << "- color order: RGB (ignored if grayscale)" << endl;
else
cout << "- color order: BGR (ignored if grayscale)" << endl;
if(mSensor==System::STEREO || mSensor==System::RGBD || mSensor==System::IMU_STEREO || mSensor==System::IMU_RGBD)
{
float fx = mpCamera->getParameter(0);
cv::FileNode node = fSettings["ThDepth"];
if(!node.empty() && node.isReal())
{
mThDepth = node.real();
mThDepth = mbf*mThDepth/fx;
cout << endl << "Depth Threshold (Close/Far Points): " << mThDepth << endl;
}
else
{
std::cerr << "*ThDepth parameter doesn't exist or is not a real number*" << std::endl;
b_miss_params = true;
}
}
if(mSensor==System::RGBD || mSensor==System::IMU_RGBD)
{
cv::FileNode node = fSettings["DepthMapFactor"];
if(!node.empty() && node.isReal())
{
mDepthMapFactor = node.real();
if(fabs(mDepthMapFactor)<1e-5)
mDepthMapFactor=1;
else
mDepthMapFactor = 1.0f/mDepthMapFactor;
}
else
{
std::cerr << "*DepthMapFactor parameter doesn't exist or is not a real number*" << std::endl;
b_miss_params = true;
}
}
if(b_miss_params)
{
return false;
}
return true;
}
bool Tracking::ParseORBParamFile(cv::FileStorage &fSettings)
{
bool b_miss_params = false;
int nFeatures, nLevels, fIniThFAST, fMinThFAST;
float fScaleFactor;
cv::FileNode node = fSettings["ORBextractor.nFeatures"];
if(!node.empty() && node.isInt())
{
nFeatures = node.operator int();
}
else
{
std::cerr << "*ORBextractor.nFeatures parameter doesn't exist or is not an integer*" << std::endl;
b_miss_params = true;
}
node = fSettings["ORBextractor.scaleFactor"];
if(!node.empty() && node.isReal())
{
fScaleFactor = node.real();
}
else
{
std::cerr << "*ORBextractor.scaleFactor parameter doesn't exist or is not a real number*" << std::endl;
b_miss_params = true;
}
node = fSettings["ORBextractor.nLevels"];
if(!node.empty() && node.isInt())
{
nLevels = node.operator int();
}
else
{
std::cerr << "*ORBextractor.nLevels parameter doesn't exist or is not an integer*" << std::endl;
b_miss_params = true;
}
node = fSettings["ORBextractor.iniThFAST"];
if(!node.empty() && node.isInt())
{
fIniThFAST = node.operator int();
}
else
{
std::cerr << "*ORBextractor.iniThFAST parameter doesn't exist or is not an integer*" << std::endl;
b_miss_params = true;
}
node = fSettings["ORBextractor.minThFAST"];
if(!node.empty() && node.isInt())
{
fMinThFAST = node.operator int();
}
else
{
std::cerr << "*ORBextractor.minThFAST parameter doesn't exist or is not an integer*" << std::endl;
b_miss_params = true;
}
if(b_miss_params)
{
return false;
}
mpORBextractorLeft = new ORBextractor(nFeatures,fScaleFactor,nLevels,fIniThFAST,fMinThFAST);
if(mSensor==System::STEREO || mSensor==System::IMU_STEREO)
mpORBextractorRight = new ORBextractor(nFeatures,fScaleFactor,nLevels,fIniThFAST,fMinThFAST);
if(mSensor==System::MONOCULAR || mSensor==System::IMU_MONOCULAR)
mpIniORBextractor = new ORBextractor(5*nFeatures,fScaleFactor,nLevels,fIniThFAST,fMinThFAST);
cout << endl << "ORB Extractor Parameters: " << endl;
cout << "- Number of Features: " << nFeatures << endl;
cout << "- Scale Levels: " << nLevels << endl;
cout << "- Scale Factor: " << fScaleFactor << endl;
cout << "- Initial Fast Threshold: " << fIniThFAST << endl;
cout << "- Minimum Fast Threshold: " << fMinThFAST << endl;
return true;
}
bool Tracking::ParseIMUParamFile(cv::FileStorage &fSettings)
{
bool b_miss_params = false;
cv::Mat cvTbc;
cv::FileNode node = fSettings["Tbc"];
if(!node.empty())
{
cvTbc = node.mat();
if(cvTbc.rows != 4 || cvTbc.cols != 4)
{
std::cerr << "*Tbc matrix has to be a 4x4 transformation matrix*" << std::endl;
b_miss_params = true;
}
}
else
{
std::cerr << "*Tbc matrix doesn't exist*" << std::endl;
b_miss_params = true;
}
cout << endl;
cout << "Left camera to Imu Transform (Tbc): " << endl << cvTbc << endl;
Eigen::Matrix<float,4,4,Eigen::RowMajor> eigTbc(cvTbc.ptr<float>(0));
Sophus::SE3f Tbc(eigTbc);
node = fSettings["InsertKFsWhenLost"];
mInsertKFsLost = true;
if(!node.empty() && node.isInt())
{
mInsertKFsLost = (bool) node.operator int();
}
if(!mInsertKFsLost)
cout << "Do not insert keyframes when visual tracking is lost" << endl;
float Ng, Na, Ngw, Naw;
node = fSettings["IMU.Frequency"];
if(!node.empty() && node.isInt())
{
mImuFreq = node.operator int();
mImuPer = 0.001; //1.0 / (double) mImuFreq;
}
else
{
std::cerr << "*IMU.Frequency parameter doesn't exist or is not an integer*" << std::endl;
b_miss_params = true;
}
node = fSettings["IMU.NoiseGyro"];
if(!node.empty() && node.isReal())
{
Ng = node.real();
}
else
{
std::cerr << "*IMU.NoiseGyro parameter doesn't exist or is not a real number*" << std::endl;
b_miss_params = true;
}
node = fSettings["IMU.NoiseAcc"];
if(!node.empty() && node.isReal())
{
Na = node.real();
}
else
{
std::cerr << "*IMU.NoiseAcc parameter doesn't exist or is not a real number*" << std::endl;
b_miss_params = true;
}
node = fSettings["IMU.GyroWalk"];
if(!node.empty() && node.isReal())
{
Ngw = node.real();
}
else
{
std::cerr << "*IMU.GyroWalk parameter doesn't exist or is not a real number*" << std::endl;
b_miss_params = true;
}
node = fSettings["IMU.AccWalk"];
if(!node.empty() && node.isReal())
{
Naw = node.real();
}
else
{
std::cerr << "*IMU.AccWalk parameter doesn't exist or is not a real number*" << std::endl;
b_miss_params = true;
}
node = fSettings["IMU.fastInit"];
mFastInit = false;
if(!node.empty())
{
mFastInit = static_cast<int>(fSettings["IMU.fastInit"]) != 0;
}
if(mFastInit)
cout << "Fast IMU initialization. Acceleration is not checked \n";
if(b_miss_params)
{
return false;
}
const float sf = sqrt(mImuFreq);
cout << endl;
cout << "IMU frequency: " << mImuFreq << " Hz" << endl;
cout << "IMU gyro noise: " << Ng << " rad/s/sqrt(Hz)" << endl;
cout << "IMU gyro walk: " << Ngw << " rad/s^2/sqrt(Hz)" << endl;
cout << "IMU accelerometer noise: " << Na << " m/s^2/sqrt(Hz)" << endl;
cout << "IMU accelerometer walk: " << Naw << " m/s^3/sqrt(Hz)" << endl;
mpImuCalib = new IMU::Calib(Tbc,Ng*sf,Na*sf,Ngw/sf,Naw/sf);
mpImuPreintegratedFromLastKF = new IMU::Preintegrated(IMU::Bias(),*mpImuCalib);
return true;
}
void Tracking::SetLocalMapper(LocalMapping *pLocalMapper)
{
mpLocalMapper=pLocalMapper;
}
void Tracking::SetLoopClosing(LoopClosing *pLoopClosing)
{
mpLoopClosing=pLoopClosing;
}
void Tracking::SetViewer(Viewer *pViewer)
{
mpViewer=pViewer;
}
void Tracking::SetStepByStep(bool bSet)
{
bStepByStep = bSet;
}
bool Tracking::GetStepByStep()
{
return bStepByStep;
}
Sophus::SE3f Tracking::GrabImageStereo(const cv::Mat &imRectLeft, const cv::Mat &imRectRight, const double &timestamp, string filename)
{
//cout << "GrabImageStereo" << endl;
mImGray = imRectLeft;
cv::Mat imGrayRight = imRectRight;
mImRight = imRectRight;
if(mImGray.channels()==3)
{
//cout << "Image with 3 channels" << endl;
if(mbRGB)
{
cvtColor(mImGray,mImGray,cv::COLOR_RGB2GRAY);
cvtColor(imGrayRight,imGrayRight,cv::COLOR_RGB2GRAY);
}
else
{
cvtColor(mImGray,mImGray,cv::COLOR_BGR2GRAY);
cvtColor(imGrayRight,imGrayRight,cv::COLOR_BGR2GRAY);
}
}
else if(mImGray.channels()==4)
{
//cout << "Image with 4 channels" << endl;
if(mbRGB)
{
cvtColor(mImGray,mImGray,cv::COLOR_RGBA2GRAY);
cvtColor(imGrayRight,imGrayRight,cv::COLOR_RGBA2GRAY);
}
else
{
cvtColor(mImGray,mImGray,cv::COLOR_BGRA2GRAY);
cvtColor(imGrayRight,imGrayRight,cv::COLOR_BGRA2GRAY);
}
}
//cout << "Incoming frame creation" << endl;
if (mSensor == System::STEREO && !mpCamera2)
mCurrentFrame = Frame(mImGray,imGrayRight,timestamp,mpORBextractorLeft,mpORBextractorRight,mpORBVocabulary,mK,mDistCoef,mbf,mThDepth,mpCamera);
else if(mSensor == System::STEREO && mpCamera2)
mCurrentFrame = Frame(mImGray,imGrayRight,timestamp,mpORBextractorLeft,mpORBextractorRight,mpORBVocabulary,mK,mDistCoef,mbf,mThDepth,mpCamera,mpCamera2,mTlr);
else if(mSensor == System::IMU_STEREO && !mpCamera2)
mCurrentFrame = Frame(mImGray,imGrayRight,timestamp,mpORBextractorLeft,mpORBextractorRight,mpORBVocabulary,mK,mDistCoef,mbf,mThDepth,mpCamera,&mLastFrame,*mpImuCalib);
else if(mSensor == System::IMU_STEREO && mpCamera2)
mCurrentFrame = Frame(mImGray,imGrayRight,timestamp,mpORBextractorLeft,mpORBextractorRight,mpORBVocabulary,mK,mDistCoef,mbf,mThDepth,mpCamera,mpCamera2,mTlr,&mLastFrame,*mpImuCalib);
//cout << "Incoming frame ended" << endl;
mCurrentFrame.mNameFile = filename;
mCurrentFrame.mnDataset = mnNumDataset;
#ifdef REGISTER_TIMES
vdORBExtract_ms.push_back(mCurrentFrame.mTimeORB_Ext);
vdStereoMatch_ms.push_back(mCurrentFrame.mTimeStereoMatch);
#endif
//cout << "Tracking start" << endl;
Track();
//cout << "Tracking end" << endl;
return mCurrentFrame.GetPose();
}
Sophus::SE3f Tracking::GrabImageRGBD(const cv::Mat &imRGB,const cv::Mat &imD, const double &timestamp, string filename)
{
mImGray = imRGB;
cv::Mat imDepth = imD;
if(mImGray.channels()==3)
{
if(mbRGB)
cvtColor(mImGray,mImGray,cv::COLOR_RGB2GRAY);
else
cvtColor(mImGray,mImGray,cv::COLOR_BGR2GRAY);
}
else if(mImGray.channels()==4)
{
if(mbRGB)
cvtColor(mImGray,mImGray,cv::COLOR_RGBA2GRAY);
else
cvtColor(mImGray,mImGray,cv::COLOR_BGRA2GRAY);
}
if((fabs(mDepthMapFactor-1.0f)>1e-5) || imDepth.type()!=CV_32F)
imDepth.convertTo(imDepth,CV_32F,mDepthMapFactor);
if (mSensor == System::RGBD)
mCurrentFrame = Frame(mImGray,imDepth,timestamp,mpORBextractorLeft,mpORBVocabulary,mK,mDistCoef,mbf,mThDepth,mpCamera);
else if(mSensor == System::IMU_RGBD)
mCurrentFrame = Frame(mImGray,imDepth,timestamp,mpORBextractorLeft,mpORBVocabulary,mK,mDistCoef,mbf,mThDepth,mpCamera,&mLastFrame,*mpImuCalib);
mCurrentFrame.mNameFile = filename;
mCurrentFrame.mnDataset = mnNumDataset;
#ifdef REGISTER_TIMES
vdORBExtract_ms.push_back(mCurrentFrame.mTimeORB_Ext);
#endif
Track();
return mCurrentFrame.GetPose();
}
Sophus::SE3f Tracking::GrabImageMonocular(const cv::Mat &im, const double &timestamp, string filename)
{
mImGray = im;
if(mImGray.channels()==3)
{
if(mbRGB)
cvtColor(mImGray,mImGray,cv::COLOR_RGB2GRAY);
else
cvtColor(mImGray,mImGray,cv::COLOR_BGR2GRAY);
}
else if(mImGray.channels()==4)
{
if(mbRGB)
cvtColor(mImGray,mImGray,cv::COLOR_RGBA2GRAY);
else
cvtColor(mImGray,mImGray,cv::COLOR_BGRA2GRAY);
}
if (mSensor == System::MONOCULAR)
{
if(mState==NOT_INITIALIZED || mState==NO_IMAGES_YET ||(lastID - initID) < mMaxFrames)
mCurrentFrame = Frame(mImGray,timestamp,mpIniORBextractor,mpORBVocabulary,mpCamera,mDistCoef,mbf,mThDepth);
else
mCurrentFrame = Frame(mImGray,timestamp,mpORBextractorLeft,mpORBVocabulary,mpCamera,mDistCoef,mbf,mThDepth);
}
else if(mSensor == System::IMU_MONOCULAR)
{
if(mState==NOT_INITIALIZED || mState==NO_IMAGES_YET)
{
mCurrentFrame = Frame(mImGray,timestamp,mpIniORBextractor,mpORBVocabulary,mpCamera,mDistCoef,mbf,mThDepth,&mLastFrame,*mpImuCalib);
}
else
mCurrentFrame = Frame(mImGray,timestamp,mpORBextractorLeft,mpORBVocabulary,mpCamera,mDistCoef,mbf,mThDepth,&mLastFrame,*mpImuCalib);
}
if (mState==NO_IMAGES_YET)
t0=timestamp;
mCurrentFrame.mNameFile = filename;
mCurrentFrame.mnDataset = mnNumDataset;
#ifdef REGISTER_TIMES
vdORBExtract_ms.push_back(mCurrentFrame.mTimeORB_Ext);
#endif
lastID = mCurrentFrame.mnId;
Track();
return mCurrentFrame.GetPose();
}
void Tracking::GrabImuData(const IMU::Point &imuMeasurement)
{
unique_lock<mutex> lock(mMutexImuQueue);
mlQueueImuData.push_back(imuMeasurement);
}
void Tracking::PreintegrateIMU()
{
if(!mCurrentFrame.mpPrevFrame)
{
Verbose::PrintMess("non prev frame ", Verbose::VERBOSITY_NORMAL);
mCurrentFrame.setIntegrated();
return;
}
mvImuFromLastFrame.clear();
mvImuFromLastFrame.reserve(mlQueueImuData.size());
if(mlQueueImuData.size() == 0)
{
Verbose::PrintMess("Not IMU data in mlQueueImuData!!", Verbose::VERBOSITY_NORMAL);
mCurrentFrame.setIntegrated();
return;
}
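// Consume the IMU queue: drop measurements older than the previous frame (minus the tolerance mImuPer),
// collect those between the previous and the current frame, and stop at the first measurement at or
// beyond the current frame timestamp, which is kept both for boundary interpolation and in the queue.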
while(true)
{
bool bSleep = false;
{
unique_lock<mutex> lock(mMutexImuQueue);
if(!mlQueueImuData.empty())
{
IMU::Point* m = &mlQueueImuData.front();
cout.precision(17);
if(m->t<mCurrentFrame.mpPrevFrame->mTimeStamp-mImuPer)
{
mlQueueImuData.pop_front();
}
else if(m->t<mCurrentFrame.mTimeStamp-mImuPer)
{
mvImuFromLastFrame.push_back(*m);
mlQueueImuData.pop_front();
}
else
{
mvImuFromLastFrame.push_back(*m);
break;
}
}
else
{
// Queue exhausted: no more IMU measurements are available, stop collecting.
break;
}
}
if(bSleep)
usleep(500);
}
const int n = mvImuFromLastFrame.size()-1;
if(n==0){
cout << "Not enough IMU measurements between frames!!!\n";
return;
}
IMU::Preintegrated* pImuPreintegratedFromLastFrame = new IMU::Preintegrated(mLastFrame.mImuBias,mCurrentFrame.mImuCalib);
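// Midpoint integration: interior samples are averaged pairwise over the interval between them, while the
// first and last intervals are interpolated so that integration starts at the previous frame timestamp and
// ends at the current one. E.g. for the first interval (i == 0):
//   acc = 0.5 * (a_i + a_{i+1} - (a_{i+1} - a_i) * tini / tab),  tstep = t_{i+1} - t_prevFrame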
for(int i=0; i<n; i++)
{
float tstep;
Eigen::Vector3f acc, angVel;
if((i==0) && (i<(n-1)))
{
float tab = mvImuFromLastFrame[i+1].t-mvImuFromLastFrame[i].t;
float tini = mvImuFromLastFrame[i].t-mCurrentFrame.mpPrevFrame->mTimeStamp;
acc = (mvImuFromLastFrame[i].a+mvImuFromLastFrame[i+1].a-
(mvImuFromLastFrame[i+1].a-mvImuFromLastFrame[i].a)*(tini/tab))*0.5f;
angVel = (mvImuFromLastFrame[i].w+mvImuFromLastFrame[i+1].w-
(mvImuFromLastFrame[i+1].w-mvImuFromLastFrame[i].w)*(tini/tab))*0.5f;
tstep = mvImuFromLastFrame[i+1].t-mCurrentFrame.mpPrevFrame->mTimeStamp;
}
else if(i<(n-1))
{
acc = (mvImuFromLastFrame[i].a+mvImuFromLastFrame[i+1].a)*0.5f;
angVel = (mvImuFromLastFrame[i].w+mvImuFromLastFrame[i+1].w)*0.5f;
tstep = mvImuFromLastFrame[i+1].t-mvImuFromLastFrame[i].t;
}
else if((i>0) && (i==(n-1)))
{
float tab = mvImuFromLastFrame[i+1].t-mvImuFromLastFrame[i].t;
float tend = mvImuFromLastFrame[i+1].t-mCurrentFrame.mTimeStamp;
acc = (mvImuFromLastFrame[i].a+mvImuFromLastFrame[i+1].a-
(mvImuFromLastFrame[i+1].a-mvImuFromLastFrame[i].a)*(tend/tab))*0.5f;
angVel = (mvImuFromLastFrame[i].w+mvImuFromLastFrame[i+1].w-
(mvImuFromLastFrame[i+1].w-mvImuFromLastFrame[i].w)*(tend/tab))*0.5f;
tstep = mCurrentFrame.mTimeStamp-mvImuFromLastFrame[i].t;
}
else if((i==0) && (i==(n-1)))
{
acc = mvImuFromLastFrame[i].a;
angVel = mvImuFromLastFrame[i].w;
tstep = mCurrentFrame.mTimeStamp-mCurrentFrame.mpPrevFrame->mTimeStamp;
}
if (!mpImuPreintegratedFromLastKF)
cout << "mpImuPreintegratedFromLastKF does not exist" << endl;
mpImuPreintegratedFromLastKF->IntegrateNewMeasurement(acc,angVel,tstep);
pImuPreintegratedFromLastFrame->IntegrateNewMeasurement(acc,angVel,tstep);
}
mCurrentFrame.mpImuPreintegratedFrame = pImuPreintegratedFromLastFrame;
mCurrentFrame.mpImuPreintegrated = mpImuPreintegratedFromLastKF;
mCurrentFrame.mpLastKeyFrame = mpLastKeyFrame;
mCurrentFrame.setIntegrated();
//Verbose::PrintMess("Preintegration is finished!! ", Verbose::VERBOSITY_DEBUG);
}
bool Tracking::PredictStateIMU()
{
if(!mCurrentFrame.mpPrevFrame)
{
Verbose::PrintMess("No last frame", Verbose::VERBOSITY_NORMAL);
return false;
}
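// Both branches propagate the IMU state from a reference state (the last keyframe if the map was just
// updated, otherwise the last frame) using the preintegrated deltas, with gravity Gz = (0, 0, -g):
//   Rwb2 = Rwb1 * dR(b),  twb2 = twb1 + Vwb1*t + 0.5*Gz*t^2 + Rwb1*dP(b),  Vwb2 = Vwb1 + Gz*t + Rwb1*dV(b)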
if(mbMapUpdated && mpLastKeyFrame)
{
const Eigen::Vector3f twb1 = mpLastKeyFrame->GetImuPosition();
const Eigen::Matrix3f Rwb1 = mpLastKeyFrame->GetImuRotation();
const Eigen::Vector3f Vwb1 = mpLastKeyFrame->GetVelocity();
const Eigen::Vector3f Gz(0, 0, -IMU::GRAVITY_VALUE);
const float t12 = mpImuPreintegratedFromLastKF->dT;
Eigen::Matrix3f Rwb2 = IMU::NormalizeRotation(Rwb1 * mpImuPreintegratedFromLastKF->GetDeltaRotation(mpLastKeyFrame->GetImuBias()));
Eigen::Vector3f twb2 = twb1 + Vwb1*t12 + 0.5f*t12*t12*Gz+ Rwb1*mpImuPreintegratedFromLastKF->GetDeltaPosition(mpLastKeyFrame->GetImuBias());
Eigen::Vector3f Vwb2 = Vwb1 + t12*Gz + Rwb1 * mpImuPreintegratedFromLastKF->GetDeltaVelocity(mpLastKeyFrame->GetImuBias());
mCurrentFrame.SetImuPoseVelocity(Rwb2,twb2,Vwb2);
mCurrentFrame.mImuBias = mpLastKeyFrame->GetImuBias();
mCurrentFrame.mPredBias = mCurrentFrame.mImuBias;
return true;
}
else if(!mbMapUpdated)
{
const Eigen::Vector3f twb1 = mLastFrame.GetImuPosition();
const Eigen::Matrix3f Rwb1 = mLastFrame.GetImuRotation();
const Eigen::Vector3f Vwb1 = mLastFrame.GetVelocity();
const Eigen::Vector3f Gz(0, 0, -IMU::GRAVITY_VALUE);
const float t12 = mCurrentFrame.mpImuPreintegratedFrame->dT;
Eigen::Matrix3f Rwb2 = IMU::NormalizeRotation(Rwb1 * mCurrentFrame.mpImuPreintegratedFrame->GetDeltaRotation(mLastFrame.mImuBias));
Eigen::Vector3f twb2 = twb1 + Vwb1*t12 + 0.5f*t12*t12*Gz+ Rwb1 * mCurrentFrame.mpImuPreintegratedFrame->GetDeltaPosition(mLastFrame.mImuBias);
Eigen::Vector3f Vwb2 = Vwb1 + t12*Gz + Rwb1 * mCurrentFrame.mpImuPreintegratedFrame->GetDeltaVelocity(mLastFrame.mImuBias);
mCurrentFrame.SetImuPoseVelocity(Rwb2,twb2,Vwb2);
mCurrentFrame.mImuBias = mLastFrame.mImuBias;
mCurrentFrame.mPredBias = mCurrentFrame.mImuBias;
return true;
}
else
cout << "not IMU prediction!!" << endl;
return false;
}
void Tracking::ResetFrameIMU()
{
// TODO To implement...
}
void Tracking::Track()
{
if (bStepByStep)
{
std::cout << "Tracking: Waiting to the next step" << std::endl;
while(!mbStep && bStepByStep)
usleep(500);
mbStep = false;
}
if(mpLocalMapper->mbBadImu)
{
cout << "TRACK: Reset map because local mapper set the bad imu flag " << endl;
mpSystem->ResetActiveMap();
return;
}
Map* pCurrentMap = mpAtlas->GetCurrentMap();
if(!pCurrentMap)
{
cout << "ERROR: There is not an active map in the atlas" << endl;
}
if(mState!=NO_IMAGES_YET)
{
if(mLastFrame.mTimeStamp>mCurrentFrame.mTimeStamp)
{
cerr << "ERROR: Frame with a timestamp older than previous frame detected!" << endl;
unique_lock<mutex> lock(mMutexImuQueue);
mlQueueImuData.clear();
CreateMapInAtlas();
return;
}
else if(mCurrentFrame.mTimeStamp>mLastFrame.mTimeStamp+1.0)
{
// cout << mCurrentFrame.mTimeStamp << ", " << mLastFrame.mTimeStamp << endl;
// cout << "id last: " << mLastFrame.mnId << " id curr: " << mCurrentFrame.mnId << endl;
if(mpAtlas->isInertial())
{
if(mpAtlas->isImuInitialized())
{
cout << "Timestamp jump detected. State set to LOST. Reseting IMU integration..." << endl;
if(!pCurrentMap->GetIniertialBA2())
{
mpSystem->ResetActiveMap();
}
else
{
CreateMapInAtlas();
}
}
else
{
cout << "Timestamp jump detected, before IMU initialization. Reseting..." << endl;
mpSystem->ResetActiveMap();
}
return;
}
}
}
if ((mSensor == System::IMU_MONOCULAR || mSensor == System::IMU_STEREO || mSensor == System::IMU_RGBD) && mpLastKeyFrame)
mCurrentFrame.SetNewBias(mpLastKeyFrame->GetImuBias());
if(mState==NO_IMAGES_YET)
{
mState = NOT_INITIALIZED;
}
mLastProcessedState=mState;
if ((mSensor == System::IMU_MONOCULAR || mSensor == System::IMU_STEREO || mSensor == System::IMU_RGBD) && !mbCreatedMap)
{
#ifdef REGISTER_TIMES
std::chrono::steady_clock::time_point time_StartPreIMU = std::chrono::steady_clock::now();
#endif
PreintegrateIMU();
#ifdef REGISTER_TIMES
std::chrono::steady_clock::time_point time_EndPreIMU = std::chrono::steady_clock::now();
double timePreImu = std::chrono::duration_cast<std::chrono::duration<double,std::milli> >(time_EndPreIMU - time_StartPreIMU).count();
vdIMUInteg_ms.push_back(timePreImu);
#endif
}
mbCreatedMap = false;
// Get Map Mutex -> Map cannot be changed
unique_lock<mutex> lock(pCurrentMap->mMutexMapUpdate);
mbMapUpdated = false;
int nCurMapChangeIndex = pCurrentMap->GetMapChangeIndex();
int nMapChangeIndex = pCurrentMap->GetLastMapChange();
if(nCurMapChangeIndex>nMapChangeIndex)
{
pCurrentMap->SetLastMapChange(nCurMapChangeIndex);
mbMapUpdated = true;
}
if(mState==NOT_INITIALIZED)
{
if(mSensor==System::STEREO || mSensor==System::RGBD || mSensor==System::IMU_STEREO || mSensor==System::IMU_RGBD)
{
StereoInitialization();
}
else
{
MonocularInitialization();
}
//mpFrameDrawer->Update(this);
if(mState!=OK) // If correctly initialized, mState=OK
{
mLastFrame = Frame(mCurrentFrame);
return;
}
if(mpAtlas->GetAllMaps().size() == 1)
{
mnFirstFrameId = mCurrentFrame.mnId;
}
}
else
{
// System is initialized. Track Frame.
bool bOK;
#ifdef REGISTER_TIMES
std::chrono::steady_clock::time_point time_StartPosePred = std::chrono::steady_clock::now();
#endif
// Initial camera pose estimation using motion model or relocalization (if tracking is lost)
// If the system is not in "only tracking" mode (i.e. it performs mapping and tracking simultaneously), enter this branch.
if(!mbOnlyTracking)
{
// State OK
// Local Mapping is activated. This is the normal behaviour, unless
// you explicitly activate the "only tracking" mode.
if(mState==OK)
{
// Local Mapping might have changed some MapPoints tracked in last frame
// Check whether MapPoints tracked in the last frame were replaced by the local mapping thread, so the references stay consistent.
// Replacements typically come from map point fusion or culling; this ensures the current frame uses the most recent map points.
CheckReplacedInLastFrame();
// If there is no motion model (!mbVelocity) and the IMU is not initialized, or the current frame is within 2 frames of the last relocalization (relocalization recovery), track against the reference keyframe: bOK = TrackReferenceKeyFrame();
if((!mbVelocity && !pCurrentMap->isImuInitialized()) || mCurrentFrame.mnId<mnLastRelocFrameId+2)
{
Verbose::PrintMess("TRACK: Track with respect to the reference KF ", Verbose::VERBOSITY_DEBUG);
bOK = TrackReferenceKeyFrame(); // Match and track against the reference keyframe; suited to static or slowly moving scenes.
}
// Otherwise, prefer motion-model tracking and fall back to reference-keyframe tracking if it fails:
else
{
Verbose::PrintMess("TRACK: Track with motion model", Verbose::VERBOSITY_DEBUG);
bOK = TrackWithMotionModel(); // Predict the camera pose with the constant-velocity motion model; better suited to fast motion.
if(!bOK)
bOK = TrackReferenceKeyFrame();
}
if (!bOK)
{
if ( mCurrentFrame.mnId<=(mnLastRelocFrameId+mnFramesToResetIMU) &&
(mSensor==System::IMU_MONOCULAR || mSensor==System::IMU_STEREO || mSensor == System::IMU_RGBD))
{
mState = LOST;
}
else if(pCurrentMap->KeyFramesInMap()>10)
{
// cout << "KF in map: " << pCurrentMap->KeyFramesInMap() << endl;
mState = RECENTLY_LOST;
mTimeStampLost = mCurrentFrame.mTimeStamp;
}
else
{
mState = LOST;
}
}
}
else
{
if (mState == RECENTLY_LOST)
{
Verbose::PrintMess("Lost for a short time", Verbose::VERBOSITY_NORMAL);
bOK = true;
if((mSensor == System::IMU_MONOCULAR || mSensor == System::IMU_STEREO || mSensor == System::IMU_RGBD))
{
if(pCurrentMap->isImuInitialized())
PredictStateIMU();
else
bOK = false;
if (mCurrentFrame.mTimeStamp-mTimeStampLost>time_recently_lost)
{
mState = LOST;
Verbose::PrintMess("Track Lost...", Verbose::VERBOSITY_NORMAL);
bOK=false;
}
}
else
{
// Relocalization
bOK = Relocalization();
//std::cout << "mCurrentFrame.mTimeStamp:" << to_string(mCurrentFrame.mTimeStamp) << std::endl;
//std::cout << "mTimeStampLost:" << to_string(mTimeStampLost) << std::endl;
if(mCurrentFrame.mTimeStamp-mTimeStampLost>3.0f && !bOK)
{
mState = LOST;
Verbose::PrintMess("Track Lost...", Verbose::VERBOSITY_NORMAL);
bOK=false;
}
}
}
else if (mState == LOST)
{
Verbose::PrintMess("A new map is started...", Verbose::VERBOSITY_NORMAL);
if (pCurrentMap->KeyFramesInMap()<10)
{
mpSystem->ResetActiveMap();
Verbose::PrintMess("Reseting current map...", Verbose::VERBOSITY_NORMAL);
}else
CreateMapInAtlas();
if(mpLastKeyFrame)
mpLastKeyFrame = static_cast<KeyFrame*>(NULL);
Verbose::PrintMess("done", Verbose::VERBOSITY_NORMAL);
return;
}
}
}
else
{
// Localization Mode: Local Mapping is deactivated (TODO Not available in inertial mode)
// In "only tracking" mode, mapping is skipped and the system focuses on camera localization.
if(mState==LOST)
{
if(mSensor == System::IMU_MONOCULAR || mSensor == System::IMU_STEREO || mSensor == System::IMU_RGBD)
Verbose::PrintMess("IMU. State LOST", Verbose::VERBOSITY_NORMAL);
bOK = Relocalization();
}
else
{
if(!mbVO)
{
// In last frame we tracked enough MapPoints in the map
if(mbVelocity)
{
bOK = TrackWithMotionModel();
}
else
{
bOK = TrackReferenceKeyFrame();
}
}
else
{
// In last frame we tracked mainly "visual odometry" points.
// We compute two camera poses, one from motion model and one doing relocalization.
// If relocalization is successful we choose that solution, otherwise we retain
// the "visual odometry" solution.
bool bOKMM = false;
bool bOKReloc = false;
vector<MapPoint*> vpMPsMM;
vector<bool> vbOutMM;
Sophus::SE3f TcwMM;
if(mbVelocity)
{
bOKMM = TrackWithMotionModel();
vpMPsMM = mCurrentFrame.mvpMapPoints;
vbOutMM = mCurrentFrame.mvbOutlier;
TcwMM = mCurrentFrame.GetPose();
}
bOKReloc = Relocalization();
if(bOKMM && !bOKReloc)
{
mCurrentFrame.SetPose(TcwMM);
mCurrentFrame.mvpMapPoints = vpMPsMM;
mCurrentFrame.mvbOutlier = vbOutMM;
if(mbVO)
{
for(int i =0; i<mCurrentFrame.N; i++)
{
if(mCurrentFrame.mvpMapPoints[i] && !mCurrentFrame.mvbOutlier[i])
{
mCurrentFrame.mvpMapPoints[i]->IncreaseFound();
}
}
}
}
else if(bOKReloc)
{
mbVO = false;
}
bOK = bOKReloc || bOKMM;
}
}
}
if(!mCurrentFrame.mpReferenceKF)
mCurrentFrame.mpReferenceKF = mpReferenceKF;
#ifdef REGISTER_TIMES
std::chrono::steady_clock::time_point time_EndPosePred = std::chrono::steady_clock::now();
double timePosePred = std::chrono::duration_cast<std::chrono::duration<double,std::milli> >(time_EndPosePred - time_StartPosePred).count();
vdPosePred_ms.push_back(timePosePred);
#endif
#ifdef REGISTER_TIMES
std::chrono::steady_clock::time_point time_StartLMTrack = std::chrono::steady_clock::now();
#endif
// If we have an initial estimation of the camera pose and matching. Track the local map.
if(!mbOnlyTracking)
{
if(bOK)
{
bOK = TrackLocalMap();
}
if(!bOK)
cout << "Fail to track local map!" << endl;
}
else
{
// mbVO true means that there are few matches to MapPoints in the map. We cannot retrieve
// a local map and therefore we do not perform TrackLocalMap(). Once the system relocalizes
// the camera we will use the local map again.
if(bOK && !mbVO)
bOK = TrackLocalMap();
}
if(bOK)
mState = OK;
else if (mState == OK)
{
if (mSensor == System::IMU_MONOCULAR || mSensor == System::IMU_STEREO || mSensor == System::IMU_RGBD)
{
Verbose::PrintMess("Track lost for less than one second...", Verbose::VERBOSITY_NORMAL);
if(!pCurrentMap->isImuInitialized() || !pCurrentMap->GetIniertialBA2())
{
cout << "IMU is not or recently initialized. Reseting active map..." << endl;
mpSystem->ResetActiveMap();
}
mState=RECENTLY_LOST;
}
else
mState=RECENTLY_LOST; // visual to lost
/*if(mCurrentFrame.mnId>mnLastRelocFrameId+mMaxFrames)
{*/
mTimeStampLost = mCurrentFrame.mTimeStamp;
//}
}
// Save frame if recent relocalization, since it is used for IMU reset (as we are making a copy, it should be done once mCurrentFrame is completely modified)
if((mCurrentFrame.mnId<(mnLastRelocFrameId+mnFramesToResetIMU)) && (mCurrentFrame.mnId > mnFramesToResetIMU) &&
(mSensor == System::IMU_MONOCULAR || mSensor == System::IMU_STEREO || mSensor == System::IMU_RGBD) && pCurrentMap->isImuInitialized())
{
// TODO check this situation
Verbose::PrintMess("Saving pointer to frame. imu needs reset...", Verbose::VERBOSITY_NORMAL);
Frame* pF = new Frame(mCurrentFrame);
pF->mpPrevFrame = new Frame(mLastFrame);
// Load preintegration
pF->mpImuPreintegratedFrame = new IMU::Preintegrated(mCurrentFrame.mpImuPreintegratedFrame);
}
if(pCurrentMap->isImuInitialized())
{
if(bOK)
{
if(mCurrentFrame.mnId==(mnLastRelocFrameId+mnFramesToResetIMU))
{
cout << "RESETING FRAME!!!" << endl;
ResetFrameIMU();
}
else if(mCurrentFrame.mnId>(mnLastRelocFrameId+30))
mLastBias = mCurrentFrame.mImuBias;
}
}
#ifdef REGISTER_TIMES
std::chrono::steady_clock::time_point time_EndLMTrack = std::chrono::steady_clock::now();
double timeLMTrack = std::chrono::duration_cast<std::chrono::duration<double,std::milli> >(time_EndLMTrack - time_StartLMTrack).count();
vdLMTrack_ms.push_back(timeLMTrack);
#endif
// Update drawer
mpFrameDrawer->Update(this);
if(mCurrentFrame.isSet())
mpMapDrawer->SetCurrentCameraPose(mCurrentFrame.GetPose());
if(bOK || mState==RECENTLY_LOST)
{
// Update motion model
if(mLastFrame.isSet() && mCurrentFrame.isSet())
{
Sophus::SE3f LastTwc = mLastFrame.GetPose().inverse();
mVelocity = mCurrentFrame.GetPose() * LastTwc;
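// mVelocity = Tcw(current) * Twc(last), i.e. the relative transform from the last camera frame to the
// current one; TrackWithMotionModel() reuses it as a constant-velocity prediction for the next frame.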
mbVelocity = true;
}
else {
mbVelocity = false;
}
if(mSensor == System::IMU_MONOCULAR || mSensor == System::IMU_STEREO || mSensor == System::IMU_RGBD)
mpMapDrawer->SetCurrentCameraPose(mCurrentFrame.GetPose());
// Clean VO matches
for(int i=0; i<mCurrentFrame.N; i++)
{
MapPoint* pMP = mCurrentFrame.mvpMapPoints[i];
if(pMP)
if(pMP->Observations()<1)
{
mCurrentFrame.mvbOutlier[i] = false;
mCurrentFrame.mvpMapPoints[i]=static_cast<MapPoint*>(NULL);
}
}
// Delete temporal MapPoints
for(list<MapPoint*>::iterator lit = mlpTemporalPoints.begin(), lend = mlpTemporalPoints.end(); lit!=lend; lit++)
{
MapPoint* pMP = *lit;
delete pMP;
}
mlpTemporalPoints.clear();
#ifdef REGISTER_TIMES
std::chrono::steady_clock::time_point time_StartNewKF = std::chrono::steady_clock::now();
#endif
bool bNeedKF = NeedNewKeyFrame();
// Check if we need to insert a new keyframe
// if(bNeedKF && bOK)
if(bNeedKF && (bOK || (mInsertKFsLost && mState==RECENTLY_LOST &&
(mSensor == System::IMU_MONOCULAR || mSensor == System::IMU_STEREO || mSensor == System::IMU_RGBD))))
CreateNewKeyFrame();
#ifdef REGISTER_TIMES
std::chrono::steady_clock::time_point time_EndNewKF = std::chrono::steady_clock::now();
double timeNewKF = std::chrono::duration_cast<std::chrono::duration<double,std::milli> >(time_EndNewKF - time_StartNewKF).count();
vdNewKF_ms.push_back(timeNewKF);
#endif
// We allow points with high innovation (considered outliers by the Huber function)
// to pass to the new keyframe, so that bundle adjustment will finally decide
// if they are outliers or not. We don't want the next frame to estimate its position
// with those points, so we discard them in the frame. Only has effect if the last frame was tracked
for(int i=0; i<mCurrentFrame.N;i++)
{
if(mCurrentFrame.mvpMapPoints[i] && mCurrentFrame.mvbOutlier[i])
mCurrentFrame.mvpMapPoints[i]=static_cast<MapPoint*>(NULL);
}
}
// Reset if the camera gets lost soon after initialization
if(mState==LOST)
{
if(pCurrentMap->KeyFramesInMap()<=10)
{
mpSystem->ResetActiveMap();
return;
}
if (mSensor == System::IMU_MONOCULAR || mSensor == System::IMU_STEREO || mSensor == System::IMU_RGBD)
if (!pCurrentMap->isImuInitialized())
{
Verbose::PrintMess("Track lost before IMU initialisation, reseting...", Verbose::VERBOSITY_QUIET);
mpSystem->ResetActiveMap();
return;
}
CreateMapInAtlas();
return;
}
if(!mCurrentFrame.mpReferenceKF)
mCurrentFrame.mpReferenceKF = mpReferenceKF;
mLastFrame = Frame(mCurrentFrame);
}
if(mState==OK || mState==RECENTLY_LOST)
{
// Store frame pose information to retrieve the complete camera trajectory afterwards.
if(mCurrentFrame.isSet())
{
Sophus::SE3f Tcr_ = mCurrentFrame.GetPose() * mCurrentFrame.mpReferenceKF->GetPoseInverse();
mlRelativeFramePoses.push_back(Tcr_);
mlpReferences.push_back(mCurrentFrame.mpReferenceKF);
mlFrameTimes.push_back(mCurrentFrame.mTimeStamp);
mlbLost.push_back(mState==LOST);
}
else
{
// This can happen if tracking is lost
mlRelativeFramePoses.push_back(mlRelativeFramePoses.back());
mlpReferences.push_back(mlpReferences.back());
mlFrameTimes.push_back(mlFrameTimes.back());
mlbLost.push_back(mState==LOST);
}
}
#ifdef REGISTER_LOOP
if (Stop()) {
// Safe area to stop
while(isStopped())
{
usleep(3000);
}
}
#endif
}
void Tracking::StereoInitialization()
{
if(mCurrentFrame.N>500)
{
if (mSensor == System::IMU_STEREO || mSensor == System::IMU_RGBD)
{
if (!mCurrentFrame.mpImuPreintegrated || !mLastFrame.mpImuPreintegrated)
{
cout << "not IMU meas" << endl;
return;
}
if (!mFastInit && (mCurrentFrame.mpImuPreintegratedFrame->avgA-mLastFrame.mpImuPreintegratedFrame->avgA).norm()<0.5)
{
cout << "not enough acceleration" << endl;
return;
}
if(mpImuPreintegratedFromLastKF)
delete mpImuPreintegratedFromLastKF;
mpImuPreintegratedFromLastKF = new IMU::Preintegrated(IMU::Bias(),*mpImuCalib);
mCurrentFrame.mpImuPreintegrated = mpImuPreintegratedFromLastKF;
}
// Set Frame pose to the origin (in the inertial case, anchor it to the IMU/body frame)
if (mSensor == System::IMU_STEREO || mSensor == System::IMU_RGBD)
{
Eigen::Matrix3f Rwb0 = mCurrentFrame.mImuCalib.mTcb.rotationMatrix();
Eigen::Vector3f twb0 = mCurrentFrame.mImuCalib.mTcb.translation();
Eigen::Vector3f Vwb0;
Vwb0.setZero();
mCurrentFrame.SetImuPoseVelocity(Rwb0, twb0, Vwb0);
}
else
mCurrentFrame.SetPose(Sophus::SE3f());
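// In the inertial case, setting Twb0 = Tcb anchors the world frame at the initial (left) camera frame:
// the body (IMU) pose is initialized from the camera–IMU extrinsic calibration (mTcb) and the velocity starts at zero.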
// Create KeyFrame
KeyFrame* pKFini = new KeyFrame(mCurrentFrame,mpAtlas->GetCurrentMap(),mpKeyFrameDB);
// Insert KeyFrame in the map
mpAtlas->AddKeyFrame(pKFini);
// Create MapPoints and associate them to the KeyFrame
if(!mpCamera2){
for(int i=0; i<mCurrentFrame.N;i++)
{
float z = mCurrentFrame.mvDepth[i];
if(z>0)
{
Eigen::Vector3f x3D;
mCurrentFrame.UnprojectStereo(i, x3D);
MapPoint* pNewMP = new MapPoint(x3D, pKFini, mpAtlas->GetCurrentMap());
pNewMP->AddObservation(pKFini,i);
pKFini->AddMapPoint(pNewMP,i);
pNewMP->ComputeDistinctiveDescriptors();
pNewMP->UpdateNormalAndDepth();
mpAtlas->AddMapPoint(pNewMP);
mCurrentFrame.mvpMapPoints[i]=pNewMP;
}
}
} else{
for(int i = 0; i < mCurrentFrame.Nleft; i++){
int rightIndex = mCurrentFrame.mvLeftToRightMatch[i];
if(rightIndex != -1){
Eigen::Vector3f x3D = mCurrentFrame.mvStereo3Dpoints[i];
MapPoint* pNewMP = new MapPoint(x3D, pKFini, mpAtlas->GetCurrentMap());
pNewMP->AddObservation(pKFini,i);
pNewMP->AddObservation(pKFini,rightIndex + mCurrentFrame.Nleft);
pKFini->AddMapPoint(pNewMP,i);
pKFini->AddMapPoint(pNewMP,rightIndex + mCurrentFrame.Nleft);
pNewMP->ComputeDistinctiveDescriptors();
pNewMP->UpdateNormalAndDepth();
mpAtlas->AddMapPoint(pNewMP);
mCurrentFrame.mvpMapPoints[i]=pNewMP;
mCurrentFrame.mvpMapPoints[rightIndex + mCurrentFrame.Nleft]=pNewMP;
}
}
}
Verbose::PrintMess("New Map created with " + to_string(mpAtlas->MapPointsInMap()) + " points", Verbose::VERBOSITY_QUIET);
//cout << "Active map: " << mpAtlas->GetCurrentMap()->GetId() << endl;
mpLocalMapper->InsertKeyFrame(pKFini);
mLastFrame = Frame(mCurrentFrame);
mnLastKeyFrameId = mCurrentFrame.mnId;
mpLastKeyFrame = pKFini;
//mnLastRelocFrameId = mCurrentFrame.mnId;
mvpLocalKeyFrames.push_back(pKFini);
mvpLocalMapPoints=mpAtlas->GetAllMapPoints();
mpReferenceKF = pKFini;
mCurrentFrame.mpReferenceKF = pKFini;
mpAtlas->SetReferenceMapPoints(mvpLocalMapPoints);
mpAtlas->GetCurrentMap()->mvpKeyFrameOrigins.push_back(pKFini);
mpMapDrawer->SetCurrentCameraPose(mCurrentFrame.GetPose());
mState=OK;
}
}
void Tracking::MonocularInitialization()
{
// If mbReadyToInitializate is false, initialization is not ready yet: enter the first stage and set the reference frame.
// If it is true, initialization has started: try to recover the camera pose and an initial map through frame-to-frame matching and triangulation.
if(!mbReadyToInitializate)
{
// Set Reference Frame
if(mCurrentFrame.mvKeys.size()>100) // The current frame has more than 100 extracted ORB keypoints.
{
mInitialFrame = Frame(mCurrentFrame); // Use the current frame as the initial frame mInitialFrame and as the last frame mLastFrame.
mLastFrame = Frame(mCurrentFrame);
mvbPrevMatched.resize(mCurrentFrame.mvKeysUn.size()); // Initialize mvbPrevMatched with the coordinates of all undistorted keypoints of the current frame.
for(size_t i=0; i<mCurrentFrame.mvKeysUn.size(); i++)
mvbPrevMatched[i]=mCurrentFrame.mvKeysUn[i].pt;
fill(mvIniMatches.begin(),mvIniMatches.end(),-1); // Mark all entries of the match index array mvIniMatches as -1 (not matched yet).
if (mSensor == System::IMU_MONOCULAR) // For IMU_MONOCULAR, (re)create the IMU preintegrator mpImuPreintegratedFromLastKF
{
if(mpImuPreintegratedFromLastKF)
{
delete mpImuPreintegratedFromLastKF;
}
mpImuPreintegratedFromLastKF = new IMU::Preintegrated(IMU::Bias(),*mpImuCalib);
mCurrentFrame.mpImuPreintegrated = mpImuPreintegratedFromLastKF;
}
mbReadyToInitializate = true;
return;
}
}
else
{
// If the current frame has 100 keypoints or fewer, or (with an IMU) more than 1 second elapsed between the two frames, initialization fails: set mbReadyToInitializate back to false and return to the first stage to pick a new reference frame.
if (((int)mCurrentFrame.mvKeys.size()<=100)||((mSensor == System::IMU_MONOCULAR)&&(mLastFrame.mTimeStamp-mInitialFrame.mTimeStamp>1.0)))
{
mbReadyToInitializate = false;
return;
}
// Find correspondences: SearchForInitialization tries to match keypoints between the initial frame and the current frame.
ORBmatcher matcher(0.9,true);
int nmatches = matcher.SearchForInitialization(mInitialFrame,mCurrentFrame,mvbPrevMatched,mvIniMatches,100);
// Check if there are enough correspondences
// The matches are stored in mvIniMatches. If fewer than 100 matches are found, initialization fails and a new reference frame will be set.
if(nmatches<100)
{
mbReadyToInitializate = false;
return;
}
Sophus::SE3f Tcw;
vector<bool> vbTriangulated; // Triangulated Correspondences (mvIniMatches)
// ReconstructWithTwoViews triangulates the two-view matches to recover the 3D points mvIniP3D and the camera pose Tcw.
if(mpCamera->ReconstructWithTwoViews(mInitialFrame.mvKeysUn,mCurrentFrame.mvKeysUn,mvIniMatches,Tcw,mvIniP3D,vbTriangulated))
{
for(size_t i=0, iend=mvIniMatches.size(); i<iend;i++)
{
if(mvIniMatches[i]>=0 && !vbTriangulated[i]) // Remove matches that could not be triangulated from mvIniMatches.
{
mvIniMatches[i]=-1;
nmatches--;
}
}
// Set Frame Poses
mInitialFrame.SetPose(Sophus::SE3f()); // Set the initial frame pose to the identity, i.e. the world origin: the camera reference frame at initialization coincides with the world frame.
mCurrentFrame.SetPose(Tcw); // Set the current frame pose to Tcw, the world-to-camera transform estimated from the two-view matching and triangulation.
CreateInitialMapMonocular(); // Build the initial map.
}
}
}
// Initial map construction for monocular SLAM: create the two initial keyframes, add map points, run optimization, and build the initial local map. This is a key step of the monocular initialization pipeline.
void Tracking::CreateInitialMapMonocular()
{
// Create KeyFrames: build pKFini from the initial frame (mInitialFrame) and pKFcur from the current frame (mCurrentFrame).
// They are stored in the atlas (mpAtlas) and linked to the keyframe database (mpKeyFrameDB) for later queries and optimization.
KeyFrame* pKFini = new KeyFrame(mInitialFrame,mpAtlas->GetCurrentMap(),mpKeyFrameDB);
KeyFrame* pKFcur = new KeyFrame(mCurrentFrame,mpAtlas->GetCurrentMap(),mpKeyFrameDB);
if(mSensor == System::IMU_MONOCULAR)
pKFini->mpImuPreintegrated = (IMU::Preintegrated*)(NULL);
pKFini->ComputeBoW(); // Compute the BoW vector of each keyframe, used later for keyframe association (e.g. loop detection).
pKFcur->ComputeBoW();
// Insert the KFs in the current map to build the initial map structure.
mpAtlas->AddKeyFrame(pKFini);
mpAtlas->AddKeyFrame(pKFcur);
// Create and initialize the map points
for(size_t i=0; i<mvIniMatches.size();i++) // For each valid entry of mvIniMatches, create a new map point.
{
if(mvIniMatches[i]<0)
continue;
//Create MapPoint.
Eigen::Vector3f worldPos;
worldPos << mvIniP3D[i].x, mvIniP3D[i].y, mvIniP3D[i].z; // World coordinates of the new map point.
MapPoint* pMP = new MapPoint(worldPos,pKFcur,mpAtlas->GetCurrentMap());
// Associate the map point with both keyframes pKFini and pKFcur.
pKFini->AddMapPoint(pMP,i);
pKFcur->AddMapPoint(pMP,mvIniMatches[i]);
pMP->AddObservation(pKFini,i);
pMP->AddObservation(pKFcur,mvIniMatches[i]);
pMP->ComputeDistinctiveDescriptors(); // Compute the representative descriptor of the map point; its normal and depth are updated below.
pMP->UpdateNormalAndDepth();
//Fill Current Frame structure
mCurrentFrame.mvpMapPoints[mvIniMatches[i]] = pMP;
mCurrentFrame.mvbOutlier[mvIniMatches[i]] = false;
//Add to Map
mpAtlas->AddMapPoint(pMP); // Insert the map point into the map.
}
// Update Connections
pKFini->UpdateConnections();
pKFcur->UpdateConnections();
std::set<MapPoint*> sMPs;
sMPs = pKFini->GetMapPoints();
// Bundle Adjustment
Verbose::PrintMess("New Map created with " + to_string(mpAtlas->MapPointsInMap()) + " points", Verbose::VERBOSITY_QUIET);
Optimizer::GlobalBundleAdjustemnt(mpAtlas->GetCurrentMap(),20); // Refine keyframe poses and map point positions with a global bundle adjustment.
float medianDepth = pKFini->ComputeSceneMedianDepth(2);
float invMedianDepth;
if(mSensor == System::IMU_MONOCULAR)
invMedianDepth = 4.0f/medianDepth; // 4.0f
else
invMedianDepth = 1.0f/medianDepth;
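// Monocular initialization has an arbitrary scale: the map will be rescaled below by invMedianDepth so
// that the median scene depth of the first keyframe becomes 1 in the visual-only case and 4 in the IMU
// case (presumably to give the visual-inertial initialization a larger working scale).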
if(medianDepth<0 || pKFcur->TrackedMapPoints(1)<50) // TODO Check, originally 100 tracks
{
Verbose::PrintMess("Wrong initialization, reseting...", Verbose::VERBOSITY_QUIET);
mpSystem->ResetActiveMap();
return;
}
// Scale initial baseline
Sophus::SE3f Tc2w = pKFcur->GetPose();
Tc2w.translation() *= invMedianDepth;
pKFcur->SetPose(Tc2w);
// Scale points
// Compute the median scene depth of the initial keyframe and use it to rescale map points and camera poses.
// Scale the keyframes and the map points
// so that the depth scale of the initial map is consistent (the arbitrary monocular scale is normalized).
vector<MapPoint*> vpAllMapPoints = pKFini->GetMapPointMatches();
for(size_t iMP=0; iMP<vpAllMapPoints.size(); iMP++)
{
if(vpAllMapPoints[iMP])
{
MapPoint* pMP = vpAllMapPoints[iMP];
pMP->SetWorldPos(pMP->GetWorldPos()*invMedianDepth);
pMP->UpdateNormalAndDepth();
}
}
if (mSensor == System::IMU_MONOCULAR)
{
pKFcur->mPrevKF = pKFini;
pKFini->mNextKF = pKFcur;
pKFcur->mpImuPreintegrated = mpImuPreintegratedFromLastKF;
mpImuPreintegratedFromLastKF = new IMU::Preintegrated(pKFcur->mpImuPreintegrated->GetUpdatedBias(),pKFcur->mImuCalib);
}
mpLocalMapper->InsertKeyFrame(pKFini);
mpLocalMapper->InsertKeyFrame(pKFcur);
mpLocalMapper->mFirstTs=pKFcur->mTimeStamp;
mCurrentFrame.SetPose(pKFcur->GetPose());
mnLastKeyFrameId=mCurrentFrame.mnId;
mpLastKeyFrame = pKFcur;
//mnLastRelocFrameId = mInitialFrame.mnId;
// Add the current keyframe (pKFcur) and the initial keyframe (pKFini) to the local keyframe list (mvpLocalKeyFrames) for subsequent tracking and map updates.
mvpLocalKeyFrames.push_back(pKFcur);
mvpLocalKeyFrames.push_back(pKFini);
// Add all map points of the map to the local map point list (mvpLocalMapPoints) as the reference for tracking and mapping.
mvpLocalMapPoints=mpAtlas->GetAllMapPoints();
mpReferenceKF = pKFcur;
mCurrentFrame.mpReferenceKF = pKFcur;
// Compute here initial velocity
// mpAtlas->GetAllKeyFrames() returns all keyframes; vKFs.back() is the keyframe of the current frame and vKFs.front() is the initial keyframe.
vector<KeyFrame*> vKFs = mpAtlas->GetAllKeyFrames();
// deltaT is the pose change from the initial keyframe to the current keyframe, obtained by composing the current pose with the inverse of the initial pose.
Sophus::SE3f deltaT = vKFs.back()->GetPose() * vKFs.front()->GetPoseInverse();
mbVelocity = false;
// so3().log() extracts the Lie-algebra representation (rotation vector) of the rotational part.
Eigen::Vector3f phi = deltaT.so3().log();
// aux is the ratio of the last inter-frame interval to the whole initialization interval, used to infer a velocity.
double aux = (mCurrentFrame.mTimeStamp-mLastFrame.mTimeStamp)/(mCurrentFrame.mTimeStamp-mInitialFrame.mTimeStamp);
phi *= aux;
mLastFrame = Frame(mCurrentFrame);
mpAtlas->SetReferenceMapPoints(mvpLocalMapPoints);
mpMapDrawer->SetCurrentCameraPose(pKFcur->GetPose());
mpAtlas->GetCurrentMap()->mvpKeyFrameOrigins.push_back(pKFini);
mState=OK;
initID = pKFcur->mnId;
}
void Tracking::CreateMapInAtlas()
{
mnLastInitFrameId = mCurrentFrame.mnId;
mpAtlas->CreateNewMap();
if (mSensor==System::IMU_STEREO || mSensor == System::IMU_MONOCULAR || mSensor == System::IMU_RGBD)
mpAtlas->SetInertialSensor();
mbSetInit=false;
mnInitialFrameId = mCurrentFrame.mnId+1;
mState = NO_IMAGES_YET;
// Restart the variable with information about the last KF
mbVelocity = false;
//mnLastRelocFrameId = mnLastInitFrameId; // The last relocation KF_id is the current id, because it is the new starting point for new map
Verbose::PrintMess("First frame id in map: " + to_string(mnLastInitFrameId+1), Verbose::VERBOSITY_NORMAL);
mbVO = false; // Initial value to know whether there are enough MapPoints in the last KF
if(mSensor == System::MONOCULAR || mSensor == System::IMU_MONOCULAR)
{
mbReadyToInitializate = false;
}
if((mSensor == System::IMU_MONOCULAR || mSensor == System::IMU_STEREO || mSensor == System::IMU_RGBD) && mpImuPreintegratedFromLastKF)
{
delete mpImuPreintegratedFromLastKF;
mpImuPreintegratedFromLastKF = new IMU::Preintegrated(IMU::Bias(),*mpImuCalib);
}
if(mpLastKeyFrame)
mpLastKeyFrame = static_cast<KeyFrame*>(NULL);
if(mpReferenceKF)
mpReferenceKF = static_cast<KeyFrame*>(NULL);
mLastFrame = Frame();
mCurrentFrame = Frame();
mvIniMatches.clear();
mbCreatedMap = true;
}
void Tracking::CheckReplacedInLastFrame()
{
for(int i =0; i<mLastFrame.N; i++)
{
MapPoint* pMP = mLastFrame.mvpMapPoints[i];
if(pMP)
{
MapPoint* pRep = pMP->GetReplaced();
if(pRep)
{
mLastFrame.mvpMapPoints[i] = pRep;
}
}
}
}
bool Tracking::TrackReferenceKeyFrame()
{
// Compute Bag of Words vector
mCurrentFrame.ComputeBoW(); // Compute the Bag of Words (BoW) representation of the current frame's ORB features; it allows efficient ORB matching between the reference keyframe and the current frame.
// We perform first an ORB matching with the reference keyframe
// If enough matches are found we setup a PnP solver
ORBmatcher matcher(0.7,true);
vector<MapPoint*> vpMapPointMatches;
// SearchByBoW uses the BoW vectors to find ORB correspondences between the reference keyframe (mpReferenceKF) and the current frame (mCurrentFrame).
// The matched MapPoints are stored in vpMapPointMatches and the number of matches is returned as nmatches.
int nmatches = matcher.SearchByBoW(mpReferenceKF,mCurrentFrame,vpMapPointMatches);
if(nmatches<15) // Fewer than 15 matches is considered too weak for pose estimation; return false.
{
cout << "TRACK_REF_KF: Less than 15 matches!!\n";
return false;
}
mCurrentFrame.mvpMapPoints = vpMapPointMatches;
mCurrentFrame.SetPose(mLastFrame.GetPose());
//mCurrentFrame.PrintPointDistribution();
// cout << " TrackReferenceKeyFrame mLastFrame.mTcw: " << mLastFrame.mTcw << endl;
// Optimize the current frame pose (motion-only bundle adjustment),
// minimizing the reprojection error of the matched map points.
Optimizer::PoseOptimization(&mCurrentFrame);
// Go through the map point associated with each keypoint of the current frame.
// If the optimization marked a point as an outlier (mCurrentFrame.mvbOutlier[i] == true), drop it (set it to NULL).
// Update the in-view tracking flags of the map point (mbTrackInView and mbTrackInViewR).
// Count the valid map points (not outliers and with observations) in nmatchesMap.
// Purpose: discard wrong matches (outliers) to make subsequent computations more accurate.
// Discard outliers
int nmatchesMap = 0;
for(int i =0; i<mCurrentFrame.N; i++)
{
//if(i >= mCurrentFrame.Nleft) break;
if(mCurrentFrame.mvpMapPoints[i])
{
if(mCurrentFrame.mvbOutlier[i])
{
MapPoint* pMP = mCurrentFrame.mvpMapPoints[i];
mCurrentFrame.mvpMapPoints[i]=static_cast<MapPoint*>(NULL);
mCurrentFrame.mvbOutlier[i]=false;
if(i < mCurrentFrame.Nleft){
pMP->mbTrackInView = false;
}
else{
pMP->mbTrackInViewR = false;
}
pMP->mbTrackInView = false;
pMP->mnLastFrameSeen = mCurrentFrame.mnId;
nmatches--;
}
else if(mCurrentFrame.mvpMapPoints[i]->Observations()>0)
nmatchesMap++;
}
}
if (mSensor == System::IMU_MONOCULAR || mSensor == System::IMU_STEREO || mSensor == System::IMU_RGBD)
return true;
else
return nmatchesMap>=10;
}
void Tracking::UpdateLastFrame()
{
// Update pose according to reference keyframe
KeyFrame* pRef = mLastFrame.mpReferenceKF; // pRef: reference keyframe of the last frame
Sophus::SE3f Tlr = mlRelativeFramePoses.back(); // Tlr: pose of the last frame relative to its reference keyframe (taken from the history mlRelativeFramePoses).
mLastFrame.SetPose(Tlr * pRef->GetPose()); // Update the global pose of the last frame from Tlr and the (possibly optimized) global pose of its reference keyframe.
// Below: even when the last frame is not a keyframe, its depth measurements can be used to add temporary map points and make visual odometry more robust.
if(mnLastKeyFrameId==mLastFrame.mnId || mSensor==System::MONOCULAR || mSensor==System::IMU_MONOCULAR || !mbOnlyTracking)
return;
// Create "visual odometry" MapPoints
// We sort points according to their measured depth by the stereo/RGB-D sensor
vector<pair<float,int> > vDepthIdx;
const int Nfeat = mLastFrame.Nleft == -1? mLastFrame.N : mLastFrame.Nleft;
vDepthIdx.reserve(Nfeat);
for(int i=0; i<Nfeat;i++)
{
float z = mLastFrame.mvDepth[i];
if(z>0)
{
vDepthIdx.push_back(make_pair(z,i));
}
}
if(vDepthIdx.empty())
return;
sort(vDepthIdx.begin(),vDepthIdx.end());
// We insert all close points (depth<mThDepth)
// If less than 100 close points, we insert the 100 closest ones.
int nPoints = 0;
for(size_t j=0; j<vDepthIdx.size();j++)
{
int i = vDepthIdx[j].second;
bool bCreateNew = false;
MapPoint* pMP = mLastFrame.mvpMapPoints[i];
if(!pMP)
bCreateNew = true;
else if(pMP->Observations()<1)
bCreateNew = true;
if(bCreateNew)
{
Eigen::Vector3f x3D;
if(mLastFrame.Nleft == -1){
mLastFrame.UnprojectStereo(i, x3D);
}
else{
x3D = mLastFrame.UnprojectStereoFishEye(i);
}
MapPoint* pNewMP = new MapPoint(x3D,mpAtlas->GetCurrentMap(),&mLastFrame,i);
mLastFrame.mvpMapPoints[i]=pNewMP;
mlpTemporalPoints.push_back(pNewMP);
nPoints++;
}
else
{
nPoints++;
}
if(vDepthIdx[j].first>mThDepth && nPoints>100)
break;
}
}
bool Tracking::TrackWithMotionModel()
{
ORBmatcher matcher(0.9,true);
// Update last frame pose according to its reference keyframe
// Create "visual odometry" points if in Localization Mode
UpdateLastFrame();
if (mpAtlas->isImuInitialized() && (mCurrentFrame.mnId>mnLastRelocFrameId+mnFramesToResetIMU))
{
// Predict state with IMU if it is initialized and it doesn't need reset
PredictStateIMU();
return true;
}
else
{
mCurrentFrame.SetPose(mVelocity * mLastFrame.GetPose());
}
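// Constant-velocity model: mVelocity holds the relative pose between the two previous frames, so composing
// it with the (just updated) last-frame pose predicts the current world-to-camera pose.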
// Clear the map point associations of the current frame to prepare for the projection-based matching below.
fill(mCurrentFrame.mvpMapPoints.begin(),mCurrentFrame.mvpMapPoints.end(),static_cast<MapPoint*>(NULL));
// Project points seen in previous frame
int th;
if(mSensor==System::STEREO)
th=7;
else
th=15;
// SearchByProjection projects the map points seen in the last frame onto the current image and looks for matches.
// The search window size is set through th and depends on the sensor type (stereo vs. monocular, etc.).
// If not enough matches are found, the search is repeated with a wider window:
int nmatches = matcher.SearchByProjection(mCurrentFrame,mLastFrame,th,mSensor==System::MONOCULAR || mSensor==System::IMU_MONOCULAR);
// If few matches, use a wider window search
if(nmatches<20)
{
Verbose::PrintMess("Not enough matches, wider window search!!", Verbose::VERBOSITY_NORMAL);
fill(mCurrentFrame.mvpMapPoints.begin(),mCurrentFrame.mvpMapPoints.end(),static_cast<MapPoint*>(NULL));
nmatches = matcher.SearchByProjection(mCurrentFrame,mLastFrame,2*th,mSensor==System::MONOCULAR || mSensor==System::IMU_MONOCULAR);
Verbose::PrintMess("Matches with wider search: " + to_string(nmatches), Verbose::VERBOSITY_NORMAL);
}
// If there are still fewer than 20 matches, the sensor type decides whether to continue:
// for the IMU monocular/stereo/RGB-D configurations, return true and keep running;
// otherwise return false, meaning tracking failed.
if(nmatches<20)
{
Verbose::PrintMess("Not enough matches!!", Verbose::VERBOSITY_NORMAL);
if (mSensor == System::IMU_MONOCULAR || mSensor == System::IMU_STEREO || mSensor == System::IMU_RGBD)
return true;
else
return false;
}
// Optimize frame pose with all matches
Optimizer::PoseOptimization(&mCurrentFrame);
// Discard outliers
int nmatchesMap = 0;
for(int i =0; i<mCurrentFrame.N; i++)
{
if(mCurrentFrame.mvpMapPoints[i])
{
if(mCurrentFrame.mvbOutlier[i])
{
MapPoint* pMP = mCurrentFrame.mvpMapPoints[i];
mCurrentFrame.mvpMapPoints[i]=static_cast<MapPoint*>(NULL);
mCurrentFrame.mvbOutlier[i]=false;
if(i < mCurrentFrame.Nleft){
pMP->mbTrackInView = false;
}
else{
pMP->mbTrackInViewR = false;
}
pMP->mnLastFrameSeen = mCurrentFrame.mnId;
nmatches--;
}
else if(mCurrentFrame.mvpMapPoints[i]->Observations()>0)
nmatchesMap++;
}
}
if(mbOnlyTracking)
{
mbVO = nmatchesMap<10;
return nmatches>20;
}
if (mSensor == System::IMU_MONOCULAR || mSensor == System::IMU_STEREO || mSensor == System::IMU_RGBD)
return true;
else
return nmatchesMap>=10;
}
bool Tracking::TrackLocalMap()
{
// We have an estimation of the camera pose and some map points tracked in the frame.
// We retrieve the local map and try to find matches to points in the local map.
mTrackedFr++;
UpdateLocalMap(); // Update the local map of the current frame, i.e. extract from the map the keyframes and points relevant to the current view.
SearchLocalPoints();
// TODO check outliers before PO
// Count how many map points are tracked in the current frame and how many of them are flagged as outliers (mvbOutlier[i] == true), i.e. matches of poor quality that may not be consistent with the current view.
int aux1 = 0, aux2=0;
for(int i=0; i<mCurrentFrame.N; i++)
if( mCurrentFrame.mvpMapPoints[i])
{
aux1++;
if(mCurrentFrame.mvbOutlier[i])
aux2++;
}
int inliers;
// If the IMU is not yet initialized, optimize the pose with PoseOptimization, which runs a g2o nonlinear optimization.
if (!mpAtlas->isImuInitialized())
Optimizer::PoseOptimization(&mCurrentFrame);
else
{
if(mCurrentFrame.mnId<=mnLastRelocFrameId+mnFramesToResetIMU) // If the IMU is initialized but the current frame is still close to the last relocalization, run the ordinary pose optimization.
{
Verbose::PrintMess("TLM: PoseOptimization ", Verbose::VERBOSITY_DEBUG);
Optimizer::PoseOptimization(&mCurrentFrame);
}
// Otherwise, update the pose with visual-inertial optimization; depending on whether the map was updated, a different optimization function is chosen.
else
{
// if(!mbMapUpdated && mState == OK) // && (mnMatchesInliers>30))
if(!mbMapUpdated) // && (mnMatchesInliers>30))
{
Verbose::PrintMess("TLM: PoseInertialOptimizationLastFrame ", Verbose::VERBOSITY_DEBUG);
inliers = Optimizer::PoseInertialOptimizationLastFrame(&mCurrentFrame); // , !mpLastKeyFrame->GetMap()->GetIniertialBA1());
}
else
{
Verbose::PrintMess("TLM: PoseInertialOptimizationLastKeyFrame ", Verbose::VERBOSITY_DEBUG);
inliers = Optimizer::PoseInertialOptimizationLastKeyFrame(&mCurrentFrame); // , !mpLastKeyFrame->GetMap()->GetIniertialBA1());
}
}
}
aux1 = 0, aux2 = 0;
for(int i=0; i<mCurrentFrame.N; i++)
if( mCurrentFrame.mvpMapPoints[i])
{
aux1++;
if(mCurrentFrame.mvbOutlier[i])
aux2++;
}
mnMatchesInliers = 0;
// Update MapPoints Statistics
for(int i=0; i<mCurrentFrame.N; i++)
{
if(mCurrentFrame.mvpMapPoints[i])
{
if(!mCurrentFrame.mvbOutlier[i])
{
mCurrentFrame.mvpMapPoints[i]->IncreaseFound(); // If the map point is not an outlier, IncreaseFound() increments its found counter.
if(!mbOnlyTracking)
{
if(mCurrentFrame.mvpMapPoints[i]->Observations()>0)
mnMatchesInliers++;
}
// In tracking-only mode (mbOnlyTracking == true), mnMatchesInliers is updated without checking the number of observations of the map point.
else
mnMatchesInliers++;
}
else if(mSensor==System::STEREO) // For the stereo sensor, outlier map points are removed from the current frame's mvpMapPoints.
mCurrentFrame.mvpMapPoints[i] = static_cast<MapPoint*>(NULL);
}
}
// Decide if the tracking was successful
// More restrictive if there was a relocalization recently
mpLocalMapper->mnMatchesInliers=mnMatchesInliers; // Record the number of inlier matches of the current frame.
if(mCurrentFrame.mnId<mnLastRelocFrameId+mMaxFrames && mnMatchesInliers<50) // If the current frame is too close to the last relocalization and there are fewer than 50 inliers, return false.
return false;
if((mnMatchesInliers>10)&&(mState==RECENTLY_LOST)) // If the state is RECENTLY_LOST and there are more than 10 inliers, return true.
return true;
// Depending on the sensor type (monocular, stereo or RGB-D, with or without IMU), check the minimum inlier requirement; return false if it is not met, true otherwise.
if (mSensor == System::IMU_MONOCULAR)
{
if((mnMatchesInliers<15 && mpAtlas->isImuInitialized())||(mnMatchesInliers<50 && !mpAtlas->isImuInitialized()))
{
return false;
}
else
return true;
}
else if (mSensor == System::IMU_STEREO || mSensor == System::IMU_RGBD)
{
if(mnMatchesInliers<15)
{
return false;
}
else
return true;
}
else
{
if(mnMatchesInliers<30)
return false;
else
return true;
}
}
// NeedNewKeyFrame decides whether to insert a new keyframe based on the current tracking state, the sensor type and the state of local mapping. Keyframe insertion drives map update, optimization and loop detection.
bool Tracking::NeedNewKeyFrame()
{
// Before IMU initialization, insert keyframes often enough so that the preintegration interval between keyframes does not accumulate too much error.
if((mSensor == System::IMU_MONOCULAR || mSensor == System::IMU_STEREO || mSensor == System::IMU_RGBD) && !mpAtlas->GetCurrentMap()->isImuInitialized())
{
if (mSensor == System::IMU_MONOCULAR && (mCurrentFrame.mTimeStamp-mpLastKeyFrame->mTimeStamp)>=0.25)
return true;
else if ((mSensor == System::IMU_STEREO || mSensor == System::IMU_RGBD) && (mCurrentFrame.mTimeStamp-mpLastKeyFrame->mTimeStamp)>=0.25)
return true;
else
return false;
}
// In tracking-only mode, never insert new keyframes.
if(mbOnlyTracking)
return false;
// If Local Mapping is freezed by a Loop Closure do not insert keyframes
// If the local mapper is stopped or a stop has been requested (e.g. local mapping is frozen by a loop closure), do not insert keyframes.
if(mpLocalMapper->isStopped() || mpLocalMapper->stopRequested()) {
/*if(mSensor == System::MONOCULAR)
{
std::cout << "NeedNewKeyFrame: localmap stopped" << std::endl;
}*/
return false;
}
const int nKFs = mpAtlas->KeyFramesInMap();
// Do not insert keyframes if not enough frames have passed from last relocalisation
// If the current frame is within mMaxFrames of the last relocalization and there are already more than mMaxFrames keyframes in the map, do not insert a keyframe.
if(mCurrentFrame.mnId<mnLastRelocFrameId+mMaxFrames && nKFs>mMaxFrames)
{
return false;
}
// Tracked MapPoints in the reference keyframe
// Number of map points tracked in the reference keyframe with enough observations; if it is low, inserting a new keyframe should be considered.
int nMinObs = 3;
if(nKFs<=2)
nMinObs=2;
int nRefMatches = mpReferenceKF->TrackedMapPoints(nMinObs);
// Local Mapping accept keyframes?
// If the local mapper is idle (it can accept keyframes), a new keyframe may be inserted.
bool bLocalMappingIdle = mpLocalMapper->AcceptKeyFrames();
// Check how many "close" points are being tracked and how many could be potentially created.
int nNonTrackedClose = 0;
int nTrackedClose= 0;
// Count the tracked "close" points (depth below the threshold); used to evaluate whether the current frame needs a new keyframe.
if(mSensor!=System::MONOCULAR && mSensor!=System::IMU_MONOCULAR)
{
int N = (mCurrentFrame.Nleft == -1) ? mCurrentFrame.N : mCurrentFrame.Nleft;
for(int i =0; i<N; i++)
{
if(mCurrentFrame.mvDepth[i]>0 && mCurrentFrame.mvDepth[i]<mThDepth)
{
if(mCurrentFrame.mvpMapPoints[i] && !mCurrentFrame.mvbOutlier[i])
nTrackedClose++;
else
nNonTrackedClose++;
}
}
//Verbose::PrintMess("[NEEDNEWKF]-> closed points: " + to_string(nTrackedClose) + "; non tracked closed points: " + to_string(nNonTrackedClose), Verbose::VERBOSITY_NORMAL);// Verbose::VERBOSITY_DEBUG);
}
bool bNeedToInsertClose;
bNeedToInsertClose = (nTrackedClose<100) && (nNonTrackedClose>70);
// Thresholds
float thRefRatio = 0.75f;
if(nKFs<2)
thRefRatio = 0.4f;
/*int nClosedPoints = nTrackedClose + nNonTrackedClose;
const int thStereoClosedPoints = 15;
if(nClosedPoints < thStereoClosedPoints && (mSensor==System::STEREO || mSensor==System::IMU_STEREO))
{
//Pseudo-monocular, there are not enough close points to be confident about the stereo observations.
thRefRatio = 0.9f;
}*/
if(mSensor==System::MONOCULAR)
thRefRatio = 0.9f;
if(mpCamera2) thRefRatio = 0.75f;
if(mSensor==System::IMU_MONOCULAR)
{
if(mnMatchesInliers>350) // Points tracked from the local map
thRefRatio = 0.75f;
else
thRefRatio = 0.90f;
}
// Condition 1a: More than "MaxFrames" have passed from last keyframe insertion
// Condition c1a: more than mMaxFrames have passed since the last keyframe, so a new keyframe is needed.
const bool c1a = mCurrentFrame.mnId>=mnLastKeyFrameId+mMaxFrames;
// Condition 1b: More than "MinFrames" have passed and Local Mapping is idle
// Condition c1b: more than mMinFrames have passed since the last keyframe and the local mapper is idle.
const bool c1b = ((mCurrentFrame.mnId>=mnLastKeyFrameId+mMinFrames) && bLocalMappingIdle); //mpLocalMapper->KeyframesInQueue() < 2);
//Condition 1c: tracking is weak
// Condition c1c: tracking is weak (e.g. fewer inliers than 25% of the reference keyframe), so a new keyframe is needed.
const bool c1c = mSensor!=System::MONOCULAR && mSensor!=System::IMU_MONOCULAR && mSensor!=System::IMU_STEREO && mSensor!=System::IMU_RGBD && (mnMatchesInliers<nRefMatches*0.25 || bNeedToInsertClose) ;
// Condition 2: Few tracked points compared to reference keyframe. Lots of visual odometry compared to map matches.
// Condition c2: the number of inliers falls below a ratio of the reference keyframe, or there are enough untracked close points; a new keyframe is needed.
const bool c2 = (((mnMatchesInliers<nRefMatches*thRefRatio || bNeedToInsertClose)) && mnMatchesInliers>15);
//std::cout << "NeedNewKF: c1a=" << c1a << "; c1b=" << c1b << "; c1c=" << c1c << "; c2=" << c2 << std::endl;
// Temporal condition for Inertial cases
bool c3 = false;
if(mpLastKeyFrame)
{
if (mSensor==System::IMU_MONOCULAR)
{
if ((mCurrentFrame.mTimeStamp-mpLastKeyFrame->mTimeStamp)>=0.5)
c3 = true;
}
else if (mSensor==System::IMU_STEREO || mSensor == System::IMU_RGBD)
{
if ((mCurrentFrame.mTimeStamp-mpLastKeyFrame->mTimeStamp)>=0.5)
c3 = true;
}
}
bool c4 = false;
if ((((mnMatchesInliers<75) && (mnMatchesInliers>15)) || mState==RECENTLY_LOST) && (mSensor == System::IMU_MONOCULAR)) // MODIFICATION_2, originally ((((mnMatchesInliers<75) && (mnMatchesInliers>15)) || mState==RECENTLY_LOST) && ((mSensor == System::IMU_MONOCULAR)))
c4=true;
else
c4=false;
if(((c1a||c1b||c1c) && c2)||c3 ||c4)
{
// If the mapping accepts keyframes, insert keyframe.
// Otherwise send a signal to interrupt BA
if(bLocalMappingIdle || mpLocalMapper->IsInitializing())
{
return true;
}
else
{
mpLocalMapper->InterruptBA();
if(mSensor!=System::MONOCULAR && mSensor!=System::IMU_MONOCULAR)
{
if(mpLocalMapper->KeyframesInQueue()<3)
return true;
else
return false;
}
else
{
//std::cout << "NeedNewKeyFrame: localmap is busy" << std::endl;
return false;
}
}
}
else
return false;
}
void Tracking::CreateNewKeyFrame()
{
if(mpLocalMapper->IsInitializing() && !mpAtlas->isImuInitialized())
return;
if(!mpLocalMapper->SetNotStop(true))
return;
KeyFrame* pKF = new KeyFrame(mCurrentFrame,mpAtlas->GetCurrentMap(),mpKeyFrameDB);
if(mpAtlas->isImuInitialized()) // || mpLocalMapper->IsInitializing())
pKF->bImu = true;
pKF->SetNewBias(mCurrentFrame.mImuBias);
mpReferenceKF = pKF;
mCurrentFrame.mpReferenceKF = pKF;
if(mpLastKeyFrame)
{
pKF->mPrevKF = mpLastKeyFrame;
mpLastKeyFrame->mNextKF = pKF;
}
else
Verbose::PrintMess("No last KF in KF creation!!", Verbose::VERBOSITY_NORMAL);
// Reset preintegration from last KF (Create new object)
if (mSensor == System::IMU_MONOCULAR || mSensor == System::IMU_STEREO || mSensor == System::IMU_RGBD)
{
mpImuPreintegratedFromLastKF = new IMU::Preintegrated(pKF->GetImuBias(),pKF->mImuCalib);
}
if(mSensor!=System::MONOCULAR && mSensor != System::IMU_MONOCULAR) // TODO check if include imu_stereo
{
mCurrentFrame.UpdatePoseMatrices();
// cout << "create new MPs" << endl;
// We sort points by the depth measured by the stereo/RGBD sensor.
// We create all those MapPoints whose depth < mThDepth.
// If there are less than 100 close points we create the 100 closest.
int maxPoint = 100;
if(mSensor == System::IMU_STEREO || mSensor == System::IMU_RGBD)
maxPoint = 100;
vector<pair<float,int> > vDepthIdx;
int N = (mCurrentFrame.Nleft != -1) ? mCurrentFrame.Nleft : mCurrentFrame.N;
vDepthIdx.reserve(mCurrentFrame.N);
for(int i=0; i<N; i++)
{
float z = mCurrentFrame.mvDepth[i];
if(z>0)
{
vDepthIdx.push_back(make_pair(z,i));
}
}
if(!vDepthIdx.empty())
{
sort(vDepthIdx.begin(),vDepthIdx.end());
int nPoints = 0;
for(size_t j=0; j<vDepthIdx.size();j++)
{
int i = vDepthIdx[j].second;
bool bCreateNew = false;
MapPoint* pMP = mCurrentFrame.mvpMapPoints[i];
if(!pMP)
bCreateNew = true;
else if(pMP->Observations()<1)
{
bCreateNew = true;
mCurrentFrame.mvpMapPoints[i] = static_cast<MapPoint*>(NULL);
}
if(bCreateNew)
{
Eigen::Vector3f x3D;
if(mCurrentFrame.Nleft == -1){
mCurrentFrame.UnprojectStereo(i, x3D);
}
else{
x3D = mCurrentFrame.UnprojectStereoFishEye(i);
}
MapPoint* pNewMP = new MapPoint(x3D,pKF,mpAtlas->GetCurrentMap());
pNewMP->AddObservation(pKF,i);
//Check if it is a stereo observation in order to not
//duplicate mappoints
if(mCurrentFrame.Nleft != -1 && mCurrentFrame.mvLeftToRightMatch[i] >= 0){
mCurrentFrame.mvpMapPoints[mCurrentFrame.Nleft + mCurrentFrame.mvLeftToRightMatch[i]]=pNewMP;
pNewMP->AddObservation(pKF,mCurrentFrame.Nleft + mCurrentFrame.mvLeftToRightMatch[i]);
pKF->AddMapPoint(pNewMP,mCurrentFrame.Nleft + mCurrentFrame.mvLeftToRightMatch[i]);
}
pKF->AddMapPoint(pNewMP,i);
pNewMP->ComputeDistinctiveDescriptors();
pNewMP->UpdateNormalAndDepth();
mpAtlas->AddMapPoint(pNewMP);
mCurrentFrame.mvpMapPoints[i]=pNewMP;
nPoints++;
}
else
{
nPoints++;
}
if(vDepthIdx[j].first>mThDepth && nPoints>maxPoint)
{
break;
}
}
//Verbose::PrintMess("new mps for stereo KF: " + to_string(nPoints), Verbose::VERBOSITY_NORMAL);
}
}
mpLocalMapper->InsertKeyFrame(pKF);
mpLocalMapper->SetNotStop(false);
mnLastKeyFrameId = mCurrentFrame.mnId;
mpLastKeyFrame = pKF;
}
void Tracking::SearchLocalPoints()
{
// Do not search map points already matched
// Iterate over all map points (mvpMapPoints) of the current frame. Points flagged as bad (isBad()) are set to NULL so they are not used any further; otherwise their visibility counter is increased and their status (last frame seen, in-view flags) is updated.
for(vector<MapPoint*>::iterator vit=mCurrentFrame.mvpMapPoints.begin(), vend=mCurrentFrame.mvpMapPoints.end(); vit!=vend; vit++)
{
MapPoint* pMP = *vit;
if(pMP)
{
if(pMP->isBad())
{
*vit = static_cast<MapPoint*>(NULL);
}
else
{
pMP->IncreaseVisible(); // Increase the visibility counter of the map point (how many frames have seen it).
pMP->mnLastFrameSeen = mCurrentFrame.mnId; // Record the ID of the last frame in which the point was seen.
pMP->mbTrackInView = false; // Flags indicating whether the point is in the field of view of the current frame (left/right).
pMP->mbTrackInViewR = false;
}
}
}
int nToMatch=0;
// Project points in frame and check its visibility
// Iterate over the local map points (mvpLocalMapPoints), project them and check whether each one can be matched in the current frame.
for(vector<MapPoint*>::iterator vit=mvpLocalMapPoints.begin(), vend=mvpLocalMapPoints.end(); vit!=vend; vit++)
{
MapPoint* pMP = *vit;
if(pMP->mnLastFrameSeen == mCurrentFrame.mnId) // Skip points already seen by the current frame.
continue;
if(pMP->isBad()) // Skip bad map points.
continue;
// Project (this fills MapPoint variables for matching)
// isInFrustum checks whether the map point falls inside the viewing frustum of the current frame (0.5 is the viewing-cosine limit). If it does, its visibility is increased and nToMatch counts the points that need matching.
if(mCurrentFrame.isInFrustum(pMP,0.5))
{
pMP->IncreaseVisible();
nToMatch++;
}
// If the point is tracked in the current view, store its projection (mTrackProjX, mTrackProjY) in mmProjectPoints.
if(pMP->mbTrackInView)
{
mCurrentFrame.mmProjectPoints[pMP->mnId] = cv::Point2f(pMP->mTrackProjX, pMP->mTrackProjY);
}
}
if(nToMatch>0)
{
ORBmatcher matcher(0.8);
// Choose the matching search radius th according to the sensor type (RGB-D, IMU, monocular, stereo, ...) and the IMU initialization state; it controls how strict the following matching is.
int th = 1;
if(mSensor==System::RGBD || mSensor==System::IMU_RGBD)
th=3;
if(mpAtlas->isImuInitialized())
{
if(mpAtlas->GetCurrentMap()->GetIniertialBA2())
th=2;
else
th=6;
}
else if(!mpAtlas->isImuInitialized() && (mSensor==System::IMU_MONOCULAR || mSensor==System::IMU_STEREO || mSensor == System::IMU_RGBD))
{
th=10;
}
// If the camera has been relocalised recently, perform a coarser search
// If the current frame is within 2 frames of the last relocalization, the system has just relocalized, so perform a coarser search with th = 5.
if(mCurrentFrame.mnId<mnLastRelocFrameId+2)
th=5;
// In the LOST or RECENTLY_LOST state the threshold is increased to 15: the pose is uncertain, so a larger tolerance is needed to find matches again.
if(mState==LOST || mState==RECENTLY_LOST) // Lost for less than 1 second
th=15; // 15
// Match the current frame against the local map points with SearchByProjection, using the threshold th and the far-point parameters of the local mapper; matches are found by projecting the points into the image.
int matches = matcher.SearchByProjection(mCurrentFrame, mvpLocalMapPoints, th, mpLocalMapper->mbFarPoints, mpLocalMapper->mThFarPoints);
}
}
void Tracking::UpdateLocalMap()
{
// This is for visualization
mpAtlas->SetReferenceMapPoints(mvpLocalMapPoints);
// Update
UpdateLocalKeyFrames();
UpdateLocalPoints();
}
void Tracking::UpdateLocalPoints()
{
mvpLocalMapPoints.clear();
int count_pts = 0;
for(vector<KeyFrame*>::const_reverse_iterator itKF=mvpLocalKeyFrames.rbegin(), itEndKF=mvpLocalKeyFrames.rend(); itKF!=itEndKF; ++itKF)
{
KeyFrame* pKF = *itKF;
const vector<MapPoint*> vpMPs = pKF->GetMapPointMatches();
for(vector<MapPoint*>::const_iterator itMP=vpMPs.begin(), itEndMP=vpMPs.end(); itMP!=itEndMP; itMP++)
{
MapPoint* pMP = *itMP;
if(!pMP)
continue;
if(pMP->mnTrackReferenceForFrame==mCurrentFrame.mnId)
continue;
if(!pMP->isBad())
{
count_pts++;
mvpLocalMapPoints.push_back(pMP);
pMP->mnTrackReferenceForFrame=mCurrentFrame.mnId;
}
}
}
}
void Tracking::UpdateLocalKeyFrames()
{
// Each map point vote for the keyframes in which it has been observed
map<KeyFrame*,int> keyframeCounter;
if(!mpAtlas->isImuInitialized() || (mCurrentFrame.mnId<mnLastRelocFrameId+2))
{
for(int i=0; i<mCurrentFrame.N; i++)
{
MapPoint* pMP = mCurrentFrame.mvpMapPoints[i];
if(pMP)
{
if(!pMP->isBad())
{
const map<KeyFrame*,tuple<int,int>> observations = pMP->GetObservations();
for(map<KeyFrame*,tuple<int,int>>::const_iterator it=observations.begin(), itend=observations.end(); it!=itend; it++)
keyframeCounter[it->first]++;
}
else
{
mCurrentFrame.mvpMapPoints[i]=NULL;
}
}
}
}
else
{
for(int i=0; i<mLastFrame.N; i++)
{
// Using lastframe since the current frame has no matches yet
if(mLastFrame.mvpMapPoints[i])
{
MapPoint* pMP = mLastFrame.mvpMapPoints[i];
if(!pMP)
continue;
if(!pMP->isBad())
{
const map<KeyFrame*,tuple<int,int>> observations = pMP->GetObservations();
for(map<KeyFrame*,tuple<int,int>>::const_iterator it=observations.begin(), itend=observations.end(); it!=itend; it++)
keyframeCounter[it->first]++;
}
else
{
// MODIFICATION
mLastFrame.mvpMapPoints[i]=NULL;
}
}
}
}
int max=0;
KeyFrame* pKFmax= static_cast<KeyFrame*>(NULL);
mvpLocalKeyFrames.clear();
mvpLocalKeyFrames.reserve(3*keyframeCounter.size());
// All keyframes that observe a map point are included in the local map. Also check which keyframe shares most points
for(map<KeyFrame*,int>::const_iterator it=keyframeCounter.begin(), itEnd=keyframeCounter.end(); it!=itEnd; it++)
{
KeyFrame* pKF = it->first;
if(pKF->isBad())
continue;
if(it->second>max)
{
max=it->second;
pKFmax=pKF;
}
mvpLocalKeyFrames.push_back(pKF);
pKF->mnTrackReferenceForFrame = mCurrentFrame.mnId;
}
// Include also some not-already-included keyframes that are neighbors to already-included keyframes
for(vector<KeyFrame*>::const_iterator itKF=mvpLocalKeyFrames.begin(), itEndKF=mvpLocalKeyFrames.end(); itKF!=itEndKF; itKF++)
{
// Limit the number of keyframes
if(mvpLocalKeyFrames.size()>80) // 80
break;
KeyFrame* pKF = *itKF;
const vector<KeyFrame*> vNeighs = pKF->GetBestCovisibilityKeyFrames(10);
for(vector<KeyFrame*>::const_iterator itNeighKF=vNeighs.begin(), itEndNeighKF=vNeighs.end(); itNeighKF!=itEndNeighKF; itNeighKF++)
{
KeyFrame* pNeighKF = *itNeighKF;
if(!pNeighKF->isBad())
{
if(pNeighKF->mnTrackReferenceForFrame!=mCurrentFrame.mnId)
{
mvpLocalKeyFrames.push_back(pNeighKF);
pNeighKF->mnTrackReferenceForFrame=mCurrentFrame.mnId;
break;
}
}
}
const set<KeyFrame*> spChilds = pKF->GetChilds();
for(set<KeyFrame*>::const_iterator sit=spChilds.begin(), send=spChilds.end(); sit!=send; sit++)
{
KeyFrame* pChildKF = *sit;
if(!pChildKF->isBad())
{
if(pChildKF->mnTrackReferenceForFrame!=mCurrentFrame.mnId)
{
mvpLocalKeyFrames.push_back(pChildKF);
pChildKF->mnTrackReferenceForFrame=mCurrentFrame.mnId;
break;
}
}
}
KeyFrame* pParent = pKF->GetParent();
if(pParent)
{
if(pParent->mnTrackReferenceForFrame!=mCurrentFrame.mnId)
{
mvpLocalKeyFrames.push_back(pParent);
pParent->mnTrackReferenceForFrame=mCurrentFrame.mnId;
break;
}
}
}
// Add 10 last temporal KFs (mainly for IMU)
if((mSensor == System::IMU_MONOCULAR || mSensor == System::IMU_STEREO || mSensor == System::IMU_RGBD) &&mvpLocalKeyFrames.size()<80)
{
KeyFrame* tempKeyFrame = mCurrentFrame.mpLastKeyFrame;
const int Nd = 20;
for(int i=0; i<Nd; i++){
if (!tempKeyFrame)
break;
if(tempKeyFrame->mnTrackReferenceForFrame!=mCurrentFrame.mnId)
{
mvpLocalKeyFrames.push_back(tempKeyFrame);
tempKeyFrame->mnTrackReferenceForFrame=mCurrentFrame.mnId;
tempKeyFrame=tempKeyFrame->mPrevKF;
}
}
}
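//The keyframe that shares the most map points with the current frame becomes the reference keyframe.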
if(pKFmax)
{
mpReferenceKF = pKFmax;
mCurrentFrame.mpReferenceKF = mpReferenceKF;
}
}
//Relocalization based on ORB (Oriented FAST and Rotated BRIEF) features. When tracking fails, the camera pose is recovered by matching the current frame against keyframes from the database so that the system can keep running.
bool Tracking::Relocalization()
{
Verbose::PrintMess("Starting relocalization", Verbose::VERBOSITY_NORMAL);
// Compute Bag of Words Vector
// Compute the Bag of Words (BoW) vector of the current frame, used for efficient keyframe matching.
mCurrentFrame.ComputeBoW();
// Relocalization is performed when tracking is lost
// Track Lost: Query KeyFrame Database for keyframe candidates for relocalisation
// Query the keyframe database for relocalization candidates, using the BoW vector and descriptors of the current frame. If there are no candidates, return false: relocalization failed.
vector<KeyFrame*> vpCandidateKFs = mpKeyFrameDB->DetectRelocalizationCandidates(&mCurrentFrame, mpAtlas->GetCurrentMap());
if(vpCandidateKFs.empty()) {
Verbose::PrintMess("There are not candidates", Verbose::VERBOSITY_NORMAL);
return false;
}
const int nKFs = vpCandidateKFs.size();
// We perform first an ORB matching with each candidate
// If enough matches are found we setup a PnP solver
ORBmatcher matcher(0.75,true); //Create the ORB feature matcher (nearest-neighbour ratio 0.75, orientation check enabled).
vector<MLPnPsolver*> vpMLPnPsolvers;//For each candidate keyframe allocate a PnP solver (vpMLPnPsolvers), a container for the matched map points (vvpMapPointMatches) and a discard flag (vbDiscarded).
vpMLPnPsolvers.resize(nKFs);
vector<vector<MapPoint*> > vvpMapPointMatches;
vvpMapPointMatches.resize(nKFs);
vector<bool> vbDiscarded;
vbDiscarded.resize(nKFs);
int nCandidates=0;//nCandidates counts the candidate keyframes that are still valid.
for(int i=0; i<nKFs; i++)
{
KeyFrame* pKF = vpCandidateKFs[i];
if(pKF->isBad())
vbDiscarded[i] = true;
else
{
int nmatches = matcher.SearchByBoW(pKF,mCurrentFrame,vvpMapPointMatches[i]);
if(nmatches<15)
{
vbDiscarded[i] = true;
continue;
}
else
{
//For candidates with enough BoW matches, create an MLPnP solver and set its RANSAC parameters:
//confidence 0.99,
//minimum 10 inliers,
//at most 300 iterations,
//a minimal set of 6 points per hypothesis (MLPnP needs at least 6 points);
//the remaining arguments are the expected inlier ratio (0.5) and the chi-squared inlier threshold (5.991).
MLPnPsolver* pSolver = new MLPnPsolver(mCurrentFrame,vvpMapPointMatches[i]);
pSolver->SetRansacParameters(0.99,10,300,6,0.5,5.991); //This solver needs at least 6 points
vpMLPnPsolvers[i] = pSolver;
nCandidates++;
}
}
}
// Alternatively perform some iterations of P4P RANSAC
// Until we found a camera pose supported by enough inliers
bool bMatch = false;
ORBmatcher matcher2(0.9,true);
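//A second matcher with a more permissive ratio test (0.9) is used below for the guided projection searches that try to recover additional matches.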
//RANSAC loop over the candidate keyframes:
//call the PnP solver's iterate() to compute a camera pose hypothesis and its inliers;
//if a pose is found (bTcw is true), set it on the current frame and associate the inlier map points;
//refine the pose with Optimizer::PoseOptimization using those inliers;
//if fewer than 10 inliers survive the optimization, move on to the next candidate;
//if fewer than 50 inliers survive, search for additional matches by projection in a coarse window and optimize again (and, if 30 < nGood < 50, once more in a narrower window);
//as soon as a pose supported by at least 50 inliers is found, stop the relocalization search.
while(nCandidates>0 && !bMatch)
{
for(int i=0; i<nKFs; i++)
{
if(vbDiscarded[i])
continue;
// Perform 5 Ransac Iterations
vector<bool> vbInliers;
int nInliers;
bool bNoMore;
MLPnPsolver* pSolver = vpMLPnPsolvers[i];
Eigen::Matrix4f eigTcw;
bool bTcw = pSolver->iterate(5,bNoMore,vbInliers,nInliers, eigTcw);
// If RANSAC reaches max. iterations, discard the keyframe
if(bNoMore)
{
vbDiscarded[i]=true;
nCandidates--;
}
// If a Camera Pose is computed, optimize
if(bTcw)
{
Sophus::SE3f Tcw(eigTcw);
mCurrentFrame.SetPose(Tcw);
// Tcw.copyTo(mCurrentFrame.mTcw);
set<MapPoint*> sFound;
const int np = vbInliers.size();
for(int j=0; j<np; j++)
{
if(vbInliers[j])
{
mCurrentFrame.mvpMapPoints[j]=vvpMapPointMatches[i][j];
sFound.insert(vvpMapPointMatches[i][j]);
}
else
mCurrentFrame.mvpMapPoints[j]=NULL;
}
int nGood = Optimizer::PoseOptimization(&mCurrentFrame);
if(nGood<10)
continue;
for(int io =0; io<mCurrentFrame.N; io++)
if(mCurrentFrame.mvbOutlier[io])
mCurrentFrame.mvpMapPoints[io]=static_cast<MapPoint*>(NULL);
// If few inliers, search by projection in a coarse window and optimize again
if(nGood<50)
{
int nadditional =matcher2.SearchByProjection(mCurrentFrame,vpCandidateKFs[i],sFound,10,100);
if(nadditional+nGood>=50)
{
nGood = Optimizer::PoseOptimization(&mCurrentFrame);
// If many inliers but still not enough, search by projection again in a narrower window
// the camera has been already optimized with many points
if(nGood>30 && nGood<50)
{
sFound.clear();
for(int ip =0; ip<mCurrentFrame.N; ip++)
if(mCurrentFrame.mvpMapPoints[ip])
sFound.insert(mCurrentFrame.mvpMapPoints[ip]);
nadditional =matcher2.SearchByProjection(mCurrentFrame,vpCandidateKFs[i],sFound,3,64);
// Final optimization
if(nGood+nadditional>=50)
{
nGood = Optimizer::PoseOptimization(&mCurrentFrame);
for(int io =0; io<mCurrentFrame.N; io++)
if(mCurrentFrame.mvbOutlier[io])
mCurrentFrame.mvpMapPoints[io]=NULL;
}
}
}
}
// If the pose is supported by enough inliers stop ransacs and continue
if(nGood>=50)
{
bMatch = true;
break;
}
}
}
}
if(!bMatch)
{
return false;
}
else
{
mnLastRelocFrameId = mCurrentFrame.mnId;
cout << "Relocalized!!" << endl;
return true;
}
}
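//Reset performs a full system reset: it stops the viewer, resets the local mapper (unless bLocMap is set) and the loop closer, clears the keyframe database and the whole atlas, creates a fresh map, and reinitializes the tracking state and id counters.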
void Tracking::Reset(bool bLocMap)
{
Verbose::PrintMess("System Reseting", Verbose::VERBOSITY_NORMAL);
if(mpViewer)
{
mpViewer->RequestStop();
while(!mpViewer->isStopped())
usleep(3000);
}
// Reset Local Mapping
if (!bLocMap)
{
Verbose::PrintMess("Reseting Local Mapper...", Verbose::VERBOSITY_NORMAL);
mpLocalMapper->RequestReset();
Verbose::PrintMess("done", Verbose::VERBOSITY_NORMAL);
}
// Reset Loop Closing
Verbose::PrintMess("Reseting Loop Closing...", Verbose::VERBOSITY_NORMAL);
mpLoopClosing->RequestReset();
Verbose::PrintMess("done", Verbose::VERBOSITY_NORMAL);
// Clear BoW Database
Verbose::PrintMess("Reseting Database...", Verbose::VERBOSITY_NORMAL);
mpKeyFrameDB->clear();
Verbose::PrintMess("done", Verbose::VERBOSITY_NORMAL);
// Clear Map (this erase MapPoints and KeyFrames)
mpAtlas->clearAtlas();
mpAtlas->CreateNewMap();
if (mSensor==System::IMU_STEREO || mSensor == System::IMU_MONOCULAR || mSensor == System::IMU_RGBD)
mpAtlas->SetInertialSensor();
mnInitialFrameId = 0;
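// Restart the global keyframe and frame id counters from zero.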
KeyFrame::nNextId = 0;
Frame::nNextId = 0;
mState = NO_IMAGES_YET;
mbReadyToInitializate = false;
mbSetInit=false;
mlRelativeFramePoses.clear();
mlpReferences.clear();
mlFrameTimes.clear();
mlbLost.clear();
mCurrentFrame = Frame();
mnLastRelocFrameId = 0;
mLastFrame = Frame();
mpReferenceKF = static_cast<KeyFrame*>(NULL);
mpLastKeyFrame = static_cast<KeyFrame*>(NULL);
mvIniMatches.clear();
if(mpViewer)
mpViewer->Release();
Verbose::PrintMess(" End reseting! ", Verbose::VERBOSITY_NORMAL);
}
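//ResetActiveMap is like Reset but limited to the currently active map: it resets the local mapper and loop closer only for that map, removes its entries from the keyframe database, clears the map itself, and marks the frames that belonged to it as lost in mlbLost.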
void Tracking::ResetActiveMap(bool bLocMap)
{
Verbose::PrintMess("Active map Reseting", Verbose::VERBOSITY_NORMAL);
if(mpViewer)
{
mpViewer->RequestStop();
while(!mpViewer->isStopped())
usleep(3000);
}
Map* pMap = mpAtlas->GetCurrentMap();
if (!bLocMap)
{
Verbose::PrintMess("Reseting Local Mapper...", Verbose::VERBOSITY_VERY_VERBOSE);
mpLocalMapper->RequestResetActiveMap(pMap);
Verbose::PrintMess("done", Verbose::VERBOSITY_VERY_VERBOSE);
}
// Reset Loop Closing
Verbose::PrintMess("Reseting Loop Closing...", Verbose::VERBOSITY_NORMAL);
mpLoopClosing->RequestResetActiveMap(pMap);
Verbose::PrintMess("done", Verbose::VERBOSITY_NORMAL);
// Clear BoW Database
Verbose::PrintMess("Reseting Database", Verbose::VERBOSITY_NORMAL);
mpKeyFrameDB->clearMap(pMap); // Only clear the active map references
Verbose::PrintMess("done", Verbose::VERBOSITY_NORMAL);
// Clear Map (this erase MapPoints and KeyFrames)
mpAtlas->clearMap();
//KeyFrame::nNextId = mpAtlas->GetLastInitKFid();
//Frame::nNextId = mnLastInitFrameId;
mnLastInitFrameId = Frame::nNextId;
//mnLastRelocFrameId = mnLastInitFrameId;
mState = NO_IMAGES_YET; //NOT_INITIALIZED;
mbReadyToInitializate = false;
list<bool> lbLost;
// lbLost.reserve(mlbLost.size());
unsigned int index = mnFirstFrameId;
cout << "mnFirstFrameId = " << mnFirstFrameId << endl;
for(Map* pMap : mpAtlas->GetAllMaps())
{
if(pMap->GetAllKeyFrames().size() > 0)
{
if(index > pMap->GetLowerKFID())
index = pMap->GetLowerKFID();
}
}
//cout << "First Frame id: " << index << endl;
int num_lost = 0;
cout << "mnInitialFrameId = " << mnInitialFrameId << endl;
for(list<bool>::iterator ilbL = mlbLost.begin(); ilbL != mlbLost.end(); ilbL++)
{
if(index < mnInitialFrameId)
lbLost.push_back(*ilbL);
else
{
lbLost.push_back(true);
num_lost += 1;
}
index++;
}
cout << num_lost << " Frames set to lost" << endl;
mlbLost = lbLost;
mnInitialFrameId = mCurrentFrame.mnId;
mnLastRelocFrameId = mCurrentFrame.mnId;
mCurrentFrame = Frame();
mLastFrame = Frame();
mpReferenceKF = static_cast<KeyFrame*>(NULL);
mpLastKeyFrame = static_cast<KeyFrame*>(NULL);
mvIniMatches.clear();
mbVelocity = false;
if(mpViewer)
mpViewer->Release();
Verbose::PrintMess(" End reseting! ", Verbose::VERBOSITY_NORMAL);
}
vector<MapPoint*> Tracking::GetLocalMapMPS()
{
return mvpLocalMapPoints;
}
void Tracking::ChangeCalibration(const string &strSettingPath)
{
cv::FileStorage fSettings(strSettingPath, cv::FileStorage::READ);
float fx = fSettings["Camera.fx"];
float fy = fSettings["Camera.fy"];
float cx = fSettings["Camera.cx"];
float cy = fSettings["Camera.cy"];
mK_.setIdentity();
mK_(0,0) = fx;
mK_(1,1) = fy;
mK_(0,2) = cx;
mK_(1,2) = cy;
cv::Mat K = cv::Mat::eye(3,3,CV_32F);
K.at<float>(0,0) = fx;
K.at<float>(1,1) = fy;
K.at<float>(0,2) = cx;
K.at<float>(1,2) = cy;
K.copyTo(mK);
cv::Mat DistCoef(4,1,CV_32F);
DistCoef.at<float>(0) = fSettings["Camera.k1"];
DistCoef.at<float>(1) = fSettings["Camera.k2"];
DistCoef.at<float>(2) = fSettings["Camera.p1"];
DistCoef.at<float>(3) = fSettings["Camera.p2"];
const float k3 = fSettings["Camera.k3"];
if(k3!=0)
{
DistCoef.resize(5);
DistCoef.at<float>(4) = k3;
}
DistCoef.copyTo(mDistCoef);
mbf = fSettings["Camera.bf"];
Frame::mbInitialComputations = true;
}
void Tracking::InformOnlyTracking(const bool &flag)
{
mbOnlyTracking = flag;
}
void Tracking::UpdateFrameIMU(const float s, const IMU::Bias &b, KeyFrame* pCurrentKeyFrame)
{
Map * pMap = pCurrentKeyFrame->GetMap();
unsigned int index = mnFirstFrameId;
list<ORB_SLAM3::KeyFrame*>::iterator lRit = mlpReferences.begin();
list<bool>::iterator lbL = mlbLost.begin();
for(auto lit=mlRelativeFramePoses.begin(),lend=mlRelativeFramePoses.end();lit!=lend;lit++, lRit++, lbL++)
{
if(*lbL)
continue;
KeyFrame* pKF = *lRit;
while(pKF->isBad())
{
pKF = pKF->GetParent();
}
if(pKF->GetMap() == pMap)
{
(*lit).translation() *= s;
}
}
mLastBias = b;
mpLastKeyFrame = pCurrentKeyFrame;
mLastFrame.SetNewBias(mLastBias);
mCurrentFrame.SetNewBias(mLastBias);
while(!mCurrentFrame.imuIsPreintegrated())
{
usleep(500);
}
if(mLastFrame.mnId == mLastFrame.mpLastKeyFrame->mnFrameId)
{
mLastFrame.SetImuPoseVelocity(mLastFrame.mpLastKeyFrame->GetImuRotation(),
mLastFrame.mpLastKeyFrame->GetImuPosition(),
mLastFrame.mpLastKeyFrame->GetVelocity());
}
else
{
const Eigen::Vector3f Gz(0, 0, -IMU::GRAVITY_VALUE);
const Eigen::Vector3f twb1 = mLastFrame.mpLastKeyFrame->GetImuPosition();
const Eigen::Matrix3f Rwb1 = mLastFrame.mpLastKeyFrame->GetImuRotation();
const Eigen::Vector3f Vwb1 = mLastFrame.mpLastKeyFrame->GetVelocity();
float t12 = mLastFrame.mpImuPreintegrated->dT;
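//Predict the frame state from the last keyframe with the standard preintegration kinematics,
//where dR, dP, dV are the bias-corrected preintegrated rotation, position and velocity increments over t12:
//  Rwb2 = Rwb1 * dR
//  twb2 = twb1 + Vwb1*t12 + 0.5*Gz*t12^2 + Rwb1*dP
//  Vwb2 = Vwb1 + Gz*t12 + Rwb1*dV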
mLastFrame.SetImuPoseVelocity(IMU::NormalizeRotation(Rwb1*mLastFrame.mpImuPreintegrated->GetUpdatedDeltaRotation()),
twb1 + Vwb1*t12 + 0.5f*t12*t12*Gz+ Rwb1*mLastFrame.mpImuPreintegrated->GetUpdatedDeltaPosition(),
Vwb1 + Gz*t12 + Rwb1*mLastFrame.mpImuPreintegrated->GetUpdatedDeltaVelocity());
}
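//Apply the same IMU prediction to the current frame, relative to its last keyframe.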
if (mCurrentFrame.mpImuPreintegrated)
{
const Eigen::Vector3f Gz(0, 0, -IMU::GRAVITY_VALUE);
const Eigen::Vector3f twb1 = mCurrentFrame.mpLastKeyFrame->GetImuPosition();
const Eigen::Matrix3f Rwb1 = mCurrentFrame.mpLastKeyFrame->GetImuRotation();
const Eigen::Vector3f Vwb1 = mCurrentFrame.mpLastKeyFrame->GetVelocity();
float t12 = mCurrentFrame.mpImuPreintegrated->dT;
mCurrentFrame.SetImuPoseVelocity(IMU::NormalizeRotation(Rwb1*mCurrentFrame.mpImuPreintegrated->GetUpdatedDeltaRotation()),
twb1 + Vwb1*t12 + 0.5f*t12*t12*Gz+ Rwb1*mCurrentFrame.mpImuPreintegrated->GetUpdatedDeltaPosition(),
Vwb1 + Gz*t12 + Rwb1*mCurrentFrame.mpImuPreintegrated->GetUpdatedDeltaVelocity());
}
mnFirstImuFrameId = mCurrentFrame.mnId;
}
void Tracking::NewDataset()
{
mnNumDataset++;
}
int Tracking::GetNumberDataset()
{
return mnNumDataset;
}
int Tracking::GetMatchesInliers()
{
return mnMatchesInliers;
}
void Tracking::SaveSubTrajectory(string strNameFile_frames, string strNameFile_kf, string strFolder)
{
mpSystem->SaveTrajectoryEuRoC(strFolder + strNameFile_frames);
//mpSystem->SaveKeyFrameTrajectoryEuRoC(strFolder + strNameFile_kf);
}
void Tracking::SaveSubTrajectory(string strNameFile_frames, string strNameFile_kf, Map* pMap)
{
mpSystem->SaveTrajectoryEuRoC(strNameFile_frames, pMap);
if(!strNameFile_kf.empty())
mpSystem->SaveKeyFrameTrajectoryEuRoC(strNameFile_kf, pMap);
}
float Tracking::GetImageScale()
{
return mImageScale;
}
#ifdef REGISTER_LOOP
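//Helpers to request, query and release a stop of the tracking thread, all guarded by mMutexStop; they are only compiled when REGISTER_LOOP is defined.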
void Tracking::RequestStop()
{
unique_lock<mutex> lock(mMutexStop);
mbStopRequested = true;
}
bool Tracking::Stop()
{
unique_lock<mutex> lock(mMutexStop);
if(mbStopRequested && !mbNotStop)
{
mbStopped = true;
cout << "Tracking STOP" << endl;
return true;
}
return false;
}
bool Tracking::stopRequested()
{
unique_lock<mutex> lock(mMutexStop);
return mbStopRequested;
}
bool Tracking::isStopped()
{
unique_lock<mutex> lock(mMutexStop);
return mbStopped;
}
void Tracking::Release()
{
unique_lock<mutex> lock(mMutexStop);
mbStopped = false;
mbStopRequested = false;
}
#endif
} //namespace ORB_SLAM