BitmapToolkit Scol plugin
sSlam_orb.cpp
/*
-----------------------------------------------------------------------------
This source file is part of OpenSpace3D
For the latest info, see http://www.openspace3d.com

Copyright (c) 2012 I-maginer

This program is free software; you can redistribute it and/or modify it under
the terms of the GNU Lesser General Public License as published by the Free Software
Foundation; either version 2 of the License, or (at your option) any later
version.

This program is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details.

You should have received a copy of the GNU Lesser General Public License along with
this program; if not, write to the Free Software Foundation, Inc., 59 Temple
Place - Suite 330, Boston, MA 02111-1307, USA, or go to
http://www.gnu.org/copyleft/lesser.txt

-----------------------------------------------------------------------------
*/

//#include "sService.h"

#include "ArManager.h"
#include "ArCameraParam.h"
#include "sSlam_orb.h"

#include "ORB_SLAM2/Random.h"

#include <vector>

#ifdef WIN32
#include <direct.h>
#define GetCurrentDir _getcwd
#else
#include <unistd.h>
#define GetCurrentDir getcwd
#endif

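// Returns the current working directory with backslashes normalized to forward
// slashes; used below to locate the ORB vocabulary on desktop builds.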
std::string GetCurrentWorkingDir(void)
{
    char buff[FILENAME_MAX];
    GetCurrentDir(buff, FILENAME_MAX);
    std::string current_working_dir(buff);

    for (unsigned int i = 0; i < current_working_dir.length(); i++)
    {
        if (current_working_dir.substr(i, 1) == "\\")
            current_working_dir.replace(i, 1, "/");
    }
    return current_working_dir;
}

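// ExpSO3: exponential map from so(3) to SO(3). Builds a rotation matrix from an
// axis-angle vector (x, y, z) with Rodrigues' formula, falling back to the
// second-order Taylor expansion when the rotation angle is close to zero.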
const float eps = 1e-4;
cv::Mat ExpSO3(const float &x, const float &y, const float &z)
{
    cv::Mat I = cv::Mat::eye(3, 3, CV_32F);
    const float d2 = x * x + y * y + z * z;
    const float d = sqrt(d2);
    cv::Mat W;
    W = (cv::Mat_<float>(3, 3) << 0, -z, y, z, 0, -x, -y, x, 0);

    if (d < eps)
        return (I + W + 0.5f*W*W);
    else
        return (I + W * sin(d) / d + W * W*(1.0f - cos(d)) / d2);
}

cv::Mat ExpSO3(const cv::Mat &v)
{
    return ExpSO3(v.at<float>(0), v.at<float>(1), v.at<float>(2));
}

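// Camera2Pixel: projects a 3D point expressed in camera coordinates (poseCamera)
// into pixel coordinates using the camera calibration matrix mk (fx, fy, cx, cy).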
cv::Point2f Camera2Pixel(cv::Mat poseCamera, cv::Mat mk)
{
    return cv::Point2f(
        poseCamera.at<float>(0, 0) / poseCamera.at<float>(2, 0)*mk.at<float>(0, 0) + mk.at<float>(0, 2),
        poseCamera.at<float>(1, 0) / poseCamera.at<float>(2, 0)*mk.at<float>(1, 1) + mk.at<float>(1, 2)
    );
}

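// Plane::DetectPlane: fits a dominant plane to well-observed map points with a
// RANSAC-style loop. Each iteration samples three points, solves the plane by SVD
// and scores the hypothesis with a low-percentile point-plane distance; the
// inliers of the best hypothesis are kept and the plane is refined in Recompute().
// Returns the plane-to-world transform mTpw (left unchanged if fewer than 50
// candidate points are available).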
cv::Mat Plane::DetectPlane(const cv::Mat Tcw, const std::vector<ORB_SLAM2::MapPoint*> &vMPs, const int iterations)
{
    // Retrieve 3D points
    std::vector<cv::Mat> vPoints;
    vPoints.reserve(vMPs.size());
    std::vector<ORB_SLAM2::MapPoint*> vPointMP;
    vPointMP.reserve(vMPs.size());

    for (size_t i = 0; i < vMPs.size(); i++)
    {
        ORB_SLAM2::MapPoint* pMP = vMPs[i];
        if (pMP)
        {
            if (pMP->Observations() > 5)
            {
                vPoints.push_back(pMP->GetWorldPos());
                vPointMP.push_back(pMP);
            }
        }
    }

    const int N = vPoints.size();

    if (N < 50)
        return mTpw;

    // Indices for minimum set selection
    std::vector<size_t> vAllIndices;
    vAllIndices.reserve(N);
    std::vector<size_t> vAvailableIndices;

    for (int i = 0; i < N; i++)
    {
        vAllIndices.push_back(i);
    }

    float bestDist = 1e10;
    std::vector<float> bestvDist;

    //RANSAC
    for (int n = 0; n < iterations; n++)
    {
        vAvailableIndices = vAllIndices;

        cv::Mat A(3, 4, CV_32F);
        A.col(3) = cv::Mat::ones(3, 1, CV_32F);

        // Get min set of points
        for (short i = 0; i < 3; ++i)
        {
            int randi = DUtils::Random::RandomInt(0, vAvailableIndices.size() - 1);

            int idx = vAvailableIndices[randi];

            A.row(i).colRange(0, 3) = vPoints[idx].t();

            vAvailableIndices[randi] = vAvailableIndices.back();
            vAvailableIndices.pop_back();
        }

        cv::Mat u, w, vt;
        cv::SVDecomp(A, w, u, vt, cv::SVD::MODIFY_A | cv::SVD::FULL_UV);

        const float a = vt.at<float>(3, 0);
        const float b = vt.at<float>(3, 1);
        const float c = vt.at<float>(3, 2);
        const float d = vt.at<float>(3, 3);

        std::vector<float> vDistances(N, 0);

        const float f = 1.0f / sqrt(a*a + b * b + c * c + d * d);

        for (int i = 0; i < N; i++)
        {
            vDistances[i] = fabs(vPoints[i].at<float>(0)*a + vPoints[i].at<float>(1)*b + vPoints[i].at<float>(2)*c + d)*f;
        }

        std::vector<float> vSorted = vDistances;
        std::sort(vSorted.begin(), vSorted.end());

        int nth = std::max((int)(0.2*N), 20);
        const float medianDist = vSorted[nth];

        if (medianDist < bestDist)
        {
            bestDist = medianDist;
            bestvDist = vDistances;
        }
    }

    // Compute threshold inlier/outlier
    const float th = 1.4 * bestDist;
    std::vector<bool> vbInliers(N, false);
    int nInliers = 0;
    for (int i = 0; i < N; i++)
    {
        if (bestvDist[i] < th)
        {
            nInliers++;
            vbInliers[i] = true;
        }
    }

    std::vector<ORB_SLAM2::MapPoint*> vInlierMPs(nInliers, NULL);
    int nin = 0;
    for (int i = 0; i < N; i++)
    {
        if (vbInliers[i])
        {
            vInlierMPs[nin] = vPointMP[i];
            nin++;
        }
    }

    mMvMPs = vInlierMPs;
    mMTcw = Tcw.clone();
    mRang = -3.14f / 2 + ((float)rand() / RAND_MAX)*3.14f;
    Recompute();
    return mTpw;
}

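// Plane::Recompute: refits the plane to all surviving inlier map points by least
// squares (SVD of the stacked homogeneous coordinates), flips the normal so it
// points toward the camera, and rebuilds mTpw with a rotation aligning the up
// vector (0, 1, 0) to the plane normal and a translation to the inlier centroid.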
void Plane::Recompute()
{
    const int N = mMvMPs.size();

    // Recompute plane with all points
    cv::Mat A = cv::Mat(N, 4, CV_32F);
    A.col(3) = cv::Mat::ones(N, 1, CV_32F);

    mOrigin = cv::Mat::zeros(3, 1, CV_32F);

    int nPoints = 0;
    for (int i = 0; i < N; i++)
    {
        ORB_SLAM2::MapPoint* pMP = mMvMPs[i];
        if (!pMP->isBad())
        {
            cv::Mat Xw = pMP->GetWorldPos();
            mOrigin += Xw;
            A.row(nPoints).colRange(0, 3) = Xw.t();
            nPoints++;
        }
    }
    A.resize(nPoints);

    cv::Mat u, w, vt;
    cv::SVDecomp(A, w, u, vt, cv::SVD::MODIFY_A | cv::SVD::FULL_UV);

    float a = vt.at<float>(3, 0);
    float b = vt.at<float>(3, 1);
    float c = vt.at<float>(3, 2);

    mOrigin = mOrigin * (1.0f / nPoints);
    const float f = 1.0f / sqrt(a * a + b * b + c * c);

    // Compute XC just the first time
    if (mXC.empty())
    {
        cv::Mat Oc = -mMTcw.colRange(0, 3).rowRange(0, 3).t() * mMTcw.rowRange(0, 3).col(3);
        mXC = Oc - mOrigin;
    }

    if ((mXC.at<float>(0) * a + mXC.at<float>(1) * b + mXC.at<float>(2) * c) < 0)
    {
        a = -a;
        b = -b;
        c = -c;
    }

    const float nx = a * f;
    const float ny = b * f;
    const float nz = c * f;

    mNormal = (cv::Mat_<float>(3, 1) << nx, ny, nz);

    cv::Mat up;
    up = (cv::Mat_<float>(3, 1) << 0.0f, 1.0f, 0.0f);

    cv::Mat v = up.cross(mNormal);
    const float sa = cv::norm(v);
    const float ca = up.dot(mNormal);
    const float ang = atan2(sa, ca);
    mTpw = cv::Mat::eye(4, 4, CV_32F);

    //mTpw.rowRange(0, 3).colRange(0, 3) = ExpSO3(v * ang / sa) * ExpSO3(up * mRang);
    ExpSO3(v*ang / sa).copyTo(mTpw.rowRange(0, 3).colRange(0, 3));
    mOrigin.copyTo(mTpw.col(3).rowRange(0, 3));
}

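// Sslam constructor: sets default camera and ORB extractor parameters, loads the
// binary ORB vocabulary and starts the processing thread.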
Sslam::Sslam()
{
    mIsDirty = false;
    needUpdate = false;
    mScale = 1.0f;
    mSlam = 0;
    mTimestamp = 0.0;
    mFound = false;
    mInitialized = false;
    mPlane2World = cv::Mat::eye(4, 4, CV_32F);
    mFullInit = 0;

    mParams.Camera.width = 640;
    mParams.Camera.height = 480;
    mParams.Camera.fx = 640.0;
    mParams.Camera.fy = 640.0;
    mParams.Camera.cx = 320.0;
    mParams.Camera.cy = 240.0;
    mParams.Camera.k1 = 0.0;
    mParams.Camera.k2 = 0.0;
    mParams.Camera.p1 = 0.0;
    mParams.Camera.p2 = 0.0;
    mParams.Camera.k3 = 0.0;
    mParams.Camera.bf = 0.0;
    mParams.Camera.fps = 30.0;
    mParams.Camera.RGB = 1;

    mParams.ORBextractor.nFeatures = 800;
    mParams.ORBextractor.nLevels = 8;
    mParams.ORBextractor.scaleFactor = 1.2;
    mParams.ORBextractor.iniThFAST = 20;
    mParams.ORBextractor.minThFAST = 7;

    mParams.DepthMapFactor = 1.0;
    mParams.ThDepth = 1;

    std::string strVocFile;
#ifdef ANDROID
    strVocFile = CopyFileFromAssetToSD("btdata/orbvoc.bin", "orbvoc.bin", "btdata");
#else
    strVocFile = GetCurrentWorkingDir() + "/plugins/btdata/orbvoc.bin";
#endif

    mVocabulary = new ORB_SLAM2::ORBVocabulary();
    mVocabulary->loadFromBinaryFile(strVocFile);

    // Run thread
    StartThreading(boost::bind(&Sslam::GoThread, this));
}

Sslam::~Sslam()
{
    if (mSlam)
        mSlam->Shutdown();

    SAFE_DELETE(mSlam);
    SAFE_DELETE(mVocabulary);
}

void Sslam::SetDirty()
{
    mIsDirty = true;
}

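// Sslam::BuildCameraParam: derives the SLAM camera and ORB parameters from the
// capture parameters. Frames are downscaled so the largest dimension stays at or
// below 640 px, the principal point is centered, the focal length is approximated
// by the larger image dimension, and the number of ORB pyramid levels is chosen
// from the image size.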
void Sslam::BuildCameraParam(const aruco::CameraParameters &camparam)
{
    mScale = 1.0f;

    int maxsize = std::max(camparam.CamSize.width, camparam.CamSize.height);
    if (maxsize > 640)
        mScale = 640.0f / (float)maxsize;

    /*if (maxsize > 1600)
        mScale = 0.4f;
    else if (maxsize > 1200)
        mScale = 0.53333f;
    else if (maxsize > 750)
        mScale = 0.85333f;
    else if (maxsize > 640)
        mScale = 0.9f;
    else
        mScale = 1.0f;

#if defined(ANDROID) || defined(APPLE_IOS) || defined(RPI)
    if (maxsize > 640)
        mScale *= 0.75f;
#endif
    */

    /*
    752.277145
    752.277145
    403.000000
    301.500000
    */

    //construct camParam
    mParams.Camera.width = (int)((float)camparam.CamSize.width * mScale);
    mParams.Camera.height = (int)((float)camparam.CamSize.height * mScale);

    mParams.Camera.cx = static_cast<float>(mParams.Camera.width) * 0.5f;
    mParams.Camera.cy = static_cast<float>(mParams.Camera.height) * 0.5f;

    float f = std::max(mParams.Camera.width, mParams.Camera.height);
    mParams.Camera.fx = f;
    mParams.Camera.fy = f;

    int wlvl = mParams.Camera.width;
    int hlvl = mParams.Camera.height;
    int pyrLevels = 1;
    while (wlvl % 2 == 0 && hlvl % 2 == 0 && wlvl*hlvl > 5000 && pyrLevels < 12)
    {
        wlvl /= 2;
        hlvl /= 2;
        pyrLevels++;
    }

    mParams.ORBextractor.nLevels = pyrLevels;
}

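// Sslam::InitDetector: (re)creates the ORB_SLAM2 system in monocular mode under
// the update mutex and resets the tracking and initialization state.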
void Sslam::InitDetector()
{
    boost::unique_lock<boost::recursive_mutex> ld(mUpdateMutex);

    if (mSlam)
        mSlam->Shutdown();

    SAFE_DELETE(mSlam);

    try
    {
        // build a SLAM system
        mSlam = new ORB_SLAM2::System(mVocabulary, mParams, ORB_SLAM2::System::MONOCULAR, false);
        mSlam->DeactivateLocalizationMode();
        //mSlam->ActivateLocalizationMode();
    }
    catch (std::exception &)
    {
        SAFE_DELETE(mSlam);
    }
    mFound = false;
    mInitialized = false;
    mTimestamp = 0.0;
    ld.unlock();
}

Vector3 Sslam::GetCameraPosition()
{
    return mLastCamPos;
}

BtQuaternion Sslam::GetCameraOrientation()
{
    return mLastCamQuat;
}

bool Sslam::IsFound()
{
    return mFound;
}

void Sslam::Reset()
{
    InitDetector();
}

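// Sslam::DrawLandmarks: overlays the current tracking state on the given image.
// Draws the tracked keypoints, the detected plane's coordinate axes (once a plane
// origin is known and tracking is OK) and a text label for the tracker state.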
void Sslam::DrawLandmarks(cv::Mat image)
{
    if (mSlam && !image.empty())
    {
        //boost::unique_lock<boost::recursive_mutex> ld(mUpdateMutex);
        std::vector<cv::KeyPoint> keypts = mSlam->GetTrackedKeyPointsUn();
        cv::Scalar color(0, 255, 0);

        for (unsigned int i = 0; i < keypts.size(); i++)
        {
            cv::KeyPoint kp = keypts[i];
            cv::circle(image, kp.pt / mScale, 2, color);
        }

        if (!mCentroid.empty() && (mSlam->GetTrackingState() == 2))
        {
            ORB_SLAM2::Tracking* tracker = mSlam->GetTracker();

            std::vector<cv::Mat> axisPoints(4);
            axisPoints[0] = (cv::Mat_<float>(4, 1) << 0.0, 0, 0, 1);
            axisPoints[1] = (cv::Mat_<float>(4, 1) << 0.3, 0, 0, 1);
            axisPoints[2] = (cv::Mat_<float>(4, 1) << 0.0, 0.3, 0, 1);
            axisPoints[3] = (cv::Mat_<float>(4, 1) << 0, 0, 0.3, 1);
            std::vector<cv::Point2f> drawPoints(4);
            for (int i = 0; i < 4; i++)
            {
                axisPoints[i] = mPlane2Camera * axisPoints[i];
                drawPoints[i] = Camera2Pixel(axisPoints[i], tracker->GetCalibrationMatrix());
            }

            cv::line(image, drawPoints[0] / mScale, drawPoints[1] / mScale, cv::Scalar(250, 0, 0), 5);
            cv::line(image, drawPoints[0] / mScale, drawPoints[2] / mScale, cv::Scalar(0, 250, 0), 5);
            cv::line(image, drawPoints[0] / mScale, drawPoints[3] / mScale, cv::Scalar(0, 0, 250), 5);

            /*
            std::vector<cv::Point3f> drawRectPoints(8);
            drawRectPoints[0] = cv::Point3f(-0.15, 0, -0.15);
            drawRectPoints[1] = cv::Point3f(0.15, 0, -0.15);
            drawRectPoints[2] = cv::Point3f(-0.15, 0, 0.15);
            drawRectPoints[3] = cv::Point3f(-0.15, 0.3, -0.15);
            drawRectPoints[4] = cv::Point3f(-0.15, 0.3, 0.15);
            drawRectPoints[5] = cv::Point3f(0.15, 0.3, 0.15);
            drawRectPoints[6] = cv::Point3f(0.15, 0, 0.15);
            drawRectPoints[7] = cv::Point3f(0.15, 0.3, -0.15);

            cv::Mat Rcp, Tcp;
            std::vector<cv::Point2f> projectedPoints;
            cv::Rodrigues(mPlane2Camera.rowRange(0, 3).colRange(0, 3), Rcp);
            Tcp = mPlane2Camera.col(3).rowRange(0, 3);
            cv::projectPoints(drawRectPoints, Rcp, Tcp, tracker->GetCalibrationMatrix(), tracker->GetDistortionMatrix(), projectedPoints);

            cv::line(image, projectedPoints[0] / mScale, projectedPoints[1] / mScale, cv::Scalar(250, 0, 0), 5);
            cv::line(image, projectedPoints[0] / mScale, projectedPoints[2] / mScale, cv::Scalar(0, 250, 0), 5);
            cv::line(image, projectedPoints[0] / mScale, projectedPoints[3] / mScale, cv::Scalar(0, 0, 250), 5);
            cv::line(image, projectedPoints[1] / mScale, projectedPoints[7] / mScale, cv::Scalar(10, 0, 50), 2);
            cv::line(image, projectedPoints[3] / mScale, projectedPoints[7] / mScale, cv::Scalar(20, 0, 50), 2);
            cv::line(image, projectedPoints[3] / mScale, projectedPoints[4] / mScale, cv::Scalar(30, 0, 50), 2);
            cv::line(image, projectedPoints[2] / mScale, projectedPoints[4] / mScale, cv::Scalar(40, 0, 50), 2);
            cv::line(image, projectedPoints[1] / mScale, projectedPoints[6] / mScale, cv::Scalar(50, 0, 50), 2);
            cv::line(image, projectedPoints[2] / mScale, projectedPoints[6] / mScale, cv::Scalar(60, 0, 50), 2);
            cv::line(image, projectedPoints[4] / mScale, projectedPoints[5] / mScale, cv::Scalar(70, 0, 50), 2);
            cv::line(image, projectedPoints[5] / mScale, projectedPoints[6] / mScale, cv::Scalar(80, 0, 50), 2);
            cv::line(image, projectedPoints[5] / mScale, projectedPoints[7] / mScale, cv::Scalar(90, 0, 50), 2);

            cv::circle(image, projectedPoints[0] / mScale, 4, cv::Scalar(0, 0, 250), 1, 8);
            */
        }

        switch (mSlam->GetTrackingState())
        {
        case -1: {cv::putText(image, "SYSTEM NOT READY", cv::Point(20, 50), cv::FONT_HERSHEY_SIMPLEX, 0.5, cv::Scalar(0, 0, 255), 1); }break;
        case 0: {cv::putText(image, "NO IMAGES YET", cv::Point(20, 50), cv::FONT_HERSHEY_SIMPLEX, 0.5, cv::Scalar(0, 0, 255), 1); }break;
        case 1: {cv::putText(image, "SLAM NOT INITIALIZED", cv::Point(20, 50), cv::FONT_HERSHEY_SIMPLEX, 0.5, cv::Scalar(0, 0, 255), 1); }break;
        case 2: {cv::putText(image, "SLAM ON", cv::Point(20, 50), cv::FONT_HERSHEY_SIMPLEX, 0.5, cv::Scalar(0, 255, 0), 1); break; }
        case 3: {cv::putText(image, "SLAM LOST", cv::Point(20, 50), cv::FONT_HERSHEY_SIMPLEX, 0.5, cv::Scalar(0, 0, 255), 1); break; }
        default:break;
        }
        //ld.unlock();
    }
}

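// Sslam::Detect: feeds the last grayscale frame to the monocular tracker. Once
// tracking has been stable for a few frames, a reference plane is detected (or the
// existing plane-to-world transform is reused), and the camera-to-plane transform
// is converted into the exported camera position and orientation.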
void Sslam::Detect()
{
    int prevState = mSlam->GetTrackingState();
    cv::Mat pose;
    {
        boost::unique_lock<boost::recursive_mutex> ld(mUpdateMutex);
        pose = mSlam->TrackMonocular(mLastData.gray, mTimestamp);
        if (mSlam->MapChanged() || (mSlam->GetTrackingState() < 2))
        {
            mInitialized = false;
            mFound = false;
        }

        //mFound = (mSlam->GetTrackingState() == 2) ? true : false;
        ld.unlock();
    }

    // give the tracker time to initialize completely
    if ((mFullInit < 10) && (mSlam->GetTrackingState() == 2))
        mFullInit += 1;
    else if (mSlam->GetTrackingState() != 2)
        mFullInit = 0;

    if (mFullInit < 10)
        mFound = false;
    else
    {
        if (pose.empty() || (mSlam->GetTrackingState() != 2))
        {
            mFound = false;
        }
        else
        {
            if (!mInitialized)
            {
                Plane mplane;
                const std::vector<ORB_SLAM2::MapPoint*> vpMPs = mSlam->GetTracker()->GetMap()->GetAllMapPoints();
                const std::vector<ORB_SLAM2::MapPoint*> vpTMPs = mSlam->GetTrackedMapPoints();
                if (vpMPs.size() > 150)
                {
                    mPlane2World = mplane.DetectPlane(pose, vpTMPs, 50);

                    if (!mPlane2World.empty())
                    {
                        mCentroid = mplane.mOrigin;
                        mInitialized = true;
                        mPlane2Camera = pose * mPlane2World;
                    }
                }
            }
            else
            {
                mPlane2Camera = pose * mPlane2World;
            }

            if (!mInitialized || pose.empty())
            {
                mFound = false;
            }
            else
            {
                mFound = true;
                cv::Mat camtoplane = mPlane2Camera.inv();

                double modelview_matrix[16];
                modelview_matrix[0] = -camtoplane.at<float>(0, 0);
                modelview_matrix[1] = -camtoplane.at<float>(1, 0);
                modelview_matrix[2] = -camtoplane.at<float>(2, 0);
                modelview_matrix[3] = 0.0;

                modelview_matrix[4] = camtoplane.at<float>(0, 1);
                modelview_matrix[5] = camtoplane.at<float>(1, 1);
                modelview_matrix[6] = camtoplane.at<float>(2, 1);
                modelview_matrix[7] = 0.0;

                modelview_matrix[8] = camtoplane.at<float>(0, 2);
                modelview_matrix[9] = camtoplane.at<float>(1, 2);
                modelview_matrix[10] = camtoplane.at<float>(2, 2);
                modelview_matrix[11] = 0.0;

                modelview_matrix[12] = camtoplane.at<float>(0, 3);
                modelview_matrix[13] = camtoplane.at<float>(1, 3);
                modelview_matrix[14] = camtoplane.at<float>(2, 3);
                modelview_matrix[15] = 1.0;

                mLastCamQuat = BtQuaternion(0.0f, 0.0f, 1.0f, 0.0f) * BtQuaternion::FromRotationMatrix(modelview_matrix, mLastData.reversedBitmap);
                mLastCamPos = Vector3(static_cast<float>(mLastData.reversedBitmap ? -modelview_matrix[12] : modelview_matrix[12]), static_cast<float>(modelview_matrix[13]), static_cast<float>(modelview_matrix[14]));
            }
        }
    }
}

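// Sslam::GoThread: worker loop. Pulls the latest frame from the capture manager,
// rebuilds the detector when the camera size changes, rescales the frame, updates
// the running timestamp and calls Detect(); sleeps briefly when no new frame is
// available.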
void Sslam::GoThread()
{
    std::chrono::steady_clock::time_point lastTimeStamp = std::chrono::steady_clock::now();
    while (IsRunning())
    {
        if (needUpdate)
        {
            ArManager* manager = ArManager::GetInstance();
            manager->GetLastData(mLastData);

            needUpdate = false;

            //cv::blur(mLastData.gray, mLastData.gray, cv::Size(2, 2));

            // process the latest gray frame

            // camera size changed
            if (!mSlam || (((int)((float)mLastData.camParam.CamSize.width * mScale)) != mParams.Camera.width || ((int)((float)mLastData.camParam.CamSize.height * mScale)) != mParams.Camera.height))
                mIsDirty = true;

            // init detector
            if (mIsDirty)
            {
                mIsDirty = false;
                BuildCameraParam(mLastData.camParam);
                InitDetector();
                lastTimeStamp = std::chrono::steady_clock::now();
            }

            if (mScale != 1.0f)
                cv::resize(mLastData.gray, mLastData.gray, cv::Size((int)((float)mLastData.gray.cols * mScale), (int)((float)mLastData.gray.rows * mScale)), 0, 0, cv::INTER_LINEAR);

            //cv::equalizeHist(mLastData.gray, mLastData.gray);

            // run the detection once the frame has been copied under the mutex
            if (!mLastData.image.empty() && mSlam)
            {
                std::chrono::steady_clock::time_point t1 = std::chrono::steady_clock::now();
                double ttrack = std::chrono::duration_cast<std::chrono::duration<double>>(t1 - lastTimeStamp).count();
                mTimestamp += ttrack;
                lastTimeStamp = std::chrono::steady_clock::now();
                Detect();
            }
        }
        else
        {
            // no new frame available: sleep briefly to avoid burning CPU
            boost::this_thread::sleep_for(boost::chrono::milliseconds(1));
        }
    }
}