BitmapToolkit Scol plugin
ArFaceMarker.cpp
/*
-----------------------------------------------------------------------------
This source file is part of OpenSpace3D
For the latest info, see http://www.openspace3d.com

Copyright (c) 2012 I-maginer

This program is free software; you can redistribute it and/or modify it under
the terms of the GNU Lesser General Public License as published by the Free Software
Foundation; either version 2 of the License, or (at your option) any later
version.

This program is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details.

You should have received a copy of the GNU Lesser General Public License along with
this program; if not, write to the Free Software Foundation, Inc., 59 Temple
Place - Suite 330, Boston, MA 02111-1307, USA, or go to
http://www.gnu.org/copyleft/lesser.txt

-----------------------------------------------------------------------------
*/

/*
  Toolkit based on the OpenCV library
  First version: Dec 2010
  Author: Bastien BOURINEAU
*/

#include "ArFaceMarker.h"
#include <cstdio>

//#include "emotime/BoostEmoDetector.h"
//#include "emotime/SVMEmoDetector.h"
//#include "emotime/matrix_io.h"
//#include "emotime/TrainingParameters.h"

//#define DEBUG_FACE_TRACKING

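// ArFaceMarker tracks a detected face region as an AR marker: reference feature
// points are picked inside the face rectangle, followed frame to frame with
// pyramidal Lucas-Kanade optical flow, and the four reference corners are
// re-projected through a homography to recover the marker pose.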
ArFaceMarker::ArFaceMarker(unsigned int mid, float size) : ArFeaturedMarker(mid, size)
{
    m_bInitialized = false;
    m_nbInitFrames = 1;
    m_initFrames = 1;
    m_minInliners = 5;
    m_maxFeatures = 100;

    m_PosSmoothers.Init(0.25f, 0.25f, 0.25f, 0.1f, 0.2f);

    //debug
#ifdef DEBUG_FACE_TRACKING
    cv::namedWindow("Face Tracking", CV_WINDOW_AUTOSIZE);
    cv::namedWindow("Face Reference", CV_WINDOW_AUTOSIZE);
#endif
}

bool ArFaceMarker::IsInitialized()
{
    return m_bInitialized;
}

void ArFaceMarker::SetImage(const cv::Mat image, std::vector<cv::Rect> objRects)
{
    m_bInitialized = false;

    if (objRects.size() < 1)
        return;

    m_ObjRect = objRects[0];
    // reduce the rect to be sure to get features in the correct area
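    // (keeps the central 2/3 of the width and 8/10 of the height, re-centred on the detection)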
    float wdiff = (objRects[0].width / 3.0f);
    float hdiff = (objRects[0].height / 10.0f);
    float woff = objRects[0].width - (wdiff * 2.0f);
    float hoff = objRects[0].height - (hdiff * 8.0f);
    m_ObjRect.width = (int)wdiff * 2;
    m_ObjRect.height = (int)hdiff * 8;
    m_ObjRect.x = objRects[0].x + ((int)woff / 2);
    m_ObjRect.y = objRects[0].y + ((int)hoff / 2);

    image.copyTo(m_image);
    m_image = m_image(m_ObjRect);
    SetTrackedImage(m_image);

    int patchSize = m_image.cols / 6;
    if (!m_image.empty())
    {
        m_pcorners.clear();
        m_pcorners.push_back(cv::Point2f((float)m_ObjRect.x, (float)m_ObjRect.y));
        m_pcorners.push_back(cv::Point2f((float)m_ObjRect.x + m_ObjRect.width, (float)m_ObjRect.y));
        m_pcorners.push_back(cv::Point2f((float)m_ObjRect.x + m_ObjRect.width, (float)m_ObjRect.y + m_ObjRect.height));
        m_pcorners.push_back(cv::Point2f((float)m_ObjRect.x, (float)m_ObjRect.y + m_ObjRect.height));

        //init corners of the marker
        for (unsigned int i = 0; i < m_pcorners.size(); i++)
            this->push_back(cv::Point2f(m_pcorners[i].x, m_pcorners[i].y));

        cv::Mat dmask = cv::Mat(image.rows, image.cols, CV_8UC1);
        dmask.setTo(0);
        for (unsigned int i = 1; i < objRects.size(); i++)
        {
            cv::Rect orect = objRects[i];
            if (i > 0)
            {
                orect.x += objRects[0].x;
                orect.y += objRects[0].y;
            }
            cv::rectangle(dmask, orect, cv::Scalar(255), cv::FILLED);
        }

        cv::Size subPixWinSize(5, 5);
        cv::TermCriteria termcrit(cv::TermCriteria::MAX_ITER | cv::TermCriteria::EPS, 30, 0.03f);
        try
        {
            m_ObjPoints.clear();
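            // detect strong corners to track, restricted by dmask to the secondary
            // rectangles passed in objRects[1..n] (offset into the face rect above)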
            cv::goodFeaturesToTrack(image, m_ObjPoints, m_maxFeatures, 0.01, 2, dmask, 3, false, 0.03f);

            //cornerSubPix(image, m_ObjPoints, subPixWinSize, cv::Size(-1, -1), termcrit);

            /*for (unsigned int i = 1; i < objRects.size(); i++)
            {
                cv::Point2f pt;
                pt.x = (float)(objRects[i].x + objRects[0].x + (patchSize / 2));
                pt.y = (float)(objRects[i].y + objRects[0].y + (objRects[i].height / 2));
                m_ObjPoints.push_back(pt);

                pt.x = (float)(objRects[i].x + objRects[0].x + objRects[i].width - (patchSize / 2));
                pt.y = (float)(objRects[i].y + objRects[0].y + (objRects[i].height / 2));
                m_ObjPoints.push_back(pt);

                pt.x = (float)(objRects[i].x + objRects[0].x + (objRects[i].width / 2));
                pt.y = (float)(objRects[i].y + objRects[0].y + (patchSize / 2));
                m_ObjPoints.push_back(pt);

                pt.x = (float)(objRects[i].x + objRects[0].x + (objRects[i].width / 2));
                pt.y = (float)(objRects[i].y + objRects[0].y + objRects[i].height - (patchSize / 2));
                m_ObjPoints.push_back(pt);
            }*/

            m_prevFramePoints = m_ObjPoints;
            m_lastFoundPoints.resize(m_ObjPoints.size(), true);
            m_lastFound = true;

#ifdef DEBUG_FACE_TRACKING
            cv::Mat mdebug;
            m_image.copyTo(mdebug);
            cv::cvtColor(mdebug, mdebug, CV_GRAY2BGR);

            for (int i = 0; i < (int)m_ObjPoints.size(); i++)
            {
                cv::circle(mdebug, m_ObjPoints[i] - cv::Point2f((float)m_ObjRect.x, (float)m_ObjRect.y), 2, cv::Scalar(0, 0, 255), 1);
            }

            int xoff = (objRects[0].width - m_ObjRect.width) / 2;
            int yoff = (objRects[0].height - m_ObjRect.height) / 2;
            for (unsigned int i = 1; i < objRects.size(); i++)
            {
                cv::Rect orect = objRects[i];
                orect.x -= xoff;
                orect.y -= yoff;
                cv::rectangle(mdebug, orect, cv::Scalar(255, 0, 0), 1);
            }
            cv::imshow("Face Reference", mdebug);
#endif
        }
        catch (std::exception& e)
        {
            MMechostr(MSKRUNTIME, "AR marker could not detect enough feature points : %s", e.what());
            return;
        }

        if (m_ObjPoints.size() > m_minInliners)
            m_bInitialized = true;
    }
    m_bInitialized = true;
}

ArFaceMarker::~ArFaceMarker()
{
    vector<int> empty(2);
    empty[0] = 0; empty[1] = 0;
    m_image = cv::Mat(empty);
    m_prevFrame = cv::Mat(empty);
    m_pcorners.clear();

#ifdef DEBUG_FACE_TRACKING
    cv::destroyWindow("Face Tracking");
    cv::destroyWindow("Face Reference");
#endif
}

bool ArFaceMarker::detectMotionFlow(cv::Mat &frame)
{
    std::vector<uchar> status;
    std::vector<float> err;
    cv::Size subPixWinSize(20, 20);
    cv::TermCriteria termcrit(cv::TermCriteria::MAX_ITER | cv::TermCriteria::EPS, 20, 0.03);

    if (!m_bInitialized)
        return false;

#ifdef DEBUG_FACE_TRACKING
    cv::Mat mdebug;
    frame.copyTo(mdebug);
    cv::cvtColor(mdebug, mdebug, CV_GRAY2BGR);
#endif

    cv::buildOpticalFlowPyramid(frame, m_nextPyr, subPixWinSize, 3, true);

    if (m_ObjPoints.empty() || m_prevPyr.empty())
    {
        m_prevPyr.swap(m_nextPyr);
        return false;
    }

    std::vector<cv::Point2f> nFramePoints;
    std::vector<cv::Point2f> nGoodFramePoints;
    std::vector<cv::Point2f> nGoodPoints;
    std::vector<cv::Mat> nGoodPatches;
    unsigned int goodtracks = 0;
    try
    {
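        // track the reference points from the previous pyramid into the current
        // one with pyramidal Lucas-Kanade optical flow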
        cv::calcOpticalFlowPyrLK(m_prevPyr, m_nextPyr, m_prevFramePoints, nFramePoints, status, err, subPixWinSize, 3, termcrit, /*cv::OPTFLOW_LK_GET_MIN_EIGENVALS|*/cv::OPTFLOW_FARNEBACK_GAUSSIAN, 0.001f);

        double movept = 0.0;
        unsigned int nbMatches = 0;
        for (unsigned int i = 0; i < nFramePoints.size(); ++i)
        {
            if (status[i] == 1)
            {
                movept += cv::norm(nFramePoints[i] - m_prevFramePoints[i]);
                nbMatches++;
            }
        }

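        // movept now holds the mean displacement of the successfully tracked points;
        // a point is kept below only if its LK error is small and its own displacement
        // is tiny or no more than twice this mean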
        movept /= nbMatches;
        double threshold = 0.6;
        for (unsigned int i = 0; i < m_ObjPoints.size(); i++)
        {
            double dist = abs(cv::norm(nFramePoints[i] - m_prevFramePoints[i]));

            if (status[i] == 1 && err[i] <= 16.0 && (dist < 0.5 || dist < abs(movept * 2.0)))
            {
                goodtracks++;
                nGoodPoints.push_back(m_ObjPoints[i]);
                nGoodFramePoints.push_back(nFramePoints[i]);
                m_prevFramePoints[i] = nFramePoints[i];
                m_lastFoundPoints[i] = true;

#ifdef DEBUG_FACE_TRACKING
                cv::circle(mdebug, nFramePoints[i], 2, cv::Scalar(0, 255, 0), 1);
#endif
            }
            else
            {
                m_lastFoundPoints[i] = false;
                m_prevFramePoints[i] = m_prevFramePoints[i];

#ifdef DEBUG_FACE_TRACKING
                cv::circle(mdebug, m_prevFramePoints[i], 4, cv::Scalar(0, 0, 255), 2);
#endif
            }
        }
    }
    catch (std::exception &e)
    {
        std::string mess = e.what();
        m_bInitialized = false;
        m_prevPyr.clear();
        m_prevFramePoints.clear();
        m_initFrames = 0;
        return false;
    }

    m_prevPyr.swap(m_nextPyr);
    frame.copyTo(m_prevFrame);

    if (m_initFrames < m_nbInitFrames)
    {
        m_ObjPoints = nGoodPoints;
        m_prevFramePoints = nGoodFramePoints;
        m_initFrames++;
    }

    if ((goodtracks < m_minInliners) || !GetCorners(nGoodPoints, nGoodFramePoints))
    {
        m_bInitialized = false;
        m_prevPyr.clear();
        m_prevFramePoints.clear();
        m_initFrames = 0;
        m_PosSmoothers.Reset();
        return false;
    }
    else
    {
#ifdef DEBUG_FACE_TRACKING
        cv::Scalar color(255, 0, 0);
        int lineWidth = 1;

        cv::line(mdebug, this->at(0), this->at(1), color, lineWidth, CV_AA);
        cv::line(mdebug, this->at(1), this->at(2), color, lineWidth, CV_AA);
        cv::line(mdebug, this->at(2), this->at(3), color, lineWidth, CV_AA);
        cv::line(mdebug, this->at(3), this->at(0), color, lineWidth, CV_AA);
        cv::rectangle(mdebug, this->at(0)-cv::Point2f(2,2), this->at(0)+cv::Point2f(2,2), cv::Scalar(0,0,255), lineWidth, CV_AA);
        cv::rectangle(mdebug, this->at(1)-cv::Point2f(2,2), this->at(1)+cv::Point2f(2,2), cv::Scalar(0,255,0), lineWidth, CV_AA);
        cv::rectangle(mdebug, this->at(2)-cv::Point2f(2,2), this->at(2)+cv::Point2f(2,2), cv::Scalar(255,0,0), lineWidth, CV_AA);

        cv::Point2f mcenter;
        float mradius;
        cv::minEnclosingCircle(nGoodFramePoints, mcenter, mradius);

        cv::circle(mdebug, mcenter, (int)mradius, cv::Scalar(0,255,0));
        cv::imshow("Face Tracking", mdebug);
#endif
        return true;
    }
}

bool ArFaceMarker::GetCorners(std::vector<cv::Point2f> trainPoints, std::vector<cv::Point2f> framePoints)
{
    unsigned int i, j;
    std::vector<cv::Point2f> corners;
    cv::Mat transformedPoints;
    cv::Mat HMatrix;
    bool isvalid = false;

    if (framePoints.empty())
    {
        return false;
    }

    //set corners size
    corners.resize(4);
    unsigned int nbItt = 3;
    try
    {
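        // estimate the homography mapping the reference feature points to their
        // tracked positions, project the four reference corners through it, and
        // accept the result only if the projected quad is convex with no collapsed angle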
        for (i = 0; (i < nbItt) && !isvalid; i++)
        {
            HMatrix = findHomography(cv::Mat(trainPoints), cv::Mat(framePoints), 0/*cv::RANSAC*/, 20.0);

            // transform points
            cv::perspectiveTransform(cv::Mat(m_pcorners), transformedPoints, HMatrix);

            for (j = 0; j < corners.size(); j++)
                corners[j] = transformedPoints.at<cv::Point2f>(j, 0);

            if ((corners.size() == 4) && (cv::isContourConvex(cv::Mat(corners))))
            {
                bool validCorners = true;
                for (j = 0; j < corners.size(); j++)
                {
                    int pdx = j == 0 ? corners.size() - 1 : j - 1; // predecessor of idx
                    int sdx = j == corners.size() - 1 ? 0 : j + 1; // successor of idx

                    cv::Point v1 = corners[sdx] - corners[j];
                    cv::Point v2 = corners[pdx] - corners[j];

                    // if one interior angle is too small the detected quad is degenerate and the marker is rejected
                    double angle = acos(static_cast<double>(v1.x*v2.x + v1.y*v2.y) / (cv::norm(v1) * cv::norm(v2)));
                    if (angle < 0.6f)
                    {
                        validCorners = false;
                        break;
                    }
                }

                isvalid = validCorners;// && NiceHomography(HMatrix);
            }
        };
    }
    catch (cv::Exception &)
    {
        return false;
    }

    //clear marker corners
    this->clear();

    if (!isvalid)
        return false;

    /*
    if ((corners.size() == 4) && (cv::isContourConvex(cv::Mat(corners))))
    {
        bool validCorners = true;
        for (j = 0; j < corners.size(); j++)
        {
            int pdx = j == 0 ? corners.size() - 1 : j - 1; // predecessor of idx
            int sdx = j == corners.size() - 1 ? 0 : j + 1; // successor of idx

            cv::Point v1 = corners[sdx] - corners[j];
            cv::Point v2 = corners[pdx] - corners[j];

            // one of the low interior angle + within upper 90% of region the marker seems bad
            double angle = acos(static_cast<double>(v1.x*v2.x + v1.y*v2.y) / (cv::norm(v1) * cv::norm(v2)));
            if (angle < 0.9)
            {
                validCorners = false;
                break;
            }
        }

        isvalid = validCorners;
    }

    if (!isvalid)
        return false;

    //use the last position for very small moves
    //interpolate corners between the 2 last frames for better stability
    if(m_lastCorner.empty())
        m_lastCorner = corners;
    else
    {
        // test if a corner move alone
        float c1 = abs ((corners[0].x - m_lastCorner[0].x) + (corners[0].y - m_lastCorner[0].y));
        float c2 = abs ((corners[1].x - m_lastCorner[1].x) + (corners[1].y - m_lastCorner[1].y));
        float c3 = abs ((corners[2].x - m_lastCorner[2].x) + (corners[2].y - m_lastCorner[2].y));
        float c4 = abs ((corners[3].x - m_lastCorner[3].x) + (corners[3].y - m_lastCorner[3].y));

        if (c1 >= 1.0f && c2 < 2.0f && c3 < 2.0f && c4 < 2.0f)
            corners[0].x = m_lastCorner[0].x;

        if (c2 >= 1.0f && c1 < 2.0f && c3 < 2.0f && c4 < 2.0f)
            corners[1].x = m_lastCorner[1].x;

        if (c3 >= 1.0f && c2 < 2.0f && c1 < 2.0f && c4 < 2.0f)
            corners[2].x = m_lastCorner[2].x;

        if (c4 >= 1.0f && c2 < 2.0f && c3 < 2.0f && c1 < 2.0f)
            corners[3].x = m_lastCorner[3].x;

        float cornerDiff = 0.0f;
        for (j = 0; j < 4; j++)
        {
            cornerDiff += sqrt(((corners[j].x - m_lastCorner[j].x) * (corners[j].x - m_lastCorner[j].x)) +
                               ((corners[j].y - m_lastCorner[j].y) * (corners[j].y - m_lastCorner[j].y)));
        }

        if((cornerDiff / corners.size()) > 2.5f)
        {
            m_lastCorner = corners;
        }
        else
            corners = m_lastCorner;
    }
    */

    for (i = 0; i < corners.size(); i++)
        this->push_back(cv::Point2f(corners[i].x, corners[i].y));

    return true;
}

/*
void ArFaceMarker::calculateFeaturedExtrinsics(float markerSizeMeters, aruco::CameraParameters& camParams, bool setYPerpendicular)
{
    if(!camParams.isValid())
        throw cv::Exception(9004,"!isValid(): invalid camera parameters. It is not possible to calculate extrinsics","calculateExtrinsics",__FILE__,__LINE__);
    if (!isValid())
        throw cv::Exception(9004,"!isValid(): invalid marker. It is not possible to calculate extrinsics","calculateExtrinsics",__FILE__,__LINE__);
    if (markerSizeMeters<=0)
        throw cv::Exception(9004,"markerSize<=0: invalid markerSize","calculateExtrinsics",__FILE__,__LINE__);
    if (camParams.CameraMatrix.rows == 0 || camParams.CameraMatrix.cols == 0)
        throw cv::Exception(9004,"CameraMatrix is empty","calculateExtrinsics",__FILE__,__LINE__);

    cv::Point2f offset = cv::Point2f((float)m_ObjRect.x + ((float)m_ObjRect.width / 2.0f), (float)m_ObjRect.y + ((float)m_ObjRect.height / 2.0f));
    float imgHeight = markerSizeMeters / m_image.rows;
    float imgWidth = imgHeight * (m_image.rows / m_image.cols);

    // map points to a cylinder
    std::vector<cv::Point3f> objPoints;
    std::vector<cv::Point2f> prevPoints;
    for (unsigned int i = 0; i < m_ObjPoints.size(); i++)
    {
        if (m_lastFoundPoints[i])
        {
            prevPoints.push_back(m_prevFramePoints[i]);
            cv::Point2f pt = m_ObjPoints[i] - offset;
            objPoints.push_back(UnProjectPoint(cv::Point2f(pt.x * imgWidth, pt.y * imgHeight), offset, imgWidth, imgHeight, imgWidth * 0.5f));
        }
    }

    cv::Mat raux, taux;
    cv::solvePnPRansac(objPoints, prevPoints, camParams.CameraMatrix, camParams.Distorsion, raux, taux, true, 100, 20.0f, 0.988f, cv::noArray(), CV_P3P);
    raux.convertTo(Rvec, CV_32F);
    taux.convertTo(Tvec, CV_32F);

    //rotate the X axis so that Y is perpendicular to the marker plane
    if (setYPerpendicular)
    {
        cv::Mat R(3,3,CV_32F);
        Rodrigues(Rvec, R);
        //create a rotation matrix for x axis
        float angleRad = CV_PI; // - 180°

        cv::Mat RX = cv::Mat::eye(3, 3, CV_32F);
        RX.at<float>(1,1) = cos(angleRad);
        RX.at<float>(1,2) = -sin(angleRad);
        RX.at<float>(2,1) = sin(angleRad);
        RX.at<float>(2,2) = cos(angleRad);

        //cv::Mat RY = cv::Mat::eye(3, 3, CV_32F);
        //RY.at<float>(0,0) = cos(angleRad);
        //RY.at<float>(0,2) = -sin(angleRad);
        //RY.at<float>(2,0) = sin(angleRad);
        //RY.at<float>(2,2) = cos(angleRad);

        //cv::Mat RZ = cv::Mat::eye(3, 3, CV_32F);
        //RZ.at<float>(0,0) = cos(angleRad);
        //RZ.at<float>(0,1) = -sin(angleRad);
        //RZ.at<float>(1,0) = sin(angleRad);
        //RZ.at<float>(1,1) = cos(angleRad);

        //now multiply
        R = R * RX;

        //finally, convert back with Rodrigues
        cv::Rodrigues(R, Rvec);
    }

    //Filter

    cv::Mat R1(3, 3, CV_32F);
    cv::Rodrigues(Rvec, R1);
    cv::Point3f euler = rot2euler(R1);
    cv::Point3f pos(Tvec.at<float>(0, 0), Tvec.at<float>(1, 0), Tvec.at<float>(2, 0));

    std::vector<cv::Point3f> filtered;
    filtered.push_back(pos);
    filtered.push_back(euler);

    m_PosSmoothers.Update(filtered);
    filtered = m_PosSmoothers.GetFilteredPoints();

    Tvec.at<float>(0, 0) = filtered[0].x;
    Tvec.at<float>(1, 0) = filtered[0].y;
    Tvec.at<float>(2, 0) = filtered[0].z;

    R1 = euler2rot(filtered[1]);

    //back to rodrigues
    cv::Rodrigues(R1, Rvec);
}
*/

cv::Point3f ArFaceMarker::UnProjectPoint(cv::Point2f point, cv::Point2f offset, float w, float h, float radius)
{
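    // back-project the 2D point onto a cylinder: intersect the viewing ray (using the
    // x coordinate with a focal length approximated by w) with a circle of radius r
    // centred at depth z0, which gives an approximate depth for a point on the face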
    //center the point at 0,0
    cv::Point2f pc(point.x - w/2, point.y - h/2);

    //these are your free parameters
    float f = w;
    float r = w;

    float omega = radius;
    float z0 = f - sqrt(r*r - omega*omega);

    float zc = (2*z0 + sqrt(4*z0*z0 - 4*(pc.x*pc.x/(f*f) + 1)*(z0*z0 - r*r))) / (2*(pc.x*pc.x/(f*f) + 1));
    cv::Point3f final_point(pc.x, pc.y, -zc);
    final_point.x += w/2;
    final_point.y += h/2;
    return final_point;
}

void ArFaceMarker::Update(cv::Mat frame, cv::Mat color, aruco::CameraParameters& camparam, bool reverse)
{
    boost::mutex::scoped_lock l(killMutex);

    if (!m_bInitialized)
        return;

    bool found = (detectFeatured(frame, color, camparam.CamSize)) ? true : false;
    m_bInitialized = found;
    if (found)
    {
        /* Get Matrix */
        double modelview_matrix[16];

        try
        {
            //recalculate Rvec and Tvec with marker size
            calculateFeaturedExtrinsics(m_size, camparam, false);

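            // convert Rvec to a 3x3 matrix, compose it with an extra 90° rotation
            // about Z, then convert the result back to a rotation vector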
            cv::Mat R(3, 3, CV_32F);
            Rodrigues(Rvec, R);
            //create a rotation matrix for the z axis
            float angleRad = CV_PI / 2.0f; // 90°

            //cv::Mat RX = cv::Mat::eye(3, 3, CV_32F);
            //RX.at<float>(1,1) = cos(angleRad);
            //RX.at<float>(1,2) = -sin(angleRad);
            //RX.at<float>(2,1) = sin(angleRad);
            //RX.at<float>(2,2) = cos(angleRad);

            //cv::Mat RY = cv::Mat::eye(3, 3, CV_32F);
            //RY.at<float>(0,0) = cos(angleRad);
            //RY.at<float>(0,2) = -sin(angleRad);
            //RY.at<float>(2,0) = sin(angleRad);
            //RY.at<float>(2,2) = cos(angleRad);

            cv::Mat RZ = cv::Mat::eye(3, 3, CV_32F);
            RZ.at<float>(0, 0) = cos(angleRad);
            RZ.at<float>(0, 1) = -sin(angleRad);
            RZ.at<float>(1, 0) = sin(angleRad);
            RZ.at<float>(1, 1) = cos(angleRad);

            //now multiply
            R = R * RZ;

            //finally, convert the combined rotation back to a Rodrigues vector
            cv::Rodrigues(R, Rvec);

            glGetModelViewMatrix(modelview_matrix);
        }
        catch (std::exception&)
        {
            return;
        }

        /*if(debugDraw)
        {
            m_objmarker->draw(lastData.image, cv::Scalar(255, 0, 0), 1, true);
            //aruco::CvDrawingUtils::draw3dAxis(image, m_objmarker, arCamParam.camParam);
        }*/

        //determine the centroid
        Vector3 pixelPosition(0.0, 0.0, 0.0);
        for (int j = 0; j < 4; j++)
        {
            pixelPosition.x += (*this).at(j).x;
            pixelPosition.y += (*this).at(j).y;
        }

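        // pixelPosition.z accumulates the four side lengths of the corner quad;
        // after the division by 4 below it holds the average side length in pixels
        // (an approximate on-screen size for the tracked face)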
        pixelPosition.z = sqrt(pow(((*this).at(1).x - (*this).at(0).x), 2) + pow(((*this).at(1).y - (*this).at(0).y), 2));
        pixelPosition.z += sqrt(pow(((*this).at(2).x - (*this).at(1).x), 2) + pow(((*this).at(2).y - (*this).at(1).y), 2));
        pixelPosition.z += sqrt(pow(((*this).at(3).x - (*this).at(2).x), 2) + pow(((*this).at(3).y - (*this).at(2).y), 2));
        pixelPosition.z += sqrt(pow(((*this).at(0).x - (*this).at(3).x), 2) + pow(((*this).at(0).y - (*this).at(3).y), 2));

        pixelPosition.x /= 4.;
        pixelPosition.y /= 4.;
        pixelPosition.z /= 4.;

        //cv::circle(image, Point(pixelPosition.x, pixelPosition.y), pixelPosition.z/2, cv::Scalar(255,0,255));

        SetPixelPosition(pixelPosition);

        SetPosition(Vector3(static_cast<float>(reverse ? -modelview_matrix[12] : modelview_matrix[12]), static_cast<float>(modelview_matrix[13]), static_cast<float>(modelview_matrix[14])));
        SetOrientation(BtQuaternion::FromRotationMatrix(modelview_matrix, reverse));
        SetVisible(true);
    }
    else
        SetVisible(false);

    m_needUpdate = true;
}