31#include <scolPlugin.h>
// symTestParallel: cv::ParallelLoopBody worker running the match symmetry
// test over a sub-range of matches1. A match survives only when it is a
// mutual best match: matches1[i][0].queryIdx == matches2[j][0].trainIdx AND
// matches2[j][0].queryIdx == matches1[i][0].trainIdx. Survivors go into a
// per-thread bucket of omatchVec (indexed by cv::getThreadNum()), so no
// locking is needed while workers run.
// NOTE(review): fragment — brace lines and the tail of the constructor
// initializer list are elided in this extract.
37class symTestParallel :
public cv::ParallelLoopBody
// Constructor: copies both knn-match sets; keeps a reference to the shared
// per-thread output vector (one inner vector per worker thread).
40 symTestParallel(
const std::vector<std::vector<cv::DMatch> >& _matches1,
const std::vector<std::vector<cv::DMatch> >& _matches2, std::vector<std::vector<cv::DMatch> > &_omatchVec)
41 : matches1(_matches1),
// Processes matches1[range.start .. range.end) against all of matches2.
46 void operator()(
const cv::Range& range)
const
48 const int start = range.start;
49 const int end = range.end;
52 for (std::vector<std::vector<cv::DMatch> >::const_iterator matchIterator1 = (matches1.begin() + start); matchIterator1 != (matches1.begin() + end); ++matchIterator1)
// Need both nearest neighbours to trust the entry.
// NOTE(review): empty() is redundant — size() < 2 already covers it.
55 if (matchIterator1->empty() || matchIterator1->size() < 2)
// O(n*m) scan of the reverse match set for the mutual best match.
59 for (std::vector<std::vector<cv::DMatch> >::const_iterator
60 matchIterator2 = matches2.begin(); matchIterator2 != matches2.end(); ++matchIterator2)
63 if (matchIterator2->empty() || matchIterator2->size() < 2)
// Mutual-consistency check on the best (index 0) match of each side.
67 if ((*matchIterator1)[0].queryIdx ==
68 (*matchIterator2)[0].trainIdx &&
69 (*matchIterator2)[0].queryIdx ==
70 (*matchIterator1)[0].trainIdx)
// Record the symmetric match in this thread's private bucket.
73 omatchVec[cv::getThreadNum()].push_back(cv::DMatch((*matchIterator1)[0].queryIdx, (*matchIterator1)[0].trainIdx, (*matchIterator1)[0].distance));
// Assignment declared-but-not-defined (omatchVec is a reference member).
81 symTestParallel& operator=(
const symTestParallel&);
82 std::vector<std::vector<cv::DMatch> > matches1;
83 std::vector<std::vector<cv::DMatch> > matches2;
// Shared output: one bucket per worker thread, merged by the caller.
84 std::vector<std::vector<cv::DMatch> > &omatchVec;
// ratioTest body fragment (cf. prototype: int ratioTest(matches)): applies
// Lowe's ratio test in place — clears any knn entry whose best/second-best
// distance ratio exceeds ratio_, and clears entries with < 2 neighbours.
// NOTE(review): fragment — the function header and any removed-match
// counting lines are elided in this extract.
106 for (std::vector<std::vector<cv::DMatch> >::iterator matchIterator = matches.begin(); matchIterator != matches.end(); ++matchIterator)
109 if (matchIterator->size() > 1)
// Reject ambiguous matches: the best neighbour must beat the runner-up.
112 if ((*matchIterator)[0].distance / (*matchIterator)[1].distance > ratio_)
114 matchIterator->clear();
// Fewer than two neighbours: the ratio test cannot apply, drop the entry.
120 matchIterator->clear();
// symmetryTest fragment: runs symTestParallel over matches1 x matches2 in
// parallel, then flattens the per-thread buckets into symMatches.
128 const std::vector<std::vector<cv::DMatch> >& matches2,
129 std::vector<cv::DMatch>& symMatches)
// One output bucket per OpenCV worker thread (the body indexes them with
// cv::getThreadNum()), avoiding synchronisation during the parallel_for_.
131 std::vector<std::vector<cv::DMatch> > symParallel(cv::getNumThreads());
134 cv::parallel_for_(cv::Range(0, matches1.size()), symTestParallel(matches1, matches2, symParallel));
// Merge the per-thread results into the caller's output vector.
137 for (
unsigned int i = 0; i < symParallel.size(); i++)
139 symMatches.insert(symMatches.end(), symParallel[i].begin(), symParallel[i].end());
// robustMatch fragment: detects features on the frame, then performs a
// two-way 2-NN match (frame->model and model->frame); the elided lines
// presumably apply the ratio and symmetry tests to fill good_matches —
// confirm against the full source.
144 std::vector<cv::KeyPoint>& keypoints_frame,
const cv::Mat& descriptors_model,
const cv::Mat& mask)
146 cv::Mat descriptors_frame;
// Feature detection + descriptor extraction in one pass.
149 detector_->detectAndCompute(frame, mask, keypoints_frame, descriptors_frame);
152 std::vector<std::vector<cv::DMatch> > matches12, matches21;
// Direct matching: frame descriptors against the model.
155 matcher_->knnMatch(descriptors_frame, descriptors_model, matches12, 2);
// Reverse matching: model descriptors against the frame (for the
// symmetry test).
158 matcher_->knnMatch(descriptors_model, descriptors_frame, matches21, 2);
// fastRobustMatch fragment: single-direction 2-NN match against the
// matcher's trained descriptor set; keeps the best neighbour of every
// surviving entry (ratio filtering happens in elided lines — confirm).
171 std::vector<cv::KeyPoint>& keypoints_frame,
const cv::Mat& mask)
173 cv::Mat descriptors_frame;
174 good_matches.clear();
// Feature detection + descriptor extraction on the incoming frame.
177 detector_->detectAndCompute(frame, mask, keypoints_frame, descriptors_frame);
180 std::vector<std::vector<cv::DMatch> > matches;
// knnMatch against the matcher's internal train set (populated via add()).
181 matcher_->knnMatch(descriptors_frame, matches, 2);
// Keep the best neighbour of every entry that survived filtering.
187 for (std::vector<std::vector<cv::DMatch> >::iterator matchIterator = matches.begin(); matchIterator != matches.end(); ++matchIterator)
189 if (!matchIterator->empty()) good_matches.push_back((*matchIterator)[0]);
// addTrainData fragment: extracts keypoints/descriptors from a new marker
// image, appends them to the matcher's existing training descriptors and
// re-adds the concatenated matrix. Rejects markers that yield too few
// keypoints.
198 cv::Mat descriptors_marker;
199 std::vector<cv::KeyPoint> keypoints_marker;
200 std::vector<cv::Mat> descMat;
// Training-time detector (a separate instance from the runtime detector_).
205 traindetector_->detectAndCompute(frame, cv::Mat(), keypoints_marker, descriptors_marker);
// Too few features: the marker would be unreliable, bail out.
206 if (keypoints_marker.size() < 30)
// Start from the descriptors the matcher already holds.
209 descMat = matcher_->getTrainDescriptors();
212 marker_keypoints.insert(marker_keypoints.end(), keypoints_marker.begin(), keypoints_marker.end());
213 descMat.push_back(descriptors_marker);
// Collapse all training descriptors into a single matrix before re-adding.
215 if (descMat.size() > 1)
217 cv::vconcat(descMat, descriptors_marker);
// NOTE(review): descMat is pushed again right after vconcat — this extract
// does not show a descMat.clear() in the elided lines; verify the full
// source, otherwise descriptors would be duplicated.
219 descMat.push_back(descriptors_marker);
222 matcher_->add(descMat);
226 catch (std::exception &)
// ArFeaturedMarker(filePath, ...) fragment: loads the marker picture from
// disk. The file is read wholesale into memory and decoded grayscale with
// cv::imdecode; if fopen fails, it falls back to cv::imread.
238 m_maxFeatures = maxFeatures;
// Binary read of the whole file (imdecode on a memory buffer handles paths
// imread may not, e.g. non-ASCII paths on Windows).
246 FILE* pFile = fopen(m_file.c_str(),
"rb");
250 fseek(pFile, 0, SEEK_END);
251 lSize = ftell(pFile);
255 buff = (
char*)malloc(lSize);
// NOTE(review): fread's return value is unchecked, and fclose(pFile)/
// free(buff) are not visible in this extract — confirm they happen in the
// elided lines, else this leaks on every load.
259 fread(buff, 1, lSize, pFile);
260 std::vector<char> data(buff, buff + lSize);
265 m_image = cv::imdecode(data, cv::IMREAD_GRAYSCALE);
269 MMechostr(MSKDEBUG,
">>>>> Picture not loaded ko\n");
271 MMechostr(MSKDEBUG,
">>>>> Picture loaded ok\n");
// fopen failed: log the path and fall back to a direct imread.
275 MMechostr(MSKDEBUG,
">>>>> Picture failed to open : %s\n", filePath.c_str());
278 m_image = cv::imread(m_file, cv::IMREAD_GRAYSCALE);
// Template-image fragment (template-based constructor or SetTrackedImage —
// the header is elided): converts the supplied image to grayscale, and in
// the visible branch doubles its dimensions before re-training.
291 m_maxFeatures = maxFeatures;
// NOTE(review): cv::COLOR_RGB2GRAY — OpenCV frames are conventionally BGR;
// confirm the caller really supplies RGB, otherwise COLOR_BGR2GRAY is meant
// (channel weights differ, slightly changing the gray image).
293 cv::cvtColor(tpl,
m_image, cv::COLOR_RGB2GRAY);
// Upscale (x2) — presumably for small templates; condition elided.
316 nsize.width =
m_image.cols * 2;
317 nsize.height =
m_image.rows * 2;
// Rebuild detector/matcher state for the new marker image.
323 initTrackingPicture();
// CommonConstructor fragment: resets detector and tracking state shared by
// all constructors.
330void ArFeaturedMarker::CommonConstructor()
332 m_detector.release();
335 m_registerNextFrame =
false;
337 m_KalmanInit =
false;
// RANSAC reprojection threshold (pixels) used by GetHomography.
340 m_HomographyThreshold = 8;
// Corner smoothing filter parameters — cf. Init(fSmoothing, fCorrection,
// fPrediction, fJitterRadius, fMaxDeviationRadius).
342 m_CornerSmoothers.
Init(0.2f, 0.2f, 0.2f, 25.0f, 35.0f);
// Resize fragment (enclosing function header elided): scales m_image so one
// dimension becomes msize while preserving aspect ratio (the other
// dimension is derived from the cols/rows ratio), then re-trains.
358 nsize.width = (int)(((
float)
m_image.cols * (
float)msize) / (float)
m_image.rows);
359 nsize.height = msize;
// Opposite orientation: fix the width path instead (branch header elided).
364 nsize.height = (int)(((
float)
m_image.rows * (float)msize) / (float)
m_image.cols);
// Rebuild detector/matcher state for the resized marker image.
373 initTrackingPicture();
// Destructor fragment: tears down the robust matcher and releases the
// detector/matcher smart pointers.
// NOTE(review): purpose of the zeroed 2-element vector is not visible in
// this extract — presumably passed to an elided reset call; confirm.
382 std::vector<int> empty(2);
383 empty[0] = 0; empty[1] = 0;
389 delete(m_robustMatcher);
393 if (!m_detector.empty())
395 m_detector.release();
398 if (!m_matcher.empty())
// initTrackingPicture fragment: tears down previous matcher state, records
// the marker's four reference corners in pixel coordinates, builds an
// LSH-based FLANN matcher (suitable for binary descriptors such as AKAZE's
// MLDB) and trains it on the marker image.
404bool ArFeaturedMarker::initTrackingPicture()
410 delete(m_robustMatcher);
414 if (!m_detector.empty())
416 m_detector.release();
419 if (!m_matcher.empty())
// Reference corners: TL, TR, BR, BL of the marker image, in pixels.
427 m_pcorners.push_back(cv::Point2f(0.0f, 0.0f));
428 m_pcorners.push_back(cv::Point2f(
static_cast<float>(
m_image.cols), 0.0f));
429 m_pcorners.push_back(cv::Point2f(
static_cast<float>(
m_image.cols),
static_cast<float>(
m_image.rows)));
430 m_pcorners.push_back(cv::Point2f(0.0f,
static_cast<float>(
m_image.rows)));
// LSH index: 6 hash tables, 12-bit keys, multi-probe level 1; 50 checks at
// search time. FLANN configuration for binary (Hamming) descriptors.
433 cv::Ptr<cv::flann::IndexParams> indexParams = cv::makePtr<cv::flann::LshIndexParams>(6, 12, 1);
434 cv::Ptr<cv::flann::SearchParams> searchParams = cv::makePtr<cv::flann::SearchParams>(50);
435 m_matcher = cv::makePtr<cv::FlannBasedMatcher>(indexParams, searchParams);
// Extract training keypoints/descriptors; propagate success/failure.
439 return trainMatcher();
// RegisterNextFrame fragment: arms the one-shot "use the next camera frame
// as the marker" flag, consumed (and cleared on success) in detectFeatured.
446 m_registerNextFrame =
true;
// trainMatcher fragment: rebuilds the AKAZE detectors with a threshold
// scaled from m_maxFeatures and re-extracts the marker's training
// keypoints; rejects markers yielding fewer than 50 keypoints.
449bool ArFeaturedMarker::trainMatcher()
452 m_trainKeypoints.clear();
453 if (!m_detector.empty())
455 m_detector.release();
// Higher m_maxFeatures -> lower AKAZE threshold -> more detected keypoints.
461 float threshold = 0.003f / ((float)m_maxFeatures / 100.0f);
// Runtime detector: MLDB descriptors, shallow scale space (1 octave,
// 1 layer) for per-frame speed.
462 m_detector = cv::AKAZE::create(cv::AKAZE::DESCRIPTOR_MLDB, 0, 3, threshold, 1, 1);
466 std::vector<cv::Mat> descMat;
// Training detector: much deeper scale space (32 octaves, 4 layers,
// PM_G2 diffusivity) for a richer marker description.
469 cv::Ptr<cv::AKAZE> sdetector = cv::AKAZE::create(cv::AKAZE::DESCRIPTOR_MLDB, 0, 3, threshold, 32, 4, cv::KAZE::DIFF_PM_G2);
// Too few training keypoints: marker unusable (failure path elided).
516 if (m_trainKeypoints.size() < 50)
// GetPointsFromMatches fragment: converts DMatch correspondences into the
// parallel point lists (train image vs current frame) needed for homography
// estimation; requires at least m_minMatches matches.
// NOTE(review): `matches` is taken by value — a const reference would avoid
// copying the whole vector (it is only read here).
522bool ArFeaturedMarker::GetPointsFromMatches(
const std::vector<cv::KeyPoint> &querykeypoints,
const std::vector<cv::KeyPoint> &trainKeypoints, std::vector<cv::DMatch> matches, std::vector<cv::Point2f> &trainPoints, std::vector<cv::Point2f> &framePoints)
// Not enough correspondences for a reliable homography: the failure
// branch is elided in this extract.
524 if (matches.size() >= m_minMatches)
526 for (
unsigned int i = 0; i < matches.size(); i++)
528 cv::DMatch& match = matches[i];
// trainIdx indexes the marker's keypoints, queryIdx the frame's.
529 trainPoints.push_back(trainKeypoints[match.trainIdx].pt);
530 framePoints.push_back(querykeypoints[match.queryIdx].pt);
// GetHomography fragment: RANSAC estimation of the train->frame homography
// with m_HomographyThreshold as the reprojection threshold.
// NOTE(review): cv::FM_RANSAC is a findFundamentalMat constant; it happens
// to share the value 8 with cv::RANSAC so this works, but cv::RANSAC is the
// intended flag for findHomography.
538bool ArFeaturedMarker::GetHomography(std::vector<cv::Point2f> &trainPoints, std::vector<cv::Point2f> &framePoints, cv::Mat &HMatrix)
543 HMatrix = findHomography(trainPoints, framePoints, cv::FM_RANSAC, m_HomographyThreshold);
563 catch (cv::Exception &e)
// Corner-update fragment (enclosing function header elided): projects the
// marker's reference corners through the homography, validates the
// resulting quad, smooths it, and stores the corners in this marker object
// (aruco-style: the marker itself is a vector of corner points).
581 std::vector<cv::Point2f> corners;
582 cv::Mat transformedPoints;
583 bool isvalid =
false;
// Map the 4 reference corners (m_pcorners) into the current frame.
585 cv::perspectiveTransform(cv::Mat(m_pcorners), transformedPoints, HMatrix);
587 for (i = 0; i < m_pcorners.size(); i++)
588 corners.push_back(transformedPoints.at<cv::Point2f>(i, 0));
// Gate on quad shape and homography quality.
// NOTE(review): checkRectShape/NiceHomography bodies are not visible here —
// their exact criteria cannot be confirmed from this extract.
593 if ((corners.size() == 4) && (checkRectShape(corners)))
595 isvalid = NiceHomography(HMatrix);
// Temporal smoothing of the corner positions before publishing them.
604 m_CornerSmoothers.
Update(corners);
607 for (i = 0; i < corners.size(); i++)
609 this->push_back(cv::Point2f(corners[i].x, corners[i].y));
// detectMotionFlow fragment: tracks the previous frame's points into the
// current frame with pyramidal Lucas-Kanade; keeps pairs that were tracked
// (status==1) with acceptable error, and succeeds when at least
// m_minMatches correspondences survive.
// BUG(review): the 5th parameter is garbled in this extract
// (">rainPoints") — from the body (gtrainPoints.push_back below) it must be
// "&gtrainPoints"; this looks like a mangled "&gt;" HTML entity. Confirm
// and fix against the original file.
617bool ArFeaturedMarker::detectMotionFlow(cv::Mat &frame, cv::Mat &color,
const std::vector<cv::Point2f> &trainPoints,
const std::vector<cv::Point2f> &framePoints, std::vector<cv::Point2f> >rainPoints, std::vector<cv::Point2f> &gframePoints)
619 std::vector<cv::Point2f> newFramePoints;
620 std::vector<uchar> status;
621 std::vector<float> err;
622 cv::TermCriteria termcrit(cv::TermCriteria::MAX_ITER | cv::TermCriteria::EPS, 200, 0.02f)
623 cv::Size winSize(41, 41);
// Build the 8-level pyramid for the current frame; reused as m_prevPyr on
// the next call.
625 cv::buildOpticalFlowPyramid(frame,
m_nextPyr, winSize, 8,
true);
// Nothing to track yet (first frame, or tracking was reset).
627 if (framePoints.empty() ||
m_prevPyr.empty())
// NOTE(review): OPTFLOW_FARNEBACK_GAUSSIAN is a calcOpticalFlowFarneback
// flag; calcOpticalFlowPyrLK only understands OPTFLOW_USE_INITIAL_FLOW and
// OPTFLOW_LK_GET_MIN_EIGENVALS — this value is most likely ignored.
635 cv::calcOpticalFlowPyrLK(
m_prevPyr,
m_nextPyr, framePoints, newFramePoints, status, err, winSize, 8, termcrit, cv::OPTFLOW_FARNEBACK_GAUSSIAN, 0.001f);
638 unsigned int nbMatches = 0;
639 for (
unsigned int i = 0; i < trainPoints.size(); i++)
// Keep points that were found with acceptable tracking error.
645 if (status[i] == 1 && err[i] < 150.0f)
648 gtrainPoints.push_back(trainPoints[i]);
649 gframePoints.push_back(newFramePoints[i]);
// Enough surviving correspondences to attempt a homography?
653 if (nbMatches >= m_minMatches)
656 catch(std::exception &e)
658 std::string wt = e.what();
// GetWarpedMarker fragment: thread-safe copy of the last rectified marker
// view (m_warped, produced in detectFeatured under the same mutex) into the
// caller's image.
667 boost::mutex::scoped_lock l(m_mutex);
668 m_warped.copyTo(image);
// detectFeatured fragment: per-frame pipeline — resets on frame-size
// change, handles one-shot marker registration from a crop of the live
// frame, tries fast optical-flow tracking with homography refinement, falls
// back to full feature matching, and renders the rectified marker view.
679 if (m_lastFrameSize.width != frame.cols || m_lastFrameSize.height != frame.rows || m_registerNextFrame)
// Frame geometry changed (or re-registration requested): drop tracking
// state so stale points are not matched against the new frames.
683 m_prevTrainPoints.clear();
687 m_CornerSmoothers.
Reset();
690 m_lastFrameSize = cv::Size(frame.cols, frame.rows);
// One-shot registration: crop the requested region of the current frame
// and train the matcher on it.
693 if (m_registerNextFrame && camsize.width != 0 && camsize.height != 0 && m_cropSize.width > 1 && m_cropSize.height > 1)
// Crop rect was given in camera coordinates; rescale to frame pixels.
696 cv::Point2f scale = cv::Point2f((
float)frame.cols / (
float)camsize.width, (
float)frame.rows / (
float)camsize.height);
698 m_cropPos.x = (int)((
float)m_cropPos.x * scale.x);
699 m_cropPos.y = (int)((
float)m_cropPos.y * scale.y);
700 m_cropSize.width = (int)((
float)m_cropSize.width * scale.x);
// BUG(review): height is scaled by scale.x — almost certainly a copy-paste
// error for scale.y; distorts the crop whenever the frame and camera
// aspect ratios differ.
701 m_cropSize.height = (int)((
float)m_cropSize.height * scale.x);
706 cv::Mat crop(frame, cv::Rect(m_cropPos.x, m_cropPos.y, m_cropSize.width, m_cropSize.height));
// Resize the crop so its larger dimension equals msize, aspect preserved.
708 if (crop.rows >= crop.cols)
710 nsize.width = (int)(((
float)crop.cols * (
float)msize) / (float)crop.rows);
711 nsize.height = msize;
716 nsize.height = (int)(((
float)crop.rows * (float)msize) / (float)crop.cols);
719 cv::resize(crop, marker, nsize, 0, 0, cv::INTER_CUBIC);
// Keep the flag armed until addTrainData succeeds (retries next frame).
722 m_registerNextFrame = (m_robustMatcher->
addTrainData(marker, m_trainKeypoints)) ?
false :
true;
724 catch (std::exception &)
727 MMechostr(MSKDEBUG,
"detectFeatured : failed to set the current frame as marker.\n");
// No frame or no marker template: nothing to detect.
731 if (frame.empty() ||
m_image.empty())
// Fast path: optical-flow tracking from the previous frame's points.
737 std::vector<cv::Point2f> gFramePoints;
738 std::vector<cv::Point2f> gTrainPoints;
740 bool motion = detectMotionFlow(frame, color, m_prevTrainPoints,
m_prevFramePoints, gTrainPoints, gFramePoints);
743 if (motion && !gFramePoints.empty() && !gTrainPoints.empty())
745 if ((
m_lastFound = GetHomography(gTrainPoints, gFramePoints, HMatrix)))
// Propagate the previous train points through the refined homography.
749 cv::Mat prev = cv::Mat(m_prevTrainPoints,
false);
751 cv::perspectiveTransform(prev, next, HMatrix);
// Slow path: full feature detection + matching against the marker.
766 m_dmask = cv::Mat(frame.rows, frame.cols, CV_8UC1);
770 std::vector<cv::KeyPoint> keypoints;
771 std::vector<cv::DMatch> matches;
776 std::vector<cv::Point2f> trainPoints;
777 std::vector<cv::Point2f> framePoints;
779 if ((
m_lastFound = GetPointsFromMatches(keypoints, m_trainKeypoints, matches, trainPoints, framePoints)))
782 m_prevTrainPoints = trainPoints;
// Seed optical flow for the next frame from the fresh detection.
// NOTE(review): the last two arguments here (gFramePoints, gTrainPoints)
// are in the opposite order to the call at original line 740 — verify
// which order detectMotionFlow's signature expects.
795 std::vector<cv::Point2f> gFramePoints;
796 std::vector<cv::Point2f> gTrainPoints;
798 detectMotionFlow(frame, color, m_prevTrainPoints,
m_prevFramePoints, gFramePoints, gTrainPoints);
// Rectify the marker region into m_warped (mutex shared with
// GetWarpedMarker).
801 boost::mutex::scoped_lock l(m_mutex);
802 cv::warpPerspective(color, m_warped, HMatrix, cv::Size(
m_image.cols,
m_image.rows), cv::WARP_INVERSE_MAP | cv::INTER_CUBIC);
806 catch (std::exception&)
// Detection failed: reset tracking state for a clean retry.
815 m_KalmanInit =
false;
818 m_prevTrainPoints.clear();
821 m_CornerSmoothers.
Reset();
// calculateFeaturedExtrinsics fragment: validates inputs, builds the 3D
// object points of the marker plane (sized in meters, aspect-corrected from
// the marker image), rescales the detected corners from frame resolution to
// the camera calibration resolution, and solves PnP for Rvec/Tvec.
840 if (!camParams.isValid())
841 throw cv::Exception(9004,
"!isValid(): invalid camera parameters. It is not possible to calculate extrinsics",
"calculateExtrinsics", __FILE__, __LINE__);
844 throw cv::Exception(9004,
"!isValid(): invalid marker. It is not possible to calculate extrinsics",
"calculateExtrinsics", __FILE__, __LINE__);
845 if (markerSizeMeters <= 0)
846 throw cv::Exception(9004,
"markerSize<=0: invalid markerSize",
"calculateExtrinsics", __FILE__, __LINE__);
847 if (camParams.CameraMatrix.rows == 0 || camParams.CameraMatrix.cols == 0)
848 throw cv::Exception(9004,
"CameraMatrix is empty",
"calculateExtrinsics", __FILE__, __LINE__);
// Half-extents of the marker plane in meters: one side maps directly to
// markerSizeMeters, the other is derived from the image aspect ratio
// (orientation branch headers elided in this extract).
850 float halfSizeX = 0.0f;
851 float halfSizeY = 0.0f;
854 halfSizeX = markerSizeMeters / 2.0f;
855 halfSizeY = ((
m_image.cols * markerSizeMeters) /
m_image.rows) / 2.0f;
859 halfSizeX = ((
m_image.rows * markerSizeMeters) /
m_image.cols) / 2.0f;
860 halfSizeY = markerSizeMeters / 2.0f;
// Planar (z=0) object points: row 0 = (-x,-y), row 1 = (-x,+y),
// row 2 = (+x,+y), row 3 = (+x,-y).
863 cv::Mat ObjPoints(4, 3, CV_32FC1);
864 ObjPoints.at<
float>(1, 0) = -halfSizeX;
865 ObjPoints.at<
float>(1, 1) = halfSizeY;
866 ObjPoints.at<
float>(1, 2) = 0;
867 ObjPoints.at<
float>(2, 0) = halfSizeX;
868 ObjPoints.at<
float>(2, 1) = halfSizeY;
869 ObjPoints.at<
float>(2, 2) = 0;
870 ObjPoints.at<
float>(3, 0) = halfSizeX;
871 ObjPoints.at<
float>(3, 1) = -halfSizeY;
872 ObjPoints.at<
float>(3, 2) = 0;
873 ObjPoints.at<
float>(0, 0) = -halfSizeX;
874 ObjPoints.at<
float>(0, 1) = -halfSizeY;
875 ObjPoints.at<
float>(0, 2) = 0;
877 cv::Mat ImagePoints(4, 2, CV_32FC1);
// Corners were detected at frame resolution; convert back to the camera
// calibration resolution before solvePnP.
879 cv::Point2f scale = cv::Point2f((
float)m_lastFrameSize.width / (
float)camParams.CamSize.width, (
float)m_lastFrameSize.height / (
float)camParams.CamSize.height);
882 for (
int c = 0; c < 4; c++)
// NOTE(review): the stored corners are rescaled IN PLACE — calling this
// method twice on the same detection would rescale twice; confirm intended.
// (c % 4 is redundant since c < 4 already.)
884 (*this)[c % 4].x = ((*this)[c % 4].x) * (1.0f / scale.x);
885 (*this)[c % 4].y = ((*this)[c % 4].y) * (1.0f / scale.y);
887 ImagePoints.at<
float>(c, 0) = ((*
this)[c % 4].x);
888 ImagePoints.at<
float>(c, 1) = ((*
this)[c % 4].y);
// Pose estimation; converted to CV_32F for downstream consumers.
892 cv::solvePnP(ObjPoints, ImagePoints, camParams.CameraMatrix, camParams.Distorsion, raux, taux);
895 raux.convertTo(Rvec, CV_32F);
896 taux.convertTo(Tvec, CV_32F);
// Optional axis correction (note: identifier spelled "Perperdicular" here
// vs "Perpendicular" in the declared prototype — body elided).
899 if (setYPerperdicular)
// Update fragment: obtains the OpenGL modelview matrix for the pose,
// computes a 2D pixel-space summary of the detection (centre of the 4
// corners in x/y, mean side length in z), and sets the scene position from
// the modelview translation column.
914 double modelview_matrix[16];
920 glGetModelViewMatrix(modelview_matrix);
922 catch (std::exception&)
928 Vector3 pixelPosition(0.0, 0.0, 0.0);
// Sum the corner coordinates; averaged below.
929 for (
int j = 0; j<4; j++)
931 pixelPosition.
x += (*this).at(j).x;
932 pixelPosition.
y += (*this).at(j).y;
// z accumulates the quad perimeter: the four Euclidean side lengths.
935 pixelPosition.
z = sqrt(pow(((*this).at(1).x - (*this).at(0).x), 2) + pow(((*this).at(1).y - (*this).at(0).y), 2));
936 pixelPosition.
z += sqrt(pow(((*this).at(2).x - (*this).at(1).x), 2) + pow(((*this).at(2).y - (*this).at(1).y), 2));
937 pixelPosition.
z += sqrt(pow(((*this).at(3).x - (*this).at(2).x), 2) + pow(((*this).at(3).y - (*this).at(2).y), 2));
938 pixelPosition.
z += sqrt(pow(((*this).at(0).x - (*this).at(3).x), 2) + pow(((*this).at(0).y - (*this).at(3).y), 2));
// Averages: x/y become the corner centroid, z the mean side length.
940 pixelPosition.
x /= 4.;
941 pixelPosition.
y /= 4.;
942 pixelPosition.
z /= 4.;
// Scene position from the modelview translation (indices 12..14), with x
// optionally mirrored for reversed (front-facing) cameras.
948 SetPosition(
Vector3(
static_cast<float>(reverse ? -modelview_matrix[12] : modelview_matrix[12]),
static_cast<float>(modelview_matrix[13]),
static_cast<float>(modelview_matrix[14])));
void Update(cv::Mat frame, cv::Mat color, aruco::CameraParameters &camparam, bool reverse)
void calculateFeaturedExtrinsics(float markerSize, aruco::CameraParameters &camParams, bool setYPerpendicular=true)
void RegisterNextFrame(cv::Point point, cv::Size size)
virtual ~ArFeaturedMarker()
bool detectFeatured(cv::Mat &frame, cv::Mat &color, cv::Size camsize)
bool GetWarpedMarker(cv::Mat &image)
void SetTrackedImage(cv::Mat tpl)
std::vector< cv::Mat > m_prevPyr
std::vector< cv::Point2f > m_prevFramePoints
std::vector< cv::Mat > m_nextPyr
ArFeaturedMarker(std::string filePath, unsigned int nid, float size, unsigned int maxFeatures=600)
void rotateXAxis(cv::Mat &rotation)
void SetPixelPosition(Vector3 pixelpos)
void SetPosition(Vector3 pos)
void SetOrientation(BtQuaternion orientation)
std::vector< cv::Point2f > GetCorners()
void SetVisible(bool visible)
static BtQuaternion FromRotationMatrix(double rotMatrix[16], bool reverseX=false, bool reverseY=true)
std::vector< cv::Point2f > GetFilteredPoints()
void Update(std::vector< cv::Point2f > points)
void Init(float fSmoothing=0.25f, float fCorrection=0.25f, float fPrediction=0.25f, float fJitterRadius=0.03f, float fMaxDeviationRadius=0.05f)
void setDescriptorMatcher(const cv::Ptr< cv::DescriptorMatcher > &match)
void setFeatureDetector(const cv::Ptr< cv::FeatureDetector > &detect)
bool addTrainData(const cv::Mat &frame, std::vector< cv::KeyPoint > &marker_keypoints)
void fastRobustMatch(const cv::Mat &frame, std::vector< cv::DMatch > &good_matches, std::vector< cv::KeyPoint > &keypoints_frame, const cv::Mat &mask)
int ratioTest(std::vector< std::vector< cv::DMatch > > &matches)
void setTrainFeatureDetector(const cv::Ptr< cv::FeatureDetector > &detect)
void robustMatch(const cv::Mat &frame, std::vector< cv::DMatch > &good_matches, std::vector< cv::KeyPoint > &keypoints_frame, const cv::Mat &descriptors_model, const cv::Mat &mask)
void symmetryTest(const std::vector< std::vector< cv::DMatch > > &matches1, const std::vector< std::vector< cv::DMatch > > &matches2, std::vector< cv::DMatch > &symMatches)