Marion Lenfant 2015-03-11 17:12:13 +01:00
commit ff9f4e1cb4
11 changed files with 160 additions and 84 deletions

View File

@ -17,14 +17,14 @@ void readme();
/////////////////////////////////////////////////////////
/// \ingroup detectionandmatching
/// \brief detects the keypoints on the two
/// input images and matches the keypoints
/// input images with BRISK and matches the keypoints
/// \param img_1 first image
/// \param img_2 second image
/// \param masque_1 mask of img_1
/// \param masque_2 mask of img_2
/// \return a vector containing the matches (pair of Point)
/////////////////////////////////////////////////////////
std::vector<std::pair<cv::Point, cv::Point>> DetectAndMatch(cv::Mat img_1, cv::Mat img_2, cv::Mat masque_1, cv::Mat masque_2);
std::vector<std::pair<cv::Point,cv::Point>> detectAndMatch(cv::Mat const& img_1, cv::Mat const& img_2, cv::Mat const& masque_1, cv::Mat const& masque_2);
/////////////////////////////////////////////////////////
/// \brief Resize the mask of an image
@ -32,7 +32,7 @@ std::vector<std::pair<cv::Point, cv::Point>> DetectAndMatch(cv::Mat img_1, cv::M
/// \return the mask resized
/// \pre the original image needs to be in landscape
////////////////////////////////////////////////////////
cv::Mat resizeMask(cv::Mat mask, cv::Size sizeImage);
cv::Mat resizeMask(cv::Mat const& mask, cv::Size const& sizeImage);
/////////////////////////////////////////////////////////
/// \ingroup detectionandmatching
@ -42,7 +42,7 @@ cv::Mat resizeMask(cv::Mat mask, cv::Size sizeImage);
/// \param matches2 contains indexes of matched points : queryIdx for image2 and trainIdx for image 1
/// \return symetric matches
////////////////////////////////////////////////////////
std::vector< cv::DMatch > symetricFilter( std::vector< cv::DMatch > matches1, std::vector< cv::DMatch > matches2);
std::vector< cv::DMatch > symetricFilter( std::vector< cv::DMatch > const& matches1, std::vector< cv::DMatch > const& matches2);
///////////////////////////////////////////////////////
/// \ingroup detectionandmatching
@ -54,7 +54,7 @@ std::vector< cv::DMatch > symetricFilter( std::vector< cv::DMatch > matches1, st
/// \return matches filtered with the order constraint
/// \see symetricFilter
////////////////////////////////////////////////////////
std::vector< cv::DMatch > orderConstraintFilter (std::vector< cv::DMatch > symetricMatches, std::vector<cv::KeyPoint> keypoints_1, std::vector<cv::KeyPoint> keypoints_2, float proportion);
std::vector< cv::DMatch > orderConstraintFilter (std::vector< cv::DMatch > const& symetricMatches, std::vector<cv::KeyPoint> const& keypoints_1, std::vector<cv::KeyPoint> const& keypoints_2, float proportion);
////////////////////////////////////////////////////////
/// \ingroup detectionandmatching
@ -63,7 +63,7 @@ std::vector< cv::DMatch > orderConstraintFilter (std::vector< cv::DMatch > symet
/// \param matches matches to filter
/// \return matches filtered by distance between descriptors
/////////////////////////////////////////////////////////
std::vector< cv::DMatch > thresholdFilter (float distanceThreshold, std::vector< cv::DMatch > matches);
std::vector< cv::DMatch > thresholdFilter (float distanceThreshold, std::vector< cv::DMatch > const& matches);
////////////////////////////////////////////////////////
/// \ingroup detectionandmatching
@ -73,6 +73,6 @@ std::vector< cv::DMatch > thresholdFilter (float distanceThreshold, std::vector<
/// \param correctedMatches the matches in input
/// \return matches filtered by geometric criteria
///////////////////////////////////////////////////////
std::tuple<std::vector< cv::DMatch >, std::vector<cv::KeyPoint>, std::vector<cv::KeyPoint>> geometricFilter ( std::vector<cv::KeyPoint> keypoints_1, std::vector<cv::KeyPoint> keypoints_2, std::vector< cv::DMatch > correctedMatches);
std::tuple<std::vector<cv::DMatch>, std::vector<cv::KeyPoint>, std::vector<cv::KeyPoint>> geometricFilter ( std::vector<cv::KeyPoint> const& keypoints_1, std::vector<cv::KeyPoint> const& keypoints_2, std::vector< cv::DMatch > const& correctedMatches);
#endif // DETECTIONANDMATCHING_HPP
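
A minimal usage sketch of the API declared above, assuming OpenCV 2.4; the image and mask paths are placeholders only:

#include <opencv2/core/core.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <utility>
#include <vector>
#include "DetectionAndMatching/DetectionAndMatching.hpp"

int main()
{
    // load the two views and their masks (paths are examples only)
    cv::Mat img_1 = cv::imread("view1.png");
    cv::Mat img_2 = cv::imread("view2.png");
    cv::Mat masque_1 = cv::imread("mask1.png", CV_LOAD_IMAGE_GRAYSCALE);
    cv::Mat masque_2 = cv::imread("mask2.png", CV_LOAD_IMAGE_GRAYSCALE);

    // bring each mask to the size of its image (the original images must be in landscape)
    cv::Mat m1 = resizeMask(masque_1, img_1.size());
    cv::Mat m2 = resizeMask(masque_2, img_2.size());

    // detect BRISK keypoints, match and filter them; the result is a vector of point pairs
    std::vector<std::pair<cv::Point, cv::Point>> matches =
        detectAndMatch(img_1, img_2, m1, m2);
    return 0;
}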

View File

@ -48,11 +48,9 @@ class Spline
///////////////////////////////////////////////////////////
/// \brief Prints the spline to the stream
///
/// \param t Time at which you want to evaluate the derivative of the
/// spline. Must be between 0 and 1
///
/// \return Tuple that corresponds to the derivative of each
/// coordinates of the spline at time t
/// \param out stream to print the spline
/// \param spline spline to be printed on the stream
/// \return a reference to out
///
/// Prints first the degree of the spline, then the nodes and finally
/// the control points. For example
@ -85,7 +83,7 @@ class Spline
///////////////////////////////////////////////////////////
/// \brief Computes circles around the spline
/// \parap nbCircles number of circles on the spline
/// \param nbCircles number of circles on the spline
/// \param nbPoints number of points on the circles
/// \param globalOffset offset for the indices in the mesh
/// \return a vector of the circles

View File

@ -68,8 +68,8 @@ namespace detail
/// \relates pae::Skeleton3D
/// \brief push the face to the mesh with the correct orientation
/// \param mesh mesh to add the face
/// \face face to be added
/// \sphere_center center of the sphere
/// \param face face to be added
/// \param sphere_center center of the sphere
///
Computes the vectors between the center of the sphere and the points of the face
and adds the face so that the normal points away from the sphere.
@ -86,6 +86,7 @@ namespace detail
/// \param mesh the existing mesh to complete.
/// \param junction_point the 3D point where the junction is done
/// \param junction_radius the radius of the n splines of the junction at the junction
/// \param junction_circles last circles on the spline that we want to join
///
/// \return the plane that cut the circles in two (helpful to test).
////////////////////////////////////////////////////////////////

View File

@ -1,5 +1,5 @@
add_subdirectory(Calibration)
# add_subdirectory(DetectionAndMatching)
add_subdirectory(DetectionAndMatching)
add_subdirectory(Extern)
add_subdirectory(HelloCV)
add_subdirectory(Segmentation)

View File

@ -8,14 +8,10 @@
#include <opencv2/features2d/features2d.hpp>
#include <opencv2/nonfree/features2d.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <opencv2/nonfree/nonfree.hpp>
#include <opencv2/calib3d/calib3d.hpp>
#include "DetectionAndMatching/DetectionAndMatching.hpp"
/*headers*/
// /** @function main */
int main( int argc, char** argv )
{
cv::Mat img_1;
@ -55,24 +51,26 @@ if( !img_1.data || !img_2.data )
std::vector<std::pair<cv::Point, cv::Point>> matchPoints;
cv::Mat masque_1_resize = resizeMask(masque_1,img_1.size());
cv::Mat masque_2_resize = resizeMask(masque_2,img_2.size());
matchPoints = DetectAndMatch(img_1, img_2, masque_1_resize, masque_2_resize);
matchPoints = detectAndMatch(img_1, img_2, masque_1_resize, masque_2_resize);
return 0;
}
/** @function DetectionAndMatching */
/**/
std::vector<std::pair<cv::Point,cv::Point>> DetectAndMatch(cv::Mat img_1, cv::Mat img_2, cv::Mat masque_1, cv::Mat masque_2)
std::vector<std::pair<cv::Point,cv::Point>> detectAndMatch(cv::Mat const& img_1, cv::Mat const& img_2, cv::Mat const& masque_1, cv::Mat const& masque_2)
{
//-- Step 1: Detect the keypoints using SURF Detector
int minHessian = 10;
//-- Step 1: Detect the keypoints using BRISK Detector
cv::SurfFeatureDetector detector( minHessian );
int Threshl=3;
int Octaves=4; // number of detection octaves (pyramid layers)
float PatternScales=1.5f;
std::vector<cv::KeyPoint> keypoints_1, keypoints_2;
detector.detect( img_1, keypoints_1, masque_1);
detector.detect( img_2, keypoints_2, masque_2);
cv::BRISK BRISKD(Threshl,Octaves,PatternScales); // initialize the algorithm
BRISKD.create("Feature2D.BRISK");
BRISKD.detect(img_1, keypoints_1,masque_1);
BRISKD.detect( img_2, keypoints_2, masque_2);
//-- Draw keypoints
cv::Mat img_keypoints_1; cv::Mat img_keypoints_2;
@ -86,13 +84,12 @@ std::vector<std::pair<cv::Point,cv::Point>> DetectAndMatch(cv::Mat img_1, cv::Ma
cv::waitKey(0);
//-- Step 2: Calculate descriptors (feature vectors)
cv::SurfDescriptorExtractor extractor;
//every row of the descriptors contains a KeyPoint descriptor (128 columns per row)
cv::Mat descriptors_1, descriptors_2;
extractor.compute( img_1, keypoints_1, descriptors_1 );
extractor.compute( img_2, keypoints_2, descriptors_2 );
BRISKD.compute( img_1, keypoints_1, descriptors_1 );
BRISKD.compute( img_2, keypoints_2, descriptors_2 );
//-- Step 3: Matching descriptor vectors with a brute force matcher
cv::BFMatcher matcher(cv::NORM_L2);
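// note: BRISK descriptors are binary strings, so cv::NORM_HAMMING is the norm usually recommended for matching them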
@ -106,17 +103,17 @@ std::vector<std::pair<cv::Point,cv::Point>> DetectAndMatch(cv::Mat img_1, cv::Ma
//order constraint
float proportion = 1;
float proportion = 0.5;
std::vector< cv::DMatch > matches = orderConstraintFilter (symetricMatches, keypoints_1, keypoints_2, proportion);
//threshold filter
float distanceThreshold=0.5;
std::vector< cv::DMatch > correctedMatches = thresholdFilter (distanceThreshold, matches);
/* //threshold filter
float distanceThreshold=1;
std::vector< cv::DMatch > correctedMatches = thresholdFilter (distanceThreshold, symetricMatches);
std::cout << correctedMatches.size() << std::endl;
std::cout << correctedMatches.size() << std::endl;*/
// geometric filter
std::tuple<std::vector< cv::DMatch >, std::vector<cv::KeyPoint>, std::vector<cv::KeyPoint>> tuple = geometricFilter ( keypoints_1, keypoints_2, correctedMatches);
std::tuple<std::vector< cv::DMatch >, std::vector<cv::KeyPoint>, std::vector<cv::KeyPoint>> tuple = geometricFilter ( keypoints_1, keypoints_2,matches);
std::vector< cv::DMatch > geometricMatches = std::get<0>(tuple);
keypoints_1 = std::get<1>(tuple);
keypoints_2 = std::get<2>(tuple);
@ -135,25 +132,21 @@ std::vector<std::pair<cv::Point,cv::Point>> DetectAndMatch(cv::Mat img_1, cv::Ma
// vector of pairs of matching points
std::vector<std::pair<cv::Point,cv::Point>> matchPoints;
for(unsigned int i=0;i<correctedMatches.size();i++)
for(unsigned int i=0;i<geometricMatches.size();i++)
{
//myPair pair of matching points
std::pair<cv::Point,cv::Point> myPair;
//queryIdx index of the point from image 1
myPair.first=keypoints_1[correctedMatches[i].queryIdx].pt;
myPair.first=keypoints_1[geometricMatches[i].queryIdx].pt;
//trainIdx index of the point from image 2
myPair.second=keypoints_2[correctedMatches[i].trainIdx].pt;
myPair.second=keypoints_2[geometricMatches[i].trainIdx].pt;
matchPoints.push_back(myPair);
}
return matchPoints;
}
/** @function symetricFilter */
/* input : std::vector< DMatch > matches1 contains indexes of matched points : queryIdx for image1 and trainIdx for image 2
std::vector< DMatch > matches2 contains indexes of matched points : queryIdx for image2 and trainIdx for image 1*/
/* output : symetric matches*/
std::vector< cv::DMatch > symetricFilter( std::vector< cv::DMatch > matches1, std::vector< cv::DMatch > matches2)
std::vector< cv::DMatch > symetricFilter( std::vector< cv::DMatch > const& matches1, std::vector< cv::DMatch > const& matches2)
{
std::vector< cv::DMatch > symetricMatches;
unsigned int h;
@ -172,13 +165,8 @@ std::vector<std::pair<cv::Point,cv::Point>> DetectAndMatch(cv::Mat img_1, cv::Ma
return symetricMatches;
}
/** @function orderConstraintFilter */
/* input : std::vector< DMatch > symetricMatches,
std::vector<Keypoint> keypoints_1,
std::vector<Keypoint> keypoints_2,
float proportion */
/* output : matches filtered with the order constraint*/
std::vector< cv::DMatch > orderConstraintFilter (std::vector< cv::DMatch > symetricMatches, std::vector<cv::KeyPoint> keypoints_1, std::vector<cv::KeyPoint> keypoints_2, float proportion)
std::vector< cv::DMatch > orderConstraintFilter (std::vector< cv::DMatch > const& symetricMatches, std::vector<cv::KeyPoint> const& keypoints_1, std::vector<cv::KeyPoint> const& keypoints_2, float proportion)
{
std::vector< cv::DMatch > matches;
unsigned int counter;
@ -200,25 +188,19 @@ std::vector<std::pair<cv::Point,cv::Point>> DetectAndMatch(cv::Mat img_1, cv::Ma
return matches;
}
/** @function thresholdFilter */
/* input : float distanceThreshold,
std::vector< DMatch > matches */
/* output : matches filtered by distance between descriptors*/
std::vector< cv::DMatch > thresholdFilter (float distanceThreshold, std::vector< cv::DMatch > matches)
std::vector< cv::DMatch > thresholdFilter (float distanceThreshold, std::vector< cv::DMatch > const& matches)
{
std::vector< cv::DMatch > correctedMatches;
for(unsigned int i=0;i<matches.size();i++)
{
std::cout << matches[i].distance << std::endl;
if(matches[i].distance<distanceThreshold) correctedMatches.push_back(matches[i]);
}
return correctedMatches;
}
/** @function geometricFilter */
/* filter using epipolar geometry, and the fundamental matrix to filter strange points*/
/* input : keypoints_1, keypoints_2 and correctedMatches*/
/* output :*/
std::tuple<std::vector<cv::DMatch>, std::vector<cv::KeyPoint>, std::vector<cv::KeyPoint>> geometricFilter ( std::vector<cv::KeyPoint> keypoints_1, std::vector<cv::KeyPoint> keypoints_2, std::vector< cv::DMatch > correctedMatches)
std::tuple<std::vector<cv::DMatch>, std::vector<cv::KeyPoint>, std::vector<cv::KeyPoint>> geometricFilter ( std::vector<cv::KeyPoint> const& keypoints_1, std::vector<cv::KeyPoint> const& keypoints_2, std::vector< cv::DMatch > const& correctedMatches)
{
std::vector< cv::DMatch > matches;
std::vector<cv::KeyPoint> kpoints_1, kpoints_2, newkpoints_1, newkpoints_2;
@ -231,7 +213,7 @@ std::vector<std::pair<cv::Point,cv::Point>> DetectAndMatch(cv::Mat img_1, cv::Ma
points_2.push_back(kpoints_2[i].pt);
}
cv::Mat masque;
cv::Mat F = cv::findFundamentalMat(points_1, points_2, CV_FM_RANSAC, 3, 0.99, masque);
cv::Mat F = cv::findFundamentalMat(points_1, points_2, CV_FM_RANSAC, 5, 0.99, masque);
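// RANSAC parameters: 5 = maximum distance in pixels from a point to its epipolar line, 0.99 = confidence level; masque marks the inlier matches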
unsigned int counter = 0;
for(unsigned int j=0;j<kpoints_1.size();j++)
{
@ -247,12 +229,7 @@ std::vector<std::pair<cv::Point,cv::Point>> DetectAndMatch(cv::Mat img_1, cv::Ma
}
/** @function resizeMask */
/* Input : the mask to resize and the size of the image*/
/* Output : the mask resized */
/* Requirements : the original image need to be in landscape */
cv::Mat resizeMask(cv::Mat mask, cv::Size sizeImage)
cv::Mat resizeMask(cv::Mat const& mask, cv::Size const& sizeImage)
{
cv::Size sizeMask = mask.size();
cv::Mat newMask = cv::Mat::zeros(sizeImage, CV_8UC1);
@ -266,7 +243,6 @@ cv::Mat resizeMask(cv::Mat mask, cv::Size sizeImage)
return newMask;
}
/** @function readme */
void readme()
{
std::cout << " Usage: DetectionAndMatching.exe <img1> <img2> [<mask1> <mask2>]" << std::endl;

View File

@ -12,9 +12,7 @@
\begin{document}
\maketitle
\begin{abstract}
\end{abstract}
\tableofcontents
\part{First part: segmentation, camera calibration, skeletonization, detection and matching of keypoints}
\chapter{Segmentation}
@ -30,15 +28,20 @@
\chapter{Detection and matching of keypoints}
\section{Detection of keypoints}
We tried two different algorithms to find points of interest:
We tried three different algorithms to find points of interest:
\begin{itemize}
\item surf algorithm
\item sift algorithm
\item brisk algorithm
\end{itemize}
The results were similar with the two algorithms.
Since we had already found a surf matcher, we kept the surf algorithm for detection.
The results were similar for the three algorithms.
We initially kept the surf algorithm for detection as it is the fastest one, but we finally decided to use the brisk algorithm because surf and sift
are patented and cannot be used commercially. In the following results you can see the differences between surf
and brisk.
With the surf algorithm there is a parameter named minHessian which can be modified, influencing the number of detected points.
The higher the minHessian, the fewer keypoints you obtain; conversely, the lower the minHessian, the more keypoints you get, but they may be noisier.
With the brisk algorithm several parameters can be changed, such as the FAST/AGAST detection threshold (thresh), the number of detection octaves (octaves) and
the scale applied to the pattern used for sampling the neighbourhood of a keypoint (patternScale).
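To make the role of these parameters concrete, here is a minimal sketch using the OpenCV 2.4 C++ API; the image path and variable names are illustrative only:
\begin{verbatim}
#include <opencv2/core/core.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <opencv2/features2d/features2d.hpp>
#include <opencv2/nonfree/features2d.hpp>   // SURF (non-free module)
#include <vector>

int main()
{
    cv::Mat image = cv::imread("lapin.png", CV_LOAD_IMAGE_GRAYSCALE);

    // SURF: minHessian controls how many keypoints are detected
    int minHessian = 1000;
    cv::SurfFeatureDetector surf(minHessian);
    std::vector<cv::KeyPoint> surfKeypoints;
    surf.detect(image, surfKeypoints);

    // BRISK: detection threshold, number of octaves and pattern scale
    int   thresh       = 3;
    int   octaves      = 4;
    float patternScale = 1.5f;
    cv::BRISK brisk(thresh, octaves, patternScale);
    std::vector<cv::KeyPoint> briskKeypoints;
    cv::Mat briskDescriptors;
    brisk.detect(image, briskKeypoints);
    brisk.compute(image, briskKeypoints, briskDescriptors);
    return 0;
}
\end{verbatim}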
\subsection{Test lapin}
@ -60,6 +63,12 @@ The higher the minHessian, the fewer keypoints you will obtain, on the other han
\caption{\label{LapinSurf1000} Lapin, Surf algorithm, minHessian=1000}
\end{figure}
\begin{figure}[H]
\centering
\includegraphics[scale=0.49]{images/pointsDetection/brisk3415}
\caption{\label{brisk3415} Lapin, Brisk, thresh=3, octaves=4, patternScale=1.5}
\end{figure}
\newpage
\subsection{Test minion}
@ -83,7 +92,7 @@ The higher the minHessian, the fewer keypoints you will obtain, on the other han
\section{Matching of keypoints}
\subsection{Different filters}
\subsection{Different filters on Surf}
Different kinds of filters:
\begin{itemize}
\item symmetric constraint
@ -227,6 +236,90 @@ This probably comes from the calculation of the fundamental matrix with the Rans
\caption{\label{LapinAgainGeometric} Geometric filter}
\end{figure}
\subsection{Test using BRISK algorithm}
We changed the parameters of the brisk algorithm and of the filters in order to find the
values that give the best matching points. The different results are
summarised in table \ref{briskTest}.
\begin{table}[H]
\begin{center}
\begin{tabular}{|c|c|c|c|c|c|c|}
\hline
Figure & Thresh & Octaves & PatternScales & Proportion & RANSAC threshold & Number of matches\\
\hline
\ref{B1} & 2 & 4 & 1.5 & 1 & 3 & 55\\
\hline
\ref{B2} & 10 & 4 & 1.5 & 1 & 3 & 43\\
\hline
\ref{B3} & 3 & 4 & 1.5 & 1 & 3 & 57\\
\hline
\ref{B4} & 3 & 4 & 1.0 & 1 & 3 & 31\\
\hline
\ref{B5} & 3 & 4 & 2.0 & 1 & 3 & 53\\
\hline
\ref{B6} & 3 & 4 & 1.5 & 1 & 5 & 66\\
\hline
\ref{B7} & 3 & 4 & 1.5 & 0.5 & 3 & 69\\
\hline
\end{tabular}
\caption{\label{briskTest} Test on parameters of brisk algorithm and filters}
\end{center}
\end{table}
\paragraph{Analysis} When we raise the thresh parameter above 3 the number of matches decreases, and the same thing
happens when we lower it below 3; that is why we kept the value 3 for the thresh parameter. The
PatternScales parameter is also important, since it adjusts the number of detected keypoints and therefore the number of matches;
the optimal value is 1.5. Using the order constraint filter lets us find more and better matches, because
the fundamental matrix is computed from all the matches given to the geometric filter: if they contain many errors the matrix is not
well approximated and fewer good matches remain at the end. Finally, when we move the RANSAC threshold
away from 3, more errors appear. The final values kept for the parameters are
those corresponding to figure \ref{B7}.
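As a reminder (with our notation), the geometric filter relies on the epipolar constraint: for a correct match between a point $x_1$ in the first image and a point $x_2$ in the second image, written in homogeneous coordinates, the fundamental matrix $F$ estimated by RANSAC satisfies
\[
x_2^\top F \, x_1 = 0,
\]
and matches whose points lie farther from their epipolar line than the RANSAC threshold (in pixels) are rejected as outliers.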
\begin{figure}[H]
\centering
\includegraphics[scale=0.55]{images/pointsMatching/1}
\caption{\label{B1} BRISK}
\end{figure}
\begin{figure}[H]
\centering
\includegraphics[scale=0.55]{images/pointsMatching/2}
\caption{\label{B2} BRISK}
\end{figure}
\begin{figure}[H]
\centering
\includegraphics[scale=0.55]{images/pointsMatching/3}
\caption{\label{B3} BRISK}
\end{figure}
\begin{figure}[H]
\centering
\includegraphics[scale=0.55]{images/pointsMatching/4}
\caption{\label{B4} BRISK}
\end{figure}
\begin{figure}[H]
\centering
\includegraphics[scale=0.55]{images/pointsMatching/5}
\caption{\label{B5} BRISK}
\end{figure}
\begin{figure}[H]
\centering
\includegraphics[scale=0.55]{images/pointsMatching/6}
\caption{\label{B6} BRISK: one error when we change the RANSAC threshold}
\end{figure}
\begin{figure}[H]
\centering
\includegraphics[scale=0.55]{images/pointsMatching/7}
\caption{\label{B7} BRISK: final result}
\end{figure}
\chapter{Skeletonization}
\section{Cutting skeleton into pieces}
Skeleton extraction tests:

View File

@ -1,5 +1,8 @@
\chapter*{Management introduction}
The second important aspect of our project was its management.
We spent the first week on this part. We tried to define the client's needs clearly, in order to produce a good product specification; unfortunately it had to evolve during the first part of our project. We made decisions about the way we would work: for example, we decided to use pair programming and to spend time on our tests before developing. We also worked especially on our risks table to anticipate problems, on our actions table to prevent risks from materialising, and on our schedule to make sure we would finish on time. \\
We kept these documents up to date during the project in order to always anticipate problems.
We worked on risks anticipation, organization of the planning and distribution of the actions.

View File

@ -1,5 +1,7 @@
\chapter{Product specification}
During the first week of our project we tried to clearly define the client's needs and the specifications. Here is our product specification.
\section{General constraints}
\subsection{Expected deliverables and delivery delay}

View File

@ -1,11 +1,12 @@
\chapter{Schedule}
Thanks to our risks table we had anticipated the possibility of a schedule modification due to our interaction with the other group and with the client binaries. We kept one week as a security, which we finally had to use before the integration period.
Thanks to our risks table we had anticipated the possibility of a schedule modification due to our interactions with the other group and with the client binaries. We had kept one week as a safety margin, and finally had to use it before the integration period.
\section{Initial schedule}
At the beginning of the project we did a detailed schedule for the first part of the project, and did not detail the schedule for the first part of the project. We just assigned 2 weeks and half for the meshing, and one week for the animation.
We spent our first week on project management, including the writing of this schedule. \\
At the beginning we did a detailed schedule for the first part of the project, but not for the second part. We just assigned two and a half weeks to the meshing, and one week to the animation.
\begin{figure}[h!]
\begin{center}

View File

@ -26,7 +26,7 @@
\setlength{\topmargin}{-30pt} % No top margin
\setlength{\headheight}{13pt} % Page header
\setlength{\headsep}{10pt} % Between the page header and the text
\setlength{\footskip}{0.8cm} % Page footer + separation
\setlength{\footskip}{0.4cm} % Page footer + separation
\setlength{\textheight}{24.5cm} % Height of the text area (25cm)
@ -67,7 +67,7 @@
\include{chapters/productSpecification}
\include{chapters/productDescription}
%\include{chapters/tests}
\include{chapters/technicalConclusion}
\part{Project Management}

View File

@ -1 +1,3 @@
\subsection{Segmentation}
\subsection{Segmentation}
This part was done by the other group.