Merge branch 'master' of github.com:tforgione/Paella

This commit is contained in:
Thomas FORGIONE 2015-03-11 17:52:38 +01:00
commit f6db5939f6
15 changed files with 195 additions and 94 deletions

View File

@ -17,14 +17,14 @@ void readme();
/////////////////////////////////////////////////////////
/// \ingroup detectionandmatching
/// \brief detects the keypoints on the two
/// inputs images and matches the keypoints
/// inputs images with BRISK and matches the keypoints
/// \param img_1 first image
/// \param img_2 second image
/// \param masque_1 mask of img_1
/// \param masque_2 mask of img_2
/// \return a vector containing the matches (pair of Point)
/////////////////////////////////////////////////////////
std::vector<std::pair<cv::Point, cv::Point>> DetectAndMatch(cv::Mat img_1, cv::Mat img_2, cv::Mat masque_1, cv::Mat masque_2);
std::vector<std::pair<cv::Point,cv::Point>> detectAndMatch(cv::Mat const& img_1, cv::Mat const& img_2, cv::Mat const& masque_1, cv::Mat const& masque_2);
/////////////////////////////////////////////////////////
/// \brief Resize the mask of an image
@ -32,7 +32,7 @@ std::vector<std::pair<cv::Point, cv::Point>> DetectAndMatch(cv::Mat img_1, cv::M
/// \return the mask resized
/// \pre the original image need to be in landscape
////////////////////////////////////////////////////////
cv::Mat resizeMask(cv::Mat mask, cv::Size sizeImage);
cv::Mat resizeMask(cv::Mat const& mask, cv::Size const& sizeImage);
/////////////////////////////////////////////////////////
/// \ingroup detectionandmatching
@ -42,7 +42,7 @@ cv::Mat resizeMask(cv::Mat mask, cv::Size sizeImage);
/// \param matches2 contains indexes of matched points : queryIdx for image2 and trainIdx for image 1
/// \return symetric matches
////////////////////////////////////////////////////////
std::vector< cv::DMatch > symetricFilter( std::vector< cv::DMatch > matches1, std::vector< cv::DMatch > matches2);
std::vector< cv::DMatch > symetricFilter( std::vector< cv::DMatch > const& matches1, std::vector< cv::DMatch > const& matches2);
///////////////////////////////////////////////////////
/// \ingroup detectionandmatching
@ -54,7 +54,7 @@ std::vector< cv::DMatch > symetricFilter( std::vector< cv::DMatch > matches1, st
/// \return matches filtered with the order constraint
/// \see symetricFilter
////////////////////////////////////////////////////////
std::vector< cv::DMatch > orderConstraintFilter (std::vector< cv::DMatch > symetricMatches, std::vector<cv::KeyPoint> keypoints_1, std::vector<cv::KeyPoint> keypoints_2, float proportion);
std::vector< cv::DMatch > orderConstraintFilter (std::vector< cv::DMatch > const& symetricMatches, std::vector<cv::KeyPoint> const& keypoints_1, std::vector<cv::KeyPoint> const& keypoints_2, float proportion);
////////////////////////////////////////////////////////
/// \ingroup detectionandmatching
@ -63,7 +63,7 @@ std::vector< cv::DMatch > orderConstraintFilter (std::vector< cv::DMatch > symet
/// \param matches matches to filter
/// \return matches filtered by distance between descriptors
/////////////////////////////////////////////////////////
std::vector< cv::DMatch > thresholdFilter (float distanceThreshold, std::vector< cv::DMatch > matches);
std::vector< cv::DMatch > thresholdFilter (float distanceThreshold, std::vector< cv::DMatch > const& matches);
////////////////////////////////////////////////////////
/// \ingroup detectionandmatching
@ -73,6 +73,6 @@ std::vector< cv::DMatch > thresholdFilter (float distanceThreshold, std::vector<
/// \param correctedMatches the matches in input
/// \return matches filtered by geometric criteria
///////////////////////////////////////////////////////
std::tuple<std::vector< cv::DMatch >, std::vector<cv::KeyPoint>, std::vector<cv::KeyPoint>> geometricFilter ( std::vector<cv::KeyPoint> keypoints_1, std::vector<cv::KeyPoint> keypoints_2, std::vector< cv::DMatch > correctedMatches);
std::tuple<std::vector<cv::DMatch>, std::vector<cv::KeyPoint>, std::vector<cv::KeyPoint>> geometricFilter ( std::vector<cv::KeyPoint> const& keypoints_1, std::vector<cv::KeyPoint> const& keypoints_2, std::vector< cv::DMatch > const& correctedMatches);
#endif // DETECTIONANDMATCHING_HPP

View File

@ -1,5 +1,5 @@
add_subdirectory(Calibration)
# add_subdirectory(DetectionAndMatching)
add_subdirectory(DetectionAndMatching)
add_subdirectory(Extern)
add_subdirectory(HelloCV)
add_subdirectory(Segmentation)

View File

@ -8,14 +8,10 @@
#include <opencv2/features2d/features2d.hpp>
#include <opencv2/nonfree/features2d.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <opencv2/nonfree/nonfree.hpp>
#include <opencv2/calib3d/calib3d.hpp>
#include "DetectionAndMatching/DetectionAndMatching.hpp"
/*headers*/
// /** @function main */
int main( int argc, char** argv )
{
cv::Mat img_1;
@ -55,24 +51,26 @@ if( !img_1.data || !img_2.data )
std::vector<std::pair<cv::Point, cv::Point>> matchPoints;
cv::Mat masque_1_resize = resizeMask(masque_1,img_1.size());
cv::Mat masque_2_resize = resizeMask(masque_2,img_2.size());
matchPoints = DetectAndMatch(img_1, img_2, masque_1_resize, masque_2_resize);
matchPoints = detectAndMatch(img_1, img_2, masque_1_resize, masque_2_resize);
return 0;
}
/** @function DetectionAndMatching */
/**/
std::vector<std::pair<cv::Point,cv::Point>> DetectAndMatch(cv::Mat img_1, cv::Mat img_2, cv::Mat masque_1, cv::Mat masque_2)
std::vector<std::pair<cv::Point,cv::Point>> detectAndMatch(cv::Mat const& img_1, cv::Mat const& img_2, cv::Mat const& masque_1, cv::Mat const& masque_2)
{
//-- Step 1: Detect the keypoints using SURF Detector
int minHessian = 10;
//-- Step 1: Detect the keypoints using BRISK Detector
cv::SurfFeatureDetector detector( minHessian );
int Threshl=3;
int Octaves=4; //(pyramid layer) from which the keypoint has been extracted
float PatternScales=1.5f;
std::vector<cv::KeyPoint> keypoints_1, keypoints_2;
detector.detect( img_1, keypoints_1, masque_1);
detector.detect( img_2, keypoints_2, masque_2);
cv::BRISK BRISKD(Threshl,Octaves,PatternScales);//initialize algoritm
BRISKD.create("Feature2D.BRISK");
BRISKD.detect(img_1, keypoints_1,masque_1);
BRISKD.detect( img_2, keypoints_2, masque_2);
//-- Draw keypoints
cv::Mat img_keypoints_1; cv::Mat img_keypoints_2;
@ -86,13 +84,12 @@ std::vector<std::pair<cv::Point,cv::Point>> DetectAndMatch(cv::Mat img_1, cv::Ma
cv::waitKey(0);
//-- Step 2: Calculate descriptors (feature vectors)
cv::SurfDescriptorExtractor extractor;
//every raw from the descriptors contains a KeyPoint descriptor (128 columns per raw)
cv::Mat descriptors_1, descriptors_2;
extractor.compute( img_1, keypoints_1, descriptors_1 );
extractor.compute( img_2, keypoints_2, descriptors_2 );
BRISKD.compute( img_1, keypoints_1, descriptors_1 );
BRISKD.compute( img_2, keypoints_2, descriptors_2 );
//-- Step 3: Matching descriptor vectors with a brute force matcher
cv::BFMatcher matcher(cv::NORM_L2);
@ -106,17 +103,17 @@ std::vector<std::pair<cv::Point,cv::Point>> DetectAndMatch(cv::Mat img_1, cv::Ma
//order constraint
float proportion = 1;
float proportion = 0.5;
std::vector< cv::DMatch > matches = orderConstraintFilter (symetricMatches, keypoints_1, keypoints_2, proportion);
//threshold filter
float distanceThreshold=0.5;
std::vector< cv::DMatch > correctedMatches = thresholdFilter (distanceThreshold, matches);
/* //threshold filter
float distanceThreshold=1;
std::vector< cv::DMatch > correctedMatches = thresholdFilter (distanceThreshold, symetricMatches);
std::cout << correctedMatches.size() << std::endl;
std::cout << correctedMatches.size() << std::endl;*/
// geometric filter
std::tuple<std::vector< cv::DMatch >, std::vector<cv::KeyPoint>, std::vector<cv::KeyPoint>> tuple = geometricFilter ( keypoints_1, keypoints_2, correctedMatches);
std::tuple<std::vector< cv::DMatch >, std::vector<cv::KeyPoint>, std::vector<cv::KeyPoint>> tuple = geometricFilter ( keypoints_1, keypoints_2,matches);
std::vector< cv::DMatch > geometricMatches = std::get<0>(tuple);
keypoints_1 = std::get<1>(tuple);
keypoints_2 = std::get<2>(tuple);
@ -135,25 +132,21 @@ std::vector<std::pair<cv::Point,cv::Point>> DetectAndMatch(cv::Mat img_1, cv::Ma
// vector of pairs of matching points
std::vector<std::pair<cv::Point,cv::Point>> matchPoints;
for(unsigned int i=0;i<correctedMatches.size();i++)
for(unsigned int i=0;i<geometricMatches.size();i++)
{
//myPair pair of matching points
std::pair<cv::Point,cv::Point> myPair;
//queryIdx index of the point from image 1
myPair.first=keypoints_1[correctedMatches[i].queryIdx].pt;
myPair.first=keypoints_1[geometricMatches[i].queryIdx].pt;
//trainIdx index of the point from image 2
myPair.second=keypoints_2[correctedMatches[i].trainIdx].pt;
myPair.second=keypoints_2[geometricMatches[i].trainIdx].pt;
matchPoints.push_back(myPair);
}
return matchPoints;
}
/** @function symetricFilter */
/* input : std::vector< DMatch > matches1 contains indexes of matched points : queryIdx for image1 and trainIdx for image 2
std::vector< DMatch > matches2 contains indexes of matched points : queryIdx for image2 and trainIdx for image 1*/
/* output : symetric matches*/
std::vector< cv::DMatch > symetricFilter( std::vector< cv::DMatch > matches1, std::vector< cv::DMatch > matches2)
std::vector< cv::DMatch > symetricFilter( std::vector< cv::DMatch > const& matches1, std::vector< cv::DMatch > const& matches2)
{
std::vector< cv::DMatch > symetricMatches;
unsigned int h;
@ -172,13 +165,8 @@ std::vector<std::pair<cv::Point,cv::Point>> DetectAndMatch(cv::Mat img_1, cv::Ma
return symetricMatches;
}
/** @function orderConstraintFilter */
/* input : std::vector< DMatch > symetricMatches,
std::vector<Keypoint> keypoints_1,
std::vector<Keypoint> keypoints_2,
float proportion */
/* output : matches filtered with the order constraint*/
std::vector< cv::DMatch > orderConstraintFilter (std::vector< cv::DMatch > symetricMatches, std::vector<cv::KeyPoint> keypoints_1, std::vector<cv::KeyPoint> keypoints_2, float proportion)
std::vector< cv::DMatch > orderConstraintFilter (std::vector< cv::DMatch > const& symetricMatches, std::vector<cv::KeyPoint> const& keypoints_1, std::vector<cv::KeyPoint> const& keypoints_2, float proportion)
{
std::vector< cv::DMatch > matches;
unsigned int counter;
@ -200,25 +188,19 @@ std::vector<std::pair<cv::Point,cv::Point>> DetectAndMatch(cv::Mat img_1, cv::Ma
return matches;
}
/** @function thresholdFilter */
/* input : float distanceThreshold,
std::vector< DMatch > matches */
/* output : matches filtered by distance between descriptors*/
std::vector< cv::DMatch > thresholdFilter (float distanceThreshold, std::vector< cv::DMatch > matches)
std::vector< cv::DMatch > thresholdFilter (float distanceThreshold, std::vector< cv::DMatch > const& matches)
{
std::vector< cv::DMatch > correctedMatches;
for(unsigned int i=0;i<matches.size();i++)
{
std::cout << matches[i].distance << std::endl;
if(matches[i].distance<distanceThreshold) correctedMatches.push_back(matches[i]);
}
return correctedMatches;
}
/** @function geometricFilter */
/* filter using epipolar geometry, and the fundamental matrix to filter strange points*/
/* input : keypoints_1, keypoints_2 and correctedMatches*/
/* output :*/
std::tuple<std::vector<cv::DMatch>, std::vector<cv::KeyPoint>, std::vector<cv::KeyPoint>> geometricFilter ( std::vector<cv::KeyPoint> keypoints_1, std::vector<cv::KeyPoint> keypoints_2, std::vector< cv::DMatch > correctedMatches)
std::tuple<std::vector<cv::DMatch>, std::vector<cv::KeyPoint>, std::vector<cv::KeyPoint>> geometricFilter ( std::vector<cv::KeyPoint> const& keypoints_1, std::vector<cv::KeyPoint> const& keypoints_2, std::vector< cv::DMatch > const& correctedMatches)
{
std::vector< cv::DMatch > matches;
std::vector<cv::KeyPoint> kpoints_1, kpoints_2, newkpoints_1, newkpoints_2;
@ -231,7 +213,7 @@ std::vector<std::pair<cv::Point,cv::Point>> DetectAndMatch(cv::Mat img_1, cv::Ma
points_2.push_back(kpoints_2[i].pt);
}
cv::Mat masque;
cv::Mat F = cv::findFundamentalMat(points_1, points_2, CV_FM_RANSAC, 3, 0.99, masque);
cv::Mat F = cv::findFundamentalMat(points_1, points_2, CV_FM_RANSAC, 5, 0.99, masque);
unsigned int counter = 0;
for(unsigned int j=0;j<kpoints_1.size();j++)
{
@ -247,12 +229,7 @@ std::vector<std::pair<cv::Point,cv::Point>> DetectAndMatch(cv::Mat img_1, cv::Ma
}
/** @function resizeMask */
/* Input : the mask to resize and the size of the image*/
/* Output : the mask resized */
/* Requirements : the original image need to be in landscape */
cv::Mat resizeMask(cv::Mat mask, cv::Size sizeImage)
cv::Mat resizeMask(cv::Mat const& mask, cv::Size const& sizeImage)
{
cv::Size sizeMask = mask.size();
cv::Mat newMask = cv::Mat::zeros(sizeImage, CV_8UC1);
@ -266,7 +243,6 @@ cv::Mat resizeMask(cv::Mat mask, cv::Size sizeImage)
return newMask;
}
/** @function readme */
void readme()
{
std::cout << " Usage: DetectionAndMatching.exe <img1> <img2> [<mask1> <mask2>]" << std::endl;

View File

@ -12,9 +12,7 @@
\begin{document}
\maketitle
\begin{abstract}
\end{abstract}
\tableofcontents
\part{First part : segmentation, camera calibration, skeletonization, detection and matching of keypoints}
\chapter{Segmentation}
@ -30,15 +28,20 @@
\chapter{Detection and matching of keypoints}
\section{Detection of keypoints}
We tried two differents algorithm to find points of interest :
We tried three different algorithms to find points of interest:
\begin{itemize}
\item surf algorithm
\item sift algorithm
\item brisk algorithm
\end{itemize}
The results were similar with the two algorithms.
Since we had already found a surf matcher, we kept the surf algorithm for detection.
The results were similar with the three algorithms.
We kept the surf algorithm for detection at first, as it is the fastest one, but we finally decided to use the brisk algorithm because surf and sift
are patented algorithms and cannot be used commercially. In the following results you can see the differences between surf
and brisk.
With the surf algorithm there is a parameter named minHessian which can be modified, influencing the number of detected points.
The higher the minHessian, the fewer keypoints you will obtain; on the other hand, the lower the minHessian, the more keypoints you get, but they may be noisier.
With the brisk algorithm some parameters can be changed like the FAST/AGAST detection threshold score (thresh), the detection octaves (octaves) and
the scale to the pattern used for sampling the neighbourhood of a keypoint (patternScale).
\subsection{Test lapin}
@ -60,6 +63,12 @@ The higher the minHessian, the fewer keypoints you will obtain, on the other han
\caption{\label{LapinSurf1000} Lapin, Surf algorithm, minHessian=1000}
\end{figure}
\begin{figure}[H]
\centering
\includegraphics[scale=0.49]{images/pointsDetection/brisk3415}
\caption{\label{brisk3415} Lapin, Brisk, thresh=3, octaves=4, patternScale=1.5}
\end{figure}
\newpage
\subsection{Test minion}
@ -83,7 +92,7 @@ The higher the minHessian, the fewer keypoints you will obtain, on the other han
\section{Matching of keypoints}
\subsection{Different filters}
\subsection{Different filters on Surf }
Different kinds of filters :
\begin{itemize}
\item symmetric constraint
@ -227,6 +236,90 @@ This probably comes from the calculation of the fundamental matrix with the Rans
\caption{\label{LapinAgainGeometric} Geometric filter}
\end{figure}
\subsection{Test using BRISK algorithm}
We changed the parameters of the brisk algorithm and of the filters in order to find the
values that give the best matching points. The different results are
summarised in table \ref{briskTest}.
\begin{table}[H]
\begin{center}
\begin{tabular}{|c|c|c|c|c|c|c|}
\hline
Figure & Thresh & Octaves & PatternScales & Proportion & RANSAC & Matches number\\
\hline
\ref{B1} & 2 & 4 & 1.5 & 1 & 3 & 55\\
\hline
\ref{B2} & 10 & 4 & 1.5 & 1 & 3 & 43\\
\hline
\ref{B3} & 3 & 4 & 1.5 & 1 & 3 & 57\\
\hline
\ref{B4} & 3 & 4 & 1.0 & 1 & 3 & 31\\
\hline
\ref{B5} & 3 & 4 & 2.0 & 1 & 3 & 53\\
\hline
\ref{B6} & 3 & 4 & 1.5 & 1 & 5 & 66\\
\hline
\ref{B7} & 3 & 4 & 1.5 & 0.5 & 3 & 69\\
\hline
\end{tabular}
\caption{\label{briskTest} Test on parameters of brisk algorithm and filters}
\end{center}
\end{table}
\paragraph{Analyses} When we raise the thresh parameter above 3 the number of matches decreases; the same thing
happens when we decrease it below 3. That is why we kept the value 3 for the thresh parameter. Then, the value of the
PatternScales parameter is important to adjust the number of keypoints detected, and thus the number of matches;
the optimal value is 1.5. Using the order constraint filter allows us to find more and better matches, because
the fundamental matrix is computed from all the keypoints, so if there are many errors this matrix is not
well approximated and there are fewer good matches at the end. Finally, when we move the value of the RANSAC parameter
away from 3 there are more errors. The final values kept for the parameters are
those corresponding to figure \ref{B7}.
\begin{figure}[H]
\centering
\includegraphics[scale=0.55]{images/pointsMatching/1}
\caption{\label{B1} BRISK}
\end{figure}
\begin{figure}[H]
\centering
\includegraphics[scale=0.55]{images/pointsMatching/2}
\caption{\label{B2} BRISK}
\end{figure}
\begin{figure}[H]
\centering
\includegraphics[scale=0.55]{images/pointsMatching/3}
\caption{\label{B3} BRISK}
\end{figure}
\begin{figure}[H]
\centering
\includegraphics[scale=0.55]{images/pointsMatching/4}
\caption{\label{B4} BRISK}
\end{figure}
\begin{figure}[H]
\centering
\includegraphics[scale=0.55]{images/pointsMatching/5}
\caption{\label{B5} BRISK}
\end{figure}
\begin{figure}[H]
\centering
\includegraphics[scale=0.55]{images/pointsMatching/6}
\caption{\label{B6} BRISK : one error when we change RANSAC parameter}
\end{figure}
\begin{figure}[H]
\centering
\includegraphics[scale=0.55]{images/pointsMatching/7}
\caption{\label{B7} BRISK : final Result}
\end{figure}
\chapter{Skeletonization}
\section{Cutting skeleton into pieces}
Skeleton extraction tests:

Binary file not shown.

Before

Width:  |  Height:  |  Size: 148 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 99 KiB

View File

@ -1,5 +1,8 @@
\chapter*{Management introduction}
The second important aspect of our project was its management.
We spent the first week on this part. We tried to define clearly the client need, in order to achieve good product specification. Unfortunately it had to evolve during the first part of our project. We took decisions about the way we would work, for example we decided to use pair programming, and to spend time on our tests before developing. We also worked especially on our risks table to anticipate problems, on our actions table to prevent risks realization, and on our schedule to make sure we would finish on time. \\
We have been updating these documents during the project in order to always anticipate problems.
We worked on risks anticipation, organization of the planning and distribution of the actions.

View File

@ -1,5 +1,7 @@
\chapter{Product specification}
During the first week of our project we tried to clearly define the need of the client and the specifications. Here is our product specification.
\section{General constraints}
\subsection{Expected deliverables and delivery delay}

View File

@ -1,11 +1,12 @@
\chapter{Schedule}
Thanks to our risks table we had anticipated the possibility of a schedule modification due to our interaction with the other group and with the client binaries. We kept one week as a security, which we finally had to use before the integration period.
Thanks to our risks table we had anticipated the possibility of a schedule modification due to our interaction with the other group and with the client binaries. We had kept one week as security, and finally had to use it before the integration period.
\section{Initial schedule}
At the beginning of the project we did a detailed schedule for the first part of the project, and did not detail the schedule for the first part of the project. We just assigned 2 weeks and half for the meshing, and one week for the animation.
We spent our first week on the project management, like doing this schedule. \\
At the beginning we did a detailed schedule for the first part of the project, but not for the second part. We just assigned two and a half weeks to the meshing, and one week to the animation.
\begin{figure}[h!]
\begin{center}

BIN
report/img/Junctions10.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 23 KiB

BIN
report/img/Junctions11.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 56 KiB

BIN
report/img/Junctions9.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 6.0 KiB

View File

@ -67,7 +67,7 @@
\include{chapters/productSpecification}
\include{chapters/productDescription}
%\include{chapters/tests}
\include{chapters/technicalConclusion}
\part{Project Management}

View File

@ -1,14 +1,9 @@
\subsection{Extremities}
After computing the mesh around the splines we need to draw the
extremities using subdivisions to obtain smoother extremities.
The first step is calculate the projection of the extremity circle on the correspondant
sphere. Then for a subdivision of depth 0 we link this projection with the mesh points
along the extrem circle (see Figure \ref{projection}). For one subdivision we take the middle of each segments compute
earlier and we project it on the sphere (see Figure \ref{sub}). Finally we made triangular mesh the way you can see on
Figure \ref{mesh}).
After computing the mesh around the splines we need to draw the extremities using subdivisions to obtain smoother extremities.
The first step is to calculate the projection of the extremity circle on the corresponding sphere. Then for a subdivision of depth 0 we link this projection with the mesh points along the extreme circle (see Figure \ref{projection}). For one subdivision we take the middle of each segment computed earlier and project it on the sphere (see Figure \ref{sub}). Finally we build a triangular mesh in the way you can see on Figure \ref{mesh}.
The rendering of our extremities on dino.skl for example can be seen Figures \ref{extremity2} (2 subdivisions) and \ref{extremity10}
The rendering of our extremities on dino.skl for example can be seen on Figures \ref{extremity2} (2 subdivisions) and \ref{extremity10}
(10 subdivisions).
\begin{figure}[h!]

View File

@ -1,32 +1,63 @@
\subsection{Junctions}
The last step to complete is to mesh the junctions.
A junction is a point of a skeleton where more than 2 branches join. The process that consists in meshing this portion of the skeleton is complex. In fact it needs to be applicable on multiple cases, for instance 3 or 4 branches (see Figure \ref{junction}, and to take into account that the perfect case will not always be there.
A junction is a point of a skeleton where more than 2 branches join. The process for meshing this portion of the skeleton is complex. In fact it needs to be applicable to multiple cases, for instance 3 or 4 branches (see Figure \ref{junction}), and to take into account the fact that the perfect case will not always be there.
\begin{figure}[h!]
\begin{figure}[H]
\begin{center}
\includegraphics[scale=0.5]{img/Junctions3}
\caption{\label{junction}Junction of three splines}
\includegraphics[scale=0.5]{img/Junctions9}
\caption{\label{junction}Three splines junction}
\end{center}
\end{figure}
In the figure \ref{junction1} you can see how must be the theoretical case that we should have with a perfect skeleton. You can see the sphere shared with the three splines that join in this junction, and the three characteristic circles associated. Those circles by pairs join together in one point.
In figure \ref{junction1} you can see what we should theoretically obtain with a perfect skeleton. You can see the sphere shared between the three splines joining at this junction, and the three associated characteristic circles. Those circles are pairwise tangent at one point.
\begin{figure}[h!]
\begin{figure}[H]
\begin{center}
\includegraphics[scale=0.5]{img/JunctionTheory}
\caption{\label{junction1}Theory case with perfect characteristic circles}
\caption{\label{junction1}Theoretical case with perfect characteristic circles}
\end{center}
\end{figure}
But in most of the practice case it is not like this. This is due to number's approximation on computers. The idea is then to look for the closest points of two consecutives circles and to join them and join every point of the circles in one point upside and one downside.
But in most practical cases it is not like this. This is due to numerical approximation on computers. The idea is then to look for the closest points of two consecutive circles and to join them. Then we join every point of the circles to one point upside and one downside.
To be able to do it the first step is to cut the circles in two parts to find the upside and downside of each of them. This can be done by computing the best fitting plane to the set of the circles's center points (figure \ref{junction2}).
We begin by identifying the last circles of each spline of the junction (see figure \ref{junction3}).
\begin{figure}[h!]
\begin{figure}[H]
\begin{center}
\includegraphics[scale=0.5]{img/Junctions2}
\caption{\label{junction2}Plane that cut the circles in two}
\includegraphics[scale=0.5]{img/Junctions3}
\caption{\label{junction3}3-splines junction}
\end{center}
\end{figure}
To be able to do it the first step is to cut the circles in two parts to find the upside and downside of each of them. This can be done by computing the best fitting plane for the set of circles' center points (figure \ref{junction2}).
\begin{figure}[H]
\begin{center}
\includegraphics[scale=0.35]{img/Junctions1}
\includegraphics[scale=0.35]{img/Junctions2}
\caption{\label{junction2}Best fitting plane for the set of circles' center points}
\end{center}
\end{figure}
Then, once the points are sorted, we connect the up-points of each circle with the up-projection of the sphere's center onto itself, and likewise for the down-points with the down-projection of the sphere's center.
The result of this process is presented in figure \ref{junction4}.
\begin{figure}[H]
\begin{center}
\includegraphics[scale=0.5]{img/Junctions4}
\includegraphics[scale=0.5]{img/Junctions5}
\caption{\label{junction4}Edges added and mesh result}
\end{center}
\end{figure}
To easily see the result of the mesh on a junction, we have made some tests with only the extreme circles that we use; on real skeletons the junctions are not easy to observe.
Figure \ref{junction5} shows two examples of a junction's mesh, for 3- and 4-branch junctions.
\begin{figure}[H]
\begin{center}
\includegraphics[scale=0.5]{img/Junctions10}
\includegraphics[scale=0.4]{img/Junctions11}
\caption{\label{junction5}Mesh result on 3 and 4-branches junctions}
\end{center}
\end{figure}