aboutsummaryrefslogtreecommitdiff
path: root/report/paper.md
diff options
context:
space:
mode:
authornunzip <np.scarh@gmail.com>2018-11-17 12:15:01 +0000
committernunzip <np.scarh@gmail.com>2018-11-17 12:15:01 +0000
commit5e65497d7109db22137b9c27eb6262e612462fbc (patch)
tree376094e0571850f1574c482e86498ef255af9f56 /report/paper.md
parenta740538848b290ad572f2695cfd3907cd9a0b743 (diff)
downloadvz215_np1915-5e65497d7109db22137b9c27eb6262e612462fbc.tar.gz
vz215_np1915-5e65497d7109db22137b9c27eb6262e612462fbc.tar.bz2
vz215_np1915-5e65497d7109db22137b9c27eb6262e612462fbc.zip
Fix labels
Diffstat (limited to 'report/paper.md')
-rwxr-xr-xreport/paper.md27
1 files changed, 17 insertions, 10 deletions
diff --git a/report/paper.md b/report/paper.md
index 1c9e224..253b1ba 100755
--- a/report/paper.md
+++ b/report/paper.md
@@ -56,6 +56,7 @@ to flatten.
\begin{center}
\includegraphics[width=20em]{fig/accuracy.pdf}
\caption{NN Recognition Accuracy varying M}
+\label{accuracy}
\end{center}
\end{figure}
@@ -138,11 +139,14 @@ Observing in fact the variance ratio of the principal components, the contributi
they'll have will be very low for values above 100, hence we will require a much higher
quantity of components to improve reconstruction quality. With M=100 we will be able to
use effectively 97% of the information from our initial training data for reconstruction.
+Refer to figure~\ref{eigvariance} for the data variance associated with each of the M
+eigenvalues.
\begin{figure}
\begin{center}
\includegraphics[width=20em]{fig/variance.pdf}
\caption{Data variance carried by each of M eigenvalues}
+\label{eigvariance}
\end{center}
\end{figure}
@@ -175,8 +179,8 @@ classification.
\begin{center}
\includegraphics[width=7em]{fig/face2.pdf}
\includegraphics[width=7em]{fig/face5.pdf}
-\label{nn_fail}
\caption{Failure case for NN. Test face left. NN right}
+\label{nn_fail}
\end{center}
\end{figure}
@@ -184,8 +188,8 @@ classification.
\begin{center}
\includegraphics[width=7em]{fig/success1.pdf}
\includegraphics[width=7em]{fig/success1t.pdf}
-\label{nn_succ}
\caption{Success case for NN. Test face left. NN right}
+\label{nn_succ}
\end{center}
\end{figure}
@@ -197,8 +201,8 @@ K=1, as it can be observed from figure \ref{k-diff}.
\begin{figure}
\begin{center}
\includegraphics[width=20em]{fig/kneighbors_diffk.pdf}
-\label{k-diff}
\caption{NN recognition accuracy varying K. Split: 80-20}
+\label{k-diff}
\end{center}
\end{figure}
@@ -207,7 +211,7 @@ subspace is generated for each class. These subspaces are then used for reconstr
of the test image and the class of the subspace that generated the minimum reconstruction
error is assigned.
-The alternative method shows overall a better performance, with peak accuracy of 69%
+The alternative method shows overall a better performance (see figure~\ref{altacc}), with peak accuracy of 69%
for M=5. The maximum M non zero eigenvectors that can be used will in this case be at most
the amount of training samples per class minus one, since the same amount of eigenvectors
will be used for each generated class-subspace.
@@ -216,6 +220,7 @@ will be used for each generated class-subspace.
\begin{center}
\includegraphics[width=20em]{fig/alternative_accuracy.pdf}
\caption{Accuracy of Alternative Method varying M}
+\label{altacc}
\end{center}
\end{figure}
@@ -225,12 +230,12 @@ can be observed in figure \ref{cm-alt}.
\begin{figure}
\begin{center}
\includegraphics[width=20em]{fig/altcm.pdf}
-\label{cm-alt}
\caption{Confusion Matrix for alternative method, M=5}
+\label{cm-alt}
\end{center}
\end{figure}
-Similarly to the NN case, we present two cases, respectively failure and success.
+Similarly to the NN case, we present two cases, respectively failure (figure~\ref{altfail}) and success (figure~\ref{altsucc}).
\begin{figure}
\begin{center}
@@ -238,6 +243,7 @@ Similarly to the NN case, we present two cases, respectively failure and success
\includegraphics[width=7em]{fig/FR.JPG}
\includegraphics[width=7em]{fig/FL.JPG}
\caption{Alternative method failure. Respectively test image, reconstructed image, class assigned}
+\label{altfail}
\end{center}
\end{figure}
@@ -247,6 +253,7 @@ Similarly to the NN case, we present two cases, respectively failure and success
\includegraphics[width=7em]{fig/SR.JPG}
\includegraphics[width=7em]{fig/SL.JPG}
\caption{Alternative method success. Respectively test image, reconstructed image, class assigned}
+\label{altsucc}
\end{center}
\end{figure}
@@ -338,14 +345,14 @@ observed in the confusion matrix shown in figure \ref{ldapca_cm}.
\end{center}
\end{figure}
-Two recognition examples are reported: success in figure \ref{succ_ldapca}) and failure in figure \ref{fail_ldapca}
+Two recognition examples are reported: success in figure~\ref{succ_ldapca} and failure in figure~\ref{fail_ldapca}.
\begin{figure}
\begin{center}
\includegraphics[width=7em]{fig/ldapcaf2.pdf}
\includegraphics[width=7em]{fig/ldapcaf1.pdf}
-\label{fail_ldapca}
\caption{Failure case for PCA-LDA. Test face left. NN right}
+\label{fail_ldapca}
\end{center}
\end{figure}
@@ -353,8 +360,8 @@ Two recognition examples are reported: success in figure \ref{succ_ldapca}) and
\begin{center}
\includegraphics[width=7em]{fig/ldapcas1.pdf}
\includegraphics[width=7em]{fig/ldapcas2.pdf}
-\label{succ_ldapca}
\caption{Success case for PCA-LDA. Test face left. NN right}
+\label{succ_ldapca}
\end{center}
\end{figure}
@@ -367,8 +374,8 @@ the 3 features of the subspaces obtained are graphed.
\begin{center}
\includegraphics[width=12em]{fig/SubspaceQ1.pdf}
\includegraphics[width=12em]{fig/SubspaceQL1.pdf}
-\label{subspaces}
\caption{Generated Subspaces (3 features). PCA on the left. PCA-LDA on the right}
+\label{subspaces}
\end{center}
\end{figure}