path: root/report/paper.md
author     nunzip <np.scarh@gmail.com>  2019-03-08 19:21:44 +0000
committer  nunzip <np.scarh@gmail.com>  2019-03-08 19:21:44 +0000
commit     3adb475617e8dd8e53335e834083e6c5348110a5 (patch)
tree       b52de548f27adc4406134c0b67728dc3dd9b16a6 /report/paper.md
parent     97cc07ebc57a813f1fd4b32f314f455c033ab55a (diff)
Update table
Diffstat (limited to 'report/paper.md')
-rw-r--r--  report/paper.md | 30
1 file changed, 15 insertions(+), 15 deletions(-)
diff --git a/report/paper.md b/report/paper.md
index 54f25db..984debf 100644
--- a/report/paper.md
+++ b/report/paper.md
@@ -151,21 +151,21 @@ with L2-Net logits.
$$ \textrm{IS}(x) = \exp(\mathbb{E}_x \left[ \textrm{KL} ( p(y\|x) \|\| p(y) ) \right] ) $$
\begin{table}[]
-\begin{tabular}{lll}
- & \begin{tabular}[c]{@{}l@{}}Test Accuracy \\ (L2-Net)\end{tabular} & \begin{tabular}[c]{@{}l@{}}Inception Score \\ (L2-Net)\end{tabular} \\ \hline
- Shallow CGAN & 0.7031 & 5.8 \\
- Medium CGAN & 0.7837 & 6.09 \\
- Deep CGAN & 0.8038 & 6.347 \\
- Convolutional CGAN & 0.7714 & 6.219 \\
- \begin{tabular}[c]{@{}l@{}}Medium CGAN\\ One-sided label smoothing\end{tabular} & 0.8268 & 6.592 \\
- \begin{tabular}[c]{@{}l@{}}Convolutional CGAN\\ One-sided label smoothing\end{tabular} & 0.821 & 7.944 \\
- \begin{tabular}[c]{@{}l@{}}Medium CGAN\\ Dropout 0.1\end{tabular} & 0.7697 & 6.341 \\
- \begin{tabular}[c]{@{}l@{}}Medium CGAN\\ Dropout 0.5\end{tabular} & 0.751 & 6.16 \\
- \begin{tabular}[c]{@{}l@{}}Medium CGAN\\ Virtual Batch Normalization\end{tabular} & 0.787 & 6.28 \\
- \begin{tabular}[c]{@{}l@{}}Medium CGAN\\ Virtual Batch Normalization\\ One-sided label smoothing\end{tabular} & 0.829 & 6.62 \\
- *MNIST original test set & 0.9846 & 9.685
- \end{tabular}
- \end{table}
+\begin{tabular}{llll}
+ & \begin{tabular}[c]{@{}l@{}}Test \\ Accuracy \\ (L2-Net)\end{tabular} & \begin{tabular}[c]{@{}l@{}}Inception \\ Score \\ (L2-Net)\end{tabular} & \begin{tabular}[c]{@{}l@{}}Execution \\ time\\ (Training \\ GAN)\end{tabular} \\ \hline
+ Shallow CGAN & 0.645 & 3.57 & 8:14 \\
+ Medium CGAN & 0.715 & 3.79 & 10:23 \\
+ Deep CGAN & 0.739 & 3.85 & 16:27 \\
+ Convolutional CGAN & 0.737 & 4 & 25:27 \\
+ \begin{tabular}[c]{@{}l@{}}Medium CGAN\\ One-sided label \\ smoothing\end{tabular} & 0.749 & 3.643 & 10:42 \\
+ \begin{tabular}[c]{@{}l@{}}Convolutional CGAN\\ One-sided label \\ smoothing\end{tabular} & 0.601 & 2.494 & 27:36 \\
+ \begin{tabular}[c]{@{}l@{}}Medium CGAN\\ Dropout 0.1\end{tabular} & 0.761 & 3.836 & 10:36 \\
+ \begin{tabular}[c]{@{}l@{}}Medium CGAN\\ Dropout 0.5\end{tabular} & 0.725 & 3.677 & 10:36 \\
+ \begin{tabular}[c]{@{}l@{}}Medium CGAN\\ Virtual Batch \\ Normalization\end{tabular} & ? & ? & ? \\
+ \begin{tabular}[c]{@{}l@{}}Medium CGAN\\ Virtual Batch \\ Normalization\\ One-sided label \\ smoothing\end{tabular} & ? & ? & ? \\
+ *MNIST original test set & 0.9846 & 9.685 & N/A
+ \end{tabular}
+ \end{table}
# Re-training the handwritten digit classifier
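
As a rough illustration of the Inception Score formula quoted in the hunk above, the sketch below computes IS from a matrix of per-sample class probabilities. It is a minimal NumPy example, not code from this repository; the names `inception_score` and `probs` are illustrative, and `probs` is assumed to hold the L2-Net softmax outputs for the generated samples.

```python
import numpy as np

def inception_score(probs, eps=1e-12):
    """IS = exp(E_x[ KL( p(y|x) || p(y) ) ]).

    probs: (N, num_classes) array of p(y|x) from the evaluation
    classifier (L2-Net in the table above) for N generated samples.
    """
    p_y = probs.mean(axis=0, keepdims=True)        # marginal p(y) over the batch
    kl = (probs * (np.log(probs + eps) - np.log(p_y + eps))).sum(axis=1)
    return float(np.exp(kl.mean()))                # exp of the mean per-sample KL

# e.g. score = inception_score(classifier.predict(generated_images))
```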