From d1596ae6e0f0f26b6e60f7a0b9d56faf4463fc4e Mon Sep 17 00:00:00 2001 From: Thibaut Horel Date: Fri, 11 Dec 2015 22:09:05 -0500 Subject: Add discussion --- finale/final_report.tex | 1 + finale/sections/discussion.tex | 22 ++++++++++++++++++++++ 2 files changed, 23 insertions(+) create mode 100644 finale/sections/discussion.tex diff --git a/finale/final_report.tex b/finale/final_report.tex index 113a637..f2c7fbb 100644 --- a/finale/final_report.tex +++ b/finale/final_report.tex @@ -89,6 +89,7 @@ Diffusion Processes} \input{sections/experiments.tex} \section{Discussion} +\input{sections/discussion.tex} \bibliography{sparse} \bibliographystyle{icml2015} diff --git a/finale/sections/discussion.tex b/finale/sections/discussion.tex new file mode 100644 index 0000000..b04b6dc --- /dev/null +++ b/finale/sections/discussion.tex @@ -0,0 +1,22 @@ +The experimental results obtained in Section 5 look impressive and confirm the +relevance of using a Bayesian approach for the Network Inference problem. +However, we believe that many other aspects of Bayesian Inference could and +should be exploited in the context of Network Inference. We wish to explore +this in future work and only highlight a few possible directions here: +\begin{itemize} + \item obtain formal guarantees on the convergence of the + Bayesian posterior measure. Similarly to convergence rate results obtained with MLE + estimation, we believe that convergence results could also be obtained in + the Bayesian setting, at least in restricted settings or by making certain + assumptions about the network being learned. + \item strengthen the experimental results by systematically studying how + different network properties impact the speedup induced by active learning. + \item finish formally deriving the update equations when using B\"ohning + approximations for Variational Inference. 
+ \item extend the combined Variational Inference and B\"ohning approximation + to Hawkes processes to obtain a unified Bayesian framework for both + discrete-time and continuous-time models. + \item explore the impact of using more expressive (in particular + non-factorized) variational distributions on the speed of convergence, both in offline and active + online learning. +\end{itemize} -- cgit v1.2.3-70-g09d2