aboutsummaryrefslogtreecommitdiffstats
path: root/paper
diff options
context:
space:
mode:
authorjeanpouget-abadie <jean.pougetabadie@gmail.com>2015-05-18 10:47:17 +0200
committerjeanpouget-abadie <jean.pougetabadie@gmail.com>2015-05-18 10:47:17 +0200
commit48a2579659a5cdb16fc65b5acda5722257cf4964 (patch)
treed3eec8f74bc4f4620c454e18af4967142efb6cfd /paper
parent2586d50b4ce7c932656b8f144784511f08692e14 (diff)
downloadcascades-48a2579659a5cdb16fc65b5acda5722257cf4964.tar.gz
adding poster+WWW presentation+added 2 citations in introduction
Diffstat (limited to 'paper')
-rw-r--r--paper/sections/intro.tex21
-rw-r--r--paper/sparse.bib21
2 files changed, 32 insertions, 10 deletions
diff --git a/paper/sections/intro.tex b/paper/sections/intro.tex
index 264476b..f369e3c 100644
--- a/paper/sections/intro.tex
+++ b/paper/sections/intro.tex
@@ -117,10 +117,10 @@ achieves a ${\cal O}(s \log m)$ guarantee in the case of tree graphs. The work
of~\cite{Abrahao:13} studies the same continuous-model framework as
\cite{GomezRodriguez:2010} and obtains an ${\cal O}(s^9 \log^2 s \log m)$
support recovery algorithm, without the \emph{correlation decay} assumption.
+\citet{du2013uncover} propose an algorithm similar to ours for recovering the
+weights of the graph under a continuous-time independent cascade model, without
+proving theoretical guarantees.
-{\color{red} Du et.~al make a citation}
-
-{\color{red} say they follow the same model as Gomez and abrahao}
Closest to this work is a recent paper by \citet{Daneshmand:2014}, wherein the
authors consider a $\ell_1$-regularized objective function. They adapt standard
results from sparse recovery to obtain a recovery bound of ${\cal O}(s^3 \log
@@ -132,12 +132,15 @@ Independent Cascade model under weaker assumptions. Furthermore, we analyze both
the recovery of the graph's edges and the estimation of the model's parameters,
and achieve close to optimal bounds.
-\begin{comment}
-Their work has the merit of studying a generalization of the discrete-time
-independent cascade model to continuous functions. Similarly to
-\cite{Abrahao:13}, they place themselves in the restrictive single-source
-context.
-\end{comment}
+The work of~\cite{du2014influence} is slightly orthogonal to ours since they
+suggest learning the \emph{influence} function, rather than the network's
+parameters directly.
+%\begin{comment}
+%Their work has the merit of studying a generalization of the discrete-time
+%independent cascade model to continuous functions. Similarly to
+%\cite{Abrahao:13}, they place themselves in the restrictive single-source
+%context.
+%\end{comment}
\begin{comment}
\paragraph{Our contributions}
diff --git a/paper/sparse.bib b/paper/sparse.bib
index 81b493e..00001f0 100644
--- a/paper/sparse.bib
+++ b/paper/sparse.bib
@@ -29,7 +29,26 @@ year = {2006},
@inproceedings{Pouget:2015,
title={Inferring graphs from cascades: A Sparse Recovery Framework},
author={Pouget-Abadie, Jean and Horel, Thibaut},
- series={ICML'15}
+ series={ICML'15},
+ year={2015}
+}
+
+@inproceedings{du2013uncover,
+ title={Uncover topic-sensitive information diffusion networks},
+ author={Du, Nan and Song, Le and Woo, Hyenkyun and Zha, Hongyuan},
+ booktitle={Proceedings of the Sixteenth International Conference on
+ Artificial Intelligence and Statistics},
+ pages={229--237},
+ year={2013}
+}
+
+@inproceedings{du2014influence,
+ title={Influence function learning in information diffusion networks},
+ author={Du, Nan and Liang, Yingyu and Balcan, Maria and Song, Le},
+ booktitle={Proceedings of the 31st International Conference on Machine
+ Learning (ICML-14)},
+ pages={2016--2024},
+ year={2014}
}