aboutsummaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
-rw-r--r--paper/sections/lowerbound.tex47
-rw-r--r--paper/sparse.bib76
2 files changed, 103 insertions, 20 deletions
diff --git a/paper/sections/lowerbound.tex b/paper/sections/lowerbound.tex
index bbd8566..fdee069 100644
--- a/paper/sections/lowerbound.tex
+++ b/paper/sections/lowerbound.tex
@@ -1,10 +1,12 @@
-In this section, we will consider the stable sparse recovery of section XXX.
-Our goal is to obtain an information-theoretic lower bound on the number of
-measurements necessary to approximately recover the parameter $\theta$ of
-a cascade model from observed cascades. Similar lower bounds were obtained for
-sparse linear inverse problems in XXXs.
+In this section, we will consider the stable sparse recovery setting of
+Section~\ref{sec:relaxing}. Our goal is to obtain an information-theoretic
+lower bound on the number of measurements necessary to approximately recover
+the parameter $\theta$ of a cascade model from observed cascades. Similar lower
+bounds were obtained for sparse linear inverse problems in \cite{pw11, pw12,
+bipw11}.
\begin{theorem}
+ \label{thm:lb}
Let us consider a cascade model of the form XXX and a recovery algorithm
$\mathcal{A}$ which takes as input $n$ random cascade measurements and
outputs $\hat{\theta}$ such that with probability $\delta<\frac{1}{2}$
@@ -17,23 +19,27 @@ sparse linear inverse problems in XXXs.
= \Omega(s\log\frac{m}{s}/\log C)$.
\end{theorem}
-This theorem should be contrasted with Corollary XXX: up to an additive
+This theorem should be contrasted with Corollary~\ref{cor:relaxing}: up to an additive
$s\log s$ factor, the number of measurements required by our algorithm is
tight.
-The proof of Theorem XXX follows an approach similar to XXX. Let us consider an
-algorithm $\mathcal{A}$ as in the theorem. Intuitively, $\mathcal{A}$ allows
-Alice and Bob to send $\Omega(s\log\frac{m}{s})$ quantity of information over
-a Gaussian channel. By the Shannon-Hartley theorem, this quantity of
-information is $O(n\log C)$. These two bounds together give the theorem.
+The proof of Theorem~\ref{thm:lb} follows an approach similar to \cite{pw12}.
+Let us consider an algorithm $\mathcal{A}$ as in the theorem. Intuitively,
+$\mathcal{A}$ allows Alice and Bob to send $\Omega(s\log\frac{m}{s})$ bits
+of information over a Gaussian channel. By the Shannon-Hartley theorem, this
+quantity of information is $O(n\log C)$. These two bounds together give the
+theorem.
-Formally, let $\mathcal{F}$ be a family such that:
+Formally, let $\mathcal{F}\subset \{S\subset [1..m]\,|\, |S|=s\}$ be a family
+of $s$-sparse supports such that:
\begin{itemize}
- \item
- \item
- \item
+  \item $|S\Delta S'|\geq s$ for $S\neq S'\in\mathcal{F}$.
+  \item $\P_{S\in\mathcal{F}}[i\in S]=\frac{s}{m}$ for all $i\in [1..m]$,
+  when $S$ is chosen uniformly at random in $\mathcal{F}$.
+  \item $\log|\mathcal{F}| = \Omega(s\log\frac{m}{s})$.
\end{itemize}
-and let $X = \big\{t\in\{-1,0,1\}^m\,|\, \mathrm{supp}(t)\in\mathcal{F}\big\}$.
+Such a family can be obtained by considering a linear code (see details in
+\cite{pw11}). Let $X = \big\{t\in\{-1,0,1\}^m\,|\, \mathrm{supp}(t)\in\mathcal{F}\big\}$.
Consider the following communication game between Alice and Bob.
\begin{itemize}
@@ -50,15 +56,18 @@ Consider the following communication game between Alice and Bob.
of $\hat{\theta}$ in $X$.
\end{itemize}
-We have the following from XXX and XXX respectively.
+We have the following from \cite{pw11} and \cite{pw12}, respectively.
\begin{lemma}
+ \label{lemma:upperinf}
Let $S'=\mathrm{supp}(\hat{t})$. If $\alpha = \Omega(\frac{1}{C})$, $I(S, S')
= O(n\log C)$.
\end{lemma}
\begin{lemma}
- If $\alpha = \Omega(\frac{1}{C})$, $I(S, S') = \Omega(s\log\frac{m}{s})$
+ \label{lemma:lowerinf}
+ If $\alpha = \Omega(\frac{1}{C})$, $I(S, S') = \Omega(s\log\frac{m}{s})$.
\end{lemma}
-Lemmas XXX and XXX together give Theorem XXX.
+Lemmas~\ref{lemma:lowerinf} and~\ref{lemma:upperinf} together give
+Theorem~\ref{thm:lb}.
diff --git a/paper/sparse.bib b/paper/sparse.bib
index d1abd66..c05d318 100644
--- a/paper/sparse.bib
+++ b/paper/sparse.bib
@@ -269,4 +269,78 @@ year = "2009"
timestamp = {Tue, 21 May 2013 14:15:50 +0200},
biburl = {http://dblp.uni-trier.de/rec/bib/journals/tit/RudelsonZ13},
bibsource = {dblp computer science bibliography, http://dblp.org}
-} \ No newline at end of file
+}
+
+@article{bipw11,
+ author = {Khanh Do Ba and
+ Piotr Indyk and
+ Eric Price and
+ David P. Woodruff},
+ title = {Lower Bounds for Sparse Recovery},
+ journal = {CoRR},
+ volume = {abs/1106.0365},
+ year = {2011},
+ url = {http://arxiv.org/abs/1106.0365},
+ timestamp = {Mon, 05 Dec 2011 18:04:39 +0100},
+ biburl = {http://dblp.uni-trier.de/rec/bib/journals/corr/abs-1106-0365},
+ bibsource = {dblp computer science bibliography, http://dblp.org}
+}
+
+@inproceedings{pw11,
+ author = {Eric Price and
+ David P. Woodruff},
+ title = {{(1} + eps)-Approximate Sparse Recovery},
+ booktitle = {{IEEE} 52nd Annual Symposium on Foundations of Computer Science, {FOCS}
+ 2011, Palm Springs, CA, USA, October 22-25, 2011},
+ pages = {295--304},
+ year = {2011},
+ crossref = {DBLP:conf/focs/2011},
+ url = {http://dx.doi.org/10.1109/FOCS.2011.92},
+ doi = {10.1109/FOCS.2011.92},
+ timestamp = {Tue, 16 Dec 2014 09:57:24 +0100},
+ biburl = {http://dblp.uni-trier.de/rec/bib/conf/focs/PriceW11},
+ bibsource = {dblp computer science bibliography, http://dblp.org}
+}
+
+@proceedings{DBLP:conf/focs/2011,
+ editor = {Rafail Ostrovsky},
+ title = {{IEEE} 52nd Annual Symposium on Foundations of Computer Science, {FOCS}
+ 2011, Palm Springs, CA, USA, October 22-25, 2011},
+ publisher = {{IEEE} Computer Society},
+ year = {2011},
+ url = {http://ieeexplore.ieee.org/xpl/mostRecentIssue.jsp?punumber=6108120},
+ isbn = {978-1-4577-1843-4},
+ timestamp = {Mon, 15 Dec 2014 18:48:45 +0100},
+ biburl = {http://dblp.uni-trier.de/rec/bib/conf/focs/2011},
+ bibsource = {dblp computer science bibliography, http://dblp.org}
+}
+
+@inproceedings{pw12,
+ author = {Eric Price and
+ David P. Woodruff},
+ title = {Applications of the Shannon-Hartley theorem to data streams and sparse
+ recovery},
+ booktitle = {Proceedings of the 2012 {IEEE} International Symposium on Information
+ Theory, {ISIT} 2012, Cambridge, MA, USA, July 1-6, 2012},
+ pages = {2446--2450},
+ year = {2012},
+ crossref = {DBLP:conf/isit/2012},
+ url = {http://dx.doi.org/10.1109/ISIT.2012.6283954},
+ doi = {10.1109/ISIT.2012.6283954},
+ timestamp = {Mon, 01 Oct 2012 17:34:07 +0200},
+ biburl = {http://dblp.uni-trier.de/rec/bib/conf/isit/PriceW12},
+ bibsource = {dblp computer science bibliography, http://dblp.org}
+}
+
+@proceedings{DBLP:conf/isit/2012,
+ title = {Proceedings of the 2012 {IEEE} International Symposium on Information
+ Theory, {ISIT} 2012, Cambridge, MA, USA, July 1-6, 2012},
+ publisher = {{IEEE}},
+ year = {2012},
+ url = {http://ieeexplore.ieee.org/xpl/mostRecentIssue.jsp?punumber=6268627},
+ isbn = {978-1-4673-2580-6},
+ timestamp = {Mon, 01 Oct 2012 17:33:45 +0200},
+ biburl = {http://dblp.uni-trier.de/rec/bib/conf/isit/2012},
+ bibsource = {dblp computer science bibliography, http://dblp.org}
+}
+