aboutsummaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
-rw-r--r--paper/sections/results.tex32
1 files changed, 24 insertions, 8 deletions
diff --git a/paper/sections/results.tex b/paper/sections/results.tex
index bb31f79..128a79f 100644
--- a/paper/sections/results.tex
+++ b/paper/sections/results.tex
@@ -166,22 +166,38 @@ Choosing $\lambda\defeq 2\sqrt{\frac{\log m}{\alpha n^{1-\delta}}}$ concludes th
proof.
\end{proof}
+We now show how to use Theorem~\ref{thm:main} to recover the support of
+$\theta^*$, that is, to solve the Graph Inference problem.
+
\begin{corollary}
\label{cor:variable_selection}
-Assume that ${\bf (RE)}$ holds with $\gamma > 0$ and that $\theta$ is s-sparse. Consider the set $\hat {\cal S}_\eta \defeq \{ i \in [1..p] : \hat \theta_i > \eta\}$ for $\eta > 0$. For $\epsilon>0$ and $\epsilon < \eta$, let ${\cal S}^*_{\eta + \epsilon} \defeq \{ i \in [1..p] :\theta_i > \eta +\epsilon \}$ be the set of all true `strong' parents. Suppose the number of measurements verifies:
-\begin{equation}
-n > \frac{36}{p_{\min}\gamma^2 \epsilon^2} s \log m
-\end{equation}
-Then with probability $1-\frac{1}{m}$, ${\cal S}^*_{\eta + \epsilon} \subset
-\hat {\cal S}_\eta \subset {\cal S}^*$. In other words we recover all `strong'
+Under the same assumptions as in Theorem~\ref{thm:main}, let $\hat {\cal S}_\eta
+\defeq \{ i \in \{1,\ldots, m\} : \hat{\theta}_i > \eta\}$ for $\eta > 0$. For
+$\epsilon>0$ and $\epsilon < \eta$, let ${\cal S}^*_{\eta + \epsilon} \defeq \{
+i \in \{1,\ldots,m\} : \theta_i > \eta +\epsilon \}$ be the set of all true `strong'
+parents. Suppose the number of measurements satisfies:
+$ n > \frac{9}{\alpha}\frac{\gamma^2}{\epsilon^2} s \log m $.
+Then with probability $1-\frac{1}{m}$, ${\cal S}^*_{\eta + \epsilon} \subseteq
+\hat {\cal S}_\eta \subseteq {\cal S}^*$. In other words we recover all `strong'
parents and no `false' parents.
\end{corollary}
\begin{proof}
-Suppose $\theta$ is exactly s-sparse. By choosing $\delta = 0$, if $n>\frac{36}{p_{\min}\gamma^2 \epsilon^2} s \log m$, then $\|\hat \theta-\theta\|_2 < \epsilon < \eta$ with probability $1-\frac{1}{m}$. If $\theta_i = 0$ and $\hat \theta > \eta$, then $\|\hat \theta - \theta\|_2 \geq |\hat \theta_i-\theta_j| > \eta$, which is a contradiction. Therefore we get no false positives. If $\theta_i = \eta + \epsilon$, then $|\hat \theta_i - (\eta+\epsilon)| < \epsilon/2 \implies \theta_j > \eta + \epsilon/2$. Therefore, we get all strong parents.
+By choosing $\delta = 0$, if $n>\frac{9}{\alpha}\frac{\gamma^2}{\epsilon^2} s \log m$,
+then $\|\hat \theta-\theta\|_2 < \epsilon < \eta$ with probability
+$1-\frac{1}{m}$. If $\theta_i = 0$ and $\hat{\theta}_i > \eta$, then $\|\hat
+\theta - \theta\|_2 \geq |\hat \theta_i-\theta_i| > \eta$, which is
+a contradiction. Therefore we get no false positives. If $\theta_i > \eta
++ \epsilon$, then $|\hat{\theta}_i- \theta_i| < \epsilon \implies
+\hat{\theta}_i > \eta$. Therefore, we get all strong parents.
\end{proof}
-Note that if we want \emph{perfect recovery} of all edges, we must estimate $p_{\min}$ within $\epsilon'$ and set $\eta \defeq p_{\min} - \epsilon'$
+Assuming we know a lower bound $\alpha$ on $\Theta_{i,j}$,
+Corollary~\ref{cor:variable_selection} can be applied to the Graph Inference
+problem in the following manner: pick $\eta = \frac{\alpha}{3}$ and $\epsilon
+= \frac{\eta}{2}$, then ${\cal S}_{\eta+\epsilon}^* = {\cal S}^*$ provided that
+$n=\Omega\left(\frac{\gamma^2}{\alpha^3}s\log m\right)$. That is, the support
+of $\theta^*$ can be found by thresholding $\hat{\theta}$ to the level $\eta$.
\subsection{Relaxing the Sparsity Constraint}
\label{sec:relaxing_sparsity}