 paper/sections/results.tex | 3 +--
 src/convex_optimization.py | 2 +-
 src/make_plots.py          | 6 +++---
 3 files changed, 5 insertions(+), 6 deletions(-)
diff --git a/paper/sections/results.tex b/paper/sections/results.tex
index 46521ed..95a0826 100644
--- a/paper/sections/results.tex
+++ b/paper/sections/results.tex
@@ -164,8 +164,7 @@ Choosing $\lambda\defeq 2\sqrt{\frac{\log m}{\alpha n^{1-\delta}}}$ concludes th
proof.
\end{proof}
-Note how the proof of Lemma 3 relied crucially on Azuma-Hoeffding's inequality: by supposing ... We now show how to use Theorem~\ref{thm:main} to recover the support of
-$\theta^*$, that is, to solve the Graph Inference problem.
+Note how the proof of Lemma~\ref{lem:ub} relied crucially on the Azuma-Hoeffding inequality, which allows us to handle correlated observations and to obtain bounds in terms of the number of measurements rather than the number of cascades. We now show how to use Theorem~\ref{thm:main} to recover the support of $\theta^*$, that is, to solve the Graph Inference problem.
\begin{corollary}
\label{cor:variable_selection}
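
For reference, the inequality the rewritten sentence leans on is the standard Azuma-Hoeffding bound (stated here for context, not part of the commit): for a martingale $(X_k)_{k \ge 0}$ whose increments satisfy $|X_k - X_{k-1}| \le c_k$ almost surely,

\[
  \Pr\bigl( |X_n - X_0| \ge t \bigr) \;\le\; 2 \exp\!\left( -\frac{t^2}{2 \sum_{k=1}^{n} c_k^2} \right).
\]

Since it requires only the martingale property and not independence, it applies to the correlated observations collected within a single cascade, which is what yields bounds in terms of measurements rather than cascades.
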
diff --git a/src/convex_optimization.py b/src/convex_optimization.py
index 0d506e1..8dc6f82 100644
--- a/src/convex_optimization.py
+++ b/src/convex_optimization.py
@@ -90,7 +90,7 @@ def diff_and_opt(M_val, w_val, f_x, f_xz):
def F(x=None, z=None):
if x is None:
- return 0, cvxopt.matrix(-.1, (n,1))
+ return 0, cvxopt.matrix(-.001, (n,1))
elif z is None:
y, y_diff = f_x(x, M_val, w_val)
return cvxopt.matrix(float(y), (1, 1)),\
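
For context, F above follows the callback convention of cvxopt.solvers.cp: called with no arguments it must return the number of nonlinear constraints and the starting point, so this commit moves the starting point from -0.1 to -0.001, closer to the origin. Below is a minimal self-contained sketch of that convention; the toy quadratic objective and the box constraint are illustrative stand-ins for the repository's f_x/f_xz, not its actual objective.

import cvxopt

n = 4  # problem dimension (chosen for the example)
cvxopt.solvers.options['show_progress'] = False

def F(x=None, z=None):
    # F(): report 0 nonlinear constraints and the starting point x0.
    # The commit above moves x0 from -0.1 to -0.001.
    if x is None:
        return 0, cvxopt.matrix(-.001, (n, 1))
    # F(x): return the objective value f(x) = ||x||^2 and its gradient
    # Df as a 1 x n matrix, mirroring the (y, y_diff) pair above.
    f = sum(x[i] ** 2 for i in range(n))
    Df = cvxopt.matrix([2.0 * x[i] for i in range(n)], (1, n))
    if z is None:
        return cvxopt.matrix(f, (1, 1)), Df
    # F(x, z): additionally return the weighted Hessian z[0] * (2I).
    H = 2.0 * z[0] * cvxopt.spdiag([1.0] * n)
    return cvxopt.matrix(f, (1, 1)), Df, H

# Box constraint x >= -1, written as -x <= 1, keeps the example bounded.
G = cvxopt.spdiag([-1.0] * n)
h = cvxopt.matrix(1.0, (n, 1))
sol = cvxopt.solvers.cp(F, G=G, h=h)
print(sol['x'])  # approximately the zero vector
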
diff --git a/src/make_plots.py b/src/make_plots.py
index e9e0de3..81d581c 100644
--- a/src/make_plots.py
+++ b/src/make_plots.py
@@ -168,8 +168,8 @@ if __name__=="__main__":
#convex_optimization.type_lasso)
if 1:
compute_graph("../datasets/kronecker_graph_256_cross.txt",
- n_cascades=2000, lbda=0., min_proba=.2, max_proba=.7,
+ n_cascades=2000, lbda=0.1, min_proba=.2, max_proba=.7,
passed_function=
- convex_optimization.sparse_recovery,
- #convex_optimization.type_lasso,
+ #convex_optimization.sparse_recovery,
+ convex_optimization.type_lasso,
sparse_edges=True)
\ No newline at end of file
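
The flip from sparse_recovery to type_lasso, together with raising lbda from 0. to 0.1, turns on an l1 penalty. The sketch below shows the kind of lasso-type objective such a lbda weights, solved by plain ISTA; X, y, and the solver itself are illustrative assumptions, not the repository's type_lasso implementation.

import numpy as np

def soft_threshold(v, t):
    # Proximal operator of t * ||.||_1: shrink each coordinate toward 0.
    return np.sign(v) * np.maximum(np.abs(v) - t, 0.0)

def lasso_ista(X, y, lbda, n_iter=500):
    # Minimize (1 / (2n)) * ||X @ theta - y||^2 + lbda * ||theta||_1 via ISTA.
    n, d = X.shape
    step = n / (np.linalg.norm(X, 2) ** 2)  # 1 / Lipschitz constant of the gradient
    theta = np.zeros(d)
    for _ in range(n_iter):
        grad = X.T @ (X @ theta - y) / n
        theta = soft_threshold(theta - step * grad, step * lbda)
    return theta

# lbda = 0. (the old value) applies no shrinkage; lbda = 0.1 (the new
# value) trades a little fit for a sparse estimate.
rng = np.random.default_rng(0)
X = rng.standard_normal((200, 50))
theta_true = np.zeros(50)
theta_true[:5] = 1.0
y = X @ theta_true + 0.1 * rng.standard_normal(200)
print(np.count_nonzero(lasso_ista(X, y, lbda=0.1)))
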