diff options
| -rw-r--r-- | notes/sparse.bib | 34 | ||||
| -rw-r--r-- | paper/sections/results.tex | 2 | ||||
| -rw-r--r-- | paper/sparse.bib | 34 |
3 files changed, 69 insertions, 1 deletions
diff --git a/notes/sparse.bib b/notes/sparse.bib index 62c6c0f..969e45d 100644 --- a/notes/sparse.bib +++ b/notes/sparse.bib @@ -53,6 +53,40 @@ year = {2006}, pages = {538--557}, } +@article{Zhao:2006, + author = {Zhao, Peng and Yu, Bin}, + title = {On Model Selection Consistency of Lasso}, + journal = {J. Mach. Learn. Res.}, + issue_date = {12/1/2006}, + volume = {7}, + month = dec, + year = {2006}, + issn = {1532-4435}, + pages = {2541--2563}, + numpages = {23}, + url = {http://dl.acm.org/citation.cfm?id=1248547.1248637}, + acmid = {1248637}, + publisher = {JMLR.org}, +} + +@inproceedings{Daneshmand:2014, + author = {Hadi Daneshmand and + Manuel Gomez{-}Rodriguez and + Le Song and + Bernhard Sch{\"{o}}lkopf}, + title = {Estimating Diffusion Network Structures: Recovery Conditions, Sample + Complexity {\&} Soft-thresholding Algorithm}, + booktitle = {Proceedings of the 31st International Conference on Machine Learning, + {ICML} 2014, Beijing, China, 21-26 June 2014}, + pages = {793--801}, + year = {2014}, + crossref = {DBLP:conf/icml/2014}, + url = {http://jmlr.org/proceedings/papers/v32/daneshmand14.html}, + timestamp = {Fri, 07 Nov 2014 20:42:30 +0100}, + biburl = {http://dblp.uni-trier.de/rec/bib/conf/icml/DaneshmandGSS14}, + bibsource = {dblp computer science bibliography, http://dblp.org} +} + \bibitem{abrahao} Abrahao, B., Chierichetti, F., Kleinberg, R., and Panconesi, A. \newblock{\it Trace Complexity of Network Inference} diff --git a/paper/sections/results.tex b/paper/sections/results.tex index 8787241..4e129fc 100644 --- a/paper/sections/results.tex +++ b/paper/sections/results.tex @@ -1,7 +1,7 @@ In this section, we exploit standard techniques in sparse recovery and leverage the simple nature of Generalized Linear models to address the standard problem of edge detection as well as the less frequently studied problem of coefficient estimation. 
We discuss both standard diffusion processes, and extend our analysis beyond sparse graphs to approximately sparse graphs. \paragraph{Recovering Edges vs. Recovering Coefficients} -Recovering the edges of the graph or estimating the parents of a node comes down to recovering the support (non-zero coefficients) of $\Theta$, a process known as {\it variable selection}. However, there have been a series of papers arguing that the Lasso is an inappropriate variable selection method (see H.Zou and T.Hastie, Sarah van de Geer ...). In fact, the irrepresentability condition (discussed in and was introduced in ) is essentially necessary for variable selection and rarely holds in practical situations where correlation between variable occurs. We defer an extended analysis of this situation to Section~\ref{sec:assumptions}. +Recovering the edges of the graph or estimating the parents of a node comes down to recovering the support (non-zero coefficients) of $\Theta$, a process known as {\it variable selection}. However, there have been a series of papers arguing that the Lasso is an inappropriate variable selection method (see H.Zou and T.Hastie, Sarah van de Geer ...). In fact, the irrepresentability condition (discussed in \cite{Daneshmand:2014} and introduced in \cite{Zhao:2006}) is essentially necessary for variable selection and rarely holds in practical situations where correlation between variables occurs. We defer an extended analysis of this situation to Section~\ref{sec:assumptions}. Our approach is different. Rather than trying to perform variable selection by finding $\{j: \theta_j \neq 0\}$, we seek to upper-bound $\|\hat \theta - \theta^* \|_2$. We first apply standard techniques to obtain ${\cal O}(\sqrt{\frac{s \log m}{n}})$ in the case of sparse vectors, which is tight to a certain extent as we will show in Section ???. 
diff --git a/paper/sparse.bib b/paper/sparse.bib index 62c6c0f..969e45d 100644 --- a/paper/sparse.bib +++ b/paper/sparse.bib @@ -53,6 +53,40 @@ year = {2006}, pages = {538--557}, } +@article{Zhao:2006, + author = {Zhao, Peng and Yu, Bin}, + title = {On Model Selection Consistency of Lasso}, + journal = {J. Mach. Learn. Res.}, + issue_date = {12/1/2006}, + volume = {7}, + month = dec, + year = {2006}, + issn = {1532-4435}, + pages = {2541--2563}, + numpages = {23}, + url = {http://dl.acm.org/citation.cfm?id=1248547.1248637}, + acmid = {1248637}, + publisher = {JMLR.org}, +} + +@inproceedings{Daneshmand:2014, + author = {Hadi Daneshmand and + Manuel Gomez{-}Rodriguez and + Le Song and + Bernhard Sch{\"{o}}lkopf}, + title = {Estimating Diffusion Network Structures: Recovery Conditions, Sample + Complexity {\&} Soft-thresholding Algorithm}, + booktitle = {Proceedings of the 31st International Conference on Machine Learning, + {ICML} 2014, Beijing, China, 21-26 June 2014}, + pages = {793--801}, + year = {2014}, + crossref = {DBLP:conf/icml/2014}, + url = {http://jmlr.org/proceedings/papers/v32/daneshmand14.html}, + timestamp = {Fri, 07 Nov 2014 20:42:30 +0100}, + biburl = {http://dblp.uni-trier.de/rec/bib/conf/icml/DaneshmandGSS14}, + bibsource = {dblp computer science bibliography, http://dblp.org} +} + \bibitem{abrahao} Abrahao, B., Chierichetti, F., Kleinberg, R., and Panconesi, A. \newblock{\it Trace Complexity of Network Inference} |
