| author | Thibaut Horel <thibaut.horel@gmail.com> | 2015-03-09 13:50:20 -0400 |
|---|---|---|
| committer | Thibaut Horel <thibaut.horel@gmail.com> | 2015-03-09 13:50:33 -0400 |
| commit | 843f75943d25f4e180493142b6da0968621b9a78 (patch) | |
| tree | a1c7e5fa8898e663f4009715bd8101ac5696d7c8 /presentation | |
| parent | c73f5ffb14020f8997488d1edf6594833fcbbef7 (diff) | |
| download | cascades-843f75943d25f4e180493142b6da0968621b9a78.tar.gz | |
Big reorganisation of the repo
Diffstat (limited to 'presentation')
| -rw-r--r-- | presentation/econcs/beamer.tex | 381 |
| -rw-r--r-- | presentation/econcs/figures/Screen Shot 2015-03-08 at 13.08.01.png | bin 0 -> 126844 bytes |
| -rw-r--r-- | presentation/econcs/figures/weighted_graph.png | bin 0 -> 24602 bytes |
| -rw-r--r-- | presentation/econcs/sparse.bib | 503 |
| -rw-r--r-- | presentation/extended_abstract.txt | 10 |
| -rw-r--r-- | presentation/images/greedy_sparse_comparison.png | bin 0 -> 35393 bytes |
| -rw-r--r-- | presentation/images/icc.png | bin 0 -> 130144 bytes |
| -rw-r--r-- | presentation/images/sparse_recovery_illustration.svg | 655 |
| -rw-r--r-- | presentation/images/voter.png | bin 0 -> 111444 bytes |
| -rw-r--r-- | presentation/stats/beamer_2.tex | 397 |
| -rw-r--r-- | presentation/stats/figures/Screen Shot 2015-03-08 at 13.08.01.png | bin 0 -> 126844 bytes |
| -rw-r--r-- | presentation/stats/figures/weighted_graph.png | bin 0 -> 24602 bytes |
| -rw-r--r-- | presentation/stats/sparse.bib | 503 |
13 files changed, 2449 insertions, 0 deletions
diff --git a/presentation/econcs/beamer.tex b/presentation/econcs/beamer.tex new file mode 100644 index 0000000..7d4b5c1 --- /dev/null +++ b/presentation/econcs/beamer.tex @@ -0,0 +1,381 @@ +\documentclass[10pt]{beamer} + +\usepackage{amssymb, amsmath, graphicx, amsfonts, color} + +\title{Estimating a Graph's Parameters from Cascades} +\author{Jean (John) Pouget-Abadie \\ Joint Work with Thibaut (T-bo) Horel} +\date{} + +\begin{document} + +\begin{frame} +\titlepage +\end{frame} + +\begin{frame} +\frametitle{Example} +\begin{figure} +\includegraphics[scale=.25]{../images/drawing.pdf} +\caption{A network} +\end{figure} +\end{frame} + +\begin{frame} +\frametitle{Example} +\begin{figure} +\includegraphics[scale=.25]{../images/noedges_step1.pdf} +\caption{Cascade 1: $t=0$} +\end{figure} +\end{frame} + +\begin{frame} +\frametitle{Example} +\begin{figure} +\includegraphics[scale=.25]{../images/noedges_step2.pdf} +\caption{Cascade 1: $t=1$} +\end{figure} +\end{frame} + +\begin{frame} +\frametitle{Example} +\begin{figure} +\includegraphics[scale=.25]{../images/noedges_step3.pdf} +\caption{Cascade 1: $t=2$} +\end{figure} +\end{frame} + +\begin{frame} +\frametitle{Example} +\begin{figure} +\includegraphics[scale=.25]{../images/noedges_step1_cascade2} +\caption{Cascade 2: $t=0$} +\end{figure} +\end{frame} + +\begin{frame} +\frametitle{Example} +\begin{figure} +\includegraphics[scale=.25]{../images/noedges_step2_cascade2} +\caption{Cascade 2: $t=1$} +\end{figure} +\end{frame} + +\begin{frame} +\frametitle{Example} +\begin{figure} +\includegraphics[scale=.25]{../images/noedges_step3_cascade2} +\caption{Cascade 2: $t=2$} +\end{figure} +\end{frame} + + +\begin{frame} +\frametitle{Context} + +Notation: +\begin{itemize} +\item $({\cal G}, \theta)$ : graph, parameters +\item Cascade: diffusion process of a `behavior' on $({\cal G}, \theta)$ +\item $(X_t)_c$ : set of `active' nodes at time t for cascade $c$ +\end{itemize} + + +\begin{table} +\begin{tabular}{c c} +Graph $\implies$ Cascades & Cascades $\implies$ Graph \\ \hline +$({\cal G}, \theta)$ is known & $(X_t)_c$ is observed \\ +Predict $(X_t) | X_0$ & Recover $({\cal G}, \theta)$ \\ +\end{tabular} +\end{table} + +Summary: +\begin{itemize} +\item Many algorithms \emph{require} knowledge of $({\cal G}, \theta)$ +\item {\bf Graph Inference} is the problem of \emph{learning} $({\cal G}, \theta)$ +\end{itemize} +\end{frame} + +\begin{frame} +\begin{block}{Decomposability} +Learning the graph $\Leftrightarrow$ Learning the parents of a single node +\end{block} +\end{frame} + + +\begin{frame} +\frametitle{Problem Statement} +\begin{itemize} +\pause +\item Can we learn ${\cal G}$ from $(X_t)_c$? +\pause +\item How many cascades? How many steps in each cascade? +\pause +\item Can we learn $\theta$ from $(X_t)_c$? +\pause +\item How does the error decrease with $n_{\text{cascades}}$? +\pause +\item Are there graphs which are easy to learn? Harder to learn? +\pause +\item What kind of diffusion processes can we consider? +\pause +\item What is the minimal number of cascades required to learn $({\cal G}, \theta)$? +\end{itemize} +\end{frame} + +\begin{frame} +\frametitle{Notation} +\begin{itemize} +\item n : number of measurements +\item N : number of cascades +\item m : number of nodes +\item s : degree of node considered +\end{itemize} +\end{frame} + + +\begin{frame} +\frametitle{Related Work} + +\begin{itemize} +\pause +\item Can we learn ${\cal G}$ from $(X_t)_c$? +\pause +\\{\color{blue} Yes} +\pause +\item How many cascades? How many steps in each cascade? 
+\pause +\\ {\color{blue} poly(s)$ \log m$ cascades} +\pause +\item Can we learn $\theta$ from $(X_t)_c$? +\pause +\\ {\color{blue} (?)} +\pause +\item How does the error decrease with $n_{\text{cascades}}$? +\pause +\\ {\color{blue} (?)} +\pause +\item Are there graphs which are easy to learn? Harder to learn? +\pause +\\{\color{blue} Sparse Graphs are easy} +\pause +\item What kind of diffusion processes can we consider? +\pause +\\{\color{blue} IC Model (discrete and continuous)} +\pause +\item What is the minimal number of cascades required to learn $({\cal G}, \theta)$? +\pause +\\{\color{blue} (?)\dots$s \log m/s$ in specific setting} +\end{itemize} +\end{frame} + + + +\begin{frame} +\frametitle{Our Work} +\begin{itemize} +\pause +\item Can we learn ${\cal G}$ from $(X_t)_c$? +\pause +\\{\color{blue} Yes} $\rightarrow$ {\color{red} Yes} +\pause +\item How many cascades? How many steps in each cascade? +\pause +\\ {\color{blue} poly(s)$ \log m$ cascades} $\rightarrow$ {\color{red} $s\log m$ measurements} +\pause +\item Can we learn $\theta$ from $(X_t)_c$? +\pause +\\ {\color{blue} (?)} $\rightarrow$ {\color{red} Yes!} +\pause +\item How does the error decrease with $n_{\text{cascades}}$? +\pause +\\ {\color{blue} (?)} $\rightarrow$ {\color{red} ${\cal O}(\sqrt{s\log m/n})$} +\pause +\item Are there graphs which are easy to learn? Harder to learn? +\pause +\\ {\color{blue} Sparse Graphs are easy} $\rightarrow$ {\color{red} Approx. sparsity is also easy} +\pause +\item What kind of diffusion processes can we consider? +\pause +\\ {\color{blue} IC Model (discrete, continuous)} $\rightarrow$ {\color{red} Large class of Cascade Models} +\pause +\item What is the minimal number of cascades required to learn $({\cal G}, \theta)$? +\pause +\\{\color{blue} $s \log m/s$ in specific setting} $\rightarrow$ {\color{red} $s \log m/s$ for approx. sparse graphs} +\end{itemize} + +\end{frame} + +\begin{frame} +\frametitle{Voter Model} +\begin{itemize} +\pause +\item {\color{red} Red} and {\color{blue} Blue} nodes. At every step, each node $i$ chooses one of its neighbors $j$ with probability $p_{j,i}$ and adopts that color at $t+1$ +\pause +\item If {\color{blue} Blue} is `contagious' state: +\pause +\begin{equation} +\nonumber +\mathbb{P}(i \in X^{t+1}|X^t) = \sum_{j \in {\cal N}(i)\cap X^t} p_{ji} = X^t \cdot \theta_i +\end{equation} +\end{itemize} +\end{frame} + +\begin{frame} +\frametitle{Independent Cascade Model} +\begin{itemize} +\pause +\item Each `infected' node $i$ has a probability $p_{i,j}$ of infecting each of his neighbors $j$. +\pause +\item A node stays `infected' for a single turn. Then it becomes `inactive'. +$$\mathbb{P}(j \text{ becomes infected at t+1}|X_{t}) = 1 - \prod_{i \in {\cal N}(j) \cap X_{t}} (1 - p_{i,j})$$ +\end{itemize} +\end{frame} + +\begin{frame} +\frametitle{Independent Cascade Model} +\begin{align} +\mathbb{P}(j\in X_{t+1}|X_{t}) & = 1 - \prod_{i \in {\cal N}(j) \cap X_{t}} (1 - p_{i,j}) \\ +& = 1 - \exp \left[ \sum_{i \in {\cal N}(j) \cap X_{t}} \log(1 - p_{i,j}) \right] \\ +& = 1 - \exp \left[ X_{t} \cdot \theta_{i,j}\right] +\end{align} + +where $\theta_{i,j} := \log (1 - p_{i,j})$ and $\theta_i := (\theta_{i,j})_j$ +\\[5ex] +\begin{itemize} +\item Support of $\vec \theta$ $\Leftrightarrow$ support of $\vec p$ +\end{itemize} +\end{frame} + +\begin{frame} +\frametitle{Model Comparison} +\begin{table} +\centering +\begin{tabular}{c | c} +Voter Model & Indep. Casc. Model \\[1ex] +\hline \\[.1ex] +Markov & Markov \\[3ex] +Indep. prob. 
of $\in X^{t+1} | X^t$ & Indep. prob. of $\in X^{t+1} | X^t$ \\[3ex] +$\mathbb{P}(j\in X_{t+1}|X_{t}) = X_{t} \cdot \theta_{i}$ & $\mathbb{P}(j\in X_{t+1}|X_{t}) = 1 - \exp(X_{t} \cdot \theta_{i})$ \\[3ex] +Always Susceptible & Susceptible until infected \\ +\includegraphics[scale=.4]{../images/voter_model.pdf} & \includegraphics[scale=.3]{../images/icc_model.pdf} \\ +\end{tabular} +\end{table} +\end{frame} + +\begin{frame} +\frametitle{Generalized Linear Cascade Models} +\begin{definition} +{\bf Generalized Linear Cascade Model} with inverse link function $f : \mathbb{R} \rightarrow [0,1]$: +\begin{itemize} +\item for each \emph{susceptible} node $j$ in state $s$ at $t$, $\mathbb{P}(j \in X^{t+1}|X^t)$ is a Bernoulli of parameter $f(\theta_j \cdot X^t)$ +\end{itemize} +\end{definition} +\end{frame} + +\begin{frame} +\frametitle{Sparse Recovery} +\begin{figure} +\includegraphics[scale=.6]{../images/sparse_recovery_illustration.pdf} +\caption{$f(X\cdot\theta) = b$} +\end{figure} +\end{frame} + + +\begin{frame} +\frametitle{$\ell1$ penalized Maximum Likelihood} +\begin{itemize} +\item Decomposable node by node +\item Sum over susceptible steps +\end{itemize} + +\begin{block}{Likelihood function} +\begin{equation} +\nonumber +{\cal L}(\theta| x^1, \dots x^n) = \frac{1}{{\cal T}_i} \sum_{t \in {\cal T}_i} x^{t+1}_i \log f(\theta_i \cdot x^t) + (1 - x^{t+1}_i) \log(1 - f(\theta_i \cdot x^t)) +\end{equation} +\end{block} + +\begin{block}{Algorithm} +\begin{equation} +\nonumber +\theta \in \arg \max_\theta {\cal L}(\theta| x^1, \dots x^n) - \lambda \|\theta\|_1 +\end{equation} +\end{block} + +\end{frame} + +\begin{frame} +\frametitle{Main Result} +\begin{theorem} +Assume condition on the Hessian and certain regularity properties on $f$, then $\exists C>0$ depending only on the properties of the ${\cal G}$, with high probability: +$$\| \theta^*_i - \hat \theta_i \|_2 \leq C\sqrt{\frac{s\log m}{n}}$$ +\end{theorem} + +\begin{corollary} +By thresholding $\hat \theta_i$, if $n > C' s \log m$, we recover the support of $\theta^*$ and therefore the edges of ${\cal G}$ +\end{corollary} + +\end{frame} + +\begin{frame} +\frametitle{Approximate Sparsity} +\begin{itemize} +\item $\theta^*_{\lceil s \rceil}$ best s-sparse approximation to $\theta^*$ +\item $\|\theta^* - \theta^*_{\lceil s \rceil} \|_1$: `tail' of $\theta^*$ +\end{itemize} +\begin{theorem} +Assume condition on Hessian and certain regularity properties on $f$, then $\exists C_1, C_2>0$ depending only on the properties of ${\cal G}$, with high probability: +\begin{equation} +\|\hat \theta_i - \theta^*_i\|_2 \leq C_1 \sqrt{\frac{s\log m}{n}} + C_2 \sqrt[4]{\frac{s\log m}{n}}\|\theta^* - \theta^*_{\lceil s \rceil} \|_1 +\end{equation} +\end{theorem} +\end{frame} + +\begin{frame} +\frametitle{Lower Bound} +\begin{itemize} +\item Under correlation decay assumption for the IC model, ${\Omega}(s \log N/s)$ cascades necessary for graph reconstruction (Netrapalli et Sanghavi SIGMETRICS'12) +\item Adapting (Price \& Woodruff STOC'12), in the approximately sparse case, any algorithm for any generalized linear cascade model such that: +$$\|\hat \theta - \theta^*\|_2 \leq C \|\theta^* - \theta^*_{\lfloor s \rfloor}\|_2$$ +requires ${\cal O}(s \log (n/s)/\log C)$ measurement. +\end{itemize} +\end{frame} + +\begin{frame} +\frametitle{(RE) assumptions} +\begin{block}{Assumption on Hessian} +\begin{itemize} +\item +Hessian has to verify a `restricted eigenvalue property' i.e smallest eigenvalue on sparse vectors is away from $0$. 
+\end{itemize} +\end{block} + +\begin{block}{From Hessian to Gram Matrix} +\begin{itemize} +\item $\mathbb{E}[X X^T]$ : `expected' Gram matrix of observations +\item $\mathbb{E}[X X^T]_{i,i}$ : $\mathbb{P}$ that node $i$ is infected +\item $\mathbb{E}[X X^T]_{i,j}$ : $\mathbb{P} $that node $i$ and node $j$ are infected simultaneously +\end{itemize} +\end{block} +\end{frame} + +\begin{frame} +\frametitle{Future Work} + +\begin{block}{Linear Threshold Model} +\begin{itemize} +\item Linear threshold model is a generalized linear cascade, with non-differential inverse link function. $$\mathbb{P}(j \in X^{t+1}|X^t) = sign(\theta_j \cdot X^t - t_j)$$ +\end{itemize} +\end{block} + +\begin{block}{Noisy Influence Maximization} +\end{block} + +\begin{block}{Confidence Intervals} +\end{block} + +\begin{block}{Active Learning} +\end{block} +\end{frame} + +\end{document} diff --git a/presentation/econcs/figures/Screen Shot 2015-03-08 at 13.08.01.png b/presentation/econcs/figures/Screen Shot 2015-03-08 at 13.08.01.png Binary files differnew file mode 100644 index 0000000..b053f0c --- /dev/null +++ b/presentation/econcs/figures/Screen Shot 2015-03-08 at 13.08.01.png diff --git a/presentation/econcs/figures/weighted_graph.png b/presentation/econcs/figures/weighted_graph.png Binary files differnew file mode 100644 index 0000000..7deccc3 --- /dev/null +++ b/presentation/econcs/figures/weighted_graph.png diff --git a/presentation/econcs/sparse.bib b/presentation/econcs/sparse.bib new file mode 100644 index 0000000..5df4b59 --- /dev/null +++ b/presentation/econcs/sparse.bib @@ -0,0 +1,503 @@ +@article {CandesRomberTao:2006, +author = {Candès, Emmanuel J. and Romberg, Justin K. and Tao, Terence}, +title = {Stable signal recovery from incomplete and inaccurate measurements}, +journal = {Communications on Pure and Applied Mathematics}, +volume = {59}, +number = {8}, +publisher = {Wiley Subscription Services, Inc., A Wiley Company}, +issn = {1097-0312}, +pages = {1207--1223}, +year = {2006}, +} + + +@inproceedings{GomezRodriguez:2010, + author = {Gomez Rodriguez, Manuel and Leskovec, Jure and Krause, Andreas}, + title = {Inferring Networks of Diffusion and Influence}, + booktitle = {Proceedings of the 16th ACM SIGKDD International Conference on Knowledge Discovery and Data Mining}, + series = {KDD '10}, + year = {2010}, + isbn = {978-1-4503-0055-1}, + location = {Washington, DC, USA}, + pages = {1019--1028}, + numpages = {10}, + publisher = {ACM}, + address = {New York, NY, USA}, +} + + +@article{Netrapalli:2012, + author = {Netrapalli, Praneeth and Sanghavi, Sujay}, + title = {Learning the Graph of Epidemic Cascades}, + journal = {SIGMETRICS Perform. Eval. Rev.}, + volume = {40}, + number = {1}, + month = {June}, + year = {2012}, + issn = {0163-5999}, + pages = {211--222}, + numpages = {12}, + publisher = {ACM}, + address = {New York, NY, USA}, + keywords = {cascades, epidemics, graph structure learning}, +} + +@article{Negahban:2009, + author = {Negahban, Sahand N. and Ravikumar, Pradeep and Wrainwright, Martin J. and Yu, Bin}, + title = {A Unified Framework for High-Dimensional Analysis of M-Estimators with Decomposable Regularizers}, + Journal = {Statistical Science}, + year = {2012}, + month = {December}, + volume = {27}, + number = {4}, + pages = {538--557}, +} + +@article{Zhao:2006, + author = {Zhao, Peng and Yu, Bin}, + title = {On Model Selection Consistency of Lasso}, + journal = {J. Mach. Learn. 
Res.}, + issue_date = {12/1/2006}, + volume = {7}, + month = dec, + year = {2006}, + issn = {1532-4435}, + pages = {2541--2563}, + numpages = {23}, + url = {http://dl.acm.org/citation.cfm?id=1248547.1248637}, + acmid = {1248637}, + publisher = {JMLR.org}, +} + +@inproceedings{Daneshmand:2014, + author = {Hadi Daneshmand and + Manuel Gomez{-}Rodriguez and + Le Song and + Bernhard Sch{\"{o}}lkopf}, + title = {Estimating Diffusion Network Structures: Recovery Conditions, Sample + Complexity {\&} Soft-thresholding Algorithm}, + booktitle = {Proceedings of the 31th International Conference on Machine Learning, + {ICML} 2014, Beijing, China, 21-26 June 2014}, + pages = {793--801}, + year = {2014}, + url = {http://jmlr.org/proceedings/papers/v32/daneshmand14.html}, + timestamp = {Fri, 07 Nov 2014 20:42:30 +0100}, + biburl = {http://dblp.uni-trier.de/rec/bib/conf/icml/DaneshmandGSS14}, + bibsource = {dblp computer science bibliography, http://dblp.org} +} + +@inproceedings{Kempe:03, + author = {David Kempe and + Jon M. Kleinberg and + {\'{E}}va Tardos}, + title = {Maximizing the spread of influence through a social network}, + booktitle = {Proceedings of the Ninth {ACM} {SIGKDD} International Conference on + Knowledge Discovery and Data Mining, Washington, DC, USA, August 24 + - 27, 2003}, + pages = {137--146}, + year = {2003}, + url = {http://doi.acm.org/10.1145/956750.956769}, + doi = {10.1145/956750.956769}, + timestamp = {Mon, 13 Feb 2006 15:34:20 +0100}, + biburl = {http://dblp.uni-trier.de/rec/bib/conf/kdd/KempeKT03}, + bibsource = {dblp computer science bibliography, http://dblp.org} +} + +@inproceedings{Abrahao:13, + author = {Bruno D. Abrahao and + Flavio Chierichetti and + Robert Kleinberg and + Alessandro Panconesi}, + title = {Trace complexity of network inference}, + booktitle = {The 19th {ACM} {SIGKDD} International Conference on Knowledge Discovery + and Data Mining, {KDD} 2013, Chicago, IL, USA, August 11-14, 2013}, + pages = {491--499}, + year = {2013}, + url = {http://doi.acm.org/10.1145/2487575.2487664}, + doi = {10.1145/2487575.2487664}, + timestamp = {Tue, 10 Sep 2013 10:11:57 +0200}, + biburl = {http://dblp.uni-trier.de/rec/bib/conf/kdd/AbrahaoCKP13}, + bibsource = {dblp computer science bibliography, http://dblp.org} +} + + +@article{vandegeer:2009, +author = "van de Geer, Sara A. and B{\"u}hlmann, Peter", +doi = "10.1214/09-EJS506", +fjournal = "Electronic Journal of Statistics", +journal = "Electron. J. Statist.", +pages = "1360--1392", +publisher = "The Institute of Mathematical Statistics and the Bernoulli Society", +title = "On the conditions used to prove oracle results for the Lasso", +url = "http://dx.doi.org/10.1214/09-EJS506", +volume = "3", +year = "2009" +} + +@article{vandegeer:2011, +author = "van de Geer, Sara and Bühlmann, Peter and Zhou, Shuheng", +doi = "10.1214/11-EJS624", +fjournal = "Electronic Journal of Statistics", +journal = "Electron. J. 
Statist.", +pages = "688--749", +publisher = "The Institute of Mathematical Statistics and the Bernoulli Society", +title = "The adaptive and the thresholded Lasso for potentially misspecified models (and a lower bound for the Lasso)", +url = "http://dx.doi.org/10.1214/11-EJS624", +volume = "5", +year = "2011" +} + +@article{Zou:2006, +author = {Zou, Hui}, +title = {The Adaptive Lasso and Its Oracle Properties}, +journal = {Journal of the American Statistical Association}, +volume = {101}, +number = {476}, +pages = {1418-1429}, +year = {2006}, +doi = {10.1198/016214506000000735}, +URL = {http://dx.doi.org/10.1198/016214506000000735}, +} + +@article{Jacques:2013, + author = {Laurent Jacques and + Jason N. Laska and + Petros T. Boufounos and + Richard G. Baraniuk}, + title = {Robust 1-Bit Compressive Sensing via Binary Stable Embeddings of Sparse + Vectors}, + journal = {{IEEE} Transactions on Information Theory}, + volume = {59}, + number = {4}, + pages = {2082--2102}, + year = {2013}, + url = {http://dx.doi.org/10.1109/TIT.2012.2234823}, + doi = {10.1109/TIT.2012.2234823}, + timestamp = {Tue, 09 Apr 2013 19:57:48 +0200}, + biburl = {http://dblp.uni-trier.de/rec/bib/journals/tit/JacquesLBB13}, + bibsource = {dblp computer science bibliography, http://dblp.org} +} + +@inproceedings{Boufounos:2008, + author = {Petros Boufounos and + Richard G. Baraniuk}, + title = {1-Bit compressive sensing}, + booktitle = {42nd Annual Conference on Information Sciences and Systems, {CISS} + 2008, Princeton, NJ, USA, 19-21 March 2008}, + pages = {16--21}, + year = {2008}, + url = {http://dx.doi.org/10.1109/CISS.2008.4558487}, + doi = {10.1109/CISS.2008.4558487}, + timestamp = {Wed, 15 Oct 2014 17:04:27 +0200}, + biburl = {http://dblp.uni-trier.de/rec/bib/conf/ciss/BoufounosB08}, + bibsource = {dblp computer science bibliography, http://dblp.org} +} + +@inproceedings{Gupta:2010, + author = {Ankit Gupta and + Robert Nowak and + Benjamin Recht}, + title = {Sample complexity for 1-bit compressed sensing and sparse classification}, + booktitle = {{IEEE} International Symposium on Information Theory, {ISIT} 2010, + June 13-18, 2010, Austin, Texas, USA, Proceedings}, + pages = {1553--1557}, + year = {2010}, + url = {http://dx.doi.org/10.1109/ISIT.2010.5513510}, + doi = {10.1109/ISIT.2010.5513510}, + timestamp = {Thu, 15 Jan 2015 17:11:50 +0100}, + biburl = {http://dblp.uni-trier.de/rec/bib/conf/isit/GuptaNR10}, + bibsource = {dblp computer science bibliography, http://dblp.org} +} + +@article{Plan:2014, + author = {Yaniv Plan and + Roman Vershynin}, + title = {Dimension Reduction by Random Hyperplane Tessellations}, + journal = {Discrete {\&} Computational Geometry}, + volume = {51}, + number = {2}, + pages = {438--461}, + year = {2014}, + url = {http://dx.doi.org/10.1007/s00454-013-9561-6}, + doi = {10.1007/s00454-013-9561-6}, + timestamp = {Tue, 11 Feb 2014 13:48:56 +0100}, + biburl = {http://dblp.uni-trier.de/rec/bib/journals/dcg/PlanV14}, + bibsource = {dblp computer science bibliography, http://dblp.org} +} + +@article{bickel:2009, +author = "Bickel, Peter J. and Ritov, Ya’acov and Tsybakov, Alexandre B.", +doi = "10.1214/08-AOS620", +fjournal = "The Annals of Statistics", +journal = "Ann. 
Statist.", +month = "08", +number = "4", +pages = "1705--1732", +publisher = "The Institute of Mathematical Statistics", +title = "Simultaneous analysis of Lasso and Dantzig selector", +url = "http://dx.doi.org/10.1214/08-AOS620", +volume = "37", +year = "2009" +} + +@article{raskutti:10, + author = {Garvesh Raskutti and + Martin J. Wainwright and + Bin Yu}, + title = {Restricted Eigenvalue Properties for Correlated Gaussian Designs}, + journal = {Journal of Machine Learning Research}, + volume = {11}, + pages = {2241--2259}, + year = {2010}, + url = {http://portal.acm.org/citation.cfm?id=1859929}, + timestamp = {Wed, 15 Oct 2014 17:04:32 +0200}, + biburl = {http://dblp.uni-trier.de/rec/bib/journals/jmlr/RaskuttiWY10}, + bibsource = {dblp computer science bibliography, http://dblp.org} +} + +@article{rudelson:13, + author = {Mark Rudelson and + Shuheng Zhou}, + title = {Reconstruction From Anisotropic Random Measurements}, + journal = {{IEEE} Transactions on Information Theory}, + volume = {59}, + number = {6}, + pages = {3434--3447}, + year = {2013}, + url = {http://dx.doi.org/10.1109/TIT.2013.2243201}, + doi = {10.1109/TIT.2013.2243201}, + timestamp = {Tue, 21 May 2013 14:15:50 +0200}, + biburl = {http://dblp.uni-trier.de/rec/bib/journals/tit/RudelsonZ13}, + bibsource = {dblp computer science bibliography, http://dblp.org} +} + +@article{bipw11, + author = {Khanh Do Ba and + Piotr Indyk and + Eric Price and + David P. Woodruff}, + title = {Lower Bounds for Sparse Recovery}, + journal = {CoRR}, + volume = {abs/1106.0365}, + year = {2011}, + url = {http://arxiv.org/abs/1106.0365}, + timestamp = {Mon, 05 Dec 2011 18:04:39 +0100}, + biburl = {http://dblp.uni-trier.de/rec/bib/journals/corr/abs-1106-0365}, + bibsource = {dblp computer science bibliography, http://dblp.org} +} + +@inproceedings{pw11, + author = {Eric Price and + David P. Woodruff}, + title = {{(1} + eps)-Approximate Sparse Recovery}, + booktitle = {{IEEE} 52nd Annual Symposium on Foundations of Computer Science, {FOCS} + 2011, Palm Springs, CA, USA, October 22-25, 2011}, + pages = {295--304}, + year = {2011}, + crossref = {DBLP:conf/focs/2011}, + url = {http://dx.doi.org/10.1109/FOCS.2011.92}, + doi = {10.1109/FOCS.2011.92}, + timestamp = {Tue, 16 Dec 2014 09:57:24 +0100}, + biburl = {http://dblp.uni-trier.de/rec/bib/conf/focs/PriceW11}, + bibsource = {dblp computer science bibliography, http://dblp.org} +} + +@proceedings{DBLP:conf/focs/2011, + editor = {Rafail Ostrovsky}, + title = {{IEEE} 52nd Annual Symposium on Foundations of Computer Science, {FOCS} + 2011, Palm Springs, CA, USA, October 22-25, 2011}, + publisher = {{IEEE} Computer Society}, + year = {2011}, + url = {http://ieeexplore.ieee.org/xpl/mostRecentIssue.jsp?punumber=6108120}, + isbn = {978-1-4577-1843-4}, + timestamp = {Mon, 15 Dec 2014 18:48:45 +0100}, + biburl = {http://dblp.uni-trier.de/rec/bib/conf/focs/2011}, + bibsource = {dblp computer science bibliography, http://dblp.org} +} + +@inproceedings{pw12, + author = {Eric Price and + David P. 
Woodruff}, + title = {Applications of the Shannon-Hartley theorem to data streams and sparse + recovery}, + booktitle = {Proceedings of the 2012 {IEEE} International Symposium on Information + Theory, {ISIT} 2012, Cambridge, MA, USA, July 1-6, 2012}, + pages = {2446--2450}, + year = {2012}, + crossref = {DBLP:conf/isit/2012}, + url = {http://dx.doi.org/10.1109/ISIT.2012.6283954}, + doi = {10.1109/ISIT.2012.6283954}, + timestamp = {Mon, 01 Oct 2012 17:34:07 +0200}, + biburl = {http://dblp.uni-trier.de/rec/bib/conf/isit/PriceW12}, + bibsource = {dblp computer science bibliography, http://dblp.org} +} + +@proceedings{DBLP:conf/isit/2012, + title = {Proceedings of the 2012 {IEEE} International Symposium on Information + Theory, {ISIT} 2012, Cambridge, MA, USA, July 1-6, 2012}, + publisher = {{IEEE}}, + year = {2012}, + url = {http://ieeexplore.ieee.org/xpl/mostRecentIssue.jsp?punumber=6268627}, + isbn = {978-1-4673-2580-6}, + timestamp = {Mon, 01 Oct 2012 17:33:45 +0200}, + biburl = {http://dblp.uni-trier.de/rec/bib/conf/isit/2012}, + bibsource = {dblp computer science bibliography, http://dblp.org} +} + +@article{Leskovec:2010, + author = {Jure Leskovec and + Deepayan Chakrabarti and + Jon M. Kleinberg and + Christos Faloutsos and + Zoubin Ghahramani}, + title = {Kronecker Graphs: An Approach to Modeling Networks}, + journal = {Journal of Machine Learning Research}, + volume = {11}, + pages = {985--1042}, + year = {2010}, + url = {http://doi.acm.org/10.1145/1756006.1756039}, + doi = {10.1145/1756006.1756039}, + timestamp = {Thu, 22 Apr 2010 13:26:26 +0200}, + biburl = {http://dblp.uni-trier.de/rec/bib/journals/jmlr/LeskovecCKFG10}, + bibsource = {dblp computer science bibliography, http://dblp.org} +} + +@article{Holme:2002, + author= {Petter Holme and Beom Jun Kim}, + title = {Growing scale-free networks with tunable clustering}, + journal = {Physical review E}, + volume = {65}, + issue = {2}, + pages = {026--107}, + year = {2002} +} + + +@article{watts:1998, + Annote = {10.1038/30918}, + Author = {Watts, Duncan J. 
and Strogatz, Steven H.}, + Date = {1998/06/04/print}, + Isbn = {0028-0836}, + Journal = {Nature}, + Number = {6684}, + Pages = {440--442}, + Read = {0}, + Title = {Collective dynamics of `small-world' networks}, + Url = {http://dx.doi.org/10.1038/30918}, + Volume = {393}, + Year = {1998}, +} + +@article{barabasi:2001, + author = {R{\'{e}}ka Albert and + Albert{-}L{\'{a}}szl{\'{o}} Barab{\'{a}}si}, + title = {Statistical mechanics of complex networks}, + journal = {CoRR}, + volume = {cond-mat/0106096}, + year = {2001}, + url = {http://arxiv.org/abs/cond-mat/0106096}, + timestamp = {Mon, 05 Dec 2011 18:05:15 +0100}, + biburl = {http://dblp.uni-trier.de/rec/bib/journals/corr/cond-mat-0106096}, + bibsource = {dblp computer science bibliography, http://dblp.org} +} + + +@article{gomezbalduzzi:2011, + author = {Manuel Gomez{-}Rodriguez and + David Balduzzi and + Bernhard Sch{\"{o}}lkopf}, + title = {Uncovering the Temporal Dynamics of Diffusion Networks}, + journal = {CoRR}, + volume = {abs/1105.0697}, + year = {2011}, + url = {http://arxiv.org/abs/1105.0697}, + timestamp = {Mon, 05 Dec 2011 18:05:23 +0100}, + biburl = {http://dblp.uni-trier.de/rec/bib/journals/corr/abs-1105-0697}, + bibsource = {dblp computer science bibliography, http://dblp.org} +} + +@article{Nowell08, + author = {Liben-Nowell, David and Kleinberg, Jon}, + biburl = {http://www.bibsonomy.org/bibtex/250b9b1ca1849fa9cb8bb92d6d9031436/mkroell}, + doi = {10.1073/pnas.0708471105}, + eprint = {http://www.pnas.org/content/105/12/4633.full.pdf+html}, + journal = {Proceedings of the National Academy of Sciences}, + keywords = {SNA graph networks}, + number = 12, + pages = {4633-4638}, + timestamp = {2008-10-09T10:32:56.000+0200}, + title = {{Tracing information flow on a global scale using Internet chain-letter data}}, + url = {http://www.pnas.org/content/105/12/4633.abstract}, + volume = 105, + year = 2008 +} + +@inproceedings{Leskovec07, + author = {Jure Leskovec and + Mary McGlohon and + Christos Faloutsos and + Natalie S. Glance and + Matthew Hurst}, + title = {Patterns of Cascading Behavior in Large Blog Graphs}, + booktitle = {Proceedings of the Seventh {SIAM} International Conference on Data + Mining, April 26-28, 2007, Minneapolis, Minnesota, {USA}}, + pages = {551--556}, + year = {2007}, + url = {http://dx.doi.org/10.1137/1.9781611972771.60}, + doi = {10.1137/1.9781611972771.60}, + timestamp = {Wed, 12 Feb 2014 17:08:15 +0100}, + biburl = {http://dblp.uni-trier.de/rec/bib/conf/sdm/LeskovecMFGH07}, + bibsource = {dblp computer science bibliography, http://dblp.org} +} + + +@inproceedings{AdarA05, + author = {Eytan Adar and + Lada A. Adamic}, + title = {Tracking Information Epidemics in Blogspace}, + booktitle = {2005 {IEEE} / {WIC} / {ACM} International Conference on Web Intelligence + {(WI} 2005), 19-22 September 2005, Compiegne, France}, + pages = {207--214}, + year = {2005}, + url = {http://dx.doi.org/10.1109/WI.2005.151}, + doi = {10.1109/WI.2005.151}, + timestamp = {Tue, 12 Aug 2014 16:59:16 +0200}, + biburl = {http://dblp.uni-trier.de/rec/bib/conf/webi/AdarA05}, + bibsource = {dblp computer science bibliography, http://dblp.org} +} + +@inproceedings{Kleinberg:00, + author = {Jon M. 
Kleinberg}, + title = {The small-world phenomenon: an algorithm perspective}, + booktitle = {Proceedings of the Thirty-Second Annual {ACM} Symposium on Theory + of Computing, May 21-23, 2000, Portland, OR, {USA}}, + pages = {163--170}, + year = {2000}, + url = {http://doi.acm.org/10.1145/335305.335325}, + doi = {10.1145/335305.335325}, + timestamp = {Thu, 16 Feb 2012 12:06:08 +0100}, + biburl = {http://dblp.uni-trier.de/rec/bib/conf/stoc/Kleinberg00}, + bibsource = {dblp computer science bibliography, http://dblp.org} +} + +@article{zhang2014, + title={Confidence intervals for low dimensional parameters in high dimensional linear models}, + author={Zhang, Cun-Hui and Zhang, Stephanie S}, + journal={Journal of the Royal Statistical Society: Series B (Statistical Methodology)}, + volume={76}, + number={1}, + pages={217--242}, + year={2014}, + publisher={Wiley Online Library} +} + +@article{javanmard2014, + title={Confidence intervals and hypothesis testing for high-dimensional regression}, + author={Javanmard, Adel and Montanari, Andrea}, + journal={The Journal of Machine Learning Research}, + volume={15}, + number={1}, + pages={2869--2909}, + year={2014}, + publisher={JMLR. org} +} diff --git a/presentation/extended_abstract.txt b/presentation/extended_abstract.txt new file mode 100644 index 0000000..47a12b9 --- /dev/null +++ b/presentation/extended_abstract.txt @@ -0,0 +1,10 @@ +Title: How can we estimate the parameters of a graph by observing its cascades? + + +A standard problem in Social Network Theory is to understand how the parameters of a graph affect the properties of its cascades, which are diffusion processes that spread from node to node along the graph's weighted edges. In other words, can we predict cascades from the graph's parameters? + +Recent work has considered the dual problem: what knowledge about the existence of an edge in the graph do we gain by observing its cascades and how can we leverage that knowledge efficiently? A natural extension to this problem is: can we learn the weights of the graph's edges from cascades? These questions are fundamental to many aspects of social network theory: knowing the parameters of the graph precedes influence maximization or conversely influence minimization. + +In this talk, we present a "sparse recovery" framework for tackling the "graph inference" problem from cascades. This framework achieves a better convergence rate under weaker assumptions than prior work. We show that we (almost) match the lower bound and that our assumptions are robust to approximately sparse graphs. Finally, the approach is validated on synthetic networks. + +Joint work with Thibaut Horel
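
The extended abstract above mentions validating the approach on synthetic networks. As a purely illustrative companion (not code from this repository), the sketch below simulates observations for a single node under the generalized linear cascade form used in beamer.tex, $f(X_t \cdot \theta_i) = 1 - \exp(X_t \cdot \theta_i)$ with $\theta_{j,i} = \log(1 - p_{j,i})$, and recovers the node's incoming weights with an $\ell_1$-penalized maximum-likelihood fit. All names, the use of `scipy.optimize.minimize`, and the parameter values ($m$, $n$, $s$, $\lambda$) are assumptions made for the example, and i.i.d. activation vectors stand in for real cascade trajectories.

```python
# Minimal sketch (not the repository's code): l1-penalized maximum likelihood
# for one node of a generalized linear cascade model, on synthetic data.
# Assumptions: independent-cascade link f(z) = 1 - exp(z), theta_{j,i} =
# log(1 - p_{j,i}) <= 0, and i.i.d. activation vectors instead of real cascades.
import numpy as np
from scipy.optimize import minimize

rng = np.random.default_rng(0)
m, n, s = 50, 2000, 5                      # nodes, measurements, true in-degree
p_true = np.zeros(m)
p_true[rng.choice(m, size=s, replace=False)] = rng.uniform(0.2, 0.8, size=s)
theta_true = np.log1p(-p_true)             # theta_{j,i} = log(1 - p_{j,i})

X = rng.integers(0, 2, size=(n, m)).astype(float)   # active nodes at time t
prob = 1.0 - np.exp(X @ theta_true)                  # P(node infected at t+1 | X_t)
y = rng.random(n) < prob                             # observed infections

lam = 0.05                                  # on the order of sqrt(log(m) / n)

def objective(theta):
    z = X @ theta
    f = np.clip(1.0 - np.exp(z), 1e-12, 1.0 - 1e-12)
    loglik = np.mean(np.where(y, np.log(f), np.log(1.0 - f)))
    # On the feasible set theta <= 0 we have ||theta||_1 = -sum(theta),
    # so the penalty is linear and L-BFGS-B applies directly.
    return -loglik + lam * (-np.sum(theta))

res = minimize(objective, x0=np.zeros(m), method="L-BFGS-B",
               bounds=[(-10.0, 0.0)] * m)
theta_hat = res.x
recovered = np.flatnonzero(-theta_hat > 0.05)        # threshold to read off edges
print("true parents     :", sorted(np.flatnonzero(p_true).tolist()))
print("recovered parents:", sorted(recovered.tolist()))
print("l2 error         :", np.linalg.norm(theta_hat - theta_true))
```

Thresholding $\hat\theta_i$ as in the corollary on the beamer.tex slides reads off the support; with more measurements the printed $\ell_2$ error should shrink roughly like $\sqrt{s \log m / n}$.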
\ No newline at end of file diff --git a/presentation/images/greedy_sparse_comparison.png b/presentation/images/greedy_sparse_comparison.png Binary files differnew file mode 100644 index 0000000..3fab5b0 --- /dev/null +++ b/presentation/images/greedy_sparse_comparison.png diff --git a/presentation/images/icc.png b/presentation/images/icc.png Binary files differnew file mode 100644 index 0000000..2148eed --- /dev/null +++ b/presentation/images/icc.png diff --git a/presentation/images/sparse_recovery_illustration.svg b/presentation/images/sparse_recovery_illustration.svg new file mode 100644 index 0000000..bef82c2 --- /dev/null +++ b/presentation/images/sparse_recovery_illustration.svg @@ -0,0 +1,655 @@ [655 lines of Inkscape SVG source for the sparse recovery illustration (XML markup and vector path data); omitted here, and truncated in the original listing]
-0.02,0.02 -0.02,0.02 -0.02,0.03 -0.03,0.02 -0.02,0.03 -0.03,0.03 -0.03,0.03 -0.02,0.03 -0.03,0.03 -0.03,0.03 -0.03,0.03 -0.03,0.03 -0.04,0.04 -0.03,0.03 -0.04,0.04 -0.03,0.03 -0.04,0.04 -0.04,0.04 c -6.22,6.28 -7.81,15.69 -7.81,23.31 0,8.67 1.89,17.35 8.01,23.56 0.64,0.61 0.64,0.71 0.64,0.86 0,0.35 -0.18,0.49 -0.48,0.49 -0.5,0 -4.98,-3.38 -7.92,-9.7 -2.55,-5.49 -3.14,-11.02 -3.14,-15.21 0,-3.89 0.54,-9.9 3.28,-15.54 2.98,-6.13 7.28,-9.36 7.78,-9.36 0.3,0 0.48,0.14 0.48,0.5 z" + id="path6151" /></g></g><g + transform="matrix(0.85368615,0,0,-0.85368615,323.95947,595.49677)" + id="g6430"><g + style="font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;letter-spacing:normal;word-spacing:normal;text-anchor:start;fill:none;fill-opacity:1;fill-rule:evenodd;stroke:#000000;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:10.43299961;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1" + id="g6432" + transform="matrix(1.0629921,0,0,-1.0629921,-186.02362,789.27165)" + xml:space="preserve" + stroke-miterlimit="10.433" + font-style="normal" + font-variant="normal" + font-weight="normal" + font-stretch="normal" + font-size-adjust="none" + letter-spacing="normal" + word-spacing="normal"><path + style="fill:#000000;stroke-width:0" + inkscape:connector-curvature="0" + d="m 467.43,440.23 0,0 0,0.01 0,0 0,0.01 -0.01,0.02 0,0.01 0,0.02 0,0.02 0,0.02 -0.01,0.02 0,0.02 -0.01,0.02 -0.01,0.02 -0.01,0.03 -0.01,0.02 -0.01,0.03 -0.01,0.02 -0.02,0.03 -0.02,0.02 -0.02,0.02 -0.02,0.03 -0.01,0.01 -0.02,0.01 -0.01,0.01 -0.01,0.01 -0.02,0.01 -0.01,0.01 -0.02,0.01 -0.01,0.01 -0.02,0.01 -0.02,0.01 -0.02,0 -0.02,0.01 -0.02,0.01 -0.02,0 -0.02,0.01 -0.02,0.01 -0.03,0 -0.02,0 -0.03,0.01 -0.02,0 -0.03,0 -0.03,0.01 -0.03,0 -0.02,0 c -1.16,0 -4.79,-0.41 -6.08,-0.5 -0.41,-0.05 -0.96,-0.1 -0.96,-1 0,-0.6 0.46,-0.6 1.21,-0.6 2.39,0 2.48,-0.34 2.48,-0.84 0,-0.36 -0.45,-2.05 -0.7,-3.09 l -4.08,-16.24 c -0.59,-2.5 -0.8,-3.3 -0.8,-5.03 0,-4.73 2.64,-7.83 6.33,-7.83 l 0,1.1 c -1.7,0 -3.34,1.34 -3.34,5.03 0,0.95 0,1.89 0.79,5.03 l 1.1,4.59 c 0.3,1.1 0.3,1.19 0.75,1.74 2.44,3.23 4.69,3.98 5.98,3.98 1.8,0 3.14,-1.48 3.14,-4.69 0,-2.93 -1.65,-8.65 -2.54,-10.56 -1.64,-3.33 -3.94,-5.12 -5.88,-5.12 l 0,-1.1 c 5.88,0 12,7.42 12,14.6 0,4.53 -2.64,7.96 -6.62,7.96 -2.28,0 -4.33,-1.43 -5.83,-2.98 z" + id="path6434" /></g></g><g + id="g6584" + transform="matrix(0.4503923,0,0,-0.4503923,282.17489,557.47955)"><g + word-spacing="normal" + letter-spacing="normal" + font-size-adjust="none" + font-stretch="normal" + font-weight="normal" + font-variant="normal" + font-style="normal" + stroke-miterlimit="10.433" + xml:space="preserve" + transform="matrix(1.0629921,0,0,-1.0629921,-186.02362,789.27165)" + id="g6586" + style="font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;letter-spacing:normal;word-spacing:normal;text-anchor:start;fill:none;fill-opacity:1;fill-rule:evenodd;stroke:#000000;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:10.43299961;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1"><path + id="path6588" + d="m 459.91,409.14 -0.02,-0.07 -0.01,-0.08 -0.02,-0.07 -0.01,-0.08 -0.04,-0.16 -0.04,-0.16 -0.03,-0.16 -0.04,-0.16 -0.04,-0.16 -0.04,-0.16 -0.01,-0.08 -0.02,-0.07 -0.02,-0.07 -0.01,-0.07 -0.02,-0.07 -0.01,-0.07 -0.01,-0.06 -0.01,-0.06 -0.01,-0.06 -0.01,-0.05 -0.01,-0.05 -0.01,-0.04 -0.01,-0.04 0,-0.04 0,-0.03 0,-0.02 c 0,-0.89 0.7,-1.35 1.43,-1.35 0.61,0 1.5,0.39 1.85,1.39 0.06,0.11 0.65,2.44 0.95,3.69 l 1.1,4.48 c 0.29,1.1 0.59,2.19 0.84,3.35 0.2,0.84 
0.61,2.28 0.66,2.48 0.75,1.55 3.39,6.08 8.1,6.08 2.25,0 2.71,-1.84 2.71,-3.48 0,-1.25 -0.36,-2.64 -0.75,-4.14 l -1.39,-5.79 -1,-3.78 c -0.21,-1 -0.66,-2.68 -0.66,-2.93 0,-0.89 0.7,-1.35 1.45,-1.35 1.55,0 1.85,1.25 2.24,2.85 0.7,2.78 2.55,10.06 3,12 0.14,0.65 2.78,6.62 8.22,6.62 2.14,0 2.68,-1.69 2.68,-3.48 0,-2.85 -2.09,-8.52 -3.09,-11.16 -0.45,-1.2 -0.64,-1.75 -0.64,-2.75 0,-2.33 1.73,-4.08 4.08,-4.08 4.69,0 6.53,7.27 6.53,7.67 0,0.5 -0.45,0.5 -0.59,0.5 -0.5,0 -0.5,-0.15 -0.75,-0.9 -0.75,-2.64 -2.35,-6.17 -5.08,-6.17 -0.86,0 -1.21,0.5 -1.21,1.64 0,1.25 0.46,2.45 0.9,3.54 0.95,2.58 3.04,8.11 3.04,10.96 0,3.23 -2,5.32 -5.73,5.32 -3.74,0 -6.27,-2.18 -8.11,-4.82 -0.06,0.65 -0.2,2.34 -1.61,3.53 -1.24,1.04 -2.83,1.29 -4.08,1.29 -4.48,0 -6.92,-3.18 -7.76,-4.32 -0.25,2.84 -2.35,4.32 -4.6,4.32 -2.28,0 -3.23,-1.93 -3.69,-2.82 -0.89,-1.75 -1.54,-4.69 -1.54,-4.85 0,-0.5 0.5,-0.5 0.61,-0.5 0.5,0 0.54,0.07 0.84,1.16 0.84,3.53 1.84,5.92 3.64,5.92 0.8,0 1.55,-0.39 1.55,-2.28 0,-1.05 -0.16,-1.61 -0.8,-4.19 z" + inkscape:connector-curvature="0" + style="fill:#000000;stroke-width:0" /></g></g><path + style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:1.01223707;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:10.43299961;stroke-dasharray:none;stroke-opacity:1;marker-start:url(#marker6688);marker-end:url(#marker6728)" + d="m 293.12729,372.35784 0,-133.19994" + id="path6680" + inkscape:connector-curvature="0" /><path + style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:0.94074076;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:10.43299961;stroke-dasharray:7.52592611, 3.76296305;stroke-dashoffset:0;stroke-opacity:1" + d="m 327.78257,333.84095 203.36242,0" + id="path7438" + inkscape:connector-curvature="0" /><path + inkscape:connector-curvature="0" + id="path7440" + d="m 327.78257,269.93384 203.36242,0" + style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:0.94074076;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:10.43299961;stroke-dasharray:7.52592611, 3.76296305;stroke-dashoffset:0;stroke-opacity:1" /><g + transform="matrix(0.55298894,0,0,-0.55298894,184.25053,439.44739)" + id="g7521"><g + style="font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;letter-spacing:normal;word-spacing:normal;text-anchor:start;fill:none;fill-opacity:1;fill-rule:evenodd;stroke:#000000;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:10.43299961;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1" + id="g7523" + transform="matrix(1.0629921,0,0,-1.0629921,-186.02362,789.27165)" + xml:space="preserve" + stroke-miterlimit="10.433" + font-style="normal" + font-variant="normal" + font-weight="normal" + font-stretch="normal" + font-size-adjust="none" + letter-spacing="normal" + word-spacing="normal"><path + style="fill:#000000;stroke-width:0" + inkscape:connector-curvature="0" + d="m 493.19,435 0.1,0.37 0.05,0.18 0.06,0.17 0.06,0.18 0.07,0.17 0.07,0.16 0.08,0.16 0.08,0.16 0.09,0.16 0.1,0.15 0.1,0.14 0.12,0.14 0.12,0.14 0.13,0.13 0.15,0.12 0.15,0.12 0.16,0.11 0.18,0.11 0.18,0.11 0.2,0.09 0.21,0.09 0.23,0.09 0.23,0.07 0.26,0.07 0.26,0.07 0.28,0.05 0.3,0.05 0.31,0.04 0.33,0.04 0.35,0.03 0.18,0.01 0.19,0 c 0.25,0 0.84,0.05 0.84,1 0,0.05 0,0.55 -0.64,0.55 -1.66,0 -3.39,-0.16 -5.03,-0.16 -1.7,0 -3.44,0.16 -5.1,0.16 -0.29,0 -0.89,0 -0.89,-1 0,-0.55 0.5,-0.55 0.89,-0.55 2.85,-0.04 3.4,-1.09 3.4,-2.18 0,-0.16 -0.1,-0.91 -0.15,-1.05 l -5.57,-22.17 -11.02,26 c -0.41,0.9 -0.45,0.95 -1.59,0.95 l -6.67,0 c -1,0 -1.46,0 -1.46,-1 0,-0.55 
0.46,-0.55 1.39,-0.55 0.25,0 3.39,0 3.39,-0.45 l -6.67,-26.75 c -0.5,-2 -1.34,-3.59 -5.37,-3.73 -0.32,0 -0.86,-0.05 -0.86,-1 0,-0.35 0.25,-0.55 0.65,-0.55 1.6,0 3.33,0.16 4.99,0.16 1.69,0 3.48,-0.16 5.12,-0.16 0.25,0 0.89,0 0.89,1 0,0.5 -0.43,0.55 -0.98,0.55 -2.89,0.09 -3.3,1.18 -3.3,2.18 0,0.35 0.05,0.6 0.21,1.14 l 6.57,26.32 c 0.21,-0.3 0.21,-0.39 0.46,-0.89 l 12.4,-29.35 c 0.35,-0.86 0.49,-0.95 0.94,-0.95 0.55,0 0.55,0.16 0.8,1.05 z" + id="path7525" /><path + style="fill:#000000;stroke-width:0" + inkscape:connector-curvature="0" + d="m 509.62,426.12 7.13,0 0,1.55 -7.13,0 0,9.17 -1.24,0 c -0.05,-4.08 -1.55,-9.42 -6.43,-9.61 l 0,-1.11 4.24,0 0,-13.75 c 0,-6.12 4.62,-6.72 6.42,-6.72 3.53,0 4.94,3.53 4.94,6.72 l 0,2.84 -1.25,0 0,-2.73 c 0,-3.69 -1.5,-5.58 -3.35,-5.58 -3.33,0 -3.33,4.53 -3.33,5.38 z" + id="path7527" /><path + style="fill:#000000;stroke-width:0" + inkscape:connector-curvature="0" + d="m 525.8,409.98 0,-0.1 0,-0.1 -0.01,-0.19 -0.01,-0.18 -0.01,-0.17 -0.02,-0.15 -0.02,-0.15 -0.03,-0.13 -0.04,-0.12 -0.04,-0.11 -0.05,-0.11 -0.05,-0.09 -0.07,-0.09 -0.07,-0.08 -0.08,-0.07 -0.09,-0.06 -0.1,-0.05 -0.11,-0.05 -0.12,-0.05 -0.13,-0.03 -0.14,-0.04 -0.15,-0.02 -0.17,-0.03 -0.17,-0.01 -0.1,-0.01 -0.09,-0.01 -0.1,-0.01 -0.1,0 -0.11,-0.01 -0.11,0 -0.11,0 -0.12,-0.01 -0.12,0 -0.12,0 -0.13,0 -0.13,0 -0.14,0 -0.14,0 -0.14,0 -0.14,0 -0.16,0 -0.15,0 0,-1.55 c 1.75,0.05 4.28,0.16 5.62,0.16 1.3,0 3.9,-0.11 5.58,-0.16 l 0,1.55 c -3.33,0 -3.87,0 -3.87,2.23 l 0,9.17 c 0,5.19 3.53,7.97 6.72,7.97 3.14,0 3.69,-2.69 3.69,-5.53 l 0,-11.61 c 0,-2.23 -0.55,-2.23 -3.9,-2.23 l 0,-1.55 c 1.75,0.05 4.3,0.16 5.65,0.16 1.29,0 3.87,-0.11 5.57,-0.16 l 0,1.55 c -2.59,0 -3.84,0 -3.89,1.5 l 0,9.51 c 0,4.28 0,5.83 -1.55,7.61 -0.68,0.86 -2.34,1.84 -5.21,1.84 -4.19,0 -6.39,-2.98 -7.24,-4.87 l 0,17.44 -7.17,-0.55 0,-1.55 c 3.49,0 3.89,-0.34 3.89,-2.79 z" + id="path7529" /><path + style="fill:#000000;stroke-width:0" + inkscape:connector-curvature="0" + d="m 570.54,417.06 0.01,0.38 0,0.36 0.02,0.36 0.02,0.35 0.03,0.34 0.03,0.34 0.03,0.32 0.04,0.32 0.05,0.31 0.05,0.3 0.06,0.29 0.06,0.29 0.13,0.55 0.15,0.51 0.17,0.49 0.18,0.46 0.2,0.43 0.21,0.4 0.22,0.38 0.23,0.35 0.24,0.32 0.25,0.3 0.26,0.28 0.26,0.26 0.27,0.23 0.28,0.21 0.27,0.19 0.29,0.16 0.28,0.15 0.28,0.13 0.28,0.12 0.29,0.09 0.27,0.08 0.28,0.06 0.27,0.05 0.27,0.03 0.26,0.02 0.25,0.01 c 0.44,0 3.57,-0.05 5.32,-1.85 -2.04,-0.15 -2.34,-1.64 -2.34,-2.29 0,-1.3 0.89,-2.3 2.3,-2.3 1.28,0 2.28,0.86 2.28,2.34 0,3.39 -3.78,5.33 -7.62,5.33 -6.22,0 -10.82,-5.37 -10.82,-11.55 0,-6.37 4.94,-11.31 10.72,-11.31 6.67,0 8.27,5.99 8.27,6.47 0,0.5 -0.5,0.5 -0.64,0.5 -0.46,0 -0.55,-0.19 -0.66,-0.5 -1.44,-4.62 -4.67,-5.22 -6.52,-5.22 -2.64,0 -7.03,2.14 -7.03,10.16 z" + id="path7531" /><path + style="fill:#000000;stroke-width:0" + inkscape:connector-curvature="0" + d="m 603.39,409.98 0.02,-0.19 0.03,-0.18 0.03,-0.19 0.05,-0.18 0.05,-0.19 0.05,-0.18 0.06,-0.18 0.07,-0.18 0.08,-0.18 0.08,-0.17 0.09,-0.18 0.09,-0.16 0.1,-0.16 0.11,-0.16 0.11,-0.16 0.12,-0.14 0.13,-0.14 0.13,-0.14 0.14,-0.13 0.14,-0.12 0.15,-0.12 0.16,-0.1 0.16,-0.1 0.17,-0.09 0.17,-0.08 0.19,-0.07 0.18,-0.06 0.2,-0.05 0.19,-0.04 0.21,-0.03 0.21,-0.02 0.22,-0.01 c 1.03,0 4.07,0.71 4.07,4.74 l 0,2.78 -1.25,0 0,-2.78 c 0,-2.89 -1.23,-3.19 -1.79,-3.19 -1.64,0 -1.84,2.23 -1.84,2.48 l 0,9.97 c 0,2.1 0,4.03 -1.79,5.88 -1.95,1.93 -4.43,2.73 -6.83,2.73 -4.09,0 -7.53,-2.33 -7.53,-5.62 0,-1.49 1,-2.35 2.3,-2.35 1.39,0 2.3,1 2.3,2.3 0,0.59 -0.25,2.23 -2.55,2.3 1.35,1.73 3.78,2.28 5.38,2.28 2.45,0 5.28,-1.94 5.28,-6.38 l 0,-1.84 0,-1.05 
0,-4.98 c 0,-4.74 -3.58,-6.42 -5.83,-6.42 -2.44,0 -4.48,1.75 -4.48,4.23 0,2.73 2.09,6.88 10.31,7.17 l 0,0 0,1.05 c -2.53,-0.14 -6.02,-0.3 -9.16,-1.8 -3.73,-1.69 -4.98,-4.28 -4.98,-6.47 0,-4.03 4.83,-5.28 7.96,-5.28 3.3,0 5.58,1.99 6.54,4.33 z" + id="path7533" /><path + style="fill:#000000;stroke-width:0" + inkscape:connector-curvature="0" + d="m 622.11,415.87 0.11,-0.02 0.13,-0.02 0.14,-0.03 0.16,-0.04 0.16,-0.04 0.18,-0.04 0.19,-0.05 0.2,-0.06 0.21,-0.06 0.21,-0.07 0.21,-0.08 0.22,-0.08 0.22,-0.1 0.22,-0.1 0.22,-0.11 0.22,-0.12 0.22,-0.14 0.21,-0.14 0.21,-0.15 0.19,-0.16 0.19,-0.18 0.18,-0.19 0.17,-0.2 0.16,-0.21 0.14,-0.22 0.07,-0.12 0.06,-0.12 0.06,-0.13 0.05,-0.12 0.05,-0.14 0.05,-0.13 0.04,-0.14 0.03,-0.14 0.04,-0.15 0.02,-0.15 0.02,-0.16 0.02,-0.15 0,-0.17 0.01,-0.16 c 0,-2.53 -1.75,-4.53 -5.63,-4.53 -4.19,0 -5.98,2.84 -6.94,7.07 -0.14,0.64 -0.18,0.85 -0.68,0.85 -0.66,0 -0.66,-0.35 -0.66,-1.25 l 0,-6.58 c 0,-0.84 0,-1.19 0.55,-1.19 0.25,0 0.29,0.05 1.25,1 0.09,0.1 0.09,0.19 1,1.14 2.19,-2.09 4.43,-2.14 5.48,-2.14 5.72,0 8.02,3.35 8.02,6.92 0,2.64 -1.5,4.14 -2.1,4.74 -1.64,1.59 -3.59,2 -5.68,2.39 -2.79,0.55 -6.13,1.2 -6.13,4.09 0,1.74 1.3,3.78 5.58,3.78 5.48,0 5.73,-4.48 5.83,-6.03 0.06,-0.45 0.5,-0.45 0.61,-0.45 0.64,0 0.64,0.25 0.64,1.2 l 0,5.03 c 0,0.85 0,1.19 -0.55,1.19 -0.25,0 -0.34,0 -1,-0.59 -0.14,-0.21 -0.64,-0.64 -0.84,-0.8 -1.89,1.39 -3.94,1.39 -4.69,1.39 -6.08,0 -7.97,-3.33 -7.97,-6.12 0,-1.74 0.8,-3.14 2.14,-4.24 1.59,-1.29 3,-1.59 6.58,-2.28 z" + id="path7535" /><path + style="fill:#000000;stroke-width:0" + inkscape:connector-curvature="0" + d="m 637.26,417.06 0,0.38 0.01,0.36 0.01,0.36 0.02,0.35 0.03,0.34 0.03,0.34 0.04,0.32 0.04,0.32 0.05,0.31 0.05,0.3 0.05,0.29 0.06,0.29 0.14,0.55 0.15,0.51 0.17,0.49 0.18,0.46 0.19,0.43 0.21,0.4 0.22,0.38 0.23,0.35 0.25,0.32 0.24,0.3 0.26,0.28 0.27,0.26 0.27,0.23 0.27,0.21 0.28,0.19 0.28,0.16 0.28,0.15 0.29,0.13 0.28,0.12 0.28,0.09 0.28,0.08 0.27,0.06 0.28,0.05 0.26,0.03 0.26,0.02 0.25,0.01 c 0.44,0 3.58,-0.05 5.33,-1.85 -2.05,-0.15 -2.34,-1.64 -2.34,-2.29 0,-1.3 0.89,-2.3 2.29,-2.3 1.28,0 2.28,0.86 2.28,2.34 0,3.39 -3.78,5.33 -7.62,5.33 -6.22,0 -10.81,-5.37 -10.81,-11.55 0,-6.37 4.93,-11.31 10.72,-11.31 6.67,0 8.26,5.99 8.26,6.47 0,0.5 -0.5,0.5 -0.64,0.5 -0.45,0 -0.55,-0.19 -0.66,-0.5 -1.43,-4.62 -4.67,-5.22 -6.51,-5.22 -2.64,0 -7.03,2.14 -7.03,10.16 z" + id="path7537" /><path + style="fill:#000000;stroke-width:0" + inkscape:connector-curvature="0" + d="m 670.1,409.98 0.02,-0.19 0.03,-0.18 0.04,-0.19 0.04,-0.18 0.05,-0.19 0.06,-0.18 0.06,-0.18 0.07,-0.18 0.07,-0.18 0.08,-0.17 0.09,-0.18 0.09,-0.16 0.1,-0.16 0.11,-0.16 0.11,-0.16 0.12,-0.14 0.13,-0.14 0.13,-0.14 0.14,-0.13 0.14,-0.12 0.16,-0.12 0.15,-0.1 0.16,-0.1 0.17,-0.09 0.18,-0.08 0.18,-0.07 0.19,-0.06 0.19,-0.05 0.2,-0.04 0.2,-0.03 0.22,-0.02 0.21,-0.01 c 1.03,0 4.08,0.71 4.08,4.74 l 0,2.78 -1.25,0 0,-2.78 c 0,-2.89 -1.23,-3.19 -1.8,-3.19 -1.64,0 -1.84,2.23 -1.84,2.48 l 0,9.97 c 0,2.1 0,4.03 -1.78,5.88 -1.95,1.93 -4.44,2.73 -6.83,2.73 -4.09,0 -7.53,-2.33 -7.53,-5.62 0,-1.49 1,-2.35 2.3,-2.35 1.39,0 2.29,1 2.29,2.3 0,0.59 -0.25,2.23 -2.54,2.3 1.34,1.73 3.78,2.28 5.37,2.28 2.45,0 5.28,-1.94 5.28,-6.38 l 0,-1.84 0,-1.05 0,-4.98 c 0,-4.74 -3.58,-6.42 -5.83,-6.42 -2.43,0 -4.48,1.75 -4.48,4.23 0,2.73 2.09,6.88 10.31,7.17 l 0,0 0,1.05 c -2.53,-0.14 -6.01,-0.3 -9.15,-1.8 -3.74,-1.69 -4.99,-4.28 -4.99,-6.47 0,-4.03 4.83,-5.28 7.97,-5.28 3.3,0 5.58,1.99 6.53,4.33 z" + id="path7539" /><path + style="fill:#000000;stroke-width:0" + inkscape:connector-curvature="0" + d="m 697.4,408.93 0,-3.28 
7.32,0.55 0,1.55 c -3.49,0 -3.88,0.34 -3.88,2.78 l 0,30.25 -7.17,-0.55 0,-1.55 c 3.48,0 3.87,-0.34 3.87,-2.79 l 0,-10.75 c -1.44,1.78 -3.57,3.07 -6.28,3.07 l 0.21,-1.09 c 1.75,0 3.84,-0.75 5.38,-2.98 0.55,-0.85 0.55,-0.96 0.55,-1.85 l 0,0 0,-10.22 c 0,-0.89 0,-1 -0.55,-1.84 -1.5,-2.39 -3.75,-3.48 -5.88,-3.48 -2.24,0 -4.04,1.29 -5.22,3.18 -1.3,2.05 -1.46,4.89 -1.46,6.93 0,1.84 0.1,4.84 1.55,7.07 1.05,1.55 2.94,3.19 5.63,3.19 l -0.21,1.09 c -5.87,0 -11.11,-4.87 -11.11,-11.29 0,-6.33 4.89,-11.27 10.57,-11.27 3.18,0 5.43,1.69 6.68,3.28 z" + id="path7541" /><path + style="fill:#000000;stroke-width:0" + inkscape:connector-curvature="0" + d="m 711.72,417.71 0.04,1.05 c 0.3,7.42 4.49,8.66 6.19,8.66 5.12,0 5.62,-6.72 5.62,-8.66 l -11.81,0 -0.04,-1.05 13.9,0 c 1.1,0 1.23,0 1.23,1.05 0,4.92 -2.68,9.75 -8.9,9.75 -5.78,0 -10.38,-5.12 -10.38,-11.36 0,-6.67 5.24,-11.5 10.97,-11.5 6.08,0 8.31,5.53 8.31,6.47 0,0.5 -0.38,0.61 -0.63,0.61 -0.46,0 -0.55,-0.3 -0.66,-0.7 -1.74,-5.13 -6.22,-5.13 -6.72,-5.13 -2.5,0 -4.49,1.49 -5.62,3.33 -1.5,2.39 -1.5,5.69 -1.5,7.48 z" + id="path7543" /></g></g><g + id="g7761" + transform="matrix(0.41157394,0,0,-0.41157394,150.15744,448.1947)"><g + word-spacing="normal" + letter-spacing="normal" + font-size-adjust="none" + font-stretch="normal" + font-weight="normal" + font-variant="normal" + font-style="normal" + stroke-miterlimit="10.433" + xml:space="preserve" + transform="matrix(1.0629921,0,0,-1.0629921,-186.02362,789.27165)" + id="g7763" + style="font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;letter-spacing:normal;word-spacing:normal;text-anchor:start;fill:none;fill-opacity:1;fill-rule:evenodd;stroke:#000000;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:10.43299961;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1"><path + id="path7765" + d="m 459.91,409.14 -0.02,-0.07 -0.01,-0.08 -0.02,-0.07 -0.01,-0.08 -0.04,-0.16 -0.04,-0.16 -0.03,-0.16 -0.04,-0.16 -0.04,-0.16 -0.04,-0.16 -0.01,-0.08 -0.02,-0.07 -0.02,-0.07 -0.01,-0.07 -0.02,-0.07 -0.01,-0.07 -0.01,-0.06 -0.01,-0.06 -0.01,-0.06 -0.01,-0.05 -0.01,-0.05 -0.01,-0.04 -0.01,-0.04 0,-0.04 0,-0.03 0,-0.02 c 0,-0.89 0.7,-1.35 1.43,-1.35 0.61,0 1.5,0.39 1.85,1.39 0.06,0.11 0.65,2.44 0.95,3.69 l 1.1,4.48 c 0.29,1.1 0.59,2.19 0.84,3.35 0.2,0.84 0.61,2.28 0.66,2.48 0.75,1.55 3.39,6.08 8.1,6.08 2.25,0 2.71,-1.84 2.71,-3.48 0,-3.1 -2.46,-9.47 -3.25,-11.61 -0.44,-1.14 -0.49,-1.75 -0.49,-2.3 0,-2.33 1.74,-4.08 4.08,-4.08 4.69,0 6.53,7.27 6.53,7.67 0,0.5 -0.45,0.5 -0.61,0.5 -0.48,0 -0.48,-0.15 -0.73,-0.9 -1,-3.38 -2.64,-6.17 -5.09,-6.17 -0.85,0 -1.19,0.5 -1.19,1.64 0,1.25 0.45,2.45 0.89,3.54 0.95,2.64 3.05,8.11 3.05,10.96 0,3.34 -2.15,5.32 -5.74,5.32 -4.48,0 -6.92,-3.18 -7.76,-4.32 -0.25,2.79 -2.3,4.32 -4.6,4.32 -2.28,0 -3.23,-1.93 -3.73,-2.82 -0.8,-1.71 -1.5,-4.64 -1.5,-4.85 0,-0.5 0.5,-0.5 0.61,-0.5 0.5,0 0.54,0.07 0.84,1.16 0.84,3.53 1.84,5.92 3.64,5.92 0.99,0 1.55,-0.64 1.55,-2.28 0,-1.05 -0.16,-1.61 -0.8,-4.19 z" + inkscape:connector-curvature="0" + style="fill:#000000;stroke-width:0" /></g></g><g + id="g8270" + transform="matrix(0.50275347,0,0,-0.50275347,499.24477,498.85208)"><g + word-spacing="normal" + letter-spacing="normal" + font-size-adjust="none" + font-stretch="normal" + font-weight="normal" + font-variant="normal" + font-style="normal" + stroke-miterlimit="10.433" + xml:space="preserve" + transform="matrix(1.0629921,0,0,-1.0629921,-186.02362,789.27165)" + id="g8272" + 
style="font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;letter-spacing:normal;word-spacing:normal;text-anchor:start;fill:none;fill-opacity:1;fill-rule:evenodd;stroke:#000000;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:10.43299961;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1"><path + id="path8274" + d="m 469.47,440.03 0.01,0.02 0,0.03 0.01,0.02 0,0.02 0,0.03 0.01,0.02 0,0.02 0,0.02 0.01,0.04 0.01,0.04 0,0.04 0.01,0.03 0,0.03 0.01,0.03 0,0.03 0,0.03 0.01,0.02 0,0.03 0,0.02 0,0.02 0.01,0.02 0,0.02 0,0.02 0,0.02 0,0.01 0,0.02 0,0.02 0,0.01 0.01,0.03 0,0.03 0,0.03 0,0.02 c 0,0.35 -0.1,0.5 -0.55,0.5 -0.55,0 -2.05,-0.75 -3.03,-1.25 -2.3,-1.14 -4.19,-2.09 -4.19,-2.98 0,-0.3 0.34,-0.3 0.55,-0.3 0.59,0 1.89,0.69 2.84,1.14 -0.61,-4.87 -1.8,-11.25 -2.7,-15.54 -1.85,-8.47 -2.78,-11.5 -5.22,-16.35 -0.25,-0.43 -0.25,-0.54 -0.25,-0.59 0,-0.34 0.39,-0.34 0.5,-0.34 0.8,0 2.87,0.89 3.67,2.29 0.66,1.14 2.55,4.78 4.05,10.56 1.09,4.29 2.98,10.8 6.51,15.94 2.3,3.24 4.39,4.74 7.54,4.74 2.68,0 5.12,-1.75 5.12,-4.39 0,-4.38 -4.73,-5.97 -9.91,-7.72 -0.59,-0.21 -4.03,-1.39 -4.03,-2.64 0,-0.3 0.35,-0.35 0.49,-0.35 0.2,0 1.36,0.19 2.4,0.19 5.03,0 9.11,-2.98 9.11,-7.72 0,-6.17 -5.28,-8.17 -9.81,-8.17 -3.84,0 -5.69,2.09 -6.33,2.8 -0.2,0.25 -0.25,0.34 -0.65,0.34 -1.1,0 -3.63,-1.55 -3.63,-2.44 0,-0.15 1.94,-3.43 7.31,-3.43 7.03,0 17.35,4.98 17.35,13 0,4.18 -2.8,7.32 -7.88,8.26 3.99,1.7 9.81,5.28 9.81,9.97 0,2.89 -2.39,5.03 -6.07,5.03 -1.69,0 -8.07,-0.45 -14.1,-7.92 z" + inkscape:connector-curvature="0" + style="fill:#000000;stroke-width:0" /><path + id="path8276" + d="m 510.42,389.27 0,0.02 0,0.02 0,0.01 -0.01,0.02 0,0.02 0,0.01 0,0.02 -0.01,0.01 0,0.02 -0.01,0.01 0,0.02 -0.01,0.01 0,0.01 -0.01,0.02 -0.01,0.01 0,0.01 -0.02,0.03 -0.01,0.02 -0.02,0.03 -0.02,0.02 -0.02,0.03 -0.01,0.03 -0.02,0.02 -0.01,0.02 -0.02,0.01 c -2.29,2.44 -5.67,6.49 -7.76,14.6 -1.16,4.53 -1.59,9.67 -1.59,14.29 0,13.11 3.14,22.28 9.1,28.75 0.46,0.46 0.46,0.55 0.46,0.66 0,0.5 -0.41,0.5 -0.6,0.5 -0.75,0 -3.43,-3 -4.09,-3.75 -5.08,-6.02 -8.31,-14.98 -8.31,-26.09 0,-7.08 1.23,-17.1 7.76,-25.52 0.5,-0.59 3.74,-4.39 4.64,-4.39 0.19,0 0.6,0 0.6,0.5 z" + inkscape:connector-curvature="0" + style="fill:#000000;stroke-width:0" /><path + id="path8278" + d="m 530.92,426.12 4.28,0 c 1,0 1.5,0 1.5,1 0,0.55 -0.5,0.55 -1.34,0.55 l -4.14,0 1.05,5.69 c 0.2,1.04 0.9,4.57 1.2,5.17 0.45,0.95 1.3,1.7 2.34,1.7 0.19,0 1.49,0 2.44,-0.91 -2.19,-0.18 -2.69,-1.93 -2.69,-2.68 0,-1.14 0.89,-1.74 1.85,-1.74 1.29,0 2.73,1.1 2.73,2.99 0,2.29 -2.3,3.43 -4.33,3.43 -1.7,0 -4.84,-0.89 -6.33,-5.82 -0.29,-1.05 -0.45,-1.55 -1.64,-7.83 l -3.43,0 c -0.96,0 -1.5,0 -1.5,-0.94 0,-0.61 0.45,-0.61 1.39,-0.61 l 3.29,0 -3.75,-19.67 c -0.89,-4.83 -1.73,-9.38 -4.32,-9.38 -0.21,0 -1.44,0 -2.4,0.91 2.3,0.14 2.74,1.94 2.74,2.69 0,1.14 -0.89,1.75 -1.84,1.75 -1.3,0 -2.74,-1.1 -2.74,-3 0,-2.24 2.19,-3.44 4.24,-3.44 2.73,0 4.73,2.94 5.62,4.84 1.59,3.14 2.75,9.16 2.8,9.52 z" + inkscape:connector-curvature="0" + style="fill:#000000;stroke-width:0" /><path + id="path8280" + d="m 558.88,394.25 0,0.02 0,0.03 0,0.01 0,0.02 -0.01,0.01 0,0.02 -0.01,0.01 0,0.02 -0.01,0.02 -0.01,0.02 -0.01,0.02 -0.02,0.03 -0.01,0.02 -0.02,0.03 -0.02,0.03 -0.03,0.03 -0.02,0.03 -0.03,0.04 -0.02,0.02 -0.02,0.02 -0.01,0.02 -0.02,0.02 -0.02,0.02 -0.02,0.03 -0.02,0.02 -0.02,0.02 -0.03,0.03 -0.02,0.02 -0.02,0.03 -0.03,0.03 -0.03,0.03 -0.02,0.03 -0.03,0.03 -0.03,0.03 -0.03,0.03 -0.04,0.03 -0.03,0.04 -0.03,0.03 -0.04,0.04 -0.03,0.03 -0.04,0.04 -0.04,0.04 c -6.22,6.28 -7.81,15.69 
-7.81,23.31 0,8.67 1.89,17.35 8.01,23.56 0.64,0.61 0.64,0.71 0.64,0.86 0,0.35 -0.18,0.49 -0.48,0.49 -0.5,0 -4.99,-3.38 -7.92,-9.7 -2.55,-5.49 -3.14,-11.02 -3.14,-15.21 0,-3.89 0.54,-9.9 3.28,-15.54 2.98,-6.13 7.28,-9.36 7.78,-9.36 0.3,0 0.48,0.14 0.48,0.5 z" + inkscape:connector-curvature="0" + style="fill:#000000;stroke-width:0" /><path + id="path8282" + d="m 573.62,440.23 0,0 0,0.01 0,0 0,0.01 0,0.02 0,0.01 0,0.02 -0.01,0.02 0,0.02 0,0.02 -0.01,0.02 -0.01,0.02 0,0.02 -0.01,0.03 -0.01,0.02 -0.02,0.03 -0.01,0.02 -0.02,0.03 -0.01,0.02 -0.02,0.02 -0.03,0.03 -0.01,0.01 -0.01,0.01 -0.01,0.01 -0.02,0.01 -0.01,0.01 -0.02,0.01 -0.01,0.01 -0.02,0.01 -0.02,0.01 -0.01,0.01 -0.02,0 -0.02,0.01 -0.02,0.01 -0.02,0 -0.03,0.01 -0.02,0.01 -0.02,0 -0.03,0 -0.02,0.01 -0.03,0 -0.02,0 -0.03,0.01 -0.03,0 -0.03,0 c -1.16,0 -4.78,-0.41 -6.08,-0.5 -0.4,-0.05 -0.95,-0.1 -0.95,-1 0,-0.6 0.45,-0.6 1.2,-0.6 2.39,0 2.49,-0.34 2.49,-0.84 0,-0.36 -0.46,-2.05 -0.71,-3.09 l -4.08,-16.24 c -0.59,-2.5 -0.79,-3.3 -0.79,-5.03 0,-4.73 2.64,-7.83 6.33,-7.83 l 0,1.1 c -1.71,0 -3.35,1.34 -3.35,5.03 0,0.95 0,1.89 0.8,5.03 l 1.09,4.59 c 0.3,1.1 0.3,1.19 0.75,1.74 2.44,3.23 4.69,3.98 5.99,3.98 1.8,0 3.14,-1.48 3.14,-4.69 0,-2.93 -1.66,-8.65 -2.55,-10.56 -1.64,-3.33 -3.94,-5.12 -5.87,-5.12 l 0,-1.1 c 5.87,0 12,7.42 12,14.6 0,4.53 -2.64,7.96 -6.63,7.96 -2.28,0 -4.33,-1.43 -5.83,-2.98 z" + inkscape:connector-curvature="0" + style="fill:#000000;stroke-width:0" /><path + id="path8284" + d="m 594.38,420.54 -0.01,0.05 0,0.06 -0.01,0.05 -0.01,0.06 -0.01,0.05 -0.01,0.06 -0.02,0.05 -0.02,0.06 -0.02,0.06 -0.02,0.05 -0.03,0.06 -0.03,0.05 -0.03,0.06 -0.04,0.05 -0.04,0.05 -0.04,0.05 -0.04,0.05 -0.05,0.05 -0.05,0.04 -0.05,0.05 -0.06,0.04 -0.06,0.03 -0.06,0.04 -0.07,0.03 -0.06,0.03 -0.08,0.03 -0.07,0.02 -0.08,0.02 -0.08,0.01 -0.08,0.01 -0.09,0.01 -0.09,0 c -0.94,0 -1.95,-0.91 -1.95,-1.92 0,-0.59 0.45,-1.33 1.4,-1.33 1,0 1.96,0.99 1.96,1.92 z" + inkscape:connector-curvature="0" + style="fill:#000000;stroke-width:0" /><path + id="path8286" + d="m 587.27,402.84 -0.06,-0.17 -0.05,-0.17 -0.03,-0.08 -0.02,-0.09 -0.03,-0.08 -0.02,-0.09 -0.03,-0.09 -0.02,-0.09 -0.01,-0.1 -0.02,-0.09 -0.01,-0.1 -0.01,-0.05 0,-0.05 0,-0.06 -0.01,-0.05 0,-0.05 0,-0.06 c 0,-1.64 1.39,-2.95 3.3,-2.95 3.48,0 5.03,4.79 5.03,5.33 0,0.45 -0.45,0.45 -0.56,0.45 -0.49,0 -0.53,-0.2 -0.67,-0.59 -0.8,-2.79 -2.33,-4.22 -3.69,-4.22 -0.7,0 -0.88,0.45 -0.88,1.22 0,0.79 0.25,1.46 0.57,2.23 0.34,0.94 0.73,1.87 1.11,2.78 0.31,0.84 1.57,4.02 1.72,4.44 0.09,0.34 0.2,0.76 0.2,1.11 0,1.64 -1.39,2.95 -3.31,2.95 -3.46,0 -5.05,-4.73 -5.05,-5.33 0,-0.45 0.48,-0.45 0.59,-0.45 0.49,0 0.52,0.17 0.63,0.56 0.9,3 2.44,4.25 3.72,4.25 0.56,0 0.87,-0.28 0.87,-1.22 0,-0.79 -0.2,-1.32 -1.08,-3.48 z" + inkscape:connector-curvature="0" + style="fill:#000000;stroke-width:0" /><path + id="path8288" + d="m 614.15,418.65 0,0.37 0,0.39 -0.01,0.39 -0.02,0.41 -0.04,0.84 -0.06,0.88 -0.08,0.92 -0.11,0.95 -0.13,0.97 -0.16,1.01 -0.2,1.02 -0.23,1.03 -0.27,1.05 -0.3,1.06 -0.35,1.07 -0.39,1.07 -0.22,0.53 -0.22,0.53 -0.24,0.53 -0.25,0.53 c -2.98,6.12 -7.27,9.36 -7.77,9.36 -0.29,0 -0.5,-0.19 -0.5,-0.49 0,-0.15 0,-0.25 0.94,-1.15 4.89,-4.92 7.74,-12.85 7.74,-23.27 0,-8.51 -1.85,-17.28 -8.03,-23.56 -0.65,-0.59 -0.65,-0.7 -0.65,-0.84 0,-0.3 0.21,-0.5 0.5,-0.5 0.5,0 4.97,3.39 7.93,9.71 2.53,5.47 3.12,11 3.12,15.19 z" + inkscape:connector-curvature="0" + style="fill:#000000;stroke-width:0" /><path + id="path8290" + d="m 634.28,418.61 -0.01,1.36 -0.06,1.43 -0.09,1.47 -0.14,1.53 -0.19,1.57 -0.25,1.61 -0.31,1.63 -0.39,1.66 -0.22,0.83 
-0.24,0.84 -0.26,0.84 -0.29,0.84 -0.3,0.84 -0.33,0.84 -0.35,0.85 -0.38,0.83 -0.4,0.84 -0.43,0.83 -0.45,0.83 -0.48,0.82 -0.51,0.82 -0.54,0.81 -0.56,0.8 -0.6,0.8 c -0.5,0.59 -3.73,4.39 -4.62,4.39 -0.26,0 -0.61,-0.11 -0.61,-0.5 0,-0.2 0.11,-0.36 0.29,-0.5 2.39,-2.59 5.64,-6.63 7.67,-14.55 1.16,-4.53 1.61,-9.67 1.61,-14.29 0,-5.04 -0.45,-10.13 -1.75,-15 -1.89,-6.99 -4.82,-10.97 -7.37,-13.75 -0.45,-0.46 -0.45,-0.55 -0.45,-0.66 0,-0.39 0.35,-0.5 0.61,-0.5 0.73,0 3.48,3.05 4.07,3.75 5.08,6.02 8.33,14.98 8.33,26.09 z" + inkscape:connector-curvature="0" + style="fill:#000000;stroke-width:0" /></g></g><g + id="g8876" + transform="translate(2.7774935,0)"><path + id="path8521" + d="m 790.86729,366.54831 0,0.2608 -0.004,0.26432 -0.007,0.26433 -0.0141,0.26079 -0.0141,0.26433 -0.0211,0.2608 -0.0282,0.2608 -0.0318,0.25728 -0.0423,0.25727 -0.0494,0.25728 -0.0564,0.25375 -0.0669,0.25375 -0.074,0.25375 -0.0881,0.2467 -0.0987,0.24671 -0.0528,0.12335 -0.0564,0.11983 c -0.80354,1.69167 -2.24499,1.97362 -2.9851,1.97362 -1.05025,0 -2.3331,-0.45817 -3.05559,-2.09345 -0.56037,-1.21237 -0.64847,-2.58333 -0.64847,-3.98601 0,-1.3181 0.0705,-2.897 0.79297,-4.2292 0.7542,-1.4203 2.03706,-1.77273 2.897,-1.77273 l 0,0.38415 c -0.68372,0 -1.7234,0.44055 -2.04059,2.12517 -0.19031,1.05025 -0.19031,2.66439 -0.19031,3.70759 0,1.12074 0,2.27671 0.14097,3.22476 0.33129,2.09345 1.64938,2.24852 2.08993,2.24852 0.57798,0 1.73749,-0.31367 2.06877,-2.05468 0.17622,-0.97977 0.17622,-2.31901 0.17622,-3.4186 0,-1.32162 0,-2.51284 -0.19384,-3.6371 -0.26432,-1.66701 -1.26523,-2.19566 -2.05115,-2.19566 l 0,0 0,-0.38415 c 0.94451,0 2.27671,0.36653 3.05558,2.03706 0.56037,1.20885 0.64848,2.57628 0.64848,3.96487 z" + inkscape:connector-curvature="0" + style="font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;letter-spacing:normal;word-spacing:normal;text-anchor:start;fill:#000000;fill-opacity:1;fill-rule:evenodd;stroke:#000000;stroke-width:0;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:10.43299961;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1" /><path + id="path8523" + d="m 787.87196,340.09332 0,0.0211 0,0.0176 0,0.0177 0,0.0177 0,0.0176 0,0.0177 0,0.0141 0,0.0176 0,0.0141 0,0.0141 -0.004,0.0141 0,0.0141 0,0.0141 0,0.0141 0,0.0106 -0.004,0.0141 -0.004,0.0211 0,0.0212 -0.007,0.0177 -0.004,0.0177 -0.007,0.0176 -0.007,0.0141 -0.007,0.0141 -0.007,0.0141 -0.0106,0.0106 -0.0106,0.0105 -0.0141,0.007 -0.0141,0.007 -0.0141,0.007 -0.0106,0.004 -0.007,0.004 -0.0106,0 -0.007,0.004 -0.0106,0.004 -0.0106,0 -0.0106,0.004 -0.0106,0 -0.0141,0.004 -0.0106,0 -0.0141,0 -0.0141,0.004 -0.0106,0 -0.0141,0 -0.0177,0 -0.0141,0.004 -0.0141,0 -0.0177,0 -0.0177,0 -0.0177,0 -0.0177,0 -0.0177,0 c -1.08901,-1.12425 -2.63267,-1.12425 -3.19303,-1.12425 l 0,-0.54627 c 0.35243,0 1.38505,0 2.29433,0.45816 l 0,-9.09629 c 0,-0.63438 -0.0493,-0.84231 -1.62824,-0.84231 l -0.5639,0 0,-0.54627 c 0.61676,0.0564 2.14279,0.0564 2.84413,0.0564 0.70486,0 2.22738,0 2.84414,-0.0564 l 0,0.54627 -0.56037,0 c -1.57891,0 -1.63529,0.19384 -1.63529,0.84231 z" + inkscape:connector-curvature="0" + style="font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;letter-spacing:normal;word-spacing:normal;text-anchor:start;fill:#000000;fill-opacity:1;fill-rule:evenodd;stroke:#000000;stroke-width:0;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:10.43299961;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1" /><g + transform="matrix(0.40138562,0,0,0.40138562,664.52811,164.85066)" + id="g8747"><g + 
style="font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;letter-spacing:normal;word-spacing:normal;text-anchor:start;fill:none;fill-opacity:1;fill-rule:evenodd;stroke:#000000;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:10.43299961;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1" + id="g8749" + transform="matrix(1.0629921,0,0,-1.0629921,-186.02362,789.27165)" + xml:space="preserve" + stroke-miterlimit="10.433" + font-style="normal" + font-variant="normal" + font-weight="normal" + font-stretch="normal" + font-size-adjust="none" + letter-spacing="normal" + word-spacing="normal"><path + style="fill:#000000;stroke-width:0" + inkscape:connector-curvature="0" + d="m 465.08,423.48 0,0.13 -0.01,0.14 -0.02,0.13 -0.02,0.13 -0.03,0.12 -0.04,0.13 -0.04,0.12 -0.05,0.12 -0.05,0.12 -0.06,0.11 -0.06,0.11 -0.07,0.11 -0.07,0.1 -0.08,0.1 -0.09,0.1 -0.08,0.09 -0.1,0.09 -0.09,0.08 -0.1,0.08 -0.1,0.07 -0.11,0.07 -0.11,0.07 -0.12,0.06 -0.11,0.05 -0.12,0.05 -0.13,0.04 -0.12,0.04 -0.13,0.03 -0.13,0.02 -0.13,0.02 -0.13,0.01 -0.14,0 c -1.44,0 -2.64,-1.2 -2.64,-2.64 0,-1.44 1.2,-2.64 2.64,-2.64 1.45,0 2.64,1.2 2.64,2.64 z" + id="path8751" /><path + style="fill:#000000;stroke-width:0" + inkscape:connector-curvature="0" + d="m 465.08,403.56 0,0.14 -0.01,0.13 -0.02,0.13 -0.02,0.13 -0.03,0.13 -0.04,0.12 -0.04,0.12 -0.05,0.12 -0.05,0.12 -0.06,0.11 -0.06,0.11 -0.07,0.11 -0.07,0.1 -0.08,0.1 -0.09,0.1 -0.08,0.09 -0.1,0.09 -0.09,0.08 -0.1,0.08 -0.1,0.08 -0.11,0.07 -0.11,0.06 -0.12,0.06 -0.11,0.05 -0.12,0.05 -0.13,0.04 -0.12,0.04 -0.13,0.03 -0.13,0.02 -0.13,0.02 -0.13,0.01 -0.14,0 c -1.44,0 -2.64,-1.2 -2.64,-2.64 0,-1.44 1.2,-2.64 2.64,-2.64 1.45,0 2.64,1.2 2.64,2.64 z" + id="path8753" /><path + style="fill:#000000;stroke-width:0" + inkscape:connector-curvature="0" + d="m 465.08,383.64 0,0.13 -0.01,0.14 -0.02,0.13 -0.02,0.13 -0.03,0.12 -0.04,0.13 -0.04,0.12 -0.05,0.12 -0.05,0.12 -0.06,0.11 -0.06,0.11 -0.07,0.11 -0.07,0.1 -0.08,0.1 -0.09,0.1 -0.08,0.09 -0.1,0.09 -0.09,0.08 -0.1,0.08 -0.1,0.07 -0.11,0.07 -0.11,0.07 -0.12,0.06 -0.11,0.05 -0.12,0.05 -0.13,0.04 -0.12,0.04 -0.13,0.03 -0.13,0.02 -0.13,0.02 -0.13,0.01 -0.14,0 c -1.44,0 -2.64,-1.2 -2.64,-2.64 0,-1.44 1.2,-2.64 2.64,-2.64 1.45,0 2.64,1.2 2.64,2.64 z" + id="path8755" /></g></g><path + style="font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;letter-spacing:normal;word-spacing:normal;text-anchor:start;fill:#000000;fill-opacity:1;fill-rule:evenodd;stroke:#000000;stroke-width:0;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:10.43299961;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1" + inkscape:connector-curvature="0" + d="m 790.86729,351.09328 0,0.2608 -0.004,0.26432 -0.007,0.26433 -0.0141,0.26079 -0.0141,0.26433 -0.0211,0.2608 -0.0282,0.2608 -0.0318,0.25728 -0.0423,0.25727 -0.0494,0.25728 -0.0564,0.25375 -0.0669,0.25375 -0.074,0.25375 -0.0881,0.2467 -0.0987,0.24671 -0.0528,0.12335 -0.0564,0.11983 c -0.80354,1.69167 -2.24499,1.97362 -2.9851,1.97362 -1.05025,0 -2.3331,-0.45817 -3.05559,-2.09345 -0.56037,-1.21237 -0.64847,-2.58333 -0.64847,-3.98601 0,-1.3181 0.0705,-2.897 0.79297,-4.2292 0.7542,-1.4203 2.03706,-1.77273 2.897,-1.77273 l 0,0.38415 c -0.68372,0 -1.7234,0.44055 -2.04059,2.12517 -0.19031,1.05025 -0.19031,2.66439 -0.19031,3.70759 0,1.12074 0,2.27671 0.14097,3.22476 0.33129,2.09345 1.64938,2.24852 2.08993,2.24852 0.57798,0 1.73749,-0.31367 2.06877,-2.05468 0.17622,-0.97977 0.17622,-2.31901 0.17622,-3.4186 0,-1.32162 0,-2.51284 -0.19384,-3.6371 -0.26432,-1.66701 
-1.26523,-2.19566 -2.05115,-2.19566 l 0,0 0,-0.38415 c 0.94451,0 2.27671,0.36653 3.05558,2.03706 0.56037,1.20885 0.64848,2.57628 0.64848,3.96487 z" + id="path8868" /><path + id="path8870" + d="m 790.86729,285.91338 0,0.2608 -0.004,0.26432 -0.007,0.26433 -0.0141,0.26079 -0.0141,0.26433 -0.0211,0.2608 -0.0282,0.2608 -0.0318,0.25728 -0.0423,0.25727 -0.0494,0.25728 -0.0564,0.25375 -0.0669,0.25375 -0.074,0.25375 -0.0881,0.2467 -0.0987,0.24671 -0.0528,0.12335 -0.0564,0.11983 c -0.80354,1.69167 -2.24499,1.97362 -2.9851,1.97362 -1.05025,0 -2.3331,-0.45817 -3.05559,-2.09345 -0.56037,-1.21237 -0.64847,-2.58333 -0.64847,-3.98601 0,-1.3181 0.0705,-2.897 0.79297,-4.2292 0.7542,-1.4203 2.03706,-1.77273 2.897,-1.77273 l 0,0.38415 c -0.68372,0 -1.7234,0.44055 -2.04059,2.12517 -0.19031,1.05025 -0.19031,2.66439 -0.19031,3.70759 0,1.12074 0,2.27671 0.14097,3.22476 0.33129,2.09345 1.64938,2.24852 2.08993,2.24852 0.57798,0 1.73749,-0.31367 2.06877,-2.05468 0.17622,-0.97977 0.17622,-2.31901 0.17622,-3.4186 0,-1.32162 0,-2.51284 -0.19384,-3.6371 -0.26432,-1.66701 -1.26523,-2.19566 -2.05115,-2.19566 l 0,0 0,-0.38415 c 0.94451,0 2.27671,0.36653 3.05558,2.03706 0.56037,1.20885 0.64848,2.57628 0.64848,3.96487 z" + inkscape:connector-curvature="0" + style="font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;letter-spacing:normal;word-spacing:normal;text-anchor:start;fill:#000000;fill-opacity:1;fill-rule:evenodd;stroke:#000000;stroke-width:0;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:10.43299961;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1" /><path + style="font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;letter-spacing:normal;word-spacing:normal;text-anchor:start;fill:#000000;fill-opacity:1;fill-rule:evenodd;stroke:#000000;stroke-width:0;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:10.43299961;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1" + inkscape:connector-curvature="0" + d="m 787.87196,274.91342 0,0.0211 0,0.0176 0,0.0177 0,0.0177 0,0.0176 0,0.0177 0,0.0141 0,0.0176 0,0.0141 0,0.0141 -0.004,0.0141 0,0.0141 0,0.0141 0,0.0141 0,0.0106 -0.004,0.0141 -0.004,0.0211 0,0.0212 -0.007,0.0177 -0.004,0.0177 -0.007,0.0176 -0.007,0.0141 -0.007,0.0141 -0.007,0.0141 -0.0106,0.0106 -0.0106,0.0105 -0.0141,0.007 -0.0141,0.007 -0.0141,0.007 -0.0106,0.004 -0.007,0.004 -0.0106,0 -0.007,0.004 -0.0106,0.004 -0.0106,0 -0.0106,0.004 -0.0106,0 -0.0141,0.004 -0.0106,0 -0.0141,0 -0.0141,0.004 -0.0106,0 -0.0141,0 -0.0177,0 -0.0141,0.004 -0.0141,0 -0.0177,0 -0.0177,0 -0.0177,0 -0.0177,0 -0.0177,0 c -1.08902,-1.12425 -2.63267,-1.12425 -3.19304,-1.12425 l 0,-0.54627 c 0.35244,0 1.38506,0 2.29433,0.45816 l 0,-9.09629 c 0,-0.63438 -0.0493,-0.84231 -1.62824,-0.84231 l -0.5639,0 0,-0.54627 c 0.61676,0.0564 2.14279,0.0564 2.84413,0.0564 0.70486,0 2.22738,0 2.84414,-0.0564 l 0,0.54627 -0.56037,0 c -1.57891,0 -1.63529,0.19384 -1.63529,0.84231 z" + id="path8872" /><path + id="path8874" + d="m 790.86729,250.97158 0,0.2608 -0.004,0.26432 -0.007,0.26433 -0.0141,0.26079 -0.0141,0.26433 -0.0211,0.2608 -0.0282,0.2608 -0.0318,0.25728 -0.0423,0.25727 -0.0494,0.25728 -0.0564,0.25375 -0.0669,0.25375 -0.074,0.25375 -0.0881,0.2467 -0.0987,0.24671 -0.0528,0.12335 -0.0564,0.11983 c -0.80354,1.69167 -2.24499,1.97362 -2.9851,1.97362 -1.05025,0 -2.3331,-0.45817 -3.05559,-2.09345 -0.56037,-1.21237 -0.64847,-2.58333 -0.64847,-3.98601 0,-1.3181 0.0705,-2.897 0.79297,-4.2292 0.7542,-1.4203 2.03706,-1.77273 2.897,-1.77273 l 0,0.38415 c -0.68372,0 -1.7234,0.44055 
+</svg>
diff --git a/presentation/images/voter.png b/presentation/images/voter.png
Binary files differnew file mode 100644
index 0000000..c1325cb
--- /dev/null
+++ b/presentation/images/voter.png
diff --git a/presentation/stats/beamer_2.tex b/presentation/stats/beamer_2.tex
new file mode 100644
index 0000000..ecc66ed
--- /dev/null
+++ b/presentation/stats/beamer_2.tex
@@ -0,0 +1,397 @@
+\documentclass[10pt]{beamer}
+
+\usepackage{amssymb, amsmath, graphicx, amsfonts, color, amsthm, wasysym}
+
+\newtheorem{proposition}{Proposition}
+
+\title{Learning from Diffusion Processes}
+\subtitle{What cascades really teach us about networks}
+\author{Jean (John) Pouget-Abadie \\ Joint Work with Thibaut (T-bo) Horel}
+
+\begin{document}
+
+\begin{frame}
+\titlepage
+\end{frame}
+
+\begin{frame}
+\frametitle{Introduction}
+
+%notes: Learn what? the network, the parameters of the diffusion process.
+
+\begin{table}
+\centering
+\begin{tabular}{c | c}
+Network & Diffusion process \\[1ex]
+\hline
+\\
+Airports & Infectious diseases (SARS) \\
+ & Delays (Eyjafjallajökull) \\[3ex]
+Social Network & Infectious diseases (flu) \\
+ & Behaviors (Ice Bucket Challenge) \\[3ex]
+Internet/WWW & Information diffusion (Memes, Pirated content \dots)
+\end{tabular}
+\end{table}
+
+\end{frame}
+
+%%%%%%%%%%%%%%%%%%%%%%%
+
+\begin{frame}
+\frametitle{Introduction}
+
+What do we know? What do we want to know?
+
+\begin{itemize}
+\item We know the {\bf airport network} structure. We observe delays. Can we learn how delays propagate?
+\item We (sometimes) know the {\bf social network}. We observe behaviors. Can we learn who influences whom?
+\item We rarely know the {\bf blog network}. We observe discussions. Can we learn who learns from whom?
+\end{itemize}
+
+\end{frame}
+
+%%%%%%%%%%%%%%%%%%%%%%%
+
+\begin{frame}
+\frametitle{Independent Cascade Model}
+
+\begin{figure}
+\includegraphics[scale=.3]{figures/weighted_graph.png}
+\caption{Weighted, directed graph}
+\end{figure}
+
+\begin{itemize}
+\item At $t=0$, nodes are in three possible states: susceptible, {\color{blue} infected}, {\color{red} dead}
+\pause
+\item At time step $t$, each {\color{blue} infected} node $i$ has a ``one-shot'' probability $p_{i,j}$ of infecting each of its susceptible neighbors $j$ at $t+1$.
+\pause
+\item A node stays {\color{blue} infected} for one round, then it {\color{red} dies}
+\pause
+\item At $t=0$, each node is {\color{blue} infected} with probability $p_{\text{init}}$
+\pause
+\item The process continues until a random time $T$ when no more nodes can become infected.
+\pause
+\item $X_t$: set of {\color{blue} infected} nodes at time $t$
+\pause
+\item A {\bf cascade} is one realization of the IC model: $(X_t)_{t=0,\dots,T}$
+\end{itemize}
+
+%Notes: Revisit the celebrated independent cascade model -> Influence maximisation is tractable, requires knowledge of weights
+
+\end{frame}
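As an aside, the dynamics described on this slide are easy to simulate. The Python sketch below is only an illustration (not part of the committed slides or code); the weight matrix p and the value of p_init are made-up placeholders, not the weights from weighted_graph.png.

import numpy as np

rng = np.random.default_rng(0)

# Hypothetical weights: p[i, j] = probability that an infected i infects a susceptible j.
p = np.array([[0.00, 0.45, 0.00],
              [0.00, 0.00, 0.30],
              [0.00, 0.04, 0.00]])
p_init = 0.3
m = p.shape[0]

infected = rng.random(m) < p_init            # X_0: each node starts infected w.p. p_init
susceptible = ~infected                      # the remaining nodes are susceptible
cascade = [np.flatnonzero(infected)]         # record X_t for t = 0, ..., T

while infected.any():
    coins = rng.random((m, m)) < p           # independent "one-shot" coin for each directed pair (i, j)
    newly = susceptible & coins[infected].any(axis=0)
    susceptible &= ~newly
    infected = newly                         # previously infected nodes are now dead
    cascade.append(np.flatnonzero(newly))

print(cascade)                               # the sets X_0, X_1, ..., ending with the empty set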
+
+%%%%%%%%%%%%%%%%%%%%%%%%%
+
+\begin{frame}
+\frametitle{Independent Cascade Model}
+
+\begin{figure}
+\includegraphics[scale=.5]{figures/weighted_graph.png}
+\caption{Weighted, directed graph}
+\end{figure}
+
+\begin{block}{Example}
+\begin{itemize}
+\item At $t=0$, the {\color{orange} orange} node is infected, and the two other nodes are susceptible. $X_0 = $({\color{orange} orange})
+\item At $t=1$, the {\color{orange} orange} node infects the {\color{blue} blue} node and fails to infect the {\color{green} green} node. The {\color{orange} orange} node dies. $X_1 = $({\color{blue} blue})
+\item At $t=2$, {\color{blue} blue} dies. $X_2 = \emptyset$
+\end{itemize}
+\end{block}
+
+\end{frame}
+
+%%%%%%%%%%%%%%%%%%%%%%%%%
+
+\begin{frame}
+\frametitle{Independent Cascade Model}
+
+\begin{figure}
+\includegraphics[scale=.5]{figures/weighted_graph.png}
+\caption{Weighted, directed graph}
+\end{figure}
+
+\begin{itemize}
+\item If the {\color{orange} orange} node and the {\color{green} green} node are infected at $t=0$, what is the probability that the {\color{blue} blue} node is infected at $t=1$?
+$$1 - \mathbb{P}(\text{not infected}) = 1 - (1 - .45)(1-.04)$$
+\end{itemize}
+
+\end{frame}
+
+%%%%%%%%%%%%%%%%%%%%%%%%%
+
+\begin{frame}
+\frametitle{Independent Cascade Model}
+\begin{figure}
+\includegraphics[scale=.5]{figures/weighted_graph.png}
+\caption{Weighted, directed graph}
+\end{figure}
+
+\begin{itemize}
+\item In general, for each susceptible node $j$:
+$$\mathbb{P}(j \text{ becomes infected at } t+1 \,|\, X_{t}) = 1 - \prod_{i \in {\cal N}(j) \cap X_{t}} (1 - p_{i,j})$$
+\end{itemize}
+
+\end{frame}
+
+%%%%%%%%%%%%%%%%%%%%%%%%%
+
+\begin{frame}
+\frametitle{Independent Cascade Model}
+For each susceptible node $j$, the event that it becomes {\color{blue} infected}, conditioned on the previous time step, is a Bernoulli random variable:
+$$(j \in X_{t+1} | X_t) \sim {\cal B} \big(f(X_t \cdot \theta_j) \big)$$
+\begin{itemize}
+\item $\theta_{i,j} := \log(1 - p_{i,j})$
+\item $\theta_j := (0, 0, 0, \theta_{4,j}, 0, \dots, \theta_{k,j}, \dots)$
+\item $f : x \mapsto 1 - e^x$
+\begin{align*}
+\mathbb{P}(j\in X_{t+1}|X_{t}) & = 1 - \prod_{i \in {\cal N}(j) \cap X_{t}} (1 - p_{i,j}) \\
+& = 1 - \exp \left[ \sum_{i \in {\cal N}(j) \cap X_{t}} \log(1 - p_{i,j}) \right] \\
+& = 1 - \exp \left[ X_{t} \cdot \theta_{j}\right]
+\end{align*}
+\end{itemize}
+\end{frame}
+
+%%%%%%%%%%%%%%%%%%%%%%%%%
+
+\begin{frame}
+\frametitle{Independent Cascade Model}
+For each susceptible node $j$, the event that it becomes {\color{blue} infected}, conditioned on the previous time step, is a Bernoulli random variable:
+$$(j \in X_{t+1} | X_t) \sim {\cal B} \big(f(X_t \cdot \theta_j) \big)$$
+
+\begin{block}{Decomposability}
+\begin{itemize}
+\item Conditioned on $X_t$, the states of the nodes at the next time step are mutually independent
+\item We can learn the parents of each node independently
+\end{itemize}
+\end{block}
+
+\begin{block}{Sparsity}
+\begin{itemize}
+\item $\theta_{i,j} = 0 \Leftrightarrow \log(1 - p_{i,j}) = 0 \Leftrightarrow p_{i,j} = 0$
+\item If the graph is ``sparse'', then $p_{j}$ is sparse, and therefore $\theta_j$ is sparse.
+\end{itemize}
+\end{block}
+\end{frame}
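The change of variables on these slides is easy to sanity-check numerically. The sketch below is an illustration only (not part of the committed files); the incoming weights p_j and the indicator X_t are made-up values. It verifies that 1 - prod_{i in X_t}(1 - p_{i,j}) equals f(X_t . theta_j) with theta_{i,j} = log(1 - p_{i,j}) and f(x) = 1 - e^x.

import numpy as np

p_j = np.array([0.45, 0.0, 0.04, 0.2])       # hypothetical incoming weights p_{i,j} for one node j
theta_j = np.log(1.0 - p_j)                  # theta_{i,j} = log(1 - p_{i,j}); zero iff p_{i,j} = 0

X_t = np.array([1, 0, 1, 0])                 # indicator vector of the infected nodes at time t

direct = 1.0 - np.prod((1.0 - p_j) ** X_t)   # 1 - prod over infected parents of (1 - p_{i,j})
glm = 1.0 - np.exp(X_t @ theta_j)            # f(X_t . theta_j) with f(x) = 1 - e^x

print(direct, glm)                           # both equal 1 - (1 - 0.45)(1 - 0.04) = 0.472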
+
+%%%%%%%%%%%%%%%%%%%%%%%
+
+\begin{frame}
+\frametitle{Learning from Diffusion Processes}
+\begin{block}{Problem Statement}
+\begin{itemize}
+\item We are given a graph ${\cal G}$ and a diffusion process $f$ parameterized by $\left((\theta_j)_j, p_{\text{init}}\right)$.
+\item Suppose we {\bf only} observe $(X_t)$ from the diffusion process.
+\item Under what conditions can we learn $\theta_{i,j}$ for all $(i,j)$?
+\end{itemize}
+\end{block}
+\end{frame}
+
+%%%%%%%%%%%%%%%%%%%%%%%%%
+
+\begin{frame}
+\frametitle{Sparse Recovery}
+\begin{figure}
+\includegraphics[scale=.6]{../images/sparse_recovery_illustration_copy.pdf}
+\caption{$\mathbb{P}(j \in X_{t+1}| X_t) = f(X_t\cdot \theta)$}
+\end{figure}
+\end{frame}
+
+%%%%%%%%%%%%%%%%%%%%%
+
+\begin{frame}
+\frametitle{Learning from Diffusion Processes}
+
+% \begin{figure}
+% \includegraphics[scale=.4]{../images/sparse_recovery_illustration.pdf}
+% \caption{Generalized Cascade Model for node $i$}
+% \end{figure}
+
+\begin{block}{Likelihood Function}
+\begin{align*}
+{\cal L}(\theta_1, \dots, \theta_m | X_1, \dots, X_n) = \sum_{i=1}^m \sum_{t} & X_{t+1}^i \log f(\theta_i \cdot X_t) + \\
+& (1 - X_{t+1}^i) \log(1 - f(\theta_i \cdot X_t))
+\end{align*}
+\end{block}
+
+\begin{block}{MLE}
+For each node $i$, $$\hat \theta_i \in \arg \max_{\theta_i} \frac{1}{n_i}{\cal L}_i(\theta_i | X_1, X_2, \dots, X_{n_i}) - \lambda \|\theta_i\|_1$$
+\end{block}
+
+\end{frame}
+
+%%%%%%%%%%%%%%%%%%%%%
+
+\begin{frame}
+\frametitle{Conditions}
+\begin{block}{On $f$}
+\begin{itemize}
+\item $\log f$ and $\log (1-f)$ have to be concave
+\item $\log f$ and $\log (1-f)$ have to have bounded gradient
+\end{itemize}
+\end{block}
+
+\begin{block}{On $(X_t)$}
+\begin{itemize}
+\item We want ${\cal H}$, the Hessian of ${\cal L}$ with respect to $\theta$, to be well-conditioned.
+\item $n < \dim(\theta) \implies {\cal H}$ is degenerate.
+\item {\bf Restricted Eigenvalue condition} = ``almost invertible'' on sparse vectors.
+\end{itemize}
+\end{block}
+
+\end{frame}
+
+%%%%%%%%%%%%%%%%%%%%%%%%
+
+\begin{frame}
+\frametitle{Restricted Eigenvalue Condition}
+
+\begin{definition}
+For a set $S$,
+$${\cal C} := \{ \Delta : \|\Delta\|_2 = 1, \|\Delta_{\bar S}\|_1 \leq 3 \| \Delta_S\|_1 \}$$
+${\cal H}$ verifies the $(S, \gamma)$-RE condition if:
+$$\forall \Delta \in {\cal C}, \quad \Delta^T {\cal H} \Delta \geq \gamma$$
+\end{definition}
+\end{frame}
+%%%%%%%%%%%%%%%%%%%%%%%%
+
+\begin{frame}
+\frametitle{Main Result}
+Adapting a result from \cite{Negahban:2009}, we have the following theorem:
+
+\begin{theorem}
+For node $i$, assume
+\begin{itemize}
+\item the Hessian verifies the $(S,\gamma)$-RE condition, where $S$ is the set of parents of node $i$ (the support of $\theta_i$)
+\item $f$ and $1-f$ are log-concave
+\item $|(\log f)'| < \frac{1}{\alpha}$ and $|(\log (1-f))'| < \frac{1}{\alpha}$
+\end{itemize} then with high probability:
+$$\| \theta^*_i - \hat \theta_i \|_2 \leq \frac{6}{\gamma}\sqrt{\frac{s\log m}{\alpha n}}$$
+\end{theorem}
+
+\begin{corollary}
+By thresholding $\hat \theta_i$, if $n > C' s \log m$, we recover the support of $\theta^*$ and therefore the edges of ${\cal G}$.
+\end{corollary}
+
+\end{frame}
+
+%%%%%%%%%%%%%%%%%%%%%%%%
+
+\begin{frame}
+\frametitle{Main Result}
+
+\begin{block}{Correlation}
+\begin{itemize}
+\item Positive result despite correlated measurements \smiley
+\item Requiring independent measurements $\implies$ taking only one measurement per cascade.
+\end{itemize}
+\end{block}
+
+\begin{block}{Statement w.r.t.\ the observations and not the model}
+\begin{itemize}
+\item The Hessian must verify the $(S,\gamma)$-RE condition \frownie
+\item Can we state the condition in terms of $\theta$ rather than of $X_t$?
+\end{itemize}
+\end{block}
+
+\end{frame}
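For intuition, here is one way the per-node L1-penalized MLE above could be prototyped, using proximal gradient steps on the negative log-likelihood with the link f(x) = 1 - e^x. This is a sketch under stated assumptions, not the authors' implementation: the measurements are synthetic, the step size, penalty lambda, iteration count, and threshold are arbitrary illustrative choices, and the clipping of X_t . theta away from 0 stands in for the bounded-gradient assumption on log f.

import numpy as np

rng = np.random.default_rng(1)
m, n, lam, step = 20, 2000, 0.05, 0.2

# Hypothetical ground truth for one node: 3 parents among m candidate parents.
theta_star = np.zeros(m)
theta_star[[2, 5, 11]] = np.log(1.0 - np.array([0.4, 0.3, 0.5]))

# Synthetic measurements: row X[t] is the infected-set indicator, and y[t] = 1 if the
# node becomes infected at t+1, drawn with probability f(X[t] . theta_star) = 1 - exp(.).
X = (rng.random((n, m)) < 0.3).astype(float)
y = (rng.random(n) < 1.0 - np.exp(X @ theta_star)).astype(float)

def soft_threshold(v, tau):
    return np.sign(v) * np.maximum(np.abs(v) - tau, 0.0)

theta = np.full(m, -0.5)                         # start strictly negative
for _ in range(1000):
    z = np.minimum(X @ theta, -1e-2)             # keep (log f)' bounded, as the conditions require
    resid = y * np.exp(z) / (1.0 - np.exp(z)) - (1.0 - y)
    grad = X.T @ resid / n                       # gradient of the negative log-likelihood
    theta = soft_threshold(theta - step * grad, step * lam)
    theta = np.minimum(theta, 0.0)               # theta_{i,j} = log(1 - p_{i,j}) <= 0

print(np.flatnonzero(np.abs(theta) > 1e-2))      # thresholded support; should match the parents {2, 5, 11}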
+
+%%%%%%%%%%%%%%%%%%%%%%%
+
+\begin{frame}
+\frametitle{Restricted Eigenvalue Condition}
+
+\begin{block}{From Hessian to Gram Matrix}
+\begin{itemize}
+\item If $\log f$ and $\log (1-f)$ are strictly concave with constant $c$, and the $(S, \gamma)$-RE condition holds for the Gram matrix $\frac{1}{n}X X^T$, then the $(S, c \gamma)$-RE condition holds for ${\cal H}$
+\end{itemize}
+\end{block}
+
+\begin{block}{From Gram Matrix to Expected Gram Matrix}
+\begin{itemize}
+\item If $n > C' s^2 \log m$ and the $(S, \gamma)$-RE condition holds for $\mathbb{E}(\frac{1}{n}XX^T)$, then the $(S, \gamma/2)$-RE condition holds for $\frac{1}{n}XX^T$ w.h.p.
+\item $\mathbb{E}(\frac{1}{n}XX^T)$ only depends on $p_{\text{init}}$ and $(\theta_j)_j$
+\end{itemize}
+\end{block}
+
+\begin{block}{Expected Gram Matrix}
+\begin{itemize}
+\item Diagonal: average number of times a node is infected
+\item Off-diagonal: average number of times a pair of nodes is infected \emph{together}
+\end{itemize}
+\end{block}
+
+\end{frame}
+
+%%%%%%%%%%%%%%%%%%%
+\begin{frame}
+\frametitle{Approximate Sparsity}
+\begin{itemize}
+\item $\theta^*_{\lceil s \rceil}$: best $s$-sparse approximation to $\theta^*$
+\item $\|\theta^* - \theta^*_{\lceil s \rceil} \|_1$: `tail' of $\theta^*$
+\end{itemize}
+\begin{theorem}
+Under similar conditions on $f$ and a relaxed RE condition, there exist $C_1, C_2>0$ such that with high probability:
+\begin{equation}
+\|\hat \theta_i - \theta^*_i\|_2 \leq C_1 \sqrt{\frac{s\log m}{n}} + C_2 \sqrt[4]{\frac{s\log m}{n}}\|\theta^* - \theta^*_{\lceil s \rceil} \|_1
+\end{equation}
+\end{theorem}
+\end{frame}
+
+%%%%%%%%%%%%%%%%%%%%%%%
+
+\begin{frame}
+\frametitle{Lower Bound}
+\begin{itemize}
+\item Under a correlation decay assumption for the IC model, $\Omega(s \log (N/s))$ cascades are necessary for graph reconstruction (Netrapalli and Sanghavi, SIGMETRICS'12)
+\item Adapting (Price \& Woodruff, STOC'12): in the approximately sparse case, any algorithm for any generalized linear cascade model such that:
+$$\|\hat \theta - \theta^*\|_2 \leq C \|\theta^* - \theta^*_{\lceil s \rceil}\|_2$$
+requires $\Omega(s \log (n/s)/\log C)$ measurements.
+\end{itemize}
+\end{frame}
+
+%%%%%%%%%%%%%%%%%%%%%%
+
+\begin{frame}
+\frametitle{Voter Model}
+\begin{itemize}
+\pause
+\item {\color{red} Red} and {\color{blue} Blue} nodes. At every step, each node $i$ chooses one of its neighbors $j$ with probability $p_{j,i}$ and adopts $j$'s color at $t+1$
+\pause
+\item If {\color{blue} Blue} is the `contagious' state:
+\pause
+\begin{equation}
+\nonumber
+\mathbb{P}(i \in X^{t+1}|X^t) = \sum_{j \in {\cal N}(i)\cap X^t} p_{ji} = X^t \cdot \theta_i
+\end{equation}
+\end{itemize}
+\end{frame}
+
+%%%%%%%%%%%%%%%%%%%%%%%
+
+\begin{frame}
+\frametitle{Future Work}
+\begin{itemize}
+\item Lower bound the restricted eigenvalues of the expected Gram matrix
+\item Confidence intervals
+\item Show that $n > C' s \log m$ measurements are necessary w.r.t.\ the expected Hessian.
+\item Linear Threshold model $\rightarrow$ 1-bit compressed sensing formulation
+\item Better lower bounds
+\item Active learning
+\end{itemize}
+\end{frame}
+
+%%%%%%%%%%%%%%%%%
+
+\bibliography{../../paper/sparse}
+\bibliographystyle{apalike}
+
+\end{document}
\ No newline at end of file
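Because the expected Gram matrix has such a direct interpretation, its empirical counterpart is cheap to inspect on observed or simulated cascades. The snippet below is again only an illustrative sketch with made-up measurements: it forms the Gram matrix from stacked infection indicators and reports its smallest eigenvalue, which bounds the quadratic form over all unit vectors rather than only the cone in the RE definition, so it is a sufficient but conservative check.

import numpy as np

rng = np.random.default_rng(2)

# Hypothetical stacked measurements: row t is the infection indicator X_t over m nodes.
# In practice these rows would be collected from the observed cascades.
n, m = 5000, 10
X = (rng.random((n, m)) < 0.2).astype(float)

G = X.T @ X / n   # empirical Gram matrix (the slides' (1/n) X X^T, with measurements as rows here)
# G[i, i]: fraction of measurements in which node i is infected
# G[i, k]: fraction of measurements in which nodes i and k are infected together

print(np.round(G[:3, :3], 3))
lam_min = np.linalg.eigvalsh(G).min()
print(lam_min)    # if lam_min >= gamma, then Delta' G Delta >= gamma for every unit Delta,
                  # so the (S, gamma)-RE condition holds (a stronger-than-necessary check)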
diff --git a/presentation/stats/figures/Screen Shot 2015-03-08 at 13.08.01.png b/presentation/stats/figures/Screen Shot 2015-03-08 at 13.08.01.png
Binary files differnew file mode 100644
index 0000000..b053f0c
--- /dev/null
+++ b/presentation/stats/figures/Screen Shot 2015-03-08 at 13.08.01.png
diff --git a/presentation/stats/figures/weighted_graph.png b/presentation/stats/figures/weighted_graph.png
Binary files differnew file mode 100644
index 0000000..7deccc3
--- /dev/null
+++ b/presentation/stats/figures/weighted_graph.png
diff --git a/presentation/stats/sparse.bib b/presentation/stats/sparse.bib
new file mode 100644
index 0000000..5df4b59
--- /dev/null
+++ b/presentation/stats/sparse.bib
@@ -0,0 +1,503 @@
+@article {CandesRomberTao:2006,
+author = {Candès, Emmanuel J. and Romberg, Justin K. and Tao, Terence},
+title = {Stable signal recovery from incomplete and inaccurate measurements},
+journal = {Communications on Pure and Applied Mathematics},
+volume = {59},
+number = {8},
+publisher = {Wiley Subscription Services, Inc., A Wiley Company},
+issn = {1097-0312},
+pages = {1207--1223},
+year = {2006},
+}
+
+
+@inproceedings{GomezRodriguez:2010,
+  author = {Gomez Rodriguez, Manuel and Leskovec, Jure and Krause, Andreas},
+  title = {Inferring Networks of Diffusion and Influence},
+  booktitle = {Proceedings of the 16th ACM SIGKDD International Conference on Knowledge Discovery and Data Mining},
+  series = {KDD '10},
+  year = {2010},
+  isbn = {978-1-4503-0055-1},
+  location = {Washington, DC, USA},
+  pages = {1019--1028},
+  numpages = {10},
+  publisher = {ACM},
+  address = {New York, NY, USA},
+}
+
+
+@article{Netrapalli:2012,
+  author = {Netrapalli, Praneeth and Sanghavi, Sujay},
+  title = {Learning the Graph of Epidemic Cascades},
+  journal = {SIGMETRICS Perform. Eval. Rev.},
+  volume = {40},
+  number = {1},
+  month = {June},
+  year = {2012},
+  issn = {0163-5999},
+  pages = {211--222},
+  numpages = {12},
+  publisher = {ACM},
+  address = {New York, NY, USA},
+  keywords = {cascades, epidemics, graph structure learning},
+}
+
+@article{Negahban:2009,
+  author = {Negahban, Sahand N. and Ravikumar, Pradeep and Wainwright, Martin J. and Yu, Bin},
+  title = {A Unified Framework for High-Dimensional Analysis of M-Estimators with Decomposable Regularizers},
+  journal = {Statistical Science},
+  year = {2012},
+  month = {December},
+  volume = {27},
+  number = {4},
+  pages = {538--557},
+}
+
+@article{Zhao:2006,
+  author = {Zhao, Peng and Yu, Bin},
+  title = {On Model Selection Consistency of Lasso},
+  journal = {J. Mach. Learn.
Res.}, + issue_date = {12/1/2006}, + volume = {7}, + month = dec, + year = {2006}, + issn = {1532-4435}, + pages = {2541--2563}, + numpages = {23}, + url = {http://dl.acm.org/citation.cfm?id=1248547.1248637}, + acmid = {1248637}, + publisher = {JMLR.org}, +} + +@inproceedings{Daneshmand:2014, + author = {Hadi Daneshmand and + Manuel Gomez{-}Rodriguez and + Le Song and + Bernhard Sch{\"{o}}lkopf}, + title = {Estimating Diffusion Network Structures: Recovery Conditions, Sample + Complexity {\&} Soft-thresholding Algorithm}, + booktitle = {Proceedings of the 31th International Conference on Machine Learning, + {ICML} 2014, Beijing, China, 21-26 June 2014}, + pages = {793--801}, + year = {2014}, + url = {http://jmlr.org/proceedings/papers/v32/daneshmand14.html}, + timestamp = {Fri, 07 Nov 2014 20:42:30 +0100}, + biburl = {http://dblp.uni-trier.de/rec/bib/conf/icml/DaneshmandGSS14}, + bibsource = {dblp computer science bibliography, http://dblp.org} +} + +@inproceedings{Kempe:03, + author = {David Kempe and + Jon M. Kleinberg and + {\'{E}}va Tardos}, + title = {Maximizing the spread of influence through a social network}, + booktitle = {Proceedings of the Ninth {ACM} {SIGKDD} International Conference on + Knowledge Discovery and Data Mining, Washington, DC, USA, August 24 + - 27, 2003}, + pages = {137--146}, + year = {2003}, + url = {http://doi.acm.org/10.1145/956750.956769}, + doi = {10.1145/956750.956769}, + timestamp = {Mon, 13 Feb 2006 15:34:20 +0100}, + biburl = {http://dblp.uni-trier.de/rec/bib/conf/kdd/KempeKT03}, + bibsource = {dblp computer science bibliography, http://dblp.org} +} + +@inproceedings{Abrahao:13, + author = {Bruno D. Abrahao and + Flavio Chierichetti and + Robert Kleinberg and + Alessandro Panconesi}, + title = {Trace complexity of network inference}, + booktitle = {The 19th {ACM} {SIGKDD} International Conference on Knowledge Discovery + and Data Mining, {KDD} 2013, Chicago, IL, USA, August 11-14, 2013}, + pages = {491--499}, + year = {2013}, + url = {http://doi.acm.org/10.1145/2487575.2487664}, + doi = {10.1145/2487575.2487664}, + timestamp = {Tue, 10 Sep 2013 10:11:57 +0200}, + biburl = {http://dblp.uni-trier.de/rec/bib/conf/kdd/AbrahaoCKP13}, + bibsource = {dblp computer science bibliography, http://dblp.org} +} + + +@article{vandegeer:2009, +author = "van de Geer, Sara A. and B{\"u}hlmann, Peter", +doi = "10.1214/09-EJS506", +fjournal = "Electronic Journal of Statistics", +journal = "Electron. J. Statist.", +pages = "1360--1392", +publisher = "The Institute of Mathematical Statistics and the Bernoulli Society", +title = "On the conditions used to prove oracle results for the Lasso", +url = "http://dx.doi.org/10.1214/09-EJS506", +volume = "3", +year = "2009" +} + +@article{vandegeer:2011, +author = "van de Geer, Sara and Bühlmann, Peter and Zhou, Shuheng", +doi = "10.1214/11-EJS624", +fjournal = "Electronic Journal of Statistics", +journal = "Electron. J. 
Statist.", +pages = "688--749", +publisher = "The Institute of Mathematical Statistics and the Bernoulli Society", +title = "The adaptive and the thresholded Lasso for potentially misspecified models (and a lower bound for the Lasso)", +url = "http://dx.doi.org/10.1214/11-EJS624", +volume = "5", +year = "2011" +} + +@article{Zou:2006, +author = {Zou, Hui}, +title = {The Adaptive Lasso and Its Oracle Properties}, +journal = {Journal of the American Statistical Association}, +volume = {101}, +number = {476}, +pages = {1418-1429}, +year = {2006}, +doi = {10.1198/016214506000000735}, +URL = {http://dx.doi.org/10.1198/016214506000000735}, +} + +@article{Jacques:2013, + author = {Laurent Jacques and + Jason N. Laska and + Petros T. Boufounos and + Richard G. Baraniuk}, + title = {Robust 1-Bit Compressive Sensing via Binary Stable Embeddings of Sparse + Vectors}, + journal = {{IEEE} Transactions on Information Theory}, + volume = {59}, + number = {4}, + pages = {2082--2102}, + year = {2013}, + url = {http://dx.doi.org/10.1109/TIT.2012.2234823}, + doi = {10.1109/TIT.2012.2234823}, + timestamp = {Tue, 09 Apr 2013 19:57:48 +0200}, + biburl = {http://dblp.uni-trier.de/rec/bib/journals/tit/JacquesLBB13}, + bibsource = {dblp computer science bibliography, http://dblp.org} +} + +@inproceedings{Boufounos:2008, + author = {Petros Boufounos and + Richard G. Baraniuk}, + title = {1-Bit compressive sensing}, + booktitle = {42nd Annual Conference on Information Sciences and Systems, {CISS} + 2008, Princeton, NJ, USA, 19-21 March 2008}, + pages = {16--21}, + year = {2008}, + url = {http://dx.doi.org/10.1109/CISS.2008.4558487}, + doi = {10.1109/CISS.2008.4558487}, + timestamp = {Wed, 15 Oct 2014 17:04:27 +0200}, + biburl = {http://dblp.uni-trier.de/rec/bib/conf/ciss/BoufounosB08}, + bibsource = {dblp computer science bibliography, http://dblp.org} +} + +@inproceedings{Gupta:2010, + author = {Ankit Gupta and + Robert Nowak and + Benjamin Recht}, + title = {Sample complexity for 1-bit compressed sensing and sparse classification}, + booktitle = {{IEEE} International Symposium on Information Theory, {ISIT} 2010, + June 13-18, 2010, Austin, Texas, USA, Proceedings}, + pages = {1553--1557}, + year = {2010}, + url = {http://dx.doi.org/10.1109/ISIT.2010.5513510}, + doi = {10.1109/ISIT.2010.5513510}, + timestamp = {Thu, 15 Jan 2015 17:11:50 +0100}, + biburl = {http://dblp.uni-trier.de/rec/bib/conf/isit/GuptaNR10}, + bibsource = {dblp computer science bibliography, http://dblp.org} +} + +@article{Plan:2014, + author = {Yaniv Plan and + Roman Vershynin}, + title = {Dimension Reduction by Random Hyperplane Tessellations}, + journal = {Discrete {\&} Computational Geometry}, + volume = {51}, + number = {2}, + pages = {438--461}, + year = {2014}, + url = {http://dx.doi.org/10.1007/s00454-013-9561-6}, + doi = {10.1007/s00454-013-9561-6}, + timestamp = {Tue, 11 Feb 2014 13:48:56 +0100}, + biburl = {http://dblp.uni-trier.de/rec/bib/journals/dcg/PlanV14}, + bibsource = {dblp computer science bibliography, http://dblp.org} +} + +@article{bickel:2009, +author = "Bickel, Peter J. and Ritov, Ya’acov and Tsybakov, Alexandre B.", +doi = "10.1214/08-AOS620", +fjournal = "The Annals of Statistics", +journal = "Ann. 
Statist.", +month = "08", +number = "4", +pages = "1705--1732", +publisher = "The Institute of Mathematical Statistics", +title = "Simultaneous analysis of Lasso and Dantzig selector", +url = "http://dx.doi.org/10.1214/08-AOS620", +volume = "37", +year = "2009" +} + +@article{raskutti:10, + author = {Garvesh Raskutti and + Martin J. Wainwright and + Bin Yu}, + title = {Restricted Eigenvalue Properties for Correlated Gaussian Designs}, + journal = {Journal of Machine Learning Research}, + volume = {11}, + pages = {2241--2259}, + year = {2010}, + url = {http://portal.acm.org/citation.cfm?id=1859929}, + timestamp = {Wed, 15 Oct 2014 17:04:32 +0200}, + biburl = {http://dblp.uni-trier.de/rec/bib/journals/jmlr/RaskuttiWY10}, + bibsource = {dblp computer science bibliography, http://dblp.org} +} + +@article{rudelson:13, + author = {Mark Rudelson and + Shuheng Zhou}, + title = {Reconstruction From Anisotropic Random Measurements}, + journal = {{IEEE} Transactions on Information Theory}, + volume = {59}, + number = {6}, + pages = {3434--3447}, + year = {2013}, + url = {http://dx.doi.org/10.1109/TIT.2013.2243201}, + doi = {10.1109/TIT.2013.2243201}, + timestamp = {Tue, 21 May 2013 14:15:50 +0200}, + biburl = {http://dblp.uni-trier.de/rec/bib/journals/tit/RudelsonZ13}, + bibsource = {dblp computer science bibliography, http://dblp.org} +} + +@article{bipw11, + author = {Khanh Do Ba and + Piotr Indyk and + Eric Price and + David P. Woodruff}, + title = {Lower Bounds for Sparse Recovery}, + journal = {CoRR}, + volume = {abs/1106.0365}, + year = {2011}, + url = {http://arxiv.org/abs/1106.0365}, + timestamp = {Mon, 05 Dec 2011 18:04:39 +0100}, + biburl = {http://dblp.uni-trier.de/rec/bib/journals/corr/abs-1106-0365}, + bibsource = {dblp computer science bibliography, http://dblp.org} +} + +@inproceedings{pw11, + author = {Eric Price and + David P. Woodruff}, + title = {{(1} + eps)-Approximate Sparse Recovery}, + booktitle = {{IEEE} 52nd Annual Symposium on Foundations of Computer Science, {FOCS} + 2011, Palm Springs, CA, USA, October 22-25, 2011}, + pages = {295--304}, + year = {2011}, + crossref = {DBLP:conf/focs/2011}, + url = {http://dx.doi.org/10.1109/FOCS.2011.92}, + doi = {10.1109/FOCS.2011.92}, + timestamp = {Tue, 16 Dec 2014 09:57:24 +0100}, + biburl = {http://dblp.uni-trier.de/rec/bib/conf/focs/PriceW11}, + bibsource = {dblp computer science bibliography, http://dblp.org} +} + +@proceedings{DBLP:conf/focs/2011, + editor = {Rafail Ostrovsky}, + title = {{IEEE} 52nd Annual Symposium on Foundations of Computer Science, {FOCS} + 2011, Palm Springs, CA, USA, October 22-25, 2011}, + publisher = {{IEEE} Computer Society}, + year = {2011}, + url = {http://ieeexplore.ieee.org/xpl/mostRecentIssue.jsp?punumber=6108120}, + isbn = {978-1-4577-1843-4}, + timestamp = {Mon, 15 Dec 2014 18:48:45 +0100}, + biburl = {http://dblp.uni-trier.de/rec/bib/conf/focs/2011}, + bibsource = {dblp computer science bibliography, http://dblp.org} +} + +@inproceedings{pw12, + author = {Eric Price and + David P. 
Woodruff},
+  title = {Applications of the Shannon-Hartley theorem to data streams and sparse
+               recovery},
+  booktitle = {Proceedings of the 2012 {IEEE} International Symposium on Information
+               Theory, {ISIT} 2012, Cambridge, MA, USA, July 1-6, 2012},
+  pages = {2446--2450},
+  year = {2012},
+  crossref = {DBLP:conf/isit/2012},
+  url = {http://dx.doi.org/10.1109/ISIT.2012.6283954},
+  doi = {10.1109/ISIT.2012.6283954},
+  timestamp = {Mon, 01 Oct 2012 17:34:07 +0200},
+  biburl = {http://dblp.uni-trier.de/rec/bib/conf/isit/PriceW12},
+  bibsource = {dblp computer science bibliography, http://dblp.org}
+}
+
+@proceedings{DBLP:conf/isit/2012,
+  title = {Proceedings of the 2012 {IEEE} International Symposium on Information
+               Theory, {ISIT} 2012, Cambridge, MA, USA, July 1-6, 2012},
+  publisher = {{IEEE}},
+  year = {2012},
+  url = {http://ieeexplore.ieee.org/xpl/mostRecentIssue.jsp?punumber=6268627},
+  isbn = {978-1-4673-2580-6},
+  timestamp = {Mon, 01 Oct 2012 17:33:45 +0200},
+  biburl = {http://dblp.uni-trier.de/rec/bib/conf/isit/2012},
+  bibsource = {dblp computer science bibliography, http://dblp.org}
+}
+
+@article{Leskovec:2010,
+  author = {Jure Leskovec and
+               Deepayan Chakrabarti and
+               Jon M. Kleinberg and
+               Christos Faloutsos and
+               Zoubin Ghahramani},
+  title = {Kronecker Graphs: An Approach to Modeling Networks},
+  journal = {Journal of Machine Learning Research},
+  volume = {11},
+  pages = {985--1042},
+  year = {2010},
+  url = {http://doi.acm.org/10.1145/1756006.1756039},
+  doi = {10.1145/1756006.1756039},
+  timestamp = {Thu, 22 Apr 2010 13:26:26 +0200},
+  biburl = {http://dblp.uni-trier.de/rec/bib/journals/jmlr/LeskovecCKFG10},
+  bibsource = {dblp computer science bibliography, http://dblp.org}
+}
+
+@article{Holme:2002,
+  author = {Petter Holme and Beom Jun Kim},
+  title = {Growing scale-free networks with tunable clustering},
+  journal = {Physical Review E},
+  volume = {65},
+  issue = {2},
+  pages = {026107},
+  year = {2002}
+}
+
+
+@article{watts:1998,
+  Annote = {10.1038/30918},
+  Author = {Watts, Duncan J. 
and Strogatz, Steven H.}, + Date = {1998/06/04/print}, + Isbn = {0028-0836}, + Journal = {Nature}, + Number = {6684}, + Pages = {440--442}, + Read = {0}, + Title = {Collective dynamics of `small-world' networks}, + Url = {http://dx.doi.org/10.1038/30918}, + Volume = {393}, + Year = {1998}, +} + +@article{barabasi:2001, + author = {R{\'{e}}ka Albert and + Albert{-}L{\'{a}}szl{\'{o}} Barab{\'{a}}si}, + title = {Statistical mechanics of complex networks}, + journal = {CoRR}, + volume = {cond-mat/0106096}, + year = {2001}, + url = {http://arxiv.org/abs/cond-mat/0106096}, + timestamp = {Mon, 05 Dec 2011 18:05:15 +0100}, + biburl = {http://dblp.uni-trier.de/rec/bib/journals/corr/cond-mat-0106096}, + bibsource = {dblp computer science bibliography, http://dblp.org} +} + + +@article{gomezbalduzzi:2011, + author = {Manuel Gomez{-}Rodriguez and + David Balduzzi and + Bernhard Sch{\"{o}}lkopf}, + title = {Uncovering the Temporal Dynamics of Diffusion Networks}, + journal = {CoRR}, + volume = {abs/1105.0697}, + year = {2011}, + url = {http://arxiv.org/abs/1105.0697}, + timestamp = {Mon, 05 Dec 2011 18:05:23 +0100}, + biburl = {http://dblp.uni-trier.de/rec/bib/journals/corr/abs-1105-0697}, + bibsource = {dblp computer science bibliography, http://dblp.org} +} + +@article{Nowell08, + author = {Liben-Nowell, David and Kleinberg, Jon}, + biburl = {http://www.bibsonomy.org/bibtex/250b9b1ca1849fa9cb8bb92d6d9031436/mkroell}, + doi = {10.1073/pnas.0708471105}, + eprint = {http://www.pnas.org/content/105/12/4633.full.pdf+html}, + journal = {Proceedings of the National Academy of Sciences}, + keywords = {SNA graph networks}, + number = 12, + pages = {4633-4638}, + timestamp = {2008-10-09T10:32:56.000+0200}, + title = {{Tracing information flow on a global scale using Internet chain-letter data}}, + url = {http://www.pnas.org/content/105/12/4633.abstract}, + volume = 105, + year = 2008 +} + +@inproceedings{Leskovec07, + author = {Jure Leskovec and + Mary McGlohon and + Christos Faloutsos and + Natalie S. Glance and + Matthew Hurst}, + title = {Patterns of Cascading Behavior in Large Blog Graphs}, + booktitle = {Proceedings of the Seventh {SIAM} International Conference on Data + Mining, April 26-28, 2007, Minneapolis, Minnesota, {USA}}, + pages = {551--556}, + year = {2007}, + url = {http://dx.doi.org/10.1137/1.9781611972771.60}, + doi = {10.1137/1.9781611972771.60}, + timestamp = {Wed, 12 Feb 2014 17:08:15 +0100}, + biburl = {http://dblp.uni-trier.de/rec/bib/conf/sdm/LeskovecMFGH07}, + bibsource = {dblp computer science bibliography, http://dblp.org} +} + + +@inproceedings{AdarA05, + author = {Eytan Adar and + Lada A. Adamic}, + title = {Tracking Information Epidemics in Blogspace}, + booktitle = {2005 {IEEE} / {WIC} / {ACM} International Conference on Web Intelligence + {(WI} 2005), 19-22 September 2005, Compiegne, France}, + pages = {207--214}, + year = {2005}, + url = {http://dx.doi.org/10.1109/WI.2005.151}, + doi = {10.1109/WI.2005.151}, + timestamp = {Tue, 12 Aug 2014 16:59:16 +0200}, + biburl = {http://dblp.uni-trier.de/rec/bib/conf/webi/AdarA05}, + bibsource = {dblp computer science bibliography, http://dblp.org} +} + +@inproceedings{Kleinberg:00, + author = {Jon M. 
Kleinberg},
+  title = {The small-world phenomenon: an algorithm perspective},
+  booktitle = {Proceedings of the Thirty-Second Annual {ACM} Symposium on Theory
+               of Computing, May 21-23, 2000, Portland, OR, {USA}},
+  pages = {163--170},
+  year = {2000},
+  url = {http://doi.acm.org/10.1145/335305.335325},
+  doi = {10.1145/335305.335325},
+  timestamp = {Thu, 16 Feb 2012 12:06:08 +0100},
+  biburl = {http://dblp.uni-trier.de/rec/bib/conf/stoc/Kleinberg00},
+  bibsource = {dblp computer science bibliography, http://dblp.org}
+}
+
+@article{zhang2014,
+  title = {Confidence intervals for low dimensional parameters in high dimensional linear models},
+  author = {Zhang, Cun-Hui and Zhang, Stephanie S.},
+  journal = {Journal of the Royal Statistical Society: Series B (Statistical Methodology)},
+  volume = {76},
+  number = {1},
+  pages = {217--242},
+  year = {2014},
+  publisher = {Wiley Online Library}
+}
+
+@article{javanmard2014,
+  title = {Confidence intervals and hypothesis testing for high-dimensional regression},
+  author = {Javanmard, Adel and Montanari, Andrea},
+  journal = {The Journal of Machine Learning Research},
+  volume = {15},
+  number = {1},
+  pages = {2869--2909},
+  year = {2014},
+  publisher = {JMLR.org}
+}
