\documentclass[10pt]{article}
\usepackage{fullpage}
\usepackage[utf8]{inputenc}
\usepackage{amsmath,amsfonts,amsthm}
\usepackage[english]{babel}
\usepackage[capitalize, noabbrev]{cleveref}
\usepackage{algorithm}
\usepackage{algpseudocode}
\newenvironment{enumerate*}%
{\vspace{-2ex} \begin{enumerate} %
\setlength{\itemsep}{-1ex} \setlength{\parsep}{0pt}}%
{\end{enumerate}}
\newenvironment{itemize*}%
{\vspace{-2ex} \begin{itemize} %
\setlength{\itemsep}{-1ex} \setlength{\parsep}{0pt}}%
{\end{itemize}}
\newenvironment{description*}%
{\vspace{-2ex} \begin{description} %
\setlength{\itemsep}{-1ex} \setlength{\parsep}{0pt}}%
{\end{description}}
\DeclareMathOperator{\E}{\mathbb{E}}
\let\P\relax
\DeclareMathOperator{\P}{\mathbb{P}}
\newcommand{\ex}[1]{\E\left[#1\right]}
\newcommand{\prob}[1]{\P\left[#1\right]}
\newcommand{\inprod}[1]{\left\langle #1 \right\rangle}
\newcommand{\R}{\mathbf{R}}
\newcommand{\N}{\mathbf{N}}
\newcommand{\C}{\mathcal{C}}
\newcommand{\eps}{\varepsilon}
\newcommand{\cl}[1]{\text{\textbf{#1}}}
\newcommand{\eqdef}{\mathbin{\stackrel{\rm def}{=}}}
\newcommand{\llbracket}{[\![}
\newtheorem{theorem}{Theorem}
\newtheorem{lemma}{Lemma}
\algrenewcommand\algorithmicensure{\textbf{Output:}}
\algrenewcommand\algorithmicrequire{\textbf{Input:}}
\author{Thibaut Horel}
\title{CS 225 Problem Set 2 -- Solutions}
\begin{document}
\maketitle
Collaborator: Debmalya Mandal
\section*{Problem 2.9}
\paragraph{1.}
Let $\lambda$ be an eigenvalue of $M$ and let $x$ be an associated
eigenvector. Let $i^*\in\{1,\ldots, n\}$ be such that $|x_{i^*}|
= \|x\|_{\infty}$. Since $x$ is an eigenvector, we have:
\begin{displaymath}
\sum_{j=1}^n m_{i^*,j} x_j = \lambda x_{i^*}
\end{displaymath}
Since $x$ is non-zero, $x_{i^*}$ is also non-zero. Dividing both sides by
$x_{i^*}$ and taking absolute values, we obtain:
\begin{displaymath}
|\lambda| =
\left|\sum_{j=1}^n m_{i^*,j} \frac{x_j}{x_{i^*}}\right|
\leq \sum_{j=1}^n m_{i^*,j} \left|\frac{x_j}{x_{i^*}}\right|
\leq \sum_{j=1}^n m_{i^*, j} = 1
\end{displaymath}
where the first inequality uses the triangle inequality together with the
non-negativity of the entries of $M$, the second uses the definition of
$i^*$, and the last equality holds because the rows of $M$ sum to one.
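For concreteness (an illustration of ours, not part of the required
argument), consider the random walk on a single edge: its matrix
\begin{displaymath}
M = \begin{pmatrix} 0 & 1\\ 1 & 0\end{pmatrix}
\end{displaymath}
has eigenvalues $1$ (eigenvector $(1,1)$) and $-1$ (eigenvector $(1,-1)$),
both of absolute value at most 1, as claimed.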
\paragraph{2.} Assume $G$ is disconnected, so that it has at least two
connected components. We can then write $V = C_1\cup C_2$ with $C_1, C_2$
non-empty, $C_1\cap C_2 = \emptyset$ and $(C_1\times C_2)\cap E = \emptyset$.
Then $x^1 = \mathbf{1}_{C_1}$ (the indicator vector of $C_1$) and $x^2
= \mathbf{1}_{C_2}$ are eigenvectors of $M$ for the eigenvalue 1. For
example, for $x^1$:
\begin{displaymath}
\sum_{j=1}^n m_{i,j} x^1_j =
\begin{cases}
1&\text{if $i\in C_1$}\\
0&\text{otherwise}
\end{cases}
\end{displaymath}
where the first case follows from the fact that if $i\in C_1$, then $m_{i,j}
\neq 0$ implies $j\in C_1$ (there are no edges between $C_1$ and $C_2$), so
multiplying by $x^1_j$ preserves every non-zero term, and $\sum_{j=1}^n
m_{i,j} = 1$; the second case follows because if $i\in C_2$, then $m_{i,j}
= 0$ whenever $j\in C_1$.
Furthermore $x^1$ and $x^2$ are clearly orthogonal, so they span a vector space
of dimension 2. Hence the multiplicity of $1$ as an eigenvalue is at least 2.
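To make this concrete (again an illustration of ours), let $G$ consist of two
disjoint edges on $V = \{1,2,3,4\}$, with $C_1 = \{1,2\}$ and $C_2
= \{3,4\}$. Then
\begin{displaymath}
M = \begin{pmatrix} 0&1&0&0\\ 1&0&0&0\\ 0&0&0&1\\ 0&0&1&0\end{pmatrix},
\qquad M\mathbf{1}_{C_1} = \mathbf{1}_{C_1},
\qquad M\mathbf{1}_{C_2} = \mathbf{1}_{C_2},
\end{displaymath}
so $(1,1,0,0)$ and $(0,0,1,1)$ are two orthogonal eigenvectors for the
eigenvalue 1.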
\paragraph{} Let us now assume that $G$ is connected and consider an
eigenvector $x$ for the eigenvalue 1. Introduce the lazy walk matrix $N
= \frac{1}{2}(I + M)$: its entries are non-negative, its rows sum to one, and
$Nx = x$, hence $N^k x = x$ for all $k$. Furthermore, since $G$ is connected,
we have $N^n > 0$ (all the entries are positive): between any two vertices
there is a path of length at most $n-1$, which can be padded with self-loops
(the diagonal entries of $N$ are positive) into a walk of length exactly $n$.
Writing $a_{i,j}$ for the entries of $N^n$ and choosing $i^*\in
\{1,\ldots, n\}$ such that $|x_{i^*}| = \|x\|_{\infty}$, we have:
\begin{displaymath}
\sum_{j=1}^n a_{i^*,j}x_j = x_{i^*}
\end{displaymath}
Dividing both sides by $x_{i^*}$ (which is non-zero for the same reason as in
\textbf{1.}) and taking absolute values:
\begin{displaymath}
1 = \left|\sum_{j=1}^n a_{i^*,j}\frac{x_j}{x_{i^*}}\right|
\leq \sum_{j=1}^n a_{i^*,j}\left|\frac{x_j}{x_{i^*}}\right|
\leq \sum_{j=1}^n a_{i^*,j} = 1
\end{displaymath}
where we again used the triangle inequality, the definition of $i^*$, and the
fact that each row of $N^n$ sums to one (indeed, $N\mathbf{1} = \mathbf{1}$
where $\mathbf{1}$ is the all-one vector, so $N^n\mathbf{1} = \mathbf{1}$).
Hence the chain of inequalities is a chain of equalities. Since
$a_{i^*,j}\neq 0$ for all $j$, this implies that
$\left|\frac{x_j}{x_{i^*}}\right|=1$ for all $j$ and that all the ratios
$\frac{x_j}{x_{i^*}}$ have the same sign. Since the ratio equals $1$ for $j
= i^*$, this gives $x_j = x_{i^*}$ for all $j$. Hence $x = \alpha \mathbf{1}$
for some $\alpha\neq 0$.
We have just proved that $G$ connected implies that the multiplicity of $1$
is exactly $1$ (the eigenspace is spanned by $\mathbf{1}$). Combined with the
first paragraph, which shows that $G$ disconnected implies multiplicity at
least 2, this proves the equivalence we were asked to establish.
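As an illustration (an example of ours), take the $4$-cycle: its random-walk
matrix
\begin{displaymath}
M = \begin{pmatrix}
0 & \frac{1}{2} & 0 & \frac{1}{2}\\
\frac{1}{2} & 0 & \frac{1}{2} & 0\\
0 & \frac{1}{2} & 0 & \frac{1}{2}\\
\frac{1}{2} & 0 & \frac{1}{2} & 0
\end{pmatrix}
\end{displaymath}
has eigenvalues $1, 0, 0, -1$, and the eigenvalue $1$ is indeed simple, with
eigenspace spanned by $\mathbf{1}$.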
\paragraph{3.} Assume that $G$ is connected. Observe that $G$ is bipartite if
and only if $G^2$ (the graph whose random-walk matrix is $M^2$) is
disconnected: walks of even length never cross a bipartition, while if $G$ is
not bipartite, an odd cycle yields a walk of even length between any two
vertices. Using \textbf{2.}, we obtain the chain of equivalences:
\begin{align*}
G \text{ bipartite} &\Leftrightarrow G^2 \text{ disconnected}\\
&\Leftrightarrow 1 \text{ is an eigenvalue of } M^2 \text{ of multiplicity at
least } 2\\
&\Leftrightarrow \exists x\perp \mathbf{1},\; M^2x = x\\
&\Leftrightarrow \exists x\perp\mathbf{1},\; Mx = \pm x\\
&\Leftrightarrow \exists x\perp\mathbf{1},\; Mx = -x\\
&\Leftrightarrow -1 \text{ is an eigenvalue of } M.
\end{align*}
The fourth equivalence uses that the eigenvalues of $M^2$ are the squares of
the eigenvalues of $M$ (and, $M$ being symmetric, the eigenspace of $M^2$ for
the eigenvalue $1$ is the direct sum of the eigenspaces of $M$ for $1$ and
$-1$). The fifth uses that $x$ cannot be an eigenvector of $M$ for the
eigenvalue $1$: by \textbf{2.}, that eigenspace is spanned by $\mathbf{1}$,
and $x\perp\mathbf{1}$. The second equivalence also uses \textbf{2.}, applied
to the graph $G^2$.
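For example, for the $4$-cycle considered above (bipartite with parts
$\{1,3\}$ and $\{2,4\}$), the vector $x = (1,-1,1,-1)$ is orthogonal to
$\mathbf{1}$, and each vertex's two neighbors carry the value opposite to its
own, so $Mx = -x$ and $-1$ is indeed an eigenvalue.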
\paragraph{4.} First observe that $\max_{\|x\|_2 = 1} \inprod{Mx, x}
= \lambda_{\max}(M)$ for any symmetric matrix $M$. Indeed, using the spectral
theorem to write $M = P^T DP$ with $P$ orthogonal and $D
= \text{diag}(\lambda_1,\dots,\lambda_n)$, observing that $\|x\|_2
= 1\Leftrightarrow \|Px\|_2 = 1$, and setting $y = Px$, we have:
\begin{displaymath}
\max_{\|x\|_2 = 1} \inprod{Mx, x} = \max_{\|y\|_2=1} y^TDy = \max_{\|y\|_2
= 1} \sum_{i=1}^n\lambda_i y_i^2
\end{displaymath}
The right-hand side is at most $\lambda_{\max}(M)\sum_{i=1}^n y_i^2
= \lambda_{\max}(M)$, and this value is attained by taking $y_i = 1$ for a
coordinate $i$ such that $\lambda_i = \lambda_{\max}(M)$ and $y_j = 0$ for
$j\neq i$.
Now observe that the maximum of $\inprod{Mx, x}$ over unit vectors $x$
orthogonal to $\mathbf{1}$ is exactly the largest eigenvalue of $M$
restricted to the subspace orthogonal to $\mathbf{1}$ (restricting $M$ to
this subspace is legitimate since the subspace is invariant under $M$). But
since the eigenspaces of $M$ are orthogonal and $\mathbf{1}$ attains the
largest eigenvalue $1$, the largest eigenvalue of $M$ restricted to the
orthogonal complement of $\mathbf{1}$ is the second largest eigenvalue of
$M$. Hence:
\begin{displaymath}
\lambda_2(M) = \max_{\substack{\|x\|_2 = 1\\ x\perp\mathbf{1}}}
\inprod{Mx, x}
\end{displaymath}
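As a quick check (an example of ours), take $G = K_3$, the triangle: its
random-walk matrix is $M = \frac{1}{2}(J - I)$, where $J$ is the all-ones
matrix, with eigenvalues $1$ (on $\mathbf{1}$) and $-\frac{1}{2}$ (with
multiplicity 2, on the orthogonal complement of $\mathbf{1}$). For any unit
vector $x\perp\mathbf{1}$ we have $Jx = 0$, so
\begin{displaymath}
\inprod{Mx, x} = \frac{1}{2}\inprod{(J - I)x, x} = -\frac{1}{2}\|x\|_2^2
= -\frac{1}{2} = \lambda_2(M),
\end{displaymath}
in accordance with the identity above.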
\paragraph{} Let us now show the second identity given in the problem
statement.
\end{document}