Diffstat (limited to 'results.tex')
-rw-r--r--  results.tex | 17 +++++++++++++++--
1 file changed, 15 insertions(+), 2 deletions(-)
diff --git a/results.tex b/results.tex
index 9dd5b46..2c0873d 100644
--- a/results.tex
+++ b/results.tex
@@ -57,7 +57,9 @@ Let $\Omega$ be a universe of elements of size $n$. For $k \in [n]$, let $D_k$ b
Let $f = (w_u)_{u \in \Omega}$ be an unknown additive function: $f(S) \equiv \sum_{u \in S} w_u$. How many (set, value) pairs $(S, f(S))$, where $S \sim D_k$, must be observed before we can optimize $f$, either exactly or to within a constant approximation factor, under a cardinality constraint $k$?
\end{problem}
+
\subsection{Inverting the system}
+\label{subsec:invert_the_system}
Note that from observations $(S_j, f(S_j))_{j \in [N]}$ we can form the system $A w = b$, where row $A_j$ of $A$ is the indicator vector of the set $S_j$ and $b_j \equiv f(S_j)$. From Corollary~\ref{cor:number_of_rows_needed}, it is easy to see that $(C_K+1)^2 \frac{n^3}{k(n - k)}$ rows are sufficient for $A$ to have rank $n$ with probability $1-e^{-c_k n}$. If $A$ has rank $n$, the system can be inverted in polynomial time, which lets us both learn and optimize $f$ under any cardinality constraint.
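+
+As a purely illustrative toy instance (the sets below are chosen for exposition and are not part of our data), take $n = 3$ and observed sets $S_1 = \{1,2\}$, $S_2 = \{2,3\}$, $S_3 = \{1,3\}$, so that the system $Aw = b$ reads
+\[
+\begin{pmatrix} 1 & 1 & 0 \\ 0 & 1 & 1 \\ 1 & 0 & 1 \end{pmatrix}
+\begin{pmatrix} w_1 \\ w_2 \\ w_3 \end{pmatrix}
+=
+\begin{pmatrix} f(S_1) \\ f(S_2) \\ f(S_3) \end{pmatrix}.
+\]
+This $A$ is invertible, so $w = A^{-1} b$ recovers every weight, and the optimum under cardinality constraint $k$ is simply the set of the $k$ largest weights.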
@@ -103,10 +105,21 @@ Assume that $N$ pairs of set-value $(S_j, f(S_j))_{j \in N}$ are observed, where
The following result is due to \cite{balcan2011learning}:
\begin{proposition}
-Let ${\cal F}$ be the class of non-negative, monotone, submodular functions over $Omega$. There is an algorithm that PMAC-learns ${\cal F}$ with approximation factor $\sqrt{n+1}$, by finding a function $\tilde f: S\mapsto \sqrt{\frac{1}{(n+1)z}w^T \chi(S)}$ such that, with high probability, XXX
+\label{prop:balcan_learned}
+Let ${\cal F}$ be the class of non-negative, monotone, submodular functions over $\Omega$. There is an algorithm that PMAC-learns ${\cal F}$ with approximation factor $\sqrt{n+1}$, by outputting a function $\tilde f: S\mapsto \sqrt{\frac{1}{(n+1)z}w^T \chi(S)}$.
+\end{proposition}
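+
+Here, PMAC-learning with approximation factor $\sqrt{n+1}$ means, following the definition in \cite{balcan2011learning}, that with high probability over the training sample the output $\tilde f$ satisfies $\tilde f(S) \le f(S) \le \sqrt{n+1}\,\tilde f(S)$ for all but an arbitrarily small fraction of sets $S$ drawn from the underlying distribution.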
+
+By adapting the argument of Section~\ref{subsec:invert_the_system}, we obtain the following result:
+
+\begin{proposition}
+Any non-negative monotone submodular function can be passively optimized under a cardinality constraint $k \in \mathbb{N}$, from samples drawn from $D_p$ for \emph{any} $p \in ]0, 1[$, with polynomially many samples, up to a factor $1/\sqrt{n+1}$.
\end{proposition}
-It follows that if we optimize $\tilde f$, which can easily be done by solving for the system $Ax = b^2$ where $(b^2)_i = (b_i)^2$, then we obtain a $(\sqrt{n+1})$-approximation algorithm to optimizing $f$ under cardinality constraint. Once again, we optimize under \emph{any} cardinality constraint.
+\begin{proof}
+We optimize $\tilde f$ from Proposition~\ref{prop:balcan_learned}, which can easily be done by solving the system $Ax = b^2$, where $(b^2)_i \equiv (b_i)^2$, exactly as in Section~\ref{subsec:invert_the_system}. Since the square root is monotone, maximizing $\tilde f$ under the cardinality constraint amounts to maximizing the additive function $S \mapsto x^T \chi(S)$, i.e.\ to selecting the $k$ elements with the largest recovered weights. As $\tilde f$ approximates $f$ within a factor $\sqrt{n+1}$, the resulting set optimizes $f$ up to a factor $1/\sqrt{n+1}$; as before, this holds under \emph{any} cardinality constraint.
+\end{proof}
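+
+For concreteness, here is a minimal sketch of the approximation argument, assuming the PMAC inequalities $\tilde f \le f \le \sqrt{n+1}\,\tilde f$ behind Proposition~\ref{prop:balcan_learned} hold on the two sets involved. Writing $S^\star$ for a maximizer of $f$ and $\tilde S$ for the returned maximizer of $\tilde f$ under the same cardinality constraint,
+\[
+f(\tilde S) \;\ge\; \tilde f(\tilde S) \;\ge\; \tilde f(S^\star) \;\ge\; \frac{1}{\sqrt{n+1}}\, f(S^\star).
+\]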
+
+
\subsection{Average weight method}