| -rw-r--r-- | notes.bib | 87 |
| -rw-r--r-- | related.tex | 11 |
2 files changed, 97 insertions, 1 deletions
@@ -5,6 +5,25 @@
 publisher={Cambridge University Press}
 }
+@article{approximatemechanismdesign,
+  author = {Kobbi Nissim and
+            Rann Smorodinsky and
+            Moshe Tennenholtz},
+  title  = {Approximately Optimal Mechanism Design via Differential
+            Privacy},
+  year   = {2010},
+  ee     = {http://arxiv.org/abs/1004.2888}
+}
+
+@inproceedings{mcsherrytalwar,
+  author    = {Frank McSherry and
+               Kunal Talwar},
+  title     = {Mechanism Design via Differential Privacy},
+  booktitle = {Proc. FOCS},
+  year      = {2007}
+}
+
 @article{vandenberghe1998determinant,
 title={Determinant maximization with linear matrix inequality constraints},
 author={Vandenberghe, L. and Boyd, S. and Wu, S.P.},
@@ -530,3 +549,71 @@
 publisher={Springer Series in Statistics}
 }
+
+@article{chen:privacy-truthfulness,
+  author    = {Yiling Chen and
+               Stephen Chong and
+               Ian A. Kash and
+               Tal Moran and
+               Salil P. Vadhan},
+  title     = {Truthful Mechanisms for Agents that Value Privacy},
+  journal   = {CoRR},
+  volume    = {abs/1111.5472},
+  year      = {2011},
+  ee        = {http://arxiv.org/abs/1111.5472},
+  bibsource = {DBLP, http://dblp.uni-trier.de}
+}
+
+@inproceedings{roth-schoenebeck,
+  author    = {Roth, Aaron and Schoenebeck, Grant},
+  title     = {Conducting truthful surveys, cheaply},
+  booktitle = {Proceedings of the 13th ACM Conference on Electronic Commerce},
+  series    = {EC '12},
+  year      = {2012},
+  isbn      = {978-1-4503-1415-2},
+  location  = {Valencia, Spain},
+  pages     = {826--843},
+  numpages  = {18},
+  url       = {http://doi.acm.org/10.1145/2229012.2229076},
+  doi       = {10.1145/2229012.2229076},
+  acmid     = {2229076},
+  publisher = {ACM},
+  address   = {New York, NY, USA},
+  keywords  = {mechanism design, privacy}
+}
+
+@inproceedings{roth-liggett,
+  author    = {Katrina Ligett and
+               Aaron Roth},
+  title     = {{Take it or Leave it: Running a Survey when Privacy Comes
+               at a Cost}},
+  booktitle = {Proceedings of the 8th Workshop on Internet and Network Economics},
+  series    = {WINE '12},
+  pages     = {To appear},
+  year      = {2012}
+}
+
+@techreport{xiao:privacy-truthfulness,
+  author      = {David Xiao},
+  title       = {Is privacy compatible with truthfulness?},
+  institution = {{Cryptology ePrint Archive}},
+  number      = {2011/005},
+  year        = {2011}
+}
diff --git a/related.tex b/related.tex
index 2788478..f61abc4 100644
--- a/related.tex
+++ b/related.tex
@@ -13,7 +13,16 @@
 a truthful, $O(\log^3 n)$-approximate mechanism \cite{dobz2011-mechanisms}
 as well as a universally truthful, $O(\frac{\log n}{\log \log n})$-approximate
 mechanism for subadditive maximization \cite{bei2012budget}. Moreover, in a Bayesian
 setup, assuming a prior distribution among the agents' costs, there exists a truthful
 mechanism with a 768/512-approximation ratio \cite{bei2012budget}. %(in terms of expectations)
-\stratis{TODO: privacy discussion. Logdet objective. Should be one paragraph each.}
+A series of recent papers \cite{mcsherrytalwar,approximatemechanismdesign,xiao:privacy-truthfulness,chen:privacy-truthfulness} consider the related problem of retrieving data from an \textit{unverified} database: the buyer of the data cannot verify the data reported by individuals and therefore must incentivize them to report truthfully.
+McSherry and Talwar \cite{mcsherrytalwar} argue that \emph{differentially private} mechanisms also offer a form of \emph{approximate truthfulness}, as the mechanism's output is only slightly perturbed when an individual unilaterally changes her data value; as a result, reporting untruthfully can only increase an individual's utility by a small amount. Xiao \cite{xiao:privacy-truthfulness}, improving upon earlier work by Nissim et al.~\cite{approximatemechanismdesign}, constructs mechanisms that
+simultaneously achieve exact truthfulness as well as differential privacy. Eliciting private data through a \emph{survey} \cite{roth-liggett}, whereby individuals first decide whether to participate in the survey and then report their data,
+also falls under the unverified database setting \cite{xiao:privacy-truthfulness}. Our work differs from the above works in that it does not capture user utility through differential privacy; crucially, we also assume that the experiments conducted are \emph{tamper-proof}: individuals can misreport their costs but not their values.
+
+Our work is closest to Roth and Schoenebeck \cite{roth-schoenebeck}, who also consider how to sample individuals from a population to obtain an unbiased estimate of a reported value. The authors assume a prior on the joint distribution
+\stratis{to be continued}
+
+%\stratis{TODO: privacy discussion. Logdet objective. Should be one paragraph each.}
 \begin{comment}
 Two types of mechanisms: \emph{deterministic} and \emph{randomized}. For
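For context, the approximate-truthfulness claim in the added related.tex paragraph follows directly from the definition of differential privacy. A minimal sketch in standard notation (the symbols $M$, $\epsilon$, and $u$ are not part of the commit, and utilities are assumed normalized to $[0,1]$):

A randomized mechanism $M$ is $\epsilon$-differentially private if, for any two inputs $x, x'$ that differ in a single individual's report and any set of outcomes $S$,
\[
  \Pr[M(x) \in S] \;\le\; e^{\epsilon} \, \Pr[M(x') \in S].
\]
Hence, for an agent whose utility over outcomes satisfies $u(\cdot) \in [0,1]$, deviating from a truthful report $x$ to any $x'$ yields
\[
  \mathbb{E}[u(M(x'))] \;\le\; e^{\epsilon} \, \mathbb{E}[u(M(x))]
  \;\le\; \mathbb{E}[u(M(x))] + (e^{\epsilon} - 1),
\]
so an untruthful report improves expected utility by at most $e^{\epsilon} - 1 \approx \epsilon$ for small $\epsilon$, which is the sense in which truthful reporting is approximately dominant.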
