generated by bibbase.org

2020 (11)

Jayadev Acharya; Clément L. Canonne; and Himanshu Tyagi. Distributed Signal Detection under Communication Constraints. In volume 125 of Proceedings of Machine Learning Research, pages 41–63, 2020. PMLR.
[ACT20c] Applies the $\chi^2$-contraction lower bound framework of [ACT20a] to obtain lower bounds for the problem of Gaussian mean testing (the hypothesis testing version of mean estimation in the Gaussian Location Model) under communication constraints (noninteractive). Also provides upper bounds for both private- and public-coin protocols.
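
Concretely, a standard formalization of Gaussian mean testing (conventions on the separation parameter vary slightly across papers): each player holds one sample from $\mathcal{N}(\theta, I_d)$ and sends a constrained message, and the referee must distinguish

$$ H_0\colon\ \theta = 0 \qquad \text{vs.} \qquad H_1\colon\ \|\theta\|_2 \ge \varepsilon. $$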

@InProceedings{ACT20,
  title = {Distributed Signal Detection under Communication Constraints},
  author = {Acharya, Jayadev and Canonne, Cl{\'e}ment L. and Tyagi, Himanshu},
  pages = {41--63},
  year = {2020},
  volume = {125},
  series = {Proceedings of Machine Learning Research},
  publisher = {PMLR},
  pdf = {http://proceedings.mlr.press/v125/acharya20b/acharya20b.pdf},
  url = {http://proceedings.mlr.press/v125/acharya20b.html}
}

Jayadev Acharya; Clément L. Canonne; Yanjun Han; Ziteng Sun; and Himanshu Tyagi. Domain Compression and its Application to Randomness-Optimal Distributed Goodness-of-Fit. In 33rd Conference on Learning Theory, COLT 2020, volume 125 of Proceedings of Machine Learning Research, pages 3–40, 2020. PMLR.
[ACHST20] Similar noninteractive setting as [ACT20a,ACT20b,ACFST20], for general local information constraints. Focuses on the tight tradeoff between the amount of public randomness available and the sample complexity, for identity testing (goodness-of-fit).

@inproceedings{ACHST20,
  title = {Domain Compression and its Application to Randomness-Optimal Distributed Goodness-of-Fit},
  author = {Jayadev Acharya and Clément L Canonne and Yanjun Han and Ziteng Sun and Himanshu Tyagi},
  booktitle = {33rd Conference on Learning Theory, {COLT} 2020},
  pages = {3--40},
  year = {2020},
  volume = {125},
  series = {Proceedings of Machine Learning Research},
  publisher = {PMLR},
  pdf = {http://proceedings.mlr.press/v125/acharya20a/acharya20a.pdf},
  url = {http://proceedings.mlr.press/v125/acharya20a.html}
}

Sébastien Bubeck; Sitan Chen; and Jerry Li. Entanglement is Necessary for Optimal Quantum Property Testing. CoRR, abs/2004.07869, 2020. To appear in FOCS'20.
[BCL20] Considers the task of testing whether a quantum state is maximally mixed vs. far from it in trace distance, given "local measurements" (no entanglement). Establishes a separation between non-adaptive (noninteractive) and adaptive (sequentially interactive) algorithms for this task. Even though this question does not, per se, fall under the same setting as distributed uniformity testing and the works are independent, the ideas bear resemblance to the $\chi^2$-contraction lower bound framework of [ACT20a].

@article{BCL20,
  author = {S{\'{e}}bastien Bubeck and Sitan Chen and Jerry Li},
  title = {Entanglement is Necessary for Optimal Quantum Property Testing},
  journal = {CoRR},
  volume = {abs/2004.07869},
  year = {2020},
  note = {To appear in FOCS'20.},
  url = {https://arxiv.org/abs/2004.07869}
}

Leighton P. Barnes; Wei-Ning Chen; and Ayfer Özgür. Fisher information under local differential privacy. CoRR, abs/2005.10783, 2020. Accepted to the IEEE Journal on Selected Areas in Information Theory (JSAIT).

[BCÖ20] Provides lower bounds for parameter estimation under $\ell_2$ loss for interactive protocols (blackboard model) under local privacy, and instantiates them to obtain tight bounds for Gaussian mean estimation, (sparse) Bernoulli mean estimation, and discrete distribution estimation. As in [BHÖ19], the lower bound framework is based on a Cramér–Rao/van Trees-type approach.

@article{BCO20,
  author = {Leighton P. Barnes and Wei{-}Ning Chen and Ayfer {\"{O}}zg{\"{u}}r},
  title = {Fisher information under local differential privacy},
  journal = {CoRR},
  volume = {abs/2005.10783},
  url = {https://arxiv.org/abs/2005.10783},
  year = {2020},
  note = {Accepted to the IEEE Journal on Selected Areas in Information Theory (JSAIT).}
}

Jayadev Acharya; Clément L. Canonne; and Himanshu Tyagi. General lower bounds for interactive high-dimensional estimation under information constraints. CoRR, abs/2010.06562, 2020.
[ACT20d] Abstracts and generalizes the lower bound framework of [ACLST20] to provide "plug-and-play" lower bounds for parameter estimation with sequentially interactive protocols under general information constraints. Instantiates them to get lower bounds for $\ell_p$ losses in the cases of discrete distribution estimation, Gaussian mean estimation, and product Bernoulli mean estimation; and complements them with upper bounds.

@article{ACT20:HD,
  author = {Jayadev Acharya and Cl{\'{e}}ment L. Canonne and Himanshu Tyagi},
  title = {General lower bounds for interactive high-dimensional estimation under information constraints},
  journal = {CoRR},
  volume = {abs/2010.06562},
  year = {2020},
  url = {https://arxiv.org/abs/2010.06562}
}

Jayadev Acharya; Clément L. Canonne; and Himanshu Tyagi. Inference under Information Constraints I: Lower Bounds from Chi-Square Contraction. IEEE Transactions on Information Theory, 2020. Conference version in COLT'19.
[ACT20a] Provides a unified lower bound framework to derive sample complexity lower bounds for noninteractive protocols under general local information constraints, captured by a characterization of $\\mathcal{W}$ in terms of various norms of a p.s.d. matrix $H(W)$.
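
For intuition, the engine behind this framework is that a constrained channel contracts chi-square divergence; as a reminder of the basic inequality (which the paper's $H(W)$ machinery refines for families of perturbed distributions):

$$ \chi^2(p \,\|\, q) = \sum_x \frac{(p(x) - q(x))^2}{q(x)}, \qquad \chi^2(pW \,\|\, qW) \le \chi^2(p \,\|\, q) \quad \text{for every channel } W, $$

where $pW$ denotes the distribution of the message produced by $W$ on an input drawn from $p$. Quantifying how much the left-hand side must shrink, uniformly over $W \in \mathcal{W}$, is what yields the sample complexity lower bounds.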

@article{ACT19:LB,
  title = {Inference under Information Constraints {I}: Lower Bounds from Chi-Square Contraction},
  author = {Acharya, Jayadev and Canonne, Cl{\'{e}}ment L. and Tyagi, Himanshu},
  journal = {IEEE Transactions on Information Theory},
  year = {2020},
  note = {Conference version in {COLT'19}},
  url = {https://arxiv.org/abs/1812.11476}
}

Jayadev Acharya; Clément L. Canonne; and Himanshu Tyagi. Inference under Information Constraints II: Communication Constraints and Shared Randomness. IEEE Transactions on Information Theory, 2020. Conference version in ICML'19.
[ACT20b] Focuses on the communication constraints ($\\ell$ bits per player), and provides public- and private-coin protocols for identity testing, and private-coin protocols for learning which match the lower bounds from [ACT20a].

@article{ACT19:UB,
  title = {Inference under Information Constraints {II}: Communication Constraints and Shared Randomness},
  author = {Acharya, Jayadev and Canonne, Cl{\'{e}}ment L. and Tyagi, Himanshu},
  journal = {IEEE Transactions on Information Theory},
  year = {2020},
  note = {Conference version in {ICML'19}},
  url = {https://arxiv.org/abs/1905.08302}
}

Jayadev Acharya; Clément L. Canonne; Cody Freitag; Ziteng Sun; and Himanshu Tyagi. Inference under Information Constraints III: Local Privacy Constraints. 2020. Accepted to the IEEE Journal on Selected Areas in Information Theory (JSAIT). Preprint available at arXiv:abs/1808.02174; conference version in AISTATS'19.
[ACFST20] Focuses on the local privacy constraints (ρ-LDP), and provides public- and private-coin protocols for identity testing, and private-coin protocols for learning which match the lower bounds from [ACT20a]. Also contains lower bounds for independence testing under LDP.

@misc{ACFST19,
  author = {Jayadev Acharya and Cl{\'{e}}ment L. Canonne and Cody Freitag and Ziteng Sun and Himanshu Tyagi},
  title = {Inference under Information Constraints {III}: Local Privacy Constraints},
  note = {Accepted to the IEEE Journal on Selected Areas in Information Theory (JSAIT). Preprint available at arXiv:abs/1808.02174; Conference version in {AISTATS'19}},
  year = {2020}
}

Jayadev Acharya; Clément L. Canonne; Yuhan Liu; Ziteng Sun; and Himanshu Tyagi. Interactive Inference under Information Constraints. CoRR, abs/2007.10976, 2020.
[ACLST20] Focuses on density estimation (learning) and goodness-of-fit (identity testing) of discrete distributions with regard to total variation ($\\ell_1$ metric), for sequentially interactive protocols under general local information constraints. Implies tight bounds for those problems under LDP and communication constraints.

@article{ACLST20,
  author = {Jayadev Acharya and Cl{\'{e}}ment L. Canonne and Yuhan Liu and Ziteng Sun and Himanshu Tyagi},
  title = {Interactive Inference under Information Constraints},
  journal = {CoRR},
  volume = {abs/2007.10976},
  year = {2020},
  url = {https://arxiv.org/abs/2007.10976},
  archivePrefix = {arXiv},
  eprint = {2007.10976}
}

Thomas B. Berrett; and Cristina Butucea. Locally private non-asymptotic testing of discrete distributions is faster using interactive mechanisms. CoRR, abs/2005.12601, 2020. To appear in NeurIPS'20.
[BB20] Obtains, among others, a tight lower bound for identity testing (goodness-of-fit) under local privacy. This lower bound applies to sequentially interactive protocols, and is slightly more general than those of [AJM20,ACLST20] for the same problem, in that it provides an explicit dependence on the reference distribution $\\mathbf{q}$ (while the other two works focus on the worst-case $\\mathbf{q}$, i.e., the uniform distribution).

@article{BB20,
  title = {Locally private non-asymptotic testing of discrete distributions is faster using interactive mechanisms},
  author = {Thomas B. Berrett and Cristina Butucea},
  journal = {CoRR},
  year = {2020},
  volume = {abs/2005.12601},
  note = {To appear in {NeurIPS'20}.}
}

Kareem Amin; Matthew Joseph; and Jieming Mao. Pan-Private Uniformity Testing. In 33rd Conference on Learning Theory, COLT 2020, volume 125 of Proceedings of Machine Learning Research, pages 183–218, 2020. PMLR.
[AJM20] Obtains, among others, tight lower bounds for uniformity testing (goodness-of-fit) under local privacy and pan-privacy (another version of differential privacy, "in-between" local and central DP) constraints. Those lower bounds apply to sequentially interactive protocols, and match the upper bounds from [ACFST20] (achieved by noninteractive, public-coin protocols).

@inproceedings{AJM20,
  title = {Pan-Private Uniformity Testing},
  author = {Kareem Amin and Matthew Joseph and Jieming Mao},
  booktitle = {33rd Conference on Learning Theory, {COLT} 2020},
  pages = {183--218},
  year = {2020},
  volume = {125},
  series = {Proceedings of Machine Learning Research},
  publisher = {PMLR},
  pdf = {http://proceedings.mlr.press/v125/amin20a/amin20a.pdf},
  url = {http://proceedings.mlr.press/v125/amin20a.html}
}

2019 (4)

Uri Meir; Dor Minzer; and Rotem Oshman. Can Distributed Uniformity Testing Be Local? In 2019 ACM Symposium on Principles of Distributed Computing, PODC 2019, pages 228–237, 2019. ACM.
[MMO19] Considers a similar setting to [FMO18], but allowing $\ell$ bits per user, and also arbitrary aggregation rules (i.e., in this case, the same public-coin SMP setting as in [ACT20b], but allowing for several samples per user and trying to minimize this number). The proofs rely on Boolean Fourier analysis, seeing the decision rule as an $n\ell$-bit Boolean function.

@inproceedings{MMO19,
  author = {Uri Meir and Dor Minzer and Rotem Oshman},
  title = {Can Distributed Uniformity Testing Be Local?},
  booktitle = {2019 {ACM} Symposium on Principles of Distributed Computing, {PODC} 2019},
  pages = {228--237},
  publisher = {{ACM}},
  year = {2019}
}

Ilias Diakonikolas; Themis Gouleakis; Daniel M. Kane; and Sankeerth Rao. Communication and Memory Efficient Testing of Discrete Distributions. In 32nd Conference on Learning Theory, COLT 2019, volume 99 of Proceedings of Machine Learning Research, pages 1070–1106, 2019. PMLR.
[DGKR19] Considers global communication constraints, in the blackboard model: the communication bound is on the total communication over all users, and as such the results are incomparable to works on local constraints, though some of the techniques can carry over. Establishes tight or near-tight bounds for identity testing of discrete distributions; the techniques also apply to testing under memory constraints.

@inproceedings{DGKR19,
  title = {Communication and Memory Efficient Testing of Discrete Distributions},
  author = {Diakonikolas, Ilias and Gouleakis, Themis and Kane, Daniel M. and Rao, Sankeerth},
  booktitle = {32nd Conference on Learning Theory, {COLT} 2019},
  pages = {1070--1106},
  year = {2019},
  volume = {99},
  series = {Proceedings of Machine Learning Research},
  publisher = {PMLR},
  pdf = {http://proceedings.mlr.press/v99/diakonikolas19a/diakonikolas19a.pdf}
}

Leighton P. Barnes; Yanjun Han; and Ayfer Özgür. Fisher Information for Distributed Estimation under a Blackboard Communication Protocol. In ISIT, pages 2704–2708, 2019. IEEE.
[BHÖ19] Provides general lower bounds for parameter estimation (and, in some cases, nonparametric density estimation) under $\ell_2$ loss for interactive protocols (blackboard model) under communication constraints, and instantiates them to obtain tight bounds for various statistical models such as Gaussian, product Bernoulli, and discrete distribution estimation. At its core, the lower bounds rely on a Cramér–Rao/van Trees-type approach, which leaves the (trace of the) Fisher information matrix as the quantity to analyze to get lower bounds.

@inproceedings{BHO19,
  author = {Leighton P. Barnes and Yanjun Han and Ayfer {\"{O}}zg{\"{u}}r},
  title = {Fisher Information for Distributed Estimation under a Blackboard Communication Protocol},
  booktitle = {{ISIT}},
  pages = {2704--2708},
  publisher = {{IEEE}},
  year = {2019}
}

John Duchi; and Ryan Rogers. Lower Bounds for Locally Private Estimation via Communication Complexity. In 32nd Conference on Learning Theory, COLT 2019, volume 99 of Proceedings of Machine Learning Research, pages 1161–1191, 2019. PMLR.

[DR19] Develops a communication-complexity-based methodology for lower bounds under LDP constraints in the blackboard communication model. Obtains lower bounds for mean estimation of Bernoulli products under general $\ell_p$ losses, as well as tight bounds for mean estimation of Gaussian and sparse Gaussian distributions under the $\ell_2$ loss.

@inproceedings{DR19,
  title = {Lower Bounds for Locally Private Estimation via Communication Complexity},
  author = {Duchi, John and Rogers, Ryan},
  booktitle = {32nd Conference on Learning Theory, {COLT} 2019},
  pages = {1161--1191},
  year = {2019},
  volume = {99},
  series = {Proceedings of Machine Learning Research},
  publisher = {PMLR}
}

2018 (4)

Yanjun Han; Pritam Mukherjee; Ayfer Özgür; and Tsachy Weissman. Distributed Statistical Estimation of High-Dimensional and Non-parametric Distributions. In 2018 IEEE International Symposium on Information Theory (ISIT'18), pages 506–510, 2018.
[HMÖW18] Lower bounds on distributed estimation (in total variation distance) of several families of distributions under communication constraints. Results apply to noninteractive protocols (a gap in the argument prevents the extension to interactive ones).

@inproceedings{HMOW-ISIT18,
  author = {Yanjun Han and Pritam Mukherjee and Ayfer {{\"O}zg{\"u}r} and Tsachy Weissman},
  title = {Distributed Statistical Estimation of High-Dimensional and Non-parametric Distributions},
  booktitle = {2018 IEEE International Symposium on Information Theory (ISIT'18)},
  year = {2018},
  pages = {506--510}
}

Orr Fischer; Uri Meir; and Rotem Oshman. Distributed Uniformity Testing. In 2018 ACM Symposium on Principles of Distributed Computing, PODC 2018, pages 455–464, 2018. ACM.
[FMO18] Studies distributed uniformity testing in a setting where each user observes multiple samples, and communicates a single bit to the referee, who is required to use a simple aggregation rule, e.g., AND or a threshold. The goal is then to minimize the number of samples required per user. Also contains results on uniformity testing in other distributed models, such as the CONGEST or LOCAL models.
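
For a concrete feel for this model, here is a minimal sketch of a one-bit, threshold-aggregation protocol in this spirit (an illustration only, not the paper's tester; the parameters k, eps, n are hypothetical). It uses the standard collision facts: under the uniform distribution over $[k]$ the collision probability of two samples is $1/k$, while any $p$ that is $\varepsilon$-far from uniform in total variation has collision probability $\|p\|_2^2 \ge (1+4\varepsilon^2)/k$.

    import random

    def user_bit(x1, x2):
        # One bit per user: do my two samples collide?
        return int(x1 == x2)

    def referee(bits, k, eps):
        # Threshold aggregation rule: E[bit] = 1/k under uniform,
        # E[bit] >= (1 + 4*eps**2)/k when eps-far from uniform; threshold in between.
        return "far" if sum(bits) / len(bits) > (1 + 2 * eps**2) / k else "uniform"

    # Toy run with the uniform distribution over [k]: each of n users draws two samples.
    k, eps, n = 50, 0.3, 100_000
    bits = [user_bit(random.randrange(k), random.randrange(k)) for _ in range(n)]
    print(referee(bits, k, eps))  # "uniform" with high probability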

@inproceedings{FMO18,
  author = {Orr Fischer and Uri Meir and Rotem Oshman},
  title = {Distributed Uniformity Testing},
  booktitle = {2018 {ACM} Symposium on Principles of Distributed Computing, {PODC} 2018},
  pages = {455--464},
  publisher = {{ACM}},
  year = {2018}
}

Yanjun Han; Ayfer Özgür; and Tsachy Weissman. Geometric Lower Bounds for Distributed Parameter Estimation under Communication Constraints. In 31st Conference on Learning Theory, COLT 2018, volume 75 of Proceedings of Machine Learning Research, pages 3163–3188, 2018. PMLR. See updated version, https://arxiv.org/abs/1802.08417v3 (2020).
[HÖW18] Lower bounds on distributed parameter estimation under the $\ell_2$ loss for several families of distributions, including high-dimensional ones, under communication constraints. Results in the latest version, using techniques similar to those of [ACLST20,ACT20d], extend to the blackboard model and include $\ell_p$ losses.

@inproceedings{HOW18,
  author = {Yanjun Han and Ayfer {\"{O}}zg{\"{u}}r and Tsachy Weissman},
  title = {Geometric Lower Bounds for Distributed Parameter Estimation under Communication Constraints},
  booktitle = {31st Conference on Learning Theory, {COLT} 2018},
  series = {Proceedings of Machine Learning Research},
  volume = {75},
  pages = {3163--3188},
  publisher = {{PMLR}},
  year = {2018},
  note = {See updated version, \url{https://arxiv.org/abs/1802.08417v3} (2020).}
}

John C. Duchi; Michael I. Jordan; and Martin J. Wainwright. Minimax optimal procedures for locally private estimation. J. Amer. Statist. Assoc., 113(521): 182–201, 2018.
[DJW18] Results and techniques similar to those of [DJW13].

@article{DJW17,
  author = {Duchi, John C. and Jordan, Michael I. and Wainwright, Martin J.},
  title = {Minimax optimal procedures for locally private estimation},
  journal = {J. Amer. Statist. Assoc.},
  fjournal = {Journal of the American Statistical Association},
  volume = {113},
  year = {2018},
  number = {521},
  pages = {182--201},
  issn = {0162-1459},
  mrclass = {62C20 (62B10 62D05 62G07 62J12)},
  mrnumber = {3803452}
}

2017 (1)

Vitaly Feldman. A General Characterization of the Statistical Query Complexity. In Satyen Kale and Ohad Shamir, editors, volume 65 of Proceedings of Machine Learning Research, pages 785–830, Amsterdam, Netherlands, 07–10 Jul 2017. PMLR.
[Feldman17] Provides a characterization of SQ learning in terms of a new "statistical dimension," to be seen in view of the poly-factor equivalence between SQ learning and LDP-constrained/memory-constrained learning (e.g., [KLNRS11,BDD98]). Includes applications to memory-limited streaming and communication-limited learning.
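
As a reminder of the model being characterized (the standard definition, not specific to this paper): an SQ algorithm never sees raw samples, and accesses the unknown distribution $p$ only through an oracle which, on a query $\phi \colon \mathcal{X} \to [-1,1]$ with tolerance $\tau$, returns some value $v$ satisfying

$$ \big| v - \mathbb{E}_{X \sim p}[\phi(X)] \big| \le \tau, $$

so lower bounds count how many such queries any algorithm must make.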

@inproceedings{Feldman17,
  title = {A General Characterization of the Statistical Query Complexity},
  author = {Vitaly Feldman},
  pages = {785--830},
  year = {2017},
  editor = {Satyen Kale and Ohad Shamir},
  volume = {65},
  series = {Proceedings of Machine Learning Research},
  address = {Amsterdam, Netherlands},
  month = {07--10 Jul},
  publisher = {PMLR},
  pdf = {http://proceedings.mlr.press/v65/feldman17c/feldman17c.pdf},
  url = {http://proceedings.mlr.press/v65/feldman17c.html}
}

2016 (2)

Mark Braverman; Ankit Garg; Tengyu Ma; Huy L. Nguyen; and David P. Woodruff. Communication lower bounds for statistical estimation problems via a distributed data processing inequality. In Symposium on Theory of Computing Conference, STOC'16, pages 1011–1020, 2016. ACM.
[BGMNW16] Significantly generalizes [GMN14], extending the lower bounds to the blackboard model, and the upper bounds to the SMP model. Includes tight results for sparse mean estimation. The lower bounds are obtained via a combination of strong data processing inequalities (SDPI) and the \"cut-and-paste\" property of Hellinger distance.

@inproceedings{BGMNW16,
  author = {Mark Braverman and Ankit Garg and Tengyu Ma and Huy L. Nguyen and David P. Woodruff},
  title = {Communication lower bounds for statistical estimation problems via a distributed data processing inequality},
  booktitle = {Symposium on Theory of Computing Conference, {STOC'16}},
  pages = {1011--1020},
  publisher = {{ACM}},
  year = {2016}
}

Jacob Steinhardt; Gregory Valiant; and Stefan Wager. Memory, Communication, and Statistical Queries. In Vitaly Feldman, Alexander Rakhlin, and Ohad Shamir, editors, volume 49 of Proceedings of Machine Learning Research, pages 1490–1516, Columbia University, New York, New York, USA, 23–26 Jun 2016. PMLR.
[SVW16] Studies memory- and communication-constrained learning, and provides a general framework (based on statistical query (SQ) learning) to obtain lower bounds against those. Note that this allows one to prove exponential separations for some problems (such as learning parities), but the reductions inherently lose polynomial factors.

@InProceedings{SVW16,
  title = {Memory, Communication, and Statistical Queries},
  author = {Jacob Steinhardt and Gregory Valiant and Stefan Wager},
  pages = {1490--1516},
  year = {2016},
  editor = {Vitaly Feldman and Alexander Rakhlin and Ohad Shamir},
  volume = {49},
  series = {Proceedings of Machine Learning Research},
  address = {Columbia University, New York, New York, USA},
  month = {23--26 Jun},
  publisher = {PMLR},
  url = {http://proceedings.mlr.press/v49/steinhardt16.html}
}

2014 (2)

Ohad Shamir. Fundamental limits of online and distributed algorithms for statistical learning and estimation. In Advances in Neural Information Processing Systems 27, NeurIPS'14, pages 163–171, 2014.
[Sha14] Considers estimation tasks under various information constraints, such as memory or communication. Establishes results for sequentially interactive protocols for those problems by proving a lower bound on the \"hide-and-seek\" problem, that is, the mean estimation problem for high-dimensional product Bernoulli distributions with 1-sparse mean vectors.

@inproceedings{Shamir14,
  title = {Fundamental limits of online and distributed algorithms for statistical learning and estimation},
  author = {Shamir, Ohad},
  booktitle = {Advances in Neural Information Processing Systems 27, {NeurIPS'14}},
  pages = {163--171},
  year = {2014}
}

Ankit Garg; Tengyu Ma; and Huy L. Nguyen. On Communication Cost of Distributed Statistical Estimation and Dimensionality. In Advances in Neural Information Processing Systems 27, NeurIPS'14, pages 2726–2734, 2014.
[GMN14] Uses strong data processing inequalities (SDPI) to obtain lower bounds for simultaneous message passing (SMP) protocols for various settings, specifically Gaussian mean estimation under $\ell_2$ loss. Also includes matching upper bounds in the blackboard model, which can be generalized to $\ell_p$ in the sequentially interactive model.
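
For reference, the generic inequality (standard form; see the papers for the exact distributed variants): for a Markov chain $U \to X \to Y$ with channel $W = P_{Y|X}$,

$$ I(U;Y) \;\le\; \eta_{\mathrm{KL}}(W)\, I(U;X), \qquad \eta_{\mathrm{KL}}(W) = \sup_{p \neq q} \frac{D(pW \,\|\, qW)}{D(p \,\|\, q)} \in [0,1]. $$

In the distributed application the channel with a nontrivial constant is the one from the parameter to a player's sample (strictly below 1 for, e.g., the Gaussian location model), so each player's message carries only a constant fraction of its communication budget as information about the parameter.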

@inproceedings{GMN14,
  author = {Ankit Garg and Tengyu Ma and Huy L. Nguyen},
  title = {On Communication Cost of Distributed Statistical Estimation and Dimensionality},
  booktitle = {Advances in Neural Information Processing Systems 27, {NeurIPS'14}},
  pages = {2726--2734},
  year = {2014}
}

2013 (2)

Yuchen Zhang; John Duchi; Michael I. Jordan; and Martin J. Wainwright. Information-theoretic lower bounds for distributed statistical estimation with communication constraints. In Advances in Neural Information Processing Systems 26, NeurIPS'13, pages 2328–2336, 2013.
[ZDJW13] and [DJWZ14] Use strong data processing inequalities (SDPI) to obtain lower bounds for simultaneous message passing (SMP) protocols under communication constraints, under $\ell_2$ loss, for various estimation problems, including Gaussian and Bernoulli mean estimation. Also include some results for interactive protocols.

@inproceedings{ZDJW13,
  title = {Information-theoretic lower bounds for distributed statistical estimation with communication constraints},
  author = {Zhang, Yuchen and Duchi, John and Jordan, Michael I. and Wainwright, Martin J.},
  booktitle = {Advances in Neural Information Processing Systems 26, {NeurIPS'13}},
  pages = {2328--2336},
  year = {2013},
  url = {https://arxiv.org/abs/1405.0782}
}

John C. Duchi; Michael I. Jordan; and Martin J. Wainwright. Local Privacy and Statistical Minimax Rates. In 54th Annual IEEE Symposium on Foundations of Computer Science, FOCS 2013, pages 429–438, 2013. IEEE Computer Society. Full version at https://arxiv.org/abs/1302.3203.
[DJW13] Lower bounds on parameter estimation for several families of distributions under LDP constraints, via strong data processing inequalities (SDPI) and a locally private version of Le Cam, Fano, and Assouad's lemmata. Results apply to the one-dimensional setting, and in high dimensions for noninteractive protocols.

@inproceedings{DJW13:FOCS,
  author = {John C. Duchi and Michael I. Jordan and Martin J. Wainwright},
  title = {Local Privacy and Statistical Minimax Rates},
  booktitle = {54th Annual {IEEE} Symposium on Foundations of Computer Science, {FOCS} 2013},
  pages = {429--438},
  publisher = {{IEEE} Computer Society},
  year = {2013},
  note = {Full version at \url{https://arxiv.org/abs/1302.3203}.}
}

2011 (1)

Shiva Prasad Kasiviswanathan; Homin K. Lee; Kobbi Nissim; Sofya Raskhodnikova; and Adam Smith. What can we learn privately? SIAM J. Comput., 40(3): 793–826, 2011.
[KLNRS11] Provides the first connection (and poly-factor equivalence) between learning under local privacy (LDP) constraints and statistical query (SQ) learning.
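
For context on the local-privacy side of this equivalence, the canonical $\varepsilon$-LDP primitive is $k$-ary randomized response; a minimal self-contained sketch (illustrative, not the paper's construction):

    import math
    import random
    from collections import Counter

    def randomized_response(x, k, eps):
        # Report the true value x in {0, ..., k-1} with probability
        # e^eps / (e^eps + k - 1), else a uniformly random *other* value.
        # Any two inputs give output probabilities within a factor e^eps,
        # i.e., this satisfies eps-local differential privacy.
        if random.random() < math.exp(eps) / (math.exp(eps) + k - 1):
            return x
        y = random.randrange(k - 1)
        return y if y < x else y + 1

    def debias(reports, k, eps):
        # Unbiased estimate of the input distribution from the noisy reports.
        p_true = math.exp(eps) / (math.exp(eps) + k - 1)
        p_other = (1 - p_true) / (k - 1)
        n, freq = len(reports), Counter(reports)
        return [(freq[j] / n - p_other) / (p_true - p_other) for j in range(k)]

    # Toy run: everyone holds value 0; the debiased estimate of p[0] is close to 1.
    reports = [randomized_response(0, 10, 1.0) for _ in range(100_000)]
    print(debias(reports, 10, 1.0)[0])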

@article{KLNRS11,
  author = {Kasiviswanathan, Shiva Prasad and Lee, Homin K. and Nissim, Kobbi and Raskhodnikova, Sofya and Smith, Adam},
  title = {What can we learn privately?},
  journal = {SIAM J. Comput.},
  fjournal = {SIAM Journal on Computing},
  volume = {40},
  year = {2011},
  number = {3},
  pages = {793--826},
  issn = {0097-5397},
  mrclass = {68Q32},
  mrnumber = {2823508},
  mrreviewer = {Martin Anthony},
  doi = {10.1137/090756090},
  url = {https://doi.org/10.1137/090756090}
}

1998 (1)

Shai Ben-David; and Eli Dichterman. Learning with restricted focus of attention. J. Comput. System Sci., 56(3): 277–298, 1998.
[BDD98] Provides the first connection (and poly-factor equivalence) between learning with communication constraints (wRFA: "restricted focus of attention") and statistical query (SQ) learning.

@article{BenDavidD98,
  author = {Ben-David, Shai and Dichterman, Eli},
  title = {Learning with restricted focus of attention},
  journal = {J. Comput. System Sci.},
  fjournal = {Journal of Computer and System Sciences},
  volume = {56},
  year = {1998},
  number = {3},
  pages = {277--298},
  issn = {0022-0000},
  mrclass = {68Q32 (68T05)},
  mrnumber = {1633977},
  doi = {10.1006/jcss.1998.1569},
  url = {https://doi.org/10.1006/jcss.1998.1569}
}

1995 (1)

Richard D. Gill; and Boris Y. Levit. Applications of the Van Trees inequality: a Bayesian Cramér–Rao bound. Bernoulli, 1(1-2): 59–79, 1995.
[GL95] Provides a good and self-contained primer and derivation of the van Trees inequality and its uses.
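
In its scalar form (see the paper for regularity conditions and the multivariate statement): for a prior $\pi$ on $\theta$ and any estimator $\hat{\theta}(X)$,

$$ \mathbb{E}\big[(\hat{\theta}(X) - \theta)^2\big] \;\ge\; \frac{1}{\mathbb{E}_{\pi}[I_X(\theta)] + I(\pi)}, \qquad I(\pi) = \int \frac{\pi'(\theta)^2}{\pi(\theta)}\, \mathrm{d}\theta, $$

where $I_X(\theta)$ is the Fisher information of the model and the expectation on the left is over both $\theta \sim \pi$ and $X$. In the distributed arguments of [BHÖ19,BCÖ20], $I_X$ is replaced by the Fisher information of the constrained messages, which is then bounded from above.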

@article{GillR95,
  author = {Gill, Richard D. and Levit, Boris Y.},
  title = {Applications of the {V}an {T}rees inequality: a {B}ayesian {C}ram\'{e}r-{R}ao bound},
  journal = {Bernoulli},
  fjournal = {Bernoulli. Official Journal of the Bernoulli Society for Mathematical Statistics and Probability},
  volume = {1},
  year = {1995},
  number = {1-2},
  pages = {59--79},
  issn = {1350-7265},
  mrclass = {62F10},
  mrnumber = {1354456},
  mrreviewer = {V. K. Rohatgi},
  doi = {10.2307/3318681},
  url = {https://doi.org/10.2307/3318681}
}

1993 (1)

John N. Tsitsiklis. Decentralized detection. In H. V. Poor and J. B. Thomas, editors, Advances in Statistical Signal Processing, volume 2, pages 297–344, 1993. JAI Press.
[Tsi93] Considers simple hypothesis testing in a distributed setting, under communication constraints, and shows that for this problem public-coin (noninteractive) protocols are no more powerful than private-coin protocols.

@inproceedings{Tsitsiklis93,
  title = {Decentralized detection},
  author = {Tsitsiklis, John N.},
  booktitle = {Advances in Statistical Signal Processing},
  year = {1993},
  volume = {2},
  editor = {H. V. Poor and J. B. Thomas},
  publisher = {JAI Press},
  pages = {297--344}
}