\n
\n\n \n \n \n \n \n \n Distributed Signal Detection under Communication Constraints.\n \n \n \n \n\n\n \n Jayadev Acharya; Clément L. Canonne; and Himanshu Tyagi.\n\n\n \n\n\n\n In volume 125, of
Proceedings of Machine Learning Research, pages 41–63, 2020. PMLR\n
\n\n
[ACT20c] Applies the $χ^2$-contraction lower bound framework of [ACT20a] to obtain lower bounds for the problem of Gaussian mean testing (the hypothesis testing version of mean estimation in the Gaussian Location Model) under communication constraints (noninteractive). Also provides upper bounds for both private- and public-coin protocols.
\n\n
\n\n
\n\n \n \n Paper\n \n \n\n \n\n \n link\n \n \n\n bibtex\n \n\n \n\n \n \n \n 21 downloads\n \n \n\n \n \n \n \n \n \n \n\n \n \n \n\n\n\n
\n
@inproceedings{ACT20,
  title        = {Distributed Signal Detection under Communication Constraints},
  author       = {Acharya, Jayadev and Canonne, Cl{\'e}ment L. and Tyagi, Himanshu},
  booktitle    = {33rd Conference on Learning Theory, {COLT} 2020},
  pages        = {41--63},
  year         = {2020},
  volume       = {125},
  series       = {Proceedings of Machine Learning Research},
  publisher    = {PMLR},
  pdf          = {http://proceedings.mlr.press/v125/acharya20b/acharya20b.pdf},
  url          = {http://proceedings.mlr.press/v125/acharya20b.html},
  bibbase_note = {<div class="well well-small bibbase"><span class="bluecite">[ACT20c]</span> Applies the $\chi^2$-contraction lower bound framework of [ACT20a] to obtain lower bounds for the problem of Gaussian mean testing (the hypothesis testing version of mean estimation in the Gaussian Location Model) under communication constraints (noninteractive). Also provides upper bounds for both private- and public-coin protocols.</div>}
}
\n
\n\n\n\n
\n
\n\n \n \n \n \n \n \n Domain Compression and its Application to Randomness-Optimal Distributed Goodness-of-Fit.\n \n \n \n \n\n\n \n Jayadev Acharya; Clément L Canonne; Yanjun Han; Ziteng Sun; and Himanshu Tyagi.\n\n\n \n\n\n\n In
33rd Conference on Learning Theory, COLT 2020, volume 125, of
Proceedings of Machine Learning Research, pages 3–40, 2020. PMLR\n
\n\n
[ACHST20] Similar noninteractive setting as [ACT20a,ACT20b,ACFST20], for general local information constraints. Focus on the tight tradeoff between amount of public randomness available and sample complexity, for identity testing (goodness-of-fit).
\n\n
\n\n
\n\n \n \n Paper\n \n \n\n \n\n \n link\n \n \n\n bibtex\n \n\n \n\n \n \n \n 62 downloads\n \n \n\n \n \n \n \n \n \n \n\n \n \n \n\n\n\n
\n
@inproceedings{ACHST20,
  title        = {Domain Compression and its Application to Randomness-Optimal Distributed Goodness-of-Fit},
  author       = {Acharya, Jayadev and Canonne, Cl{\'e}ment L. and Han, Yanjun and Sun, Ziteng and Tyagi, Himanshu},
  booktitle    = {33rd Conference on Learning Theory, {COLT} 2020},
  pages        = {3--40},
  year         = {2020},
  volume       = {125},
  series       = {Proceedings of Machine Learning Research},
  publisher    = {PMLR},
  pdf          = {http://proceedings.mlr.press/v125/acharya20a/acharya20a.pdf},
  url          = {http://proceedings.mlr.press/v125/acharya20a.html},
  bibbase_note = {<div class="well well-small bibbase"><span class="bluecite">[ACHST20]</span> Similar noninteractive setting as [ACT20a,ACT20b,ACFST20], for general local information constraints. Focus on the tight tradeoff between amount of public randomness available and sample complexity, for identity testing (goodness-of-fit).</div>}
}
\n
\n\n\n\n
\n
\n\n \n \n \n \n \n \n Entanglement is Necessary for Optimal Quantum Property Testing.\n \n \n \n \n\n\n \n Sébastien Bubeck; Sitan Chen; and Jerry Li.\n\n\n \n\n\n\n
CoRR, abs/2004.07869. 2020.\n
To appear in FOCS'20.\n\n
[BCL20] Consider the task of testing whether a quantum state is maximally mixed vs. far from it in trace distance, given \"local measurements\" (no entanglement). Establishes a separation between non-adaptive (noninteractive) and adaptive (sequentially interactive) algorithms for this task. Even though this question does not, per se, fall under the same setting as distributed uniformity testing and the works are independent, the ideas bear resemblance to the $χ^2$-contraction lower bound framework of [ACT20a].
\n\n
\n\n
\n\n \n \n Paper\n \n \n\n \n\n \n link\n \n \n\n bibtex\n \n\n \n\n \n \n \n 2 downloads\n \n \n\n \n \n \n \n \n \n \n\n \n \n \n\n\n\n
\n
@article{BCL20,
  author       = {Bubeck, S{\'{e}}bastien and Chen, Sitan and Li, Jerry},
  title        = {Entanglement is Necessary for Optimal Quantum Property Testing},
  journal      = {CoRR},
  volume       = {abs/2004.07869},
  year         = {2020},
  note         = {To appear in FOCS'20.},
  url          = {https://arxiv.org/abs/2004.07869},
  eprint       = {2004.07869},
  eprinttype   = {arXiv},
  bibbase_note = {<div class="well well-small bibbase"><span class="bluecite">[BCL20]</span> Consider the task of testing whether a quantum state is maximally mixed vs. far from it in trace distance, given "local measurements" (no entanglement). Establishes a separation between non-adaptive (noninteractive) and adaptive (sequentially interactive) algorithms for this task. Even though this question does not, per se, fall under the same setting as distributed uniformity testing and the works are independent, the ideas bear resemblance to the $\chi^2$-contraction lower bound framework of [ACT20a].</div>}
}
\n
\n\n\n\n
[BCÖ20] Provides lower bounds for parameter estimation under $\ell_2$ loss for interactive protocols (blackboard model) under local privacy, and instantiates it to obtain tight bounds for Gaussian mean estimation, (sparse) Bernoulli mean estimation, and discrete distribution estimation. As in [BHÖ19], the lower bound framework is based on a Cramér–Rao/van Trees-type approach.\n\n
\n\n
\n\n \n \n Paper\n \n \n\n \n\n \n link\n \n \n\n bibtex\n \n\n \n\n \n \n \n 2 downloads\n \n \n\n \n \n \n \n \n \n \n\n \n \n \n\n\n\n
\n
@article{BCO20,
  author       = {Barnes, Leighton P. and Chen, Wei{-}Ning and {\"{O}}zg{\"{u}}r, Ayfer},
  title        = {{Fisher} information under local differential privacy},
  journal      = {CoRR},
  volume       = {abs/2005.10783},
  year         = {2020},
  url          = {https://arxiv.org/abs/2005.10783},
  eprint       = {2005.10783},
  eprinttype   = {arXiv},
  note         = {Accepted to the IEEE Journal on Selected Areas in Information Theory (JSAIT).},
  bibbase_note = {<div class="well well-small bibbase"><span class="bluecite">[BCÖ20]</span> Provides lower bounds for parameter estimation under $\ell_2$ loss for interactive protocols (blackboard model) under local privacy, and instantiates it to obtain tight bounds for Gaussian mean estimation, (sparse) Bernoulli mean estimation, and discrete distribution estimation. As in [BHÖ19], the lower bound framework is based on a Cramér–Rao/van Trees-type approach.</div>}
}
\n
\n\n\n\n
\n\n\n \n\n\n \n\n\n \n\n\n \n\n\n \n\n\n \n
\n\n \n \n \n \n \n Locally private non-asymptotic testing of discrete distributions is faster using interactive mechanisms.\n \n \n \n\n\n \n Thomas B. Berrett; and Cristina Butucea.\n\n\n \n\n\n\n
CoRR, abs/2005.12601. 2020.\n
To appear in NeurIPS'20.\n\n
[BB20] Obtains, among others, a tight lower bound for identity testing (goodness-of-fit) under local privacy. This lower bound applies to sequentially interactive protocols, and is slightly more general than those of [AJM20,ACLST20] for the same problem, in that it provides an explicit dependence on the reference distribution $\\mathbf{q}$ (while the other two works focus on the worst-case $\\mathbf{q}$, i.e., the uniform distribution).
\n\n
\n\n
\n\n \n\n \n\n \n link\n \n \n\n bibtex\n \n\n \n\n \n\n \n \n \n \n \n \n \n\n \n \n \n\n\n\n
\n
@article{BB20,
  title        = {Locally private non-asymptotic testing of discrete distributions is faster using interactive mechanisms},
  author       = {Berrett, Thomas B. and Butucea, Cristina},
  journal      = {CoRR},
  volume       = {abs/2005.12601},
  year         = {2020},
  note         = {To appear in {NeurIPS'20}.},
  url          = {https://arxiv.org/abs/2005.12601},
  eprint       = {2005.12601},
  eprinttype   = {arXiv},
  bibbase_note = {<div class="well well-small bibbase"><span class="bluecite">[BB20]</span> Obtains, among others, a tight lower bound for identity testing (goodness-of-fit) under local privacy. This lower bound applies to sequentially interactive protocols, and is slightly more general than those of [AJM20,ACLST20] for the same problem, in that it provides an explicit dependence on the reference distribution $\mathbf{q}$ (while the other two works focus on the worst-case $\mathbf{q}$, i.e., the uniform distribution).</div>}
}
\n
\n\n\n\n
\n\n\n \n\n\n\n\n\n