Towards Understanding Differential Privacy: When Do People Trust Randomized Response Technique? Bullek, B., Garboski, S., Mir, D. J., & Peck, E. M. In Proceedings of the 2017 CHI Conference on Human Factors in Computing Systems (CHI '17), pages 3833–3837, New York, NY, USA, May 2017. Association for Computing Machinery.
Abstract: As a consequence of living in a data ecosystem, we often relinquish personal information to be used in contexts in which we have no control. In this paper, we begin to examine the usability of differential privacy, a mechanism that proposes to promise privacy with a mathematical "proof" to the data donor. Do people trust this promise and adjust their privacy decisions if the interfaces through which they interact make differential privacy less opaque? In a study with 228 participants, we measured comfort, understanding, and trust using a variant of differential privacy known as Randomized Response Technique (RRT). We found that allowing individuals to see the amount of obfuscation applied to their responses increased their trust in the privacy-protecting mechanism. However, participants who associated obfuscating privacy mechanisms with deception did not make the "safest" privacy decisions, even as they demonstrated an understanding of RRT. We demonstrate that prudent privacy-related decisions can be cultivated with simple explanations of usable privacy.
@inproceedings{bullekUnderstandingDifferentialPrivacy2017,
	address    = {New York, NY, USA},
	series     = {{CHI} '17},
	title      = {Towards Understanding {Differential Privacy}: When Do People Trust {Randomized Response Technique}?},
	isbn       = {978-1-4503-4655-9},
	shorttitle = {Towards Understanding {Differential Privacy}},
	url        = {https://dl.acm.org/doi/10.1145/3025453.3025698},
	doi        = {10.1145/3025453.3025698},
	abstract   = {As a consequence of living in a data ecosystem, we often relinquish personal information to be used in contexts in which we have no control. In this paper, we begin to examine the usability of differential privacy, a mechanism that proposes to promise privacy with a mathematical ``proof'' to the data donor. Do people trust this promise and adjust their privacy decisions if the interfaces through which they interact make differential privacy less opaque? In a study with 228 participants, we measured comfort, understanding, and trust using a variant of differential privacy known as Randomized Response Technique (RRT). We found that allowing individuals to see the amount of obfuscation applied to their responses increased their trust in the privacy-protecting mechanism. However, participants who associated obfuscating privacy mechanisms with deception did not make the ``safest'' privacy decisions, even as they demonstrated an understanding of RRT. We demonstrate that prudent privacy-related decisions can be cultivated with simple explanations of usable privacy.},
	urldate    = {2024-09-04},
	booktitle  = {Proceedings of the 2017 {CHI} Conference on Human Factors in Computing Systems},
	publisher  = {Association for Computing Machinery},
	author     = {Bullek, Brooke and Garboski, Stephanie and Mir, Darakhshan J. and Peck, Evan M.},
	month      = may,
	year       = {2017},
	pages      = {3833--3837},
}
Downloads: 0
{"_id":"73cGSaBeEYmrDkL79","bibbaseid":"bullek-garboski-mir-peck-towardsunderstandingdifferentialprivacywhendopeopletrustrandomizedresponsetechnique-2017","author_short":["Bullek, B.","Garboski, S.","Mir, D. J.","Peck, E. M."],"bibdata":{"bibtype":"inproceedings","type":"inproceedings","address":"New York, NY, USA","series":"CHI '17","title":"Towards Understanding Differential Privacy: When Do People Trust Randomized Response Technique?","isbn":"978-1-4503-4655-9","shorttitle":"Towards Understanding Differential Privacy","url":"https://dl.acm.org/doi/10.1145/3025453.3025698","doi":"10.1145/3025453.3025698","abstract":"As a consequence of living in a data ecosystem, we often relinquish personal information to be used in contexts in which we have no control. In this paper, we begin to examine the usability of differential privacy, a mechanism that proposes to promise privacy with a mathematical \"proof\" to the data donor. Do people trust this promise and adjust their privacy decisions if the interfaces through which they interact make differential privacy less opaque? In a study with 228 participants, we measured comfort, understanding, and trust using a variant of differential privacy known as Randomized Response Technique (RRT). We found that allowing individuals to see the amount of obfuscation applied to their responses increased their trust in the privacy-protecting mechanism. However, participants who associated obfuscating privacy mechanisms with deception did not make the \"safest\" privacy decisions, even as they demonstrated an understanding of RRT. 
We demonstrate that prudent privacy-related decisions can be cultivated with simple explanations of usable privacy.","urldate":"2024-09-04","booktitle":"Proceedings of the 2017 CHI Conference on Human Factors in Computing Systems","publisher":"Association for Computing Machinery","author":[{"propositions":[],"lastnames":["Bullek"],"firstnames":["Brooke"],"suffixes":[]},{"propositions":[],"lastnames":["Garboski"],"firstnames":["Stephanie"],"suffixes":[]},{"propositions":[],"lastnames":["Mir"],"firstnames":["Darakhshan","J."],"suffixes":[]},{"propositions":[],"lastnames":["Peck"],"firstnames":["Evan","M."],"suffixes":[]}],"month":"May","year":"2017","pages":"3833–3837","bibtex":"@inproceedings{bullekUnderstandingDifferentialPrivacy2017,\n\taddress = {New York, NY, USA},\n\tseries = {{CHI} '17},\n\ttitle = {Towards {Understanding} {Differential} {Privacy}: {When} {Do} {People} {Trust} {Randomized} {Response} {Technique}?},\n\tisbn = {978-1-4503-4655-9},\n\tshorttitle = {Towards {Understanding} {Differential} {Privacy}},\n\turl = {https://dl.acm.org/doi/10.1145/3025453.3025698},\n\tdoi = {10.1145/3025453.3025698},\n\tabstract = {As a consequence of living in a data ecosystem, we often relinquish personal information to be used in contexts in which we have no control. In this paper, we begin to examine the usability of differential privacy, a mechanism that proposes to promise privacy with a mathematical \"proof\" to the data donor. Do people trust this promise and adjust their privacy decisions if the interfaces through which they interact make differential privacy less opaque? In a study with 228 participants, we measured comfort, understanding, and trust using a variant of differential privacy known as Randomized Response Technique (RRT). We found that allowing individuals to see the amount of obfuscation applied to their responses increased their trust in the privacy-protecting mechanism. 
However, participants who associated obfuscating privacy mechanisms with deception did not make the \"safest\" privacy decisions, even as they demonstrated an understanding of RRT. We demonstrate that prudent privacy-related decisions can be cultivated with simple explanations of usable privacy.},\n\turldate = {2024-09-04},\n\tbooktitle = {Proceedings of the 2017 {CHI} {Conference} on {Human} {Factors} in {Computing} {Systems}},\n\tpublisher = {Association for Computing Machinery},\n\tauthor = {Bullek, Brooke and Garboski, Stephanie and Mir, Darakhshan J. and Peck, Evan M.},\n\tmonth = may,\n\tyear = {2017},\n\tpages = {3833--3837},\n}\n\n\n\n","author_short":["Bullek, B.","Garboski, S.","Mir, D. J.","Peck, E. M."],"key":"bullekUnderstandingDifferentialPrivacy2017","id":"bullekUnderstandingDifferentialPrivacy2017","bibbaseid":"bullek-garboski-mir-peck-towardsunderstandingdifferentialprivacywhendopeopletrustrandomizedresponsetechnique-2017","role":"author","urls":{"Paper":"https://dl.acm.org/doi/10.1145/3025453.3025698"},"metadata":{"authorlinks":{}},"downloads":0,"html":""},"bibtype":"inproceedings","biburl":"https://bibbase.org/zotero-group/Lplabst/5610423","dataSources":["i7ZFyWodPNm2kpRNj"],"keywords":[],"search_terms":["towards","understanding","differential","privacy","people","trust","randomized","response","technique","bullek","garboski","mir","peck"],"title":"Towards Understanding Differential Privacy: When Do People Trust Randomized Response Technique?","year":2017}