Interrater Disagreement Resolution: A Systematic Procedure to Reach Consensus in Annotation Tasks. Oortwijn, Y., Ossenkoppele, T., & Betti, A. In Proceedings of the Workshop on Human Evaluation of NLP Systems (HumEval), pages 131–141, Online, April 2021. Association for Computational Linguistics. Paper: https://aclanthology.org/2021.humeval-1.15

Abstract: We present a systematic procedure for interrater disagreement resolution. The procedure is general, but of particular use in multiple-annotator tasks geared towards ground truth construction. We motivate our proposal by arguing that, barring cases in which the researchers' goal is to elicit different viewpoints, interrater disagreement is a sign of poor quality in the design or the description of a task. Consensus among annotators, we maintain, should be striven for, through a systematic procedure for disagreement resolution such as the one we describe.
@inproceedings{oortwijn_interrater_2021,
address = {Online},
title = {Interrater {Disagreement} {Resolution}: {A} {Systematic} {Procedure} to {Reach} {Consensus} in {Annotation} {Tasks}},
shorttitle = {Interrater {Disagreement} {Resolution}},
url = {https://aclanthology.org/2021.humeval-1.15},
abstract = {We present a systematic procedure for interrater disagreement resolution. The procedure is general, but of particular use in multiple-annotator tasks geared towards ground truth construction. We motivate our proposal by arguing that, barring cases in which the researchers' goal is to elicit different viewpoints, interrater disagreement is a sign of poor quality in the design or the description of a task. Consensus among annotators, we maintain, should be striven for, through a systematic procedure for disagreement resolution such as the one we describe.},
urldate = {2023-05-26},
booktitle = {Proceedings of the {Workshop} on {Human} {Evaluation} of {NLP} {Systems} ({HumEval})},
publisher = {Association for Computational Linguistics},
author = {Oortwijn, Yvette and Ossenkoppele, Thijs and Betti, Arianna},
month = apr,
year = {2021},
pages = {131--141},
}
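The paper itself ships no code, but as illustrative context for the abstract's claim, the sketch below shows one common way to quantify interrater disagreement (Cohen's kappa for two annotators) and to collect the disagreeing items that a resolution procedure such as the one the authors describe would operate on. This is a minimal sketch of standard agreement measurement, not the authors' procedure; the function names and toy labels are assumptions for illustration.

```python
from collections import Counter

def cohen_kappa(labels_a, labels_b):
    """Cohen's kappa for two annotators labelling the same items."""
    assert len(labels_a) == len(labels_b)
    n = len(labels_a)
    # Observed agreement: fraction of items with identical labels.
    observed = sum(a == b for a, b in zip(labels_a, labels_b)) / n
    # Expected chance agreement from each annotator's marginal label frequencies.
    freq_a, freq_b = Counter(labels_a), Counter(labels_b)
    expected = sum(
        (freq_a[c] / n) * (freq_b[c] / n)
        for c in set(freq_a) | set(freq_b)
    )
    return (observed - expected) / (1 - expected)

# Hypothetical toy annotation task: two annotators, binary labels.
ann_a = ["pos", "pos", "neg", "neg", "pos", "neg"]
ann_b = ["pos", "neg", "neg", "neg", "pos", "pos"]

print(f"kappa = {cohen_kappa(ann_a, ann_b):.2f}")

# The disagreeing items are the input to a resolution round
# (discussion, guideline revision, re-annotation).
disagreements = [i for i, (a, b) in enumerate(zip(ann_a, ann_b)) if a != b]
print("items to resolve:", disagreements)
```

On the toy data this prints kappa = 0.33 and flags items 1 and 5, the kind of residual disagreement that, on the paper's view, signals a flaw in task design or description to be resolved rather than averaged away.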