Improving electronic health record note comprehension with NoteAid: randomized trial of electronic health record note comprehension interventions with crowdsourced workers. Lalor, J. P., Woolf, B., & Yu, H. Journal of Medical Internet Research, 21(1):e10793, 2019. Paper doi abstract bibtex Background: Patient portals are becoming more common, and with them, the ability of patients to access their personal electronic health records (EHRs). EHRs, in particular the free-text EHR notes, often contain medical jargon and terms that are difficult for laypersons to understand. There are many Web-based resources for learning more about particular diseases or conditions, including systems that directly link to lay definitions or educational materials for medical concepts. Objective: Our goal is to determine whether use of one such tool, NoteAid, leads to higher EHR note comprehension ability. We use a new EHR note comprehension assessment tool instead of patient self-reported scores. Methods: In this work, we compare a passive, self-service educational resource (MedlinePlus) with an active resource (NoteAid) where definitions are provided to the user for medical concepts that the system identifies. We use Amazon Mechanical Turk (AMT) to recruit individuals to complete ComprehENotes, a new test of EHR note comprehension. Results: Mean scores for individuals with access to NoteAid are significantly higher than the mean baseline scores, both for raw scores (P=.008) and estimated ability (P=.02). Conclusions: In our experiments, we show that the active intervention leads to significantly higher scores on the comprehension test as compared with a baseline group with no resources provided. In contrast, there is no significant difference between the group that was provided with the passive intervention and the baseline group. 
Finally, we analyze the demographics of the individuals who participated in our AMT task and show differences between groups that align with the current understanding of health literacy between populations. This is the first work to show improvements in comprehension using tools such as NoteAid as measured by an EHR note comprehension assessment tool as opposed to patient self-reported scores. [J Med Internet Res 2019;21(1):e10793]
@article{lalor_improving_2019,
	title = {Improving electronic health record note comprehension with {NoteAid}: randomized trial of electronic health record note comprehension interventions with crowdsourced workers},
	shorttitle = {Improving electronic health record note comprehension with {NoteAid}},
	author = {Lalor, John P. and Woolf, Beverly and Yu, Hong},
	journal = {Journal of Medical Internet Research},
	volume = {21},
	number = {1},
	pages = {e10793},
	year = {2019},
	doi = {10.2196/10793},
	url = {https://www.jmir.org/2019/1/e10793/},
	urldate = {2019-01-31},
	language = {en},
	pmid = {30664453},
	pmcid = {PMC6351990},
	abstract = {Background: Patient portals are becoming more common, and with them, the ability of patients to access their personal electronic health records (EHRs). EHRs, in particular the free-text EHR notes, often contain medical jargon and terms that are difficult for laypersons to understand. There are many Web-based resources for learning more about particular diseases or conditions, including systems that directly link to lay definitions or educational materials for medical concepts. Objective: Our goal is to determine whether use of one such tool, NoteAid, leads to higher EHR note comprehension ability. We use a new EHR note comprehension assessment tool instead of patient self-reported scores. Methods: In this work, we compare a passive, self-service educational resource (MedlinePlus) with an active resource (NoteAid) where definitions are provided to the user for medical concepts that the system identifies. We use Amazon Mechanical Turk (AMT) to recruit individuals to complete ComprehENotes, a new test of EHR note comprehension. Results: Mean scores for individuals with access to NoteAid are significantly higher than the mean baseline scores, both for raw scores (P=.008) and estimated ability (P=.02). Conclusions: In our experiments, we show that the active intervention leads to significantly higher scores on the comprehension test as compared with a baseline group with no resources provided. In contrast, there is no significant difference between the group that was provided with the passive intervention and the baseline group. Finally, we analyze the demographics of the individuals who participated in our AMT task and show differences between groups that align with the current understanding of health literacy between populations. This is the first work to show improvements in comprehension using tools such as NoteAid as measured by an EHR note comprehension assessment tool as opposed to patient self-reported scores. [J Med Internet Res 2019;21(1):e10793]},
}
Downloads: 0
{"_id":"fLjp5wwcvpiMnsK3h","bibbaseid":"lalor-woolf-yu-improvingelectronichealthrecordnotecomprehensionwithnoteaidrandomizedtrialofelectronichealthrecordnotecomprehensioninterventionswithcrowdsourcedworkers-2019","author_short":["Lalor, J. P.","Woolf, B.","Yu, H."],"bibdata":{"bibtype":"article","type":"article","title":"Improving electronic health record note comprehension with NoteAid: randomized trial of electronic health record note comprehension interventions with crowdsourced workers","volume":"21","copyright":"Unless stated otherwise, all articles are open-access distributed under the terms of the Creative Commons Attribution License (http://creativecommons.org/licenses/by/2.0/), which permits unrestricted use, distribution, and reproduction in any medium, provided the original work (","shorttitle":"Improving electronic health record note comprehension with noteaid","url":"https://www.jmir.org/2019/1/e10793/","doi":"10.2196/jmir.10793","abstract":"Background: Patient portals are becoming more common, and with them, the ability of patients to access their personal electronic health records (EHRs). EHRs, in particular the free-text EHR notes, often contain medical jargon and terms that are difficult for laypersons to understand. There are many Web-based resources for learning more about particular diseases or conditions, including systems that directly link to lay definitions or educational materials for medical concepts. Objective: Our goal is to determine whether use of one such tool, NoteAid, leads to higher EHR note comprehension ability. We use a new EHR note comprehension assessment tool instead of patient self-reported scores. Methods: In this work, we compare a passive, self-service educational resource (MedlinePlus) with an active resource (NoteAid) where definitions are provided to the user for medical concepts that the system identifies. 
We use Amazon Mechanical Turk (AMT) to recruit individuals to complete ComprehENotes, a new test of EHR note comprehension. Results: Mean scores for individuals with access to NoteAid are significantly higher than the mean baseline scores, both for raw scores (P=.008) and estimated ability (P=.02). Conclusions: In our experiments, we show that the active intervention leads to significantly higher scores on the comprehension test as compared with a baseline group with no resources provided. In contrast, there is no significant difference between the group that was provided with the passive intervention and the baseline group. Finally, we analyze the demographics of the individuals who participated in our AMT task and show differences between groups that align with the current understanding of health literacy between populations. This is the first work to show improvements in comprehension using tools such as NoteAid as measured by an EHR note comprehension assessment tool as opposed to patient self-reported scores. 
[J Med Internet Res 2019;21(1):e10793]","language":"en","number":"1","urldate":"2019-01-31","journal":"Journal of Medical Internet Research","author":[{"propositions":[],"lastnames":["Lalor"],"firstnames":["John","P."],"suffixes":[]},{"propositions":[],"lastnames":["Woolf"],"firstnames":["Beverly"],"suffixes":[]},{"propositions":[],"lastnames":["Yu"],"firstnames":["Hong"],"suffixes":[]}],"year":"2019","pmid":"30664453 PMCID: 6351990","pages":"e10793","bibtex":"@article{lalor_improving_2019,\n\ttitle = {Improving electronic health record note comprehension with {NoteAid}: randomized trial of electronic health record note comprehension interventions with crowdsourced workers},\n\tvolume = {21},\n\tcopyright = {Unless stated otherwise, all articles are open-access distributed under the terms of the Creative Commons Attribution License (http://creativecommons.org/licenses/by/2.0/), which permits unrestricted use, distribution, and reproduction in any medium, provided the original work (},\n\tshorttitle = {Improving electronic health record note comprehension with noteaid},\n\turl = {https://www.jmir.org/2019/1/e10793/},\n\tdoi = {10.2196/jmir.10793},\n\tabstract = {Background: Patient portals are becoming more common, and with them, the ability of patients to access their personal electronic health records (EHRs). EHRs, in particular the free-text EHR notes, often contain medical jargon and terms that are difficult for laypersons to understand. There are many Web-based resources for learning more about particular diseases or conditions, including systems that directly link to lay definitions or educational materials for medical concepts. Objective: Our goal is to determine whether use of one such tool, NoteAid, leads to higher EHR note comprehension ability. We use a new EHR note comprehension assessment tool instead of patient self-reported scores. 
Methods: In this work, we compare a passive, self-service educational resource (MedlinePlus) with an active resource (NoteAid) where definitions are provided to the user for medical concepts that the system identifies. We use Amazon Mechanical Turk (AMT) to recruit individuals to complete ComprehENotes, a new test of EHR note comprehension. Results: Mean scores for individuals with access to NoteAid are significantly higher than the mean baseline scores, both for raw scores (P=.008) and estimated ability (P=.02). Conclusions: In our experiments, we show that the active intervention leads to significantly higher scores on the comprehension test as compared with a baseline group with no resources provided. In contrast, there is no significant difference between the group that was provided with the passive intervention and the baseline group. Finally, we analyze the demographics of the individuals who participated in our AMT task and show differences between groups that align with the current understanding of health literacy between populations. This is the first work to show improvements in comprehension using tools such as NoteAid as measured by an EHR note comprehension assessment tool as opposed to patient self-reported scores. [J Med Internet Res 2019;21(1):e10793]},\n\tlanguage = {en},\n\tnumber = {1},\n\turldate = {2019-01-31},\n\tjournal = {Journal of Medical Internet Research},\n\tauthor = {Lalor, John P. and Woolf, Beverly and Yu, Hong},\n\tyear = {2019},\n\tpmid = {30664453 PMCID: 6351990},\n\tpages = {e10793},\n}\n\n","author_short":["Lalor, J. 
P.","Woolf, B.","Yu, H."],"key":"lalor_improving_2019","id":"lalor_improving_2019","bibbaseid":"lalor-woolf-yu-improvingelectronichealthrecordnotecomprehensionwithnoteaidrandomizedtrialofelectronichealthrecordnotecomprehensioninterventionswithcrowdsourcedworkers-2019","role":"author","urls":{"Paper":"https://www.jmir.org/2019/1/e10793/"},"metadata":{"authorlinks":{}},"html":""},"bibtype":"article","biburl":"http://fenway.cs.uml.edu/papers/pubs-all.bib","dataSources":["TqaA9miSB65nRfS5H"],"keywords":[],"search_terms":["improving","electronic","health","record","note","comprehension","noteaid","randomized","trial","electronic","health","record","note","comprehension","interventions","crowdsourced","workers","lalor","woolf","yu"],"title":"Improving electronic health record note comprehension with NoteAid: randomized trial of electronic health record note comprehension interventions with crowdsourced workers","year":2019}