EHRTutor: Enhancing Patient Understanding of Discharge Instructions. Zhang, Z., Yao, Z., Zhou, H., Ouyang, F., & Yu, H. October, 2023. To appear in NeurIPS'23 Workshop on Generative AI for Education (GAIED), December, New Orleans.
Abstract: Large language models have shown success as a tutor in education in various fields. Educating patients about their clinical visits plays a pivotal role in patients' adherence to their treatment plans post-discharge. This paper presents EHRTutor, an innovative multi-component framework leveraging the Large Language Model (LLM) for patient education through conversational question-answering. EHRTutor first formulates questions pertaining to the electronic health record discharge instructions. It then educates the patient through conversation by administering each question as a test. Finally, it generates a summary at the end of the conversation. Evaluation results using LLMs and domain experts have shown a clear preference for EHRTutor over the baseline. Moreover, EHRTutor also offers a framework for generating synthetic patient education dialogues that can be used for future in-house system training.
@misc{zhang_ehrtutor_2023,
title = {{EHRTutor}: {Enhancing} {Patient} {Understanding} of {Discharge} {Instructions}},
shorttitle = {{EHRTutor}},
url = {http://arxiv.org/abs/2310.19212},
doi = {10.48550/arXiv.2310.19212},
abstract = {Large language models have shown success as a tutor in education in various fields. Educating patients about their clinical visits plays a pivotal role in patients' adherence to their treatment plans post-discharge. This paper presents EHRTutor, an innovative multi-component framework leveraging the Large Language Model (LLM) for patient education through conversational question-answering. EHRTutor first formulates questions pertaining to the electronic health record discharge instructions. It then educates the patient through conversation by administering each question as a test. Finally, it generates a summary at the end of the conversation. Evaluation results using LLMs and domain experts have shown a clear preference for EHRTutor over the baseline. Moreover, EHRTutor also offers a framework for generating synthetic patient education dialogues that can be used for future in-house system training.},
urldate = {2023-11-01},
publisher = {arXiv},
author = {Zhang, Zihao and Yao, Zonghai and Zhou, Huixue and Ouyang, Feiyun and Yu, Hong},
month = oct,
year = {2023},
note = {To appear in NeurIPS'23 Workshop on Generative AI for Education (GAIED), December, New Orleans},
keywords = {Computer Science - Artificial Intelligence, Computer Science - Computation and Language},
}
{"_id":"LHNzLXBshhumizCPd","bibbaseid":"zhang-yao-zhou-ouyang-yu-ehrtutorenhancingpatientunderstandingofdischargeinstructions-2023","author_short":["Zhang, Z.","Yao, Z.","Zhou, H.","ouyang , F.","Yu, H."],"bibdata":{"bibtype":"misc","type":"misc","title":"EHRTutor: Enhancing Patient Understanding of Discharge Instructions","shorttitle":"EHRTutor","url":"http://arxiv.org/abs/2310.19212","doi":"10.48550/arXiv.2310.19212","abstract":"Large language models have shown success as a tutor in education in various fields. Educating patients about their clinical visits plays a pivotal role in patients' adherence to their treatment plans post-discharge. This paper presents EHRTutor, an innovative multi-component framework leveraging the Large Language Model (LLM) for patient education through conversational question-answering. EHRTutor first formulates questions pertaining to the electronic health record discharge instructions. It then educates the patient through conversation by administering each question as a test. Finally, it generates a summary at the end of the conversation. Evaluation results using LLMs and domain experts have shown a clear preference for EHRTutor over the baseline. Moreover, EHRTutor also offers a framework for generating synthetic patient education dialogues that can be used for future in-house system training.","urldate":"2023-11-01","publisher":"arXiv","author":[{"propositions":[],"lastnames":["Zhang"],"firstnames":["Zihao"],"suffixes":[]},{"propositions":[],"lastnames":["Yao"],"firstnames":["Zonghai"],"suffixes":[]},{"propositions":[],"lastnames":["Zhou"],"firstnames":["Huixue"],"suffixes":[]},{"propositions":["ouyang"],"lastnames":[],"firstnames":["Feiyun"],"suffixes":[]},{"propositions":[],"lastnames":["Yu"],"firstnames":["Hong"],"suffixes":[]}],"month":"October","year":"2023","note":"To appear in NeurIPS'23 Workshop on Generative AI for Education (GAIED), December, New Orleans","keywords":"Computer Science - Artificial Intelligence, Computer Science - Computation and Language","bibtex":"@misc{zhang_ehrtutor_2023,\n\ttitle = {{EHRTutor}: {Enhancing} {Patient} {Understanding} of {Discharge} {Instructions}},\n\tshorttitle = {{EHRTutor}},\n\turl = {http://arxiv.org/abs/2310.19212},\n\tdoi = {10.48550/arXiv.2310.19212},\n\tabstract = {Large language models have shown success as a tutor in education in various fields. Educating patients about their clinical visits plays a pivotal role in patients' adherence to their treatment plans post-discharge. This paper presents EHRTutor, an innovative multi-component framework leveraging the Large Language Model (LLM) for patient education through conversational question-answering. EHRTutor first formulates questions pertaining to the electronic health record discharge instructions. It then educates the patient through conversation by administering each question as a test. Finally, it generates a summary at the end of the conversation. Evaluation results using LLMs and domain experts have shown a clear preference for EHRTutor over the baseline. 
Moreover, EHRTutor also offers a framework for generating synthetic patient education dialogues that can be used for future in-house system training.},\n\turldate = {2023-11-01},\n\tpublisher = {arXiv},\n\tauthor = {Zhang, Zihao and Yao, Zonghai and Zhou, Huixue and ouyang, Feiyun and Yu, Hong},\n\tmonth = oct,\n\tyear = {2023},\n\tnote = {To appear in NeurIPS'23 Workshop on Generative AI for Education (GAIED), December, New Orleans},\n\tkeywords = {Computer Science - Artificial Intelligence, Computer Science - Computation and Language},\n}\n\n","author_short":["Zhang, Z.","Yao, Z.","Zhou, H.","ouyang , F.","Yu, H."],"key":"zhang_ehrtutor_2023","id":"zhang_ehrtutor_2023","bibbaseid":"zhang-yao-zhou-ouyang-yu-ehrtutorenhancingpatientunderstandingofdischargeinstructions-2023","role":"author","urls":{"Paper":"http://arxiv.org/abs/2310.19212"},"keyword":["Computer Science - Artificial Intelligence","Computer Science - Computation and Language"],"metadata":{"authorlinks":{}},"html":""},"bibtype":"misc","biburl":"http://fenway.cs.uml.edu/papers/pubs-all.bib","dataSources":["TqaA9miSB65nRfS5H"],"keywords":["computer science - artificial intelligence","computer science - computation and language"],"search_terms":["ehrtutor","enhancing","patient","understanding","discharge","instructions","zhang","yao","zhou","ouyang ","yu"],"title":"EHRTutor: Enhancing Patient Understanding of Discharge Instructions","year":2023}