Not to Cry Wolf: Distantly Supervised Multitask Learning in Critical Care. Schwab, P., Keller, E., Muroi, C., Mack, D. J., Strässle, C., & Karlen, W. In Proceedings of the 35th International Conference on Machine Learning, ICML 2018, volume 80, of Proceedings of Machine Learning Research, pages 4525--4534, February, 2018.
Not to Cry Wolf: Distantly Supervised Multitask Learning in Critical Care [link]Website  doi  abstract   bibtex   
Patients in the intensive care unit (ICU) require constant and close supervision. To assist clinical staff in this task, hospitals use monitoring systems that trigger audiovisual alarms if their algorithms indicate that a patient's condition may be worsening. However, current monitoring systems are extremely sensitive to movement artefacts and technical errors. As a result, they typically trigger hundreds to thousands of false alarms per patient per day - drowning the important alarms in noise and adding to the exhaustion of clinical staff. In this setting, data is abundantly available, but obtaining trustworthy annotations by experts is laborious and expensive. We frame the problem of false alarm reduction from multivariate time series as a machine-learning task and address it with a novel multitask network architecture that utilises distant supervision through multiple related auxiliary tasks in order to reduce the number of expensive labels required for training. We show that our approach leads to significant improvements over several state-of-the-art baselines on real-world ICU data and provide new insights on the importance of task selection and architectural choices in distantly supervised multitask learning.
@inproceedings{Schwab2018b,
 title = {Not to Cry Wolf: Distantly Supervised Multitask Learning in Critical Care},
 type = {inproceedings},
 year = {2018},
 pages = {4525--4534},
 volume = {80},
 url = {http://arxiv.org/abs/1802.05027},
 websites = {http://arxiv.org/abs/1802.05027},
 month = feb,
 day = {14},
 city = {Stockholm, Sweden},
 series = {Proceedings of Machine Learning Research},
 id = {62e76d15-a0e7-383f-be70-8d709ee66617},
 created = {2018-05-31T07:22:57.387Z},
 file_attached = {true},
 profile_id = {6d353feb-efe4-367e-84a2-0815eb9ca878},
 last_modified = {2022-09-04T18:12:01.457Z},
 read = {true},
 starred = {false},
 authored = {true},
 confirmed = {true},
 citation_key = {Schwab2018b},
 notes = {Acceptance rate: 0.25},
 folder_uuids = {f1f67efc-95a7-4f1a-b181-c3670c667a34,60555479-b7f0-45f5-aa97-a3920f93c426,4afa922c-d8d6-102e-ac9a-0024e85ead87,0801d9e0-d1ec-46e2-803d-c74946b43a02,d9198259-8733-497d-ab87-d2a9518e0d30},
 private_publication = {false},
 abstract = {Patients in the intensive care unit (ICU) require constant and close supervision. To assist clinical staff in this task, hospitals use monitoring systems that trigger audiovisual alarms if their algorithms indicate that a patient's condition may be worsening. However, current monitoring systems are extremely sensitive to movement artefacts and technical errors. As a result, they typically trigger hundreds to thousands of false alarms per patient per day - drowning the important alarms in noise and adding to the exhaustion of clinical staff. In this setting, data is abundantly available, but obtaining trustworthy annotations by experts is laborious and expensive. We frame the problem of false alarm reduction from multivariate time series as a machine-learning task and address it with a novel multitask network architecture that utilises distant supervision through multiple related auxiliary tasks in order to reduce the number of expensive labels required for training. We show that our approach leads to significant improvements over several state-of-the-art baselines on real-world ICU data and provide new insights on the importance of task selection and architectural choices in distantly supervised multitask learning.},
 bibtype = {inproceedings},
 author = {Schwab, Patrick and Keller, Emanuela and Muroi, Carl and Mack, David J and Strässle, Christian and Karlen, Walter},
 doi = {10.3929/ethz-b-000241127},
 booktitle = {Proceedings of the 35th International Conference on Machine Learning, ICML 2018}
}

Downloads: 0