Granger-Causal Attentive Mixtures of Experts: Learning Important Features with Neural Networks. Schwab, P., Miladinovic, D., & Karlen, W. Proceedings of the AAAI Conference on Artificial Intelligence, 33:4846-4853, AAAI Press, July 2019.
Knowledge of the importance of input features towards decisions made by machine-learning models is essential to increase our understanding of both the models and the underlying data. Here, we present a new approach to estimating feature importance with neural networks based on the idea of distributing the features of interest among experts in an attentive mixture of experts (AME). AMEs use attentive gating networks trained with a Granger-causal objective to learn to jointly produce accurate predictions as well as estimates of feature importance in a single model. Our experiments show (i) that the feature importance estimates provided by AMEs compare favourably to those provided by state-of-the-art methods, (ii) that AMEs are significantly faster at estimating feature importance than existing methods, and (iii) that the associations discovered by AMEs are consistent with those reported by domain experts.
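The architecture described in the abstract is concrete enough to sketch. The following PyTorch fragment is an illustrative approximation only, not the authors' implementation: it assumes one small expert network per input feature, an attentive gating network whose softmax weights double as feature importance estimates, and a Granger-causal auxiliary loss that pulls those weights towards the normalised increase in prediction error observed when each feature is withheld. The paper derives these error-based targets from the model's own predictions with and without each feature's contribution; zero-masking inputs is a simplification here, and all names (AMESketch, granger_causal_loss, hidden_dim) are hypothetical.

import torch
import torch.nn as nn
import torch.nn.functional as F

class AMESketch(nn.Module):
    def __init__(self, num_features, hidden_dim=16):
        super().__init__()
        # One small expert per input feature (hypothetical sizing).
        self.experts = nn.ModuleList([
            nn.Sequential(nn.Linear(1, hidden_dim), nn.ReLU())
            for _ in range(num_features)])
        # Attentive gating: scores each expert's hidden state.
        self.gate = nn.Linear(hidden_dim, 1)
        self.head = nn.Linear(hidden_dim, 1)

    def forward(self, x):
        # x: (batch, num_features); each column feeds its own expert.
        hidden = torch.stack(
            [exp(x[:, i:i + 1]) for i, exp in enumerate(self.experts)], dim=1)
        attn = F.softmax(self.gate(hidden).squeeze(-1), dim=1)  # importance
        mixed = (attn.unsqueeze(-1) * hidden).sum(dim=1)
        return self.head(mixed), attn

def granger_causal_loss(model, x, y, attn):
    # Granger-style targets: how much does the squared error grow when
    # feature i is withheld? Normalise the increases into a distribution
    # and pull the attention weights towards it.
    with torch.no_grad():
        err_full = (model(x)[0] - y) ** 2
        deltas = []
        for i in range(x.shape[1]):
            x_masked = x.clone()
            x_masked[:, i] = 0.0  # zero baseline assumed for "withheld"
            err_i = (model(x_masked)[0] - y) ** 2
            deltas.append(torch.relu(err_i - err_full))
        target = torch.stack(deltas, dim=1).squeeze(-1) + 1e-8
        target = target / target.sum(dim=1, keepdim=True)
    # KL divergence between the gate's attention and the targets.
    return F.kl_div(attn.clamp_min(1e-8).log(), target, reduction='batchmean')

# Usage: combine the prediction loss with the Granger-causal term.
model = AMESketch(num_features=8)
x, y = torch.randn(32, 8), torch.randn(32, 1)
pred, attn = model(x)
loss = F.mse_loss(pred, y) + 0.1 * granger_causal_loss(model, x, y, attn)
loss.backward()

Routing each feature to its own expert is what makes the gate's attention weights interpretable as per-feature importance, and it is why a single forward pass suffices to read off the importance estimates, consistent with the speed claim (ii) in the abstract.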
@article{Schwab2018,
 title = {Granger-Causal Attentive Mixtures of Experts: Learning Important Features with Neural Networks},
 year = {2019},
 pages = {4846-4853},
 volume = {33},
 websites = {http://arxiv.org/abs/1802.02195,https://aaai.org/ojs/index.php/AAAI/article/view/4412},
 month = {7},
 publisher = {AAAI Press},
 day = {17},
 city = {Honolulu, HI, USA},
 notes = {Acceptance rate: 0.16},
 abstract = {Knowledge of the importance of input features towards decisions made by machine-learning models is essential to increase our understanding of both the models and the underlying data. Here, we present a new approach to estimating feature importance with neural networks based on the idea of distributing the features of interest among experts in an attentive mixture of experts (AME). AMEs use attentive gating networks trained with a Granger-causal objective to learn to jointly produce accurate predictions as well as estimates of feature importance in a single model. Our experiments show (i) that the feature importance estimates provided by AMEs compare favourably to those provided by state-of-the-art methods, (ii) that AMEs are significantly faster at estimating feature importance than existing methods, and (iii) that the associations discovered by AMEs are consistent with those reported by domain experts.},
 author = {Schwab, Patrick and Miladinovic, Djordje and Karlen, Walter},
 doi = {10.1609/aaai.v33i01.33014846},
 journal = {Proceedings of the AAAI Conference on Artificial Intelligence}
}
