Embracing Change: Continual Learning in Deep Neural Networks. Hadsell, R., Rao, D., Rusu, A. A., & Pascanu, R. Trends in Cognitive Sciences, 24(12):1028–1040, December 2020. doi:10.1016/j.tics.2020.09.004

Abstract: Artificial intelligence research has seen enormous progress over the past few decades, but it predominantly relies on fixed datasets and stationary environments. Continual learning is an increasingly relevant area of study that asks how artificial systems might learn sequentially, as biological systems do, from a continuous stream of correlated data. In the present review, we relate continual learning to the learning dynamics of neural networks, highlighting the potential it has to considerably improve data efficiency. We further consider the many new biologically inspired approaches that have emerged in recent years, focusing on those that utilize regularization, modularity, memory, and meta-learning, and highlight some of the most promising and impactful directions.
@article{hadsell_embracing_2020,
	title = {Embracing {Change}: {Continual} {Learning} in {Deep} {Neural} {Networks}},
	volume = {24},
	issn = {1364-6613},
	shorttitle = {Embracing {Change}},
	url = {https://www.sciencedirect.com/science/article/pii/S1364661320302199},
	doi = {10.1016/j.tics.2020.09.004},
	abstract = {Artificial intelligence research has seen enormous progress over the past few decades, but it predominantly relies on fixed datasets and stationary environments. Continual learning is an increasingly relevant area of study that asks how artificial systems might learn sequentially, as biological systems do, from a continuous stream of correlated data. In the present review, we relate continual learning to the learning dynamics of neural networks, highlighting the potential it has to considerably improve data efficiency. We further consider the many new biologically inspired approaches that have emerged in recent years, focusing on those that utilize regularization, modularity, memory, and meta-learning, and highlight some of the most promising and impactful directions.},
	language = {en},
	number = {12},
	urldate = {2023-03-05},
	journal = {Trends in Cognitive Sciences},
	author = {Hadsell, Raia and Rao, Dushyant and Rusu, Andrei A. and Pascanu, Razvan},
	month = dec,
	year = {2020},
	keywords = {artificial intelligence, lifelong, memory, meta-learning, non-stationary},
	pages = {1028--1040},
}