When forgetting fosters learning: A neural network model for Statistical Learning. Endress, A. D. & Johnson, S. P. *Cognition*, 2021. Corrigendum Paper Supplementary text doi abstract bibtex 4 downloads Learning often requires splitting continuous signals into recurring units, such as the discrete words constituting fluent speech; these units then need to be encoded in memory. A prominent candidate mechanism involves statistical learning of co-occurrence statistics like transitional probabilities (TPs), reflecting the idea that items from the same unit (e.g., syllables within a word) predict each other better than items from different units. TP computations are surprisingly flexible and sophisticated. Humans are sensitive to forward and backward TPs, compute TPs between adjacent items and longer-distance items, and even recognize TPs in novel units. We explain these hallmarks of statistical learning with a simple model with tunable, Hebbian excitatory connections and inhibitory interactions controlling the overall activation. With weak forgetting, activations are long-lasting, yielding associations among all items; with strong forgetting, no associations ensue as activations do not outlast stimuli; with intermediate forgetting, the network reproduces the hallmarks above. Forgetting thus is a key determinant of these sophisticated learning abilities. Further, in line with earlier dissociations between statistical learning and memory encoding, our model reproduces the hallmarks of statistical learning in the absence of a memory store in which items could be placed.

@article{Endress-TP-Model,
  author                 = {Endress, Ansgar D. and Johnson, S. P.},
  journal                = {Cognition},
  title                  = {When Forgetting Fosters Learning: A Neural Network Model for Statistical Learning},
  year                   = {2021},
  volume                 = {213},
  pages                  = {104621},
  abstract               = {Learning often requires splitting continuous signals into recurring units, such as the discrete words constituting fluent speech; these units then need to be encoded in memory. A prominent candidate mechanism involves statistical learning of co-occurrence statistics like transitional probabilities (TPs), reflecting the idea that items from the same unit (e.g., syllables within a word) predict each other better than items from different units. TP computations are surprisingly flexible and sophisticated. Humans are sensitive to forward and backward TPs, compute TPs between adjacent items and longer-distance items, and even recognize TPs in novel units. We explain these hallmarks of statistical learning with a simple model with tunable, Hebbian excitatory connections and inhibitory interactions controlling the overall activation. With weak forgetting, activations are long-lasting, yielding associations among all items; with strong forgetting, no associations ensue as activations do not outlast stimuli; with intermediate forgetting, the network reproduces the hallmarks above. Forgetting thus is a key determinant of these sophisticated learning abilities. Further, in line with earlier dissociations between statistical learning and memory encoding, our model reproduces the hallmarks of statistical learning in the absence of a memory store in which items could be placed.},
  doi                    = {10.1016/j.cognition.2021.104621},
  groups                 = {Own papers},
  internal-note          = {review: 104621 is the Elsevier article number (matches the DOI suffix), moved from volume to pages; bound volume is 213 -- confirm against the publisher record},
  url_corrigendum        = {https://endress.org/publications/endress_tp_model_reply_tw.pdf},
  url_paper              = {https://endress.org/publications/endress_tp_model.pdf},
  url_supplementary_text = {https://endress.org/publications/endress_tp_model_sm.pdf},
}

Downloads: 4

{"_id":"a7eHfBXSushLcL8eL","bibbaseid":"endress-johnson-whenforgettingfosterslearninganeuralnetworkmodelforstatisticallearning-2021","author_short":["Endress, A. D","Johnson, S P"],"bibdata":{"bibtype":"article","type":"article","author":[{"propositions":[],"lastnames":["Endress"],"firstnames":["Ansgar","D"],"suffixes":[]},{"firstnames":["S","P"],"propositions":[],"lastnames":["Johnson"],"suffixes":[]}],"journal":"Cognition","title":"When forgetting fosters learning: A neural network model for Statistical Learning","year":"2021","volume":"104621","abstract":"Learning often requires splitting continuous signals into recurring units, such as the discrete words constituting fluent speech; these units then need to be encoded in memory. A prominent candidate mechanism involves statistical learning of co-occurrence statistics like transitional probabilities (TPs), reflecting the idea that items from the same unit (e.g., syllables within a word) predict each other better than items from different units. TP computations are surprisingly flexible and sophisticated. Humans are sensitive to forward and backward TPs, compute TPs between adjacent items and longer-distance items, and even recognize TPs in novel units. We explain these hallmarks of statistical learning with a simple model with tunable, Hebbian excitatory connections and inhibitory interactions controlling the overall activation. With weak forgetting, activations are long-lasting, yielding associations among all items; with strong forgetting, no associations ensue as activations do not outlast stimuli; with intermediate forgetting, the network reproduces the hallmarks above. Forgetting thus is a key determinant of these sophisticated learning abilities. 
Further, in line with earlier dissociations between statistical learning and memory encoding, our model reproduces the hallmarks of statistical learning in the absence of a memory store in which items could be placed.","doi":"10.1016/j.cognition.2021.104621","groups":"Own papers","url_corrigendum":"https://endress.org/publications/endress_tp_model_reply_tw.pdf","url_paper":"https://endress.org/publications/endress_tp_model.pdf","url_supplementary_text":"https://endress.org/publications/endress_tp_model_sm.pdf","bibtex":"@Article{Endress-TP-Model,\n author = {Endress, Ansgar D and S P Johnson},\n journal = {Cognition},\n title = {When forgetting fosters learning: A neural network model for Statistical Learning},\n year = {2021},\n volume = {104621},\n abstract = {Learning often requires splitting continuous signals into recurring units, such as the discrete words constituting fluent speech; these units then need to be encoded in memory. A prominent candidate mechanism involves statistical learning of co-occurrence statistics like transitional probabilities (TPs), reflecting the idea that items from the same unit (e.g., syllables within a word) predict each other better than items from different units. TP computations are surprisingly flexible and sophisticated. Humans are sensitive to forward and backward TPs, compute TPs between adjacent items and longer-distance items, and even recognize TPs in novel units. We explain these hallmarks of statistical learning with a simple model with tunable, Hebbian excitatory connections and inhibitory interactions controlling the overall activation. With weak forgetting, activations are long-lasting, yielding associations among all items; with strong forgetting, no associations ensue as activations do not outlast stimuli; with intermediate forgetting, the network reproduces the hallmarks above. Forgetting thus is a key determinant of these sophisticated learning abilities. 
Further, in line with earlier dissociations between statistical learning and memory encoding, our model reproduces the hallmarks of statistical learning in the absence of a memory store in which items could be placed.},\n doi = {10.1016/j.cognition.2021.104621},\n groups = {Own papers},\n url_corrigendum = {https://endress.org/publications/endress_tp_model_reply_tw.pdf},\n url_paper = {https://endress.org/publications/endress_tp_model.pdf},\n url_supplementary_text = {https://endress.org/publications/endress_tp_model_sm.pdf},\n}\n\n","author_short":["Endress, A. D","Johnson, S P"],"key":"Endress-TP-Model","id":"Endress-TP-Model","bibbaseid":"endress-johnson-whenforgettingfosterslearninganeuralnetworkmodelforstatisticallearning-2021","role":"author","urls":{" corrigendum":"https://endress.org/publications/endress_tp_model_reply_tw.pdf"," paper":"https://endress.org/publications/endress_tp_model.pdf"," supplementary text":"https://endress.org/publications/endress_tp_model_sm.pdf"},"metadata":{"authorlinks":{}},"downloads":4,"html":""},"bibtype":"article","biburl":"http://endress.org/publications/ansgar.bib","dataSources":["xE7CSwEa7agRkLBX4","erKcjL263nRKd8djG","xPGxHAeh3vZpx4yyE"],"keywords":[],"search_terms":["forgetting","fosters","learning","neural","network","model","statistical","learning","endress","johnson"],"title":"When forgetting fosters learning: A neural network model for Statistical Learning","year":2021,"downloads":4}