Large-scale foundation models and generative AI for BigData neuroscience. Wang, R. & Chen, Z. S. Neuroscience Research, June 2024.
Abstract: Recent advances in machine learning have led to revolutionary breakthroughs in computer games, image and natural language understanding, and scientific discovery. Foundation models and large-scale language models (LLMs) have recently achieved human-like intelligence thanks to BigData. With the help of self-supervised learning (SSL) and transfer learning, these models may potentially reshape the landscapes of neuroscience research and make a significant impact on the future. Here we present a mini-review on recent advances in foundation models and generative AI models as well as their applications in neuroscience, including natural language and speech, semantic memory, brain-machine interfaces (BMIs), and data augmentation. We argue that this paradigm-shift framework will open new avenues for many neuroscience research directions and discuss the accompanying challenges and opportunities.
@article{wang_large-scale_2024,
title = {Large-scale foundation models and generative {AI} for {BigData} neuroscience},
issn = {0168-0102},
url = {https://www.sciencedirect.com/science/article/pii/S0168010224000750},
doi = {10.1016/j.neures.2024.06.003},
abstract = {Recent advances in machine learning have led to revolutionary breakthroughs in computer games, image and natural language understanding, and scientific discovery. Foundation models and large-scale language models (LLMs) have recently achieved human-like intelligence thanks to BigData. With the help of self-supervised learning (SSL) and transfer learning, these models may potentially reshape the landscapes of neuroscience research and make a significant impact on the future. Here we present a mini-review on recent advances in foundation models and generative AI models as well as their applications in neuroscience, including natural language and speech, semantic memory, brain-machine interfaces (BMIs), and data augmentation. We argue that this paradigm-shift framework will open new avenues for many neuroscience research directions and discuss the accompanying challenges and opportunities.},
urldate = {2025-01-06},
journal = {Neuroscience Research},
author = {Wang, Ran and Chen, Zhe Sage},
month = jun,
year = {2024},
keywords = {BigData, Brain-machine interface, Embedding, Foundation model, Generative AI, Representation learning, Self-supervised learning, Transfer learning, Transformer},
}