A Survey on Time-Series Pre-Trained Models. Ma, Q., Liu, Z., Zheng, Z., Huang, Z., Zhu, S., Yu, Z., & Kwok, J. T. May, 2023. arXiv:2305.10716 [cs]. Paper · doi · abstract · bibtex. Time-Series Mining (TSM) is an important research area since it shows great potential in practical applications. Deep learning models that rely on massive labeled data have been utilized for TSM successfully. However, constructing a large-scale well-labeled dataset is difficult due to data annotation costs. Recently, Pre-Trained Models have gradually attracted attention in the time series domain due to their remarkable performance in computer vision and natural language processing. In this survey, we provide a comprehensive review of Time-Series Pre-Trained Models (TS-PTMs), aiming to guide the understanding, applying, and studying TS-PTMs. Specifically, we first briefly introduce the typical deep learning models employed in TSM. Then, we give an overview of TS-PTMs according to the pre-training techniques. The main categories we explore include supervised, unsupervised, and self-supervised TS-PTMs. Further, extensive experiments are conducted to analyze the advantages and disadvantages of transfer learning strategies, Transformer-based models, and representative TS-PTMs. Finally, we point out some potential directions of TS-PTMs for future work.
@misc{ma_survey_2023,
	title = {A Survey on Time-Series Pre-Trained Models},
	url = {http://arxiv.org/abs/2305.10716},
	doi = {10.48550/arXiv.2305.10716},
	abstract = {Time-Series Mining (TSM) is an important research area since it shows great potential in practical applications. Deep learning models that rely on massive labeled data have been utilized for TSM successfully. However, constructing a large-scale well-labeled dataset is difficult due to data annotation costs. Recently, Pre-Trained Models have gradually attracted attention in the time series domain due to their remarkable performance in computer vision and natural language processing. In this survey, we provide a comprehensive review of Time-Series Pre-Trained Models (TS-PTMs), aiming to guide the understanding, applying, and studying TS-PTMs. Specifically, we first briefly introduce the typical deep learning models employed in TSM. Then, we give an overview of TS-PTMs according to the pre-training techniques. The main categories we explore include supervised, unsupervised, and self-supervised TS-PTMs. Further, extensive experiments are conducted to analyze the advantages and disadvantages of transfer learning strategies, Transformer-based models, and representative TS-PTMs. Finally, we point out some potential directions of TS-PTMs for future work.},
	urldate = {2023-10-17},
	publisher = {arXiv},
	author = {Ma, Qianli and Liu, Zhen and Zheng, Zhenjing and Huang, Ziyang and Zhu, Siying and Yu, Zhongzhong and Kwok, James T.},
	month = may,
	year = {2023},
	eprint = {2305.10716},
	eprinttype = {arXiv},
	eprintclass = {cs},
	keywords = {Computer Science - Artificial Intelligence, Computer Science - Machine Learning},
}
Downloads: 0
{"_id":"LmrLQKeEyjz3tzniK","bibbaseid":"ma-liu-zheng-huang-zhu-yu-kwok-asurveyontimeseriespretrainedmodels-2023","author_short":["Ma, Q.","Liu, Z.","Zheng, Z.","Huang, Z.","Zhu, S.","Yu, Z.","Kwok, J. T."],"bibdata":{"bibtype":"misc","type":"misc","title":"A Survey on Time-Series Pre-Trained Models","url":"http://arxiv.org/abs/2305.10716","doi":"10.48550/arXiv.2305.10716","abstract":"Time-Series Mining (TSM) is an important research area since it shows great potential in practical applications. Deep learning models that rely on massive labeled data have been utilized for TSM successfully. However, constructing a large-scale well-labeled dataset is difficult due to data annotation costs. Recently, Pre-Trained Models have gradually attracted attention in the time series domain due to their remarkable performance in computer vision and natural language processing. In this survey, we provide a comprehensive review of Time-Series Pre-Trained Models (TS-PTMs), aiming to guide the understanding, applying, and studying TS-PTMs. Specifically, we first briefly introduce the typical deep learning models employed in TSM. Then, we give an overview of TS-PTMs according to the pre-training techniques. The main categories we explore include supervised, unsupervised, and self-supervised TS-PTMs. Further, extensive experiments are conducted to analyze the advantages and disadvantages of transfer learning strategies, Transformer-based models, and representative TS-PTMs. 
Finally, we point out some potential directions of TS-PTMs for future work.","urldate":"2023-10-17","publisher":"arXiv","author":[{"propositions":[],"lastnames":["Ma"],"firstnames":["Qianli"],"suffixes":[]},{"propositions":[],"lastnames":["Liu"],"firstnames":["Zhen"],"suffixes":[]},{"propositions":[],"lastnames":["Zheng"],"firstnames":["Zhenjing"],"suffixes":[]},{"propositions":[],"lastnames":["Huang"],"firstnames":["Ziyang"],"suffixes":[]},{"propositions":[],"lastnames":["Zhu"],"firstnames":["Siying"],"suffixes":[]},{"propositions":[],"lastnames":["Yu"],"firstnames":["Zhongzhong"],"suffixes":[]},{"propositions":[],"lastnames":["Kwok"],"firstnames":["James","T."],"suffixes":[]}],"month":"May","year":"2023","note":"arXiv:2305.10716 [cs]","keywords":"Computer Science - Artificial Intelligence, Computer Science - Machine Learning","bibtex":"@misc{ma_survey_2023,\n\ttitle = {A {Survey} on {Time}-{Series} {Pre}-{Trained} {Models}},\n\turl = {http://arxiv.org/abs/2305.10716},\n\tdoi = {10.48550/arXiv.2305.10716},\n\tabstract = {Time-Series Mining (TSM) is an important research area since it shows great potential in practical applications. Deep learning models that rely on massive labeled data have been utilized for TSM successfully. However, constructing a large-scale well-labeled dataset is difficult due to data annotation costs. Recently, Pre-Trained Models have gradually attracted attention in the time series domain due to their remarkable performance in computer vision and natural language processing. In this survey, we provide a comprehensive review of Time-Series Pre-Trained Models (TS-PTMs), aiming to guide the understanding, applying, and studying TS-PTMs. Specifically, we first briefly introduce the typical deep learning models employed in TSM. Then, we give an overview of TS-PTMs according to the pre-training techniques. The main categories we explore include supervised, unsupervised, and self-supervised TS-PTMs. 
Further, extensive experiments are conducted to analyze the advantages and disadvantages of transfer learning strategies, Transformer-based models, and representative TS-PTMs. Finally, we point out some potential directions of TS-PTMs for future work.},\n\turldate = {2023-10-17},\n\tpublisher = {arXiv},\n\tauthor = {Ma, Qianli and Liu, Zhen and Zheng, Zhenjing and Huang, Ziyang and Zhu, Siying and Yu, Zhongzhong and Kwok, James T.},\n\tmonth = may,\n\tyear = {2023},\n\tnote = {arXiv:2305.10716 [cs]},\n\tkeywords = {Computer Science - Artificial Intelligence, Computer Science - Machine Learning},\n}\n\n\n\n\n\n\n\n","author_short":["Ma, Q.","Liu, Z.","Zheng, Z.","Huang, Z.","Zhu, S.","Yu, Z.","Kwok, J. T."],"key":"ma_survey_2023","id":"ma_survey_2023","bibbaseid":"ma-liu-zheng-huang-zhu-yu-kwok-asurveyontimeseriespretrainedmodels-2023","role":"author","urls":{"Paper":"http://arxiv.org/abs/2305.10716"},"keyword":["Computer Science - Artificial Intelligence","Computer Science - Machine Learning"],"metadata":{"authorlinks":{}},"html":""},"bibtype":"misc","biburl":"https://bibbase.org/zotero/mh_lenguyen","dataSources":["iwKepCrWBps7ojhDx"],"keywords":["computer science - artificial intelligence","computer science - machine learning"],"search_terms":["survey","time","series","pre","trained","models","ma","liu","zheng","huang","zhu","yu","kwok"],"title":"A Survey on Time-Series Pre-Trained Models","year":2023}