Generalisation Bounds for Kernel PCA through PAC-Bayes Learning. Haddouche, M., Guedj, B., & Shawe-Taylor, J. Stat, 2024. Paper Arxiv Pdf doi abstract bibtex 9 downloads Principal component analysis (PCA) is a popular method for dimension reduction and has attracted an unfailing interest for decades. More recently, kernel PCA (KPCA) has emerged as an extension of PCA, but despite its use in practice, a sound theoretical understanding of KPCA is missing. We contribute several empirical generalisation bounds on the efficiency of KPCA, involving the empirical eigenvalues of the kernel Gram matrix. Our bounds are derived through the use of probably approximately correct (PAC)-Bayes theory and highlight the importance of some desirable properties of datasets, expressed as variance-typed terms, to attain fast rates, achievable for a wide class of kernels.
@article{haddouche2020upper,
title={Generalisation Bounds for {Kernel PCA} through {PAC-Bayes} Learning},
author={Maxime Haddouche and Benjamin Guedj and John Shawe-Taylor},
year={2024},
journal = {Stat},
abstract = {Principal component analysis (PCA) is a popular method for dimension reduction and has attracted an unfailing interest for decades. More recently, kernel PCA (KPCA) has emerged as an extension of PCA, but despite its use in practice, a sound theoretical understanding of KPCA is missing. We contribute several empirical generalisation bounds on the efficiency of KPCA, involving the empirical eigenvalues of the kernel Gram matrix. Our bounds are derived through the use of probably approximately correct (PAC)-Bayes theory and highlight the importance of some desirable properties of datasets, expressed as variance-typed terms, to attain fast rates, achievable for a wide class of kernels.},
volume = {13},
number = {4},
url = {https://onlinelibrary.wiley.com/doi/10.1002/sta4.719},
url_arXiv = {https://arxiv.org/abs/2012.10369},
url_PDF = {https://arxiv.org/pdf/2012.10369.pdf},
doi = {10.1002/sta4.719},
eprint={2012.10369},
archivePrefix={arXiv},
primaryClass={cs.LG},
keywords={mine}
}
Downloads: 9
{"_id":"47kiubzm6s7y94oub","bibbaseid":"haddouche-guedj-shawetaylor-generalisationboundsforkernelpcathroughpacbayeslearning-2024","author_short":["Haddouche, M.","Guedj, B.","Shawe-Taylor, J."],"bibdata":{"bibtype":"article","type":"article","title":"Generalisation Bounds for Kernel PCA through PAC-Bayes Learning","author":[{"firstnames":["Maxime"],"propositions":[],"lastnames":["Haddouche"],"suffixes":[]},{"firstnames":["Benjamin"],"propositions":[],"lastnames":["Guedj"],"suffixes":[]},{"firstnames":["John"],"propositions":[],"lastnames":["Shawe-Taylor"],"suffixes":[]}],"year":"2024","journal":"Stat","abstract":"Principal component analysis(PCA) is a popular method for dimension reduction and has attracted an unfailing interest for decades. More recently, kernel PCA (KPCA) has emerged as an extension of PCA, but despite its use in practice, a sound theoretical understanding of KPCA is missing. We contribute several empirical generalisation bounds on the efficiency of KPCA, involving the empirical eigenvalues of the kernel Gram matrix. Our bounds are derived through the use of probably approximately correct (PAC)-Bayes theory and highlight the importance of some desirable properties of datasets, expressed as variance-typed terms, to attain fast rates, achievable for a wide class of kernels.","volume":"13","number":"4","url":"https://onlinelibrary.wiley.com/doi/10.1002/sta4.719","url_arxiv":"https://arxiv.org/abs/2012.10369","url_pdf":"https://arxiv.org/pdf/2012.10369.pdf","doi":"10.1002/sta4.719","eprint":"2012.10369","archiveprefix":"arXiv","primaryclass":"cs.LG","keywords":"mine","bibtex":"@article{haddouche2020upper,\ntitle={Generalisation Bounds for {Kernel PCA} through {PAC-Bayes} Learning}, \nauthor={Maxime Haddouche and Benjamin Guedj and John Shawe-Taylor},\nyear={2024},\njournal ={Stat},\nabstract = {Principal component analysis(PCA) is a popular method for dimension reduction and has attracted an unfailing interest for decades. \nMore recently, kernel PCA (KPCA) has emerged as an extension of PCA, but despite its use in practice, a sound theoretical understanding of KPCA is missing. We contribute several empirical generalisation bounds on the efficiency of KPCA, involving the empirical eigenvalues of the kernel Gram matrix. Our bounds are derived through the use of probably approximately correct (PAC)-Bayes theory and highlight the importance of some desirable properties of datasets, expressed as variance-typed terms, to attain fast rates, achievable for a wide class of kernels.},\nvolume = {13},\nnumber = {4},\nurl = {https://onlinelibrary.wiley.com/doi/10.1002/sta4.719},\nurl_arXiv = {https://arxiv.org/abs/2012.10369},\nurl_PDF = {https://arxiv.org/pdf/2012.10369.pdf},\ndoi = {10.1002/sta4.719},\neprint={2012.10369},\narchivePrefix={arXiv},\nprimaryClass={cs.LG},\nkeywords={mine}\n}\n\n","author_short":["Haddouche, M.","Guedj, B.","Shawe-Taylor, J."],"key":"haddouche2020upper","id":"haddouche2020upper","bibbaseid":"haddouche-guedj-shawetaylor-generalisationboundsforkernelpcathroughpacbayeslearning-2024","role":"author","urls":{"Paper":"https://onlinelibrary.wiley.com/doi/10.1002/sta4.719"," arxiv":"https://arxiv.org/abs/2012.10369"," pdf":"https://arxiv.org/pdf/2012.10369.pdf"},"keyword":["mine"],"metadata":{"authorlinks":{}},"downloads":9,"html":""},"bibtype":"article","biburl":"https://bguedj.github.io/files/bguedj-publications.bib","dataSources":["suE7RgYeZEnSYr5Fy"],"keywords":["mine"],"search_terms":["generalisation","bounds","kernel","pca","through","pac","bayes","learning","haddouche","guedj","shawe-taylor"],"title":"Generalisation Bounds for Kernel PCA through PAC-Bayes Learning","year":2024,"downloads":9}