Links: Paper | DOI | Abstract | BibTeX

Machine learning algorithms based on parametrized quantum circuits are prime candidates for near-term applications on noisy quantum computers. In this direction, various types of quantum machine learning models have been introduced and studied extensively. Yet, our understanding of how these models compare, both mutually and to classical models, remains limited. In this work, we identify a constructive framework that captures all standard models based on parametrized quantum circuits: that of linear quantum models. In particular, we show using tools from quantum information theory how data re-uploading circuits, an apparent outlier of this framework, can be efficiently mapped into the simpler picture of linear models in quantum Hilbert spaces. Furthermore, we analyze the experimentally-relevant resource requirements of these models in terms of qubit number and amount of data needed to learn. Based on recent results from classical machine learning, we prove that linear quantum models must utilize exponentially more qubits than data re-uploading models in order to solve certain learning tasks, while kernel methods additionally require exponentially more data points. Our results provide a more comprehensive view of quantum machine learning models as well as insights on the compatibility of different models with NISQ constraints.

@article{jerbi_quantum_2023,
  title         = {Quantum Machine Learning Beyond Kernel Methods},
  author        = {Jerbi, Sofiene and Fiderer, Lukas J. and Nautrup, Hendrik Poulsen and K{\"u}bler, Jonas M. and Briegel, Hans J. and Dunjko, Vedran},
  journal       = {Nature Communications},
  volume        = {14},
  number        = {1},
  pages         = {517},
  month         = jan,
  year          = {2023},
  issn          = {2041-1723},
  doi           = {10.1038/s41467-023-36159-y},
  eprint        = {2110.13162},
  archiveprefix = {arXiv},
  primaryclass  = {quant-ph},
  url           = {https://arxiv.org/abs/2110.13162},
  urldate       = {2023-07-07},
  language      = {en},
  keywords      = {Quantum Physics, Computer Science - Machine Learning, Statistics - Machine Learning, Computer Science - Artificial Intelligence},
  abstract      = {Machine learning algorithms based on parametrized quantum circuits are prime candidates for near-term applications on noisy quantum computers. In this direction, various types of quantum machine learning models have been introduced and studied extensively. Yet, our understanding of how these models compare, both mutually and to classical models, remains limited. In this work, we identify a constructive framework that captures all standard models based on parametrized quantum circuits: that of linear quantum models. In particular, we show using tools from quantum information theory how data re-uploading circuits, an apparent outlier of this framework, can be efficiently mapped into the simpler picture of linear models in quantum Hilbert spaces. Furthermore, we analyze the experimentally-relevant resource requirements of these models in terms of qubit number and amount of data needed to learn. Based on recent results from classical machine learning, we prove that linear quantum models must utilize exponentially more qubits than data re-uploading models in order to solve certain learning tasks, while kernel methods additionally require exponentially more data points. Our results provide a more comprehensive view of quantum machine learning models as well as insights on the compatibility of different models with NISQ constraints.},
  annote        = {Comment: 10+10 pages, 14 figures; significant changes in the main text, corrections in the numerical simulations},
}

Downloads: 0