Breaking the Limits of Message Passing Graph Neural Networks. Balcilar, M., Héroux, P., Gaüzère, B., Vasseur, P., Adam, S., & Honeine, P. In Meila, M. & Zhang, T., editors, Proceedings of the 38th International Conference on Machine Learning (ICML), volume 139 of Proceedings of Machine Learning Research, pages 599–608, Vienna, Austria, 18 - 24 July, 2021. PMLR. Paper Link Slides Poster Video abstract bibtex Since the Message Passing (Graph) Neural Networks (MPNNs) have a linear complexity with respect to the number of nodes when applied to sparse graphs, they have been widely implemented and still raise a lot of interest even though their theoretical expressive power is limited to the first order Weisfeiler-Lehman test (1-WL). In this paper, we show that if the graph convolution supports are designed in spectral-domain by a non-linear custom function of eigenvalues and masked with an arbitrary large receptive field, the MPNN is theoretically more powerful than the 1-WL test, experimentally as powerful as a 3-WL existing model and is spatially localized. Moreover, by designing custom filter functions, outputs can have various frequency components that allow the convolution process to learn different relationships between given input graph signals and their associated properties. So far, the best 3-WL equivalent graph neural networks have a computational complexity in $\mathcal{O}(n^3)$ with memory usage in $\mathcal{O}(n^2)$, consider non-local update mechanism and do not provide the spectral richness of output profile. The proposed method overcomes all these aforementioned problems and reaches state-of-the-art results in many downstream tasks.
@INPROCEEDINGS{21.icml.gnn,
title={Breaking the Limits of Message Passing Graph Neural Networks},
author={Muhammet Balcilar and Pierre H{\'e}roux and Benoît Ga{\"u}z{\`e}re and Pascal Vasseur and S{\'e}bastien Adam and Paul Honeine},
booktitle={Proceedings of the 38th International Conference on Machine Learning (ICML)},
pages = "599--608",
editor = "Meila, Marina and Zhang, Tong",
address = "Vienna, Austria",
series = "Proceedings of Machine Learning Research",
publisher = {PMLR},
volume = "139",
year = "2021",
month = "18 - 24~" # jul,
acronym = "ICML",
url_paper = "http://honeine.fr/paul/publi/21.icml.gnn.pdf",
url_link = "http://proceedings.mlr.press/v139/balcilar21a.html",
url_slides = "http://honeine.fr/paul/publi/21.icml.gnn_slides.pdf",
url_poster = "http://honeine.fr/paul/publi/21.icml.gnn_poster.pdf",
url_video = "https://icml.cc/virtual/2021/spotlight/8578",
abstract = "Since the Message Passing (Graph) Neural Networks (MPNNs) have a linear complexity with respect to the number of nodes when applied to sparse graphs, they have been widely implemented and still raise a lot of interest even though their theoretical expressive power is limited to the first order Weisfeiler-Lehman test (1-WL). In this paper, we show that if the graph convolution supports are designed in spectral-domain by a non-linear custom function of eigenvalues and masked with an arbitrary large receptive field, the MPNN is theoretically more powerful than the 1-WL test, experimentally as powerful as a 3-WL existing model and is spatially localized. Moreover, by designing custom filter functions, outputs can have various frequency components that allow the convolution process to learn different relationships between given input graph signals and their associated properties.
So far, the best 3-WL equivalent graph neural networks have a computational complexity in $\mathcal{O}(n^3)$ with memory usage in $\mathcal{O}(n^2)$, consider non-local update mechanism and do not provide the spectral richness of output profile. The proposed method overcomes all these aforementioned problems and reaches state-of-the-art results in many downstream tasks.",
}
Downloads: 0
{"_id":"8LxRZKuHHP7ZtBRSg","bibbaseid":"balcilar-hroux-gazre-vasseur-adam-honeine-breakingthelimitsofmessagepassinggraphneuralnetworks-2021","author_short":["Balcilar, M.","Héroux, P.","Gaüzère, B.","Vasseur, P.","Adam, S.","Honeine, P."],"bibdata":{"bibtype":"inproceedings","type":"inproceedings","title":"Breaking the Limits of Message Passing Graph Neural Networks","author":[{"firstnames":["Muhammet"],"propositions":[],"lastnames":["Balcilar"],"suffixes":[]},{"firstnames":["Pierre"],"propositions":[],"lastnames":["Héroux"],"suffixes":[]},{"firstnames":["Benoît"],"propositions":[],"lastnames":["Gaüzère"],"suffixes":[]},{"firstnames":["Pascal"],"propositions":[],"lastnames":["Vasseur"],"suffixes":[]},{"firstnames":["Sébastien"],"propositions":[],"lastnames":["Adam"],"suffixes":[]},{"firstnames":["Paul"],"propositions":[],"lastnames":["Honeine"],"suffixes":[]}],"booktitle":"Proceedings of the 38th International Conference on Machine Learning (ICML)","pages":"599–608","editor":[{"propositions":[],"lastnames":["Meila"],"firstnames":["Marina"],"suffixes":[]},{"propositions":[],"lastnames":["Zhang"],"firstnames":["Tong"],"suffixes":[]}],"address":"Vienna, Austria","series":"Proceedings of Machine Learning Research","publisher":"PMLR","volume":"139","year":"2021","month":"18 - 24 July","acronym":"ICML","url_paper":"http://honeine.fr/paul/publi/21.icml.gnn.pdf","url_link":"http://proceedings.mlr.press/v139/balcilar21a.html","url_slides":"http://honeine.fr/paul/publi/21.icml.gnn_slides.pdf","url_poster":"http://honeine.fr/paul/publi/21.icml.gnn_poster.pdf","url_video":"https://icml.cc/virtual/2021/spotlight/8578","abstract":"Since the Message Passing (Graph) Neural Networks (MPNNs) have a linear complexity with respect to the number of nodes when applied to sparse graphs, they have been widely implemented and still raise a lot of interest even though their theoretical expressive power is limited to the first order Weisfeiler-Lehman test (1-WL). In this paper, we show that if the graph convolution supports are designed in spectral-domain by a non-linear custom function of eigenvalues and masked with an arbitrary large receptive field, the MPNN is theoretically more powerful than the 1-WL test, experimentally as powerful as a 3-WL existing model and is spatially localized. Moreover, by designing custom filter functions, outputs can have various frequency components that allow the convolution process to learn different relationships between given input graph signals and their associated properties. So far, the best 3-WL equivalent graph neural networks have a computational complexity in $\\mathcal{O}(n^3)$ with memory usage in $\\mathcal{O}(n^2)$, consider non-local update mechanism and do not provide the spectral richness of output profile. The proposed method overcomes all these aforementioned problems and reaches state-of-the-art results in many downstream tasks.","bibtex":"@INPROCEEDINGS{21.icml.gnn,\n title={Breaking the Limits of Message Passing Graph Neural Networks},\n author={Muhammet Balcilar and Pierre H{\\'e}roux and Benoît Ga{\\\"u}z{\\`e}re and Pascal Vasseur and S{\\'e}bastien Adam and Paul Honeine},\n booktitle={Proceedings of the 38th International Conference on Machine Learning (ICML)},\n pages = \"599--608\",\n editor = \t\"Meila, Marina and Zhang, Tong\",\n address = \"Vienna, Austria\",\n series = \"Proceedings of Machine Learning Research\",\n publisher = {PMLR},\n volume = \"139\",\n year = \"2021\",\n month = \"18 - 24~\" # jul,\n acronym = \"ICML\",\n url_paper = \"http://honeine.fr/paul/publi/21.icml.gnn.pdf\",\n url_link = \"http://proceedings.mlr.press/v139/balcilar21a.html\",\n url_slides = \"http://honeine.fr/paul/publi/21.icml.gnn_slides.pdf\",\n url_poster = \"http://honeine.fr/paul/publi/21.icml.gnn_poster.pdf\",\n url_video = \"https://icml.cc/virtual/2021/spotlight/8578\",\n abstract = \"Since the Message Passing (Graph) Neural Networks (MPNNs) have a linear complexity with respect to the number of nodes when applied to sparse graphs, they have been widely implemented and still raise a lot of interest even though their theoretical expressive power is limited to the first order Weisfeiler-Lehman test (1-WL). In this paper, we show that if the graph convolution supports are designed in spectral-domain by a non-linear custom function of eigenvalues and masked with an arbitrary large receptive field, the MPNN is theoretically more powerful than the 1-WL test, experimentally as powerful as a 3-WL existing model and is spatially localized. Moreover, by designing custom filter functions, outputs can have various frequency components that allow the convolution process to learn different relationships between given input graph signals and their associated properties.\nSo far, the best 3-WL equivalent graph neural networks have a computational complexity in $\\mathcal{O}(n^3)$ with memory usage in $\\mathcal{O}(n^2)$, consider non-local update mechanism and do not provide the spectral richness of output profile. The proposed method overcomes all these aforementioned problems and reaches state-of-the-art results in many downstream tasks.\",\n}\n\n\n\n","author_short":["Balcilar, M.","Héroux, P.","Gaüzère, B.","Vasseur, P.","Adam, S.","Honeine, P."],"editor_short":["Meila, M.","Zhang, T."],"key":"21.icml.gnn","id":"21.icml.gnn","bibbaseid":"balcilar-hroux-gazre-vasseur-adam-honeine-breakingthelimitsofmessagepassinggraphneuralnetworks-2021","role":"author","urls":{" paper":"http://honeine.fr/paul/publi/21.icml.gnn.pdf"," link":"http://proceedings.mlr.press/v139/balcilar21a.html"," slides":"http://honeine.fr/paul/publi/21.icml.gnn_slides.pdf"," poster":"http://honeine.fr/paul/publi/21.icml.gnn_poster.pdf"," video":"https://icml.cc/virtual/2021/spotlight/8578"},"metadata":{"authorlinks":{}},"html":""},"bibtype":"inproceedings","biburl":"http://honeine.fr/paul/biblio_ph.bib","dataSources":["DsERGQxgYm5hGq3CY"],"keywords":[],"search_terms":["breaking","limits","message","passing","graph","neural","networks","balcilar","héroux","gaüzère","vasseur","adam","honeine"],"title":"Breaking the Limits of Message Passing Graph Neural Networks","year":2021}