A Comprehensive Survey on Graph Neural Networks.
Wu, Z.; Pan, S.; Chen, F.; Long, G.; Zhang, C.; and Yu, P. S.
2019.
arXiv:1901.00596. Comment: Minor revision (updated tables and references)
Paper
doi
link
bibtex
abstract
@misc{wu2019comprehensive,
  abstract = {Deep learning has revolutionized many machine learning tasks in recent years, ranging from image classification and video processing to speech recognition and natural language understanding. The data in these tasks are typically represented in the Euclidean space. However, there is an increasing number of applications where data are generated from non-Euclidean domains and are represented as graphs with complex relationships and interdependency between objects. The complexity of graph data has imposed significant challenges on existing machine learning algorithms. Recently, many studies on extending deep learning approaches for graph data have emerged. In this survey, we provide a comprehensive overview of graph neural networks (GNNs) in data mining and machine learning fields. We propose a new taxonomy to divide the state-of-the-art graph neural networks into four categories, namely recurrent graph neural networks, convolutional graph neural networks, graph autoencoders, and spatial-temporal graph neural networks. We further discuss the applications of graph neural networks across various domains and summarize the open source codes, benchmark data sets, and model evaluation of graph neural networks. Finally, we propose potential research directions in this rapidly growing field.},
  added-at = {2021-06-20T23:53:41.000+0200},
  archiveprefix = {arXiv},
  author = {Wu, Zonghan and Pan, Shirui and Chen, Fengwen and Long, Guodong and Zhang, Chengqi and Yu, Philip S.},
  biburl = {https://www.bibsonomy.org/bibtex/2bb7e3fc5967186532b1d7582f08df93f/kherud},
  description = {A Comprehensive Survey on Graph Neural Networks},
  doi = {10.1109/TNNLS.2020.2978386},
  eprint = {1901.00596},
  interhash = {e6df299a7965fc0158ecd71e6d922246},
  internal-note = {Same citation key appears multiple times in this file; deduplicate before use.},
  intrahash = {bb7e3fc5967186532b1d7582f08df93f},
  keywords = {thema:gnn4rec final},
  note = {arXiv:1901.00596. Comment: Minor revision (updated tables and references)},
  timestamp = {2021-06-20T23:53:41.000+0200},
  title = {A Comprehensive Survey on Graph Neural Networks},
  url = {http://arxiv.org/abs/1901.00596},
  year = 2019
}
Deep learning has revolutionized many machine learning tasks in recent years, ranging from image classification and video processing to speech recognition and natural language understanding. The data in these tasks are typically represented in the Euclidean space. However, there is an increasing number of applications where data are generated from non-Euclidean domains and are represented as graphs with complex relationships and interdependency between objects. The complexity of graph data has imposed significant challenges on existing machine learning algorithms. Recently, many studies on extending deep learning approaches for graph data have emerged. In this survey, we provide a comprehensive overview of graph neural networks (GNNs) in data mining and machine learning fields. We propose a new taxonomy to divide the state-of-the-art graph neural networks into four categories, namely recurrent graph neural networks, convolutional graph neural networks, graph autoencoders, and spatial-temporal graph neural networks. We further discuss the applications of graph neural networks across various domains and summarize the open source codes, benchmark data sets, and model evaluation of graph neural networks. Finally, we propose potential research directions in this rapidly growing field.
A Comprehensive Survey on Graph Neural Networks.
Wu, Z.; Pan, S.; Chen, F.; Long, G.; Zhang, C.; and Yu, P. S.
arXiv:1901.00596 [cs, stat]. December 2019.
Paper
link
bibtex
abstract
@article{wu2019comprehensive,
  abstract = {Deep learning has revolutionized many machine learning tasks in recent years, ranging from image classification and video processing to speech recognition and natural language understanding. The data in these tasks are typically represented in the Euclidean space. However, there is an increasing number of applications where data are generated from non-Euclidean domains and are represented as graphs with complex relationships and interdependency between objects. The complexity of graph data has imposed significant challenges on existing machine learning algorithms. Recently, many studies on extending deep learning approaches for graph data have emerged. In this survey, we provide a comprehensive overview of graph neural networks (GNNs) in data mining and machine learning fields. We propose a new taxonomy to divide the state-of-the-art graph neural networks into four categories, namely recurrent graph neural networks, convolutional graph neural networks, graph autoencoders and spatial-temporal graph neural networks. We further discuss the applications of graph neural networks across various domains and summarize the open source codes and benchmarks of the existing algorithms on different learning tasks. Finally, we propose potential research directions in this rapidly growing field.},
  added-at = {2021-05-17T20:57:46.000+0200},
  archiveprefix = {arXiv},
  author = {Wu, Zonghan and Pan, Shirui and Chen, Fengwen and Long, Guodong and Zhang, Chengqi and Yu, Philip S.},
  biburl = {https://www.bibsonomy.org/bibtex/26e2f75da0827c0288df78183ce6255c0/nilsd},
  eprint = {1901.00596},
  file = {Wu et al - A Comprehensive Survey on Graph Neural Networks.pdf:C\:\\Users\\Admin\\Documents\\Research\\_Paperbase\\Graph Embeddings\\Wu et al - A Comprehensive Survey on Graph Neural Networks.pdf:application/pdf},
  interhash = {e6df299a7965fc0158ecd71e6d922246},
  internal-note = {arXiv preprint; identifier moved to eprint/archiveprefix. Same citation key appears multiple times in this file; deduplicate before use.},
  intrahash = {6e2f75da0827c0288df78183ce6255c0},
  journal = {arXiv:1901.00596 [cs, stat]},
  keywords = {thema:defend final},
  language = {en},
  month = dec,
  timestamp = {2021-05-17T20:57:46.000+0200},
  title = {A {Comprehensive} {Survey} on {Graph} {Neural} {Networks}},
  url = {http://arxiv.org/abs/1901.00596},
  urldate = {2019-12-10},
  year = 2019
}
Deep learning has revolutionized many machine learning tasks in recent years, ranging from image classification and video processing to speech recognition and natural language understanding. The data in these tasks are typically represented in the Euclidean space. However, there is an increasing number of applications where data are generated from non-Euclidean domains and are represented as graphs with complex relationships and interdependency between objects. The complexity of graph data has imposed significant challenges on existing machine learning algorithms. Recently, many studies on extending deep learning approaches for graph data have emerged. In this survey, we provide a comprehensive overview of graph neural networks (GNNs) in data mining and machine learning fields. We propose a new taxonomy to divide the state-of-the-art graph neural networks into four categories, namely recurrent graph neural networks, convolutional graph neural networks, graph autoencoders and spatial-temporal graph neural networks. We further discuss the applications of graph neural networks across various domains and summarize the open source codes and benchmarks of the existing algorithms on different learning tasks. Finally, we propose potential research directions in this rapidly growing field.
A Comprehensive Survey on Graph Neural Networks.
Wu, Z.; Pan, S.; Chen, F.; Long, G.; Zhang, C.; and Yu, P. S.
2019.
arXiv:1901.00596. Comment: Minor revision (updated tables and references)
Paper
doi
link
bibtex
abstract
@misc{wu2019comprehensive,
  abstract = {Deep learning has revolutionized many machine learning tasks in recent years, ranging from image classification and video processing to speech recognition and natural language understanding. The data in these tasks are typically represented in the Euclidean space. However, there is an increasing number of applications where data are generated from non-Euclidean domains and are represented as graphs with complex relationships and interdependency between objects. The complexity of graph data has imposed significant challenges on existing machine learning algorithms. Recently, many studies on extending deep learning approaches for graph data have emerged. In this survey, we provide a comprehensive overview of graph neural networks (GNNs) in data mining and machine learning fields. We propose a new taxonomy to divide the state-of-the-art graph neural networks into four categories, namely recurrent graph neural networks, convolutional graph neural networks, graph autoencoders, and spatial-temporal graph neural networks. We further discuss the applications of graph neural networks across various domains and summarize the open source codes, benchmark data sets, and model evaluation of graph neural networks. Finally, we propose potential research directions in this rapidly growing field.},
  added-at = {2021-05-01T14:28:26.000+0200},
  archiveprefix = {arXiv},
  author = {Wu, Zonghan and Pan, Shirui and Chen, Fengwen and Long, Guodong and Zhang, Chengqi and Yu, Philip S.},
  biburl = {https://www.bibsonomy.org/bibtex/2bb7e3fc5967186532b1d7582f08df93f/lukas.heppel},
  description = {A Comprehensive Survey on Graph Neural Networks},
  doi = {10.1109/TNNLS.2020.2978386},
  eprint = {1901.00596},
  interhash = {e6df299a7965fc0158ecd71e6d922246},
  internal-note = {Same citation key appears multiple times in this file; deduplicate before use.},
  intrahash = {bb7e3fc5967186532b1d7582f08df93f},
  keywords = {thema:r_gcn final},
  note = {arXiv:1901.00596. Comment: Minor revision (updated tables and references)},
  timestamp = {2021-06-21T18:40:31.000+0200},
  title = {A Comprehensive Survey on Graph Neural Networks},
  url = {http://arxiv.org/abs/1901.00596},
  year = 2019
}
Deep learning has revolutionized many machine learning tasks in recent years, ranging from image classification and video processing to speech recognition and natural language understanding. The data in these tasks are typically represented in the Euclidean space. However, there is an increasing number of applications where data are generated from non-Euclidean domains and are represented as graphs with complex relationships and interdependency between objects. The complexity of graph data has imposed significant challenges on existing machine learning algorithms. Recently, many studies on extending deep learning approaches for graph data have emerged. In this survey, we provide a comprehensive overview of graph neural networks (GNNs) in data mining and machine learning fields. We propose a new taxonomy to divide the state-of-the-art graph neural networks into four categories, namely recurrent graph neural networks, convolutional graph neural networks, graph autoencoders, and spatial-temporal graph neural networks. We further discuss the applications of graph neural networks across various domains and summarize the open source codes, benchmark data sets, and model evaluation of graph neural networks. Finally, we propose potential research directions in this rapidly growing field.
A Comprehensive Survey on Graph Neural Networks.
Wu, Z.; Pan, S.; Chen, F.; Long, G.; Zhang, C.; and Yu, P. S.
2019.
arXiv:1901.00596. Comment: Minor revision (updated tables and references)
Paper
doi
link
bibtex
abstract
@misc{wu2019comprehensive,
  abstract = {Deep learning has revolutionized many machine learning tasks in recent years, ranging from image classification and video processing to speech recognition and natural language understanding. The data in these tasks are typically represented in the Euclidean space. However, there is an increasing number of applications where data are generated from non-Euclidean domains and are represented as graphs with complex relationships and interdependency between objects. The complexity of graph data has imposed significant challenges on existing machine learning algorithms. Recently, many studies on extending deep learning approaches for graph data have emerged. In this survey, we provide a comprehensive overview of graph neural networks (GNNs) in data mining and machine learning fields. We propose a new taxonomy to divide the state-of-the-art graph neural networks into four categories, namely recurrent graph neural networks, convolutional graph neural networks, graph autoencoders, and spatial-temporal graph neural networks. We further discuss the applications of graph neural networks across various domains and summarize the open source codes, benchmark data sets, and model evaluation of graph neural networks. Finally, we propose potential research directions in this rapidly growing field.},
  added-at = {2020-11-07T03:29:29.000+0100},
  archiveprefix = {arXiv},
  author = {Wu, Zonghan and Pan, Shirui and Chen, Fengwen and Long, Guodong and Zhang, Chengqi and Yu, Philip S.},
  biburl = {https://www.bibsonomy.org/bibtex/2bb7e3fc5967186532b1d7582f08df93f/rikbose},
  description = {A Comprehensive Survey on Graph Neural Networks},
  doi = {10.1109/TNNLS.2020.2978386},
  eprint = {1901.00596},
  interhash = {e6df299a7965fc0158ecd71e6d922246},
  internal-note = {Same citation key appears multiple times in this file; deduplicate before use.},
  intrahash = {bb7e3fc5967186532b1d7582f08df93f},
  keywords = {survey graph-neural-networks},
  note = {arXiv:1901.00596. Comment: Minor revision (updated tables and references)},
  timestamp = {2020-11-07T03:29:29.000+0100},
  title = {A Comprehensive Survey on Graph Neural Networks},
  url = {http://arxiv.org/abs/1901.00596},
  year = 2019
}
Deep learning has revolutionized many machine learning tasks in recent years, ranging from image classification and video processing to speech recognition and natural language understanding. The data in these tasks are typically represented in the Euclidean space. However, there is an increasing number of applications where data are generated from non-Euclidean domains and are represented as graphs with complex relationships and interdependency between objects. The complexity of graph data has imposed significant challenges on existing machine learning algorithms. Recently, many studies on extending deep learning approaches for graph data have emerged. In this survey, we provide a comprehensive overview of graph neural networks (GNNs) in data mining and machine learning fields. We propose a new taxonomy to divide the state-of-the-art graph neural networks into four categories, namely recurrent graph neural networks, convolutional graph neural networks, graph autoencoders, and spatial-temporal graph neural networks. We further discuss the applications of graph neural networks across various domains and summarize the open source codes, benchmark data sets, and model evaluation of graph neural networks. Finally, we propose potential research directions in this rapidly growing field.
Detecting Suicidal Ideation with Data Protection in Online Communities.
Ji, S.; Long, G.; Pan, S.; Zhu, T.; Jiang, J.; and Wang, S.
In Li, G.; Yang, J.; Gama, J.; Natwichai, J.; and Tong, Y., editor(s),
DASFAA (3), volume 11448, of
Lecture Notes in Computer Science, pages 225-229, 2019. Springer
Link
Paper
link
bibtex
@inproceedings{conf/dasfaa/JiLPZ0W19,
  added-at = {2020-10-02T00:00:00.000+0200},
  author = {Ji, Shaoxiong and Long, Guodong and Pan, Shirui and Zhu, Tianqing and Jiang, Jing and Wang, Sen},
  biburl = {https://www.bibsonomy.org/bibtex/2650563e5a277a4af34ff63263e5296e2/dblp},
  booktitle = {DASFAA (3)},
  crossref = {conf/dasfaa/2019w},
  doi = {10.1007/978-3-030-18590-9_17},
  ee = {https://doi.org/10.1007/978-3-030-18590-9_17},
  interhash = {b64af2c0fcf4c1e991d2ce947cca17dc},
  intrahash = {650563e5a277a4af34ff63263e5296e2},
  isbn = {978-3-030-18590-9},
  keywords = {dblp},
  pages = {225--229},
  publisher = {Springer},
  series = {Lecture Notes in Computer Science},
  timestamp = {2020-12-01T11:36:30.000+0100},
  title = {Detecting Suicidal Ideation with Data Protection in Online Communities.},
  url = {http://dblp.uni-trier.de/db/conf/dasfaa/dasfaa2019w.html#JiLPZ0W19},
  volume = 11448,
  year = 2019
}
Decentralized Learning with Average Difference Aggregation for Proactive Online Social Care.
Ji, S.; Long, G.; Pan, S.; Zhu, T.; Jiang, J.; Wang, S.; and Li, X.
CoRR, abs/1905.07665. 2019.
Link
Paper
link
bibtex
@article{journals/corr/abs-1905-07665,
  added-at = {2020-10-02T00:00:00.000+0200},
  archiveprefix = {arXiv},
  author = {Ji, Shaoxiong and Long, Guodong and Pan, Shirui and Zhu, Tianqing and Jiang, Jing and Wang, Sen and Li, Xue},
  biburl = {https://www.bibsonomy.org/bibtex/26f7c251cc4d11c597c7bb601b4fe03f8/dblp},
  ee = {http://arxiv.org/abs/1905.07665},
  eprint = {1905.07665},
  interhash = {ae5269aad89ad2348f25663ee8dd8ee3},
  intrahash = {6f7c251cc4d11c597c7bb601b4fe03f8},
  journal = {CoRR},
  keywords = {dblp},
  timestamp = {2020-10-03T11:42:00.000+0200},
  title = {Decentralized Learning with Average Difference Aggregation for Proactive Online Social Care.},
  url = {http://dblp.uni-trier.de/db/journals/corr/corr1905.html#abs-1905-07665},
  volume = {abs/1905.07665},
  year = 2019
}
Adversarial Action Data Augmentation for Similar Gesture Action Recognition.
Wu, D.; Chen, J.; Sharma, N.; Pan, S.; Long, G.; and Blumenstein, M.
In
IJCNN, pages 1-8, 2019. IEEE
Link
Paper
link
bibtex
@inproceedings{conf/ijcnn/WuCSPLB19,
  added-at = {2020-08-12T00:00:00.000+0200},
  author = {Wu, Di and Chen, Junjun and Sharma, Nabin and Pan, Shirui and Long, Guodong and Blumenstein, Michael},
  biburl = {https://www.bibsonomy.org/bibtex/27c70dd3ec81be2ae96ab052ddc54f3c1/dblp},
  booktitle = {IJCNN},
  crossref = {conf/ijcnn/2019},
  doi = {10.1109/IJCNN.2019.8851993},
  ee = {https://doi.org/10.1109/IJCNN.2019.8851993},
  interhash = {5cfeab2e8141b74608a92676a191a3d0},
  intrahash = {7c70dd3ec81be2ae96ab052ddc54f3c1},
  isbn = {978-1-7281-1985-4},
  keywords = {dblp},
  pages = {1--8},
  publisher = {IEEE},
  timestamp = {2020-08-13T11:58:54.000+0200},
  title = {Adversarial Action Data Augmentation for Similar Gesture Action Recognition.},
  url = {http://dblp.uni-trier.de/db/conf/ijcnn/ijcnn2019.html#WuCSPLB19},
  year = 2019
}
A Comprehensive Survey on Graph Neural Networks.
Wu, Z.; Pan, S.; Chen, F.; Long, G.; Zhang, C.; and Yu, P. S.
arXiv:1901.00596 [cs, stat]. December 2019.
arXiv: 1901.00596
Paper
link
bibtex
abstract
@article{wu_comprehensive_2019,
  abstract = {Deep learning has revolutionized many machine learning tasks in recent years, ranging from image classification and video processing to speech recognition and natural language understanding. The data in these tasks are typically represented in the Euclidean space. However, there is an increasing number of applications where data are generated from non-Euclidean domains and are represented as graphs with complex relationships and interdependency between objects. The complexity of graph data has imposed significant challenges on existing machine learning algorithms. Recently, many studies on extending deep learning approaches for graph data have emerged. In this survey, we provide a comprehensive overview of graph neural networks (GNNs) in data mining and machine learning fields. We propose a new taxonomy to divide the state-of-the-art graph neural networks into four categories, namely recurrent graph neural networks, convolutional graph neural networks, graph autoencoders and spatial-temporal graph neural networks. We further discuss the applications of graph neural networks across various domains and summarize the open source codes and benchmarks of the existing algorithms on different learning tasks. Finally, we propose potential research directions in this rapidly growing field.},
  added-at = {2020-07-14T23:26:07.000+0200},
  archiveprefix = {arXiv},
  author = {Wu, Zonghan and Pan, Shirui and Chen, Fengwen and Long, Guodong and Zhang, Chengqi and Yu, Philip S.},
  biburl = {https://www.bibsonomy.org/bibtex/26e2f75da0827c0288df78183ce6255c0/twagener},
  eprint = {1901.00596},
  file = {Wu et al - A Comprehensive Survey on Graph Neural Networks.pdf:C\:\\Users\\Admin\\Documents\\Research\\_Paperbase\\Graph Embeddings\\Wu et al - A Comprehensive Survey on Graph Neural Networks.pdf:application/pdf},
  interhash = {e6df299a7965fc0158ecd71e6d922246},
  internal-note = {arXiv preprint; identifier moved to eprint/archiveprefix. Same citation key appears multiple times in this file; deduplicate before use.},
  intrahash = {6e2f75da0827c0288df78183ce6255c0},
  journal = {arXiv:1901.00596 [cs, stat]},
  keywords = {thema:graph_cnn final},
  language = {en},
  month = dec,
  note = {arXiv: 1901.00596},
  timestamp = {2020-07-14T23:27:15.000+0200},
  title = {A {Comprehensive} {Survey} on {Graph} {Neural} {Networks}},
  url = {http://arxiv.org/abs/1901.00596},
  urldate = {2019-12-10},
  year = 2019
}
Deep learning has revolutionized many machine learning tasks in recent years, ranging from image classification and video processing to speech recognition and natural language understanding. The data in these tasks are typically represented in the Euclidean space. However, there is an increasing number of applications where data are generated from non-Euclidean domains and are represented as graphs with complex relationships and interdependency between objects. The complexity of graph data has imposed significant challenges on existing machine learning algorithms. Recently, many studies on extending deep learning approaches for graph data have emerged. In this survey, we provide a comprehensive overview of graph neural networks (GNNs) in data mining and machine learning fields. We propose a new taxonomy to divide the state-of-the-art graph neural networks into four categories, namely recurrent graph neural networks, convolutional graph neural networks, graph autoencoders and spatial-temporal graph neural networks. We further discuss the applications of graph neural networks across various domains and summarize the open source codes and benchmarks of the existing algorithms on different learning tasks. Finally, we propose potential research directions in this rapidly growing field.
Domain-Adversarial Graph Neural Networks for Text Classification.
Wu, M.; Pan, S.; Zhu, X.; Zhou, C.; and Pan, L.
In Wang, J.; Shim, K.; and Wu, X., editor(s),
ICDM, pages 648-657, 2019. IEEE
Link
Paper
link
bibtex
@inproceedings{conf/icdm/WuPZZP19,
  added-at = {2020-06-16T00:00:00.000+0200},
  author = {Wu, Man and Pan, Shirui and Zhu, Xingquan and Zhou, Chuan and Pan, Lei},
  biburl = {https://www.bibsonomy.org/bibtex/20c8ec4d341ccdc702769af5c73837029/dblp},
  booktitle = {ICDM},
  crossref = {conf/icdm/2019},
  doi = {10.1109/ICDM.2019.00075},
  editor = {Wang, Jianyong and Shim, Kyuseok and Wu, Xindong},
  ee = {https://doi.org/10.1109/ICDM.2019.00075},
  interhash = {f3f650bfbb6af3b8759cebc7c59917e7},
  intrahash = {0c8ec4d341ccdc702769af5c73837029},
  isbn = {978-1-7281-4604-1},
  keywords = {dblp},
  pages = {648--657},
  publisher = {IEEE},
  timestamp = {2020-06-17T11:54:01.000+0200},
  title = {Domain-Adversarial Graph Neural Networks for Text Classification.},
  url = {http://dblp.uni-trier.de/db/conf/icdm/icdm2019.html#WuPZZP19},
  year = 2019
}
Learning Private Neural Language Modeling with Attentive Aggregation.
Ji, S.; Pan, S.; Long, G.; Li, X.; Jiang, J.; and Huang, Z.
In
IJCNN, pages 1-8, 2019. IEEE
Link
Paper
link
bibtex
@inproceedings{conf/ijcnn/JiPL0JH19,
  added-at = {2020-06-15T00:00:00.000+0200},
  author = {Ji, Shaoxiong and Pan, Shirui and Long, Guodong and Li, Xue and Jiang, Jing and Huang, Zi},
  biburl = {https://www.bibsonomy.org/bibtex/2ca2f39edf8f695bf538e1f02223045fc/dblp},
  booktitle = {IJCNN},
  crossref = {conf/ijcnn/2019},
  doi = {10.1109/IJCNN.2019.8852464},
  ee = {https://doi.org/10.1109/IJCNN.2019.8852464},
  interhash = {e420eea174d1f3d571be953616832cc5},
  intrahash = {ca2f39edf8f695bf538e1f02223045fc},
  isbn = {978-1-7281-1985-4},
  keywords = {dblp},
  pages = {1--8},
  publisher = {IEEE},
  timestamp = {2020-06-16T12:59:55.000+0200},
  title = {Learning Private Neural Language Modeling with Attentive Aggregation.},
  url = {http://dblp.uni-trier.de/db/conf/ijcnn/ijcnn2019.html#JiPL0JH19},
  year = 2019
}
IEEE Access Special Section Editorial: Advanced Data Analytics for Large-Scale Complex Data Environments.
Wu, J.; Pan, S.; Jiang, J.; Cai, Z.; Du, B.; Tian, Y.; Wang, S.; and Wang, H.
IEEE Access, 7: 33778-33786. 2019.
Link
Paper
link
bibtex
@article{journals/access/WuPJCDTWW19,
  added-at = {2020-06-15T00:00:00.000+0200},
  author = {Wu, Jia and Pan, Shirui and Jiang, Junjun and Cai, Zhihua and Du, Bo and Tian, Yingjie and Wang, Shuaiqiang and Wang, Haishuai},
  biburl = {https://www.bibsonomy.org/bibtex/2d8b9d9b82372448d1df26d14c897c6b0/dblp},
  doi = {10.1109/ACCESS.2019.2895440},
  ee = {https://doi.org/10.1109/ACCESS.2019.2895440},
  interhash = {6058fed7be79b54ae536b8cb51877aa4},
  intrahash = {d8b9d9b82372448d1df26d14c897c6b0},
  journal = {IEEE Access},
  keywords = {dblp},
  pages = {33778--33786},
  timestamp = {2020-06-16T11:44:55.000+0200},
  title = {IEEE Access Special Section Editorial: Advanced Data Analytics for Large-Scale Complex Data Environments.},
  url = {http://dblp.uni-trier.de/db/journals/access/access7.html#WuPJCDTWW19},
  volume = 7,
  year = 2019
}
Cost-Sensitive Parallel Learning Framework for Insurance Intelligence Operation.
Jiang, X.; Pan, S.; Long, G.; Xiong, F.; Jiang, J.; and Zhang, C.
IEEE Trans. Ind. Electron., 66(12): 9713-9723. 2019.
Link
Paper
link
bibtex
@article{journals/tie/JiangPLXJZ19,
  added-at = {2020-05-22T00:00:00.000+0200},
  author = {Jiang, Xinxin and Pan, Shirui and Long, Guodong and Xiong, Fei and Jiang, Jing and Zhang, Chengqi},
  biburl = {https://www.bibsonomy.org/bibtex/2469c70db3a8176cf795a414ef5eb2f0d/dblp},
  doi = {10.1109/TIE.2018.2873526},
  ee = {https://doi.org/10.1109/TIE.2018.2873526},
  interhash = {44b3b270cd37182e67ec6a44b83ba9cd},
  intrahash = {469c70db3a8176cf795a414ef5eb2f0d},
  journal = {IEEE Trans. Ind. Electron.},
  keywords = {dblp},
  number = 12,
  pages = {9713--9723},
  timestamp = {2020-05-23T11:49:17.000+0200},
  title = {Cost-Sensitive Parallel Learning Framework for Insurance Intelligence Operation.},
  url = {http://dblp.uni-trier.de/db/journals/tie/tie66.html#JiangPLXJZ19},
  volume = 66,
  year = 2019
}
Hyperspectral Image Classification With Context-Aware Dynamic Graph Convolutional Network.
Wan, S.; Gong, C.; Zhong, P.; Pan, S.; Li, G.; and Yang, J.
CoRR, abs/1909.11953. 2019.
Link
Paper
link
bibtex
@article{journals/corr/abs-1909-11953,
  added-at = {2020-05-14T00:00:00.000+0200},
  archiveprefix = {arXiv},
  author = {Wan, Sheng and Gong, Chen and Zhong, Ping and Pan, Shirui and Li, Guangyu and Yang, Jian},
  biburl = {https://www.bibsonomy.org/bibtex/2e2c50d5a50c5e908829bd6c79435c357/dblp},
  ee = {http://arxiv.org/abs/1909.11953},
  eprint = {1909.11953},
  interhash = {9f7c2804057cddfa73ab03a2b946c2f0},
  intrahash = {e2c50d5a50c5e908829bd6c79435c357},
  journal = {CoRR},
  keywords = {dblp},
  timestamp = {2020-05-15T11:39:13.000+0200},
  title = {Hyperspectral Image Classification With Context-Aware Dynamic Graph Convolutional Network.},
  url = {http://dblp.uni-trier.de/db/journals/corr/corr1909.html#abs-1909-11953},
  volume = {abs/1909.11953},
  year = 2019
}
Time series feature learning with labeled and unlabeled data.
Wang, H.; Zhang, Q.; Wu, J.; Pan, S.; and Chen, Y.
Pattern Recognit., 89: 55-66. 2019.
Link
Paper
link
bibtex
1 download
@article{journals/pr/WangZWPC19,
  added-at = {2020-02-24T00:00:00.000+0100},
  author = {Wang, Haishuai and Zhang, Qin and Wu, Jia and Pan, Shirui and Chen, Yixin},
  biburl = {https://www.bibsonomy.org/bibtex/21d6b569393c268c0aebe8ec86352b228/dblp},
  doi = {10.1016/j.patcog.2018.12.026},
  ee = {https://doi.org/10.1016/j.patcog.2018.12.026},
  interhash = {8a2b08145104d89b1778e67cdcce8d0f},
  intrahash = {1d6b569393c268c0aebe8ec86352b228},
  journal = {Pattern Recognit.},
  keywords = {dblp},
  pages = {55--66},
  timestamp = {2020-02-25T12:11:34.000+0100},
  title = {Time series feature learning with labeled and unlabeled data.},
  url = {http://dblp.uni-trier.de/db/journals/pr/pr89.html#WangZWPC19},
  volume = 89,
  year = 2019
}
A Comprehensive Survey on Graph Neural Networks.
Wu, Z.; Pan, S.; Chen, F.; Long, G.; Zhang, C.; and Yu, P. S.
arXiv:1901.00596 [cs, stat]. December 2019.
arXiv: 1901.00596
Paper
link
bibtex
abstract
@article{wu_comprehensive_2019,
  abstract = {Deep learning has revolutionized many machine learning tasks in recent years, ranging from image classification and video processing to speech recognition and natural language understanding. The data in these tasks are typically represented in the Euclidean space. However, there is an increasing number of applications where data are generated from non-Euclidean domains and are represented as graphs with complex relationships and interdependency between objects. The complexity of graph data has imposed significant challenges on existing machine learning algorithms. Recently, many studies on extending deep learning approaches for graph data have emerged. In this survey, we provide a comprehensive overview of graph neural networks (GNNs) in data mining and machine learning fields. We propose a new taxonomy to divide the state-of-the-art graph neural networks into four categories, namely recurrent graph neural networks, convolutional graph neural networks, graph autoencoders and spatial-temporal graph neural networks. We further discuss the applications of graph neural networks across various domains and summarize the open source codes and benchmarks of the existing algorithms on different learning tasks. Finally, we propose potential research directions in this rapidly growing field.},
  added-at = {2020-02-21T16:09:44.000+0100},
  archiveprefix = {arXiv},
  author = {Wu, Zonghan and Pan, Shirui and Chen, Fengwen and Long, Guodong and Zhang, Chengqi and Yu, Philip S.},
  biburl = {https://www.bibsonomy.org/bibtex/26e2f75da0827c0288df78183ce6255c0/tschumacher},
  eprint = {1901.00596},
  file = {Wu et al - A Comprehensive Survey on Graph Neural Networks.pdf:C\:\\Users\\Admin\\Documents\\Research\\_Paperbase\\Graph Embeddings\\Wu et al - A Comprehensive Survey on Graph Neural Networks.pdf:application/pdf},
  interhash = {e6df299a7965fc0158ecd71e6d922246},
  internal-note = {arXiv preprint; identifier moved to eprint/archiveprefix. Same citation key appears multiple times in this file; deduplicate before use.},
  intrahash = {6e2f75da0827c0288df78183ce6255c0},
  journal = {arXiv:1901.00596 [cs, stat]},
  keywords = {Survey GNN Embedding_Algorithm Node_Embeddings},
  language = {en},
  month = dec,
  note = {arXiv: 1901.00596},
  timestamp = {2020-02-21T16:09:44.000+0100},
  title = {A {Comprehensive} {Survey} on {Graph} {Neural} {Networks}},
  url = {http://arxiv.org/abs/1901.00596},
  urldate = {2019-12-10},
  year = 2019
}
Deep learning has revolutionized many machine learning tasks in recent years, ranging from image classification and video processing to speech recognition and natural language understanding. The data in these tasks are typically represented in the Euclidean space. However, there is an increasing number of applications where data are generated from non-Euclidean domains and are represented as graphs with complex relationships and interdependency between objects. The complexity of graph data has imposed significant challenges on existing machine learning algorithms. Recently, many studies on extending deep learning approaches for graph data have emerged. In this survey, we provide a comprehensive overview of graph neural networks (GNNs) in data mining and machine learning fields. We propose a new taxonomy to divide the state-of-the-art graph neural networks into four categories, namely recurrent graph neural networks, convolutional graph neural networks, graph autoencoders and spatial-temporal graph neural networks. We further discuss the applications of graph neural networks across various domains and summarize the open source codes and benchmarks of the existing algorithms on different learning tasks. Finally, we propose potential research directions in this rapidly growing field.
Adversarially Regularized Graph Autoencoder for Graph Embedding.
Pan, S.; Hu, R.; Long, G.; Jiang, J.; Yao, L.; and Zhang, C.
arXiv:1802.04407 [cs, stat]. January 2019.
arXiv: 1802.04407
Paper
link
bibtex
abstract
@misc{pan_adversarially_2019,
  abstract = {Graph embedding is an effective method to represent graph data in a low dimensional space for graph analytics. Most existing embedding algorithms typically focus on preserving the topological structure or minimizing the reconstruction errors of graph data, but they have mostly ignored the data distribution of the latent codes from the graphs, which often results in inferior embedding in real-world graph data. In this paper, we propose a novel adversarial graph embedding framework for graph data. The framework encodes the topological structure and node content in a graph to a compact representation, on which a decoder is trained to reconstruct the graph structure. Furthermore, the latent representation is enforced to match a prior distribution via an adversarial training scheme. To learn a robust embedding, two variants of adversarial approaches, adversarially regularized graph autoencoder (ARGA) and adversarially regularized variational graph autoencoder (ARVGA), are developed. Experimental studies on real-world graphs validate our design and demonstrate that our algorithms outperform baselines by a wide margin in link prediction, graph clustering, and graph visualization tasks.},
  added-at = {2020-02-21T16:09:44.000+0100},
  archiveprefix = {arXiv},
  author = {Pan, Shirui and Hu, Ruiqi and Long, Guodong and Jiang, Jing and Yao, Lina and Zhang, Chengqi},
  biburl = {https://www.bibsonomy.org/bibtex/291ea7ebabd9bfdf0b9f67681929a7e65/tschumacher},
  eprint = {1802.04407},
  file = {Pan et al - Adversarially Regularized Graph Autoencoder for Graph Embedding.pdf:C\:\\Users\\Admin\\Documents\\Research\\_Paperbase\\Graph Embeddings\\Pan et al - Adversarially Regularized Graph Autoencoder for Graph Embedding.pdf:application/pdf},
  interhash = {1592e559fd68df393bc2a399d4f3607a},
  intrahash = {91ea7ebabd9bfdf0b9f67681929a7e65},
  keywords = {Autoencoder Neural_Embedding Adversarial_Learning Embedding_Algorithm Node_Embeddings},
  language = {en},
  month = jan,
  timestamp = {2020-02-21T16:09:44.000+0100},
  title = {Adversarially {Regularized} {Graph} {Autoencoder} for {Graph} {Embedding}},
  url = {http://arxiv.org/abs/1802.04407},
  urldate = {2019-12-10},
  year = 2019
}
Graph embedding is an effective method to represent graph data in a low dimensional space for graph analytics. Most existing embedding algorithms typically focus on preserving the topological structure or minimizing the reconstruction errors of graph data, but they have mostly ignored the data distribution of the latent codes from the graphs, which often results in inferior embedding in real-world graph data. In this paper, we propose a novel adversarial graph embedding framework for graph data. The framework encodes the topological structure and node content in a graph to a compact representation, on which a decoder is trained to reconstruct the graph structure. Furthermore, the latent representation is enforced to match a prior distribution via an adversarial training scheme. To learn a robust embedding, two variants of adversarial approaches, adversarially regularized graph autoencoder (ARGA) and adversarially regularized variational graph autoencoder (ARVGA), are developed. Experimental studies on real-world graphs validate our design and demonstrate that our algorithms outperform baselines by a wide margin in link prediction, graph clustering, and graph visualization tasks.
Relation Structure-Aware Heterogeneous Graph Neural Network.
Zhu, S.; Zhou, C.; Pan, S.; Zhu, X.; and Wang, B.
In Wang, J.; Shim, K.; and Wu, X., editor(s),
ICDM, pages 1534-1539, 2019. IEEE
Link
Paper
link
bibtex
@inproceedings{conf/icdm/ZhuZPZW19,
  added-at = {2020-02-10T00:00:00.000+0100},
  author = {Zhu, Shichao and Zhou, Chuan and Pan, Shirui and Zhu, Xingquan and Wang, Bin},
  biburl = {https://www.bibsonomy.org/bibtex/2bf09f1a53af80292f3a5ef6c2cb51c63/dblp},
  booktitle = {ICDM},
  crossref = {conf/icdm/2019},
  doi = {10.1109/ICDM.2019.00203},
  editor = {Wang, Jianyong and Shim, Kyuseok and Wu, Xindong},
  interhash = {9f9e86f43bb8ecc3e179cae377ad6df2},
  intrahash = {bf09f1a53af80292f3a5ef6c2cb51c63},
  isbn = {978-1-7281-4604-1},
  keywords = {dblp},
  pages = {1534--1539},
  publisher = {IEEE},
  timestamp = {2020-02-11T11:45:22.000+0100},
  title = {Relation Structure-Aware Heterogeneous Graph Neural Network},
  url = {http://dblp.uni-trier.de/db/conf/icdm/icdm2019.html#ZhuZPZW19},
  year = 2019
}
A Comprehensive Survey on Graph Neural Networks.
Wu, Z.; Pan, S.; Chen, F.; Long, G.; Zhang, C.; and Yu, P. S.
2019.
cite arxiv:1901.00596
Paper
link
bibtex
abstract
@misc{wu2019comprehensive,
  abstract = {Deep learning has revolutionized many machine learning tasks in recent years, ranging from image classification and video processing to speech recognition and natural language understanding. The data in these tasks are typically represented in the Euclidean space. However, there is an increasing number of applications where data are generated from non-Euclidean domains and are represented as graphs with complex relationships and interdependency between objects. The complexity of graph data has imposed significant challenges on existing machine learning algorithms. Recently, many studies on extending deep learning approaches for graph data have emerged. In this survey, we provide a comprehensive overview of graph neural networks (GNNs) in data mining and machine learning fields. We propose a new taxonomy to divide the state-of-the-art graph neural networks into four categories, namely recurrent graph neural networks, convolutional graph neural networks, graph autoencoders, and spatial-temporal graph neural networks. We further discuss the applications of graph neural networks across various domains and summarize the open source codes, benchmark data sets, and model evaluation of graph neural networks. Finally, we propose potential research directions in this rapidly growing field.},
  added-at = {2020-02-05T00:42:52.000+0100},
  archiveprefix = {arXiv},
  author = {Wu, Zonghan and Pan, Shirui and Chen, Fengwen and Long, Guodong and Zhang, Chengqi and Yu, Philip S.},
  biburl = {https://www.bibsonomy.org/bibtex/2bb7e3fc5967186532b1d7582f08df93f/peter.ralph},
  eprint = {1901.00596},
  interhash = {e6df299a7965fc0158ecd71e6d922246},
  internal-note = {NOTE(review): this citation key also appears earlier in the file with identical intrahash -- deduplicate before running BibTeX},
  intrahash = {bb7e3fc5967186532b1d7582f08df93f},
  keywords = {review graph_neural_networks neural_networks graph_theory},
  timestamp = {2020-02-05T00:42:52.000+0100},
  title = {A Comprehensive Survey on {Graph Neural Networks}},
  url = {http://arxiv.org/abs/1901.00596},
  year = 2019
}
Deep learning has revolutionized many machine learning tasks in recent years, ranging from image classification and video processing to speech recognition and natural language understanding. The data in these tasks are typically represented in the Euclidean space. However, there is an increasing number of applications where data are generated from non-Euclidean domains and are represented as graphs with complex relationships and interdependency between objects. The complexity of graph data has imposed significant challenges on existing machine learning algorithms. Recently, many studies on extending deep learning approaches for graph data have emerged. In this survey, we provide a comprehensive overview of graph neural networks (GNNs) in data mining and machine learning fields. We propose a new taxonomy to divide the state-of-the-art graph neural networks into four categories, namely recurrent graph neural networks, convolutional graph neural networks, graph autoencoders, and spatial-temporal graph neural networks. We further discuss the applications of graph neural networks across various domains and summarize the open source codes, benchmark data sets, and model evaluation of graph neural networks. Finally, we propose potential research directions in this rapidly growing field.
Low-Bit Quantization for Attributed Network Representation Learning.
Yang, H.; Pan, S.; Chen, L.; Zhou, C.; and Zhang, P.
In Kraus, S., editor(s),
IJCAI, pages 4047-4053, 2019. ijcai.org
Link
Paper
link
bibtex
@inproceedings{conf/ijcai/YangP0Z019,
  added-at = {2020-02-05T00:00:00.000+0100},
  author = {Yang, Hong and Pan, Shirui and Chen, Ling and Zhou, Chuan and Zhang, Peng},
  biburl = {https://www.bibsonomy.org/bibtex/2baf5cfc14cc1e97d2122f16546e86054/dblp},
  booktitle = {IJCAI},
  crossref = {conf/ijcai/2019},
  doi = {10.24963/ijcai.2019/562},
  editor = {Kraus, Sarit},
  interhash = {5cfd9352e3e1bb1992c00d26fc4b90aa},
  intrahash = {baf5cfc14cc1e97d2122f16546e86054},
  keywords = {dblp},
  pages = {4047--4053},
  publisher = {ijcai.org},
  timestamp = {2020-02-06T11:41:34.000+0100},
  title = {Low-Bit Quantization for Attributed Network Representation Learning},
  url = {http://dblp.uni-trier.de/db/conf/ijcai/ijcai2019.html#YangP0Z019},
  year = 2019
}
Attentive Dual Embedding for Understanding Medical Concepts in Electronic Health Records.
Peng, X.; Long, G.; Pan, S.; Jiang, J.; and Niu, Z.
In
IJCNN, pages 1-8, 2019. IEEE
Link
Paper
link
bibtex
@inproceedings{conf/ijcnn/PengLPJN19,
  added-at = {2020-02-03T00:00:00.000+0100},
  author = {Peng, Xueping and Long, Guodong and Pan, Shirui and Jiang, Jing and Niu, Zhendong},
  biburl = {https://www.bibsonomy.org/bibtex/2e9e9006bc1e2b6980b0a56fd9f175d34/dblp},
  booktitle = {IJCNN},
  crossref = {conf/ijcnn/2019},
  doi = {10.1109/IJCNN.2019.8852429},
  interhash = {68340fd82ec637da47c6bd9cf7ccaecb},
  intrahash = {e9e9006bc1e2b6980b0a56fd9f175d34},
  isbn = {978-1-7281-1985-4},
  keywords = {dblp},
  pages = {1--8},
  publisher = {IEEE},
  timestamp = {2020-02-04T11:45:29.000+0100},
  title = {Attentive Dual Embedding for Understanding Medical Concepts in Electronic Health Records},
  url = {http://dblp.uni-trier.de/db/conf/ijcnn/ijcnn2019.html#PengLPJN19},
  year = 2019
}
An Explainable Deep Fusion Network for Affect Recognition Using Physiological Signals.
Lin, J.; Pan, S.; Lee, C. S.; and Oviatt, S. L.
In Zhu, W.; Tao, D.; Cheng, X.; Cui, P.; Rundensteiner, E. A.; Carmel, D.; He, Q.; and Yu, J. X., editor(s),
CIKM, pages 2069-2072, 2019. ACM
Link
Paper
link
bibtex
@inproceedings{conf/cikm/LinPLO19,
  added-at = {2019-11-04T00:00:00.000+0100},
  author = {Lin, Jionghao and Pan, Shirui and Lee, Cheng Siong and Oviatt, Sharon L.},
  biburl = {https://www.bibsonomy.org/bibtex/25ddb787e6aec421b68f1e9b733df2e23/dblp},
  booktitle = {CIKM},
  crossref = {conf/cikm/2019},
  doi = {10.1145/3357384.3358160},
  editor = {Zhu, Wenwu and Tao, Dacheng and Cheng, Xueqi and Cui, Peng and Rundensteiner, Elke A. and Carmel, David and He, Qi and Yu, Jeffrey Xu},
  interhash = {f363788a2f2a584217c86b5ee253b746},
  intrahash = {5ddb787e6aec421b68f1e9b733df2e23},
  isbn = {978-1-4503-6976-3},
  keywords = {dblp},
  pages = {2069--2072},
  publisher = {ACM},
  timestamp = {2019-11-05T11:39:50.000+0100},
  title = {An Explainable Deep Fusion Network for Affect Recognition Using Physiological Signals},
  url = {http://dblp.uni-trier.de/db/conf/cikm/cikm2019.html#LinPLO19},
  year = 2019
}
Long-short Distance Aggregation Networks for Positive Unlabeled Graph Learning.
Wu, M.; Pan, S.; Du, L.; Tsang, I. W.; Zhu, X.; and Du, B.
In Zhu, W.; Tao, D.; Cheng, X.; Cui, P.; Rundensteiner, E. A.; Carmel, D.; He, Q.; and Yu, J. X., editor(s),
CIKM, pages 2157-2160, 2019. ACM
Link
Paper
link
bibtex
@inproceedings{conf/cikm/WuPDTZD19,
  added-at = {2019-11-04T00:00:00.000+0100},
  author = {Wu, Man and Pan, Shirui and Du, Lan and Tsang, Ivor W. and Zhu, Xingquan and Du, Bo},
  biburl = {https://www.bibsonomy.org/bibtex/2cf0e274e71319c7519aa8e45f534fe26/dblp},
  booktitle = {CIKM},
  crossref = {conf/cikm/2019},
  doi = {10.1145/3357384.3358122},
  editor = {Zhu, Wenwu and Tao, Dacheng and Cheng, Xueqi and Cui, Peng and Rundensteiner, Elke A. and Carmel, David and He, Qi and Yu, Jeffrey Xu},
  interhash = {3344a67602e6de9ce76e1a7fcaf1e756},
  intrahash = {cf0e274e71319c7519aa8e45f534fe26},
  isbn = {978-1-4503-6976-3},
  keywords = {dblp},
  pages = {2157--2160},
  publisher = {ACM},
  timestamp = {2019-11-05T11:39:50.000+0100},
  title = {Long-short Distance Aggregation Networks for Positive Unlabeled Graph Learning},
  url = {http://dblp.uni-trier.de/db/conf/cikm/cikm2019.html#WuPDTZD19},
  year = 2019
}
Suicidal Ideation Detection: A Review of Machine Learning Methods and Applications.
Ji, S.; Pan, S.; Li, X.; Cambria, E.; Long, G.; and Huang, Z.
CoRR, abs/1910.12611. 2019.
Link
Paper
link
bibtex
@article{journals/corr/abs-1910-12611,
  added-at = {2019-10-31T00:00:00.000+0100},
  archiveprefix = {arXiv},
  author = {Ji, Shaoxiong and Pan, Shirui and Li, Xue and Cambria, Erik and Long, Guodong and Huang, Zi},
  biburl = {https://www.bibsonomy.org/bibtex/268617e82873baa8cd90553d5114a653c/dblp},
  eprint = {1910.12611},
  interhash = {4143f2913afe4fe866a7953071287b0a},
  intrahash = {68617e82873baa8cd90553d5114a653c},
  journal = {CoRR},
  keywords = {dblp},
  timestamp = {2019-11-01T11:40:22.000+0100},
  title = {Suicidal Ideation Detection: A Review of Machine Learning Methods and Applications},
  url = {http://dblp.uni-trier.de/db/journals/corr/corr1910.html#abs-1910-12611},
  volume = {abs/1910.12611},
  year = 2019
}
DAGCN: Dual Attention Graph Convolutional Networks.
Chen, F.; Pan, S.; Jiang, J.; Huo, H.; and Long, G.
In
IJCNN, pages 1-8, 2019. IEEE
Link
Paper
link
bibtex
@inproceedings{conf/ijcnn/ChenPJHL19,
  added-at = {2019-10-02T00:00:00.000+0200},
  author = {Chen, Fengwen and Pan, Shirui and Jiang, Jing and Huo, Huan and Long, Guodong},
  biburl = {https://www.bibsonomy.org/bibtex/29b1db08b83f284b10bb27af207694a97/dblp},
  booktitle = {IJCNN},
  crossref = {conf/ijcnn/2019},
  doi = {10.1109/IJCNN.2019.8851698},
  interhash = {b4f8d060178effef6156aea99e1a491c},
  intrahash = {9b1db08b83f284b10bb27af207694a97},
  isbn = {978-1-7281-1985-4},
  keywords = {dblp},
  pages = {1--8},
  publisher = {IEEE},
  timestamp = {2019-10-03T11:37:15.000+0200},
  title = {{DAGCN}: Dual Attention Graph Convolutional Networks},
  url = {http://dblp.uni-trier.de/db/conf/ijcnn/ijcnn2019.html#ChenPJHL19},
  year = 2019
}
Label Embedding with Partial Heterogeneous Contexts.
Shi, Y.; Xu, D.; Pan, Y.; Tsang, I. W.; and Pan, S.
In
AAAI, pages 4926-4933, 2019. AAAI Press
Link
Paper
link
bibtex
@inproceedings{conf/aaai/ShiXPTP19,
  added-at = {2019-09-25T00:00:00.000+0200},
  author = {Shi, Yaxin and Xu, Donna and Pan, Yuangang and Tsang, Ivor W. and Pan, Shirui},
  biburl = {https://www.bibsonomy.org/bibtex/2eccc9cfd6ee72944bf6455c08abba396/dblp},
  booktitle = {AAAI},
  crossref = {conf/aaai/2019},
  doi = {10.1609/aaai.v33i01.33014926},
  interhash = {093e405fc0bf00079d40d72ee90af559},
  intrahash = {eccc9cfd6ee72944bf6455c08abba396},
  isbn = {978-1-57735-809-1},
  keywords = {dblp},
  pages = {4926--4933},
  publisher = {AAAI Press},
  timestamp = {2019-09-26T13:27:40.000+0200},
  title = {Label Embedding with Partial Heterogeneous Contexts},
  url = {http://dblp.uni-trier.de/db/conf/aaai/aaai2019.html#ShiXPTP19},
  year = 2019
}
Attributed Graph Clustering: A Deep Attentional Embedding Approach.
Wang, C.; Pan, S.; Hu, R.; Long, G.; Jiang, J.; and Zhang, C.
In Kraus, S., editor(s),
IJCAI, pages 3670-3676, 2019. ijcai.org
Link
Paper
link
bibtex
@inproceedings{conf/ijcai/WangPHLJZ19,
  added-at = {2019-09-24T00:00:00.000+0200},
  author = {Wang, Chun and Pan, Shirui and Hu, Ruiqi and Long, Guodong and Jiang, Jing and Zhang, Chengqi},
  biburl = {https://www.bibsonomy.org/bibtex/26bb0ffc29bfb7f4025ecf64322712417/dblp},
  booktitle = {IJCAI},
  crossref = {conf/ijcai/2019},
  doi = {10.24963/ijcai.2019/509},
  editor = {Kraus, Sarit},
  interhash = {266e61b0385b23347974234b783ac6c7},
  intrahash = {6bb0ffc29bfb7f4025ecf64322712417},
  keywords = {dblp},
  pages = {3670--3676},
  publisher = {ijcai.org},
  timestamp = {2019-09-25T11:42:01.000+0200},
  title = {Attributed Graph Clustering: A Deep Attentional Embedding Approach},
  url = {http://dblp.uni-trier.de/db/conf/ijcai/ijcai2019.html#WangPHLJZ19},
  year = 2019
}
Attributed Graph Clustering: A Deep Attentional Embedding Approach.
Wang, C.; Pan, S.; Hu, R.; Long, G.; Jiang, J.; and Zhang, C.
CoRR, abs/1906.06532. 2019.
Link
Paper
link
bibtex
@article{journals/corr/abs-1906-06532,
  added-at = {2019-09-24T00:00:00.000+0200},
  archiveprefix = {arXiv},
  author = {Wang, Chun and Pan, Shirui and Hu, Ruiqi and Long, Guodong and Jiang, Jing and Zhang, Chengqi},
  biburl = {https://www.bibsonomy.org/bibtex/25f4739bf2486f0bc5972960fd7166c5a/dblp},
  eprint = {1906.06532},
  interhash = {266e61b0385b23347974234b783ac6c7},
  intrahash = {5f4739bf2486f0bc5972960fd7166c5a},
  journal = {CoRR},
  keywords = {dblp},
  timestamp = {2019-09-25T11:38:45.000+0200},
  title = {Attributed Graph Clustering: A Deep Attentional Embedding Approach},
  url = {http://dblp.uni-trier.de/db/journals/corr/corr1906.html#abs-1906-06532},
  volume = {abs/1906.06532},
  year = 2019
}
Graph WaveNet for Deep Spatial-Temporal Graph Modeling.
Wu, Z.; Pan, S.; Long, G.; Jiang, J.; and Zhang, C.
In Kraus, S., editor(s),
IJCAI, pages 1907-1913, 2019. ijcai.org
Link
Paper
link
bibtex
@inproceedings{conf/ijcai/WuPLJZ19,
  added-at = {2019-09-24T00:00:00.000+0200},
  author = {Wu, Zonghan and Pan, Shirui and Long, Guodong and Jiang, Jing and Zhang, Chengqi},
  biburl = {https://www.bibsonomy.org/bibtex/21256e1662b09444c4771c261ca4876dd/dblp},
  booktitle = {IJCAI},
  crossref = {conf/ijcai/2019},
  doi = {10.24963/ijcai.2019/264},
  editor = {Kraus, Sarit},
  interhash = {415f8fa2450566527ff1689cdd3a6645},
  intrahash = {1256e1662b09444c4771c261ca4876dd},
  keywords = {dblp},
  pages = {1907--1913},
  publisher = {ijcai.org},
  timestamp = {2019-09-25T11:42:01.000+0200},
  title = {Graph {WaveNet} for Deep Spatial-Temporal Graph Modeling},
  url = {http://dblp.uni-trier.de/db/conf/ijcai/ijcai2019.html#WuPLJZ19},
  year = 2019
}
Efficient Novelty-Driven Neural Architecture Search.
Zhang, M.; Li, H.; Pan, S.; Liu, T.; and Su, S. W.
CoRR, abs/1907.09109. 2019.
Link
Paper
link
bibtex
@article{journals/corr/abs-1907-09109,
  added-at = {2019-07-30T00:00:00.000+0200},
  archiveprefix = {arXiv},
  author = {Zhang, Miao and Li, Huiqi and Pan, Shirui and Liu, Taoping and Su, Steven W.},
  biburl = {https://www.bibsonomy.org/bibtex/2b16ef6217cee21d4a3a01c4c0383c77b/dblp},
  eprint = {1907.09109},
  interhash = {e9158c4515ac6126341f46b81d417bfe},
  intrahash = {b16ef6217cee21d4a3a01c4c0383c77b},
  journal = {CoRR},
  keywords = {dblp},
  timestamp = {2019-07-31T11:35:49.000+0200},
  title = {Efficient Novelty-Driven Neural Architecture Search},
  url = {http://dblp.uni-trier.de/db/journals/corr/corr1907.html#abs-1907-09109},
  volume = {abs/1907.09109},
  year = 2019
}
Graph WaveNet for Deep Spatial-Temporal Graph Modeling.
Wu, Z.; Pan, S.; Long, G.; Jiang, J.; and Zhang, C.
CoRR, abs/1906.00121. 2019.
Link
Paper
link
bibtex
@article{journals/corr/abs-1906-00121,
  added-at = {2019-06-13T00:00:00.000+0200},
  archiveprefix = {arXiv},
  author = {Wu, Zonghan and Pan, Shirui and Long, Guodong and Jiang, Jing and Zhang, Chengqi},
  biburl = {https://www.bibsonomy.org/bibtex/2c9f5cdb40d426e28473109e884850d35/dblp},
  eprint = {1906.00121},
  interhash = {415f8fa2450566527ff1689cdd3a6645},
  intrahash = {c9f5cdb40d426e28473109e884850d35},
  journal = {CoRR},
  keywords = {dblp},
  timestamp = {2019-06-14T11:38:33.000+0200},
  title = {Graph {WaveNet} for Deep Spatial-Temporal Graph Modeling},
  url = {http://dblp.uni-trier.de/db/journals/corr/corr1906.html#abs-1906-00121},
  volume = {abs/1906.00121},
  year = 2019
}
DAGCN: Dual Attention Graph Convolutional Networks.
Chen, F.; Pan, S.; Jiang, J.; Huo, H.; and Long, G.
CoRR, abs/1904.02278. 2019.
Link
Paper
link
bibtex
@article{journals/corr/abs-1904-02278,
  added-at = {2019-04-24T00:00:00.000+0200},
  archiveprefix = {arXiv},
  author = {Chen, Fengwen and Pan, Shirui and Jiang, Jing and Huo, Huan and Long, Guodong},
  biburl = {https://www.bibsonomy.org/bibtex/25ffa25cc1ecf243f0527beeece8985bf/dblp},
  eprint = {1904.02278},
  interhash = {b4f8d060178effef6156aea99e1a491c},
  intrahash = {5ffa25cc1ecf243f0527beeece8985bf},
  journal = {CoRR},
  keywords = {dblp},
  timestamp = {2019-04-25T11:37:54.000+0200},
  title = {{DAGCN}: Dual Attention Graph Convolutional Networks},
  url = {http://dblp.uni-trier.de/db/journals/corr/corr1904.html#abs-1904-02278},
  volume = {abs/1904.02278},
  year = 2019
}
CFOND: Consensus Factorization for Co-Clustering Networked Data.
Guo, T.; Pan, S.; Zhu, X.; and Zhang, C.
IEEE Trans. Knowl. Data Eng., 31(4): 706-719. 2019.
Link
Paper
link
bibtex
@article{journals/tkde/GuoPZZ19,
  added-at = {2019-04-12T00:00:00.000+0200},
  author = {Guo, Ting and Pan, Shirui and Zhu, Xingquan and Zhang, Chengqi},
  biburl = {https://www.bibsonomy.org/bibtex/25c58fdf0c723c9647f789a4b1ba4ff74/dblp},
  doi = {10.1109/TKDE.2018.2846555},
  interhash = {e1ea317fc52fa6e7aaadf13a3e9a2114},
  intrahash = {5c58fdf0c723c9647f789a4b1ba4ff74},
  journal = {IEEE Transactions on Knowledge and Data Engineering},
  keywords = {dblp},
  number = 4,
  pages = {706--719},
  timestamp = {2019-04-13T11:36:32.000+0200},
  title = {{CFOND}: Consensus Factorization for Co-Clustering Networked Data},
  url = {http://dblp.uni-trier.de/db/journals/tkde/tkde31.html#GuoPZZ19},
  volume = 31,
  year = 2019
}
Learning Graph Embedding with Adversarial Training Methods.
Pan, S.; Hu, R.; Fung, S.; Long, G.; Jiang, J.; and Zhang, C.
CoRR, abs/1901.01250. 2019.
Link
Paper
link
bibtex
@article{journals/corr/abs-1901-01250,
  added-at = {2019-02-12T00:00:00.000+0100},
  archiveprefix = {arXiv},
  author = {Pan, Shirui and Hu, Ruiqi and Fung, Sai-Fu and Long, Guodong and Jiang, Jing and Zhang, Chengqi},
  biburl = {https://www.bibsonomy.org/bibtex/26595e6fb0c6561f59023efe02f98fbf5/dblp},
  eprint = {1901.01250},
  interhash = {4161a3d98ecf853ff211c2aa21a1b901},
  intrahash = {6595e6fb0c6561f59023efe02f98fbf5},
  journal = {CoRR},
  keywords = {dblp},
  timestamp = {2019-02-13T11:37:19.000+0100},
  title = {Learning Graph Embedding with Adversarial Training Methods},
  url = {http://dblp.uni-trier.de/db/journals/corr/corr1901.html#abs-1901-01250},
  volume = {abs/1901.01250},
  year = 2019
}
A Comprehensive Survey on Graph Neural Networks.
Wu, Z.; Pan, S.; Chen, F.; Long, G.; Zhang, C.; and Yu, P. S.
CoRR, abs/1901.00596. 2019.
Link
Paper
link
bibtex
@article{journals/corr/abs-1901-00596,
  added-at = {2019-01-31T00:00:00.000+0100},
  archiveprefix = {arXiv},
  author = {Wu, Zonghan and Pan, Shirui and Chen, Fengwen and Long, Guodong and Zhang, Chengqi and Yu, Philip S.},
  biburl = {https://www.bibsonomy.org/bibtex/2264f0f19c455d44844d938fb399768a8/dblp},
  eprint = {1901.00596},
  interhash = {e6df299a7965fc0158ecd71e6d922246},
  intrahash = {264f0f19c455d44844d938fb399768a8},
  journal = {CoRR},
  keywords = {dblp},
  timestamp = {2019-02-01T11:37:15.000+0100},
  title = {A Comprehensive Survey on {Graph Neural Networks}},
  url = {http://dblp.uni-trier.de/db/journals/corr/corr1901.html#abs-1901-00596},
  volume = {abs/1901.00596},
  year = 2019
}