var bibbase_data = {"data":"\"Loading..\"\n\n
\n\n \n\n \n\n \n \n\n \n\n \n \n\n \n\n \n
\n generated by\n \n \"bibbase.org\"\n\n \n
\n \n\n
\n\n \n\n\n
\n\n Excellent! Next you can\n create a new website with this list, or\n embed it in an existing web page by copying & pasting\n any of the following snippets.\n\n
\n JavaScript\n (easiest)\n
\n \n <script src=\"https://bibbase.org/show?bib=https%3A%2F%2Fbibbase.org%2Fzotero-mypublications%2Fshiruipan&jsonp=1&jsonp=1\"></script>\n \n
\n\n PHP\n
\n \n <?php\n $contents = file_get_contents(\"https://bibbase.org/show?bib=https%3A%2F%2Fbibbase.org%2Fzotero-mypublications%2Fshiruipan&jsonp=1\");\n print_r($contents);\n ?>\n \n
\n\n iFrame\n (not recommended)\n
\n \n <iframe src=\"https://bibbase.org/show?bib=https%3A%2F%2Fbibbase.org%2Fzotero-mypublications%2Fshiruipan&jsonp=1\"></iframe>\n \n
\n\n

\n For more details see the documention.\n

\n
\n
\n\n
\n\n This is a preview! To use this list on your own web site\n or create a new web site from it,\n create a free account. The file will be added\n and you will be able to edit it in the File Manager.\n We will show you instructions once you've created your account.\n
\n\n
\n\n

To the site owner:

\n\n

Action required! Mendeley is changing its\n API. In order to keep using Mendeley with BibBase past April\n 14th, you need to:\n

    \n
  1. renew the authorization for BibBase on Mendeley, and
  2. \n
  3. update the BibBase URL\n in your page the same way you did when you initially set up\n this page.\n
  4. \n
\n

\n\n

\n \n \n Fix it now\n

\n
\n\n
\n\n\n
\n \n \n
\n
\n  \n 2023\n \n \n (6)\n \n \n
\n
\n \n \n
\n \n\n \n \n \n \n \n Graph Sequential Neural ODE Process for Link Prediction on Dynamic and Sparse Graphs.\n \n \n \n\n\n \n Luo, L.; Haffari, G.; and Pan, S.\n\n\n \n\n\n\n In Chua, T.; Lauw, H. W.; Si, L.; Terzi, E.; and Tsaparas, P., editor(s), Proceedings of the Sixteenth ACM International Conference on Web Search and Data Mining, WSDM 2023, Singapore, 27 February 2023 - 3 March 2023, pages 778–786, 2023. ACM\n \n\n\n\n
\n\n\n\n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@inproceedings{luo_graph_2023,\n\ttitle = {Graph {Sequential} {Neural} {ODE} {Process} for {Link} {Prediction} on {Dynamic} and {Sparse} {Graphs}},\n\tcopyright = {All rights reserved},\n\tdoi = {10.1145/3539597.3570465},\n\tbooktitle = {Proceedings of the {Sixteenth} {ACM} {International} {Conference} on {Web} {Search} and {Data} {Mining}, {WSDM} 2023, {Singapore}, 27 {February} 2023 - 3 {March} 2023},\n\tpublisher = {ACM},\n\tauthor = {Luo, Linhao and Haffari, Gholamreza and Pan, Shirui},\n\teditor = {Chua, Tat-Seng and Lauw, Hady W. and Si, Luo and Terzi, Evimaria and Tsaparas, Panayiotis},\n\tyear = {2023},\n\tpages = {778--786},\n}\n\n
\n
\n\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n GOOD-D: On Unsupervised Graph Out-Of-Distribution Detection.\n \n \n \n\n\n \n Liu, Y.; Ding, K.; Liu, H.; and Pan, S.\n\n\n \n\n\n\n In Chua, T.; Lauw, H. W.; Si, L.; Terzi, E.; and Tsaparas, P., editor(s), Proceedings of the Sixteenth ACM International Conference on Web Search and Data Mining, WSDM 2023, Singapore, 27 February 2023 - 3 March 2023, pages 339–347, 2023. ACM\n \n\n\n\n
\n\n\n\n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@inproceedings{liu_good-d:_2023,\n\ttitle = {{GOOD}-{D}: {On} {Unsupervised} {Graph} {Out}-{Of}-{Distribution} {Detection}},\n\tcopyright = {All rights reserved},\n\tdoi = {10.1145/3539597.3570446},\n\tbooktitle = {Proceedings of the {Sixteenth} {ACM} {International} {Conference} on {Web} {Search} and {Data} {Mining}, {WSDM} 2023, {Singapore}, 27 {February} 2023 - 3 {March} 2023},\n\tpublisher = {ACM},\n\tauthor = {Liu, Yixin and Ding, Kaize and Liu, Huan and Pan, Shirui},\n\teditor = {Chua, Tat-Seng and Lauw, Hady W. and Si, Luo and Terzi, Evimaria and Tsaparas, Panayiotis},\n\tyear = {2023},\n\tpages = {339--347},\n}\n\n
\n
\n\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n A Survey of Community Detection Approaches: From Statistical Modeling to Deep Learning.\n \n \n \n\n\n \n Jin, D.; Yu, Z.; Jiao, P.; Pan, S.; He, D.; Wu, J.; Yu, P.; and Zhang, W.\n\n\n \n\n\n\n IEEE Transactions on Knowledge and Data Engineering (TKDE), 35(2): 1149–1170. 2023.\n \n\n\n\n
\n\n\n\n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@article{jin_survey_2023,\n\ttitle = {A {Survey} of {Community} {Detection} {Approaches}: {From} {Statistical} {Modeling} to {Deep} {Learning}},\n\tvolume = {35},\n\tcopyright = {All rights reserved},\n\tdoi = {10.1109/TKDE.2021.3104155 (Impact Factor: 6.977; JCR Ranked Q1; Top Journal in Data Mining)},\n\tnumber = {2},\n\tjournal = {IEEE Transactions on Knowledge and Data Engineering (TKDE)},\n\tauthor = {Jin, Di and Yu, Zhizhi and Jiao, Pengfei and Pan, Shirui and He, Dongxiao and Wu, Jia and Yu, Philip and Zhang, Weixiong},\n\tyear = {2023},\n\tpages = {1149--1170},\n}\n\n
\n
\n\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n Learning Graph Representations With Maximal Cliques.\n \n \n \n\n\n \n Molaei, S.; Bousejin, N. G.; Zare, H.; Jalili, M.; and Pan, S.\n\n\n \n\n\n\n IEEE Transactions on Neural Networks and Learning Systems (TNNLS), 34(2): 1089–1096. 2023.\n \n\n\n\n
\n\n\n\n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@article{molaei_learning_2023,\n\ttitle = {Learning {Graph} {Representations} {With} {Maximal} {Cliques}},\n\tvolume = {34},\n\tcopyright = {All rights reserved},\n\tdoi = {10.1109/TNNLS.2021.3104901 (Impact Factor: 10.451; JCR Ranked Q1)},\n\tnumber = {2},\n\tjournal = {IEEE Transactions on Neural Networks and Learning Systems (TNNLS)},\n\tauthor = {Molaei, Soheila and Bousejin, Nima Ghanbari and Zare, Hadi and Jalili, Mahdi and Pan, Shirui},\n\tyear = {2023},\n\tpages = {1089--1096},\n}\n\n
\n
\n\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n An Empirical Survey on Long Document Summarization: Datasets, Models and Metrics.\n \n \n \n\n\n \n Koh, H. Y.; Ju, J.; Liu, M.; and Pan, S.\n\n\n \n\n\n\n ACM Computing Surveys (CSUR), 55(8): 154:1–35. 2023.\n \n\n\n\n
\n\n\n\n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@article{koh_empirical_2023,\n\ttitle = {An {Empirical} {Survey} on {Long} {Document} {Summarization}: {Datasets}, {Models} and {Metrics}},\n\tvolume = {55},\n\tcopyright = {All rights reserved},\n\tdoi = {10.1145/3545176},\n\tnumber = {8},\n\tjournal = {ACM Computing Surveys (CSUR)},\n\tauthor = {Koh, Huan Yee and Ju, Jiaxin and Liu, Ming and Pan, Shirui},\n\tyear = {2023},\n\tpages = {154:1--35},\n}\n\n
\n
\n\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n Reinforced, Incremental and Cross-lingual Event Detection From Social Messages.\n \n \n \n\n\n \n Peng, H.; Zhang, R.; Li, S.; Cao, Y.; Pan, S.; and Yu, P. S\n\n\n \n\n\n\n IEEE Transactions on Pattern Analysis and Machine Intelligence (TPAMI), 45(1): 980–998. 2023.\n \n\n\n\n
\n\n\n\n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@article{peng_reinforced_2023,\n\ttitle = {Reinforced, {Incremental} and {Cross}-lingual {Event} {Detection} {From} {Social} {Messages}},\n\tvolume = {45},\n\tcopyright = {All rights reserved},\n\tdoi = {10.1109/TPAMI.2022.3144993 (Impact Factor: 16.389; JCR Ranked Q1; Top Journal in AI)},\n\tnumber = {1},\n\tjournal = {IEEE Transactions on Pattern Analysis and Machine Intelligence (TPAMI)},\n\tauthor = {Peng, Hao and Zhang, Ruitong and Li, Shaoning and Cao, Yuwei and Pan, Shirui and Yu, Philip S},\n\tyear = {2023},\n\tpages = {980--998},\n}\n\n
\n
\n\n\n\n
\n\n\n\n\n\n
\n
\n\n
\n
\n  \n 2022\n \n \n (26)\n \n \n
\n
\n \n \n
\n \n\n \n \n \n \n \n Learning multi-level weight-centric features for few-shot learning.\n \n \n \n\n\n \n Liang, M.; Huang, S.; Pan, S.; Gong, M.; and Liu, W.\n\n\n \n\n\n\n Pattern Recognition (PR), 128: 108662. 2022.\n \n\n\n\n
\n\n\n\n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n  \n \n abstract \n \n\n \n  \n \n 1 download\n \n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@article{liang_learning_2022,\n\ttitle = {Learning multi-level weight-centric features for few-shot learning},\n\tvolume = {128},\n\tcopyright = {All rights reserved},\n\tissn = {0031-3203},\n\tdoi = {https://doi.org/10.1016/j.patcog.2022.108662},\n\tabstract = {Few-shot learning is currently enjoying a considerable resurgence of interest, aided by the recent advance of deep learning. Contemporary approaches based on weight-generation scheme delivers a straightforward and flexible solution to the problem. However, they did not fully consider both the representation power for unseen categories and weight generation capacity in feature learning, making it a significant performance bottleneck. This paper proposes a multi-level weight-centric feature learning to give full play to feature extractor’s dual roles in few-shot learning. Our proposed method consists of two essential techniques: a weight-centric training strategy to improve the features’ prototype-ability and a multi-level feature incorporating a mid- and relation-level information. The former increases the feasibility of constructing a discriminative decision boundary based on a few samples. Simultaneously, the latter helps improve the transferability for characterizing novel classes and preserve classification capability for base classes. We extensively evaluate our approach to low-shot classification benchmarks. Experiments demonstrate our proposed method significantly outperforms its counterparts in both standard and generalized settings and using different network backbones.},\n\tjournal = {Pattern Recognition (PR)},\n\tauthor = {Liang, Mingjiang and Huang, Shaoli and Pan, Shirui and Gong, Mingming and Liu, Wei},\n\tyear = {2022},\n\tpages = {108662},\n}\n\n
\n
\n\n\n
\n Few-shot learning is currently enjoying a considerable resurgence of interest, aided by the recent advance of deep learning. Contemporary approaches based on weight-generation scheme delivers a straightforward and flexible solution to the problem. However, they did not fully consider both the representation power for unseen categories and weight generation capacity in feature learning, making it a significant performance bottleneck. This paper proposes a multi-level weight-centric feature learning to give full play to feature extractor’s dual roles in few-shot learning. Our proposed method consists of two essential techniques: a weight-centric training strategy to improve the features’ prototype-ability and a multi-level feature incorporating a mid- and relation-level information. The former increases the feasibility of constructing a discriminative decision boundary based on a few samples. Simultaneously, the latter helps improve the transferability for characterizing novel classes and preserve classification capability for base classes. We extensively evaluate our approach to low-shot classification benchmarks. Experiments demonstrate our proposed method significantly outperforms its counterparts in both standard and generalized settings and using different network backbones.\n
\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n Deep neighbor-aware embedding for node clustering in attributed graphs.\n \n \n \n\n\n \n Wang, C.; Pan, S.; Yu, C. P.; Hu, R.; Long, G.; and Zhang, C.\n\n\n \n\n\n\n Pattern Recognition (PR), 122: 108230. 2022.\n \n\n\n\n
\n\n\n\n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n  \n \n abstract \n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@article{wang_deep_2022,\n\ttitle = {Deep neighbor-aware embedding for node clustering in attributed graphs},\n\tvolume = {122},\n\tcopyright = {All rights reserved},\n\tissn = {0031-3203},\n\tdoi = {https://doi.org/10.1016/j.patcog.2021.108230},\n\tabstract = {Node clustering aims to partition the vertices in a graph into multiple groups or communities. Existing studies have mostly focused on developing deep learning approaches to learn a latent representation of nodes, based on which simple clustering methods like k-means are applied. These two-step frameworks for node clustering are difficult to manipulate and usually lead to suboptimal performance, mainly because the graph embedding is not goal-directed, i.e., designed for the specific clustering task. In this paper, we propose a clustering-directed deep learning approach, Deep Neighbor-aware Embedded Node Clustering (DNENC for short) for clustering graph data. Our method focuses on attributed graphs to sufficiently explore the two sides of information in graphs. It encodes the topological structure and node content in a graph into a compact representation via a neighbor-aware graph autoencoder, which progressively absorbs information from neighbors via a convolutional or attentional encoder. Multiple neighbor-aware encoders are stacked to build a deep architecture followed by an inner-product decoder for reconstructing the graph structure. Furthermore, soft labels are generated to supervise a self-training process, which iteratively refines the node clustering results. The self-training process is jointly learned and optimized with the graph embedding in a unified framework, to benefit both components mutually. Experimental results compared with state-of-the-art algorithms demonstrate the good performance of our framework.},\n\tjournal = {Pattern Recognition (PR)},\n\tauthor = {Wang, Chun and Pan, Shirui and Yu, Celina P. 
and Hu, Ruiqi and Long, Guodong and Zhang, Chengqi},\n\tyear = {2022},\n\tpages = {108230},\n}\n\n
\n
\n\n\n
\n Node clustering aims to partition the vertices in a graph into multiple groups or communities. Existing studies have mostly focused on developing deep learning approaches to learn a latent representation of nodes, based on which simple clustering methods like k-means are applied. These two-step frameworks for node clustering are difficult to manipulate and usually lead to suboptimal performance, mainly because the graph embedding is not goal-directed, i.e., designed for the specific clustering task. In this paper, we propose a clustering-directed deep learning approach, Deep Neighbor-aware Embedded Node Clustering (DNENC for short) for clustering graph data. Our method focuses on attributed graphs to sufficiently explore the two sides of information in graphs. It encodes the topological structure and node content in a graph into a compact representation via a neighbor-aware graph autoencoder, which progressively absorbs information from neighbors via a convolutional or attentional encoder. Multiple neighbor-aware encoders are stacked to build a deep architecture followed by an inner-product decoder for reconstructing the graph structure. Furthermore, soft labels are generated to supervise a self-training process, which iteratively refines the node clustering results. The self-training process is jointly learned and optimized with the graph embedding in a unified framework, to benefit both components mutually. Experimental results compared with state-of-the-art algorithms demonstrate the good performance of our framework.\n
\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n Cross-modal Clinical Graph Transformer For Ophthalmic Report Generation.\n \n \n \n\n\n \n Li, M.; Cai, W.; Verspoor, K.; Pan, S.; Li, X.; and Chang, X.\n\n\n \n\n\n\n In IEEE/CVF Conference on Computer Vision and Pattern Recognition, CVPR-22, New Orleans, Louisiana, US, Jun 19-24, 2022, pages 20624–20633, 2022. \n \n\n\n\n
\n\n\n\n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@inproceedings{li_cross-modal_2022,\n\ttitle = {Cross-modal {Clinical} {Graph} {Transformer} {For} {Ophthalmic} {Report} {Generation}},\n\tcopyright = {All rights reserved},\n\tdoi = {10.1109/CVPR52688.2022.02000 (CORE Ranked A*)},\n\tbooktitle = {{IEEE}/{CVF} {Conference} on {Computer} {Vision} and {Pattern} {Recognition}, {CVPR}-22, {New} {Orleans}, {Louisiana}, {US}, {Jun} 19-24, 2022},\n\tauthor = {Li, Mingjie and Cai, Wenjia and Verspoor, Karin and Pan, Shirui and Li, Xiaodan and Chang, Xiaojun},\n\tyear = {2022},\n\tpages = {20624--20633},\n}\n\n
\n
\n\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n A Probabilistic Graphical Model Based on Neural-symbolic Reasoning for Visual Relationship Detection.\n \n \n \n\n\n \n Yu, D.; Yang, B.; Wei, Q.; Li, A.; and Pan, S.\n\n\n \n\n\n\n In IEEE/CVF Conference on Computer Vision and Pattern Recognition, CVPR-22, New Orleans, Louisiana, US, Jun 19-24, 2022, pages 10599–10608, 2022. \n \n\n\n\n
\n\n\n\n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@inproceedings{yu_probabilistic_2022,\n\ttitle = {A {Probabilistic} {Graphical} {Model} {Based} on {Neural}-symbolic {Reasoning} for {Visual} {Relationship} {Detection}},\n\tcopyright = {All rights reserved},\n\tdoi = {10.1109/CVPR52688.2022.01035 (CORE Ranked A*)},\n\tbooktitle = {{IEEE}/{CVF} {Conference} on {Computer} {Vision} and {Pattern} {Recognition}, {CVPR}-22, {New} {Orleans}, {Louisiana}, {US}, {Jun} 19-24, 2022},\n\tauthor = {Yu, Dongran and Yang, Bo and Wei, Qianhao and Li, Anchen and Pan, Shirui},\n\tyear = {2022},\n\tpages = {10599--10608},\n}\n\n
\n
\n\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n BaLeNAS: Differentiable Architecture Search via Bayesian Learning Rule.\n \n \n \n\n\n \n Zhang, M.; Pan, S.; Chang, X.; Su, S.; Hu, J.; Haffari, R.; and Yang, B.\n\n\n \n\n\n\n In IEEE/CVF Conference on Computer Vision and Pattern Recognition, CVPR-22, New Orleans, Louisiana, US, Jun 19-24, 2022, pages 11861–11870, 2022. \n \n\n\n\n
\n\n\n\n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@inproceedings{zhang_balenas:_2022,\n\ttitle = {{BaLeNAS}: {Differentiable} {Architecture} {Search} via {Bayesian} {Learning} {Rule}},\n\tcopyright = {All rights reserved},\n\tdoi = {10.1109/CVPR52688.2022.01157 (CORE Ranked A*)},\n\tbooktitle = {{IEEE}/{CVF} {Conference} on {Computer} {Vision} and {Pattern} {Recognition}, {CVPR}-22, {New} {Orleans}, {Louisiana}, {US}, {Jun} 19-24, 2022},\n\tauthor = {Zhang, Miao and Pan, Shirui and Chang, Xiaojun and Su, Steven and Hu, Jilin and Haffari, Reza and Yang, Bin},\n\tyear = {2022},\n\tpages = {11861--11870},\n}\n\n
\n
\n\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n Graph Self-Supervised Learning: A Survey.\n \n \n \n\n\n \n Liu, Y.; Jin, M.; Pan, S.; Zhou, C.; Zheng, Y.; Xia, F.; and Yu, P. S.\n\n\n \n\n\n\n IEEE Transactions on Knowledge and Data Engineering (TKDE),1–21. 2022.\n \n\n\n\n
\n\n\n\n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@article{liu_graph_2022,\n\ttitle = {Graph {Self}-{Supervised} {Learning}: {A} {Survey}},\n\tcopyright = {All rights reserved},\n\tdoi = {10.1109/TKDE.2022.3172903 (Impact Factor: 6.977; JCR Ranked Q1; Top Journal in Data Mining)},\n\tjournal = {IEEE Transactions on Knowledge and Data Engineering (TKDE)},\n\tauthor = {Liu, Yixin and Jin, Ming and Pan, Shirui and Zhou, Chuan and Zheng, Yu and Xia, Feng and Yu, Philip S.},\n\tyear = {2022},\n\tpages = {1--21},\n}\n\n
\n
\n\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n Towards graph self-supervised learning with contrastive adjusted zooming.\n \n \n \n\n\n \n Liu, Y.; Jin, M.; Pan, S.; Zhou, C.; Zheng, Y.; Xia, F.; and Yu, P. S.\n\n\n \n\n\n\n IEEE Transactions on Neural Networks and Learning Systems (TNNLS),1–15. 2022.\n \n\n\n\n
\n\n\n\n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@article{liu_towards_2022,\n\ttitle = {Towards graph self-supervised learning with contrastive adjusted zooming},\n\tcopyright = {All rights reserved},\n\tdoi = {10.1109/TNNLS.2022.3216630},\n\tjournal = {IEEE Transactions on Neural Networks and Learning Systems (TNNLS)},\n\tauthor = {Liu, Yixin and Jin, Ming and Pan, Shirui and Zhou, Chuan and Zheng, Yu and Xia, Feng and Yu, Philip S.},\n\tyear = {2022},\n\tpages = {1--15},\n}\n\n
\n
\n\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n Projective Ranking-based GNN Evasion Attacks.\n \n \n \n\n\n \n Zhang, H.; Yuan, X.; Zhou, C.; and Pan, S.\n\n\n \n\n\n\n IEEE Transactions on Knowledge and Data Engineering (TKDE),1–14. 2022.\n \n\n\n\n
\n\n\n\n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n\n \n  \n \n 1 download\n \n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@article{zhang_projective_2022,\n\ttitle = {Projective {Ranking}-based {GNN} {Evasion} {Attacks}},\n\tcopyright = {All rights reserved},\n\tdoi = {10.1109/TKDE.2022.3219209 (Impact Factor: 6.977; JCR Ranked Q1; Top Journal in Data Mining)},\n\tjournal = {IEEE Transactions on Knowledge and Data Engineering (TKDE)},\n\tauthor = {Zhang, He and Yuan, Xingliang and Zhou, Chuan and Pan, Shirui},\n\tyear = {2022},\n\tpages = {1--14},\n}\n\n
\n
\n\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n Multivariate Time Series Forecasting with Dynamic Graph Neural ODEs.\n \n \n \n\n\n \n Jin, M.; Zheng, Y.; Li, Y.; Chen, S.; Yang, B.; and Pan, S.\n\n\n \n\n\n\n IEEE Transactions on Knowledge and Data Engineering (TKDE),1–14. 2022.\n \n\n\n\n
\n\n\n\n \n\n \n\n \n link\n  \n \n\n bibtex\n \n\n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@article{jin_multivariate_2022,\n\ttitle = {Multivariate {Time} {Series} {Forecasting} with {Dynamic} {Graph} {Neural} {ODEs}},\n\tcopyright = {All rights reserved},\n\tjournal = {IEEE Transactions on Knowledge and Data Engineering (TKDE)},\n\tauthor = {Jin, Ming and Zheng, Yu and Li, Yuan-Fang and Chen, Siheng and Yang, Bin and Pan, Shirui},\n\tyear = {2022},\n\tpages = {1--14},\n}\n\n
\n
\n\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n Predicting Human Mobility via Graph Convolutional Dual-attentive Networks.\n \n \n \n\n\n \n Dang, W.; Wang, H.; Pan, S.; Zhang, P.; Zhou, C.; Chen, X.; and Wang, J.\n\n\n \n\n\n\n In International Conference on Web Search and Data Mining (WSDM), pages 192–200, 2022. ACM\n \n\n\n\n
\n\n\n\n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@inproceedings{dang_predicting_2022,\n\ttitle = {Predicting {Human} {Mobility} via {Graph} {Convolutional} {Dual}-attentive {Networks}},\n\tcopyright = {All rights reserved},\n\tdoi = {10.1145/3488560.3498400 (CORE Ranked A*)},\n\tbooktitle = {International {Conference} on {Web} {Search} and {Data} {Mining} ({WSDM})},\n\tpublisher = {ACM},\n\tauthor = {Dang, Weizhen and Wang, Haibo and Pan, Shirui and Zhang, Pei and Zhou, Chuan and Chen, Xin and Wang, Jilong},\n\tyear = {2022},\n\tpages = {192--200},\n}\n\n
\n
\n\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n Towards Unsupervised Deep Graph Structure Learning.\n \n \n \n\n\n \n Liu, Y.; Zheng, Y.; Zhang, D.; Chen, H.; Peng, H.; and Pan, S.\n\n\n \n\n\n\n In The Web Conference (WWW), Lyon, France 25 – 29 April 2022, pages 1392–1403, 2022. ACM\n \n\n\n\n
\n\n\n\n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@inproceedings{liu_towards_2022-1,\n\ttitle = {Towards {Unsupervised} {Deep} {Graph} {Structure} {Learning}},\n\tcopyright = {All rights reserved},\n\tdoi = {10.1145/3485447.3512186 (CORE Ranked A*)},\n\tbooktitle = {The {Web} {Conference} ({WWW}), {Lyon}, {France} 25 – 29 {April} 2022},\n\tpublisher = {ACM},\n\tauthor = {Liu, Yixin and Zheng, Yu and Zhang, Daokun and Chen, Hongxu and Peng, Hao and Pan, Shirui},\n\tyear = {2022},\n\tpages = {1392--1403},\n}\n\n
\n
\n\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n Towards Spatio-Temporal Aware Traffic Time Series Forecasting.\n \n \n \n\n\n \n Cirstea, R.; Yang, B.; Guo, C.; Kieu, T.; and Pan, S. P.\n\n\n \n\n\n\n In IEEE International Conference on Data Engineering (ICDE-22), (Virtual) Kuala Lumpur, Malaysia, May 9-12, 2022 (CORE Ranked A*), 2022. \n \n\n\n\n
\n\n\n\n \n\n \n\n \n link\n  \n \n\n bibtex\n \n\n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@inproceedings{cirstea_towards_2022,\n\ttitle = {Towards {Spatio}-{Temporal} {Aware} {Traffic} {Time} {Series} {Forecasting}},\n\tcopyright = {All rights reserved},\n\tbooktitle = {{IEEE} {International} {Conference} on {Data} {Engineering} ({ICDE}-22), ({Virtual}) {Kuala} {Lumpur}, {Malaysia}, {May} 9-12, 2022 ({CORE} {Ranked} {A}*)},\n\tauthor = {Cirstea, Razvan and Yang, Bin and Guo, Chenjuan and Kieu, Tung and Pan, Shirui Pan},\n\tyear = {2022},\n}\n\n
\n
\n\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n Fire Burns, Swords Cut: Commonsense Inductive Bias for Exploration in Text-based Games.\n \n \n \n\n\n \n Ryu, D.; Shareghi, E.; Fang, M.; Xu, Y.; Pan, S.; and Haffari, R.\n\n\n \n\n\n\n In 60th Annual Meeting of the Association for Computational Linguistics (ACL-2022), Dublin, Ireland, May 22-27, 2022 (CORE Ranked A*), volume 2, pages 515–522, 2022. \n \n\n\n\n
\n\n\n\n \n\n \n\n \n link\n  \n \n\n bibtex\n \n\n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@inproceedings{ryu_fire_2022,\n\ttitle = {Fire {Burns}, {Swords} {Cut}: {Commonsense} {Inductive} {Bias} for {Exploration} in {Text}-based {Games}},\n\tvolume = {2},\n\tcopyright = {All rights reserved},\n\tbooktitle = {60th {Annual} {Meeting} of the {Association} for {Computational} {Linguistics} ({ACL}-2022), {Dublin}, {Ireland}, {May} 22-27, 2022 ({CORE} {Ranked} {A}*)},\n\tauthor = {Ryu, Dongwon and Shareghi, Ehsan and Fang, Meng and Xu, Yunqiu and Pan, Shirui and Haffari, Reza},\n\tyear = {2022},\n\tpages = {515--522},\n}\n\n
\n
\n\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n Exploring Relational Semantics for Inductive Knowledge Graph Completion.\n \n \n \n\n\n \n Wang, C.; Zhou, X.; Pan, S.; Dong, L.; Song, Z.; and Sha, Y.\n\n\n \n\n\n\n In Proceedings of the AAAI Conference on Artificial Intelligence (AAAI), Virtual Conference, Feburary 22- March 1, 2022 (CORE Ranked A*), 2022. AAAI Press\n \n\n\n\n
\n\n\n\n \n\n \n\n \n link\n  \n \n\n bibtex\n \n\n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@inproceedings{wang_exploring_2022,\n\ttitle = {Exploring {Relational} {Semantics} for {Inductive} {Knowledge} {Graph} {Completion}},\n\tcopyright = {All rights reserved},\n\tbooktitle = {Proceedings of the {AAAI} {Conference} on {Artificial} {Intelligence} ({AAAI}), {Virtual} {Conference}, {Feburary} 22- {March} 1, 2022 ({CORE} {Ranked} {A}*)},\n\tpublisher = {AAAI Press},\n\tauthor = {Wang, Changjian and Zhou, Xiaofei and Pan, Shirui and Dong, Linhua and Song, Zeliang and Sha, Ying},\n\tyear = {2022},\n}\n\n
\n
\n\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n Dual Space Graph Contrastive Learning.\n \n \n \n\n\n \n Yang, H.; Chen, H.; Pan, S.; Li, L.; Yu, P. S; and Xu, G.\n\n\n \n\n\n\n In The Web Conference (WWW), Lyon, France 25 – 29 April 2022, pages 1238–1247, 2022. ACM\n \n\n\n\n
\n\n\n\n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@inproceedings{yang_dual_2022,\n\ttitle = {Dual {Space} {Graph} {Contrastive} {Learning}},\n\tcopyright = {All rights reserved},\n\tdoi = {10.1145/3485447.3512211 (CORE Ranked A*)},\n\tbooktitle = {The {Web} {Conference} ({WWW}), {Lyon}, {France} 25 – 29 {April} 2022},\n\tpublisher = {ACM},\n\tauthor = {Yang, Haoran and Chen, Hongxu and Pan, Shirui and Li, Lin and Yu, Philip S and Xu, Guandong},\n\tyear = {2022},\n\tpages = {1238--1247},\n}\n\n
\n
\n\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n A Survey on Knowledge Graphs: Representation, Acquisition, and Applications.\n \n \n \n\n\n \n Ji, S.; Pan, S.; Cambria, E.; Marttinen, P.; and Yu, P. S\n\n\n \n\n\n\n IEEE Transactions on Neural Networks and Learning Systems (TNNLS), 33(2): 494–514. 2022.\n \n\n\n\n
\n\n\n\n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@article{ji_survey_2022,\n\ttitle = {A {Survey} on {Knowledge} {Graphs}: {Representation}, {Acquisition}, and {Applications}},\n\tvolume = {33},\n\tcopyright = {All rights reserved},\n\tdoi = {10.1109/TNNLS.2021.3070843 (Impact Factor: 10.451; JCR Ranked Q1)},\n\tnumber = {2},\n\tjournal = {IEEE Transactions on Neural Networks and Learning Systems (TNNLS)},\n\tauthor = {Ji, Shaoxiong and Pan, Shirui and Cambria, Erik and Marttinen, Pekka and Yu, Philip S},\n\tyear = {2022},\n\tpages = {494--514},\n}\n\n
\n
\n\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n Predicting Best-Selling New Products in a Major Promotion Campaign Through Graph Convolutional Networks.\n \n \n \n\n\n \n Li, C.; Jiang, W.; Yang, Y.; Pan, S.; Guo, L.; and Huang, G.\n\n\n \n\n\n\n IEEE Transactions on Neural Networks and Learning Systems (TNNLS),1–14. 2022.\n \n\n\n\n
\n\n\n\n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@article{li_predicting_2022,\n\ttitle = {Predicting {Best}-{Selling} {New} {Products} in a {Major} {Promotion} {Campaign} {Through} {Graph} {Convolutional} {Networks}},\n\tcopyright = {All rights reserved},\n\tdoi = {10.1109/TNNLS.2022.3155690 (Impact Factor: 10.451; JCR Ranked Q1)},\n\tjournal = {IEEE Transactions on Neural Networks and Learning Systems (TNNLS)},\n\tauthor = {Li, Chaojie and Jiang, Wensen and Yang, Yin and Pan, Shirui and Guo, Lijie and Huang, Gang},\n\tyear = {2022},\n\tpages = {1--14},\n}\n\n
\n
\n\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n Anomaly Detection on Attributed Networks via Contrastive Self-Supervised Learning.\n \n \n \n\n\n \n Liu, Y.; Li, Z.; Pan, S.; Gong, C.; Zhou, C.; and Karypis, G.\n\n\n \n\n\n\n IEEE Transactions on Neural Networks and Learning Systems (TNNLS), 33(6): 2378–2392. 2022.\n \n\n\n\n
\n\n\n\n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@article{liu_anomaly_2022,\n\ttitle = {Anomaly {Detection} on {Attributed} {Networks} via {Contrastive} {Self}-{Supervised} {Learning}},\n\tvolume = {33},\n\tcopyright = {All rights reserved},\n\tdoi = {10.1109/TNNLS.2021.3068344 (Impact Factor: 10.451; JCR Ranked Q1)},\n\tnumber = {6},\n\tjournal = {IEEE Transactions on Neural Networks and Learning Systems (TNNLS)},\n\tauthor = {Liu, Yixin and Li, Zhao and Pan, Shirui and Gong, Chen and Zhou, Chuan and Karypis, George},\n\tyear = {2022},\n\tpages = {2378--2392},\n}\n\n
\n
\n\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n Dual Interactive Graph Convolutional Networks for Hyperspectral Image Classification.\n \n \n \n\n\n \n Wan, S.; Pan, S.; Zhong, P.; Chang, X.; Yang, J.; and Gong, C.\n\n\n \n\n\n\n IEEE Transactions on Geoscience and Remote Sensing (TGRS), 60: 1–14. 2022.\n \n\n\n\n
\n\n\n\n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@article{wan_dual_2022,\n\ttitle = {Dual {Interactive} {Graph} {Convolutional} {Networks} for {Hyperspectral} {Image} {Classification}},\n\tvolume = {60},\n\tcopyright = {All rights reserved},\n\tdoi = {10.1109/TGRS.2021.3075223 (Impact Factor: 5.6; JCR Ranked Q1)},\n\tjournal = {IEEE Transactions on Geoscience and Remote Sensing (TGRS)},\n\tauthor = {Wan, Sheng and Pan, Shirui and Zhong, Ping and Chang, Xiaojun and Yang, Jian and Gong, Chen},\n\tyear = {2022},\n\tpages = {1--14},\n}\n\n
\n
\n\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n Attraction and Repulsion: Unsupervised Domain Adaptive Graph Contrastive Learning Network.\n \n \n \n\n\n \n Wu, M.; Pan, S.; and Zhu, X.\n\n\n \n\n\n\n IEEE Transactions on Emerging Topics in Computational Intelligence (TETCI),1. 2022.\n \n\n\n\n
\n\n\n\n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@article{wu_attraction_2022,\n\ttitle = {Attraction and {Repulsion}: {Unsupervised} {Domain} {Adaptive} {Graph} {Contrastive} {Learning} {Network}},\n\tcopyright = {All rights reserved},\n\tdoi = {10.1109/TETCI.2022.3156044},\n\tjournal = {IEEE Transactions on Emerging Topics in Computational Intelligence (TETCI)},\n\tauthor = {Wu, Man and Pan, Shirui and Zhu, Xingquan},\n\tyear = {2022},\n\tpages = {1},\n}\n\n
\n
\n\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n TraverseNet: Unifying Space and Time in Message Passing for Traffic Forecasting.\n \n \n \n\n\n \n Wu, Z.; Zheng, D.; Pan, S.; Gan, Q.; Long, G.; and Karypis, G.\n\n\n \n\n\n\n IEEE Transactions on Neural Networks and Learning Systems (TNNLS),1–11. 2022.\n \n\n\n\n
\n\n\n\n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@article{wu_traversenet:_2022,\n\ttitle = {{TraverseNet}: {Unifying} {Space} and {Time} in {Message} {Passing} for {Traffic} {Forecasting}},\n\tcopyright = {All rights reserved},\n\tdoi = {10.1109/TNNLS.2022.3186103},\n\tjournal = {IEEE Transactions on Neural Networks and Learning Systems (TNNLS)},\n\tauthor = {Wu, Zonghan and Zheng, Da and Pan, Shirui and Gan, Quan and Long, Guodong and Karypis, George},\n\tyear = {2022},\n\tpages = {1--11},\n}\n\n
\n
\n\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n Discrete embedding for attributed graphs.\n \n \n \n\n\n \n Yang, H.; Chen, L.; Pan, S.; Wang, H.; and Zhang, P.\n\n\n \n\n\n\n Pattern Recognition (PR), 123: 108368. 2022.\n Publisher: Elsevier\n\n\n\n
\n\n\n\n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@article{yang_discrete_2022,\n\ttitle = {Discrete embedding for attributed graphs},\n\tvolume = {123},\n\tcopyright = {All rights reserved},\n\tdoi = {10.1016/j.patcog.2021.108368 (Impact Factor: 7.74; JCR Ranked Q1)},\n\tjournal = {Pattern Recognition (PR)},\n\tauthor = {Yang, Hong and Chen, Ling and Pan, Shirui and Wang, Haishuai and Zhang, Peng},\n\tyear = {2022},\n\tnote = {Publisher: Elsevier},\n\tpages = {108368},\n}\n\n
\n
\n\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n Task Scheduling in Three-Dimensional Spatial Crowdsourcing: A Social Welfare Perspective.\n \n \n \n\n\n \n Wang, L.; Yang, D.; Yu, Z.; Xiong, F.; Han, L.; Pan, S.; and Guo, B.\n\n\n \n\n\n\n IEEE Transactions on Mobile Computing (TMC),1–1. 2022.\n \n\n\n\n
\n\n\n\n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@article{wang_task_2022,\n\ttitle = {Task {Scheduling} in {Three}-{Dimensional} {Spatial} {Crowdsourcing}: {A} {Social} {Welfare} {Perspective}},\n\tcopyright = {All rights reserved},\n\tdoi = {10.1109/TMC.2022.3175305},\n\tjournal = {IEEE Transactions on Mobile Computing (TMC)},\n\tauthor = {Wang, Liang and Yang, Dingqi and Yu, Zhiwen and Xiong, Fei and Han, Lei and Pan, Shirui and Guo, Bin},\n\tyear = {2022},\n\tpages = {1--1},\n}\n\n
\n
\n\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n Compact Scheduling for Task Graph Oriented Mobile Crowdsourcing.\n \n \n \n\n\n \n Wang, L.; Yu, Z.; Han, Q.; Yang, D.; Pan, S.; Yao, Y.; and Zhang, D.\n\n\n \n\n\n\n IEEE Transactions on Mobile Computing (TMC), 21(7): 2358–2371. 2022.\n \n\n\n\n
\n\n\n\n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@article{wang_compact_2022,\n\ttitle = {Compact {Scheduling} for {Task} {Graph} {Oriented} {Mobile} {Crowdsourcing}},\n\tvolume = {21},\n\tcopyright = {All rights reserved},\n\tdoi = {10.1109/TMC.2020.3040007},\n\tnumber = {7},\n\tjournal = {IEEE Transactions on Mobile Computing (TMC)},\n\tauthor = {Wang, Liang and Yu, Zhiwen and Han, Qi and Yang, Dingqi and Pan, Shirui and Yao, Yuan and Zhang, Daqing},\n\tyear = {2022},\n\tpages = {2358--2371},\n}\n\n
\n
\n\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n Beyond low-pass filtering: Graph convolutional networks with automatic filtering.\n \n \n \n\n\n \n Wu, Z.; Pan, S.; Long, G.; Jiang, J.; and Zhang, C.\n\n\n \n\n\n\n IEEE Transactions on Knowledge and Data Engineering (TKDE),1–12. 2022.\n \n\n\n\n
\n\n\n\n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@article{wu_beyond_2022,\n\ttitle = {Beyond low-pass filtering: {Graph} convolutional networks with automatic filtering},\n\tcopyright = {All rights reserved},\n\tdoi = {10.1109/TKDE.2022.3186016},\n\tjournal = {IEEE Transactions on Knowledge and Data Engineering (TKDE)},\n\tauthor = {Wu, Zonghan and Pan, Shirui and Long, Guodong and Jiang, Jing and Zhang, Chengqi},\n\tyear = {2022},\n\tpages = {1--12},\n}\n\n
\n
\n\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n One-shot Learning-based Animal Video Segmentation.\n \n \n \n\n\n \n Xue, T.; Qiao, Y.; Kong, H.; Su, D.; Pan, S.; Rafique, K.; and Sukkarieh, S.\n\n\n \n\n\n\n IEEE Transactions on Industrial Informatics (TII), 18(6): 3799–3807. 2022.\n \n\n\n\n
\n\n\n\n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@article{xue_one-shot_2022,\n\ttitle = {One-shot {Learning}-based {Animal} {Video} {Segmentation}},\n\tvolume = {18},\n\tcopyright = {All rights reserved},\n\tdoi = {10.1109/TII.2021.3117020 (Impact Factor: 10.215; JCR Ranked Q1)},\n\tnumber = {6},\n\tjournal = {IEEE Transactions on Industrial Informatics (TII)},\n\tauthor = {Xue, Tengfei and Qiao, Yongliang and Kong, He and Su, Daobilige and Pan, Shirui and Rafique, Khalid and Sukkarieh, Salah},\n\tyear = {2022},\n\tpages = {3799--3807},\n}\n
\n
\n\n\n\n
\n\n\n\n\n\n
\n
\n\n
\n
\n  \n 2021\n \n \n (18)\n \n \n
\n
\n \n \n
\n \n\n \n \n \n \n \n Hypergraph Convolutional Network for Group Recommendation.\n \n \n \n\n\n \n Jia, R.; Zhou, X.; Dong, L.; and Pan, S.\n\n\n \n\n\n\n In IEEE International Conference on Data Mining (ICDM), pages 260–269 (CORE Ranked A*), 2021. \n \n\n\n\n
\n\n\n\n \n\n \n\n \n link\n  \n \n\n bibtex\n \n\n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@inproceedings{jia_hypergraph_2021,\n\ttitle = {Hypergraph {Convolutional} {Network} for {Group} {Recommendation}},\n\tcopyright = {All rights reserved},\n\tbooktitle = {{IEEE} {International} {Conference} on {Data} {Mining} ({ICDM})},\n\tauthor = {Jia, Renqi and Zhou, Xiaofei and Dong, Linhua and Pan, Shirui},\n\tyear = {2021},\n\tpages = {260--269 (CORE Ranked A*)},\n}\n\n
\n
\n\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n Multi-Scale Contrastive Siamese Networks for Self-Supervised Graph Representation Learning.\n \n \n \n\n\n \n Jin, M.; Zheng, Y.; Li, Y.; Gong, C.; Zhou, C.; and Pan, S.\n\n\n \n\n\n\n In International Joint Conference on Artificial Intelligence, IJCAI, pages 1477–1483 (CORE Ranked A*), 2021. \n _eprint: 2105.05682\n\n\n\n
\n\n\n\n \n\n \n\n \n link\n  \n \n\n bibtex\n \n\n \n  \n \n abstract \n \n\n \n  \n \n 1 download\n \n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@inproceedings{jin_multi-scale_2021,\n\ttitle = {Multi-{Scale} {Contrastive} {Siamese} {Networks} for {Self}-{Supervised} {Graph} {Representation} {Learning}},\n\tcopyright = {All rights reserved},\n\tabstract = {Graph representation learning plays a vital role in processing graph-structured data. However, prior arts on graph representation learning heavily rely on the labeling information. To overcome this problem, inspired by the recent success of graph contrastive learning and Siamese networks in visual representation learning, we propose a novel self-supervised approach in this paper to learn node representations by enhancing Siamese self-distillation with multi-scale contrastive learning. Specifically, we first generate two augmented views from the input graph based on local and global perspectives. Then, we employ two objectives called cross-view and cross-network contrastiveness to maximize the agreement between node representations across different views and networks. To demonstrate the effectiveness of our approach, we perform empirical experiments on five real-world datasets. Our method not only achieves new state-of-the-art results but also surpasses some semi-supervised counterparts by large margins.},\n\tbooktitle = {International {Joint} {Conference} on {Artificial} {Intelligence}, {IJCAI}},\n\tauthor = {Jin, Ming and Zheng, Yizhen and Li, Yuan-Fang and Gong, Chen and Zhou, Chuan and Pan, Shirui},\n\tyear = {2021},\n\tnote = {\\_eprint: 2105.05682},\n\tpages = {1477--1483 (CORE Ranked A*)},\n}\n\n
\n
\n\n\n
\n Graph representation learning plays a vital role in processing graph-structured data. However, prior arts on graph representation learning heavily rely on the labeling information. To overcome this problem, inspired by the recent success of graph contrastive learning and Siamese networks in visual representation learning, we propose a novel self-supervised approach in this paper to learn node representations by enhancing Siamese self-distillation with multi-scale contrastive learning. Specifically, we first generate two augmented views from the input graph based on local and global perspectives. Then, we employ two objectives called cross-view and cross-network contrastiveness to maximize the agreement between node representations across different views and networks. To demonstrate the effectiveness of our approach, we perform empirical experiments on five real-world datasets. Our method not only achieves new state-of-the-art results but also surpasses some semi-supervised counterparts by large margins.\n
\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n Contrastive and Generative Graph Convolutional Networks for Graph-based Semi-Supervised Learning.\n \n \n \n\n\n \n Wan, S.; Pan, S.; Yang, J.; and Gong, C.\n\n\n \n\n\n\n In Thirty-Fifth AAAI Conference on Artificial Intelligence, AAAI 2021, pages 10049–10057 (CORE Ranked A*), 2021. AAAI Press\n \n\n\n\n
\n\n\n\n \n\n \n\n \n link\n  \n \n\n bibtex\n \n\n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@inproceedings{wan_contrastive_2021,\n\ttitle = {Contrastive and {Generative} {Graph} {Convolutional} {Networks} for {Graph}-based {Semi}-{Supervised} {Learning}},\n\tcopyright = {All rights reserved},\n\tbooktitle = {Thirty-{Fifth} {AAAI} {Conference} on {Artificial} {Intelligence}, {AAAI} 2021},\n\tpublisher = {{AAAI} Press},\n\tauthor = {Wan, Sheng and Pan, Shirui and Yang, Jian and Gong, Chen},\n\tyear = {2021},\n\tpages = {10049--10057 (CORE Ranked A*)},\n}\n\n
\n
\n\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n Task-adaptive Neural Process for User Cold-Start Recommendation.\n \n \n \n\n\n \n Lin, X.; Wu, J.; Zhou, C.; Pan, S.; Cao, Y.; and Wang, B.\n\n\n \n\n\n\n In Leskovec, J.; Grobelnik, M.; Najork, M.; Tang, J.; and Zia, L., editor(s), The Web Conference (WWW), Virtual Event / Ljubljana, Slovenia, April 19-23, 2021, pages 1306–1316, 2021. ACM / IW3C2\n \n\n\n\n
\n\n\n\n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@inproceedings{lin_task-adaptive_2021,\n\ttitle = {Task-adaptive {Neural} {Process} for {User} {Cold}-{Start} {Recommendation}},\n\tcopyright = {All rights reserved},\n\tdoi = {10.1145/3442381.3449908 (CORE Ranked A*)},\n\tbooktitle = {The {Web} {Conference} ({WWW}), {Virtual} {Event} / {Ljubljana}, {Slovenia}, {April} 19-23, 2021},\n\tpublisher = {{ACM} / {IW3C2}},\n\tauthor = {Lin, Xixun and Wu, Jia and Zhou, Chuan and Pan, Shirui and Cao, Yanan and Wang, Bin},\n\teditor = {Leskovec, Jure and Grobelnik, Marko and Najork, Marc and Tang, Jie and Zia, Leila},\n\tyear = {2021},\n\tpages = {1306--1316},\n}\n\n
\n
\n\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n iDARTS: Differentiable Architecture Search with Stochastic Implicit Gradients.\n \n \n \n\n\n \n Zhang, M.; Su, S.; Pan, S.; Chang, X.; Abbasnejad, E.; and Haffari, R.\n\n\n \n\n\n\n In International Conference on Machine Learning (ICML), pages 12557–12566 (CORE Ranked A*), 2021. \n _eprint: 2106.10784\n\n\n\n
\n\n\n\n \n\n \n\n \n link\n  \n \n\n bibtex\n \n\n \n  \n \n abstract \n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@inproceedings{zhang_idarts:_2021,\n\ttitle = {{iDARTS}: {Differentiable} {Architecture} {Search} with {Stochastic} {Implicit} {Gradients}},\n\tcopyright = {All rights reserved},\n\tabstract = {Differentiable ARchiTecture Search (DARTS) has recently become the mainstream of neural architecture search (NAS) due to its efficiency and simplicity. With a gradient-based bi-level optimization, DARTS alternately optimizes the inner model weights and the outer architecture parameter in a weight-sharing supernet. A key challenge to the scalability and quality of the learned architectures is the need for differentiating through the inner-loop optimisation. While much has been discussed about several potentially fatal factors in DARTS, the architecture gradient, a.k.a. hypergradient, has received less attention. In this paper, we tackle the hypergradient computation in DARTS based on the implicit function theorem, making it only depends on the obtained solution to the inner-loop optimization and agnostic to the optimization path. To further reduce the computational requirements, we formulate a stochastic hypergradient approximation for differentiable NAS, and theoretically show that the architecture optimization with the proposed method, named iDARTS, is expected to converge to a stationary point. Comprehensive experiments on two NAS benchmark search spaces and the common NAS search space verify the effectiveness of our proposed method. It leads to architectures outperforming, with large margins, those learned by the baseline methods.},\n\tbooktitle = {International {Conference} on {Machine} {Learning} ({ICML})},\n\tauthor = {Zhang, Miao and Su, Steven and Pan, Shirui and Chang, Xiaojun and Abbasnejad, Ehsan and Haffari, Reza},\n\tyear = {2021},\n\tnote = {\_eprint: 2106.10784},\n\tpages = {12557--12566 (CORE Ranked A*)},\n}\n\n
\n
\n\n\n
\n ${\\}backslash$textit\\Differentiable ARchiTecture Search\\ (DARTS) has recently become the mainstream of neural architecture search (NAS) due to its efficiency and simplicity. With a gradient-based bi-level optimization, DARTS alternately optimizes the inner model weights and the outer architecture parameter in a weight-sharing supernet. A key challenge to the scalability and quality of the learned architectures is the need for differentiating through the inner-loop optimisation. While much has been discussed about several potentially fatal factors in DARTS, the architecture gradient, a.k.a. hypergradient, has received less attention. In this paper, we tackle the hypergradient computation in DARTS based on the implicit function theorem, making it only depends on the obtained solution to the inner-loop optimization and agnostic to the optimization path. To further reduce the computational requirements, we formulate a stochastic hypergradient approximation for differentiable NAS, and theoretically show that the architecture optimization with the proposed method, named iDARTS, is expected to converge to a stationary point. Comprehensive experiments on two NAS benchmark search spaces and the common NAS search space verify the effectiveness of our proposed method. It leads to architectures outperforming, with large margins, those learned by the baseline methods.\n
\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n Identify Topic Relations in Scientific Literature Using Topic Modeling.\n \n \n \n\n\n \n Chen, H.; Wang, X.; Pan, S.; and Xiong, F.\n\n\n \n\n\n\n IEEE Transactions on Engineering Management (TEM), 68(5): 1232–1244. 2021.\n \n\n\n\n
\n\n\n\n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@article{chen_identify_2021,\n\ttitle = {Identify {Topic} {Relations} in {Scientific} {Literature} {Using} {Topic} {Modeling}},\n\tvolume = {68},\n\tcopyright = {All rights reserved},\n\tdoi = {10.1109/TEM.2019.2903115 (Impact Factor: 6.146)},\n\tnumber = {5},\n\tjournal = {IEEE Transactions on Engineering Management (TEM)},\n\tauthor = {Chen, Hongshu and Wang, Ximeng and Pan, Shirui and Xiong, Fei},\n\tyear = {2021},\n\tpages = {1232--1244},\n}\n\n
\n
\n\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n Suicidal Ideation Detection: A Review of Machine Learning Methods and Applications.\n \n \n \n\n\n \n Ji, S.; Pan, S.; Li, X.; Cambria, E.; Long, G.; and Huang, Z.\n\n\n \n\n\n\n IEEE Transactions on Computational Social Systems (TCSS), 8(1): 214–226. 2021.\n \n\n\n\n
\n\n\n\n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@article{ji_suicidal_2021,\n\ttitle = {Suicidal {Ideation} {Detection}: {A} {Review} of {Machine} {Learning} {Methods} and {Applications}},\n\tvolume = {8},\n\tcopyright = {All rights reserved},\n\tdoi = {10.1109/TCSS.2020.3021467},\n\tnumber = {1},\n\tjournal = {IEEE Transactions on Computational Social Systems (TCSS)},\n\tauthor = {Ji, Shaoxiong and Pan, Shirui and Li, Xue and Cambria, Erik and Long, Guodong and Huang, Zi},\n\tyear = {2021},\n\tpages = {214--226},\n}\n\n
\n
\n\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n Contrastive Graph Poisson Networks: Semi-Supervised Learning with Extremely Limited Labels.\n \n \n \n\n\n \n Wan, S.; Zhan, Y.; Liu, L.; Yu, B.; Pan, S.; and Gong, C.\n\n\n \n\n\n\n In Thirty-fifth Conference on Neural Information Processing Systems (NeurIPS-21), Virtual Conference, Dec 6 - Dec 14, 2021 (CORE Ranked A*; Top Conference in Machine Learning), 2021. \n \n\n\n\n
\n\n\n\n \n\n \n\n \n link\n  \n \n\n bibtex\n \n\n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@inproceedings{wan_contrastive_2021-1,\n\ttitle = {Contrastive {Graph} {Poisson} {Networks}: {Semi}-{Supervised} {Learning} with {Extremely} {Limited} {Labels}},\n\tcopyright = {All rights reserved},\n\tbooktitle = {Thirty-fifth {Conference} on {Neural} {Information} {Processing} {Systems} ({NeurIPS}-21), {Virtual} {Conference}, {Dec} 6 - {Dec} 14, 2021 ({CORE} {Ranked} {A}*; {Top} {Conference} in {Machine} {Learning})},\n\tauthor = {Wan, Sheng and Zhan, Yibing and Liu, Liu and Yu, Baosheng and Pan, Shirui and Gong, Chen},\n\tyear = {2021},\n}\n\n
\n
\n\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n Temporal Network Embedding for Link Prediction via VAE Joint Attention Mechanism.\n \n \n \n\n\n \n Jiao, P.; Guo, X.; Jing, X.; He, D.; Wu, H.; Pan, S.; Gong, M.; and Wang, W.\n\n\n \n\n\n\n IEEE Transactions on Neural Networks and Learning Systems (TNNLS),1–14. 2021.\n \n\n\n\n
\n\n\n\n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@article{jiao_temporal_2021,\n\ttitle = {Temporal {Network} {Embedding} for {Link} {Prediction} via {VAE} {Joint} {Attention} {Mechanism}},\n\tcopyright = {All rights reserved},\n\tdoi = {10.1109/TNNLS.2021.3084957 (Impact Factor: 10.451; JCR Ranked Q1)},\n\tjournal = {IEEE Transactions on Neural Networks and Learning Systems (TNNLS)},\n\tauthor = {Jiao, Pengfei and Guo, Xuan and Jing, Xin and He, Dongxiao and Wu, Huaming and Pan, Shirui and Gong, Maoguo and Wang, Wenjun},\n\tyear = {2021},\n\tpages = {1--14},\n}\n\n
\n
\n\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n IGNSCDA: Predicting CircRNA-Disease Associations Based on Improved Graph Convolutional Network and Negative Sampling.\n \n \n \n\n\n \n Lan, W.; Dong, Y.; Chen, Q.; Liu, J.; Wang, J.; Chen, Y. P.; and Pan, S.\n\n\n \n\n\n\n IEEE/ACM Transactions on Computational Biology and Bioinformatics (TCBB),1. 2021.\n \n\n\n\n
\n\n\n\n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@article{lan_ignscda:_2021,\n\ttitle = {{IGNSCDA}: {Predicting} {CircRNA}-{Disease} {Associations} {Based} on {Improved} {Graph} {Convolutional} {Network} and {Negative} {Sampling}},\n\tcopyright = {All rights reserved},\n\tdoi = {10.1109/TCBB.2021.3111607 (Impact Factor: 3.71)},\n\tjournal = {IEEE/ACM Transactions on Computational Biology and Bioinformatics (TCBB)},\n\tauthor = {Lan, Wei and Dong, Yi and Chen, Qingfeng and Liu, Jin and Wang, Jianxin and Chen, Yi-Ping Phoebe and Pan, Shirui},\n\tyear = {2021},\n\tpages = {1},\n}\n\n
\n
\n\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n Anomaly Detection in Dynamic Graphs via Transformer.\n \n \n \n\n\n \n Liu, Y.; Pan, S.; Wang, Y. G.; Xiong, F.; Wang, L.; Chen, Q.; and Lee, V. C S\n\n\n \n\n\n\n IEEE Transactions on Knowledge and Data Engineering (TKDE),1–14. 2021.\n \n\n\n\n
\n\n\n\n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@article{liu_anomaly_2021,\n\ttitle = {Anomaly {Detection} in {Dynamic} {Graphs} via {Transformer}},\n\tcopyright = {All rights reserved},\n\tdoi = {10.1109/TKDE.2021.3124061 (Impact Factor: 6.977; JCR Ranked Q1; Top Journal in Data Mining)},\n\tjournal = {IEEE Transactions on Knowledge and Data Engineering (TKDE)},\n\tauthor = {Liu, Yixin and Pan, Shirui and Wang, Yu Guang and Xiong, Fei and Wang, Liang and Chen, Qingfeng and Lee, Vincent C S},\n\tyear = {2021},\n\tpages = {1--14},\n}\n\n
\n
\n\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n Hyperspectral Image Classification with Context-aware Dynamic Graph Convolutional Networks.\n \n \n \n\n\n \n Wan, S.; Zhong, P.; Pan, S.; Yang, J.; Li, G.; and Gong, C.\n\n\n \n\n\n\n IEEE Transactions on Geoscience and Remote Sensing (TGRS), 59(1): 597–612. 2021.\n Publisher: IEEE\n\n\n\n
\n\n\n\n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@article{wan_hyperspectral_2021,\n\ttitle = {Hyperspectral {Image} {Classification} with {Context}-aware {Dynamic} {Graph} {Convolutional} {Networks}},\n\tvolume = {59},\n\tcopyright = {All rights reserved},\n\tdoi = {10.1109/TGRS.2020.2994205 (Impact Factor: 5.6; JCR Ranked Q1)},\n\tnumber = {1},\n\tjournal = {IEEE Transactions on Geoscience and Remote Sensing (TGRS)},\n\tauthor = {Wan, Sheng and Zhong, Ping and Pan, Shirui and Yang, Jian and Li, Guangyu and Gong, Chen},\n\tyear = {2021},\n\tnote = {Publisher: IEEE},\n\tpages = {597--612},\n}\n\n
\n
\n\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n Influence Spread in Geo-Social Networks: A Multiobjective Optimization Perspective.\n \n \n \n\n\n \n Wang, L.; Yu, Z.; Xiong, F.; Yang, D.; Pan, S.; and Yan, Z.\n\n\n \n\n\n\n IEEE Transactions on Cybernetics (TCYB), 51(5): 2663–2675. 2021.\n \n\n\n\n
\n\n\n\n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@article{wang_influence_2021,\n\ttitle = {Influence {Spread} in {Geo}-{Social} {Networks}: {A} {Multiobjective} {Optimization} {Perspective}},\n\tvolume = {51},\n\tcopyright = {All rights reserved},\n\tdoi = {10.1109/TCYB.2019.2906078 (Impact Factor: 11.448; JCR Ranked Q1)},\n\tnumber = {5},\n\tjournal = {IEEE Transactions on Cybernetics (TCYB)},\n\tauthor = {Wang, Liang and Yu, Zhiwen and Xiong, Fei and Yang, Dingqi and Pan, Shirui and Yan, Zheng},\n\tyear = {2021},\n\tpages = {2663--2675},\n}\n\n
\n
\n\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n Learning Graph Neural Networks with Positive and Unlabeled Nodes.\n \n \n \n\n\n \n Wu, M.; Pan, S.; Du, L.; and Zhu, X.\n\n\n \n\n\n\n ACM Transactions on Knowledge Discovery from Data (TKDD), 15(6): 101:1–101:25. 2021.\n \n\n\n\n
\n\n\n\n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@article{wu_learning_2021,\n\ttitle = {Learning {Graph} {Neural} {Networks} with {Positive} and {Unlabeled} {Nodes}},\n\tvolume = {15},\n\tcopyright = {All rights reserved},\n\tdoi = {10.1145/3450316 (Impact Factor: 2.01)},\n\tnumber = {6},\n\tjournal = {ACM Transactions on Knowledge Discovery from Data (TKDD)},\n\tauthor = {Wu, Man and Pan, Shirui and Du, Lan and Zhu, Xingquan},\n\tyear = {2021},\n\tpages = {101:1--101:25},\n}\n\n
\n
\n\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n A Comprehensive Survey on Graph Neural Networks.\n \n \n \n\n\n \n Wu, Z.; Pan, S.; Chen, F.; Long, G.; Zhang, C.; and Yu, P. S.\n\n\n \n\n\n\n IEEE Transactions on Neural Networks and Learning Systems (TNNLS), 32(1): 4–24. 2021.\n Publisher: IEEE\n\n\n\n
\n\n\n\n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@article{wu_comprehensive_2021,\n\ttitle = {A {Comprehensive} {Survey} on {Graph} {Neural} {Networks}},\n\tvolume = {32},\n\tcopyright = {All rights reserved},\n\tdoi = {10.1109/TNNLS.2020.2978386 (Impact Factor: 10.451; JCR Ranked Q1; Citations: 4000)},\n\tnumber = {1},\n\tjournal = {IEEE Transactions on Neural Networks and Learning Systems (TNNLS)},\n\tauthor = {Wu, Zonghan and Pan, Shirui and Chen, Fengwen and Long, Guodong and Zhang, Chengqi and Yu, Philip S.},\n\tyear = {2021},\n\tnote = {Publisher: IEEE},\n\tpages = {4--24},\n}\n\n
\n
\n\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n Graph Learning: A Survey.\n \n \n \n\n\n \n Xia, F.; Sun, K.; Yu, S.; Aziz, A.; Wan, L.; Pan, S.; and Liu, H.\n\n\n \n\n\n\n IEEE Transactions on Artificial Intelligence (TAI), 2(2): 109–127. 2021.\n \n\n\n\n
\n\n\n\n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@article{xia_graph_2021,\n\ttitle = {Graph {Learning}: {A} {Survey}},\n\tvolume = {2},\n\tcopyright = {All rights reserved},\n\tdoi = {10.1109/TAI.2021.3076021},\n\tnumber = {2},\n\tjournal = {IEEE Transactions on Artificial Intelligence (TAI)},\n\tauthor = {Xia, Feng and Sun, Ke and Yu, Shuo and Aziz, Abdul and Wan, Liangtian and Pan, Shirui and Liu, Huan},\n\tyear = {2021},\n\tpages = {109--127},\n}\n\n
\n
\n\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n Convolutional Neural Networks based Lung Nodule Classification: A Surrogate-Assisted Evolutionary Algorithm for Hyperparameter Optimization.\n \n \n \n\n\n \n Zhang, M.; Li, H.; Pan, S.; Lyu, J.; Ling, S.; and Su, S.\n\n\n \n\n\n\n IEEE Transactions on Evolutionary Computation (TEvC), 25(5): 869–882. 2021.\n \n\n\n\n
\n\n\n\n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@article{zhang_convolutional_2021,\n\ttitle = {Convolutional {Neural} {Networks} based {Lung} {Nodule} {Classification}: {A} {Surrogate}-{Assisted} {Evolutionary} {Algorithm} for {Hyperparameter} {Optimization}},\n\tvolume = {25},\n\tcopyright = {All rights reserved},\n\tdoi = {10.1109/TEVC.2021.3060833 (Impact Factor: 11.554; JCR Ranked Q1)},\n\tnumber = {5},\n\tjournal = {IEEE Transactions on Evolutionary Computation (TEvC)},\n\tauthor = {Zhang, Miao and Li, Huiqi and Pan, Shirui and Lyu, Juan and Ling, Steve and Su, Steven},\n\tyear = {2021},\n\tpages = {869--882},\n}\n\n
\n
\n\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n One-Shot Neural Architecture Search: Maximising Diversity to Overcome Catastrophic Forgetting.\n \n \n \n\n\n \n Zhang, M.; Li, H.; Pan, S.; Chang, X.; Zhou, C.; Ge, Z.; and W. Su, S.\n\n\n \n\n\n\n IEEE Transactions on Pattern Analysis and Machine Intelligence (TPAMI), 43(9): 2921–2935. 2021.\n \n\n\n\n
\n\n\n\n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@article{zhang_one-shot_2021,\n\ttitle = {One-{Shot} {Neural} {Architecture} {Search}: {Maximising} {Diversity} to {Overcome} {Catastrophic} {Forgetting}},\n\tvolume = {43},\n\tcopyright = {All rights reserved},\n\tdoi = {10.1109/TPAMI.2020.3035351 (Impact Factor: 16.389; JCR Ranked Q1; Top Journal in AI)},\n\tnumber = {9},\n\tjournal = {IEEE Transactions on Pattern Analysis and Machine Intelligence (TPAMI)},\n\tauthor = {Zhang, Miao and Li, Huiqi and Pan, Shirui and Chang, Xiaojun and Zhou, Chuan and Ge, Zongyuan and W. Su, Steven},\n\tyear = {2021},\n\tpages = {2921--2935},\n}\n\n
\n
\n\n\n\n
\n\n\n\n\n\n
\n
\n\n
\n
\n  \n 2020\n \n \n (22)\n \n \n
\n
\n \n \n
\n \n\n \n \n \n \n \n Going Deep: Graph Convolutional Ladder-Shape Networks.\n \n \n \n\n\n \n Hu, R.; Pan, S.; Long, G.; Lu, Q.; Zhu, L.; and Jiang, J.\n\n\n \n\n\n\n In Proceedings of the Thirty-Fourth AAAI Conference on Artificial Intelligence, AAAI-20, New York, New York, USA, February 7-12, 2020, pages 2838–2845 (CORE Ranked A*), 2020. \n \n\n\n\n
\n\n\n\n \n\n \n\n \n link\n  \n \n\n bibtex\n \n\n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@inproceedings{hu_going_2020,\n\ttitle = {Going {Deep}: {Graph} {Convolutional} {Ladder}-{Shape} {Networks}},\n\tcopyright = {All rights reserved},\n\tbooktitle = {Proceedings of the {Thirty}-{Fourth} {AAAI} {Conference} on {Artificial} {Intelligence}, {AAAI}-20, {New} {York}, {New} {York}, {USA}, {February} 7-12, 2020},\n\tauthor = {Hu, Ruiqi and Pan, Shirui and Long, Guodong and Lu, Qinghua and Zhu, Liming and Jiang, Jing},\n\tyear = {2020},\n\tpages = {2838--2845 (CORE Ranked A*)},\n}\n\n
\n
\n\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n Grounding Visual Concepts for Multimedia Event Detection and Multimedia Event Captioning in Zero-shot Setting.\n \n \n \n\n\n \n Li, Z.; Chang, X.; Yao, L.; Pan, S.; Ge, Z.; and Zhang, H.\n\n\n \n\n\n\n In ACM SIGKDD Conference on Knowledge Discovery and Data Mining, KDD-20, August 23–27, 2020, Virtual Event, CA, USA, pages 297–305 (CORE Ranked A*), 2020. \n \n\n\n\n
\n\n\n\n \n\n \n\n \n link\n  \n \n\n bibtex\n \n\n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@inproceedings{li_grounding_2020,\n\ttitle = {Grounding {Visual} {Concepts} for {Multimedia} {Event} {Detection} and {Multimedia} {Event} {Captioning} in {Zero}-shot {Setting}},\n\tcopyright = {All rights reserved},\n\tbooktitle = {{ACM} {SIGKDD} {Conference} on {Knowledge} {Discovery} and {Data} {Mining}, {KDD}-20, {August} 23–27, 2020, {Virtual} {Event}, {CA}, {USA}},\n\tauthor = {Li, Zhihui and Chang, Xiaojun and Yao, Lina and Pan, Shirui and Ge, Zongyuan and Zhang, Huaxiang},\n\tyear = {2020},\n\tpages = {297--305 (CORE Ranked A*)},\n}\n\n
\n
\n\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n Graph Stochastic Neural Networks for Semi-supervised Learning.\n \n \n \n\n\n \n Wang, H.; Zhou, C.; Chen, X.; Wu, J.; Pan, S.; and Wang, J.\n\n\n \n\n\n\n In Thirty-fourth Conference on Neural Information Processing Systems, NeurIPS-20, December 6-12, 2020, Virtual Conference, pages 19839–19848 (CORE Ranked A*; Top Conference in Machine Learning), 2020. \n \n\n\n\n
\n\n\n\n \n\n \n\n \n link\n  \n \n\n bibtex\n \n\n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@inproceedings{wang_graph_2020,\n\ttitle = {Graph {Stochastic} {Neural} {Networks} for {Semi}-supervised {Learning}},\n\tcopyright = {All rights reserved},\n\tbooktitle = {Thirty-fourth {Conference} on {Neural} {Information} {Processing} {Systems}, {NeurIPS}-20, {December} 6-12, 2020, {Virtual} {Conference}},\n\tauthor = {Wang, Haibo and Zhou, Chuan and Chen, Xin and Wu, Jia and Pan, Shirui and Wang, Jilong},\n\tyear = {2020},\n\tpages = {19839--19848 (CORE Ranked A*; Top Conference in Machine Learning)},\n}\n\n
\n
\n\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n Reinforcement Learning based Meta-path Discovery in Large-scale Heterogeneous Information Networks.\n \n \n \n\n\n \n Wan, G.; Du, B.; Pan, S.; and Haffari, G.\n\n\n \n\n\n\n In Proceedings of the Thirty-Fourth AAAI Conference on Artificial Intelligence, AAAI-20, New York, New York, USA, February 7-12, 2020, pages 6094–6101 (CORE Ranked A*), 2020. \n \n\n\n\n
\n\n\n\n \n\n \n\n \n link\n  \n \n\n bibtex\n \n\n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@inproceedings{wan_reinforcement_2020,\n\ttitle = {Reinforcement {Learning} based {Meta}-path {Discovery} in {Large}-scale {Heterogeneous} {Information} {Networks}},\n\tcopyright = {All rights reserved},\n\tbooktitle = {Proceedings of the {Thirty}-{Fourth} {AAAI} {Conference} on {Artificial} {Intelligence}, {AAAI}-20, {New} {York}, {New} {York}, {USA}, {February} 7-12, 2020},\n\tauthor = {Wan, Guojia and Du, Bo and Pan, Shirui and Haffari, Gholamreza},\n\tyear = {2020},\n\tpages = {6094--6101 (CORE Ranked A*)},\n}\n\n
\n
\n\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n Reasoning Like Human: Hierarchical Reinforcement Learning for Knowledge Graph Reasoning.\n \n \n \n\n\n \n Wan, G.; Pan, S.; Gong, C.; Zhou, C.; and Haffari, G.\n\n\n \n\n\n\n In International Joint Conference on Artificial Intelligence, IJCAI-20, Yokohama, Japan, January, 2021, pages 1926–1932 (CORE Ranked A*), 2020. \n \n\n\n\n
\n\n\n\n \n\n \n\n \n link\n  \n \n\n bibtex\n \n\n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@inproceedings{wan_reasoning_2020,\n\ttitle = {Reasoning {Like} {Human}: {Hierarchical} {Reinforcement} {Learning} for {Knowledge} {Graph} {Reasoning}},\n\tcopyright = {All rights reserved},\n\tbooktitle = {International {Joint} {Conference} on {Artificial} {Intelligence}, {IJCAI}-20, {Yokohama}, {Japan}, {January}, 2021},\n\tauthor = {Wan, Guojia and Pan, Shirui and Gong, Chen and Zhou, Chuan and Haffari, Gholamreza},\n\tyear = {2020},\n\tpages = {1926--1932 (CORE Ranked A*)},\n}\n\n
\n
\n\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n Cross-Graph: Robust and Unsupervised Embedding for Attributed Graphs with Corrupted Structure.\n \n \n \n\n\n \n Wang, C.; Han, B.; Pan, S.; Jiang, J.; Niu, G.; and Long, G.\n\n\n \n\n\n\n In IEEE International Conference on Data Mining, ICDM-20, November 17-20, 2020, Sorrento, Italy, pages 571–580 (CORE Ranked A*), 2020. \n \n\n\n\n
\n\n\n\n \n\n \n\n \n link\n  \n \n\n bibtex\n \n\n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@inproceedings{wang_cross-graph:_2020,\n\ttitle = {Cross-{Graph}: {Robust} and {Unsupervised} {Embedding} for {Attributed} {Graphs} with {Corrupted} {Structure}},\n\tcopyright = {All rights reserved},\n\tbooktitle = {{IEEE} {International} {Conference} on {Data} {Mining}, {ICDM}-20, {November} 17-20, 2020, {Sorrento}, {Italy}},\n\tauthor = {Wang, Chun and Han, Bo and Pan, Shirui and Jiang, Jing and Niu, Gang and Long, Guodong},\n\tyear = {2020},\n\tpages = {571--580 (CORE Ranked A*)},\n}\n\n
\n
\n\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n Unsupervised Domain Adaptive Graph Convolutional Networks.\n \n \n \n\n\n \n Wu, M.; Pan, S.; Zhou, C.; Chang, X.; and Zhu, X.\n\n\n \n\n\n\n In The Web Conference (WWW), WWW-20, Taipei, Taiwan, April 20-24, 2020, pages 1457–1467, 2020. \n \n\n\n\n
\n\n\n\n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@inproceedings{wu_unsupervised_2020,\n\ttitle = {Unsupervised {Domain} {Adaptive} {Graph} {Convolutional} {Networks}},\n\tcopyright = {All rights reserved},\n\tdoi = {10.1145/3366423.3380219 (CORE Ranked A*)},\n\tbooktitle = {The {Web} {Conference} ({WWW}), {WWW}-20, {Taipei}, {Taiwan}, {April} 20-24, 2020},\n\tauthor = {Wu, Man and Pan, Shirui and Zhou, Chuan and Chang, Xiaojun and Zhu, Xingquan},\n\tyear = {2020},\n\tpages = {1457--1467},\n}\n\n
\n
\n\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n Overcoming Multi-Model Forgetting in One-Shot NAS with Diversity Maximization.\n \n \n \n\n\n \n Zhang, M.; Li, H.; Pan, S.; Chang, X.; and Su, S.\n\n\n \n\n\n\n In IEEE/CVF Conference on Computer Vision and Pattern Recognition, CVPR-20, pages 7809–7818 (CORE Ranked A*), 2020. \n \n\n\n\n
\n\n\n\n \n\n \n\n \n link\n  \n \n\n bibtex\n \n\n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@inproceedings{zhang_overcoming_2020,\n\ttitle = {Overcoming {Multi}-{Model} {Forgetting} in {One}-{Shot} {NAS} with {Diversity} {Maximization}},\n\tcopyright = {All rights reserved},\n\tbooktitle = {{IEEE}/{CVF} {Conference} on {Computer} {Vision} and {Pattern} {Recognition}, {CVPR}-20},\n\tauthor = {Zhang, Miao and Li, Huiqi and Pan, Shirui and Chang, Xiaojun and Su, Steven},\n\tyear = {2020},\n\tpages = {7809--7818 (CORE Ranked A*)},\n}\n\n
\n
\n\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n Graph Geometry Interaction Learning.\n \n \n \n\n\n \n Zhu, S.; Pan, S.; Zhou, C.; Wu, J.; Cao, Y.; and Wang, B.\n\n\n \n\n\n\n In Thirty-fourth Conference on Neural Information Processing Systems, NeurIPS-20, December 6-12, 2020, Virtual Conference, pages 7548–7558 (CORE Ranked A*; Top Conference in Machine Learning), 2020. \n \n\n\n\n
\n\n\n\n \n\n \n\n \n link\n  \n \n\n bibtex\n \n\n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@inproceedings{zhu_graph_2020,\n\ttitle = {Graph {Geometry} {Interaction} {Learning}},\n\tcopyright = {All rights reserved},\n\tbooktitle = {Thirty-fourth {Conference} on {Neural} {Information} {Processing} {Systems}, {NeurIPS}-20, {December} 6-12, 2020, {Virtual} {Conference}},\n\tauthor = {Zhu, Shichao and Pan, Shirui and Zhou, Chuan and Wu, Jia and Cao, Yanan and Wang, Bin},\n\tyear = {2020},\n\tpages = {7548--7558 (CORE Ranked A*; Top Conference in Machine Learning)},\n}\n\n
\n
\n\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n GSSNN: Graph Smoothing Splines Neural Networks.\n \n \n \n\n\n \n Zhu, S.; Zhou, L.; Pan, S.; Zhou, C.; Yan, G.; and Wang, B.\n\n\n \n\n\n\n In Proceedings of the Thirty-Fourth AAAI Conference on Artificial Intelligence, AAAI-20, New York, New York, USA, February 7-12, 2020, pages 7007–7014 (CORE Ranked A*), 2020. \n \n\n\n\n
\n\n\n\n \n\n \n\n \n link\n  \n \n\n bibtex\n \n\n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@inproceedings{zhu_gssnn:_2020,\n\ttitle = {{GSSNN}: {Graph} {Smoothing} {Splines} {Neural} {Networks}},\n\tcopyright = {All rights reserved},\n\tbooktitle = {Proceedings of the {Thirty}-{Fourth} {AAAI} {Conference} on {Artificial} {Intelligence}, {AAAI}-20, {New} {York}, {New} {York}, {USA}, {February} 7-12, 2020},\n\tauthor = {Zhu, Shichao and Zhou, Lewei and Pan, Shirui and Zhou, Chuan and Yan, Guiying and Wang, Bin},\n\tyear = {2020},\n\tpages = {7007--7014 (CORE Ranked A*)},\n}\n\n
\n
\n\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n Distributed Feature Selection for Big Data using Fuzzy Rough Sets.\n \n \n \n\n\n \n Kong, L.; Qu, W.; Yu, J.; Zuo, H.; Chen, G.; Xiong, F.; Pan, S.; Lin, S.; and Qiu, M.\n\n\n \n\n\n\n IEEE Transactions on Fuzzy Systems (TFS), 28(5): 846–857. 2020.\n Publisher: IEEE\n\n\n\n
\n\n\n\n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@article{kong_distributed_2020,\n\ttitle = {Distributed {Feature} {Selection} for {Big} {Data} using {Fuzzy} {Rough} {Sets}},\n\tvolume = {28},\n\tcopyright = {All rights reserved},\n\tdoi = {doi.org/10.1109/TFUZZ.2019.2955894 (Impact Factor: 12.029; JCR Ranked Q1)},\n\tnumber = {5},\n\tjournal = {IEEE Transactions on Fuzzy Systems (TFS)},\n\tauthor = {Kong, Linghe and Qu, Wenhao and Yu, Jiadi and Zuo, Hua and Chen, Guihai and Xiong, Fei and Pan, Shirui and Lin, Siyu and Qiu, Meikang},\n\tyear = {2020},\n\tnote = {Publisher: IEEE},\n\tpages = {846--857},\n}\n\n
\n
\n\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n OpenWGL: Open-World Graph Learning.\n \n \n \n\n\n \n Wu, M.; Pan, S.; and Zhu, X.\n\n\n \n\n\n\n In IEEE International Conference on Data Mining, ICDM-20, November 17-20, 2020, Sorrento, Italy, pages 681–690 (CORE Ranked A*; Best Student Paper Award), 2020. \n \n\n\n\n
\n\n\n\n \n\n \n\n \n link\n  \n \n\n bibtex\n \n\n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@inproceedings{wu_openwgl:_2020,\n\ttitle = {{OpenWGL}: {Open}-{World} {Graph} {Learning}},\n\tcopyright = {All rights reserved},\n\tbooktitle = {{IEEE} {International} {Conference} on {Data} {Mining}, {ICDM}-20, {November} 17-20, 2020, {Sorrento}, {Italy}},\n\tauthor = {Wu, Man and Pan, Shirui and Zhu, Xingquan},\n\tyear = {2020},\n\tpages = {681--690 (CORE Ranked A*; Best Student Paper Award)},\n}\n\n
\n
\n\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n Connecting the Dots: Multivariate Time Series Forecasting with Graph Neural Networks.\n \n \n \n\n\n \n Wu, Z.; Pan, S.; Long, G.; Jiang, J.; Chang, X.; and Zhang, C.\n\n\n \n\n\n\n In ACM SIGKDD Conference on Knowledge Discovery and Data Mining, KDD-20, August 23–27, 2020, Virtual Event, CA, USA, pages 753–763 (CORE Ranked A*; Top Conference in Data Mining), 2020. ACM\n \n\n\n\n
\n\n\n\n \n\n \n\n \n link\n  \n \n\n bibtex\n \n\n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@inproceedings{wu_connecting_2020,\n\ttitle = {Connecting the {Dots}: {Multivariate} {Time} {Series} {Forecasting} with {Graph} {Neural} {Networks}},\n\tcopyright = {All rights reserved},\n\tbooktitle = {{ACM} {SIGKDD} {Conference} on {Knowledge} {Discovery} and {Data} {Mining}, {KDD}-20, {August} 23–27, 2020, {Virtual} {Event}, {CA}, {USA}},\n\tpublisher = {ACM},\n\tauthor = {Wu, Zonghan and Pan, Shirui and Long, Guodong and Jiang, Jing and Chang, Xiaojun and Zhang, Chengqi},\n\tyear = {2020},\n\tpages = {753--763 (CORE Ranked A*; Top Conference in Data Mining)},\n}\n\n
\n
\n\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n A Relation-Specific Attention Network for Joint Entity and Relation Extraction.\n \n \n \n\n\n \n Yuan, Y.; Zhou, X.; Pan, S.; Zhu, Q.; Song, Z.; and Guo, L.\n\n\n \n\n\n\n In International Joint Conference on Artificial Intelligence, IJCAI-20, Yokohama, Japan, January, 2021, pages 4054–4060 (CORE Ranked A*), 2020. \n \n\n\n\n
\n\n\n\n \n\n \n\n \n link\n  \n \n\n bibtex\n \n\n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@inproceedings{yuan_relation-specific_2020,\n\ttitle = {A {Relation}-{Specific} {Attention} {Network} for {Joint} {Entity} and {Relation} {Extraction}},\n\tcopyright = {All rights reserved},\n\tbooktitle = {International {Joint} {Conference} on {Artificial} {Intelligence}, {IJCAI}-20, {Yokohama}, {Japan}, {January}, 2021},\n\tauthor = {Yuan, Yue and Zhou, Xiaofei and Pan, Shirui and Zhu, Qiannan and Song, Zeliang and Guo, Li},\n\tyear = {2020},\n\tpages = {4054--4060 (CORE Ranked A*)},\n}\n\n
\n
\n\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n Differentiable Neural Architecture Search in Equivalent Space with Exploration Enhancement.\n \n \n \n\n\n \n Zhang, M.; Li, H.; Pan, S.; Chang, X.; Ge, Z.; and Su, S.\n\n\n \n\n\n\n In Thirty-fourth Conference on Neural Information Processing Systems, NeurIPS-20, December 6-12, 2020, Virtual Conference, pages 13341–13351 (CORE Ranked A*; Top Conference in Machine Learning), 2020. \n \n\n\n\n
\n\n\n\n \n\n \n\n \n link\n  \n \n\n bibtex\n \n\n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@inproceedings{zhang_differentiable_2020,\n\ttitle = {Differentiable {Neural} {Architecture} {Search} in {Equivalent} {Space} with {Exploration} {Enhancement}},\n\tcopyright = {All rights reserved},\n\tbooktitle = {Thirty-fourth {Conference} on {Neural} {Information} {Processing} {Systems}, {NeurIPS}-20, {December} 6-12, 2020, {Virtual} {Conference}},\n\tauthor = {Zhang, Miao and Li, Huiqi and Pan, Shirui and Chang, Xiaojun and Ge, Zongyuan and Su, Steven},\n\tyear = {2020},\n\tpages = {13341--13351 (CORE Ranked A*; Top Conference in Machine Learning)},\n}\n\n
\n
\n\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n One-Shot Neural Architecture Search via Novelty Driven Sampling.\n \n \n \n\n\n \n Zhang, M.; Li, H.; Pan, S.; Liu, T.; and Su, S.\n\n\n \n\n\n\n In International Joint Conference on Artificial Intelligence, IJCAI-20, Yokohama, Japan, January, 2021, pages 3188–3194 (CORE Ranked A*), 2020. \n \n\n\n\n
\n\n\n\n \n\n \n\n \n link\n  \n \n\n bibtex\n \n\n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@inproceedings{zhang_one-shot_2020,\n\ttitle = {One-{Shot} {Neural} {Architecture} {Search} via {Novelty} {Driven} {Sampling}},\n\tcopyright = {All rights reserved},\n\tbooktitle = {International {Joint} {Conference} on {Artificial} {Intelligence}, {IJCAI}-20, {Yokohama}, {Japan}, {January}, 2021},\n\tauthor = {Zhang, Miao and Li, Huiqi and Pan, Shirui and Liu, Taoping and Su, Steven},\n\tyear = {2020},\n\tpages = {3188--3194 (CORE Ranked A*)},\n}\n\n
\n
\n\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n Learning graph embedding with adversarial training methods.\n \n \n \n\n\n \n Pan, S.; Hu, R.; Fung, S.; Long, G.; Jiang, J.; and Zhang, C.\n\n\n \n\n\n\n IEEE transactions on cybernetics (TCYB), 50(6): 2475–2487. 2020.\n Publisher: IEEE\n\n\n\n
\n\n\n\n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@article{pan_learning_2020,\n\ttitle = {Learning graph embedding with adversarial training methods},\n\tvolume = {50},\n\tcopyright = {All rights reserved},\n\tdoi = {doi.org/10.1109/TCYB.2019.2932096  (Impact Factor: 11.448; JCR Ranked Q1)},\n\tnumber = {6},\n\tjournal = {IEEE transactions on cybernetics (TCYB)},\n\tauthor = {Pan, Shirui and Hu, Ruiqi and Fung, Sai-fu and Long, Guodong and Jiang, Jing and Zhang, Chengqi},\n\tyear = {2020},\n\tnote = {Publisher: IEEE},\n\tpages = {2475--2487},\n}\n\n
\n
\n\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n Exploiting Implicit Influence From Information Propagation for Social Recommendation.\n \n \n \n\n\n \n Xiong, F.; Shen, W.; Chen, H.; Pan, S.; Wang, X.; and Yan, Z.\n\n\n \n\n\n\n IEEE transactions on cybernetics (TCYB), 50(10): 4186–4199. 2020.\n Publisher: IEEE\n\n\n\n
\n\n\n\n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@article{xiong_exploiting_2020,\n\ttitle = {Exploiting {Implicit} {Influence} {From} {Information} {Propagation} for {Social} {Recommendation}},\n\tvolume = {50},\n\tcopyright = {All rights reserved},\n\tdoi = {doi.org/10.1109/TCYB.2019.2939390  (Impact Factor: 11.448; JCR Ranked Q1)},\n\tnumber = {10},\n\tjournal = {IEEE transactions on cybernetics (TCYB)},\n\tauthor = {Xiong, Fei and Shen, Weihan and Chen, Hongshu and Pan, Shirui and Wang, Ximeng and Yan, Zheng},\n\tyear = {2020},\n\tnote = {Publisher: IEEE},\n\tpages = {4186--4199},\n}\n\n
\n
\n\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n Social recommendation with evolutionary opinion dynamics.\n \n \n \n\n\n \n Xiong, F.; Wang, X.; Pan, S.; Yang, H.; Wang, H.; and Zhang, C.\n\n\n \n\n\n\n IEEE Transactions on Systems, Man, and Cybernetics: Systems (TSMC), 50(10): 3804–3816. 2020.\n \n\n\n\n
\n\n\n\n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@article{xiong_social_2020,\n\ttitle = {Social recommendation with evolutionary opinion dynamics},\n\tvolume = {50},\n\tcopyright = {All rights reserved},\n\tissn = {2168-2216},\n\tdoi = {10.1109/TSMC.2018.2854000 (Impact Factor: 13.451; JCR Ranked Q1)},\n\tlanguage = {English},\n\tnumber = {10},\n\tjournal = {IEEE Transactions on Systems, Man, and Cybernetics: Systems (TSMC)},\n\tauthor = {Xiong, Fei and Wang, Ximeng and Pan, Shirui and Yang, Hong and Wang, Haishuai and Zhang, Chengqi},\n\tyear = {2020},\n\tpages = {3804--3816},\n}\n\n
\n
\n\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n Familial Clustering For Weakly-labeled Android Malware Using Hybrid Representation Learning.\n \n \n \n\n\n \n Zhang, Y.; Sui, Y.; Pan, S.; Zheng, Z.; Ning, B.; Tsang, I.; and Zhou, W.\n\n\n \n\n\n\n IEEE Transactions on Information Forensics and Security (TIFS), 15: 3401–3414. 2020.\n Publisher: IEEE\n\n\n\n
\n\n\n\n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@article{zhang_familial_2020,\n\ttitle = {Familial {Clustering} {For} {Weakly}-labeled {Android} {Malware} {Using} {Hybrid} {Representation} {Learning}},\n\tvolume = {15},\n\tcopyright = {All rights reserved},\n\tdoi = {doi.org/10.1109/TIFS.2019.2947861 (Impact Factor: 7.178; JCR Ranked Q1)},\n\tjournal = {IEEE Transactions on Information Forensics and Security (TIFS)},\n\tauthor = {Zhang, Yanxin and Sui, Yulei and Pan, Shirui and Zheng, Zheng and Ning, Baodi and Tsang, Ivor and Zhou, Wanlei},\n\tyear = {2020},\n\tnote = {Publisher: IEEE},\n\tpages = {3401--3414},\n}\n\n
\n
\n\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n Adaptive knowledge subgraph ensemble for robust and trustworthy knowledge graph completion.\n \n \n \n\n\n \n Wan, G.; Du, B.; Pan, S.; and Wu, J.\n\n\n \n\n\n\n World Wide Web (WWW), 23(1): 471–490. 2020.\n Publisher: Springer\n\n\n\n
\n\n\n\n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@article{wan_adaptive_2020,\n\ttitle = {Adaptive knowledge subgraph ensemble for robust and trustworthy knowledge graph completion},\n\tvolume = {23},\n\tcopyright = {All rights reserved},\n\tdoi = {doi.org/10.1007/s11280-019-00711-y (Impact Factor: 2.716; JCR Ranked Q2)},\n\tnumber = {1},\n\tjournal = {World Wide Web (WWW)},\n\tauthor = {Wan, Guojia and Du, Bo and Pan, Shirui and Wu, Jia},\n\tyear = {2020},\n\tnote = {Publisher: Springer},\n\tpages = {471--490},\n}\n\n
\n
\n\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n Clustering social audiences in business information networks.\n \n \n \n\n\n \n Zheng, Y.; Hu, R.; Fung, S.; Yu, C.; Long, G.; Guo, T.; and Pan, S.\n\n\n \n\n\n\n Pattern Recognition (PR), 100: 107126. 2020.\n \n\n\n\n
\n\n\n\n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@article{zheng_clustering_2020,\n\ttitle = {Clustering social audiences in business information networks},\n\tvolume = {100},\n\tcopyright = {All rights reserved},\n\tissn = {0031-3203},\n\tdoi = {doi.org/10.1016/j.patcog.2019.107126 (Impact Factor: 7.74; JCR Ranked Q1)},\n\tjournal = {Pattern Recognition (PR)},\n\tauthor = {Zheng, Yu and Hu, Ruiqi and Fung, Sai-fu and Yu, Celina and Long, Guodong and Guo, Ting and Pan, Shirui},\n\tyear = {2020},\n\tpages = {107126},\n}\n\n
\n
\n\n\n\n
\n\n\n\n\n\n
\n
\n\n
\n
\n  \n 2019\n \n \n (9)\n \n \n
\n
\n \n \n
\n \n\n \n \n \n \n \n \n Cost-Sensitive Parallel Learning Framework for Insurance Intelligence Operation.\n \n \n \n \n\n\n \n Jiang, X.; Pan, S.; Long, G.; Xiong, F.; Jiang, J.; and Zhang, C.\n\n\n \n\n\n\n IEEE Transactions Industrial Electronics (TIE), 66(12): 9713–9723. 2019.\n \n\n\n\n
\n\n\n\n \n \n \"Cost-SensitivePaper\n  \n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@article{jiang_cost-sensitive_2019,\n\ttitle = {Cost-{Sensitive} {Parallel} {Learning} {Framework} for {Insurance} {Intelligence} {Operation}},\n\tvolume = {66},\n\tcopyright = {All rights reserved},\n\turl = {https://doi.org/10.1109/TIE.2018.2873526},\n\tdoi = {10.1109/TIE.2018.2873526 (Impact Factor: 8.236; JCR Ranked Q1)},\n\tnumber = {12},\n\tjournal = {IEEE Transactions Industrial Electronics (TIE)},\n\tauthor = {Jiang, Xinxin and Pan, Shirui and Long, Guodong and Xiong, Fei and Jiang, Jing and Zhang, Chengqi},\n\tyear = {2019},\n\tpages = {9713--9723},\n}\n\n
\n
\n\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n \n Attributed Graph Clustering: A Deep Attentional Embedding Approach.\n \n \n \n \n\n\n \n Wang, C.; Pan, S.; Hu, R.; Long, G.; Jiang, J.; and Zhang, C.\n\n\n \n\n\n\n In Kraus, S., editor(s), Proceedings of the Twenty-Eighth International Joint Conference on Artificial Intelligence, IJCAI 2019, Macao, China, August 10-16, 2019, pages 3670–3676, 2019. ijcai.org\n \n\n\n\n
\n\n\n\n \n \n \"AttributedPaper\n  \n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@inproceedings{wang_attributed_2019,\n\ttitle = {Attributed {Graph} {Clustering}: {A} {Deep} {Attentional} {Embedding} {Approach}},\n\tcopyright = {All rights reserved},\n\turl = {https://doi.org/10.24963/ijcai.2019/509},\n\tdoi = {10.24963/ijcai.2019/509 (CORE Ranked A*)},\n\tbooktitle = {Proceedings of the {Twenty}-{Eighth} {International} {Joint} {Conference} on {Artificial} {Intelligence}, {IJCAI} 2019, {Macao}, {China}, {August} 10-16, 2019},\n\tpublisher = {ijcai.org},\n\tauthor = {Wang, Chun and Pan, Shirui and Hu, Ruiqi and Long, Guodong and Jiang, Jing and Zhang, Chengqi},\n\teditor = {Kraus, Sarit},\n\tyear = {2019},\n\tpages = {3670--3676},\n}\n\n
\n
\n\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n \n Label Embedding with Partial Heterogeneous Contexts.\n \n \n \n \n\n\n \n Shi, Y.; Xu, D.; Pan, Y.; Tsang, I. W.; and Pan, S.\n\n\n \n\n\n\n In The Thirty-Third AAAI Conference on Artificial Intelligence, AAAI 2019, Honolulu, Hawaii, USA, January 27 - February 1, 2019, pages 4926–4933, 2019. AAAI Press\n \n\n\n\n
\n\n\n\n \n \n \"LabelPaper\n  \n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@inproceedings{shi_label_2019,\n\ttitle = {Label {Embedding} with {Partial} {Heterogeneous} {Contexts}},\n\tcopyright = {All rights reserved},\n\turl = {https://doi.org/10.1609/aaai.v33i01.33014926},\n\tdoi = {10.1609/aaai.v33i01.33014926 (CORE Ranked A*)},\n\tbooktitle = {The {Thirty}-{Third} {AAAI} {Conference} on {Artificial} {Intelligence}, {AAAI} 2019, {Honolulu}, {Hawaii}, {USA}, {January} 27 - {February} 1, 2019},\n\tpublisher = {AAAI Press},\n\tauthor = {Shi, Yaxin and Xu, Donna and Pan, Yuangang and Tsang, Ivor W. and Pan, Shirui},\n\tyear = {2019},\n\tpages = {4926--4933},\n}\n\n
\n
\n\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n Domain-Adversarial Graph Neural Networks for Text Classification.\n \n \n \n\n\n \n Wu, M.; Pan, S.; Zhu, X.; Zhou, C.; and Pan, L.\n\n\n \n\n\n\n In IEEE International Conference on Data Mining (ICDM), pages 648–657, 2019. \n \n\n\n\n
\n\n\n\n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@inproceedings{wu_domain-adversarial_2019,\n\ttitle = {Domain-{Adversarial} {Graph} {Neural} {Networks} for {Text} {Classification}},\n\tcopyright = {All rights reserved},\n\tdoi = {10.1109/ICDM.2019.00075 (CORE Ranked A*)},\n\tbooktitle = {{IEEE} {International} {Conference} on {Data} {Mining} ({ICDM})},\n\tauthor = {Wu, Man and Pan, Shirui and Zhu, Xingquan and Zhou, Chuan and Pan, Lei},\n\tyear = {2019},\n\tpages = {648--657},\n}\n\n
\n
\n\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n \n Graph WaveNet for Deep Spatial-Temporal Graph Modeling.\n \n \n \n \n\n\n \n Wu, Z.; Pan, S.; Long, G.; Jiang, J.; and Zhang, C.\n\n\n \n\n\n\n In Kraus, S., editor(s), Proceedings of the Twenty-Eighth International Joint Conference on Artificial Intelligence, IJCAI 2019, Macao, China, August 10-16, 2019, pages 1907–1913, 2019. ijcai.org\n \n\n\n\n
\n\n\n\n \n \n \"GraphPaper\n  \n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@inproceedings{wu_graph_2019,\n\ttitle = {Graph {WaveNet} for {Deep} {Spatial}-{Temporal} {Graph} {Modeling}},\n\tcopyright = {All rights reserved},\n\turl = {https://doi.org/10.24963/ijcai.2019/264},\n\tdoi = {10.24963/ijcai.2019/264 (CORE Ranked A*)},\n\tbooktitle = {Proceedings of the {Twenty}-{Eighth} {International} {Joint} {Conference} on {Artificial} {Intelligence}, {IJCAI} 2019, {Macao}, {China}, {August} 10-16, 2019},\n\tpublisher = {ijcai.org},\n\tauthor = {Wu, Zonghan and Pan, Shirui and Long, Guodong and Jiang, Jing and Zhang, Chengqi},\n\teditor = {Kraus, Sarit},\n\tyear = {2019},\n\tpages = {1907--1913},\n}\n\n
\n
\n\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n \n Low-Bit Quantization for Attributed Network Representation Learning.\n \n \n \n \n\n\n \n Yang, H.; Pan, S.; Chen, L.; Zhou, C.; and Zhang, P.\n\n\n \n\n\n\n In Kraus, S., editor(s), Proceedings of the Twenty-Eighth International Joint Conference on Artificial Intelligence, IJCAI 2019, Macao, China, August 10-16, 2019, pages 4047–4053, 2019. ijcai.org\n \n\n\n\n
\n\n\n\n \n \n \"Low-BitPaper\n  \n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@inproceedings{yang_low-bit_2019,\n\ttitle = {Low-{Bit} {Quantization} for {Attributed} {Network} {Representation} {Learning}},\n\tcopyright = {All rights reserved},\n\turl = {https://doi.org/10.24963/ijcai.2019/562},\n\tdoi = {10.24963/ijcai.2019/562 (CORE Ranked A*)},\n\tbooktitle = {Proceedings of the {Twenty}-{Eighth} {International} {Joint} {Conference} on {Artificial} {Intelligence}, {IJCAI} 2019, {Macao}, {China}, {August} 10-16, 2019},\n\tpublisher = {ijcai.org},\n\tauthor = {Yang, Hong and Pan, Shirui and Chen, Ling and Zhou, Chuan and Zhang, Peng},\n\teditor = {Kraus, Sarit},\n\tyear = {2019},\n\tpages = {4047--4053},\n}\n\n
\n
\n\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n Relation Structure-Aware Heterogeneous Graph Neural Network.\n \n \n \n\n\n \n Zhu, S.; Zhou, C.; Pan, S.; Zhu, X.; and Wang, B.\n\n\n \n\n\n\n In IEEE International Conference on Data Mining (ICDM), pages 1534–1539, 2019. \n \n\n\n\n
\n\n\n\n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@inproceedings{zhu_relation_2019,\n\ttitle = {Relation {Structure}-{Aware} {Heterogeneous} {Graph} {Neural} {Network}},\n\tcopyright = {All rights reserved},\n\tdoi = {10.1109/ICDM.2019.00203 (CORE Ranked A*)},\n\tbooktitle = {{IEEE} {International} {Conference} on {Data} {Mining} ({ICDM})},\n\tauthor = {Zhu, Shichao and Zhou, Chuan and Pan, Shirui and Zhu, Xingquan and Wang, Bin},\n\tyear = {2019},\n\tpages = {1534--1539},\n}\n\n
\n
\n\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n \n CFOND: Consensus Factorization for Co-Clustering Networked Data.\n \n \n \n \n\n\n \n Guo, T.; Pan, S.; Zhu, X.; and Zhang, C.\n\n\n \n\n\n\n IEEE Transactions on Knowledge and Data Engineering (TKDE), 31(4): 706–719. 2019.\n \n\n\n\n
\n\n\n\n \n \n \"CFOND:Paper\n  \n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@article{guo_cfond:_2019,\n\ttitle = {{CFOND}: {Consensus} {Factorization} for {Co}-{Clustering} {Networked} {Data}},\n\tvolume = {31},\n\tcopyright = {All rights reserved},\n\turl = {https://doi.org/10.1109/TKDE.2018.2846555},\n\tdoi = {10.1109/TKDE.2018.2846555 (Impact Factor: 6.977; JCR Ranked Q1; Top Journal in Data Mining)},\n\tnumber = {4},\n\tjournal = {IEEE Transactions on Knowledge and Data Engineering (TKDE)},\n\tauthor = {Guo, Ting and Pan, Shirui and Zhu, Xingquan and Zhang, Chengqi},\n\tyear = {2019},\n\tpages = {706--719},\n}\n\n
\n
\n\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n \n Time series feature learning with labeled and unlabeled data.\n \n \n \n \n\n\n \n Wang, H.; Zhang, Q.; Wu, J.; Pan, S.; and Chen, Y.\n\n\n \n\n\n\n Pattern Recognition (PR), 89: 55–66. 2019.\n \n\n\n\n
\n\n\n\n \n \n \"TimePaper\n  \n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n\n \n  \n \n 1 download\n \n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@article{wang_time_2019,\n\ttitle = {Time series feature learning with labeled and unlabeled data},\n\tvolume = {89},\n\tcopyright = {All rights reserved},\n\turl = {https://doi.org/10.1016/j.patcog.2018.12.026},\n\tdoi = {10.1016/j.patcog.2018.12.026 (Impact Factor: 7.74; JCR Ranked Q1)},\n\tjournal = {Pattern Recognition (PR)},\n\tauthor = {Wang, Haishuai and Zhang, Qin and Wu, Jia and Pan, Shirui and Chen, Yixin},\n\tyear = {2019},\n\tpages = {55--66},\n}\n\n
\n
\n\n\n\n
\n\n\n\n\n\n
\n
\n\n
\n
\n  \n 2018\n \n \n (9)\n \n \n
\n
\n \n \n
\n \n\n \n \n \n \n \n \n Adversarially Regularized Graph Autoencoder for Graph Embedding.\n \n \n \n \n\n\n \n Pan, S.; Hu, R.; Long, G.; Jiang, J.; Yao, L.; and Zhang, C.\n\n\n \n\n\n\n In Lang, J., editor(s), Proceedings of the Twenty-Seventh International Joint Conference on Artificial Intelligence, IJCAI 2018, July 13-19, 2018, Stockholm, Sweden, pages 2609–2615, 2018. ijcai.org\n \n\n\n\n
\n\n\n\n \n \n \"AdversariallyPaper\n  \n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@inproceedings{pan_adversarially_2018,\n\ttitle = {Adversarially {Regularized} {Graph} {Autoencoder} for {Graph} {Embedding}},\n\tcopyright = {All rights reserved},\n\turl = {https://doi.org/10.24963/ijcai.2018/362},\n\tdoi = {10.24963/ijcai.2018/362 (CORE Ranked A*)},\n\tbooktitle = {Proceedings of the {Twenty}-{Seventh} {International} {Joint} {Conference} on {Artificial} {Intelligence}, {IJCAI} 2018, {July} 13-19, 2018, {Stockholm}, {Sweden}},\n\tpublisher = {ijcai.org},\n\tauthor = {Pan, Shirui and Hu, Ruiqi and Long, Guodong and Jiang, Jing and Yao, Lina and Zhang, Chengqi},\n\teditor = {Lang, Jérôme},\n\tyear = {2018},\n\tpages = {2609--2615},\n}\n\n
\n
\n\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n \n Discrete Network Embedding.\n \n \n \n \n\n\n \n Shen, X.; Pan, S.; Liu, W.; Ong, Y.; and Sun, Q.\n\n\n \n\n\n\n In Lang, J., editor(s), Proceedings of the Twenty-Seventh International Joint Conference on Artificial Intelligence, IJCAI 2018, July 13-19, 2018, Stockholm, Sweden, pages 3549–3555, 2018. ijcai.org\n \n\n\n\n
\n\n\n\n \n \n \"DiscretePaper\n  \n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@inproceedings{shen_discrete_2018,\n\ttitle = {Discrete {Network} {Embedding}},\n\tcopyright = {All rights reserved},\n\turl = {https://doi.org/10.24963/ijcai.2018/493},\n\tdoi = {10.24963/ijcai.2018/493 (CORE Ranked A*)},\n\tbooktitle = {Proceedings of the {Twenty}-{Seventh} {International} {Joint} {Conference} on {Artificial} {Intelligence}, {IJCAI} 2018, {July} 13-19, 2018, {Stockholm}, {Sweden}},\n\tpublisher = {ijcai.org},\n\tauthor = {Shen, Xiaobo and Pan, Shirui and Liu, Weiwei and Ong, Yew-Soon and Sun, Quan-Sen},\n\teditor = {Lang, Jérôme},\n\tyear = {2018},\n\tpages = {3549--3555},\n}\n\n
\n
\n\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n \n Active Discriminative Network Representation Learning.\n \n \n \n \n\n\n \n Gao, L.; Yang, H.; Zhou, C.; Wu, J.; Pan, S.; and Hu, Y.\n\n\n \n\n\n\n In Lang, J., editor(s), Proceedings of the Twenty-Seventh International Joint Conference on Artificial Intelligence, IJCAI 2018, July 13-19, 2018, Stockholm, Sweden, pages 2142–2148, 2018. ijcai.org\n \n\n\n\n
\n\n\n\n \n \n \"ActivePaper\n  \n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@inproceedings{gao_active_2018,\n\ttitle = {Active {Discriminative} {Network} {Representation} {Learning}},\n\tcopyright = {All rights reserved},\n\turl = {https://doi.org/10.24963/ijcai.2018/296},\n\tdoi = {10.24963/ijcai.2018/296 (CORE Ranked A*)},\n\tbooktitle = {Proceedings of the {Twenty}-{Seventh} {International} {Joint} {Conference} on {Artificial} {Intelligence}, {IJCAI} 2018, {July} 13-19, 2018, {Stockholm}, {Sweden}},\n\tpublisher = {ijcai.org},\n\tauthor = {Gao, Li and Yang, Hong and Zhou, Chuan and Wu, Jia and Pan, Shirui and Hu, Yue},\n\teditor = {Lang, Jérôme},\n\tyear = {2018},\n\tpages = {2142--2148},\n}\n\n
\n
\n\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n \n DiSAN: Directional Self-Attention Network for RNN/CNN-Free Language Understanding.\n \n \n \n \n\n\n \n Shen, T.; Zhou, T.; Long, G.; Jiang, J.; Pan, S.; and Zhang, C.\n\n\n \n\n\n\n In McIlraith, S. A.; and Weinberger, K. Q., editor(s), Proceedings of the Thirty-Second AAAI Conference on Artificial Intelligence, (AAAI-18), New Orleans, Louisiana, USA, February 2-7, 2018, pages 5446–5455 (CORE Ranked A*), 2018. AAAI Press\n \n\n\n\n
\n\n\n\n \n \n \"DiSAN:Paper\n  \n \n\n \n\n \n link\n  \n \n\n bibtex\n \n\n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@inproceedings{shen_disan:_2018,\n\ttitle = {{DiSAN}: {Directional} {Self}-{Attention} {Network} for {RNN}/{CNN}-{Free} {Language} {Understanding}},\n\tcopyright = {All rights reserved},\n\turl = {https://www.aaai.org/ocs/index.php/AAAI/AAAI18/paper/view/16126},\n\tbooktitle = {Proceedings of the {Thirty}-{Second} {AAAI} {Conference} on {Artificial} {Intelligence}, ({AAAI}-18), {New} {Orleans}, {Louisiana}, {USA}, {February} 2-7, 2018},\n\tpublisher = {AAAI Press},\n\tauthor = {Shen, Tao and Zhou, Tianyi and Long, Guodong and Jiang, Jing and Pan, Shirui and Zhang, Chengqi},\n\teditor = {McIlraith, Sheila A. and Weinberger, Kilian Q.},\n\tyear = {2018},\n\tpages = {5446--5455 (CORE Ranked A*)},\n}\n\n
\n
\n\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n \n Binarized attributed network embedding.\n \n \n \n \n\n\n \n Yang, H.; Pan, S.; Zhang, P.; Chen, L.; Lian, D.; and Zhang, C.\n\n\n \n\n\n\n In IEEE International Conference on Data Mining, ICDM 2018, Singapore, November 17-20, 2018, pages 1476–1481, 2018. IEEE Computer Society\n \n\n\n\n
\n\n\n\n \n \n \"BinarizedPaper\n  \n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@inproceedings{yang_binarized_2018,\n\ttitle = {Binarized attributed network embedding},\n\tcopyright = {All rights reserved},\n\turl = {https://doi.org/10.1109/ICDM.2018.8626170},\n\tdoi = {10.1109/ICDM.2018.8626170 (CORE Ranked A*)},\n\tbooktitle = {{IEEE} {International} {Conference} on {Data} {Mining}, {ICDM} 2018, {Singapore}, {November} 17-20, 2018},\n\tpublisher = {IEEE Computer Society},\n\tauthor = {Yang, Hong and Pan, Shirui and Zhang, Peng and Chen, Ling and Lian, Defu and Zhang, Chengqi},\n\tyear = {2018},\n\tpages = {1476--1481},\n}\n\n
\n
\n\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n A Three-Layered Mutually Reinforced Model for Personalized Citation Recommendation.\n \n \n \n\n\n \n Cai, X.; Han, J.; Li, W.; Zhang, R.; Pan, S.; and Yang, L.\n\n\n \n\n\n\n IEEE Transactions on Neural Networks and Learning Systems (TNNLS), 29(12): 6026–6037. 2018.\n \n\n\n\n
\n\n\n\n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@article{cai_three-layered_2018,\n\ttitle = {A {Three}-{Layered} {Mutually} {Reinforced} {Model} for {Personalized} {Citation} {Recommendation}},\n\tvolume = {29},\n\tcopyright = {All rights reserved},\n\tdoi = {10.1109/TNNLS.2018.2817245 (Impact Factor: 10.451; JCR Ranked Q1)},\n\tnumber = {12},\n\tjournal = {IEEE Transactions on Neural Networks and Learning Systems (TNNLS)},\n\tauthor = {Cai, Xiaoyan and Han, Junwei and Li, Wenjie and Zhang, Renxian and Pan, Shirui and Yang, Libin},\n\tyear = {2018},\n\tpages = {6026--6037},\n}\n\n
\n
\n\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n \n Multi-Instance Learning with Discriminative Bag Mapping.\n \n \n \n \n\n\n \n Wu, J.; Pan, S.; Zhu, X.; Zhang, C.; and Wu, X.\n\n\n \n\n\n\n IEEE Transactions on Knowledge and Data Engineering (TKDE), 30(6): 1065–1080. 2018.\n \n\n\n\n
\n\n\n\n \n \n \"Multi-InstancePaper\n  \n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@article{wu_multi-instance_2018,\n\ttitle = {Multi-{Instance} {Learning} with {Discriminative} {Bag} {Mapping}},\n\tvolume = {30},\n\tcopyright = {All rights reserved},\n\turl = {https://doi.org/10.1109/TKDE.2017.2788430},\n\tdoi = {10.1109/TKDE.2017.2788430 (Impact Factor: 6.977; JCR Ranked Q1; Top Journal in Data Mining)},\n\tnumber = {6},\n\tjournal = {IEEE Transactions on Knowledge and Data Engineering (TKDE)},\n\tauthor = {Wu, Jia and Pan, Shirui and Zhu, Xingquan and Zhang, Chengqi and Wu, Xindong},\n\tyear = {2018},\n\tpages = {1065--1080},\n}\n\n
\n
\n\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n Multiple Structure-View Learning for Graph Classification.\n \n \n \n\n\n \n Wu, J.; Pan, S.; Zhu, X.; Zhang, C.; and Yu, P. S.\n\n\n \n\n\n\n IEEE Transactions on Neural Networks and Learning Systems (TNNLS), 29(7): 3236–3251. 2018.\n \n\n\n\n
\n\n\n\n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@article{wu_multiple_2018,\n\ttitle = {Multiple {Structure}-{View} {Learning} for {Graph} {Classification}},\n\tvolume = {29},\n\tcopyright = {All rights reserved},\n\tdoi = {10.1109/TNNLS.2017.2703832 (Impact Factor: 10.451; JCR Ranked Q1)},\n\tnumber = {7},\n\tjournal = {IEEE Transactions on Neural Networks and Learning Systems (TNNLS)},\n\tauthor = {Wu, Jia and Pan, Shirui and Zhu, Xingquan and Zhang, Chengqi and Yu, Philip S.},\n\tyear = {2018},\n\tpages = {3236--3251},\n}\n\n
\n
\n\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n \n Hashing for Adaptive Real-Time Graph Stream Classification With Concept Drifts.\n \n \n \n \n\n\n \n Chi, L.; Li, B.; Zhu, X.; Pan, S.; and Chen, L.\n\n\n \n\n\n\n IEEE transactions on cybernetics (TCYB), 48(5): 1591–1604. 2018.\n \n\n\n\n
\n\n\n\n \n \n \"HashingPaper\n  \n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@article{chi_hashing_2018,\n\ttitle = {Hashing for {Adaptive} {Real}-{Time} {Graph} {Stream} {Classification} {With} {Concept} {Drifts}},\n\tvolume = {48},\n\tcopyright = {All rights reserved},\n\turl = {https://doi.org/10.1109/TCYB.2017.2708979},\n\tdoi = {10.1109/TCYB.2017.2708979 (Impact Factor: 11.448; JCR Ranked Q1)},\n\tnumber = {5},\n\tjournal = {IEEE transactions on cybernetics (TCYB)},\n\tauthor = {Chi, Lianhua and Li, Bin and Zhu, Xingquan and Pan, Shirui and Chen, Ling},\n\tyear = {2018},\n\tpages = {1591--1604},\n}\n\n
\n
\n\n\n\n
\n\n\n\n\n\n
\n
\n\n
\n
\n  \n 2017\n \n \n (3)\n \n \n
\n
\n \n \n
\n \n\n \n \n \n \n \n Positive and Unlabeled Multi-Graph Learning.\n \n \n \n\n\n \n Wu, J.; Pan, S.; Zhu, X.; Zhang, C.; and Wu, X.\n\n\n \n\n\n\n IEEE transactions on cybernetics (TCYB), 47(4): 818–829. 2017.\n \n\n\n\n
\n\n\n\n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@article{wu_positive_2017,\n\ttitle = {Positive and {Unlabeled} {Multi}-{Graph} {Learning}},\n\tvolume = {47},\n\tcopyright = {All rights reserved},\n\tdoi = {10.1109/TCYB.2016.2527239 (Impact Factor: 11.448; JCR Ranked Q1)},\n\tnumber = {4},\n\tjournal = {IEEE transactions on cybernetics (TCYB)},\n\tauthor = {Wu, Jia and Pan, Shirui and Zhu, Xingquan and Zhang, Chengqi and Wu, Xindong},\n\tyear = {2017},\n\tpages = {818--829},\n}\n\n
\n
\n\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n \n Exploiting Attribute Correlations: A Novel Trace Lasso-Based Weakly Supervised Dictionary Learning Method.\n \n \n \n \n\n\n \n Wu, L.; Wang, Y.; and Pan, S.\n\n\n \n\n\n\n IEEE transactions on cybernetics (TCYB), 47(12): 4497–4508. 2017.\n \n\n\n\n
\n\n\n\n \n \n \"ExploitingPaper\n  \n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@article{wu_exploiting_2017,\n\ttitle = {Exploiting {Attribute} {Correlations}: {A} {Novel} {Trace} {Lasso}-{Based} {Weakly} {Supervised} {Dictionary} {Learning} {Method}},\n\tvolume = {47},\n\tcopyright = {All rights reserved},\n\turl = {https://doi.org/10.1109/TCYB.2016.2612686},\n\tdoi = {10.1109/TCYB.2016.2612686 (Impact Factor: 11.448; JCR Ranked Q1)},\n\tnumber = {12},\n\tjournal = {IEEE transactions on cybernetics (TCYB)},\n\tauthor = {Wu, Lin and Wang, Yang and Pan, Shirui},\n\tyear = {2017},\n\tpages = {4497--4508},\n}\n\n
\n
\n\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n Task Sensitive Feature Exploration and Learning for Multitask Graph Classification.\n \n \n \n\n\n \n Pan, S.; Wu, J.; Zhu, X.; Long, G.; and Zhang, C.\n\n\n \n\n\n\n IEEE transactions on cybernetics (TCYB), 47(3): 744–758. 2017.\n \n\n\n\n
\n\n\n\n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@article{pan_task_2017,\n\ttitle = {Task {Sensitive} {Feature} {Exploration} and {Learning} for {Multitask} {Graph} {Classification}},\n\tvolume = {47},\n\tcopyright = {All rights reserved},\n\tdoi = {10.1109/TCYB.2016.2526058 (Impact Factor: 11.448; JCR Ranked Q1)},\n\tnumber = {3},\n\tjournal = {IEEE transactions on cybernetics (TCYB)},\n\tauthor = {Pan, Shirui and Wu, Jia and Zhu, Xingquan and Long, Guodong and Zhang, Chengqi},\n\tyear = {2017},\n\tpages = {744--758},\n}\n\n
\n
\n\n\n\n
\n\n\n\n\n\n
\n
\n\n
\n
\n  \n 2016\n \n \n (5)\n \n \n
\n
\n \n \n
\n \n\n \n \n \n \n \n Tri-Party Deep Network Representation.\n \n \n \n\n\n \n Pan, S.; Wu, J.; Zhu, X.; Zhang, C.; and Wang, Y.\n\n\n \n\n\n\n In Kambhampati, S., editor(s), Proceedings of the Twenty-Fifth International Joint Conference on Artificial Intelligence, IJCAI 2016, New York, NY, USA, 9-15 July 2016, pages 1895–1901 (CORE Ranked A*), 2016. IJCAI/AAAI Press\n \n\n\n\n
\n\n\n\n \n\n \n\n \n link\n  \n \n\n bibtex\n \n\n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@inproceedings{pan_tri-party_2016,\n\ttitle = {Tri-{Party} {Deep} {Network} {Representation}},\n\tcopyright = {All rights reserved},\n\tbooktitle = {Proceedings of the {Twenty}-{Fifth} {International} {Joint} {Conference} on {Artificial} {Intelligence}, {IJCAI} 2016, {New} {York}, {NY}, {USA}, 9-15 {July} 2016},\n\tpublisher = {IJCAI/AAAI Press},\n\tauthor = {Pan, Shirui and Wu, Jia and Zhu, Xingquan and Zhang, Chengqi and Wang, Yang},\n\teditor = {Kambhampati, Subbarao},\n\tyear = {2016},\n\tpages = {1895--1901 (CORE Ranked A*)},\n}\n\n
\n
\n\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n Iterative Views Agreement: An Iterative Low-Rank Based Structured Optimization Method to Multi-View Spectral Clustering.\n \n \n \n\n\n \n Wang, Y.; Zhang, W.; Wu, L.; Lin, X.; Fang, M.; and Pan, S.\n\n\n \n\n\n\n In Kambhampati, S., editor(s), Proceedings of the Twenty-Fifth International Joint Conference on Artificial Intelligence, IJCAI 2016, New York, NY, USA, 9-15 July 2016, pages 2153–2159 (CORE Ranked A*), 2016. IJCAI/AAAI Press\n \n\n\n\n
\n\n\n\n \n\n \n\n \n link\n  \n \n\n bibtex\n \n\n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@inproceedings{wang_iterative_2016,\n\ttitle = {Iterative {Views} {Agreement}: {An} {Iterative} {Low}-{Rank} {Based} {Structured} {Optimization} {Method} to {Multi}-{View} {Spectral} {Clustering}},\n\tcopyright = {All rights reserved},\n\tbooktitle = {Proceedings of the {Twenty}-{Fifth} {International} {Joint} {Conference} on {Artificial} {Intelligence}, {IJCAI} 2016, {New} {York}, {NY}, {USA}, 9-15 {July} 2016},\n\tpublisher = {IJCAI/AAAI Press},\n\tauthor = {Wang, Yang and Zhang, Wenjie and Wu, Lin and Lin, Xuemin and Fang, Meng and Pan, Shirui},\n\teditor = {Kambhampati, Subbarao},\n\tyear = {2016},\n\tpages = {2153--2159 (CORE Ranked A*)},\n}\n\n
\n
\n\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n \n Direct Discriminative Bag Mapping for Multi-Instance Learning.\n \n \n \n \n\n\n \n Wu, J.; Pan, S.; Zhang, P.; and Zhu, X.\n\n\n \n\n\n\n In Schuurmans, D.; and Wellman, M. P., editor(s), Proceedings of the Thirtieth AAAI Conference on Artificial Intelligence, February 12-17, 2016, Phoenix, Arizona, USA, pages 4274–4275 (CORE Ranked A*), 2016. AAAI Press\n \n\n\n\n
\n\n\n\n \n \n \"DirectPaper\n  \n \n\n \n\n \n link\n  \n \n\n bibtex\n \n\n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@inproceedings{wu_direct_2016,\n\ttitle = {Direct {Discriminative} {Bag} {Mapping} for {Multi}-{Instance} {Learning}},\n\tcopyright = {All rights reserved},\n\turl = {http://www.aaai.org/ocs/index.php/AAAI/AAAI16/paper/view/11781},\n\tbooktitle = {Proceedings of the {Thirtieth} {AAAI} {Conference} on {Artificial} {Intelligence}, {February} 12-17, 2016, {Phoenix}, {Arizona}, {USA}},\n\tpublisher = {AAAI Press},\n\tauthor = {Wu, Jia and Pan, Shirui and Zhang, Peng and Zhu, Xingquan},\n\teditor = {Schuurmans, Dale and Wellman, Michael P.},\n\tyear = {2016},\n\tpages = {4274--4275 (CORE Ranked A*)},\n}\n\n
\n
\n\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n Joint Structure Feature Exploration and Regularization for Multi-Task Graph Classification.\n \n \n \n\n\n \n Pan, S.; Wu, J.; Zhu, X.; Zhang, C.; and Yu, P. S.\n\n\n \n\n\n\n IEEE Transactions on Knowledge and Data Engineering (TKDE), 28(3): 715–728. 2016.\n \n\n\n\n
\n\n\n\n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@article{pan_joint_2016,\n\ttitle = {Joint {Structure} {Feature} {Exploration} and {Regularization} for {Multi}-{Task} {Graph} {Classification}},\n\tvolume = {28},\n\tcopyright = {All rights reserved},\n\tdoi = {10.1109/TKDE.2015.2492567 (Impact Factor: 6.977; JCR Ranked Q1; Top Journal in Data Mining)},\n\tnumber = {3},\n\tjournal = {IEEE Transactions on Knowledge and Data Engineering (TKDE)},\n\tauthor = {Pan, Shirui and Wu, Jia and Zhu, Xingquan and Zhang, Chengqi and Yu, Philip S.},\n\tyear = {2016},\n\tpages = {715--728},\n}\n\n
\n
\n\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n SODE: Self-Adaptive One-Dependence Estimators for classification.\n \n \n \n\n\n \n Wu, J.; Pan, S.; Zhu, X.; Zhang, P.; and Zhang, C.\n\n\n \n\n\n\n Pattern Recognition (PR), 51: 358–377. 2016.\n \n\n\n\n
\n\n\n\n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@article{wu_sode:_2016,\n\ttitle = {{SODE}: {Self}-{Adaptive} {One}-{Dependence} {Estimators} for classification},\n\tvolume = {51},\n\tcopyright = {All rights reserved},\n\tdoi = {10.1016/j.patcog.2015.08.023 (Impact Factor: 7.74; JCR Ranked Q1)},\n\tjournal = {Pattern Recognition (PR)},\n\tauthor = {Wu, Jia and Pan, Shirui and Zhu, Xingquan and Zhang, Peng and Zhang, Chengqi},\n\tyear = {2016},\n\tpages = {358--377},\n}\n\n
\n
\n\n\n\n
\n\n\n\n\n\n
\n
\n\n
\n
\n  \n 2015\n \n \n (5)\n \n \n
\n
\n \n \n
\n \n\n \n \n \n \n \n Multi-Graph-View Learning for Complicated Object Classification.\n \n \n \n\n\n \n Wu, J.; Pan, S.; Zhu, X.; Cai, Z.; and Zhang, C.\n\n\n \n\n\n\n In Yang, Q.; and Wooldridge, M. J., editor(s), Proceedings of the Twenty-Fourth International Joint Conference on Artificial Intelligence, IJCAI 2015, Buenos Aires, Argentina, July 25-31, 2015, pages 3953–3959 (CORE Ranked A*), 2015. AAAI Press\n \n\n\n\n
\n\n\n\n \n\n \n\n \n link\n  \n \n\n bibtex\n \n\n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@inproceedings{wu_multi-graph-view_2015,\n\ttitle = {Multi-{Graph}-{View} {Learning} for {Complicated} {Object} {Classification}},\n\tcopyright = {All rights reserved},\n\tbooktitle = {Proceedings of the {Twenty}-{Fourth} {International} {Joint} {Conference} on {Artificial} {Intelligence}, {IJCAI} 2015, {Buenos} {Aires}, {Argentina}, {July} 25-31, 2015},\n\tpublisher = {AAAI Press},\n\tauthor = {Wu, Jia and Pan, Shirui and Zhu, Xingquan and Cai, Zhihua and Zhang, Chengqi},\n\teditor = {Yang, Qiang and Wooldridge, Michael J.},\n\tyear = {2015},\n\tpages = {3953--3959 (CORE Ranked A*)},\n}\n\n
\n
\n\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n Finding the best not the most: regularized loss minimization subgraph selection for graph classification.\n \n \n \n\n\n \n Pan, S.; Wu, J.; Zhu, X.; Long, G.; and Zhang, C.\n\n\n \n\n\n\n Pattern Recognition (PR), 48(11): 3783–3796. 2015.\n \n\n\n\n
\n\n\n\n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@article{pan_finding_2015,\n\ttitle = {Finding the best not the most: regularized loss minimization subgraph selection for graph classification},\n\tvolume = {48},\n\tcopyright = {All rights reserved},\n\tdoi = {10.1016/j.patcog.2015.05.019 (Impact Factor: 7.74; JCR Ranked Q1)},\n\tnumber = {11},\n\tjournal = {Pattern Recognition (PR)},\n\tauthor = {Pan, Shirui and Wu, Jia and Zhu, Xingquan and Long, Guodong and Zhang, Chengqi},\n\tyear = {2015},\n\tpages = {3783--3796},\n}\n\n
\n
\n\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n Graph Ensemble Boosting for Imbalanced Noisy Graph Stream Classification.\n \n \n \n\n\n \n Pan, S.; Wu, J.; Zhu, X.; and Zhang, C.\n\n\n \n\n\n\n IEEE transactions on cybernetics (TCYB), 45(5): 940–954. 2015.\n \n\n\n\n
\n\n\n\n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@article{pan_graph_2015,\n\ttitle = {Graph {Ensemble} {Boosting} for {Imbalanced} {Noisy} {Graph} {Stream} {Classification}},\n\tvolume = {45},\n\tcopyright = {All rights reserved},\n\tdoi = {10.1109/TCYB.2014.2341031 (Impact Factor: 11.448; JCR Ranked Q1)},\n\tnumber = {5},\n\tjournal = {IEEE transactions on cybernetics (TCYB)},\n\tauthor = {Pan, Shirui and Wu, Jia and Zhu, Xingquan and Zhang, Chengqi},\n\tyear = {2015},\n\tpages = {940--954},\n}\n\n
\n
\n\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n CogBoost: Boosting for Fast Cost-Sensitive Graph Classification.\n \n \n \n\n\n \n Pan, S.; Wu, J.; and Zhu, X.\n\n\n \n\n\n\n IEEE Transactions on Knowledge and Data Engineering (TKDE), 27(11): 2933–2946. 2015.\n \n\n\n\n
\n\n\n\n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@article{pan_cogboost:_2015,\n\ttitle = {{CogBoost}: {Boosting} for {Fast} {Cost}-{Sensitive} {Graph} {Classification}},\n\tvolume = {27},\n\tcopyright = {All rights reserved},\n\tdoi = {10.1109/TKDE.2015.2391115 (Impact Factor: 6.977; JCR Ranked Q1; Top Journal in Data Mining)},\n\tnumber = {11},\n\tjournal = {IEEE Transactions on Knowledge and Data Engineering (TKDE)},\n\tauthor = {Pan, Shirui and Wu, Jia and Zhu, Xingquan},\n\tyear = {2015},\n\tpages = {2933--2946},\n}\n\n
\n
\n\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n Boosting for Multi-Graph Classification.\n \n \n \n\n\n \n Wu, J.; Pan, S.; Zhu, X.; and Cai, Z.\n\n\n \n\n\n\n IEEE transactions on cybernetics (TCYB), 45(3): 430–443. 2015.\n \n\n\n\n
\n\n\n\n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@article{wu_boosting_2015,\n\ttitle = {Boosting for {Multi}-{Graph} {Classification}},\n\tvolume = {45},\n\tcopyright = {All rights reserved},\n\tdoi = {10.1109/TCYB.2014.2327111 (Impact Factor: 11.448; JCR Ranked Q1)},\n\tnumber = {3},\n\tjournal = {IEEE transactions on cybernetics (TCYB)},\n\tauthor = {Wu, Jia and Pan, Shirui and Zhu, Xingquan and Cai, Zhihua},\n\tyear = {2015},\n\tpages = {430--443},\n}\n\n
\n
\n\n\n\n
\n\n\n\n\n\n
\n
\n\n
\n
\n  \n 2014\n \n \n (1)\n \n \n
\n
\n \n \n
\n \n\n \n \n \n \n \n Multi-graph-view Learning for Graph Classification.\n \n \n \n\n\n \n Wu, J.; Hong, Z.; Pan, S.; Zhu, X.; Cai, Z.; and Zhang, C.\n\n\n \n\n\n\n In Kumar, R.; Toivonen, H.; Pei, J.; Huang, J. Z.; and Wu, X., editor(s), 2014 IEEE International Conference on Data Mining, ICDM 2014, Shenzhen, China, December 14-17, 2014, pages 590–599, 2014. IEEE Computer Society\n \n\n\n\n
\n\n\n\n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@inproceedings{wu_multi-graph-view_2014,\n\ttitle = {Multi-graph-view {Learning} for {Graph} {Classification}},\n\tcopyright = {All rights reserved},\n\tdoi = {10.1109/ICDM.2014.97 (CORE Ranked A*)},\n\tbooktitle = {2014 {IEEE} {International} {Conference} on {Data} {Mining}, {ICDM} 2014, {Shenzhen}, {China}, {December} 14-17, 2014},\n\tpublisher = {IEEE Computer Society},\n\tauthor = {Wu, Jia and Hong, Zhibin and Pan, Shirui and Zhu, Xingquan and Cai, Zhihua and Zhang, Chengqi},\n\teditor = {Kumar, Ravi and Toivonen, Hannu and Pei, Jian and Huang, Joshua Zhexue and Wu, Xindong},\n\tyear = {2014},\n\tpages = {590--599},\n}\n\n
\n
\n\n\n\n
\n\n\n\n\n\n
\n
\n\n
\n
\n  \n 2013\n \n \n (2)\n \n \n
\n
\n \n \n
\n \n\n \n \n \n \n \n Graph stream classification using labeled and unlabeled graphs.\n \n \n \n\n\n \n Pan, S.; Zhu, X.; Zhang, C.; and Yu, P. S.\n\n\n \n\n\n\n In Jensen, C. S.; Jermaine, C. M.; and Zhou, X., editor(s), 29th IEEE International Conference on Data Engineering, ICDE 2013, Brisbane, Australia, April 8-12, 2013, pages 398–409, 2013. IEEE Computer Society\n \n\n\n\n
\n\n\n\n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@inproceedings{pan_graph_2013,\n\ttitle = {Graph stream classification using labeled and unlabeled graphs},\n\tcopyright = {All rights reserved},\n\tdoi = {10.1109/ICDE.2013.6544842 (CORE Ranked A*)},\n\tbooktitle = {29th {IEEE} {International} {Conference} on {Data} {Engineering}, {ICDE} 2013, {Brisbane}, {Australia}, {April} 8-12, 2013},\n\tpublisher = {IEEE Computer Society},\n\tauthor = {Pan, Shirui and Zhu, Xingquan and Zhang, Chengqi and Yu, Philip S.},\n\teditor = {Jensen, Christian S. and Jermaine, Christopher M. and Zhou, Xiaofang},\n\tyear = {2013},\n\tpages = {398--409},\n}\n\n
\n
\n\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n Graph Classification with Imbalanced Class Distributions and Noise.\n \n \n \n\n\n \n Pan, S.; and Zhu, X.\n\n\n \n\n\n\n In Rossi, F., editor(s), IJCAI 2013, Proceedings of the 23rd International Joint Conference on Artificial Intelligence, Beijing, China, August 3-9, 2013, pages 1586–1592 (CORE Ranked A*), 2013. IJCAI/AAAI\n \n\n\n\n
\n\n\n\n \n\n \n\n \n link\n  \n \n\n bibtex\n \n\n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@inproceedings{pan_graph_2013-1,\n\ttitle = {Graph {Classification} with {Imbalanced} {Class} {Distributions} and {Noise}},\n\tcopyright = {All rights reserved},\n\tbooktitle = {{IJCAI} 2013, {Proceedings} of the 23rd {International} {Joint} {Conference} on {Artificial} {Intelligence}, {Beijing}, {China}, {August} 3-9, 2013},\n\tpublisher = {IJCAI/AAAI},\n\tauthor = {Pan, Shirui and Zhu, Xingquan},\n\teditor = {Rossi, Francesca},\n\tyear = {2013},\n\tpages = {1586--1592 (CORE Ranked A*)},\n}\n\n
\n
\n\n\n\n
\n\n\n\n\n\n
\n
\n\n\n\n\n
\n\n\n \n\n \n \n \n \n\n
\n"}; document.write(bibbase_data.data);