var bibbase_data = {"data":"\"Loading..\"\n\n
\n\n \n\n \n\n \n \n\n \n\n \n \n\n \n\n \n
\n generated by\n \n \"bibbase.org\"\n\n \n
\n \n\n
\n\n \n\n\n
You can create a new website with this list, or embed it in an existing web page by copying and pasting any of the following snippets.

JavaScript (easiest):

<script src="https://bibbase.org/show?bib=https%3A%2F%2Fbibbase.org%2Fnetwork%2Ffiles%2FdzSEA6BwTnC4hoDJj&jsonp=1"></script>

PHP:

<?php
$contents = file_get_contents("https://bibbase.org/show?bib=https%3A%2F%2Fbibbase.org%2Fnetwork%2Ffiles%2FdzSEA6BwTnC4hoDJj&jsonp=1");
print_r($contents);
?>

iFrame (not recommended):

<iframe src="https://bibbase.org/show?bib=https%3A%2F%2Fbibbase.org%2Fnetwork%2Ffiles%2FdzSEA6BwTnC4hoDJj&jsonp=1"></iframe>

For more details see the documentation.
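
If you prefer to load the list asynchronously rather than through a blocking script tag, the JavaScript sketch below shows one way to do it. It assumes your page contains a container element with id "bibbase-publications" (a name chosen here purely for illustration) and that the same show endpoint returns rendered HTML when the jsonp parameter is omitted; both are assumptions to verify against your own setup.

// Minimal sketch: fetch the BibBase-rendered list and inject it into the page.
// Assumptions: a <div id="bibbase-publications"></div> exists in the document,
// and the show endpoint returns plain HTML when jsonp is omitted.
const BIBBASE_URL =
  "https://bibbase.org/show?bib=https%3A%2F%2Fbibbase.org%2Fnetwork%2Ffiles%2FdzSEA6BwTnC4hoDJj";

fetch(BIBBASE_URL)
  .then((response) => {
    if (!response.ok) {
      throw new Error("BibBase request failed: " + response.status);
    }
    return response.text();
  })
  .then((html) => {
    document.getElementById("bibbase-publications").innerHTML = html;
  })
  .catch((error) => {
    console.error("Could not load the publication list:", error);
  });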

2023 (5)

Learning Uncertainty for Unknown Domains with Zero-Target-Assumption.
Yu, Y.; Sajjad, H.; and Xu, J.
In International Conference on Learning Representations (ICLR), May 2023.

@InProceedings{yu_generalization_iclr2023,
  title     = {{Learning Uncertainty for Unknown Domains with Zero-Target-Assumption}},
  author    = {Yu Yu and Hassan Sajjad and Jia Xu},
  booktitle = {International Conference on Learning Representations (ICLR)},
  year      = {2023},
  location  = {Kigali, Rwanda},
  keywords  = {conference},
  month     = {May},
  url       = {https://openreview.net/pdf?id=pWVASryOyFw},
}

Impact of Adversarial Training on Robustness and Generalizability of Language Models.
Altinisik, E.; Sajjad, H.; Sencar, H. T.; Messaoud, S.; and Chawla, S.
In Proceedings of the Findings of Association for Computational Linguistics (ACL), Jul 2023.

@InProceedings{adverserial_acl23,
  title     = {{Impact of Adversarial Training on Robustness and Generalizability of Language Models}},
  author    = {Enes Altinisik and Hassan Sajjad and Husrev Taha Sencar and Safa Messaoud and Sanjay Chawla},
  booktitle = {Proceedings of the Findings of Association for Computational Linguistics (ACL)},
  year      = {2023},
  location  = {Toronto, Canada},
  month     = {Jul},
  url       = {https://arxiv.org/abs/2211.05523},
}

NeuroX Library for Neuron Analysis of Deep NLP Models.
Dalvi, F.; Sajjad, H.; and Durrani, N.
In Proceedings of the Association for Computational Linguistics (ACL), Jul 2023.

@InProceedings{neurox-acl23:demo,
  title     = {{NeuroX Library for Neuron Analysis of Deep NLP Models}},
  author    = {Fahim Dalvi and Hassan Sajjad and Nadir Durrani},
  booktitle = {Proceedings of the Association for Computational Linguistics (ACL)},
  year      = {2023},
  location  = {Toronto, Canada},
  month     = {Jul},
}

NxPlain: A Web-based Tool for Discovery of Latent Concepts.
Dalvi, F.; Durrani, N.; Sajjad, H.; Jaban, T.; Husaini, M.; and Abbas, U.
In Proceedings of the European Chapter of the Association for Computational Linguistics (EACL), May 2023.

@InProceedings{nxplain-eacl23:demo,
  title     = {{NxPlain: A Web-based Tool for Discovery of Latent Concepts}},
  author    = {Fahim Dalvi and Nadir Durrani and Hassan Sajjad and Tamim Jaban and Mus'ab Husaini and Ummar Abbas},
  booktitle = {Proceedings of the European Chapter of the Association for Computational Linguistics (EACL)},
  year      = {2023},
  location  = {Dubrovnik, Croatia},
  month     = {May},
  url       = {https://aclanthology.org/2023.eacl-demo.10/},
}

ConceptX: A Framework for Latent Concept Analysis.
Alam, F.; Dalvi, F.; Durrani, N.; Sajjad, H.; Khan, A. R.; and Xu, J.
In AAAI Conference on Artificial Intelligence (AAAI), February 2023.

@InProceedings{conceptx-aaai23:demo,
  title     = {{ConceptX: A Framework for Latent Concept Analysis}},
  author    = {Firoj Alam and Fahim Dalvi and Nadir Durrani and Hassan Sajjad and Abdul Rafae Khan and Jia Xu},
  booktitle = {AAAI Conference on Artificial Intelligence (AAAI)},
  year      = {2023},
  location  = {Washington DC, USA},
  month     = {February},
  url       = {https://arxiv.org/pdf/2211.06642.pdf},
}

2022 (7)

Neuron-level Interpretation of Deep NLP Models: A Survey.
Sajjad, H.; Durrani, N.; and Dalvi, F.
Transactions of the Association for Computational Linguistics (TACL). 2022.

@article{sajjad_neuron_survey:tacl22,
  title   = {{Neuron-level Interpretation of Deep NLP Models: A Survey}},
  author  = {Hassan Sajjad and Nadir Durrani and Fahim Dalvi},
  journal = {Transactions of the Association for Computational Linguistics (TACL)},
  year    = {2022},
}

On the Effect of Dropping Layers of Pre-trained Transformer Models.
Sajjad, H.; Dalvi, F.; Durrani, N.; and Nakov, P.
Computer Speech & Language (CSL). 2022.

@article{sajjad_poorman:csl2022,
  title   = {{On the Effect of Dropping Layers of Pre-trained Transformer Models}},
  author  = {Hassan Sajjad and Fahim Dalvi and Nadir Durrani and Preslav Nakov},
  journal = {Computer Speech \& Language (CSL)},
  year    = {2022},
  issn    = {0885-2308},
  doi     = {10.1016/j.csl.2022.101429},
}

Effect of Post-processing on Contextualized Word Representations.
Sajjad, H.; Alam, F.; Dalvi, F.; and Durrani, N.
In Proceedings of the 29th International Conference on Computational Linguistics (COLING), pages 3127–3142, Gyeongju, Republic of Korea, Oct 2022. International Committee on Computational Linguistics.

@inproceedings{sajjad-etal-2022-effect,
  title     = {Effect of Post-processing on Contextualized Word Representations},
  author    = {Sajjad, Hassan and Alam, Firoj and Dalvi, Fahim and Durrani, Nadir},
  booktitle = {Proceedings of the 29th International Conference on Computational Linguistics (COLING)},
  month     = {Oct},
  year      = {2022},
  address   = {Gyeongju, Republic of Korea},
  publisher = {International Committee on Computational Linguistics},
  url       = {https://aclanthology.org/2022.coling-1.277},
  pages     = {3127--3142},
}

Probing for Constituency Structure in Neural Language Models.
Arps, D.; Samih, Y.; Kallmeyer, L.; and Sajjad, H.
In Findings of the Conference on Empirical Methods in Natural Language Processing (EMNLP), Abu Dhabi, Dec 2022.

@inproceedings{david2022_constituency,
  title     = {{Probing for Constituency Structure in Neural Language Models}},
  author    = {David Arps and Younes Samih and Laura Kallmeyer and Hassan Sajjad},
  booktitle = {Findings of the Conference on Empirical Methods in Natural Language Processing (EMNLP)},
  year      = {2022},
  month     = {Dec},
  address   = {Abu Dhabi},
  url       = {https://aclanthology.org/2022.findings-emnlp.502/},
}

On the Transformation of Latent Space in Fine-Tuned NLP Models.
Durrani, N.; Sajjad, H.; Dalvi, F.; and Alam, F.
In Proceedings of the Conference on Empirical Methods in Natural Language Processing (EMNLP), Abu Dhabi, Dec 2022.

@inproceedings{durrani2022_latent_ftmodels,
  title     = {{On the Transformation of Latent Space in Fine-Tuned NLP Models}},
  author    = {Nadir Durrani and Hassan Sajjad and Fahim Dalvi and Firoj Alam},
  booktitle = {Proceedings of the Conference on Empirical Methods in Natural Language Processing (EMNLP)},
  year      = {2022},
  month     = {Dec},
  address   = {Abu Dhabi},
  url       = {https://aclanthology.org/2022.emnlp-main.97/},
}

Discovering Latent Concepts Learned in BERT.
Dalvi, F.; Khan, A. R.; Alam, F.; Durrani, N.; Xu, J.; and Sajjad, H.
In International Conference on Learning Representations (ICLR), Online, May 2022.

@inproceedings{dalvi2022discovering,
  title     = {{Discovering Latent Concepts Learned in BERT}},
  author    = {Fahim Dalvi and Abdul Rafae Khan and Firoj Alam and Nadir Durrani and Jia Xu and Hassan Sajjad},
  booktitle = {International Conference on Learning Representations (ICLR)},
  year      = {2022},
  month     = {May},
  address   = {Online},
  url       = {https://openreview.net/pdf?id=POTMtpYI1xH},
}

Analyzing Encoded Concepts in Transformer Language Models.
Sajjad, H.; Durrani, N.; Dalvi, F.; Alam, F.; Khan, A.; and Xu, J.
In Proceedings of the 2022 Conference of the North American Chapter of the Association for Computational Linguistics: Human Language Technologies (NAACL-HLT), Seattle, United States, July 2022.

@inproceedings{sajjad-etal-2022-analyzing,
  title     = {{Analyzing Encoded Concepts in Transformer Language Models}},
  author    = {Sajjad, Hassan and Durrani, Nadir and Dalvi, Fahim and Alam, Firoj and Khan, Abdul and Xu, Jia},
  booktitle = {Proceedings of the 2022 Conference of the North American Chapter of the Association for Computational Linguistics: Human Language Technologies (NAACL-HLT)},
  month     = {July},
  year      = {2022},
  address   = {Seattle, United States},
  url       = {https://aclanthology.org/2022.naacl-main.225/},
}