\n \n \n
\n
\n\n \n \n \n \n \n \n Contextual knowledge-informed deep domain generalization for bearing fault diagnosis.\n \n \n \n \n\n\n \n Lundström, A.; O’Nils, M.; and Qureshi, F. Z.\n\n\n \n\n\n\n
IEEE Access, 12: 196842–196854. December 2024.\n
\n\n
\n\n
\n\n
\n\n \n \n
paper\n \n \n\n \n \n doi\n \n \n\n \n link\n \n \n\n bibtex\n \n\n \n\n \n\n \n \n \n \n \n \n \n\n \n \n \n \n \n\n\n\n
\n
@article{LMQ24,
  author    = {Lundstr{\"o}m, Adam and O'Nils, Mattias and Qureshi, Faisal Z.},
  title     = {Contextual knowledge-informed deep domain generalization for bearing fault diagnosis},
  journal   = {IEEE Access},
  volume    = {12},
  month     = dec,
  year      = {2024},
  pages     = {196842--196854},
  keywords  = {bearing-fault-diagnosis},
  url_Paper = {pubs/24-ieee-access-j.pdf},
  doi       = {10.1109/ACCESS.2024.3520624}
}
\n
\n\n\n\n
\n\n\n
\n
\n\n \n \n \n \n \n \n Hyperspectral Image Compression Using Sampling and Implicit Neural Representations.\n \n \n \n \n\n\n \n Rezasoltani, S.; and Qureshi, F. Z.\n\n\n \n\n\n\n
IEEE Transactions on Geoscience and Remote Sensing, 63: 12pp. December 2024.\n
\n\n
\n\n
\n\n
\n\n \n \n
paper\n \n \n\n \n \n doi\n \n \n\n \n link\n \n \n\n bibtex\n \n\n \n\n \n\n \n \n \n \n \n \n \n\n \n \n \n \n \n\n\n\n
\n
@article{RQ24,
  author    = {Rezasoltani, Shima and Qureshi, Faisal Z.},
  title     = {Hyperspectral Image Compression Using Sampling and Implicit Neural Representations},
  journal   = {IEEE Transactions on Geoscience and Remote Sensing},
  pages     = {12pp},
  volume    = {63},
  month     = dec,
  year      = {2024},
  issn      = {1558-0644},
  keywords  = {hsi-inr-compression},
  doi       = {10.1109/TGRS.2024.3509718},
  url_Paper = {pubs/24-ieee-tgrs-j-hsi-compression.pdf}
}
\n
\n\n\n\n
\n\n\n
\n
\n\n \n \n \n \n \n \n 20 years of the Bio-Analytic Resource for Plant Biology.\n \n \n \n \n\n\n \n Sullivan, A.; Lombardo, M.; Pasha, A.; Lau, V.; Zhuang, J.; Christendat, A.; Pereira, B.; Zhao, T.; Li, Y.; Wong, R.; Qureshi, F.; and Provart, N.\n\n\n \n\n\n\n
Nucleic Acids Research, 53: 1576–1586. October 2024.\n
Journal published in January 2025\n\n
\n\n
\n\n
\n\n \n \n
Paper\n \n \n\n \n \n doi\n \n \n\n \n link\n \n \n\n bibtex\n \n\n \n \n \n abstract \n \n\n \n\n \n \n \n \n \n \n \n\n \n \n \n\n\n\n
\n
@article{10.1093/nar/gkae920,
  author   = {Sullivan, Alexander and Lombardo, Michael N. and Pasha, Asher and Lau, Vincent and Zhuang, Jian Yun and Christendat, Ashley and Pereira, Bruno and Zhao, Tianhui and Li, Youyang and Wong, Rachel and Qureshi, Faisal Z. and Provart, Nicholas J.},
  title    = {20 years of the {Bio-Analytic Resource} for Plant Biology},
  journal  = {Nucleic Acids Research},
  pages    = {1576--1586},
  volume   = {53},
  number   = {D1},
  year     = {2024},
  month    = oct,
  abstract = {The Bio-Analytic Resource for Plant Biology (‘the BAR’, at https://bar.utoronto.ca) is celebrating its 20th year in operation in 2025. The BAR encompasses and provides visualization tools for large ‘omics data sets from plants. The BAR covers data from Arabidopsis, tomato, wheat, barley and 29 other plant species (with data for 2 others to be released soon). These data include nucleotide and protein sequence data, gene expression data, protein-protein and protein–DNA interactions, protein structures, subcellular localizations, and polymorphisms. The data are stored in more than 200 relational databases holding 186 GB of data and are presented to the researchers via web apps. These web apps provide data analysis and visualization tools. Some of the most popular tools are eFP (‘electronic fluorescent pictograph’) Browsers, ePlants and ThaleMine (an Arabidopsis-specific instance of InterMine). The BAR was designated a Global Core Biodata Resource in 2023. Like other GCBRs, the BAR has excellent operational stability, provides access without login requirement, and provides an API for researchers to be able to access BAR data programmatically. We present in this update a new overarching search tool called Gaia that permits easy access to all BAR data, powered by machine learning and artificial intelligence.},
  issn     = {0305-1048},
  doi      = {10.1093/nar/gkae920},
  url      = {https://doi.org/10.1093/nar/gkae920},
  eprint   = {https://academic.oup.com/nar/advance-article-pdf/doi/10.1093/nar/gkae920/59995839/gkae920.pdf},
  note     = {Journal published in January 2025}
}
\n
\n\n\n
\n The Bio-Analytic Resource for Plant Biology (‘the BAR’, at https://bar.utoronto.ca) is celebrating its 20th year in operation in 2025. The BAR encompasses and provides visualization tools for large ‘omics data sets from plants. The BAR covers data from Arabidopsis, tomato, wheat, barley and 29 other plant species (with data for 2 others to be released soon). These data include nucleotide and protein sequence data, gene expression data, protein-protein and protein–DNA interactions, protein structures, subcellular localizations, and polymorphisms. The data are stored in more than 200 relational databases holding 186 GB of data and are presented to the researchers via web apps. These web apps provide data analysis and visualization tools. Some of the most popular tools are eFP (‘electronic fluorescent pictograph’) Browsers, ePlants and ThaleMine (an Arabidopsis-specific instance of InterMine). The BAR was designated a Global Core Biodata Resource in 2023. Like other GCBRs, the BAR has excellent operational stability, provides access without login requirement, and provides an API for researchers to be able to access BAR data programmatically. We present in this update a new overarching search tool called Gaia that permits easy access to all BAR data, powered by machine learning and artificial intelligence.\n
\n\n\n
\n\n\n
\n
\n\n \n \n \n \n \n \n Attention Based Simple Primitives for Open-World Compositional Zero-Shot Learning.\n \n \n \n \n\n\n \n Munir, A.; Qureshi, F.; Khan, M.; and Ali, M.\n\n\n \n\n\n\n In
Proc. 25th Conference on Digital Image Computing Techniques & Applications (DICTA24), pages 8pp, Perth, November 2024. \n
\n\n
\n\n
\n\n
\n\n \n \n
paper\n \n \n\n \n \n doi\n \n \n\n \n link\n \n \n\n bibtex\n \n\n \n\n \n\n \n \n \n \n \n \n \n\n \n \n \n \n \n\n\n\n
\n
@InProceedings{24-dicta-c,
  author    = {Munir, A. and Qureshi, F. Z. and Khan, M. H. and Ali, M.},
  title     = {Attention Based Simple Primitives for Open-World Compositional Zero-Shot Learning},
  booktitle = {Proc. 25th Conference on Digital Image Computing Techniques \& Applications ({DICTA24})},
  year      = {2024},
  month     = nov,
  address   = {Perth},
  pages     = {8pp},
  doi       = {10.1109/DICTA63115.2024.00107},
  url_Paper = {pubs/24-dicta-c.pdf},
  keywords  = {czsl}
}
\n
\n\n\n\n
\n\n\n
\n
\n\n \n \n \n \n \n \n SpACNN-LDVAE: Spatial Attention Convolutional Latent Dirichlet Variational Autoencoder for Hyperspectral Pixel Unmixing.\n \n \n \n \n\n\n \n Chitnis, S.; Mantripragada, K.; and Qureshi, F.\n\n\n \n\n\n\n In
Proceedings International Geoscience and Remote Sensing Symposium (IGARSS), pages 6pp, Athens, July 2024. \n
\n\n
\n\n
\n\n
\n\n \n \n
paper\n \n \n\n \n\n \n link\n \n \n\n bibtex\n \n\n \n\n \n\n \n \n \n \n \n \n \n\n \n \n \n \n \n \n \n\n\n\n
\n
@InProceedings{24-igarss-paper,
  title     = {{SpACNN-LDVAE}: Spatial Attention Convolutional Latent {Dirichlet} Variational Autoencoder for Hyperspectral Pixel Unmixing},
  author    = {Chitnis, S. and Mantripragada, K. and Qureshi, F. Z.},
  year      = {2024},
  pages     = {6pp},
  month     = jul,
  address   = {Athens},
  booktitle = {Proceedings International Geoscience and Remote Sensing Symposium ({IGARSS})},
  url_Paper = {pubs/24-igarss-paper.pdf},
  keywords  = {hsi-unmixing,hsi-unmixing-spacnn}
}
\n
\n\n\n\n
\n\n\n
\n
\n\n \n \n \n \n \n \n Hyperspectral Pixel Unmixing with Latent Dirichlet Variational Autoencoder.\n \n \n \n \n\n\n \n Mantripragada, K.; and Qureshi, F. Z.\n\n\n \n\n\n\n
IEEE Transactions on Geoscience and Remote Sensing, 62: 13pp. January 2024.\n
\n\n
\n\n
\n\n
\n\n \n \n
paper\n \n \n\n \n \n doi\n \n \n\n \n link\n \n \n\n bibtex\n \n\n \n \n \n abstract \n \n\n \n\n \n \n \n \n \n \n \n\n \n \n \n \n \n\n\n\n
\n
@article{24-ieee-igrs-j,
  author    = {Mantripragada, Kiran and Qureshi, Faisal Z.},
  journal   = {IEEE Transactions on Geoscience and Remote Sensing},
  title     = {Hyperspectral Pixel Unmixing with Latent {Dirichlet} Variational Autoencoder},
  year      = {2024},
  volume    = {62},
  pages     = {13pp},
  abstract  = {We present a method for hyperspectral pixel unmixing. The proposed method assumes that (1) abundances can be encoded as Dirichlet distributions and (2) spectra of endmembers can be represented as multivariate Normal distributions. The method solves the problem of abundance estimation and endmember extraction within a variational autoencoder setting where a Dirichlet bottleneck layer models the abundances, and the decoder performs endmember extraction. The proposed method can also leverage transfer learning paradigm, where the model is only trained on synthetic data containing pixels that are linear combinations of one or more endmembers of interest. In this case, we retrieve endmembers (spectra) from the United States Geological Survey Spectral Library. The model thus trained can be subsequently used to perform pixel unmixing on “real data” that contains a subset of the endmembers used to generated the synthetic data. The model achieves state-of-the-art results on several benchmarks: Cuprite, Urban Hydice and Samson. We also present new synthetic dataset, OnTech-HSI-Syn-21, that can be used to study hyperspectral pixel unmixing methods. We showcase the transfer learning capabilities of the proposed model on Cuprite and OnTech-HSI-Syn-21 datasets. In summary, the proposed method can be applied for pixel unmixing a variety of domains, including agriculture, forestry, mineralogy, analysis of materials, healthcare, etc. Additionally, the proposed method eschews the need for labelled data for training by leveraging the transfer learning paradigm, where the model is trained on synthetic data generated using the endmembers present in the “real” data.},
  doi       = {10.1109/TGRS.2024.3357589},
  issn      = {1558-0644},
  month     = jan,
  keywords  = {hsi-unmixing},
  url_Paper = {pubs/24-ieee-tgrs-j.pdf}
}
\n
\n\n\n
\n We present a method for hyperspectral pixel unmixing. The proposed method assumes that (1) abundances can be encoded as Dirichlet distributions and (2) spectra of endmembers can be represented as multivariate Normal distributions. The method solves the problem of abundance estimation and endmember extraction within a variational autoencoder setting where a Dirichlet bottleneck layer models the abundances, and the decoder performs endmember extraction. The proposed method can also leverage transfer learning paradigm, where the model is only trained on synthetic data containing pixels that are linear combinations of one or more endmembers of interest. In this case, we retrieve endmembers (spectra) from the United States Geological Survey Spectral Library. The model thus trained can be subsequently used to perform pixel unmixing on “real data” that contains a subset of the endmembers used to generated the synthetic data. The model achieves state-of-the-art results on several benchmarks: Cuprite, Urban Hydice and Samson. We also present new synthetic dataset, OnTech-HSI-Syn-21, that can be used to study hyperspectral pixel unmixing methods. We showcase the transfer learning capabilities of the proposed model on Cuprite and OnTech-HSI-Syn-21 datasets. In summary, the proposed method can be applied for pixel unmixing a variety of domains, including agriculture, forestry, mineralogy, analysis of materials, healthcare, etc. Additionally, the proposed method eschews the need for labelled data for training by leveraging the transfer learning paradigm, where the model is trained on synthetic data generated using the endmembers present in the “real” data.\n
\n\n\n
\n\n\n\n\n\n