Enhancing Few-Shot Image Classification With Unlabelled Examples. Bateni, P., Barber, J., van de Meent, J., & Wood, F. In Proceedings of the IEEE/CVF Winter Conference on Applications of Computer Vision (WACV), pages 2796-2805, January 2022.

Abstract: We develop a transductive meta-learning method that uses unlabelled instances to improve few-shot image classification performance. Our approach combines a regularized Mahalanobis-distance-based soft k-means clustering procedure with a modified state of the art neural adaptive feature extractor to achieve improved test-time classification accuracy using unlabelled data. We evaluate our method on transductive few-shot learning tasks, in which the goal is to jointly predict labels for query (test) examples given a set of support (training) examples. We achieve state of the art performance on the Meta-Dataset, mini-ImageNet and tiered-ImageNet benchmarks.
@InProceedings{Bateni_2022_WACV,
author = {Bateni, Peyman and Barber, Jarred and van de Meent, Jan-Willem and Wood, Frank},
title = {Enhancing Few-Shot Image Classification With Unlabelled Examples},
booktitle = {Proceedings of the IEEE/CVF Winter Conference on Applications of Computer Vision (WACV)},
month = {January},
year = {2022},
pages = {2796--2805},
url_ArXiv = {https://arxiv.org/abs/2006.12245},
url_Paper = {https://ieeexplore.ieee.org/document/9706775},
support = {D3M,LwLL},
abstract={We develop a transductive meta-learning method that uses unlabelled instances to improve few-shot image classification performance. Our approach combines a regularized Mahalanobis-distance-based soft k-means clustering procedure with a modified state of the art neural adaptive feature extractor to achieve improved test-time classification accuracy using unlabelled data. We evaluate our method on transductive few-shot learning tasks, in which the goal is to jointly predict labels for query (test) examples given a set of support (training) examples. We achieve state of the art performance on the Meta-Dataset, mini-ImageNet and tiered-ImageNet benchmarks.}
}
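The abstract describes a regularized Mahalanobis-distance-based soft k-means procedure that refines class estimates using both labelled support examples and unlabelled query examples. The snippet below is a minimal NumPy sketch of that idea under simplifying assumptions: it operates on pre-computed embeddings (the paper's adaptive feature extractor is not reproduced), and the function names (regularized_class_covariance, mahalanobis_soft_kmeans) and parameters (lam, n_iters) are illustrative choices, not the authors' implementation.

# A minimal sketch of regularized Mahalanobis-distance soft k-means for
# transductive few-shot classification, assuming embeddings are already computed.
# Feature extraction, the exact regularization schedule, and convergence criteria
# are simplified assumptions, not the paper's implementation.
import numpy as np

def regularized_class_covariance(feats, weights, lam=0.5, eps=1e-3):
    """Weighted class mean and covariance, shrunk towards the identity for stability."""
    mean = np.average(feats, axis=0, weights=weights)
    diff = feats - mean
    cov = (weights[:, None] * diff).T @ diff / weights.sum()
    d = feats.shape[1]
    return mean, lam * cov + (1.0 - lam + eps) * np.eye(d)

def mahalanobis_soft_kmeans(support_feats, support_labels, query_feats,
                            n_classes, n_iters=5):
    """Jointly infer soft labels for query embeddings using support embeddings.

    support_feats: (n_s, d) embeddings of labelled support examples
    support_labels: (n_s,) integer class labels
    query_feats:   (n_q, d) embeddings of unlabelled query examples
    Returns: (n_q, n_classes) soft class responsibilities for the queries.
    """
    support_labels = np.asarray(support_labels)
    n_q = query_feats.shape[0]
    resp = np.full((n_q, n_classes), 1.0 / n_classes)  # uniform initial soft labels

    for _ in range(n_iters):
        logits = np.zeros((n_q, n_classes))
        for c in range(n_classes):
            # Combine hard support labels with the current soft query responsibilities.
            sup_mask = (support_labels == c).astype(float)
            feats = np.vstack([support_feats, query_feats])
            weights = np.concatenate([sup_mask, resp[:, c]])
            mean, cov = regularized_class_covariance(feats, weights)
            # Squared Mahalanobis distance of each query to the class estimate.
            diff = query_feats - mean
            sol = np.linalg.solve(cov, diff.T).T
            logits[:, c] = -0.5 * np.sum(diff * sol, axis=1)
        # Soft reassignment: softmax over negative distances.
        logits -= logits.max(axis=1, keepdims=True)
        resp = np.exp(logits)
        resp /= resp.sum(axis=1, keepdims=True)
    return resp

# Example usage with random 64-dimensional embeddings (5-way, 1-shot, 75 queries):
# rng = np.random.default_rng(0)
# support = rng.normal(size=(5, 64)); labels = np.arange(5)
# queries = rng.normal(size=(75, 64))
# probs = mahalanobis_soft_kmeans(support, labels, queries, n_classes=5)

Shrinking each class covariance towards the identity keeps the Mahalanobis distance well-conditioned when only a handful of support examples per class are available, which is the few-shot regime the paper targets; the unlabelled queries then contribute to the class statistics through their soft responsibilities.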
{"_id":"Ku7ZydmHy5TZzZb7B","bibbaseid":"bateni-barber-vandemeent-wood-enhancingfewshotimageclassificationwithunlabelledexamples-2022","author_short":["Bateni, P.","Barber, J.","van de Meent, J.","Wood, F."],"bibdata":{"bibtype":"inproceedings","type":"inproceedings","author":[{"propositions":[],"lastnames":["Bateni"],"firstnames":["Peyman"],"suffixes":[]},{"propositions":[],"lastnames":["Barber"],"firstnames":["Jarred"],"suffixes":[]},{"propositions":["van","de"],"lastnames":["Meent"],"firstnames":["Jan-Willem"],"suffixes":[]},{"propositions":[],"lastnames":["Wood"],"firstnames":["Frank"],"suffixes":[]}],"title":"Enhancing Few-Shot Image Classification With Unlabelled Examples","booktitle":"Proceedings of the IEEE/CVF Winter Conference on Applications of Computer Vision (WACV)","month":"January","year":"2022","pages":"2796-2805","url_arxiv":"https://arxiv.org/abs/2006.12245","url_paper":"https://ieeexplore.ieee.org/document/9706775","support":"D3M,LwLL","abstract":"We develop a transductive meta-learning method that uses unlabelled instances to improve few-shot image classification performance. Our approach combines a regularized Mahalanobis-distance-based soft k-means clustering procedure with a modified state of the art neural adaptive feature extractor to achieve improved test-time classification accuracy using unlabelled data. We evaluate our method on transductive few-shot learning tasks, in which the goal is to jointly predict labels for query (test) examples given a set of support (training) examples. We achieve state of the art performance on the Meta-Dataset, mini-ImageNet and tiered-ImageNet benchmarks.","bibtex":"@InProceedings{Bateni_2022_WACV,\n author = {Bateni, Peyman and Barber, Jarred and van de Meent, Jan-Willem and Wood, Frank},\n title = {Enhancing Few-Shot Image Classification With Unlabelled Examples},\n booktitle = {Proceedings of the IEEE/CVF Winter Conference on Applications of Computer Vision (WACV)},\n month = {January},\n year = {2022},\n pages = {2796-2805},\n url_ArXiv = {https://arxiv.org/abs/2006.12245},\n url_Paper = {https://ieeexplore.ieee.org/document/9706775},\n support = {D3M,LwLL},\n abstract={We develop a transductive meta-learning method that uses unlabelled instances to improve few-shot image classification performance. Our approach combines a regularized Mahalanobis-distance-based soft k-means clustering procedure with a modified state of the art neural adaptive feature extractor to achieve improved test-time classification accuracy using unlabelled data. We evaluate our method on transductive few-shot learning tasks, in which the goal is to jointly predict labels for query (test) examples given a set of support (training) examples. 
We achieve state of the art performance on the Meta-Dataset, mini-ImageNet and tiered-ImageNet benchmarks.}\n}\n\n","author_short":["Bateni, P.","Barber, J.","van de Meent, J.","Wood, F."],"key":"Bateni_2022_WACV","id":"Bateni_2022_WACV","bibbaseid":"bateni-barber-vandemeent-wood-enhancingfewshotimageclassificationwithunlabelledexamples-2022","role":"author","urls":{" arxiv":"https://arxiv.org/abs/2006.12245"," paper":"https://ieeexplore.ieee.org/document/9706775"},"metadata":{"authorlinks":{}},"downloads":6},"bibtype":"inproceedings","biburl":"https://raw.githubusercontent.com/plai-group/bibliography/master/group_publications.bib","dataSources":["BKH7YtW7K7WNMA3cj","7avRLRrz2ifJGMKcD","wyN5DxtoT6AQuiXnm"],"keywords":[],"search_terms":["enhancing","few","shot","image","classification","unlabelled","examples","bateni","barber","van de meent","wood"],"title":"Enhancing Few-Shot Image Classification With Unlabelled Examples","year":2022,"downloads":6}