Deep Embedded Self-Organizing Map for Joint Representation Learning and Topology-Preserving Clustering. Forest, F., Lebbah, M., Azzag, H., & Lacaille, J. Neural Computing and Applications, 2021.

Abstract: A recent research area in unsupervised learning is the combination of representation learning with deep neural networks and data clustering. The success of deep learning for supervised tasks is widely established. However, recent research has demonstrated how neural networks are able to learn representations that improve clustering in their intermediate feature space, using specific regularizations. By considering representation learning and clustering as a joint task, models learn clustering-friendly spaces and outperform two-stage approaches where dimensionality reduction and clustering are performed separately. Recently, this idea has been extended to topology-preserving clustering models known as self-organizing maps (SOM). This work is a thorough study of the deep embedded self-organizing map (DESOM), a model composed of an autoencoder and a SOM layer that jointly trains the code vectors and the network weights to learn SOM-friendly representations. In other words, the SOM induces a form of regularization that improves the quality of quantization and topology in the latent space. After detailing the architecture, loss, and training algorithm, we study the hyperparameters through a series of experiments. Different SOM-based models are evaluated in terms of clustering, visualization, and classification on benchmark datasets. We study the benefits and trade-offs of joint representation learning and self-organization. DESOM achieves competitive results, requires no pretraining, and produces topologically organized visualizations.
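To make the idea in the abstract concrete, the sketch below shows a DESOM-style objective: an autoencoder whose latent space is regularized by a soft SOM distortion loss, with the encoder, decoder, and SOM prototypes all updated jointly. This is a minimal illustration written in PyTorch, not the authors' implementation (their Keras/TensorFlow code is at the Code link, github.com/FlorentF9/DESOM); the layer sizes, map size, and the gamma and T hyperparameters are illustrative assumptions, and names such as DESOMSketch and som_loss are hypothetical.

# Minimal DESOM-style sketch (assumptions noted above), not the authors' code.
import torch
import torch.nn as nn

class DESOMSketch(nn.Module):
    def __init__(self, input_dim=784, latent_dim=10, map_size=(8, 8)):
        super().__init__()
        self.encoder = nn.Sequential(
            nn.Linear(input_dim, 500), nn.ReLU(),
            nn.Linear(500, latent_dim),
        )
        self.decoder = nn.Sequential(
            nn.Linear(latent_dim, 500), nn.ReLU(),
            nn.Linear(500, input_dim),
        )
        # One prototype (code vector) per SOM unit, living in latent space.
        n_units = map_size[0] * map_size[1]
        self.prototypes = nn.Parameter(torch.randn(n_units, latent_dim))
        # Fixed 2-D grid coordinates used for the topological neighborhood.
        ii, jj = torch.meshgrid(
            torch.arange(map_size[0]), torch.arange(map_size[1]), indexing="ij")
        self.register_buffer(
            "grid", torch.stack([ii.flatten(), jj.flatten()], dim=1).float())

    def forward(self, x):
        z = self.encoder(x)
        return z, self.decoder(z)

def som_loss(z, model, T):
    """Soft SOM distortion: each latent point pulls its best-matching unit
    and, weighted by a Gaussian neighborhood of temperature T, the units
    around it on the 2-D grid."""
    d2 = torch.cdist(z, model.prototypes) ** 2           # (batch, n_units)
    bmu = d2.argmin(dim=1)                               # best-matching units
    grid_d2 = torch.cdist(model.grid[bmu], model.grid) ** 2
    h = torch.exp(-grid_d2 / (2 * T ** 2))               # neighborhood weights
    return (h * d2).sum(dim=1).mean()

# Joint training step: total loss = reconstruction + gamma * SOM regularization.
model = DESOMSketch()
opt = torch.optim.Adam(model.parameters(), lr=1e-3)
gamma, T = 1e-3, 5.0                                     # assumed hyperparameters
x = torch.rand(64, 784)                                  # stand-in mini-batch
opt.zero_grad()
z, x_hat = model(x)
loss = nn.functional.mse_loss(x_hat, x) + gamma * som_loss(z, model, T)
loss.backward()
opt.step()

In SOM training schemes such as this one, the neighborhood temperature T is typically annealed from a large to a small value over training, so the map first organizes globally and then fine-tunes locally; the sketch keeps it fixed for brevity.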
@article{forest2021deepembedded,
abstract = {A recent research area in unsupervised learning is the combination of representation learning with deep neural networks and data clustering. The success of deep learning for supervised tasks is widely established. However, recent research has demonstrated how neural networks are able to learn representations that improve clustering in their intermediate feature space, using specific regularizations. By considering representation learning and clustering as a joint task, models learn clustering-friendly spaces and outperform two-stage approaches where dimensionality reduction and clustering are performed separately. Recently, this idea has been extended to topology-preserving clustering models known as self-organizing maps (SOM). This work is a thorough study of the deep embedded self-organizing map (DESOM), a model composed of an autoencoder and a SOM layer that jointly trains the code vectors and the network weights to learn SOM-friendly representations. In other words, the SOM induces a form of regularization that improves the quality of quantization and topology in the latent space. After detailing the architecture, loss, and training algorithm, we study the hyperparameters through a series of experiments. Different SOM-based models are evaluated in terms of clustering, visualization, and classification on benchmark datasets. We study the benefits and trade-offs of joint representation learning and self-organization. DESOM achieves competitive results, requires no pretraining, and produces topologically organized visualizations.},
author = {Forest, Florent and Lebbah, Mustapha and Azzag, Hanene and Lacaille, J{\'{e}}r{\^{o}}me},
doi = {10.1007/s00521-021-06331-w},
journal = {Neural Computing and Applications},
keywords = {autoencoder,clustering,deep learning,representation learning,self-organizing maps,visualization},
title = {{Deep Embedded Self-Organizing Map for Joint Representation Learning and Topology-Preserving Clustering}},
year = {2021},
url_Link = {https://link.springer.com/article/10.1007/s00521-021-06331-w},
url_Paper = {https://www.researchgate.net/journal/Neural-Computing-and-Applications-1433-3058/publication/353679111_Deep_embedded_self-organizing_maps_for_joint_representation_learning_and_topology-preserving_clustering/links/610a2059169a1a0103daf991/Deep-embedded-self-organizing-maps-for-joint-representation-learning-and-topology-preserving-clustering.pdf},
url_Code = {https://github.com/FlorentF9/DESOM},
bibbase_note = {<img src="assets/img/papers/desom.png">}
}
{"_id":"g3jEvkjzRCrEBZQ7a","bibbaseid":"forest-lebbah-azzag-lacaille-deepembeddedselforganizingmapforjointrepresentationlearningandtopologypreservingclustering-2021","author_short":["Forest, F.","Lebbah, M.","Azzag, H.","Lacaille, J."],"bibdata":{"bibtype":"article","type":"article","abstract":"A recent research area in unsupervised learning is the combination of representation learning with deep neural networks and data clustering. The success of deep learning for supervised tasks is widely established. However, recent research has demonstrated how neural networks are able to learn representations to improve clustering in their intermediate feature space, using specific regularizations. By considering representation learning and clustering as a joint task, models learn clustering-friendly spaces and outperform two-stage approaches where dimensionality reduction and clustering are performed separately. Recently, this idea has been extended to topology-preserving clustering models, known as self-organizing maps (SOM). This work is a thorough study on the deep embedded self-organizing map (DESOM), a model composed of an autoencoder and a SOM layer, training jointly the code vectors and network weights to learn SOM-friendly representations. In other words, SOM induces a form a regularization to improve the quality of quantization and topology in latent space. After detailing the architecture, loss and training algorithm, we study hyperparameters with a series of experiments. Different SOM-based models are evaluated in terms of clustering, visualization and classification on benchmark datasets. We study benefits and trade-offs of joint representation learning and self-organization. DESOM achieves competitive results, requires no pretraining and produces topologically organized visualizations.","author":[{"propositions":[],"lastnames":["Forest"],"firstnames":["Florent"],"suffixes":[]},{"propositions":[],"lastnames":["Lebbah"],"firstnames":["Mustapha"],"suffixes":[]},{"propositions":[],"lastnames":["Azzag"],"firstnames":["Hanene"],"suffixes":[]},{"propositions":[],"lastnames":["Lacaille"],"firstnames":["Jérôme"],"suffixes":[]}],"doi":"10.1007/s00521-021-06331-w","isbn":"0052102106","journal":"Neural Computing and Applications","keywords":"autoencoder,clustering,deep learning,representation learning,self-organizing maps,visualization","title":"Deep Embedded Self-Organizing Map for Joint Representation Learning and Topology-Preserving Clustering","year":"2021","url_link":"https://link.springer.com/article/10.1007/s00521-021-06331-w","url_paper":"https://www.researchgate.net/journal/Neural-Computing-and-Applications-1433-3058/publication/353679111_Deep_embedded_self-organizing_maps_for_joint_representation_learning_and_topology-preserving_clustering/links/610a2059169a1a0103daf991/Deep-embedded-self-organizing-maps-for-joint-representation-learning-and-topology-preserving-clustering.pdf","url_code":"https://github.com/FlorentF9/DESOM","bibbase_note":"<img src=\"assets/img/papers/desom.png\">","bibtex":"@article{forest2021deepembedded,\nabstract = {A recent research area in unsupervised learning is the combination of representation learning with deep neural networks and data clustering. The success of deep learning for supervised tasks is widely established. However, recent research has demonstrated how neural networks are able to learn representations to improve clustering in their intermediate feature space, using specific regularizations. 
By considering representation learning and clustering as a joint task, models learn clustering-friendly spaces and outperform two-stage approaches where dimensionality reduction and clustering are performed separately. Recently, this idea has been extended to topology-preserving clustering models, known as self-organizing maps (SOM). This work is a thorough study on the deep embedded self-organizing map (DESOM), a model composed of an autoencoder and a SOM layer, training jointly the code vectors and network weights to learn SOM-friendly representations. In other words, SOM induces a form a regularization to improve the quality of quantization and topology in latent space. After detailing the architecture, loss and training algorithm, we study hyperparameters with a series of experiments. Different SOM-based models are evaluated in terms of clustering, visualization and classification on benchmark datasets. We study benefits and trade-offs of joint representation learning and self-organization. DESOM achieves competitive results, requires no pretraining and produces topologically organized visualizations.},\nauthor = {Forest, Florent and Lebbah, Mustapha and Azzag, Hanene and Lacaille, J{\\'{e}}r{\\^{o}}me},\ndoi = {10.1007/s00521-021-06331-w},\nisbn = {0052102106},\njournal = {Neural Computing and Applications},\nkeywords = {autoencoder,clustering,deep learning,representation learning,self-organizing maps,visualization},\ntitle = {{Deep Embedded Self-Organizing Map for Joint Representation Learning and Topology-Preserving Clustering}},\nyear = {2021},\nurl_Link = {https://link.springer.com/article/10.1007/s00521-021-06331-w},\nurl_Paper = {https://www.researchgate.net/journal/Neural-Computing-and-Applications-1433-3058/publication/353679111_Deep_embedded_self-organizing_maps_for_joint_representation_learning_and_topology-preserving_clustering/links/610a2059169a1a0103daf991/Deep-embedded-self-organizing-maps-for-joint-representation-learning-and-topology-preserving-clustering.pdf},\nurl_Code = {https://github.com/FlorentF9/DESOM},\nbibbase_note = {<img src=\"assets/img/papers/desom.png\">}\n}\n\n","author_short":["Forest, F.","Lebbah, M.","Azzag, H.","Lacaille, J."],"key":"forest2021deepembedded","id":"forest2021deepembedded","bibbaseid":"forest-lebbah-azzag-lacaille-deepembeddedselforganizingmapforjointrepresentationlearningandtopologypreservingclustering-2021","role":"author","urls":{" link":"https://link.springer.com/article/10.1007/s00521-021-06331-w"," paper":"https://www.researchgate.net/journal/Neural-Computing-and-Applications-1433-3058/publication/353679111_Deep_embedded_self-organizing_maps_for_joint_representation_learning_and_topology-preserving_clustering/links/610a2059169a1a0103daf991/Deep-embedded-self-organizing-maps-for-joint-representation-learning-and-topology-preserving-clustering.pdf"," code":"https://github.com/FlorentF9/DESOM"},"keyword":["autoencoder","clustering","deep learning","representation learning","self-organizing maps","visualization"],"metadata":{"authorlinks":{}},"downloads":108},"bibtype":"article","biburl":"https://florentfo.rest/files/publications.bib","dataSources":["DgnR6pzJ98ZEp97PW","2puawT8ZAQyYRypA3","pBkCjKbyeirr5jeAd","6rNfa4Kp6dL5sGmf5","xH8ySTsEPTLou9gyR"],"keywords":["autoencoder","clustering","deep learning","representation learning","self-organizing 
maps","visualization"],"search_terms":["deep","embedded","self","organizing","map","joint","representation","learning","topology","preserving","clustering","forest","lebbah","azzag","lacaille"],"title":"Deep Embedded Self-Organizing Map for Joint Representation Learning and Topology-Preserving Clustering","year":2021,"downloads":108}