2025 (5)

Compression in 3D Gaussian Splatting: A Survey of Methods, Trends, and Future Directions.
Salman Ali, M.; Zhang, C.; Cagnazzo, M.; Valenzise, G.; Tartaglione, E.; and Bae, S.
arXiv e-prints, 2025. Paper: https://arxiv.org/pdf/2502.19457

@article{salman2025compression,
  title={Compression in 3D Gaussian Splatting: A Survey of Methods, Trends, and Future Directions},
  author={Salman Ali, Muhammad and Zhang, Chaoning and Cagnazzo, Marco and Valenzise, Giuseppe and Tartaglione, Enzo and Bae, Sung-Ho},
  journal={arXiv e-prints},
  year={2025},
  url={https://arxiv.org/pdf/2502.19457}
}

Diffusing DeBias: a Recipe for Turning a Bug into a Feature.
Ciranni, M.; Pastore, V. P.; Di Via, R.; Tartaglione, E.; Odone, F.; and Murino, V.
arXiv preprint arXiv:2502.09564, 2025. Paper: https://arxiv.org/pdf/2502.09564

@article{ciranni2025diffusing,
  title={Diffusing DeBias: a Recipe for Turning a Bug into a Feature},
  author={Ciranni, Massimiliano and Pastore, Vito Paolo and Di Via, Roberto and Tartaglione, Enzo and Odone, Francesca and Murino, Vittorio},
  journal={arXiv preprint arXiv:2502.09564},
  year={2025},
  url={https://arxiv.org/pdf/2502.09564}
}

GoDe: Gaussians on Demand for Progressive Level of Detail and Scalable Compression.
Di Sario, F.; Renzulli, R.; Grangetto, M.; Sugimoto, A.; and Tartaglione, E.
arXiv preprint arXiv:2501.13558, 2025. Paper: https://arxiv.org/pdf/2501.13558

@article{di2025gode,
  title={GoDe: Gaussians on Demand for Progressive Level of Detail and Scalable Compression},
  author={Di Sario, Francesco and Renzulli, Riccardo and Grangetto, Marco and Sugimoto, Akihiro and Tartaglione, Enzo},
  journal={arXiv preprint arXiv:2501.13558},
  year={2025},
  url={https://arxiv.org/pdf/2501.13558}
}

No Images, No Problem: Retaining Knowledge in Continual VQA with Questions-Only Memory.
Marouf, I. E.; Tartaglione, E.; Lathuiliere, S.; and van de Weijer, J.
arXiv preprint arXiv:2502.04469, 2025. Paper: https://arxiv.org/pdf/2502.04469

@article{marouf2025no,
  title={No Images, No Problem: Retaining Knowledge in Continual VQA with Questions-Only Memory},
  author={Marouf, Imad Eddine and Tartaglione, Enzo and Lathuiliere, Stephane and van de Weijer, Joost},
  journal={arXiv preprint arXiv:2502.04469},
  year={2025},
  url={https://arxiv.org/pdf/2502.04469}
}

2024 (4)

Memory-Optimized Once-For-All Network.
Girard, M.; Quétu, V.; Tardieu, S.; Nguyen, V.; and Tartaglione, E.
arXiv preprint arXiv:2409.05900, 2024. Paper: https://arxiv.org/pdf/2409.05900

@article{girard2024memory,
  title={Memory-Optimized Once-For-All Network},
  author={Girard, Maxime and Qu{\'e}tu, Victor and Tardieu, Samuel and Nguyen, Van-Tam and Tartaglione, Enzo},
  journal={arXiv preprint arXiv:2409.05900},
  year={2024},
  url={https://arxiv.org/pdf/2409.05900}
}

Say My Name: a Model's Bias Discovery Framework.
Ciranni, M.; Molinaro, L.; Barbano, C. A.; Fiandrotti, A.; Murino, V.; Pastore, V. P.; and Tartaglione, E.
arXiv preprint arXiv:2408.09570, 2024. Paper: https://arxiv.org/pdf/2408.09570

@article{ciranni2024say,
  title={Say My Name: a Model's Bias Discovery Framework},
  author={Ciranni, Massimiliano and Molinaro, Luca and Barbano, Carlo Alberto and Fiandrotti, Attilio and Murino, Vittorio and Pastore, Vito Paolo and Tartaglione, Enzo},
  journal={arXiv preprint arXiv:2408.09570},
  year={2024},
  url={https://arxiv.org/pdf/2408.09570}
}

LaCoOT: Layer Collapse through Optimal Transport.
Quétu, V.; Hezbri, N.; and Tartaglione, E.
arXiv preprint arXiv:2406.08933, 2024. Paper: https://arxiv.org/pdf/2406.08933

@article{quetu2024lacoot,
  title={LaCoOT: Layer Collapse through Optimal Transport},
  author={Qu{\'e}tu, Victor and Hezbri, Nour and Tartaglione, Enzo},
  journal={arXiv preprint arXiv:2406.08933},
  year={2024},
  url={https://arxiv.org/pdf/2406.08933}
}

NEPENTHE: Entropy-Based Pruning as a Neural Network Depth's Reducer.
Liao, Z.; Quétu, V.; Nguyen, V.; and Tartaglione, E.
arXiv preprint arXiv:2404.16890, 2024. Paper: https://arxiv.org/pdf/2404.16890

@article{liao2024nepenthe,
  title={NEPENTHE: Entropy-Based Pruning as a Neural Network Depth's Reducer},
  author={Liao, Zhu and Qu{\'e}tu, Victor and Nguyen, Van-Tam and Tartaglione, Enzo},
  journal={arXiv preprint arXiv:2404.16890},
  year={2024},
  url={https://arxiv.org/pdf/2404.16890}
}

2023 (2)

Rethinking Class-incremental Learning in the Era of Large Pre-trained Models via Test-Time Adaptation.
Marouf, I. E.; Roy, S.; Tartaglione, E.; and Lathuilière, S.
arXiv preprint arXiv:2310.11482, 2023. Paper: https://arxiv.org/pdf/2310.11482.pdf

@article{marouf2023rethinking,
  title={Rethinking Class-incremental Learning in the Era of Large Pre-trained Models via Test-Time Adaptation},
  author={Marouf, Imad Eddine and Roy, Subhankar and Tartaglione, Enzo and Lathuili{\`e}re, St{\'e}phane},
  journal={arXiv preprint arXiv:2310.11482},
  year={2023},
  url={https://arxiv.org/pdf/2310.11482.pdf}
}

2022 (1)

REM: Routing Entropy Minimization for Capsule Networks.
Renzulli, R.; Tartaglione, E.; and Grangetto, M.
arXiv preprint arXiv:2204.01298, 2022. Paper: https://arxiv.org/pdf/2204.01298.pdf

@article{renzulli2022rem,
  title={REM: Routing Entropy Minimization for Capsule Networks},
  author={Renzulli, Riccardo and Tartaglione, Enzo and Grangetto, Marco},
  journal={arXiv preprint arXiv:2204.01298},
  year={2022},
  url={https://arxiv.org/pdf/2204.01298.pdf}
}