Reducing SqueezeNet Storage Size with Depthwise Separable Convolutions. Santos, A., De Souza, C., Zanchettin, C., Macedo, D., Oliveira, A., & Ludermir, T. In Proceedings of the International Joint Conference on Neural Networks, volume 2018-July, 2018.
doi  abstract   bibtex   
© 2018 IEEE. Current research in the field of convolutional neural networks usually focuses on improving network accuracy, regardless of the network size and inference time. In this paper, we investigate the effects of storage space reduction in SqueezeNet as it relates to inference time when processing single test samples. In order to reduce the storage space, we suggest adjusting SqueezeNet's Fire Modules to include Depthwise Separable Convolutions (DSC). The resulting network, referred to as SqueezeNet-DSC, is compared to different convolutional neural networks such as MobileNet, AlexNet, VGG19, and the original SqueezeNet itself. When analyzing the models, we consider accuracy, the number of parameters, parameter storage size and processing time of a single test sample on CIFAR-10 and CIFAR-100 databases. The SqueezeNet-DSC exhibited a considerable size reduction (37% the size of SqueezeNet), while experiencing a loss in network accuracy of 1.07% in CIFAR-10 and 3.06% in top 1 CIFAR-100.
@comment{Cleaned Mendeley auto-export: added the missing citation key (the entry
  was uncitable without one), removed Mendeley-internal bookkeeping fields
  (id, profile_id, created, last_modified, read, starred, authored, confirmed,
  hidden, private_publication, file_attached, bibtype, redundant type), spaced
  the author initials, brace-protected the proper noun SqueezeNet in the title,
  and repaired the "neu-ral" hyphenation artifact in the abstract.}
@inproceedings{santos2018squeezenetdsc,
  author    = {Santos, A. G. and De Souza, C. O. and Zanchettin, C. and Macedo, D. and Oliveira, A. L. I. and Ludermir, T.},
  title     = {Reducing {SqueezeNet} Storage Size with Depthwise Separable Convolutions},
  booktitle = {Proceedings of the International Joint Conference on Neural Networks},
  year      = {2018},
  volume    = {2018-July},
  doi       = {10.1109/IJCNN.2018.8489442},
  abstract  = {© 2018 IEEE. Current research in the field of convolutional neural networks usually focuses on improving network accuracy, regardless of the network size and inference time. In this paper, we investigate the effects of storage space reduction in SqueezeNet as it relates to inference time when processing single test samples. In order to reduce the storage space, we suggest adjusting SqueezeNet's Fire Modules to include Depthwise Separable Convolutions (DSC). The resulting network, referred to as SqueezeNet-DSC, is compared to different convolutional neural networks such as MobileNet, AlexNet, VGG19, and the original SqueezeNet itself. When analyzing the models, we consider accuracy, the number of parameters, parameter storage size and processing time of a single test sample on CIFAR-10 and CIFAR-100 databases. The SqueezeNet-DSC exhibited a considerable size reduction (37% the size of SqueezeNet), while experiencing a loss in network accuracy of 1.07% in CIFAR-10 and 3.06% in top 1 CIFAR-100.},
}

Downloads: 0