@inproceedings{94d808dcffdd420490a207e3b779c928,
title = "Searching for N:M Fine-grained Sparsity of Weights and Activations in Neural Networks",
abstract = "Sparsity in deep neural networks has been extensively studied to compress and accelerate models for environments with limited resources. The general approach of pruning aims at enforcing sparsity on the obtained model, with minimal accuracy loss, but with a sparsity structure that enables acceleration on hardware. The sparsity can be enforced on either the weights or activations of the network, and existing works tend to focus on either one for the entire network. In this paper, we suggest a strategy based on Neural Architecture Search (NAS) to sparsify both activations and weights throughout the network, while utilizing the recent approach of N:M fine-grained structured sparsity that enables practical acceleration on dedicated GPUs. We show that a combination of weight and activation pruning is superior to each option separately. Furthermore, during the training, the choice between pruning the weights of activations can be motivated by practical inference costs (e.g., memory bandwidth). We demonstrate the efficiency of the approach on several image classification datasets.",
keywords = "Activation pruning, N:M fine-grained Sparsity, Neural architecture search, Weight pruning",
author = "Ruth Akiva-Hochman and Finder, {Shahaf E.} and Turek, {Javier S.} and Eran Treister",
note = "Publisher Copyright: {\textcopyright} 2023, The Author(s), under exclusive license to Springer Nature Switzerland AG.; 17th European Conference on Computer Vision, ECCV 2022 ; Conference date: 23-10-2022 Through 27-10-2022",
year = "2023",
month = jan,
day = "1",
doi = "10.1007/978-3-031-25082-8_9",
language = "American English",
isbn = "9783031250811",
series = "Lecture Notes in Computer Science",
publisher = "Springer Science and Business Media Deutschland GmbH",
pages = "130--143",
editor = "Leonid Karlinsky and Tomer Michaeli and Ko Nishino",
booktitle = "Computer Vision – ECCV 2022 Workshops, Proceedings",
address = "Germany",
}