@inproceedings{9e9110785db84f9cad253fa990e444b0,
  title     = {Dynamically Sacrificing Accuracy for Reduced Computation: Cascaded Inference Based on {Softmax} Confidence},
  abstract  = {We study the tradeoff between computational effort and classification accuracy in a cascade of deep neural networks. During inference, the user sets the acceptable accuracy degradation which then automatically determines confidence thresholds for the intermediate classifiers. As soon as the confidence threshold is met, inference terminates immediately without having to compute the output of the complete network. Confidence levels are derived directly from the softmax outputs of intermediate classifiers, as we do not train special decision functions. We show that using a softmax output as a confidence measure in a cascade of deep neural networks leads to a reduction of 15\%--50\% in the number of MAC operations while degrading the classification accuracy by roughly 1\%. Our method can be easily incorporated into pre-trained non-cascaded architectures, as we exemplify on {ResNet}. Our main contribution is a method that dynamically adjusts the tradeoff between accuracy and computation without retraining the model.},
  keywords  = {Deep learning, Efficient inference, Neural networks},
  author    = {Konstantin Berestizshevsky and Guy Even},
  note      = {Publisher Copyright: {\textcopyright} 2019, Springer Nature Switzerland AG.; 28th International Conference on Artificial Neural Networks, ICANN 2019 ; Conference date: 17-09-2019 Through 19-09-2019},
  year      = {2019},
  doi       = {10.1007/978-3-030-30484-3_26},
  language  = {English},
  isbn      = {9783030304836},
  series    = {Lecture Notes in Computer Science (including subseries Lecture Notes in Artificial Intelligence and Lecture Notes in Bioinformatics)},
  publisher = {Springer Verlag},
  pages     = {306--320},
  editor    = {Tetko, {Igor V.} and Pavel Karpov and Fabian Theis and Vera Kurkov{\'a}},
  booktitle = {Artificial Neural Networks and Machine Learning -- ICANN 2019},
  address   = {Germany},
}