@inproceedings{8eb342cd928a4f1fb7cc78bed0e8c9f0,
title = "Generative Low-Shot Network Expansion",
abstract = "Conventional deep learning classifiers are static in the sense that they are trained on a predefined set of classes and learning to classify a novel class typically requires re-training. In this work, we address the problem of Low-Shot network-expansion learning. We introduce a learning framework which enables expanding a pre-trained (base) deep network to classify novel classes when the number of examples for the novel classes is particularly small. We present a simple yet powerful hard distillation method where the base network is augmented with additional weights to classify the novel classes, while keeping the weights of the base network unchanged. We show that since only a small number of weights needs to be trained, the hard distillation excels in low-shot training scenarios. Furthermore, hard distillation avoids detriment to classification performance on the base classes. Finally, we show that low-shot network expansion can be done with a very small memory footprint by using a compact generative model of the base classes training data with only a negligible degradation relative to learning with the full training set.",
author = "Adi Hayat and Mark Kliger and Shachar Fleishman and Daniel Cohen-Or",
note = "Publisher Copyright: {\textcopyright} 2018 IEEE.; 2018 IEEE/RSJ International Conference on Intelligent Robots and Systems, IROS 2018 ; Conference date: 01-10-2018 Through 05-10-2018",
year = "2018",
month = dec,
day = "27",
doi = "https://doi.org/10.1109/IROS.2018.8594004",
language = "الإنجليزيّة",
series = "IEEE International Conference on Intelligent Robots and Systems",
publisher = "Institute of Electrical and Electronics Engineers Inc.",
pages = "6072--6077",
booktitle = "2018 IEEE/RSJ International Conference on Intelligent Robots and Systems, IROS 2018",
address = "الولايات المتّحدة",
}