@inproceedings{e9147562ce074410a7f3017c1b5dae7a,
  title     = {Neural Spectrum Alignment: Empirical Study},
  abstract  = {Expressiveness and generalization of deep models was recently addressed via the connection between neural networks (NNs) and kernel learning, where first-order dynamics of NN during a gradient-descent (GD) optimization were related to gradient similarity kernel, also known as Neural Tangent Kernel (NTK)[9]. In the majority of works this kernel is considered to be time-invariant[9, 13]. In contrast, we empirically explore these properties along the optimization and show that in practice top eigenfunctions of NTK align toward the target function learned by NN which improves the overall optimization performance. Moreover, these top eigenfunctions serve as basis functions for NN output - a function represented by NN is spanned almost completely by them for the entire optimization process. Further, we study how learning rate decay affects the neural spectrum. We argue that the presented phenomena may lead to a more complete theoretical understanding behind NN learning.},
  keywords  = {Deep learning, Kernel learning, Neural tangent kernel},
  author    = {Kopitkov, Dmitry and Indelman, Vadim},
  note      = {Publisher Copyright: {\textcopyright} 2020, Springer Nature Switzerland AG.; 29th International Conference on Artificial Neural Networks, ICANN 2020 ; Conference date: 15-09-2020 Through 18-09-2020},
  year      = {2020},
  doi       = {10.1007/978-3-030-61616-8_14},
  language  = {English},
  isbn      = {9783030616151},
  series    = {Lecture Notes in Computer Science (including subseries Lecture Notes in Artificial Intelligence and Lecture Notes in Bioinformatics)},
  publisher = {Springer Science and Business Media Deutschland GmbH},
  pages     = {168--179},
  editor    = {Farka{\v{s}}, Igor and Masulli, Paolo and Wermter, Stefan},
  booktitle = {Artificial Neural Networks and Machine Learning -- ICANN 2020 - 29th International Conference on Artificial Neural Networks, Proceedings},
  address   = {Germany},
}