@inproceedings{44665e66b40d44d08b7143c9ac02b942,
title = "Neural (Tangent Kernel) Collapse",
abstract = "This work bridges two important concepts: the Neural Tangent Kernel (NTK), which captures the evolution of deep neural networks (DNNs) during training, and the Neural Collapse (NC) phenomenon, which refers to the emergence of symmetry and structure in the last-layer features of well-trained classification DNNs. We adopt the natural assumption that the empirical NTK develops a block structure aligned with the class labels, i.e., samples within the same class have stronger correlations than samples from different classes. Under this assumption, we derive the dynamics of DNNs trained with mean squared (MSE) loss and break them into interpretable phases. Moreover, we identify an invariant that captures the essence of the dynamics, and use it to prove the emergence of NC in DNNs with block-structured NTK. We provide large-scale numerical experiments on three common DNN architectures and three benchmark datasets to support our theory.",
author = "Mariia Seleznova and Dana Weitzner and Raja Giryes and Gitta Kutyniok and Chou, \{Hung Hsu\}",
note = "Publisher Copyright: {\textcopyright} 2023 Neural information processing systems foundation. All rights reserved.; 37th Conference on Neural Information Processing Systems, NeurIPS 2023 ; Conference date: 10-12-2023 Through 16-12-2023",
year = "2023",
language = "English",
series = "Advances in Neural Information Processing Systems",
publisher = "Neural Information Processing Systems Foundation",
editor = "A. Oh and T. Neumann and A. Globerson and K. Saenko and M. Hardt and S. Levine",
booktitle = "Advances in Neural Information Processing Systems 36 - 37th Conference on Neural Information Processing Systems, NeurIPS 2023",
address = "United States",
}