@inproceedings{c34213cdf6844f38b512e03b5ba51229,
title = "On the similarity between the laplace and neural tangent kernels",
abstract = "Recent theoretical work has shown that massively overparameterized neural networks are equivalent to kernel regressors that use Neural Tangent Kernels (NTKs). Experiments show that these kernel methods perform similarly to real neural networks. Here we show that NTK for fully connected networks with ReLU activation is closely related to the standard Laplace kernel. We show theoretically that for normalized data on the hypersphere both kernels have the same eigenfunctions and their eigenvalues decay polynomially at the same rate, implying that their Reproducing Kernel Hilbert Spaces (RKHS) include the same sets of functions. This means that both kernels give rise to classes of functions with the same smoothness properties. The two kernels differ for data off the hypersphere, but experiments indicate that when data is properly normalized these differences are not significant. Finally, we provide experiments on real data comparing NTK and the Laplace kernel, along with a larger class of γ-exponential kernels. We show that these perform almost identically. Our results suggest that much insight about neural networks can be obtained from analysis of the well-known Laplace kernel, which has a simple closed form.",
author = "Amnon Geifman and Abhay Yadav and Yoni Kasten and Meirav Galun and David Jacobs and Ronen Basri",
note = "Publisher Copyright: {\textcopyright} 2020 Neural information processing systems foundation. All rights reserved.; 34th Conference on Neural Information Processing Systems, NeurIPS 2020 ; Conference date: 06-12-2020 Through 12-12-2020",
year = "2020",
month = dec,
day = "6",
doi = "10.48550/arXiv.2007.01580",
language = "الإنجليزيّة",
isbn = "9781713829546",
series = "Advances in Neural Information Processing Systems",
editor = "H. Larochelle",
booktitle = "NIPS'20",
}