@inproceedings{8794ed94c9a446948d54f4a20e0c324a,
  author    = {Livni, Roi},
  title     = {Information Theoretic Lower Bounds for Information Theoretic Upper Bounds},
  editor    = {Oh, A. and Neumann, T. and Globerson, A. and Saenko, K. and Hardt, M. and Levine, S.},
  booktitle = {Advances in Neural Information Processing Systems 36 - 37th Conference on Neural Information Processing Systems, NeurIPS 2023},
  series    = {Advances in Neural Information Processing Systems},
  year      = {2023},
  language  = {English},
  abstract  = {We examine the relationship between the mutual information between the output model and the empirical sample and the generalization of the algorithm in the context of stochastic convex optimization. Despite increasing interest in information-theoretic generalization bounds, it is uncertain if these bounds can provide insight into the exceptional performance of various learning algorithms. Our study of stochastic convex optimization reveals that, for true risk minimization, dimension-dependent mutual information is necessary. This indicates that existing information-theoretic generalization bounds fall short in capturing the generalization capabilities of algorithms like SGD and regularized ERM, which have dimension-independent sample complexity.},
  note      = {Publisher Copyright: {\textcopyright} 2023 Neural information processing systems foundation. All rights reserved.; 37th Conference on Neural Information Processing Systems, NeurIPS 2023 ; Conference date: 10-12-2023 Through 16-12-2023},
}