@inproceedings{593f9c14286945a885d3d6549d300c6a,
  title     = {Finite {Littlestone} Dimension Implies Finite Information Complexity},
  abstract  = {We prove that every online learnable class of functions of Littlestone dimension d admits a learning algorithm with finite information complexity. Towards this end, we use the notion of a globally stable algorithm. Generally, the information complexity of such a globally stable algorithm is large yet finite, roughly exponential in d. We also show there is room for improvement; for a canonical online learnable class, indicator functions of affine subspaces of dimension d, the information complexity can be upper bounded logarithmically in d.},
  keywords  = {Littlestone dimension, Mutual information, PAC learning},
  author    = {Aditya Pradeep and Ido Nachum and Michael Gastpar},
  note      = {Publisher Copyright: {\textcopyright} 2022 IEEE.; 2022 IEEE International Symposium on Information Theory, ISIT 2022 ; Conference date: 26-06-2022 Through 01-07-2022},
  year      = {2022},
  doi       = {10.1109/ISIT50566.2022.9834457},
  language  = {American English},
  series    = {IEEE International Symposium on Information Theory - Proceedings},
  publisher = {Institute of Electrical and Electronics Engineers Inc.},
  pages     = {3055--3060},
  booktitle = {2022 IEEE International Symposium on Information Theory, ISIT 2022},
  address   = {United States},
}