@inproceedings{sherman2021optimal,
title = "Optimal Rates for Random Order Online Optimization",
abstract = "We study online convex optimization in the random order model, recently proposed by Garber et al. [8], in which the loss functions may be chosen by an adversary but are then presented to the online algorithm in a uniformly random order. Focusing on the scenario where the cumulative loss function is (strongly) convex, yet individual loss functions are smooth but might be non-convex, we give algorithms that achieve the optimal bounds and significantly outperform the results of Garber et al. [8], completely removing the dimension dependence and improving their scaling with respect to the strong convexity parameter. Our analysis relies on novel connections between algorithmic stability and generalization for without-replacement sampling, analogous to those studied in the with-replacement i.i.d. setting, as well as on a refined average-stability analysis of stochastic gradient descent.",
author = "Uri Sherman and Tomer Koren and Yishay Mansour",
note = "Publisher Copyright: {\textcopyright} 2021 Neural Information Processing Systems Foundation. All rights reserved. 35th Conference on Neural Information Processing Systems, NeurIPS 2021; Conference date: 06-12-2021 through 14-12-2021",
year = "2021",
language = "English",
series = "Advances in Neural Information Processing Systems",
publisher = "Neural Information Processing Systems Foundation",
pages = "2097--2108",
editor = "Marc'Aurelio Ranzato and Alina Beygelzimer and Yann Dauphin and Percy S. Liang and Jenn {Wortman Vaughan}",
booktitle = "Advances in Neural Information Processing Systems 34 - 35th Conference on Neural Information Processing Systems, NeurIPS 2021",
address = "United States",
}