@inproceedings{01b24c7ab0b241f8a26c8bac42ad1192,
title = "The complexity of finding stationary points with stochastic gradient descent",
abstract = "We study the iteration complexity of stochastic gradient descent (SGD) for minimizing the gradient norm of smooth, possibly nonconvex functions. We provide several results, implying that the classical $\mathcal{O}(\epsilon^{-4})$ upper bound (for making the average gradient norm less than $\epsilon$) cannot be improved upon, unless a combination of additional assumptions is made. Notably, this holds even if we limit ourselves to convex quadratic functions. We also show that for nonconvex functions, the feasibility of minimizing gradients with SGD is surprisingly sensitive to the choice of optimality criteria.",
author = "Yoel Drori and Ohad Shamir",
note = "Publisher Copyright: {\textcopyright} Author(s) 2020. All rights reserved.; 37th International Conference on Machine Learning, ICML 2020 ; Conference date: 13-07-2020 Through 18-07-2020",
year = "2020",
language = "English",
series = "37th International Conference on Machine Learning, ICML 2020",
pages = "2636--2645",
editor = "Hal Daum{\'e} III and Aarti Singh",
booktitle = "37th International Conference on Machine Learning, ICML 2020",
}