@inproceedings{8c19b253c42346ec8af65fa96b69bfe3,
title = "Self-Concordant Analysis of Frank-Wolfe Algorithms",
abstract = "Projection-free optimization via different variants of the Frank-Wolfe (FW), a.k.a. Conditional Gradient method has become one of the cornerstones in optimization for machine learning since in many cases the linear minimization oracle is much cheaper to implement than projections and some sparsity needs to be preserved. In a number of applications, e.g. Poisson inverse problems or quantum state tomography, the loss is given by a self-concordant (SC) function having unbounded curvature, implying absence of theoretical guarantees for the existing FW methods. We use the theory of SC functions to provide a new adaptive step size for FW methods and prove global convergence rate O(1=k) after k iterations. If the problem admits a stronger local linear minimization oracle, we construct a novel FW method with linear convergence rate for SC functions.",
author = "Pavel Dvurechensky and Petr Ostroukhov and Kamil Safin and Shimrit Shtern and Mathias Staudigl",
note = "Publisher Copyright: {\textcopyright} Author(s) 2020. All rights reserved.; 37th International Conference on Machine Learning, ICML 2020 ; Conference date: 13-07-2020 Through 18-07-2020",
year = "2020",
language = "الإنجليزيّة",
series = "37th International Conference on Machine Learning, ICML 2020",
pages = "2794--2804",
editor = "Hal Daume and Aarti Singh",
booktitle = "37th International Conference on Machine Learning, ICML 2020",
}
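
% The abstract above contrasts the linear minimization oracle (LMO) used by
% Frank-Wolfe with the projections required by projected-gradient methods.
% Below is a minimal illustrative sketch of a generic FW loop in Python. It uses
% the classical 2/(k+2) open-loop step size, NOT the paper's adaptive
% self-concordant step size, and the objective and feasible set (a quadratic
% over the probability simplex) are assumptions chosen only for illustration.
%
% import numpy as np
%
% def lmo_simplex(grad):
%     """LMO over the probability simplex: argmin_{s in simplex} <grad, s>.
%     The minimizer is the vertex with the smallest gradient entry."""
%     s = np.zeros_like(grad)
%     s[np.argmin(grad)] = 1.0
%     return s
%
% def frank_wolfe(grad_f, x0, lmo, num_iters=200):
%     """Generic FW loop: x_{k+1} = x_k + gamma_k * (s_k - x_k)."""
%     x = x0.copy()
%     for k in range(num_iters):
%         g = grad_f(x)
%         s = lmo(g)                  # linear minimization oracle call
%         gamma = 2.0 / (k + 2.0)     # classical open-loop step size
%         x = x + gamma * (s - x)     # convex combination keeps x feasible
%     return x
%
% if __name__ == "__main__":
%     # Illustrative quadratic f(x) = 0.5 * ||A x - b||^2 over the simplex.
%     rng = np.random.default_rng(0)
%     A = rng.standard_normal((20, 10))
%     b = rng.standard_normal(20)
%     grad_f = lambda x: A.T @ (A @ x - b)
%     x0 = np.ones(10) / 10.0
%     x_hat = frank_wolfe(grad_f, x0, lmo_simplex)
%     print("objective:", 0.5 * np.linalg.norm(A @ x_hat - b) ** 2)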