@inproceedings{9a0deef47db746f49a8dd1860b69d784,
title = "Logistic regression: Tight bounds for stochastic and online optimization",
abstract = "The logistic loss function is often advocated in machine learning and statistics as a smooth and strictly convex surrogate for the 0-1 loss. In this paper we investigate the question of whether these smoothness and convexity properties make the logistic loss preferable to other widely considered options such as the hinge loss. We show that in contrast to known asymptotic bounds, as long as the number of prediction/optimization iterations is sub exponential, the logistic loss provides no improvement over a generic non-smooth loss function such as the hinge loss. In particular we show that the convergence rate of stochastic logistic optimization is bounded from below by a polynomial in the diameter of the decision set and the number of prediction iterations, and provide a matching tight upper bound. This resolves the COLT open problem of McMahan and Streeter (2012).",
keywords = "Logistic regression, Lower bounds, Online learning, Stochastic optimization",
author = "Elad Hazan and Tomer Koren and Levy, {Kfir Y.}",
note = "Publisher Copyright: {\textcopyright} 2014 E. Hazan, T. Koren & K.Y. Levy.; 27th Conference on Learning Theory, COLT 2014 ; Conference date: 13-06-2014 Through 15-06-2014",
year = "2014",
language = "אנגלית",
volume = "35",
series = "Proceedings of Machine Learning Research",
publisher = "PMLR",
pages = "197--209",
editor = "Balcan, {Maria Florina} and Vitaly Feldman and Csaba Szepesv{\'a}ri",
booktitle = "Proceedings of The 27th Conference on Learning Theory",
}
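
For quick reference alongside the abstract: a minimal sketch of the two surrogate losses being compared, in their standard textbook form (the margin variable z and the notation below are ours, not taken from the paper):

\[
  \ell_{\mathrm{log}}(z) = \log\bigl(1 + e^{-z}\bigr),
  \qquad
  \ell_{\mathrm{hinge}}(z) = \max\{0,\, 1 - z\},
  \qquad z = y \, \langle w, x \rangle .
\]

The logistic loss is smooth and strictly convex while the hinge loss is neither; the abstract's lower bound says this contrast yields no rate improvement unless the number of iterations is exponentially large.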