@inproceedings{7643ce2e55024f0dae947c4aa6d3c4b1,
  author    = {Mendelson, Shahar and Williamson, Robert C.},
  title     = {Agnostic Learning Nonconvex Function Classes},
  booktitle = {Computational Learning Theory --- 15th Annual Conference on Computational Learning Theory, {COLT} 2002, Proceedings},
  editor    = {Kivinen, Jyrki and Sloan, Robert H.},
  series    = {Lecture Notes in Artificial Intelligence (Subseries of Lecture Notes in Computer Science)},
  volume    = {2375},
  pages     = {1--13},
  publisher = {Springer},
  address   = {Berlin, Heidelberg},
  year      = {2002},
  doi       = {10.1007/3-540-45435-7_1},
  language  = {English},
  abstract  = {We consider the sample complexity of agnostic learning with respect to squared loss. It is known that if the function class F used for learning is convex then one can obtain better sample complexity bounds than usual. It has been claimed that there is a lower bound that showed there was an essential gap in the rate. In this paper we show that the lower bound proof has a gap in it. Although we do not provide a definitive answer to its validity. More positively, we show one can obtain ``fast'' sample complexity bounds for nonconvex F for ``most'' target conditional expectations. The new bounds depend on the detailed geometry of F, in particular the distance in a certain sense of the target's conditional expectation from the set of nonuniqueness points of the class F.},
  note      = {Publisher Copyright: {\textcopyright} Springer-Verlag Berlin Heidelberg 2002.; 15th Annual Conference on Computational Learning Theory, COLT 2002 ; Conference date: 08-07-2002 Through 10-07-2002},
}