@inproceedings{b978bf2d3b424c609449631f4a15c398,
  title     = {{No Free Lunch} versus {Occam's} Razor in Supervised Learning},
  abstract  = {The No Free Lunch theorems are often used to argue that domain specific knowledge is required to design successful algorithms. We use algorithmic information theory to argue the case for a universal bias allowing an algorithm to succeed in all interesting problem domains. Additionally, we give a new algorithm for off-line classification, inspired by Solomonoff induction, with good performance on all structured (compressible) problems under reasonable assumptions. This includes a proof of the efficacy of the well-known heuristic of randomly selecting training data in the hope of reducing the misclassification rate.},
  keywords  = {Kolmogorov complexity, Occam's razor, Supervised learning, no free lunch},
  author    = {Lattimore, Tor and Hutter, Marcus},
  editor    = {Dowe, David L.},
  year      = {2013},
  doi       = {10.1007/978-3-642-44958-1_17},
  language  = {English},
  isbn      = {9783642449574},
  series    = {Lecture Notes in Computer Science},
  volume    = {7070},
  publisher = {Springer},
  pages     = {223--235},
  booktitle = {Algorithmic Probability and Friends},
  address   = {Germany},
  note      = {Ray Solomonoff 85th Memorial Conference on Algorithmic Probability and Friends: Bayesian Prediction and Artificial Intelligence ; Conference date: 30-11-2011 Through 02-12-2011},
}