@inproceedings{10f560e79bc54948a239574e374ba6b3,
  title     = {Boosting stochastic {Newton} with entropy constraint for large-scale image classification},
  abstract  = {Large scale image classification requires efficient scalable learning methods with linear complexity in the number of samples. Although Stochastic Gradient Descent is an efficient alternative to classical Support Vector Machine, this method suffers from slow convergence. In this paper, our contribution is two folds. First we consider the minimization of specific calibrated losses, for which we show how to reliably estimate posteriors, binary entropy and margin. Secondly we propose a Boosting Stochastic Newton Descent (BSN) method for minimization in the primal space of these specific calibrated loss. BSN approximates the inverse Hessian by the best low-rank approximation. The originality of BSN relies on the fact that it does perform a boosting scheme without computing iterative weight update over the examples. We validate BSN by benchmarking it against several variants of the state-of-the-art SGD algorithm on the large scale ImageNet dataset. The results on ImageNet large scale image classification display that BSN improves significantly accuracy of the SGD baseline while being faster by orders of magnitude.},
  author    = {{Bel Haj Ali}, Wafa and Nock, Richard and Barlaud, Michel},
  note      = {Publisher Copyright: {\textcopyright} 2014 IEEE.; 22nd International Conference on Pattern Recognition, ICPR 2014 ; Conference date: 24-08-2014 Through 28-08-2014},
  year      = {2014},
  month     = dec,
  day       = {4},
  doi       = {10.1109/ICPR.2014.49},
  language  = {English},
  series    = {Proceedings - International Conference on Pattern Recognition},
  publisher = {Institute of Electrical and Electronics Engineers Inc.},
  pages     = {232--237},
  booktitle = {Proceedings - International Conference on Pattern Recognition},
  address   = {United States},
}