@inproceedings{35f8aa23241b4043aa00760db536db89,
  title     = {{Riemannian} structure of some new gradient descent learning algorithms},
  abstract  = {We consider some generalizations of the classical LMS learning algorithm including the exponentiated gradient (EG) algorithm. We show how one can develop these algorithms in terms of a prior distribution over the weight space. Our framework subsumes the notion of ``link-functions''. Differential geometric methods are used to develop the algorithms as gradient descent with respect to the natural gradient in the Riemannian structure induced by the prior distribution. This provides a Bayesian Riemannian interpretation of the EG and related algorithms. We relate our work to that of Amari (1985, 1997, 1998) and others who used similar tools in a different manner. Simulation experiments illustrating the behaviour of the new algorithms are presented.},
  author    = {Mahony, {R. E.} and Williamson, {R. C.}},
  note      = {Publisher Copyright: {\textcopyright} 2000 IEEE.; IEEE Adaptive Systems for Signal Processing, Communications, and Control Symposium, AS-SPCC 2000 ; Conference date: 01-10-2000 Through 04-10-2000},
  year      = {2000},
  doi       = {10.1109/ASSPCC.2000.882470},
  language  = {English},
  series    = {IEEE 2000 Adaptive Systems for Signal Processing, Communications, and Control Symposium, AS-SPCC 2000},
  publisher = {Institute of Electrical and Electronics Engineers Inc.},
  pages     = {197--202},
  booktitle = {IEEE 2000 Adaptive Systems for Signal Processing, Communications, and Control Symposium, AS-SPCC 2000},
  address   = {United States},
}