@incollection{aadade4a5e31484c925fea173b5fc290,
  author    = {Baktashmotlagh, Mahsa and Harandi, Mehrtash and Salzmann, Mathieu},
  title     = {Learning Domain Invariant Embeddings by Matching Distributions},
  booktitle = {Domain Adaptation in Computer Vision Applications},
  editor    = {Csurka, Gabriela},
  series    = {Advances in Computer Vision and Pattern Recognition},
  publisher = {Springer International Publishing},
  address   = {Cham, Switzerland},
  year      = {2017},
  pages     = {95--114},
  doi       = {10.1007/978-3-319-58347-1_5},
  isbn      = {978-3-319-58346-4},
  language  = {English},
  note      = {Publisher Copyright: {\textcopyright} Springer International Publishing AG 2017.},
  abstract  = {One of the characteristics of the domain shift problem is that the source and target data have been drawn from different distributions. A natural approach to addressing this problem therefore consists of learning an embedding of the source and target data such that they have similar distributions in the new space. In this chapter, we study several methods that follow this approach. At the core of these methods lies the notion of distance between two distributions. We first discuss domain adaptation (DA) techniques that rely on the Maximum Mean Discrepancy to measure such a distance. We then study the use of alternative distribution distance measures within one specific Domain Adaptation framework. In this context, we focus on f-divergences, and in particular on the KL divergence and the Hellinger distance. Throughout the chapter, we evaluate the different methods and distance measures on the task of visual object recognition and compare them against related baselines on a standard DA benchmark dataset.},
}