@inproceedings{cb9410e2a06040bcae780edcbe5103bf,
title = "A physical intelligent instrument using recurrent neural networks",
abstract = "This paper describes a new intelligent interactive instrument, based on an embedded computing platform, where deep neural networks are applied to interactive music generation. Even though using neural networks for music composition is not uncommon, a lot of these models tend to not support any form of user interaction. We introduce a self-contained intelligent instrument using generative models, with support for real-time interaction where the user can adjust high-level parameters to modify the music generated by the instrument. We describe the technical details of our generative model and discuss the experience of using the system as part of musical performance.",
author = "N{\ae}ss, {Torgrim R.} and Martin, {Charles P.}",
note = "Publisher Copyright: {\textcopyright} 2019 Steering Committee of the International Conference on New Interfaces for Musical Expression. All rights reserved.; 19th International conference on New Interfaces for Musical Expression, NIME 2019 ; Conference date: 03-06-2019 Through 06-06-2019",
year = "2019",
language = "English",
series = "Proceedings of the International Conference on New Interfaces for Musical Expression",
publisher = "International Conference on New Interfaces for Musical Expression",
pages = "79--82",
booktitle = "Proceedings of the International Conference on New Interfaces for Musical Expression",
}