To embed this publication list in another web page, copy and paste any one of the following snippets.

JavaScript (renders the list client-side):

<script src="https://bibbase.org/show?bib=https%3A%2F%2Fbibbase.org%2Fzotero-mypublications%2Fdaniel.bedoya.r&jsonp=1"></script>

PHP (fetches the rendered list on the server and prints it inline):

<?php
// Fetch the BibBase-rendered publication list and output it in place
$contents = file_get_contents("https://bibbase.org/show?bib=https%3A%2F%2Fbibbase.org%2Fzotero-mypublications%2Fdaniel.bedoya.r");
echo $contents;
?>
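The PHP snippet above contacts bibbase.org on every page load. As a minimal sketch of an alternative (an assumption, not part of BibBase; the cache path and refresh interval are illustrative), the fetched HTML can be cached on disk and refreshed periodically:

<?php
// Hypothetical caching variant of the PHP snippet above: serve a local copy
// of the rendered list and refetch it from bibbase.org at most once a day.
$url = "https://bibbase.org/show?bib=https%3A%2F%2Fbibbase.org%2Fzotero-mypublications%2Fdaniel.bedoya.r";
$cache = sys_get_temp_dir() . "/bibbase-publications.html";
$maxAge = 24 * 60 * 60; // refresh interval in seconds (one day)

if (!file_exists($cache) || time() - filemtime($cache) > $maxAge) {
    $contents = file_get_contents($url);
    if ($contents !== false) {
        file_put_contents($cache, $contents);
    }
}

if (file_exists($cache)) {
    echo file_get_contents($cache);
}
?>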
iFrame (embeds the rendered list in a frame; set width and height attributes as needed):

<iframe src="https://bibbase.org/show?bib=https%3A%2F%2Fbibbase.org%2Fzotero-mypublications%2Fdaniel.bedoya.r"></iframe>

For more details, see the documentation.
@inproceedings{bedoya_creating_2022, address = {Saint-Étienne, France}, title = {Creating {Experiments} with {CosmoNote}: {Advancing} {Web}-{Based} {Annotations} for {Performed} {Music}}, copyright = {All rights reserved}, shorttitle = {Creating {Experiments} with {CosmoNote}}, url = {https://hal.science/hal-03714624}, doi = {10.5281/zenodo.6576284}, abstract = {CosmoNote is a web-based application for visualizing and annotating expressive structures in performed music. This demo highlights the flexibility of CosmoNote's reconfigurable interface, features, and toolsets to design experiments to advance knowledge in music science, music perception, and music expressivity. CosmoNote's music pieces are organized into collections accessible by user accounts with defined roles. The music representations integrate synchronized visual layers representing recorded piano performance data and features to facilitate and inform annotations. The web app provides four annotation types: boundaries, regions, note-groups and comments. Through the selection of pieces, collections and user roles, CosmoNote's features can be tailored to investigate diverse research questions about experimental design for music audio-based annotations.}, language = {en}, urldate = {2023-02-11}, booktitle = {Proceedings of the 19th {Sound} and {Music} {Computing} {Conference}}, author = {Bedoya, Daniel and Fyfe, Lawrence and Chew, Elaine}, month = jun, year = {2022}, pages = {683--684}, }
@inproceedings{fyfe_nutshell_2022, address = {Cannes, France}, series = {{WAC} '22}, title = {A {Nutshell} {Guide} to {Annotating} {Recorded} {Piano} {Performances} on the {Web} with {CosmoNote}}, copyright = {All rights reserved}, url = {https://hal.science/hal-03715778}, doi = {10.5281/zenodo.6769016}, abstract = {CosmoNote is a web-based citizen science tool for annotating expressive musical structures created by performers during piano performances. For this demonstration, we describe the annotation process in CosmoNote that allows citizen scientists to find and mark these expressive musical structures.}, language = {en}, urldate = {2023-02-11}, booktitle = {7th {International} {Web} {Audio} {Conference}}, author = {Fyfe, Lawrence and Bedoya, Daniel and Chew, Elaine}, month = jul, year = {2022}, }
@article{bedoya_perceiver-centered_2022, title = {A {Perceiver}-{Centered} {Approach} for {Representing} and {Annotating} {Prosodic} {Functions} in {Performed} {Music}}, volume = {13}, copyright = {All rights reserved}, issn = {1664-1078}, url = {https://www.frontiersin.org/articles/10.3389/fpsyg.2022.886570}, doi = {10.3389/fpsyg.2022.886570}, abstract = {Musical prosody is characterized by the acoustic variations that make music expressive. However, few systematic and scalable studies exist on the function it serves or on effective tools to carry out such studies. To address this gap, we introduce a novel approach to capturing information about prosodic functions through a citizen science paradigm. In typical bottom-up approaches to studying musical prosody, acoustic properties in performed music and basic musical structures such as accents and phrases are mapped to prosodic functions, namely segmentation and prominence. In contrast, our top-down, human-centered method puts listener annotations of musical prosodic functions first, to analyze the connection between these functions, the underlying musical structures, and acoustic properties. The method is applied primarily to the exploring of segmentation and prominence in performed solo piano music. These prosodic functions are marked by means of four annotation types—boundaries, regions, note groups, and comments—in the CosmoNote web-based citizen science platform, which presents the music signal or MIDI data and related acoustic features in information layers that can be toggled on and off. Various annotation strategies are discussed and appraised: intuitive vs. analytical; real-time vs. retrospective; and, audio-based vs. visual. The end-to-end process of the data collection is described, from the providing of prosodic examples to the structuring and formatting of the annotation data for analysis, to techniques for preventing precision errors. The aim is to obtain reliable and coherent annotations that can be applied to theoretical and data-driven models of musical prosody. The outcomes include a growing library of prosodic examples with the goal of achieving an annotation convention for studying musical prosody in performed music.}, language = {en}, urldate = {2023-02-11}, journal = {Frontiers in Psychology}, author = {Bedoya, Daniel and Fyfe, Lawrence and Chew, Elaine}, year = {2022}, }
@article{fyfe_annotation_2022, title = {Annotation and {Analysis} of {Recorded} {Piano} {Performances} on the {Web}}, volume = {70}, copyright = {All rights reserved}, url = {https://www.aes.org/e-lib/browse.cfm?elib=22020}, doi = {10.17743/jaes.2022.0057}, abstract = {Advancing knowledge and understanding about performed music is hampered by a lack of annotation data for music expressivity. To enable large-scale collection of annotations and explorations of performed music, the authors have created a workflow that is enabled by CosmoNote, a Web-based citizen science tool for annotating musical structures created by the performer and experienced by the listener during expressive piano performances. To enable annotation tasks with CosmoNote, annotators can...}, language = {en}, number = {11}, urldate = {2023-02-11}, journal = {Journal of the Audio Engineering Society}, author = {Fyfe, Lawrence and Bedoya, Daniel and Chew, Elaine}, month = nov, year = {2022}, note = {Publisher: Audio Engineering Society}, pages = {962--978}, }
@inproceedings{fyfe_cosmonote:_2021, address = {Barcelona, Spain}, series = {{WAC} '21}, title = {{CosmoNote}: {A} {Web}-based {Citizen} {Science} {Tool} for {Annotating} {Music} {Performances}}, copyright = {All rights reserved}, issn = {2663-5844}, url = {http://webaudioconf.com/posts/2021_25/}, abstract = {CosmoNote is a web-based citizen science tool for annotating musical structures, with a focus on structures created by the performer during expressive musical performance. The software interface enables the superimposition of synchronized discrete and continuous information layers which include note data representations, audio features such as loudness and tempo, and score features such as harmonic tension in a visual and audio environment. The tools provide the means for users to signal performance decisions such as segmentation and prominence using boundaries of varying strengths, regions, comments, and note groupings. User-friendly interaction features have been built in to facilitate ease of annotation; these include the ability to zoom in, listen to, and mark up specific segments of music. The data collected will be used to discover the vocabulary of performed music structures and to aid in the understanding of expressive choices and nuances.}, urldate = {2022-01-12}, booktitle = {Proceedings of the {International} {Web} {Audio} {Conference}}, publisher = {UPF}, author = {Fyfe, Lawrence and Bedoya, Daniel and Guichaoua, Corentin and Chew, Elaine}, month = jul, year = {2021}, editor = {Joglar-Ongay, Luis and Serra, Xavier and Font, Frederic and Tovstogan, Philip and Stolfi, Ariane and A. Correya, Albin and Ramires, Antonio and Bogdanov, Dmitry and Faraldo, Angel and Favory, Xavier}, pages = {1--6}, }
@article{bedoya_even_2021, title = {Even violins can cry: specifically vocal emotional behaviours also drive the perception of emotions in non-vocal music}, volume = {376}, copyright = {All rights reserved}, shorttitle = {Even violins can cry}, url = {https://royalsocietypublishing.org/doi/full/10.1098/rstb.2020.0396}, doi = {10.1098/rstb.2020.0396}, abstract = {A wealth of theoretical and empirical arguments have suggested that music triggers emotional responses by resembling the inflections of expressive vocalizations, but have done so using low-level acoustic parameters (pitch, loudness, speed) that, in fact, may not be processed by the listener in reference to human voice. Here, we take the opportunity of the recent availability of computational models that allow the simulation of three specifically vocal emotional behaviours: smiling, vocal tremor and vocal roughness. When applied to musical material, we find that these three acoustic manipulations trigger emotional perceptions that are remarkably similar to those observed on speech and scream sounds, and identical across musician and non-musician listeners. Strikingly, this not only applied to singing voice with and without musical background, but also to purely instrumental material. This article is part of the theme issue ‘Voice modulation: from origin and mechanism to social impact (Part I)’.}, number = {1840}, urldate = {2023-02-11}, journal = {Philosophical Transactions of the Royal Society B: Biological Sciences}, author = {Bedoya, Daniel and Arias, Pablo and Rachman, Laura and Liuni, Marco and Canonne, Clément and Goupil, Louise and Aucouturier, Jean-Julien}, month = nov, year = {2021}, note = {Publisher: Royal Society}, pages = {20200396}, }
@inproceedings{le_carrou_acoustique_2018, address = {Le Havre, France}, title = {Acoustique des harpes d'{Afrique} centrale : étude préliminaire}, copyright = {All rights reserved}, url = {https://www.conforg.fr/cfa2018/output_directory2/data/articles/000307.pdf}, abstract = {As part of a multidisciplinary research project on the evolutionary history of Central African harps, this study compares the acoustic properties of three harps collected in Gabon from the Fang, Tsogho, and Massango ethnic groups. Gabonese harps are played in ritual contexts (initiatory brotherhoods, therapeutic rituals), and their spread among the populations that play them (Tsogho, Massango, Apindji, Myéné, Fang) went hand in hand with the borrowing of ritual practices, notably the bwiti cult. In this oral tradition, musical practice is learned implicitly through immersion and imitation; the same holds for the transmission of the knowledge and know-how of instrument making. The great diversity and variability of the instrument observed at the morphological, aesthetic, musical, and symbolic levels, both between and within populations, leads us to study the instrument's evolutionary processes through its constitutive parameters, in particular by determining and defining acoustic descriptors that complement the iconographic and ethnomusicological ones. The vibratory behaviour of the three harps was analysed. Triangular in shape, they are fitted with 8 nylon strings attached on one side to tuning pegs set in the neck and on the other to a wooden tailpiece fixed below the soundboard, which is made from an animal skin (antelope or duiker). The methods classically used for Western instruments (mobility measurement, modal analysis) were applied but proved limited by the non-linear behaviour of the skin forming the soundboard. The results show that the way this skin is prepared is an essential element of the instrument's vibratory behaviour. The development of a systematic analysis methodology will make it possible to take measurements in the field and in museum collections, enriching the corpus for the comparative study of these harps.}, language = {fr}, booktitle = {14ème {Congrès} {Français} d'{Acoustique}}, author = {Le Carrou, Jean-Loïc and Bedoya, Daniel and Mifune, Marie-France and Le Bomin, Sylvie}, month = apr, year = {2018}, pages = {249--254}, }
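The list above is plain BibTeX, and the bib= parameter in the embed URLs points at its raw source. As a minimal sketch (an assumption, not a BibBase feature: it presumes that URL returns the raw .bib file and that no field contains an @entry-like pattern), the entries can also be inventoried directly:

<?php
// Hypothetical inventory script: fetch the raw BibTeX behind the rendered
// list and print each entry's year, type, and citation key. A real BibTeX
// parser would be needed to extract brace-nested fields such as titles.
$bib = file_get_contents("https://bibbase.org/zotero-mypublications/daniel.bedoya.r");

// Match "@type{key," followed by the entry body, up to the next entry or EOF
preg_match_all('/@(\w+)\s*\{\s*([^,\s]+)\s*,(.*?)(?=@\w+\s*\{|\z)/s', $bib, $entries, PREG_SET_ORDER);

foreach ($entries as $entry) {
    $type = $entry[1]; // e.g. "inproceedings" or "article"
    $key  = $entry[2]; // e.g. "bedoya_creating_2022"
    $year = preg_match('/year\s*=\s*\{?(\d{4})\}?/', $entry[3], $m) ? $m[1] : 'n.d.';
    echo "$year  $type  $key\n";
}
?>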