Head movements, facial expressions and feedback in conversations: empirical evidence from Danish multimodal data. Paggio, P. & Navarretta, C. Journal on Multimodal User Interfaces, 7(1):29–37, March, 2013. Paper doi abstract bibtex This article deals with multimodal feedback in two Danish multimodal corpora, i.e., a collection of map-task dialogues and a corpus of free conversations in first encounters between pairs of subjects. Machine learning techniques are applied to both sets of data to investigate various relations between the non-verbal behaviour—more specifically head movements and facial expressions—and speech with regard to the expression of feedback. In the map-task data, we study the extent to which the dialogue act type of linguistic feedback expressions can be classified automatically based on the non-verbal features. In the conversational data, on the other hand, non-verbal and speech features are used together to distinguish feedback from other multimodal behaviours. The results of the two sets of experiments indicate in general that head movements, and to a lesser extent facial expressions, are important indicators of feedback, and that gestures and speech disambiguate each other in the machine learning process.
@article{paggio_head_2013,
title = {Head movements, facial expressions and feedback in conversations: empirical evidence from {Danish} multimodal data},
volume = {7},
issn = {1783-8738},
shorttitle = {Head movements, facial expressions and feedback in conversations},
url = {https://doi.org/10.1007/s12193-012-0105-9},
doi = {10.1007/s12193-012-0105-9},
abstract = {This article deals with multimodal feedback in two Danish multimodal corpora, i.e., a collection of map-task dialogues and a corpus of free conversations in first encounters between pairs of subjects. Machine learning techniques are applied to both sets of data to investigate various relations between the non-verbal behaviour—more specifically head movements and facial expressions—and speech with regard to the expression of feedback. In the map-task data, we study the extent to which the dialogue act type of linguistic feedback expressions can be classified automatically based on the non-verbal features. In the conversational data, on the other hand, non-verbal and speech features are used together to distinguish feedback from other multimodal behaviours. The results of the two sets of experiments indicate in general that head movements, and to a lesser extent facial expressions, are important indicators of feedback, and that gestures and speech disambiguate each other in the machine learning process.},
language = {en},
number = {1},
urldate = {2019-09-05},
journal = {Journal on Multimodal User Interfaces},
author = {Paggio, Patrizia and Navarretta, Costanza},
month = mar,
year = {2013},
keywords = {Feedback, Gestures, Backchanneling, Facial expressions, Head movements},
pages = {29--37},
}
Downloads: 0
{"_id":"CQv8FqwbZ5v77bByg","bibbaseid":"paggio-navarretta-headmovementsfacialexpressionsandfeedbackinconversationsempiricalevidencefromdanishmultimodaldata-2013","downloads":0,"creationDate":"2016-10-05T13:48:43.730Z","title":"Head movements, facial expressions and feedback in conversations: empirical evidence from Danish multimodal data","author_short":["Paggio, P.","Navarretta, C."],"year":2013,"bibtype":"article","biburl":"https://sorensandagersorensen.dk/res/danishgrammar.bib","bibdata":{"bibtype":"article","type":"article","title":"Head movements, facial expressions and feedback in conversations: empirical evidence from Danish multimodal data","volume":"7","issn":"1783-8738","shorttitle":"Head movements, facial expressions and feedback in conversations","url":"https://doi.org/10.1007/s12193-012-0105-9","doi":"10.1007/s12193-012-0105-9","abstract":"This article deals with multimodal feedback in two Danish multimodal corpora, i.e., a collection of map-task dialogues and a corpus of free conversations in first encounters between pairs of subjects. Machine learning techniques are applied to both sets of data to investigate various relations between the non-verbal behaviour—more specifically head movements and facial expressions—and speech with regard to the expression of feedback. In the map-task data, we study the extent to which the dialogue act type of linguistic feedback expressions can be classified automatically based on the non-verbal features. In the conversational data, on the other hand, non-verbal and speech features are used together to distinguish feedback from other multimodal behaviours. \nThe results of the two sets of experiments indicate in general that head movements, and to a lesser extent facial expressions, are important indicators of feedback, and that gestures and speech disambiguate each other in the machine learning process.","language":"en","number":"1","urldate":"2019-09-05","journal":"Journal on Multimodal User Interfaces","author":[{"propositions":[],"lastnames":["Paggio"],"firstnames":["Patrizia"],"suffixes":[]},{"propositions":[],"lastnames":["Navarretta"],"firstnames":["Costanza"],"suffixes":[]}],"month":"March","year":"2013","keywords":"Feedback, Gestures, Backchanneling, Facial expressions, Head movements","pages":"29–37","bibtex":"@article{paggio_head_2013,\n\ttitle = {Head movements, facial expressions and feedback in conversations: empirical evidence from {Danish} multimodal data},\n\tvolume = {7},\n\tissn = {1783-8738},\n\tshorttitle = {Head movements, facial expressions and feedback in conversations},\n\turl = {https://doi.org/10.1007/s12193-012-0105-9},\n\tdoi = {10.1007/s12193-012-0105-9},\n\tabstract = {This article deals with multimodal feedback in two Danish multimodal corpora, i.e., a collection of map-task dialogues and a corpus of free conversations in first encounters between pairs of subjects. Machine learning techniques are applied to both sets of data to investigate various relations between the non-verbal behaviour—more specifically head movements and facial expressions—and speech with regard to the expression of feedback. In the map-task data, we study the extent to which the dialogue act type of linguistic feedback expressions can be classified automatically based on the non-verbal features. In the conversational data, on the other hand, non-verbal and speech features are used together to distinguish feedback from other multimodal behaviours. \nThe results of the two sets of experiments indicate in general that head movements, and to a lesser extent facial expressions, are important indicators of feedback, and that gestures and speech disambiguate each other in the machine learning process.},\n\tlanguage = {en},\n\tnumber = {1},\n\turldate = {2019-09-05},\n\tjournal = {Journal on Multimodal User Interfaces},\n\tauthor = {Paggio, Patrizia and Navarretta, Costanza},\n\tmonth = mar,\n\tyear = {2013},\n\tkeywords = {Feedback, Gestures, Backchanneling, Facial expressions, Head movements},\n\tpages = {29--37},\n}\n\n","author_short":["Paggio, P.","Navarretta, C."],"key":"paggio_head_2013","id":"paggio_head_2013","bibbaseid":"paggio-navarretta-headmovementsfacialexpressionsandfeedbackinconversationsempiricalevidencefromdanishmultimodaldata-2013","role":"author","urls":{"Paper":"https://doi.org/10.1007/s12193-012-0105-9"},"keyword":["Feedback","Gestures","Backchanneling","Facial expressions","Head movements"],"metadata":{"authorlinks":{}},"html":""},"search_terms":["head","movements","facial","expressions","feedback","conversations","empirical","evidence","danish","multimodal","data","paggio","navarretta"],"keywords":["feedback","gestures","backchanneling","facial expressions","head movements"],"authorIDs":[],"dataSources":["tudya6YojbqEiF783","yFX9q6NiDASbnkXn4","36DYTpvwhpPPf8eDJ"]}