AFFECT-EXPRESSIVE HAND GESTURES SYNTHESIS AND ANIMATION. Bozkurt, E., Erzin, E., & Yemez, Y. In 2015 IEEE International Conference on Multimedia & Expo (ICME), Turin, Italy, Jun 29 - Jul 3, 2015.

Abstract: Speech and hand gestures form a composite communicative signal that boosts the naturalness and affectiveness of communication. We present a multimodal framework for joint analysis of continuous affect, speech prosody and hand gestures towards automatic synthesis of realistic hand gestures from spontaneous speech using hidden semi-Markov models (HSMMs). To the best of our knowledge, this is the first attempt to synthesize hand gestures using a continuous dimensional affect space, i.e., activation, valence, and dominance. We model the relationships between acoustic features describing speech prosody and hand gestures, with and without the continuous affect information, in speaker-independent configurations, and we evaluate the multimodal analysis framework through objective evaluations and by generating hand gesture animations. Our experimental studies are promising, conveying the role of affect in modeling the dynamics of the speech-gesture relationship.
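As background for the abstract above, the sketch below illustrates the generative idea behind a hidden semi-Markov model: unlike a standard HMM, each hidden state carries an explicit duration distribution, which suits gesture units that persist over many animation frames. This is a minimal toy in Python, not the authors' model; the state count, the Poisson duration distributions, and the 2-D Gaussian "gesture feature" emissions are all assumptions made up for illustration.

import numpy as np

rng = np.random.default_rng(0)

# Toy HSMM: 3 hypothetical "gesture unit" states emitting 2-D features.
n_states = 3
trans = np.array([[0.0, 0.6, 0.4],     # transition probabilities between
                  [0.5, 0.0, 0.5],     # *different* states; self-transitions
                  [0.7, 0.3, 0.0]])    # are replaced by explicit durations
dur_mean = np.array([5.0, 8.0, 3.0])   # mean dwell time in frames (Poisson)
emit_mean = np.array([[0.0, 0.0],      # per-state mean of the 2-D
                      [1.0, 2.0],      # gesture-feature emission
                      [-1.0, 0.5]])
emit_std = 0.2

def sample_hsmm(n_frames, start=0):
    # Sample a state path and a gesture-feature sequence of length n_frames.
    states, feats = [], []
    s = start
    while len(states) < n_frames:
        d = max(1, rng.poisson(dur_mean[s]))            # explicit state duration
        for _ in range(min(d, n_frames - len(states))):
            states.append(s)
            feats.append(rng.normal(emit_mean[s], emit_std))
        s = rng.choice(n_states, p=trans[s])            # jump to the next state
    return np.array(states), np.array(feats)

path, frames = sample_hsmm(40)
print(path)           # runs of repeated states, one run per gesture unit
print(frames.shape)   # (40, 2)

In the paper's setting, the hidden states and their parameters would be learned jointly from prosodic (and, in the affect-aware variant, activation/valence/dominance) features rather than fixed by hand as in this toy.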
@inproceedings{ISI:000380486500101,
Author = {Bozkurt, Elif and Erzin, Engin and Yemez, Yucel},
Book-Group-Author = {{IEEE}},
Title = {{AFFECT-EXPRESSIVE HAND GESTURES SYNTHESIS AND ANIMATION}},
Booktitle = {{2015 IEEE INTERNATIONAL CONFERENCE ON MULTIMEDIA \& EXPO (ICME)}},
Series = {{IEEE International Conference on Multimedia and Expo}},
Year = {{2015}},
Note = {{IEEE International Conference on Multimedia \& Expo (ICME), Turin,
ITALY, JUN 29-JUL 03, 2015}},
Abstract = {{Speech and hand gestures form a composite communicative signal that
boosts the naturalness and affectiveness of the communication. We
present a multimodal framework for joint analysis of continuous affect,
speech prosody and hand gestures towards automatic synthesis of
realistic hand gestures from spontaneous speech using the hidden
semi-Markov models (HSMMs). To the best of our knowledge, this is the
first attempt for synthesizing hand gestures using continuous
dimensional affect space, i.e., activation, valence, and dominance. We
model relationships between acoustic features describing speech prosody
and hand gestures with and without using the continuous affect
information in speaker independent configurations and evaluate the
multimodal analysis framework by generating hand gesture animations,
also via objective evaluations. Our experimental studies are promising,
conveying the role of affect for modeling the dynamics of speech-gesture
relationship.}},
ISSN = {{1945-7871}},
ISBN = {{978-1-4799-7082-7}},
ResearcherID-Numbers = {{Erzin, Engin/H-1716-2011}},
ORCID-Numbers = {{Erzin, Engin/0000-0002-2715-2368}},
Unique-ID = {{ISI:000380486500101}},
}
{"_id":"6CEbCSyStAHh8Xooy","bibbaseid":"bozkurt-erzin-yemez-affectexpressivehandgesturessynthesisandanimation-2015","downloads":0,"creationDate":"2015-12-09T21:23:15.292Z","title":"AFFECT-EXPRESSIVE HAND GESTURES SYNTHESIS AND ANIMATION","author_short":["Bozkurt, E.","Erzin, E.","Yemez, Y."],"year":2015,"bibtype":"inproceedings","biburl":"http://home.ku.edu.tr/~eerzin/pubs/mvgl.bib","bibdata":{"bibtype":"inproceedings","type":"inproceedings","author":[{"propositions":[],"lastnames":["Bozkurt"],"firstnames":["Elif"],"suffixes":[]},{"propositions":[],"lastnames":["Erzin"],"firstnames":["Engin"],"suffixes":[]},{"propositions":[],"lastnames":["Yemez"],"firstnames":["Yucel"],"suffixes":[]}],"book-group-author":"IEEE","title":"AFFECT-EXPRESSIVE HAND GESTURES SYNTHESIS AND ANIMATION","booktitle":"2015 IEEE INTERNATIONAL CONFERENCE ON MULTIMEDIA & EXPO (ICME)","series":"IEEE International Conference on Multimedia and Expo","year":"2015","note":"IEEE International Conference on Multimedia & Expo (ICME), Turin, ITALY, JUN 29-JUL 03, 2015","abstract":"Speech and hand gestures form a composite communicative signal that boosts the naturalness and affectiveness of the communication. We present a multimodal framework for joint analysis of continuous affect, speech prosody and hand gestures towards automatic synthesis of realistic hand gestures from spontaneous speech using the hidden semi-Markov models (HSMMs). To the best of our knowledge, this is the first attempt for synthesizing hand gestures using continuous dimensional affect space, i.e., activation, valence, and dominance. We model relationships between acoustic features describing speech prosody and hand gestures with and without using the continuous affect information in speaker independent configurations and evaluate the multimodal analysis framework by generating hand gesture animations, also via objective evaluations. Our experimental studies are promising, conveying the role of affect for modeling the dynamics of speech-gesture relationship.","issn":"1945-7871","isbn":"978-1-4799-7082-7","researcherid-numbers":"Erzin, Engin/H-1716-2011","orcid-numbers":"Erzin, Engin/0000-0002-2715-2368","unique-id":"ISI:000380486500101","bibtex":"@inproceedings{ ISI:000380486500101,\nAuthor = {Bozkurt, Elif and Erzin, Engin and Yemez, Yucel},\nBook-Group-Author = {{IEEE}},\nTitle = {{AFFECT-EXPRESSIVE HAND GESTURES SYNTHESIS AND ANIMATION}},\nBooktitle = {{2015 IEEE INTERNATIONAL CONFERENCE ON MULTIMEDIA \\& EXPO (ICME)}},\nSeries = {{IEEE International Conference on Multimedia and Expo}},\nYear = {{2015}},\nNote = {{IEEE International Conference on Multimedia \\& Expo (ICME), Turin,\n ITALY, JUN 29-JUL 03, 2015}},\nAbstract = {{Speech and hand gestures form a composite communicative signal that\n boosts the naturalness and affectiveness of the communication. We\n present a multimodal framework for joint analysis of continuous affect,\n speech prosody and hand gestures towards automatic synthesis of\n realistic hand gestures from spontaneous speech using the hidden\n semi-Markov models (HSMMs). To the best of our knowledge, this is the\n first attempt for synthesizing hand gestures using continuous\n dimensional affect space, i.e., activation, valence, and dominance. 
We\n model relationships between acoustic features describing speech prosody\n and hand gestures with and without using the continuous affect\n information in speaker independent configurations and evaluate the\n multimodal analysis framework by generating hand gesture animations,\n also via objective evaluations. Our experimental studies are promising,\n conveying the role of affect for modeling the dynamics of speech-gesture\n relationship.}},\nISSN = {{1945-7871}},\nISBN = {{978-1-4799-7082-7}},\nResearcherID-Numbers = {{Erzin, Engin/H-1716-2011}},\nORCID-Numbers = {{Erzin, Engin/0000-0002-2715-2368}},\nUnique-ID = {{ISI:000380486500101}},\n}\n\n","author_short":["Bozkurt, E.","Erzin, E.","Yemez, Y."],"key":"ISI:000380486500101","id":"ISI:000380486500101","bibbaseid":"bozkurt-erzin-yemez-affectexpressivehandgesturessynthesisandanimation-2015","role":"author","urls":{},"metadata":{"authorlinks":{"erzin, e":"http://home.ku.edu.tr/~eerzin/pubs/index6.html"}},"downloads":0,"html":""},"search_terms":["affect","expressive","hand","gestures","synthesis","animation","bozkurt","erzin","yemez"],"keywords":[],"authorIDs":["s4rze5RZET4EY5wXY"],"dataSources":["P7SB4qiBxZPhjXYRW","eoMYcQtZLjtLCGT3K"]}