Unobtrusive emotion sensing and interpretation in smart environment. Starostenko, O., Cortés, X., Sánchez, J. A., & Alarcon-Aquino, V. Journal of Ambient Intelligence and Smart Environments, 7(1):59-83, 2015. doi:10.3233/AIS-140298

A current focus of human-centered technology is expanding the traditional contextual sensing and smart processing capabilities of ubiquitous systems by exploiting users' affective and emotional states to enable more natural communication between computing artefacts and users. This paper presents a smart environment of Web services developed to integrate and manage existing and new emotion sensing applications, which together provide tracking and recognition of human affective state in real time. In addition, two emotion interpreters based on the proposed 6-FACS and Distance models have been developed. Both models operate on encoded facial deformations described either in terms of Ekman's Action Units or the Facial Animation Parameters of the MPEG-4 standard. A fuzzy inference system, based on a reasoning model implemented in a knowledge base, is used for quantitative measurement and recognition of three intensity levels of basic and non-prototypical facial expressions. The designed frameworks, integrated into the smart environment, have been tested to evaluate the capability of the proposed models to extract and classify facial expressions, yielding interpretation precision of 65-96% for basic emotions and 55-65% for non-prototypical emotions. The conducted tests confirm that both basic and non-prototypical expressions may be composed of other basic emotions, thereby establishing concordance between existing psychological models of emotions and Ekman's model traditionally used in affective computing applications.
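The fuzzy-inference step described in the abstract can be made concrete with a small sketch. The snippet below is not the authors' implementation: the Action Units chosen (AU6, cheek raiser; AU12, lip corner puller), the triangular membership breakpoints, and the toy rule base are illustrative assumptions, used only to show how a knowledge base of fuzzy rules could map encoded facial deformations to a three-level intensity of one basic expression (happiness).

def tri(x, a, b, c):
    # Triangular membership function peaking at b over the interval [a, c].
    if x <= a or x >= c:
        return 0.0
    return (x - a) / (b - a) if x <= b else (c - x) / (c - b)

# Fuzzy sets over a normalized AU activation in [0, 1]: low / medium / high.
LEVELS = {
    "low":    lambda x: tri(x, -0.001, 0.0, 0.5),
    "medium": lambda x: tri(x, 0.0, 0.5, 1.0),
    "high":   lambda x: tri(x, 0.5, 1.0, 1.001),
}

def happiness_intensity(au6, au12):
    # Toy rule base: the happiness intensity level follows the weaker of the
    # two supporting AUs (min acts as fuzzy AND); one rule per output level.
    firing = {
        level: min(member(au6), member(au12))
        for level, member in LEVELS.items()
    }
    # "Defuzzify" by returning the level with the strongest firing degree.
    return max(firing, key=firing.get), firing

if __name__ == "__main__":
    label, degrees = happiness_intensity(au6=0.8, au12=0.9)
    print(label, degrees)  # prints "high" with the per-level firing degrees

In the system the paper describes, the knowledge base would be far richer, covering all basic emotions and their non-prototypical combinations, with membership functions derived from measured facial deformations (AU or FAP values) rather than chosen by hand.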
@article{starostenko2015unobtrusive,
title = {Unobtrusive emotion sensing and interpretation in smart environment},
type = {article},
year = {2015},
keywords = {Affective computing applications,Facial expression recognition,Sensing basic and non-prototypical emotions},
pages = {59-83},
volume = {7},
url = {https://www.medra.org/servlet/aliasResolver?alias=iospress&doi=10.3233/AIS-140298},
abstract = {A current focus of human-centered technology is expanding the traditional contextual sensing and smart processing capabilities of ubiquitous systems by exploiting users' affective and emotional states to enable more natural communication between computing artefacts and users. This paper presents a smart environment of Web services developed to integrate and manage existing and new emotion sensing applications, which together provide tracking and recognition of human affective state in real time. In addition, two emotion interpreters based on the proposed 6-FACS and Distance models have been developed. Both models operate on encoded facial deformations described either in terms of Ekman's Action Units or the Facial Animation Parameters of the MPEG-4 standard. A fuzzy inference system, based on a reasoning model implemented in a knowledge base, is used for quantitative measurement and recognition of three intensity levels of basic and non-prototypical facial expressions. The designed frameworks, integrated into the smart environment, have been tested to evaluate the capability of the proposed models to extract and classify facial expressions, yielding interpretation precision of 65-96% for basic emotions and 55-65% for non-prototypical emotions. The conducted tests confirm that both basic and non-prototypical expressions may be composed of other basic emotions, thereby establishing concordance between existing psychological models of emotions and Ekman's model traditionally used in affective computing applications.},
bibtype = {article},
author = {Starostenko, Oleg and Cortés, Ximena and Sánchez, J. Alfredo and Alarcon-Aquino, Vicente},
doi = {10.3233/AIS-140298},
journal = {Journal of Ambient Intelligence and Smart Environments},
number = {1}
}
{"_id":"eHzQj6G5jfHrtxTo3","bibbaseid":"starostenko-corts-snchez-alarconaquino-unobtrusiveemotionsensingandinterpretationinsmartenvironment-2015","downloads":0,"creationDate":"2018-11-16T03:10:01.166Z","title":"Unobtrusive emotion sensing and interpretation in smart environment","author_short":["Starostenko, O.","Cortés, X.","Sánchez, J., A.","Alarcon-Aquino, V."],"year":2015,"bibtype":"article","biburl":"https://bibbase.org/service/mendeley/940dd160-7d67-3a5f-b9f8-935da0571367","bibdata":{"title":"Unobtrusive emotion sensing and interpretation in smart environment","type":"article","year":"2015","keywords":"Affective computing applications,Facial expression recognition,Sensing basic and non-prototypical emotions","pages":"59-83","volume":"7","websites":"https://www.medra.org/servlet/aliasResolver?alias=iospress&doi=10.3233/AIS-140298","id":"618217e6-8aef-31c2-a28f-4c772b8129c4","created":"2022-08-29T17:42:38.879Z","file_attached":false,"profile_id":"940dd160-7d67-3a5f-b9f8-935da0571367","group_id":"92fccab2-8d44-33bc-b301-7b94bb18523c","last_modified":"2022-08-29T17:42:38.879Z","read":false,"starred":false,"authored":false,"confirmed":"true","hidden":false,"private_publication":false,"abstract":"Currently, a particular focus of human centered technology is in expanding traditional contextual sensing and smart processing capabilities of ubiquitous systems exploiting user's affective and emotional states to develop more natural communication between computing artefacts and users. This paper presents a smart environment of Web services that has been developed to integrate and manage different existing and new emotion sensing applications, which working together provide tracking and recognition of human affective state in real time. In addition, two emotion interpreters based on the proposed 6-FACS and Distance models have been developed. Both models operate with encoded facial deformations described either in terms of Ekman's Action Units or Facial Animation Parameters of MPEG-4 standards. Fuzzy inference system based on reasoning model implemented in a knowledge base has been used for quantitative measurement and recognition of three-level intensity of basic and non-prototypical facial expressions. Designed frameworks integrated to smart environment have been tested in order to evaluate capability of the proposed models to extract and classify facial expressions providing precision of interpretation of basic emotions in range of 65-96% and non-prototypical emotions in range of 55-65%. The conducted tests confirm that such basic as non-prototypical expressions may be composed by other basic emotions establishing in this way the concordance between existing psychological models of emotions and Ekman's model traditionally used by affective computing applications.","bibtype":"article","author":"Starostenko, Oleg and Cortés, Ximena and Sánchez, J. 
Afredo and Alarcon-Aquino, Vicente","doi":"10.3233/AIS-140298","journal":"Journal of Ambient Intelligence and Smart Environments","number":"1","bibtex":"@article{\n title = {Unobtrusive emotion sensing and interpretation in smart environment},\n type = {article},\n year = {2015},\n keywords = {Affective computing applications,Facial expression recognition,Sensing basic and non-prototypical emotions},\n pages = {59-83},\n volume = {7},\n websites = {https://www.medra.org/servlet/aliasResolver?alias=iospress&doi=10.3233/AIS-140298},\n id = {618217e6-8aef-31c2-a28f-4c772b8129c4},\n created = {2022-08-29T17:42:38.879Z},\n file_attached = {false},\n profile_id = {940dd160-7d67-3a5f-b9f8-935da0571367},\n group_id = {92fccab2-8d44-33bc-b301-7b94bb18523c},\n last_modified = {2022-08-29T17:42:38.879Z},\n read = {false},\n starred = {false},\n authored = {false},\n confirmed = {true},\n hidden = {false},\n private_publication = {false},\n abstract = {Currently, a particular focus of human centered technology is in expanding traditional contextual sensing and smart processing capabilities of ubiquitous systems exploiting user's affective and emotional states to develop more natural communication between computing artefacts and users. This paper presents a smart environment of Web services that has been developed to integrate and manage different existing and new emotion sensing applications, which working together provide tracking and recognition of human affective state in real time. In addition, two emotion interpreters based on the proposed 6-FACS and Distance models have been developed. Both models operate with encoded facial deformations described either in terms of Ekman's Action Units or Facial Animation Parameters of MPEG-4 standards. Fuzzy inference system based on reasoning model implemented in a knowledge base has been used for quantitative measurement and recognition of three-level intensity of basic and non-prototypical facial expressions. Designed frameworks integrated to smart environment have been tested in order to evaluate capability of the proposed models to extract and classify facial expressions providing precision of interpretation of basic emotions in range of 65-96% and non-prototypical emotions in range of 55-65%. The conducted tests confirm that such basic as non-prototypical expressions may be composed by other basic emotions establishing in this way the concordance between existing psychological models of emotions and Ekman's model traditionally used by affective computing applications.},\n bibtype = {article},\n author = {Starostenko, Oleg and Cortés, Ximena and Sánchez, J. 
Afredo and Alarcon-Aquino, Vicente},\n doi = {10.3233/AIS-140298},\n journal = {Journal of Ambient Intelligence and Smart Environments},\n number = {1}\n}","author_short":["Starostenko, O.","Cortés, X.","Sánchez, J., A.","Alarcon-Aquino, V."],"urls":{"Website":"https://www.medra.org/servlet/aliasResolver?alias=iospress&doi=10.3233/AIS-140298"},"biburl":"https://bibbase.org/service/mendeley/940dd160-7d67-3a5f-b9f8-935da0571367","bibbaseid":"starostenko-corts-snchez-alarconaquino-unobtrusiveemotionsensingandinterpretationinsmartenvironment-2015","role":"author","keyword":["Affective computing applications","Facial expression recognition","Sensing basic and non-prototypical emotions"],"metadata":{"authorlinks":{}},"downloads":0},"search_terms":["unobtrusive","emotion","sensing","interpretation","smart","environment","starostenko","cortés","sánchez","alarcon-aquino"],"keywords":["affective computing applications","facial expression recognition","sensing basic and non-prototypical emotions"],"authorIDs":[],"dataSources":["G3v3mPpwWPQxFKii5","ya2CyA73rpZseyrZ8","uTykyhFv6T7J2dX6z","2252seNhipfTmjEBQ"]}