Experiences in Building a Real-World Eating Recogniser. Sen, S., Subbaraju, V., Misra, A., Balan, R. K., & Lee, Y. In Proceedings of the 4th International on Workshop on Physical Analytics (WPA '17), pages 7-12, 2017. ACM. Website abstract bibtex In this paper, we describe the progressive design of the gesture recognition module of an automated food journaling system -- Annapurna. Annapurna runs on a smartwatch and utilises data from the inertial sensors to first identify eating gestures, and then captures food images which are presented to the user in the form of a food journal. We detail the lessons we learnt from multiple in-the-wild studies, and show how eating recognizer is refined to tackle challenges such as (i) high gestural diversity, and (ii) non-eating activities with similar gestural signatures. Annapurna is finally robust (identifying eating across a wide diversity in food content, eating styles and environments) and accurate (false-positive and false-negative rates of 6.5% and 3.3% respectively)
@inproceedings{Sen2017Experiences,
  author    = {Sen, Sougata and Subbaraju, Vigneshwaran and Misra, Archan and Balan, Rajesh K. and Lee, Youngki},
  title     = {Experiences in Building a Real-World Eating Recogniser},
  booktitle = {Proceedings of the 4th International on Workshop on Physical Analytics},
  series    = {WPA '17},
  year      = {2017},
  pages     = {7--12},
  publisher = {ACM},
  address   = {New York, NY, USA},
  doi       = {10.1145/3092305.3092306},
  keywords  = {activity-recognition, auracle, camera, eating, intake-detection, wearable, wrist},
  abstract  = {In this paper, we describe the progressive design of the gesture recognition module of an automated food journaling system -- Annapurna. Annapurna runs on a smartwatch and utilises data from the inertial sensors to first identify eating gestures, and then captures food images which are presented to the user in the form of a food journal. We detail the lessons we learnt from multiple in-the-wild studies, and show how eating recognizer is refined to tackle challenges such as (i) high gestural diversity, and (ii) non-eating activities with similar gestural signatures. Annapurna is finally robust (identifying eating across a wide diversity in food content, eating styles and environments) and accurate (false-positive and false-negative rates of 6.5% and 3.3% respectively)},
}
Downloads: 0
{"_id":"t8qnhCxa4dhJ8Js6o","bibbaseid":"sen-subbaraju-misra-balan-lee-experiencesinbuildingarealworldeatingrecogniser-2017","downloads":0,"creationDate":"2019-02-15T15:14:58.103Z","title":"Experiences in Building a Real-World Eating Recogniser","author_short":["Sen, S.","Subbaraju, V.","Misra, A.","Balan, R., K.","Lee, Y."],"year":2017,"bibtype":"inProceedings","biburl":null,"bibdata":{"title":"Experiences in Building a Real-World Eating Recogniser","type":"inProceedings","year":"2017","identifiers":"[object Object]","keywords":"activity-recognition,auracle,camera,eating,intake-detection,wearable,wrist","pages":"7-12","websites":"http://dx.doi.org/10.1145/3092305.3092306","publisher":"ACM","city":"New York, NY, USA","series":"WPA","id":"2551db97-3e68-302c-899d-68af0129ce2b","created":"2018-07-12T21:31:04.059Z","file_attached":false,"profile_id":"f954d000-ce94-3da6-bd26-b983145a920f","group_id":"b0b145a3-980e-3ad7-a16f-c93918c606ed","last_modified":"2018-07-12T21:31:04.059Z","read":false,"starred":false,"authored":false,"confirmed":"true","hidden":false,"citation_key":"Sen2017Experiences","source_type":"inproceedings","private_publication":false,"abstract":"In this paper, we describe the progressive design of the gesture recognition module of an automated food journaling system -- Annapurna. Annapurna runs on a smartwatch and utilises data from the inertial sensors to first identify eating gestures, and then captures food images which are presented to the user in the form of a food journal. We detail the lessons we learnt from multiple in-the-wild studies, and show how eating recognizer is refined to tackle challenges such as (i) high gestural diversity, and (ii) non-eating activities with similar gestural signatures. 
Annapurna is finally robust (identifying eating across a wide diversity in food content, eating styles and environments) and accurate (false-positive and false-negative rates of 6.5% and 3.3% respectively)","bibtype":"inProceedings","author":"Sen, Sougata and Subbaraju, Vigneshwaran and Misra, Archan and Balan, Rajesh K and Lee, Youngki","booktitle":"Proceedings of the 4th International on Workshop on Physical Analytics","bibtex":"@inProceedings{\n title = {Experiences in Building a Real-World Eating Recogniser},\n type = {inProceedings},\n year = {2017},\n identifiers = {[object Object]},\n keywords = {activity-recognition,auracle,camera,eating,intake-detection,wearable,wrist},\n pages = {7-12},\n websites = {http://dx.doi.org/10.1145/3092305.3092306},\n publisher = {ACM},\n city = {New York, NY, USA},\n series = {WPA},\n id = {2551db97-3e68-302c-899d-68af0129ce2b},\n created = {2018-07-12T21:31:04.059Z},\n file_attached = {false},\n profile_id = {f954d000-ce94-3da6-bd26-b983145a920f},\n group_id = {b0b145a3-980e-3ad7-a16f-c93918c606ed},\n last_modified = {2018-07-12T21:31:04.059Z},\n read = {false},\n starred = {false},\n authored = {false},\n confirmed = {true},\n hidden = {false},\n citation_key = {Sen2017Experiences},\n source_type = {inproceedings},\n private_publication = {false},\n abstract = {In this paper, we describe the progressive design of the gesture recognition module of an automated food journaling system -- Annapurna. Annapurna runs on a smartwatch and utilises data from the inertial sensors to first identify eating gestures, and then captures food images which are presented to the user in the form of a food journal. We detail the lessons we learnt from multiple in-the-wild studies, and show how eating recognizer is refined to tackle challenges such as (i) high gestural diversity, and (ii) non-eating activities with similar gestural signatures. 
Annapurna is finally robust (identifying eating across a wide diversity in food content, eating styles and environments) and accurate (false-positive and false-negative rates of 6.5% and 3.3% respectively)},\n bibtype = {inProceedings},\n author = {Sen, Sougata and Subbaraju, Vigneshwaran and Misra, Archan and Balan, Rajesh K and Lee, Youngki},\n booktitle = {Proceedings of the 4th International on Workshop on Physical Analytics}\n}","author_short":["Sen, S.","Subbaraju, V.","Misra, A.","Balan, R., K.","Lee, Y."],"urls":{"Website":"http://dx.doi.org/10.1145/3092305.3092306"},"bibbaseid":"sen-subbaraju-misra-balan-lee-experiencesinbuildingarealworldeatingrecogniser-2017","role":"author","keyword":["activity-recognition","auracle","camera","eating","intake-detection","wearable","wrist"],"downloads":0},"search_terms":["experiences","building","real","world","eating","recogniser","sen","subbaraju","misra","balan","lee"],"keywords":["activity-recognition","auracle","camera","eating","intake-detection","wearable","wrist"],"authorIDs":[]}