Cognitive learning, monitoring and assistance of industrial workflows using egocentric sensor networks. Bleser, G., Damen, D., Behera, A., Hendeby, G., Mura, K., Miezal, M., Gee, A., Petersen, N., Maçães, G., Domingues, H., Gorecky, D., Almeida, L., Mayol-Cuevas, W., Calway, A., Cohn, A. G., Hogg, D. C., & Stricker, D. PLoS ONE, June, 2015. Publisher: Public Library of Science
Paper doi abstract bibtex Today, the workflows that are involved in industrial assembly and production activities are becoming increasingly complex. To efficiently and safely perform these workflows is demanding on the workers, in particular when it comes to infrequent or repetitive tasks. This burden on the workers can be eased by introducing smart assistance systems. This article presents a scalable concept and an integrated system demonstrator designed for this purpose. The basic idea is to learn workflows from observing multiple expert operators and then transfer the learnt workflow models to novice users. Being entirely learning-based, the proposed system can be applied to various tasks and domains. The above idea has been realized in a prototype, which combines components pushing the state of the art of hardware and software designed with interoperability in mind. The emphasis of this article is on the algorithms developed for the prototype: 1) fusion of inertial and visual sensor information from an on-body sensor network (BSN) to robustly track the user’s pose in magnetically polluted environments; 2) learning-based computer vision algorithms to map the workspace, localize the sensor with respect to the workspace and capture objects, even as they are carried; 3) domain-independent and robust workflow recovery and monitoring algorithms based on spatiotemporal pairwise relations deduced from object and user movement with respect to the scene; and 4) context-sensitive augmented reality (AR) user feedback using a head-mounted display (HMD). A distinguishing key feature of the developed algorithms is that they all operate solely on data from the on-body sensor network and that no external instrumentation is needed. The feasibility of the chosen approach for the complete action-perception-feedback loop is demonstrated on three increasingly complex datasets representing manual industrial tasks. These limited size datasets indicate and highlight the potential of the chosen technology as a combined entity as well as point out limitations of the system. (PsycInfo Database Record (c) 2020 APA, all rights reserved)
@article{bleser_cognitive_2015,
	title = {Cognitive learning, monitoring and assistance of industrial workflows using egocentric sensor networks},
	volume = {10},
	issn = {1932-6203},
	url = {https://search.ebscohost.com/login.aspx?direct=true&AuthType=ip,uid&db=psyh&AN=2015-58989-001&site=ehost-live},
	doi = {10.1371/journal.pone.0127769},
	abstract = {Today, the workflows that are involved in industrial assembly and production activities are becoming increasingly complex. To efficiently and safely perform these workflows is demanding on the workers, in particular when it comes to infrequent or repetitive tasks. This burden on the workers can be eased by introducing smart assistance systems. This article presents a scalable concept and an integrated system demonstrator designed for this purpose. The basic idea is to learn workflows from observing multiple expert operators and then transfer the learnt workflow models to novice users. Being entirely learning-based, the proposed system can be applied to various tasks and domains. The above idea has been realized in a prototype, which combines components pushing the state of the art of hardware and software designed with interoperability in mind. The emphasis of this article is on the algorithms developed for the prototype: 1) fusion of inertial and visual sensor information from an on-body sensor network (BSN) to robustly track the user's pose in magnetically polluted environments; 2) learning-based computer vision algorithms to map the workspace, localize the sensor with respect to the workspace and capture objects, even as they are carried; 3) domain-independent and robust workflow recovery and monitoring algorithms based on spatiotemporal pairwise relations deduced from object and user movement with respect to the scene; and 4) context-sensitive augmented reality (AR) user feedback using a head-mounted display (HMD). A distinguishing key feature of the developed algorithms is that they all operate solely on data from the on-body sensor network and that no external instrumentation is needed. The feasibility of the chosen approach for the complete action-perception-feedback loop is demonstrated on three increasingly complex datasets representing manual industrial tasks. These limited size datasets indicate and highlight the potential of the chosen technology as a combined entity as well as point out limitations of the system.},
	number = {6},
	journal = {PLoS ONE},
	author = {Bleser, Gabriele and Damen, Dima and Behera, Ardhendu and Hendeby, Gustaf and Mura, Katharina and Miezal, Markus and Gee, Andrew and Petersen, Nils and Maçães, Gustavo and Domingues, Hugo and Gorecky, Dominic and Almeida, Luis and Mayol-Cuevas, Walterio and Calway, Andrew and Cohn, Anthony G. and Hogg, David C. and Stricker, Didier},
	month = jun,
	year = {2015},
	publisher = {Public Library of Science},
	keywords = {Algorithms, Cognition, Cognitive Style, Egocentrism, Expert Systems, Humans, Imaging, Three-Dimensional, Learning, Machine Learning, Occupational Health, Occupational Medicine, Systems Integration, User-Computer Interface, Workflow, Working Conditions, cognitive learning, egocentric sensor networks, expert models, industrial workflows, integrated system, learning-based computer vision algorithms},
}
Downloads: 0
{"_id":"Z75BLJ9ZAh6wz4pPm","bibbaseid":"bleser-damen-behera-hendeby-mura-miezal-gee-petersen-etal-cognitivelearningmonitoringandassistanceofindustrialworkflowsusingegocentricsensornetworks-2015","author_short":["Bleser, G.","Damen, D.","Behera, A.","Hendeby, G.","Mura, K.","Miezal, M.","Gee, A.","Petersen, N.","Maçães, G.","Domingues, H.","Gorecky, D.","Almeida, L.","Mayol-Cuevas, W.","Calway, A.","Cohn, A. G.","Hogg, D. C.","Stricker, D."],"bibdata":{"bibtype":"article","type":"article","title":"Cognitive learning, monitoring and assistance of industrial workflows using egocentric sensor networks","volume":"10","issn":"1932-6203","url":"https://search.ebscohost.com/login.aspx?direct=true&AuthType=ip,uid&db=psyh&AN=2015-58989-001&site=ehost-live","doi":"10.1371/journal.pone.0127769","abstract":"Today, the workflows that are involved in industrial assembly and production activities are becoming increasingly complex. To efficiently and safely perform these workflows is demanding on the workers, in particular when it comes to infrequent or repetitive tasks. This burden on the workers can be eased by introducing smart assistance systems. This article presents a scalable concept and an integrated system demonstrator designed for this purpose. The basic idea is to learn workflows from observing multiple expert operators and then transfer the learnt workflow models to novice users. Being entirely learning-based, the proposed system can be applied to various tasks and domains. The above idea has been realized in a prototype, which combines components pushing the state of the art of hardware and software designed with interoperability in mind. 
The emphasis of this article is on the algorithms developed for the prototype: 1) fusion of inertial and visual sensor information from an on-body sensor network (BSN) to robustly track the user’s pose in magnetically polluted environments; 2) learning-based computer vision algorithms to map the workspace, localize the sensor with respect to the workspace and capture objects, even as they are carried; 3) domain-independent and robust workflow recovery and monitoring algorithms based on spatiotemporal pairwise relations deduced from object and user movement with respect to the scene; and 4) context-sensitive augmented reality (AR) user feedback using a head-mounted display (HMD). A distinguishing key feature of the developed algorithms is that they all operate solely on data from the on-body sensor network and that no external instrumentation is needed. The feasibility of the chosen approach for the complete actionperception- feedback loop is demonstrated on three increasingly complex datasets representing manual industrial tasks. These limited size datasets indicate and highlight the potential of the chosen technology as a combined entity as well as point out limitations of the system. 
(PsycInfo Database Record (c) 2020 APA, all rights reserved)","number":"6","journal":"PLoS ONE","author":[{"propositions":[],"lastnames":["Bleser"],"firstnames":["Gabriele"],"suffixes":[]},{"propositions":[],"lastnames":["Damen"],"firstnames":["Dima"],"suffixes":[]},{"propositions":[],"lastnames":["Behera"],"firstnames":["Ardhendu"],"suffixes":[]},{"propositions":[],"lastnames":["Hendeby"],"firstnames":["Gustaf"],"suffixes":[]},{"propositions":[],"lastnames":["Mura"],"firstnames":["Katharina"],"suffixes":[]},{"propositions":[],"lastnames":["Miezal"],"firstnames":["Markus"],"suffixes":[]},{"propositions":[],"lastnames":["Gee"],"firstnames":["Andrew"],"suffixes":[]},{"propositions":[],"lastnames":["Petersen"],"firstnames":["Nils"],"suffixes":[]},{"propositions":[],"lastnames":["Maçães"],"firstnames":["Gustavo"],"suffixes":[]},{"propositions":[],"lastnames":["Domingues"],"firstnames":["Hugo"],"suffixes":[]},{"propositions":[],"lastnames":["Gorecky"],"firstnames":["Dominic"],"suffixes":[]},{"propositions":[],"lastnames":["Almeida"],"firstnames":["Luis"],"suffixes":[]},{"propositions":[],"lastnames":["Mayol-Cuevas"],"firstnames":["Walterio"],"suffixes":[]},{"propositions":[],"lastnames":["Calway"],"firstnames":["Andrew"],"suffixes":[]},{"propositions":[],"lastnames":["Cohn"],"firstnames":["Anthony","G."],"suffixes":[]},{"propositions":[],"lastnames":["Hogg"],"firstnames":["David","C."],"suffixes":[]},{"propositions":[],"lastnames":["Stricker"],"firstnames":["Didier"],"suffixes":[]}],"month":"June","year":"2015","note":"Publisher: Public Library of Science","keywords":"Algorithms, Cognition, Cognitive Style, Egocentrism, Expert Systems, Humans, Imaging, Three-Dimensional, Learning, Machine Learning, Occupational Health, Occupational Medicine, Systems Integration, User-Computer Interface, Workflow, Working Conditions, cognitive learning, egocentric sensor networks, expert models, industrial workflows, integrated system, learning-based computer vision 
algorithms","bibtex":"@article{bleser_cognitive_2015,\n\ttitle = {Cognitive learning, monitoring and assistance of industrial workflows using egocentric sensor networks},\n\tvolume = {10},\n\tissn = {1932-6203},\n\turl = {https://search.ebscohost.com/login.aspx?direct=true&AuthType=ip,uid&db=psyh&AN=2015-58989-001&site=ehost-live},\n\tdoi = {10.1371/journal.pone.0127769},\n\tabstract = {Today, the workflows that are involved in industrial assembly and production activities are becoming increasingly complex. To efficiently and safely perform these workflows is demanding on the workers, in particular when it comes to infrequent or repetitive tasks. This burden on the workers can be eased by introducing smart assistance systems. This article presents a scalable concept and an integrated system demonstrator designed for this purpose. The basic idea is to learn workflows from observing multiple expert operators and then transfer the learnt workflow models to novice users. Being entirely learning-based, the proposed system can be applied to various tasks and domains. The above idea has been realized in a prototype, which combines components pushing the state of the art of hardware and software designed with interoperability in mind. The emphasis of this article is on the algorithms developed for the prototype: 1) fusion of inertial and visual sensor information from an on-body sensor network (BSN) to robustly track the user’s pose in magnetically polluted environments; 2) learning-based computer vision algorithms to map the workspace, localize the sensor with respect to the workspace and capture objects, even as they are carried; 3) domain-independent and robust workflow recovery and monitoring algorithms based on spatiotemporal pairwise relations deduced from object and user movement with respect to the scene; and 4) context-sensitive augmented reality (AR) user feedback using a head-mounted display (HMD). 
A distinguishing key feature of the developed algorithms is that they all operate solely on data from the on-body sensor network and that no external instrumentation is needed. The feasibility of the chosen approach for the complete actionperception- feedback loop is demonstrated on three increasingly complex datasets representing manual industrial tasks. These limited size datasets indicate and highlight the potential of the chosen technology as a combined entity as well as point out limitations of the system. (PsycInfo Database Record (c) 2020 APA, all rights reserved)},\n\tnumber = {6},\n\tjournal = {PLoS ONE},\n\tauthor = {Bleser, Gabriele and Damen, Dima and Behera, Ardhendu and Hendeby, Gustaf and Mura, Katharina and Miezal, Markus and Gee, Andrew and Petersen, Nils and Maçães, Gustavo and Domingues, Hugo and Gorecky, Dominic and Almeida, Luis and Mayol-Cuevas, Walterio and Calway, Andrew and Cohn, Anthony G. and Hogg, David C. and Stricker, Didier},\n\tmonth = jun,\n\tyear = {2015},\n\tnote = {Publisher: Public Library of Science},\n\tkeywords = {Algorithms, Cognition, Cognitive Style, Egocentrism, Expert Systems, Humans, Imaging, Three-Dimensional, Learning, Machine Learning, Occupational Health, Occupational Medicine, Systems Integration, User-Computer Interface, Workflow, Working Conditions, cognitive learning, egocentric sensor networks, expert models, industrial workflows, integrated system, learning-based computer vision algorithms},\n}\n\n\n\n","author_short":["Bleser, G.","Damen, D.","Behera, A.","Hendeby, G.","Mura, K.","Miezal, M.","Gee, A.","Petersen, N.","Maçães, G.","Domingues, H.","Gorecky, D.","Almeida, L.","Mayol-Cuevas, W.","Calway, A.","Cohn, A. G.","Hogg, D. 
C.","Stricker, D."],"key":"bleser_cognitive_2015","id":"bleser_cognitive_2015","bibbaseid":"bleser-damen-behera-hendeby-mura-miezal-gee-petersen-etal-cognitivelearningmonitoringandassistanceofindustrialworkflowsusingegocentricsensornetworks-2015","role":"author","urls":{"Paper":"https://search.ebscohost.com/login.aspx?direct=true&AuthType=ip,uid&db=psyh&AN=2015-58989-001&site=ehost-live"},"keyword":["Algorithms","Cognition","Cognitive Style","Egocentrism","Expert Systems","Humans","Imaging","Three-Dimensional","Learning","Machine Learning","Occupational Health","Occupational Medicine","Systems Integration","User-Computer Interface","Workflow","Working Conditions","cognitive learning","egocentric sensor networks","expert models","industrial workflows","integrated system","learning-based computer vision algorithms"],"metadata":{"authorlinks":{}},"html":""},"bibtype":"article","biburl":"https://bibbase.org/zotero/saurabhr","dataSources":["nxjWwW7fWbb5tfpKz"],"keywords":["algorithms","cognition","cognitive style","egocentrism","expert systems","humans","imaging","three-dimensional","learning","machine learning","occupational health","occupational medicine","systems integration","user-computer interface","workflow","working conditions","cognitive learning","egocentric sensor networks","expert models","industrial workflows","integrated system","learning-based computer vision algorithms"],"search_terms":["cognitive","learning","monitoring","assistance","industrial","workflows","using","egocentric","sensor","networks","bleser","damen","behera","hendeby","mura","miezal","gee","petersen","maçães","domingues","gorecky","almeida","mayol-cuevas","calway","cohn","hogg","stricker"],"title":"Cognitive learning, monitoring and assistance of industrial workflows using egocentric sensor networks","year":2015}