Zhang, H. & Boyles, M. J. Visual exploration and analysis of human-robot interaction rules. In Wong, P. C., Kao, D. L., Hao, M. C., Chen, C., & Healey, C. G., editors, IS&T/SPIE Electronic Imaging Conference, page 86540E, Burlingame, CA, February 2013. Society of Photo-Optical Instrumentation Engineers (SPIE). doi: 10.1117/12.2002536.

Abstract: We present a novel interaction paradigm for the visual exploration, manipulation, and analysis of human-robot interaction (HRI) rules. Our development is implemented as a visual programming interface and exploits key techniques from information visualization and visual data mining to facilitate interaction design and knowledge discovery. HRI is often concerned with manipulating multi-modal signals, events, and commands that form various kinds of interaction rules, and depicting, manipulating, and sharing such design-level information is a compelling challenge. Furthermore, the closed loop between HRI programming and knowledge discovery from empirical data is a relatively long cycle, which makes design-level verification nearly impossible in the early phases. In our work, we exploit a drag-and-drop user interface and visual languages to depict the responsive behaviors of social participants as they interact with their partners. For our principal test case of gaze-contingent HRI interfaces, this permits us to program and debug the robots' responsive behaviors through a graphical data-flow chart editor. Additional program manipulation interfaces further improve the programming experience: by simulating the interaction dynamics between a human and a robot behavior model, we allow researchers to generate, trace, and study perception-action dynamics in a social interaction simulation so that they can verify and refine their designs. Finally, we extend our visual manipulation environment with a visual data-mining tool that lets users investigate phenomena such as joint attention and sequential behavioral patterns across multiple multi-modal data streams. We have created instances of HRI interfaces to evaluate and refine our development paradigm. As far as we are aware, this paper reports the first program manipulation paradigm that integrates visual programming interfaces, information visualization, and visual data mining to facilitate designing, comprehending, and evaluating HRI interfaces.
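To make the data-flow formulation concrete, the sketch below is a minimal, hypothetical illustration of a gaze-contingent rule of the kind the abstract describes: a gaze-sensor stream feeds a filter node, and a stable fixation triggers a responsive robot behavior. The names (GazeEvent, DwellFilter, RobotGaze) and the 0.5 s dwell threshold are invented for illustration; this is not the authors' implementation, which assembles such graphs by drag-and-drop rather than in code.

```python
# A minimal, hypothetical sketch of a gaze-contingent HRI rule as a
# data-flow node graph. All names here are illustrative assumptions,
# not the paper's API.
from dataclasses import dataclass

@dataclass
class GazeEvent:
    t: float          # timestamp in seconds
    target: str       # object id the human is fixating

class DwellFilter:
    """Passes a gaze target downstream once it has been fixated
    for at least `dwell` seconds (debounces raw gaze samples)."""
    def __init__(self, dwell: float = 0.5):
        self.dwell = dwell
        self._target, self._since = None, 0.0

    def push(self, ev: GazeEvent):
        if ev.target != self._target:
            self._target, self._since = ev.target, ev.t
        elif ev.t - self._since >= self.dwell:
            return self._target   # stable fixation: fire the outgoing edge
        return None

class RobotGaze:
    """Sink node: command the robot to share attention on a target."""
    def push(self, target: str):
        print(f"robot: look at {target}")   # stand-in for a motor command

# Wire the graph: gaze sensor -> dwell filter -> robot gaze sink.
filt, robot = DwellFilter(dwell=0.5), RobotGaze()
for ev in [GazeEvent(0.0, "cup"), GazeEvent(0.3, "cup"), GazeEvent(0.6, "cup")]:
    if (tgt := filt.push(ev)) is not None:
        robot.push(tgt)           # joint attention established on "cup"
```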
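The abstract also mentions mining joint attention and sequential behavioral patterns from multiple multi-modal streams. The toy sketch below, again with invented data and under the assumption that the two streams are time-aligned samples, shows the flavor of such an analysis; real sequential-pattern mining is considerably more involved than this bigram count.

```python
# A minimal, hypothetical sketch of the kind of analysis the paper's
# visual data-mining tool supports: detecting joint-attention episodes
# and frequent sequential patterns across two event streams.
# The data and logic are illustrative, not from the paper.
from collections import Counter

# (timestamp, attended target) samples for each agent
human = [(0.0, "cup"), (0.5, "cup"), (1.0, "ball"), (1.5, "ball")]
robot = [(0.0, "ball"), (0.5, "cup"), (1.0, "ball"), (1.5, "cup")]

# Joint attention: moments where both agents attend the same target.
joint = [(th, a) for (th, a), (tr, b) in zip(human, robot) if a == b]
print("joint attention at:", joint)   # [(0.5, 'cup'), (1.0, 'ball')]

# Frequent sequential patterns: count length-2 subsequences (bigrams)
# of the human's attention stream, a toy stand-in for sequence mining.
targets = [a for _, a in human]
bigrams = Counter(zip(targets, targets[1:]))
print("frequent transitions:", bigrams.most_common(2))
```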
@inproceedings{Zhang2013c,
  title     = {Visual exploration and analysis of human-robot interaction rules},
  author    = {Zhang, Hui and Boyles, Michael J.},
  editor    = {Wong, Pak Chung and Kao, David L. and Hao, Ming C. and Chen, Chaomei and Healey, Christopher G.},
  booktitle = {IS\&T/SPIE Electronic Imaging Conference},
  publisher = {Society of Photo-Optical Instrumentation Engineers (SPIE)},
  address   = {Burlingame, CA},
  year      = {2013},
  month     = feb,
  pages     = {86540E},
  doi       = {10.1117/12.2002536},
  url       = {http://hdl.handle.net/2022/15307},
}
{"_id":"ePFBt4mcgpzfQTzYn","bibbaseid":"zhang-boyles-visualexplorationandanalysisofhumanrobotinteractionrules-2013","downloads":0,"creationDate":"2018-03-12T19:10:27.094Z","title":"Visual exploration and analysis of human-robot interaction rules","author_short":["Zhang, H.","Boyles, M., J."],"year":2013,"bibtype":"inproceedings","biburl":"https://bibbase.org/service/mendeley/42d295c0-0737-38d6-8b43-508cab6ea85d","bibdata":{"title":"Visual exploration and analysis of human-robot interaction rules","type":"inproceedings","year":"2013","pages":"86540E","websites":"http://hdl.handle.net/2022/15307,http://proceedings.spiedigitallibrary.org/proceeding.aspx?doi=10.1117/12.2002536","month":"2","publisher":"Society of Photo-Optical Instrumentation Engineers (SPIE)","day":"4","city":"Burlingame, CA","id":"db891137-1b44-3cd1-9105-1126563bddf6","created":"2019-10-01T17:20:38.925Z","file_attached":false,"profile_id":"42d295c0-0737-38d6-8b43-508cab6ea85d","last_modified":"2020-05-11T14:43:29.460Z","read":false,"starred":false,"authored":"true","confirmed":"true","hidden":false,"citation_key":"Zhang2013c","source_type":"misc","folder_uuids":"22c3b665-9e84-4884-8172-710aa9082eaf","private_publication":false,"abstract":"We present a novel interaction paradigm for the visual exploration, manipulation and analysis of human-robot interaction (HRI) rules; our development is implemented using a visual programming interface and exploits key techniques drawn from both information visualization and visual data mining to facilitate the interaction design and knowledge discovery process. HRI is often concerned with manipulations of multi-modal signals, events, and commands that form various kinds of interaction rules. Depicting, manipulating and sharing such design-level information is a compelling challenge. Furthermore, the closed loop between HRI programming and knowledge discovery from empirical data is a relatively long cycle. This, in turn, makes design-level verification nearly impossible to perform in an earlier phase. In our work, we exploit a drag-and-drop user interface and visual languages to support depicting responsive behaviors from social participants when they interact with their partners. For our principal test case of gaze-contingent HRI interfaces, this permits us to program and debug the robots’ responsive behaviors through a graphical data-flow chart editor. We exploit additional program manipulation interfaces to provide still further improvement to our programming experience: by simulating the interaction dynamics between a human and a robot behavior model, we allow the researchers to generate, trace and study the perception-action dynamics with a social interaction simulation to verify and refine their designs. Finally, we extend our visual manipulation environment with a visual data-mining tool that allows the user to investigate interesting phenomena such as joint attention and sequential behavioral patterns from multiple multi-modal data streams. We have created instances of HRI interfaces to evaluate and refine our development paradigm. As far as we are aware, this paper reports the first program manipulation paradigm that integrates visual programming interfaces, information visualization, and visual data mining methods to facilitate designing, comprehending, and evaluating HRI interfaces. © (2013) COPYRIGHT Society of Photo-Optical Instrumentation Engineers (SPIE). 
Downloading of the abstract is permitted for personal use only.","bibtype":"inproceedings","author":"Zhang, Hui and Boyles, Michael J.","editor":"Wong, Pak Chung and Kao, David L. and Hao, Ming C. and Chen, Chaomei and Healey, Christopher G.","doi":"10.1117/12.2002536","booktitle":"IS&T/SPIE Electronic Imaging conference","bibtex":"@inproceedings{\n title = {Visual exploration and analysis of human-robot interaction rules},\n type = {inproceedings},\n year = {2013},\n pages = {86540E},\n websites = {http://hdl.handle.net/2022/15307,http://proceedings.spiedigitallibrary.org/proceeding.aspx?doi=10.1117/12.2002536},\n month = {2},\n publisher = {Society of Photo-Optical Instrumentation Engineers (SPIE)},\n day = {4},\n city = {Burlingame, CA},\n id = {db891137-1b44-3cd1-9105-1126563bddf6},\n created = {2019-10-01T17:20:38.925Z},\n file_attached = {false},\n profile_id = {42d295c0-0737-38d6-8b43-508cab6ea85d},\n last_modified = {2020-05-11T14:43:29.460Z},\n read = {false},\n starred = {false},\n authored = {true},\n confirmed = {true},\n hidden = {false},\n citation_key = {Zhang2013c},\n source_type = {misc},\n folder_uuids = {22c3b665-9e84-4884-8172-710aa9082eaf},\n private_publication = {false},\n abstract = {We present a novel interaction paradigm for the visual exploration, manipulation and analysis of human-robot interaction (HRI) rules; our development is implemented using a visual programming interface and exploits key techniques drawn from both information visualization and visual data mining to facilitate the interaction design and knowledge discovery process. HRI is often concerned with manipulations of multi-modal signals, events, and commands that form various kinds of interaction rules. Depicting, manipulating and sharing such design-level information is a compelling challenge. Furthermore, the closed loop between HRI programming and knowledge discovery from empirical data is a relatively long cycle. This, in turn, makes design-level verification nearly impossible to perform in an earlier phase. In our work, we exploit a drag-and-drop user interface and visual languages to support depicting responsive behaviors from social participants when they interact with their partners. For our principal test case of gaze-contingent HRI interfaces, this permits us to program and debug the robots’ responsive behaviors through a graphical data-flow chart editor. We exploit additional program manipulation interfaces to provide still further improvement to our programming experience: by simulating the interaction dynamics between a human and a robot behavior model, we allow the researchers to generate, trace and study the perception-action dynamics with a social interaction simulation to verify and refine their designs. Finally, we extend our visual manipulation environment with a visual data-mining tool that allows the user to investigate interesting phenomena such as joint attention and sequential behavioral patterns from multiple multi-modal data streams. We have created instances of HRI interfaces to evaluate and refine our development paradigm. As far as we are aware, this paper reports the first program manipulation paradigm that integrates visual programming interfaces, information visualization, and visual data mining methods to facilitate designing, comprehending, and evaluating HRI interfaces. © (2013) COPYRIGHT Society of Photo-Optical Instrumentation Engineers (SPIE). 
Downloading of the abstract is permitted for personal use only.},\n bibtype = {inproceedings},\n author = {Zhang, Hui and Boyles, Michael J.},\n editor = {Wong, Pak Chung and Kao, David L. and Hao, Ming C. and Chen, Chaomei and Healey, Christopher G.},\n doi = {10.1117/12.2002536},\n booktitle = {IS&T/SPIE Electronic Imaging conference}\n}","author_short":["Zhang, H.","Boyles, M., J."],"editor_short":["Wong, P., C.","Kao, D., L.","Hao, M., C.","Chen, C.","Healey, C., G."],"urls":{"Website":"http://hdl.handle.net/2022/15307,http://proceedings.spiedigitallibrary.org/proceeding.aspx?doi=10.1117/12.2002536"},"biburl":"https://bibbase.org/service/mendeley/42d295c0-0737-38d6-8b43-508cab6ea85d","bibbaseid":"zhang-boyles-visualexplorationandanalysisofhumanrobotinteractionrules-2013","role":"author","metadata":{"authorlinks":{}},"downloads":0},"search_terms":["visual","exploration","analysis","human","robot","interaction","rules","zhang","boyles"],"keywords":[],"authorIDs":[],"dataSources":["zgahneP4uAjKbudrQ","ya2CyA73rpZseyrZ8","2252seNhipfTmjEBQ"]}