Multimodal Dialogue in Mobile Local Search. Ehlen, P. & Johnston, M. In Proceedings of the 14th ACM International Conference on Multimodal Interaction, of ICMI '12, pages 303--304, New York, NY, USA, 2012. ACM. Paper doi abstract bibtex Speak4itSM is a multimodal, mobile search application that provides information about local businesses. Users can combine speech and touch input simultaneously to make search queries or commands to the application. For example, a user might say, "gas stations", while simultaneously tracing a route on a touchscreen. In this demonstration, we describe the extension of our multimodal semantic processing architecture and application from a one-shot query system to a multimodal dialogue system that tracks dialogue state over multiple turns. We illustrate the capabilities and limitations of an information-state-based approach to multimodal interpretation. We provide interactive demonstrations of Speak4it on a tablet and a smartphone, and explain the challenges of supporting true multimodal interaction in a deployed mobile service.
@inproceedings{ehlen_multimodal_2012,
	author    = {Ehlen, Patrick and Johnston, Michael},
	title     = {Multimodal Dialogue in Mobile Local Search},
	booktitle = {Proceedings of the 14th {ACM} International Conference on Multimodal Interaction},
	series    = {{ICMI} '12},
	year      = {2012},
	pages     = {303--304},
	publisher = {ACM},
	address   = {New York, NY, USA},
	isbn      = {978-1-4503-1467-1},
	doi       = {10.1145/2388676.2388741},
	url       = {https://doi.org/10.1145/2388676.2388741},
	urldate   = {2014-06-05},
	abstract  = {Speak4itSM is a multimodal, mobile search application that provides information about local businesses. Users can combine speech and touch input simultaneously to make search queries or commands to the application. For example, a user might say, "gas stations", while simultaneously tracing a route on a touchscreen. In this demonstration, we describe the extension of our multimodal semantic processing architecture and application from a one-shot query system to a multimodal dialogue system that tracks dialogue state over multiple turns. We illustrate the capabilities and limitations of an information-state-based approach to multimodal interpretation. We provide interactive demonstrations of Speak4it on a tablet and a smartphone, and explain the challenges of supporting true multimodal interaction in a deployed mobile service.},
}
Downloads: 0
{"_id":"pPfEcmPz2Xo43aMzo","bibbaseid":"ehlen-johnston-multimodaldialogueinmobilelocalsearch-2012","downloads":0,"creationDate":"2016-10-05T13:48:42.844Z","title":"Multimodal Dialogue in Mobile Local Search","author_short":["Ehlen, P.","Johnston, M."],"year":2012,"bibtype":"inproceedings","biburl":"http://bibbase.org/zotero/alanlivio","bibdata":{"bibtype":"inproceedings","type":"inproceedings","address":"New York, NY, USA","series":"ICMI '12","title":"Multimodal Dialogue in Mobile Local Search","isbn":"978-1-4503-1467-1","url":"http://doi.acm.org/10.1145/2388676.2388741","doi":"10.1145/2388676.2388741","abstract":"Speak4itSM is a multimodal, mobile search application that provides information about local businesses. Users can combine speech and touch input simultaneously to make search queries or commands to the application. For example, a user might say, \"gas stations\", while simultaneously tracing a route on a touchscreen. In this demonstration, we describe the extension of our multimodal semantic processing architecture and application from a one-shot query system to a multimodal dialogue system that tracks dialogue state over multiple turns. We illustrate the capabilities and limitations of an information-state-based approach to multimodal interpretation. 
We provide interactive demonstrations of Speak4it on a tablet and a smartphone, and explain the challenges of supporting true multimodal interaction in a deployed mobile service.","urldate":"2014-06-05TZ","booktitle":"Proceedings of the 14th ACM International Conference on Multimodal Interaction","publisher":"ACM","author":[{"propositions":[],"lastnames":["Ehlen"],"firstnames":["Patrick"],"suffixes":[]},{"propositions":[],"lastnames":["Johnston"],"firstnames":["Michael"],"suffixes":[]}],"year":"2012","pages":"303--304","bibtex":"@inproceedings{ehlen_multimodal_2012,\n\taddress = {New York, NY, USA},\n\tseries = {{ICMI} '12},\n\ttitle = {Multimodal {Dialogue} in {Mobile} {Local} {Search}},\n\tisbn = {978-1-4503-1467-1},\n\turl = {http://doi.acm.org/10.1145/2388676.2388741},\n\tdoi = {10.1145/2388676.2388741},\n\tabstract = {Speak4itSM is a multimodal, mobile search application that provides information about local businesses. Users can combine speech and touch input simultaneously to make search queries or commands to the application. For example, a user might say, \"gas stations\", while simultaneously tracing a route on a touchscreen. In this demonstration, we describe the extension of our multimodal semantic processing architecture and application from a one-shot query system to a multimodal dialogue system that tracks dialogue state over multiple turns. We illustrate the capabilities and limitations of an information-state-based approach to multimodal interpretation. 
We provide interactive demonstrations of Speak4it on a tablet and a smartphone, and explain the challenges of supporting true multimodal interaction in a deployed mobile service.},\n\turldate = {2014-06-05TZ},\n\tbooktitle = {Proceedings of the 14th {ACM} {International} {Conference} on {Multimodal} {Interaction}},\n\tpublisher = {ACM},\n\tauthor = {Ehlen, Patrick and Johnston, Michael},\n\tyear = {2012},\n\tpages = {303--304}\n}\n\n","author_short":["Ehlen, P.","Johnston, M."],"key":"ehlen_multimodal_2012","id":"ehlen_multimodal_2012","bibbaseid":"ehlen-johnston-multimodaldialogueinmobilelocalsearch-2012","role":"author","urls":{"Paper":"http://doi.acm.org/10.1145/2388676.2388741"},"downloads":0},"search_terms":["multimodal","dialogue","mobile","local","search","ehlen","johnston"],"keywords":[],"authorIDs":[],"dataSources":["tudya6YojbqEiF783"]}