Extending NCL to Support Multiuser and Multimodal Interactions. Guedes, Á. L., Azevedo, R. G. d. A., Colcher, S., & Barbosa, S. D. In Proceedings of the 22nd Brazilian Symposium on Multimedia and the Web, of Webmedia '16, pages 39–46, New York, NY, USA, 2016. ACM. Acceptance ratio: 30%
Paper doi abstract bibtex 35 downloads Recent advances in technologies for speech, touch and gesture recognition have given rise to a new class of user interfaces that does not only explore multiple modalities but also allows for multiple interacting users. Even so, current declarative multimedia languages — e.g., HTML, SMIL, and NCL — support only limited forms of user input (mainly keyboard and mouse) for a single user. In this paper, we aim at studying how the NCL multimedia language could take advantage of those new recognition technologies. To do so, we revisit the model behind NCL, named NCM (Nested Context Model), and extend it with first-class concepts supporting multiuser and multimodal features. To evaluate our approach, we instantiate the proposal and discuss some usage scenarios, developed as NCL applications with our extended features.
@inproceedings{2016_11_guedes,
  author    = {Guedes, {\'A}lan L. V. and Azevedo, Roberto Gerson de Albuquerque and
               Colcher, S{\'e}rgio and Barbosa, Simone D. J.},
  title     = {Extending {NCL} to Support Multiuser and Multimodal Interactions},
  booktitle = {Proceedings of the 22nd Brazilian Symposium on Multimedia and the
               Web},
  series    = {Webmedia '16},
  year      = {2016},
  isbn      = {978-1-4503-4512-5},
  location  = {Teresina, Piau{\'\i}, Brazil},
  pages     = {39--46},
  numpages  = {8},
  url       = {https://doi.org/10.1145/2976796.2976869},
  doi       = {10.1145/2976796.2976869},
  acmid     = {2976869},
  publisher = {ACM},
  address   = {New York, NY, USA},
  keywords  = {NCL, ginga-NCL, multimedia languages, multimodal interactions,
               multiuser interactions, nested context language},
  abstract  = {Recent advances in technologies for speech, touch and gesture
               recognition have given rise to a new class of user interfaces that does not
               only explore multiple modalities but also allows for multiple interacting
               users. Even so, current declarative multimedia languages---e.g., HTML, SMIL, and
               NCL---support only limited forms of user input (mainly keyboard and mouse) for
               a single user. In this paper, we aim at studying how the NCL multimedia
               language could take advantage of those new recognition technologies. To do
               so, we revisit the model behind NCL, named NCM (Nested Context Model), and
               extend it with first-class concepts supporting multiuser and multimodal
               features. To evaluate our approach, we instantiate the proposal and discuss
               some usage scenarios, developed as NCL applications with our extended
               features.},
  note      = {Acceptance ratio: 30\%},
}
Downloads: 35
{"_id":"bZuWaYhXnHiPv2Xwc","bibbaseid":"guedes-azevedo-colcher-barbosa-extendingncltosupportmultiuserandmultimodalinteractions-2016","downloads":35,"creationDate":"2017-07-16T16:15:21.642Z","title":"Extending NCL to Support Multiuser and Multimodal Interactions","author_short":["Guedes, Á. L.","Azevedo, R. G. d. A.","Colcher, S.","Barbosa, S. D."],"year":2016,"bibtype":"inproceedings","biburl":"http://www.telemidia.puc-rio.br/~roberto/biblio/bib.bib","bibdata":{"bibtype":"inproceedings","type":"inproceedings","author":[{"propositions":[],"lastnames":["Guedes"],"firstnames":["Álan","L.V."],"suffixes":[]},{"propositions":[],"lastnames":["Azevedo"],"firstnames":["Roberto","Gerson","de","Albuquerque"],"suffixes":[]},{"propositions":[],"lastnames":["Colcher"],"firstnames":["Sérgio"],"suffixes":[]},{"propositions":[],"lastnames":["Barbosa"],"firstnames":["Simone","D.J."],"suffixes":[]}],"title":"Extending NCL to Support Multiuser and Multimodal Interactions","booktitle":"Proceedings of the 22nd Brazilian Symposium on Multimedia and the Web","series":"Webmedia '16","year":"2016","isbn":"978-1-4503-4512-5","location":"Teresina, Piauí; State, Brazil","pages":"39–46","numpages":"8","url":"http://doi.acm.org/10.1145/2976796.2976869","doi":"10.1145/2976796.2976869","acmid":"2976869","publisher":"ACM","address":"New York, NY, USA","keywords":"NCL, ginga-NCL, multimedia languages, multimodal interactions, multiuser interactions, nested context language","abstract":"Recent advances in technologies for speech, touch and gesture recognition have given rise to a new class of user interfaces that does not only explore multiple modalities but also allows for multiple interacting users. Even so, current declarative multimedia languages e.g. HTML, SMIL, and NCL?support only limited forms of user input (mainly keyboard and mouse) for a single user. In this paper, we aim at studying how the NCL multimedia language could take advantage of those new recognition technologies. 
To do so, we revisit the model behind NCL, named NCM (Nested Context Model), and extend it with first-class concepts supporting multiuser and multimodal features. To evaluate our approach, we instantiate the proposal and discuss some usage scenarios, developed as NCL applications with our extended features.","note":"Acceptance ratio: 30%","bibtex":"@inproceedings{2016_11_guedes,\nauthor={Guedes, \\'{A}lan L.V. and Azevedo, Roberto Gerson de Albuquerque and\nColcher, S{\\'e}rgio and Barbosa, Simone D.J.},\ntitle={Extending NCL to Support Multiuser and Multimodal Interactions},\nbooktitle={Proceedings of the 22nd Brazilian Symposium on Multimedia and the\nWeb},\nseries={Webmedia '16},\nyear={2016},\nisbn={978-1-4503-4512-5},\nlocation={Teresina, Piauí; State, Brazil},\npages={39--46},\nnumpages={8},\nurl={http://doi.acm.org/10.1145/2976796.2976869},\ndoi={10.1145/2976796.2976869},\nacmid={2976869},\npublisher={ACM},\naddress={New York, NY, USA},\nkeywords={NCL, ginga-NCL, multimedia languages, multimodal interactions,\nmultiuser interactions, nested context language},\nabstract={Recent advances in technologies for speech, touch and gesture\nrecognition have given rise to a new class of user interfaces that does not\nonly explore multiple modalities but also allows for multiple interacting\nusers. Even so, current declarative multimedia languages e.g. HTML, SMIL, and\nNCL?support only limited forms of user input (mainly keyboard and mouse) for\na single user. In this paper, we aim at studying how the NCL multimedia\nlanguage could take advantage of those new recognition technologies. To do\nso, we revisit the model behind NCL, named NCM (Nested Context Model), and\nextend it with first-class concepts supporting multiuser and multimodal\nfeatures. To evaluate our approach, we instantiate the proposal and discuss\nsome usage scenarios, developed as NCL applications with our extended\nfeatures.},\nnote={Acceptance ratio: 30\\%},\n}\n\n","author_short":["Guedes, Á. 
L.","Azevedo, R. G. d. A.","Colcher, S.","Barbosa, S. D."],"key":"2016_11_guedes","id":"2016_11_guedes","bibbaseid":"guedes-azevedo-colcher-barbosa-extendingncltosupportmultiuserandmultimodalinteractions-2016","role":"author","urls":{"Paper":"http://doi.acm.org/10.1145/2976796.2976869"},"keyword":["NCL","ginga-NCL","multimedia languages","multimodal interactions","multiuser interactions","nested context language"],"metadata":{"authorlinks":{"azevedo, r":"http://139.82.95.2/~roberto/publications"}},"downloads":35,"html":""},"search_terms":["extending","ncl","support","multiuser","multimodal","interactions","guedes","azevedo","colcher","barbosa"],"keywords":["ncl","ginga-ncl","multimedia languages","multimodal interactions","multiuser interactions","nested context language"],"authorIDs":["22qdanuzGpjfebYp2","244dmL4NwKXTfTqFB","24yoNLHLsDa4x9D2M","2wkrqiNDpnJQeYFvw","3fArXRQLQoc8ZwrMq","5508feeedf05089b6e0001ce","5YSSXn6odxExxuoFA","5de8c5c19e80cdde01000072","5dee8365fc4c19df01000114","5df5f4a0a37a40df01000028","5df60bec2b34d0de01000007","5df7cf16dc100cde01000018","5e06bd583cb7b3df010000f7","5e123dd0c196d3de010000e6","5e135e67697554de010001c9","5e1361bbf16095df01000016","5e136472f16095df01000072","5e1517f788b10dde01000104","5e151a5088b10dde0100011c","5e1763eb4df69dde01000162","5e191ef430dc56df01000005","5e1c63b492587bde01000005","5e271e10f51e02de0100018a","5e2d83fd556d50df010000c5","5e32bb75466076df010000b9","5e3d85b096e576de01000140","5e3db84407ca74de0100008b","5e414225b7d170df010000fb","5e41459eb7d170df01000121","5e45df99ad0603df0100010a","5e45e07cad0603df0100011c","5e4ae41441072bdf01000071","5e4bffff0dff2bde010000c4","5e4f0949e5389bde0100000b","5e501ee3933046de0100016e","5e501f2e933046de01000179","5e5e8b5ec0a53dde01000071","5e5ff99113e3aede0100003d","5e63aeed5e3c57de01000146","5yMsKZKLBnZKYvSHM","6AYHMrhkKKxQmxubb","6jHF2A9kwyWmNFCCd","6wryNCAx7Zo8fbwAB","7hPsR4oepA7Xc4HaE","884pdHmj7ppG8iMk9","8yyq55iMiNu7KsgmJ","9JGhhPKtYaFCnMcTC","AED8rhcj6RNJLxXaL","AinPa5MKBTA
RKabGR","BAt34XTbdkvxhXgcC","CKx8XtftEtEkTS9XX","CWCixdwNu6XxFiHB5","EZ5k5yT5wLDS5oY2k","EiPFBrZGk8szdGvdM","EnvpRHSpqMg8PPvFL","EyTjgNaspXXTjLnRA","FeT67z5eszt53SB9m","G4TmkruogoSArYmz8","G6dsN8GBTgKb4zgHK","GdSNk5rbGcsuazQAA","Gg6FyBxS4c9crJQWY","HK6F7nRGfMHsZwRQn","JBPNdykuQFNRwkJKm","Jq4oRAA6kc2cEWEMx","Js8D8d5fYQiaZimyM","K3h2ZWfWMtS5NWpEB","KzsLTkoPMJ6gNJo64","L8qBKy36xDRcmE8oH","MfBYf6SeReriSdD5q","NNShBA8E6jTJ9pc49","NwmD6quP7sfJgs4cE","P4HhxCoz5c2Bn55xN","RBufWNXJd7ArN6gPp","RKoz2F7cr9jhRzBSA","SKn7QDikeo6LsniwG","Tu33b2iijsccpNwNy","WSi47kKxoBL9zxMPk","XcDM74dsEkKXvGtSE","ZLyJ3SvsZAgkSmMhA","aHoWYt7KttYy4xsyb","dT4Q9vZmW8XW3rssQ","dYyNfMq6QDvMwqEmg","hN8avpNdk8F9Bdmih","hy6qqBWeQBxrfZQkQ","iZrZ562QLES379AeJ","kbbhu3riRm4GbuxXr","mYnbCr4EQk3aayLEc","oZTNoGwQwN6Lwjkyt","pRdcpdf3i5Fm32gWn","pfHvmrarHFaCnXruD","qfTGajRsvSg96C5Lj","s6NMQeuXCvahtn5xD","sTvD7E6KuhsRZRD3E","srPxnveRYPn3cx52j","thzkvvp2oSxPQeiE7","tshc376gy7go5omwM","vnp2utbXmofWRR7MA","wiggSGkKjJvNyXH6j","x2ZvzHk267HfqDhwM","xi7msFqQ4BePcjteq","xjLLYibFy9e2YPqtu","z2zDhcNq9rbP2GbuG","zwSxNcwtdzLJCX6eB"],"dataSources":["g2kK7LGtY6BSAGWXZ","fzQqRdpBjaqFvMqtM","2GMp8PAJ4r2b8svJX"]}