You can create a new website from this list, or embed it in an existing web page by copying and pasting any of the following snippets.
JavaScript (easiest):

<script src="https://bibbase.org/show?bib=https%3A%2F%2Fchristopherclarke.net%2Fpublications.bib&commas=true&jsonp=1&jsonp=1"></script>
PHP:

<?php
  $contents = file_get_contents("https://bibbase.org/show?bib=https%3A%2F%2Fchristopherclarke.net%2Fpublications.bib&commas=true&jsonp=1");
  print_r($contents);
?>
iFrame (not recommended):

<iframe src="https://bibbase.org/show?bib=https%3A%2F%2Fchristopherclarke.net%2Fpublications.bib&commas=true&jsonp=1"></iframe>
For more details, see the documentation.
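If you would rather render the publications yourself than load BibBase's widget, you can also fetch the underlying BibTeX file directly. The snippet below is a minimal sketch in plain browser JavaScript and is not a BibBase feature: it assumes https://christopherclarke.net/publications.bib is served with CORS headers that allow cross-origin reads (otherwise fetch it server-side, for example with the PHP snippet above), and it uses a rough regex that only handles brace-free title fields.

<div id="pubs"></div>
<script>
  // Minimal sketch: fetch the raw BibTeX file and list each entry's title.
  // The regex is a heuristic; it misses title fields that contain nested braces.
  fetch("https://christopherclarke.net/publications.bib")
    .then(response => response.text())
    .then(bib => {
      const titles = [...bib.matchAll(/^\s*title\s*=\s*\{([^{}]*)\}/gm)].map(m => m[1]);
      const items = titles.map(t => "<li>" + t + "</li>").join("");
      document.getElementById("pubs").innerHTML = "<ul>" + items + "</ul>";
    });
</script>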

2025 (6)

Openearable 2.0: Open-source earphone platform for physiological ear sensing.
Röddiger, T., Küttner, M., Lepold, P., King, T., Moschina, D., Bagge, O., Paradiso, J. A., Clarke, C., & Beigl, M.
Proceedings of the ACM on Interactive, Mobile, Wearable and Ubiquitous Technologies, 9(1): 1–33. 2025.

@article{roddiger2025openearable,
  title={Openearable 2.0: Open-source earphone platform for physiological ear sensing},
  author={R{\"o}ddiger, Tobias and K{\"u}ttner, Michael and Lepold, Philipp and King, Tobias and Moschina, Dennis and Bagge, Oliver and Paradiso, Joseph A and Clarke, Christopher and Beigl, Michael},
  journal={Proceedings of the ACM on Interactive, Mobile, Wearable and Ubiquitous Technologies},
  volume={9},
  number={1},
  pages={1--33},
  year={2025},
  publisher={ACM New York, NY, USA}
}

RetroSketch: A Retrospective Method for Measuring Emotions and Presence in Virtual Reality.
Potts, D., Gada, M., Gupta, A., Goel, K., Krzok, K. P., Pate, G., Hartley, J., Weston-Arnold, M., Aylott, J., Clarke, C., & others
In Proceedings of the 2025 CHI Conference on Human Factors in Computing Systems, pages 1–25, 2025.

@inproceedings{potts2025retrosketch,
  title={RetroSketch: A Retrospective Method for Measuring Emotions and Presence in Virtual Reality},
  author={Potts, Dominic and Gada, Miloni and Gupta, Aastha and Goel, Kavya and Krzok, Klaus Philipp and Pate, Genevieve and Hartley, Joseph and Weston-Arnold, Mark and Aylott, Jakob and Clarke, Christopher and others},
  booktitle={Proceedings of the 2025 CHI Conference on Human Factors in Computing Systems},
  pages={1--25},
  year={2025}
}

Demonstrating OpenEarable 2.0: An AI-Powered Ear Sensing Platform.
Röddiger, T., Zitz, V., Hummel, J., Küttner, M., Lepold, P., King, T., Paradiso, J. A., Clarke, C., & Beigl, M.
In Proceedings of the Extended Abstracts of the CHI Conference on Human Factors in Computing Systems, pages 1–4, 2025.

@inproceedings{roddiger2025demonstrating,
  title={Demonstrating OpenEarable 2.0: An AI-Powered Ear Sensing Platform},
  author={R{\"o}ddiger, Tobias and Zitz, Valeria and Hummel, Jonas and K{\"u}ttner, Michael and Lepold, Philipp and King, Tobias and Paradiso, Joseph A and Clarke, Christopher and Beigl, Michael},
  booktitle={Proceedings of the Extended Abstracts of the CHI Conference on Human Factors in Computing Systems},
  pages={1--4},
  year={2025}
}

Investigating the Impact of Deformable, Movable, and Rigid Surfaces on Force-Input Interactions.
Nash, J., Sauvé, K., Sharma, A., Clarke, C., & Alexander, J.
ACM Transactions on Computer-Human Interaction. 2025.

@article{nash2025investigating,
  title={Investigating the Impact of Deformable, Movable, and Rigid Surfaces on Force-Input Interactions},
  author={Nash, James and Sauv{\'e}, Kim and Sharma, Adwait and Clarke, Christopher and Alexander, Jason},
  journal={ACM Transactions on Computer-Human Interaction},
  year={2025},
  publisher={ACM}
}

BodyPursuits: Exploring Smooth Pursuit Gaze Interaction Based on Body Motion Targets.
Hansen, A., Makarem, S., Kunze, K., Zhou, Y., Knierim, M. T., Clarke, C., Gellersen, H., Beigl, M., & Röddiger, T.
In Proceedings of the 2025 Symposium on Eye Tracking Research and Applications, pages 1–8, 2025.

@inproceedings{hansen2025bodypursuits,
  title={BodyPursuits: Exploring Smooth Pursuit Gaze Interaction Based on Body Motion Targets},
  author={Hansen, Anja and Makarem, Sarah and Kunze, Kai and Zhou, Yexu and Knierim, Michael Thomas and Clarke, Christopher and Gellersen, Hans and Beigl, Michael and R{\"o}ddiger, Tobias},
  booktitle={Proceedings of the 2025 Symposium on Eye Tracking Research and Applications},
  pages={1--8},
  year={2025}
}

earEOG via Periauricular Electrodes to Facilitate Eye Tracking in a Natural Headphone Form Factor.
King, T., Knierim, M., Lepold, P., Clarke, C., Gellersen, H., Beigl, M., & Röddiger, T.
arXiv preprint arXiv:2506.07193. 2025.

@article{king2025eareog,
  title={earEOG via Periauricular Electrodes to Facilitate Eye Tracking in a Natural Headphone Form Factor},
  author={King, Tobias and Knierim, Michael and Lepold, Philipp and Clarke, Christopher and Gellersen, Hans and Beigl, Michael and R{\"o}ddiger, Tobias},
  journal={arXiv preprint arXiv:2506.07193},
  year={2025}
}

2024 (11)

Sweating the Details: Emotion Recognition and the Influence of Physical Exertion in Virtual Reality Exergaming.
Potts, D., Broad, Z., Sehgal, T., Hartley, J., O'Neill, E., Jicol, C., Clarke, C., & Lutteroth, C.
In Proceedings of the 2024 CHI Conference on Human Factors in Computing Systems, pages 1–21, 2024.

@inproceedings{potts2024sweating,
  title={Sweating the Details: Emotion Recognition and the Influence of Physical Exertion in Virtual Reality Exergaming},
  author={Potts, Dominic and Broad, Zoe and Sehgal, Tarini and Hartley, Joseph and O'Neill, Eamonn and Jicol, Crescent and Clarke, Christopher and Lutteroth, Christof},
  booktitle={Proceedings of the 2024 CHI Conference on Human Factors in Computing Systems},
  pages={1--21},
  year={2024}
}

Watch This! Observational Learning in VR Promotes Better Far Transfer than Active Learning for a Fine Psychomotor Task.
Fitton, I. S., Dark, E., Oliveira da Silva, M. M., Dalton, J., Proulx, M. J., Clarke, C., & Lutteroth, C.
In Proceedings of the 2024 CHI Conference on Human Factors in Computing Systems, pages 1–19, 2024.

@inproceedings{fitton2024watch,
  title={Watch This! Observational Learning in VR Promotes Better Far Transfer than Active Learning for a Fine Psychomotor Task},
  author={Fitton, Isabel Sophie and Dark, Elizabeth and Oliveira da Silva, Manoela Milena and Dalton, Jeremy and Proulx, Michael J and Clarke, Christopher and Lutteroth, Christof},
  booktitle={Proceedings of the 2024 CHI Conference on Human Factors in Computing Systems},
  pages={1--19},
  year={2024}
}

REVEAL: REal and Virtual Environments Augmentation Lab@ Bath.
Potts, D., Jicol, C., Clarke, C., O'Neill, E., Fitton, I. S., Dark, E., Oliveira Da Silva, M. M., Broad, Z., Sehgal, T., Hartley, J., & others
In Extended Abstracts of the CHI Conference on Human Factors in Computing Systems, pages 1–5, 2024.

@inproceedings{potts2024reveal,
  title={REVEAL: REal and Virtual Environments Augmentation Lab@ Bath},
  author={Potts, Dominic and Jicol, Crescent and Clarke, Christopher and O'Neill, Eamonn and Fitton, Isabel Sophie and Dark, Elizabeth and Oliveira Da Silva, Manoela Milena and Broad, Zoe and Sehgal, Tarini and Hartley, Joseph and others},
  booktitle={Extended Abstracts of the CHI Conference on Human Factors in Computing Systems},
  pages={1--5},
  year={2024}
}

P169 Optimising self-management in axial spondyloarthritis: a qualitative exploration of patient perspectives.
Barnett, R., Clarke, C., Sengupta, R., & Rouse, P.
Rheumatology, 63(Supplement_1): keae163–208. 2024.

@article{barnett2024p169,
  title={P169 Optimising self-management in axial spondyloarthritis: a qualitative exploration of patient perspectives},
  author={Barnett, Rosie and Clarke, Christopher and Sengupta, Raj and Rouse, Peter},
  journal={Rheumatology},
  volume={63},
  number={Supplement\_1},
  pages={keae163--208},
  year={2024},
  publisher={Oxford University Press}
}

DeformIO: Dynamic Stiffness Control on a Deformable Force-sensing Display.
Nash, J. D., Steer, C., Dinca, T., Sharma, A., Favaratto Santos, A., Wildgoose, B. T., Ager, A., Clarke, C., & Alexander, J.
In Extended Abstracts of the CHI Conference on Human Factors in Computing Systems, pages 1–8, 2024.

@inproceedings{nash2024deformio,
  title={DeformIO: Dynamic Stiffness Control on a Deformable Force-sensing Display},
  author={Nash, James David and Steer, Cameron and Dinca, Teodora and Sharma, Adwait and Favaratto Santos, Alvaro and Wildgoose, Benjamin Timothy and Ager, Alexander and Clarke, Christopher and Alexander, Jason},
  booktitle={Extended Abstracts of the CHI Conference on Human Factors in Computing Systems},
  pages={1--8},
  year={2024}
}

AB1460 DEVELOPMENT STRATEGIES AND THEORETICAL UNDERPINNINGS OF SMARTPHONE APPS TO SUPPORT SELF-MANAGEMENT OF RHEUMATIC AND MUSCULOSKELETAL DISEASES: A SYSTEMATIC LITERATURE REVIEW.
Barnett, R., Clarke, C., Sengupta, R., & Rouse, P.
Annals of the Rheumatic Diseases, 83: 2092. 2024.

@article{barnett2024ab1460,
  title={AB1460 DEVELOPMENT STRATEGIES AND THEORETICAL UNDERPINNINGS OF SMARTPHONE APPS TO SUPPORT SELF-MANAGEMENT OF RHEUMATIC AND MUSCULOSKELETAL DISEASES: A SYSTEMATIC LITERATURE REVIEW},
  author={Barnett, Rosemarie and Clarke, Christopher and Sengupta, Raj and Rouse, PC},
  journal={Annals of the Rheumatic Diseases},
  volume={83},
  pages={2092},
  year={2024},
  publisher={Elsevier}
}

Accelerating XR Innovation through a pan-European Lab Network: An overview of the EMIL project.
Blönnigen, J., Clarke, C., Dahn, A., Forelli, L., Gowrishankar, R., Heikura, T., Helzle, V., Hine, P., Jicol, C., Kreische, A., & others
In Proceedings of the 2024 ACM International Conference on Interactive Media Experiences Workshops, pages 137–141, 2024.

@inproceedings{blonnigen2024accelerating,
  title={Accelerating XR Innovation through a pan-European Lab Network: An overview of the EMIL project},
  author={Bl{\"o}nnigen, Justus and Clarke, Christopher and Dahn, Andreas and Forelli, Lisa and Gowrishankar, Ramyah and Heikura, Tuija and Helzle, Volker and Hine, Paul and Jicol, Crescent and Kreische, Alexander and others},
  booktitle={Proceedings of the 2024 ACM International Conference on Interactive Media Experiences Workshops},
  pages={137--141},
  year={2024}
}

To explore the relationship between affective states and engagement in (digital) mental well-being promotional activities in students.
Groot, J., Brosnan, M., Gjersoe, N., Ainsworth, B., & Clarke, C.
OSF, 2024.

@article{groot2024explore,
  title={To explore the relationship between affective states and engagement in (digital) mental well-being promotional activities in students.},
  author={Groot, Julia and Brosnan, Mark and Gjersoe, Nathalia and Ainsworth, Ben and Clarke, Christopher},
  year={2024},
  publisher={OSF}
}

Dataset for "Sweating The Details: Emotion Recognition and the Influence of Physical Exertion in Virtual Reality Exergaming" and EmoSense SDK.
Potts, D., Hartley, J., Jicol, C., Clarke, C., & Lutteroth, C.
University of Bath, 2024.

@article{potts2024dataset,
  title={Dataset for "Sweating The Details: Emotion Recognition and the Influence of Physical Exertion in Virtual Reality Exergaming" and EmoSense SDK},
  author={Potts, Dominic and Hartley, Joe and Jicol, Crescent and Clarke, Christopher and Lutteroth, Christof},
  year={2024},
  publisher={University of Bath}
}

Watch Out! XR Mobile Displays Improve the Experience of Co-Located VR Gaming Observers.
Salagean, A., Jicol, C., Dasalla, K., Clarke, C., & Lutteroth, C.
In 2024 IEEE International Symposium on Mixed and Augmented Reality (ISMAR), pages 1226–1235, 2024. IEEE.

@inproceedings{salagean2024watch,
  title={Watch Out! XR Mobile Displays Improve the Experience of Co-Located VR Gaming Observers},
  author={Salagean, Anca and Jicol, Crescent and Dasalla, Kenneth and Clarke, Christopher and Lutteroth, Christof},
  booktitle={2024 IEEE International Symposium on Mixed and Augmented Reality (ISMAR)},
  pages={1226--1235},
  year={2024},
  organization={IEEE}
}

Exploring the "EarSwitch" concept: a novel ear based control method for assistive technology.
Hoyle, A. C., Stevenson, R., Leonhardt, M., Gillett, T., Martinez-Hernandez, U., Gompertz, N., Clarke, C., Cazzola, D., & Metcalfe, B. W.
Journal of NeuroEngineering and Rehabilitation, 21(1): 1–18. 2024.

@article{hoyle2024exploring,
  title={Exploring the "EarSwitch" concept: a novel ear based control method for assistive technology},
  author={Hoyle, Anna C and Stevenson, Richard and Leonhardt, Martin and Gillett, Thomas and Martinez-Hernandez, Uriel and Gompertz, Nick and Clarke, Christopher and Cazzola, Dario and Metcalfe, Benjamin W},
  journal={Journal of NeuroEngineering and Rehabilitation},
  volume={21},
  number={1},
  pages={1--18},
  year={2024},
  publisher={BioMed Central}
}

2023 (9)

Vergence matching: Inferring attention to objects in 3d environments for gaze-assisted selection.
Sidenmark, L., Clarke, C., Newn, J., Lystbæk, M. N., Pfeuffer, K., & Gellersen, H.
In Proceedings of the 2023 CHI Conference on Human Factors in Computing Systems, pages 1–15, 2023.

@inproceedings{sidenmark2023vergence,
  title={Vergence matching: Inferring attention to objects in 3d environments for gaze-assisted selection},
  author={Sidenmark, Ludwig and Clarke, Christopher and Newn, Joshua and Lystb{\ae}k, Mathias N and Pfeuffer, Ken and Gellersen, Hans},
  booktitle={Proceedings of the 2023 CHI Conference on Human Factors in Computing Systems},
  pages={1--15},
  year={2023}
}

Realism and field of view affect presence in vr but not the way you think.
Jicol, C., Clarke, C., Tor, E., Dakin, R. M., Lancaster, T. C., Chang, S. T., Petrini, K., O'Neill, E., Proulx, M. J., & Lutteroth, C.
In Proceedings of the 2023 CHI Conference on Human Factors in Computing Systems, pages 1–17, 2023.

@inproceedings{jicol2023realism,
  title={Realism and field of view affect presence in vr but not the way you think},
  author={Jicol, Crescent and Clarke, Christopher and Tor, Emilia and Dakin, Rebecca M and Lancaster, Tom Charlie and Chang, Sze Tung and Petrini, Karin and O'Neill, Eamonn and Proulx, Michael J and Lutteroth, Christof},
  booktitle={Proceedings of the 2023 CHI Conference on Human Factors in Computing Systems},
  pages={1--17},
  year={2023}
}

Imagine that! imaginative suggestibility affects presence in virtual reality.
Jicol, C., Clarke, C., Tor, E., Yip, H. L., Yoon, J., Bevan, C., Bowden, H., Brann, E., Cater, K., Cole, R., & others
In Proceedings of the 2023 CHI Conference on Human Factors in Computing Systems, pages 1–11, 2023.

@inproceedings{jicol2023imagine,
  title={Imagine that! imaginative suggestibility affects presence in virtual reality},
  author={Jicol, Crescent and Clarke, Christopher and Tor, Emilia and Yip, Hiu Lam and Yoon, Jinha and Bevan, Chris and Bowden, Hugh and Brann, Elisa and Cater, Kirsten and Cole, Richard and others},
  booktitle={Proceedings of the 2023 CHI Conference on Human Factors in Computing Systems},
  pages={1--11},
  year={2023}
}

Dancing with the avatars: minimal avatar customisation enhances learning in a psychomotor task.
Fitton, I., Clarke, C., Dalton, J., Proulx, M. J., & Lutteroth, C.
In Proceedings of the 2023 CHI conference on human factors in computing systems, pages 1–16, 2023.

@inproceedings{fitton2023dancing,
  title={Dancing with the avatars: minimal avatar customisation enhances learning in a psychomotor task},
  author={Fitton, Isabel and Clarke, Christopher and Dalton, Jeremy and Proulx, Michael J and Lutteroth, Christof},
  booktitle={Proceedings of the 2023 CHI conference on human factors in computing systems},
  pages={1--16},
  year={2023}
}

FakeForward: using deepfake technology for feedforward learning.
Clarke, C., Xu, J., Zhu, Y., Dharamshi, K., McGill, H., Black, S., & Lutteroth, C.
In Proceedings of the 2023 CHI Conference on Human Factors in Computing Systems, pages 1–17, 2023.

@inproceedings{clarke2023fakeforward,
  title={FakeForward: using deepfake technology for feedforward learning},
  author={Clarke, Christopher and Xu, Jingnan and Zhu, Ye and Dharamshi, Karan and McGill, Harry and Black, Stephen and Lutteroth, Christof},
  booktitle={Proceedings of the 2023 CHI Conference on Human Factors in Computing Systems},
  pages={1--17},
  year={2023}
}

REVEAL: REal and Virtual Environments Augmentation Lab@ Bath.
Lutteroth, C., Jicol, C., Clarke, C., Proulx, M. J., O'Neill, E., Petrini, K., Fitton, I. S., Tor, E., & Yoon, J.
In Extended Abstracts of the 2023 CHI Conference on Human Factors in Computing Systems, pages 1–4, 2023.

@inproceedings{lutteroth2023reveal,
  title={REVEAL: REal and Virtual Environments Augmentation Lab@ Bath},
  author={Lutteroth, Christof and Jicol, Crescent and Clarke, Christopher and Proulx, Michael J and O'Neill, Eamonn and Petrini, Karin and Fitton, Isabel Sophie and Tor, Emilia and Yoon, Jinha},
  booktitle={Extended Abstracts of the 2023 CHI Conference on Human Factors in Computing Systems},
  pages={1--4},
  year={2023}
}

The effectiveness of fully automated digital interventions in promoting mental well-being in the general population: systematic review and meta-analysis.
Groot, J., MacLellan, A., Butler, M., Todor, E., Zulfiqar, M., Thackrah, T., Clarke, C., Brosnan, M., Ainsworth, B., & others
JMIR mental health, 10(1): e44658. 2023.

@article{groot2023effectiveness,
  title={The effectiveness of fully automated digital interventions in promoting mental well-being in the general population: systematic review and meta-analysis},
  author={Groot, Julia and MacLellan, Alexander and Butler, Madelaine and Todor, Elisa and Zulfiqar, Mahnoor and Thackrah, Timothy and Clarke, Christopher and Brosnan, Mark and Ainsworth, Ben and others},
  journal={JMIR mental health},
  volume={10},
  number={1},
  pages={e44658},
  year={2023},
  publisher={JMIR Publications Inc., Toronto, Canada}
}

Protocol for a systematic literature review of smartphone apps to support the self-management of rheumatic and musculoskeletal diseases: Development strategies, theoretical underpinnings and barriers to engagement.
Barnett, R., Clarke, C., Sengupta, R., & Rouse, P. C.
Systematic Reviews, 12(1): 129. 2023.

@article{barnett2023protocol,
  title={Protocol for a systematic literature review of smartphone apps to support the self-management of rheumatic and musculoskeletal diseases: Development strategies, theoretical underpinnings and barriers to engagement},
  author={Barnett, Rosemarie and Clarke, Christopher and Sengupta, Raj and Rouse, Peter C},
  journal={Systematic Reviews},
  volume={12},
  number={1},
  pages={129},
  year={2023},
  publisher={BioMed Central London}
}

PROTOCOL Open Access.
Barnett, R., Clarke, C., Sengupta, R., & Rouse, P. C.
2023.

@article{barnett2023protocol,
  title={PROTOCOL Open Access},
  author={Barnett, Rosemarie and Clarke, Christopher and Sengupta, Raj and Rouse, Peter C},
  year={2023}
}

2022 (6)

Advanced Visual Interfaces for Augmented Video.
Coccoli, M., Galluccio, I., Torre, I., Amenduni, F., Cattaneo, A., & Clarke, C.
In Proceedings of the 2022 International Conference on Advanced Visual Interfaces, pages 1–3, 2022.

@inproceedings{coccoli2022advanced,
  title={Advanced Visual Interfaces for Augmented Video},
  author={Coccoli, Mauro and Galluccio, Ilenia and Torre, Ilaria and Amenduni, Francesca and Cattaneo, Alberto and Clarke, Christopher},
  booktitle={Proceedings of the 2022 International Conference on Advanced Visual Interfaces},
  pages={1--3},
  year={2022}
}

Step into my mind palace: Exploration of a collaborative paragogy tool in VR.
Sims, R., Chang, B., Bennett, V., Krishnan, A., Aboubakar, A., Coman, G., Bahrami, A., Huang, Z., Clarke, C., & Karnik, A.
In 2022 8th International Conference of the Immersive Learning Research Network (iLRN), pages 1–8, 2022. IEEE.

@inproceedings{sims2022step,
  title={Step into my mind palace: Exploration of a collaborative paragogy tool in VR},
  author={Sims, Robert and Chang, Barry and Bennett, Verity and Krishnan, Advaith and Aboubakar, Abdalslam and Coman, George and Bahrami, Abdulrazak and Huang, Zehao and Clarke, Christopher and Karnik, Abhijit},
  booktitle={2022 8th International Conference of the Immersive Learning Research Network (iLRN)},
  pages={1--8},
  year={2022},
  organization={IEEE}
}

Sensing with earables: A systematic literature review and taxonomy of phenomena.
Röddiger, T., Clarke, C., Breitling, P., Schneegans, T., Zhao, H., Gellersen, H., & Beigl, M.
Proceedings of the ACM on interactive, mobile, wearable and ubiquitous technologies, 6(3): 1–57. 2022.

@article{roddiger2022sensing,
  title={Sensing with earables: A systematic literature review and taxonomy of phenomena},
  author={R{\"o}ddiger, Tobias and Clarke, Christopher and Breitling, Paula and Schneegans, Tim and Zhao, Haibin and Gellersen, Hans and Beigl, Michael},
  journal={Proceedings of the ACM on interactive, mobile, wearable and ubiquitous technologies},
  volume={6},
  number={3},
  pages={1--57},
  year={2022},
  publisher={ACM New York, NY, USA}
}

OpenEarable: Open Hardware Earable Sensing Platform.
Röddiger, T., King, T., Roodt, D. R., Clarke, C., & Beigl, M.
In EarComp 2022, 2022.

@inproceedings{roddiger2022openearable,
  title={OpenEarable: Open Hardware Earable Sensing Platform},
  author={R{\"o}ddiger, Tobias and King, Tobias and Roodt, Dylan Ray and Clarke, Christopher and Beigl, Michael},
  booktitle={EarComp 2022},
  year={2022}
}

Excerpt from “Sensing with Earables: A Systematic Literature Review and Taxonomy of Phenomena”.
Röddiger, T., Clarke, C., Breitling, P., Schneegans, T., Zhao, H., Gellersen, H., & Beigl, M.
In Adjunct Proceedings of the 2022 ACM International Joint Conference on Pervasive and Ubiquitous Computing and the 2022 ACM International Symposium on Wearable Computers, pages 244–245, 2022.

@inproceedings{roddiger2022excerpt,
  title={Excerpt from “Sensing with Earables: A Systematic Literature Review and Taxonomy of Phenomena”},
  author={R{\"o}ddiger, Tobias and Clarke, Christopher and Breitling, Paula and Schneegans, Tim and Zhao, Haibin and Gellersen, Hans and Beigl, Michael},
  booktitle={Adjunct Proceedings of the 2022 ACM International Joint Conference on Pervasive and Ubiquitous Computing and the 2022 ACM International Symposium on Wearable Computers},
  pages={244--245},
  year={2022}
}

ETRA'22: 2022 Symposium on Eye Tracking Research and Applications.
Shic, F., Kasneci, E., Khamis, M., Gellersen, H., Krejtz, K., Weiskopf, D., Blascheck, T., Bradshaw, J., Vrzakova, H., Binaee, K., & others
In Symposium on Eye Tracking Research and Applications (ETRA 2022), 2022. Association for Computing Machinery.

@inproceedings{shic2022etra,
  title={ETRA'22: 2022 Symposium on Eye Tracking Research and Applications},
  author={Shic, Frederick and Kasneci, Enkelejda and Khamis, Mohamed and Gellersen, Hans and Krejtz, Krzysztof and Weiskopf, Daniel and Blascheck, Tanja and Bradshaw, Jessica and Vrzakova, Hana and Binaee, Kamran and others},
  booktitle={Symposium on Eye Tracking Research and Applications (ETRA 2022)},
  year={2022},
  organization={Association for Computing Machinery}
}

2021 (5)

EarRumble: Discreet Hands-and Eyes-Free Input by Voluntary Tensor Tympani Muscle Contraction.
Röddiger, T., Clarke, C., Wolffram, D., Budde, M., & Beigl, M.
In ACM CHI 2021, pages 1–14. 2021.

@incollection{roddiger2021earrumble,
  title={EarRumble: Discreet Hands-and Eyes-Free Input by Voluntary Tensor Tympani Muscle Contraction},
  author={R{\"o}ddiger, Tobias and Clarke, Christopher and Wolffram, Daniel and Budde, Matthias and Beigl, Michael},
  booktitle={ACM CHI 2021},
  pages={1--14},
  year={2021}
}

Gaze+ Hold: Eyes-only Direct Manipulation with Continuous Gaze Modulated by Closure of One Eye.
Gomez, A. R., Clarke, C., Sidenmark, L., & Gellersen, H.
In ACM ETRA 2021, 2021. Association for Computing Machinery.

@inproceedings{gomez2021gaze+,
  title={Gaze+ Hold: Eyes-only Direct Manipulation with Continuous Gaze Modulated by Closure of One Eye},
  author={Gomez, Argenis Ramirez and Clarke, Christopher and Sidenmark, Ludwig and Gellersen, Hans},
  booktitle={ACM ETRA 2021},
  year={2021},
  organization={Association for Computing Machinery}
}

Reactive Video: Movement Sonification as Auditory Feedback for Supporting Physical Activity.
Cavdir, D., Clarke, C., Chiu, P., Denoue, L., & Kimber, D.
In New Interfaces for Musical Expression (NIME) 2021, 2021.

@inproceedings{cavdir2021reactive,
  title={Reactive Video: Movement Sonification as Auditory Feedback for Supporting Physical Activity},
  author={Cavdir, Doga and Clarke, Christopher and Chiu, Patrick and Denoue, Laurent and Kimber, Don},
  booktitle={New Interfaces for Musical Expression (NIME) 2021},
  year={2021}
}

The environmental impact of research communities: insights from conference sustainability chairs.
Williams, K., Kane, B., Clarke, C., & Widdicks, K.
XRDS: Crossroads, The ACM Magazine for Students, 27(4): 46–51. 2021.

@article{williams2021environmental,
  title={The environmental impact of research communities: insights from conference sustainability chairs},
  author={Williams, Kristin and Kane, Bridget and Clarke, Chris and Widdicks, Kelly},
  journal={XRDS: Crossroads, The ACM Magazine for Students},
  volume={27},
  number={4},
  pages={46--51},
  year={2021},
  publisher={ACM New York, NY, USA}
}

A method of effecting control of an electronic device.
Gellersen, H., & Clarke, C.
May 13 2021. US Patent App. 16/491,833.

@misc{gellersen2021method,
  title={A method of effecting control of an electronic device},
  author={Gellersen, Hans-Werner and Clarke, Christopher},
  year={2021},
  month=may # "~13",
  publisher={Google Patents},
  note={US Patent App. 16/491,833}
}

2020 (5)

Outline Pursuits: Gaze-assisted Selection of Occluded Objects in Virtual Reality.
Sidenmark*, L., Clarke*, C., Zhang, X., Phu, J., & Gellersen, H.
ACM CHI 2020.

@article{sidenmark2020outline,
  title={Outline Pursuits: Gaze-assisted Selection of Occluded Objects in Virtual Reality},
  author={Sidenmark*, Ludwig and Clarke*, Christopher and Zhang, Xuesong and Phu, Jenny and Gellersen, Hans},
  year={2020},
  publisher={ACM CHI 2020}
}

BimodalGaze: Seamlessly Refined Pointing with Gaze and Filtered Gestural Head Movement.
Sidenmark, L., Mardanbegi, D., Gomez, A. R., Clarke, C., & Gellersen, H.
In ACM ETRA 2020, pages 1–9. 2020.

@incollection{sidenmark2020bimodalgaze,
  title={BimodalGaze: Seamlessly Refined Pointing with Gaze and Filtered Gestural Head Movement},
  author={Sidenmark, Ludwig and Mardanbegi, Diako and Gomez, Argenis Ramirez and Clarke, Christopher and Gellersen, Hans},
  booktitle={ACM ETRA 2020},
  pages={1--9},
  year={2020}
}

Dynamic motion coupling of body movement for input control.
Clarke, C.
Ph.D. Thesis, Lancaster University (United Kingdom), 2020.

@phdthesis{clarke2020dynamic,
  title={Dynamic motion coupling of body movement for input control},
  author={Clarke, Christopher},
  year={2020},
  school={Lancaster University (United Kingdom)}
}

Reactive Video: Adaptive Video Playback Based on User Motion for Supporting Physical Activity.
Clarke, C., Cavdir, D., Chiu, P., Denoue, L., & Kimber, D.
ACM UIST 2020.

@article{clarke2020reactive,
  title={Reactive Video: Adaptive Video Playback Based on User Motion for Supporting Physical Activity},
  author={Clarke, Christopher and Cavdir, Doga and Chiu, Patrick and Denoue, Laurent and Kimber, Don},
  year={2020},
  publisher={ACM UIST 2020}
}

Motion Coupling of Earable Devices in Camera View.
Clarke, C., Ehrich, P., & Gellersen, H.
ACM MUM 2020.

@article{clarke2020motion,
  title={Motion Coupling of Earable Devices in Camera View},
  author={Clarke, Christopher and Ehrich, Peter and Gellersen, Hans},
  year={2020},
  publisher={ACM MUM 2020}
}

2019 (1)

Monocular gaze depth estimation using the vestibulo-ocular reflex.
Mardanbegi, D., Clarke, C., & Gellersen, H.
In ACM ETRA 2019, pages 1–9. 2019.

@incollection{mardanbegi2019monocular,
  title={Monocular gaze depth estimation using the vestibulo-ocular reflex},
  author={Mardanbegi, Diako and Clarke, Christopher and Gellersen, Hans},
  booktitle={ACM ETRA 2019},
  pages={1--9},
  year={2019}
}

2017 (4)

AURORA: autonomous real-time on-board video analytics.
Angelov, P., Sadeghi-Tehran, P., & Clarke, C.
Neural Computing and Applications, 28: 855–865. 2017.

@article{angelov2017aurora,
  title={AURORA: autonomous real-time on-board video analytics},
  author={Angelov, Plamen and Sadeghi-Tehran, Pouria and Clarke, Christopher},
  journal={Neural Computing and Applications},
  volume={28},
  pages={855--865},
  year={2017},
  publisher={Springer London}
}

Motion correlation: Selecting objects by matching their movement.
Velloso, E., Carter, M., Newn, J., Esteves, A., Clarke, C., & Gellersen, H.
ACM Transactions on Computer-Human Interaction (TOCHI), 24(3): 1–35. 2017.

@article{velloso2017motion,
  title={Motion correlation: Selecting objects by matching their movement},
  author={Velloso, Eduardo and Carter, Marcus and Newn, Joshua and Esteves, Augusto and Clarke, Christopher and Gellersen, Hans},
  journal={ACM Transactions on Computer-Human Interaction (TOCHI)},
  volume={24},
  number={3},
  pages={1--35},
  year={2017},
  publisher={ACM New York, NY, USA}
}

Remote control by body movement in synchrony with orbiting widgets: An evaluation of tracematch.
Clarke, C., Bellino, A., Esteves, A., & Gellersen, H.
Proceedings of the ACM on Interactive, Mobile, Wearable and Ubiquitous Technologies, 1(3): 1–22. 2017.

@article{clarke2017remote,
  title={Remote control by body movement in synchrony with orbiting widgets: An evaluation of tracematch},
  author={Clarke, Christopher and Bellino, Alessio and Esteves, Augusto and Gellersen, Hans},
  journal={Proceedings of the ACM on Interactive, Mobile, Wearable and Ubiquitous Technologies},
  volume={1},
  number={3},
  pages={1--22},
  year={2017},
  publisher={ACM New York, NY, USA}
}

MatchPoint: Spontaneous Spatial Coupling of Body Movement for Touchless Pointing.
Clarke, C., & Gellersen, H.
In ACM UIST 2017, pages 179–192. 2017.

@incollection{clarke2017matchpoint,
  title={MatchPoint: Spontaneous Spatial Coupling of Body Movement for Touchless Pointing},
  author={Clarke, Christopher and Gellersen, Hans},
  booktitle={ACM UIST 2017},
  pages={179--192},
  year={2017}
}

2016 (1)

TraceMatch: a computer vision technique for user input by tracing of animated controls.
Clarke, C., Bellino, A., Esteves, A., Velloso, E., & Gellersen, H.
In Proceedings of the 2016 ACM International Joint Conference on Pervasive and Ubiquitous Computing, pages 298–303, 2016.

@inproceedings{clarke2016tracematch,
  title={TraceMatch: a computer vision technique for user input by tracing of animated controls},
  author={Clarke, Christopher and Bellino, Alessio and Esteves, Augusto and Velloso, Eduardo and Gellersen, Hans},
  booktitle={Proceedings of the 2016 ACM International Joint Conference on Pervasive and Ubiquitous Computing},
  pages={298--303},
  year={2016}
}

2015 (1)

Self-defining memory cues: creative expression and emotional meaning.
Sas, C., Challioner, S., Clarke, C., Wilson, R., Coman, A., Clinch, S., Harding, M., & Davies, N.
In Proceedings of the 33rd Annual ACM Conference Extended Abstracts on Human Factors in Computing Systems, pages 2013–2018, 2015.

@inproceedings{sas2015self,
  title={Self-defining memory cues: creative expression and emotional meaning},
  author={Sas, Corina and Challioner, Scott and Clarke, Christopher and Wilson, Ross and Coman, Alina and Clinch, Sarah and Harding, Mike and Davies, Nigel},
  booktitle={Proceedings of the 33rd Annual ACM Conference Extended Abstracts on Human Factors in Computing Systems},
  pages={2013--2018},
  year={2015}
}

2014 (2)

Sariva: Smartphone app for real-time intelligent video analytics.
Clarke, C., Angelov, P., Yusufu, M., & Sadeghi-Tehran, P.
Journal of Automation Mobile Robotics and Intelligent Systems, 8. 2014.

@article{clarke2014sariva,
  title={Sariva: Smartphone app for real-time intelligent video analytics},
  author={Clarke, Christopher and Angelov, Plamen and Yusufu, M and Sadeghi-Tehran, Pouria},
  journal={Journal of Automation Mobile Robotics and Intelligent Systems},
  volume={8},
  year={2014}
}

A real-time approach for autonomous detection and tracking of moving objects from UAV.
Sadeghi-Tehran, P., Clarke, C., & Angelov, P.
In 2014 IEEE Symposium on Evolving and Autonomous Learning Systems (EALS), pages 43–49, 2014. IEEE.

@inproceedings{sadeghi2014real,
  title={A real-time approach for autonomous detection and tracking of moving objects from UAV},
  author={Sadeghi-Tehran, Pouria and Clarke, Christopher and Angelov, Plamen},
  booktitle={2014 IEEE Symposium on Evolving and Autonomous Learning Systems (EALS)},
  pages={43--49},
  year={2014},
  organization={IEEE}
}
