This publication list is generated by bibbase.org. You can create a new website from it, or embed it in an existing web page by copying and pasting any of the following snippets.

JavaScript (easiest):

  <script src="https://bibbase.org/show?bib=https%3A%2F%2Fbibbase.org%2Fnetwork%2Ffiles%2FegvJwYfyosqpuSzZi&jsonp=1&theme=dividers"></script>

PHP:

  <?php
  $contents = file_get_contents("https://bibbase.org/show?bib=https%3A%2F%2Fbibbase.org%2Fnetwork%2Ffiles%2FegvJwYfyosqpuSzZi&jsonp=1&theme=dividers");
  print_r($contents);
  ?>

iFrame (not recommended):

  <iframe src="https://bibbase.org/show?bib=https%3A%2F%2Fbibbase.org%2Fnetwork%2Ffiles%2FegvJwYfyosqpuSzZi&jsonp=1&theme=dividers"></iframe>

For more details, see the documentation.
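As an illustration, here is one way the JavaScript snippet might be dropped into an existing page. This is a minimal sketch: the surrounding HTML (page title, heading) is hypothetical and only the script tag and its URL come from the snippet above. The generated script emits its content with document.write, so the rendered list appears at the position of the script tag.

  <!DOCTYPE html>
  <html>
    <head>
      <meta charset="utf-8">
      <title>Publications</title>
    </head>
    <body>
      <h1>Publications</h1>
      <!-- The BibBase script writes the formatted publication list at this position -->
      <script src="https://bibbase.org/show?bib=https%3A%2F%2Fbibbase.org%2Fnetwork%2Ffiles%2FegvJwYfyosqpuSzZi&jsonp=1&theme=dividers"></script>
    </body>
  </html>

The publication list rendered by that snippet follows below, grouped by year.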
2022 (6)

Refractive geometry for underwater domes.
She, M.; Nakath, D.; Song, Y.; and Köser, K.
ISPRS Journal of Photogrammetry and Remote Sensing, 183: 525–540. 2022.

@article{she2022refractive,
  title={Refractive geometry for underwater domes},
  author={She, Mengkun and Nakath, David and Song, Yifan and K{\"o}ser, Kevin},
  journal={ISPRS Journal of Photogrammetry and Remote Sensing},
  volume={183},
  pages={525--540},
  year={2022},
  publisher={Elsevier}
}

An Optical Digital Twin for Underwater Photogrammetry.
Nakath, D.; She, M.; Song, Y.; and Köser, K.
PFG–Journal of Photogrammetry, Remote Sensing and Geoinformation Science, 90(1): 69–81. 2022.

@article{nakath2022optical,
  title={An Optical Digital Twin for Underwater Photogrammetry},
  author={Nakath, David and She, Mengkun and Song, Yifan and K{\"o}ser, Kevin},
  journal={PFG--Journal of Photogrammetry, Remote Sensing and Geoinformation Science},
  volume={90},
  number={1},
  pages={69--81},
  year={2022},
  publisher={Springer International Publishing}
}

A Vision on a UNESCO Global Geopark at the Southeastern Dead Sea in Jordan—How Natural Hazards May Offer Geotourism Opportunities.
Al-Halbouni, D.; AlRabayah, O.; Nakath, D.; and Rüpke, L.
Land, 11(4): 553. 2022.

@article{al2022vision,
  title={A Vision on a UNESCO Global Geopark at the Southeastern Dead Sea in Jordan—How Natural Hazards May Offer Geotourism Opportunities},
  author={Al-Halbouni, Djamil and AlRabayah, Osama and Nakath, David and R{\"u}pke, Lars},
  journal={Land},
  volume={11},
  number={4},
  pages={553},
  year={2022},
  publisher={MDPI}
}

Optical Imaging and Image Restoration Techniques for Deep Ocean Mapping: A Comprehensive Survey.
Song, Y.; Nakath, D.; She, M.; and Köser, K.
PFG–Journal of Photogrammetry, Remote Sensing and Geoinformation Science, 1–25. 2022.

@article{song2022optical,
  title={Optical Imaging and Image Restoration Techniques for Deep Ocean Mapping: A Comprehensive Survey},
  author={Song, Yifan and Nakath, David and She, Mengkun and K{\"o}ser, Kevin},
  journal={PFG--Journal of Photogrammetry, Remote Sensing and Geoinformation Science},
  pages={1--25},
  year={2022},
  publisher={Springer International Publishing}
}

Towards Cross Domain Transfer Learning for Underwater Correspondence Search.
Schöntag, P.; Nakath, D.; Röhrl, S.; and Köser, K.
In International Conference on Image Analysis and Processing, pages 461–472, 2022. Springer, Cham

@inproceedings{schontag2022towards,
  title={Towards Cross Domain Transfer Learning for Underwater Correspondence Search},
  author={Sch{\"o}ntag, Patricia and Nakath, David and R{\"o}hrl, Stefan and K{\"o}ser, Kevin},
  booktitle={International Conference on Image Analysis and Processing},
  pages={461--472},
  year={2022},
  organization={Springer, Cham}
}
Digital twinning in the ocean – challenges in multimodal sensing and multiscale fusion based on faithful visual models.
Grossmann, V.; Nakath, D.; Urlaub, M.; Oppelt, N.; Koch, R.; and Köser, K.
ISPRS Annals of the Photogrammetry, Remote Sensing and Spatial Information Sciences, 4: 345–352. 2022.

@article{grossmann2022digital,
  title={Digital twinning in the ocean – challenges in multimodal sensing and multiscale fusion based on faithful visual models},
  author={Grossmann, Vasco and Nakath, David and Urlaub, Morelia and Oppelt, Natascha and Koch, Reinhard and K{\"o}ser, Kevin},
  journal={ISPRS Annals of the Photogrammetry, Remote Sensing and Spatial Information Sciences},
  volume={4},
  pages={345--352},
  year={2022},
  publisher={Copernicus Publications (EGU)}
}
2021 (4)

Optimization of multi-led setups for underwater robotic vision systems.
Song, Y.; Sticklus, J.; Nakath, D.; Wenzlaff, E.; Koch, R.; and Köser, K.
In International Conference on Pattern Recognition, pages 390–397, 2021. Springer, Cham

@inproceedings{song2021optimization,
  title={Optimization of multi-led setups for underwater robotic vision systems},
  author={Song, Yifan and Sticklus, Jan and Nakath, David and Wenzlaff, Emanuel and Koch, Reinhard and K{\"o}ser, Kevin},
  booktitle={International Conference on Pattern Recognition},
  pages={390--397},
  year={2021},
  organization={Springer, Cham}
}

Deep Sea Robotic Imaging Simulator.
Song, Y.; Nakath, D.; She, M.; Elibol, F.; and Köser, K.
In Proceedings of the Computer Vision for Automated Analysis of Underwater Imagery Workshop (CVAUI), pages 375–389, 2021. Springer

@inproceedings{song2021deep,
  title={Deep Sea Robotic Imaging Simulator},
  author={Song, Yifan and Nakath, David and She, Mengkun and Elibol, Furkan and K{\"o}ser, Kevin},
  booktitle={Proceedings of the Computer Vision for Automated Analysis of Underwater Imagery Workshop (CVAUI)},
  pages={375--389},
  year={2021},
  organization={Springer}
}

In-situ joint light and medium estimation for underwater color restoration.
Nakath, D.; She, M.; Song, Y.; and Köser, K.
In Proceedings of the IEEE/CVF International Conference on Computer Vision, pages 3731–3740, 2021.

@inproceedings{nakath2021situ,
  title={In-situ joint light and medium estimation for underwater color restoration},
  author={Nakath, David and She, Mengkun and Song, Yifan and K{\"o}ser, Kevin},
  booktitle={Proceedings of the IEEE/CVF International Conference on Computer Vision},
  pages={3731--3740},
  year={2021}
}

MaCal-Macro Lens Calibration and the Focus Stack Camera Model.
Weng, X.; She, M.; Nakath, D.; and Köser, K.
In 2021 International Conference on 3D Vision (3DV), pages 136–144, 2021. IEEE

@inproceedings{weng2021macal,
  title={MaCal-Macro Lens Calibration and the Focus Stack Camera Model},
  author={Weng, Xiangyu and She, Mengkun and Nakath, David and K{\"o}ser, Kevin},
  booktitle={2021 International Conference on 3D Vision (3DV)},
  pages={136--144},
  year={2021},
  organization={IEEE}
}
2020 (3)

Active asteroid-SLAM.
Nakath, D.; Clemens, J.; and Rachuy, C.
Journal of Intelligent & Robotic Systems, 99(2): 303–333. 2020.

@article{nakath2020active,
  title={Active asteroid-SLAM},
  author={Nakath, David and Clemens, Joachim and Rachuy, Carsten},
  journal={Journal of Intelligent \& Robotic Systems},
  volume={99},
  number={2},
  pages={303--333},
  year={2020},
  publisher={Springer Netherlands}
}

Light Pose Calibration for Camera-light Vision Systems.
Song, Y.; Elibol, F.; She, M.; Nakath, D.; and Köser, K.
arXiv preprint arXiv:2006.15389. 2020.

@article{song2020light,
  title={Light Pose Calibration for Camera-light Vision Systems},
  author={Song, Yifan and Elibol, Furkan and She, Mengkun and Nakath, David and K{\"o}ser, Kevin},
  journal={arXiv preprint arXiv:2006.15389},
  year={2020}
}

Deep Sea Robotic Imaging Simulator for UUV Development.
Song, Y.; Nakath, D.; She, M.; Elibol, F.; and Köser, K.
CoRR. 2020.

@article{song2020deep,
  title={Deep Sea Robotic Imaging Simulator for UUV Development.},
  author={Song, Yifan and Nakath, David and She, Mengkun and Elibol, Furkan and K{\"o}ser, Kevin},
  journal={CoRR},
  year={2020}
}
2019 (1)

Active Perception for Autonomous Systems: In a Deep Space Navigation Scenario.
Nakath, D.
Ph.D. Thesis, Universität Bremen, 2019.

@phdthesis{nakath2019active,
  title={Active Perception for Autonomous Systems: In a Deep Space Navigation Scenario},
  author={Nakath, David},
  year={2019},
  school={Universit{\"a}t Bremen}
}
2018 (1)

Multi-sensor fusion and active perception for autonomous deep space navigation.
Nakath, D.; Clemens, J.; and Schill, K.
In 2018 21st International Conference on Information Fusion (FUSION), pages 2596–2605, 2018. IEEE

@inproceedings{nakath2018multi,
  title={Multi-sensor fusion and active perception for autonomous deep space navigation},
  author={Nakath, David and Clemens, Joachim and Schill, Kerstin},
  booktitle={2018 21st International Conference on Information Fusion (FUSION)},
  pages={2596--2605},
  year={2018},
  organization={IEEE}
}
2017 (1)

Rigid body attitude control based on a manifold representation of direction cosine matrices.
Nakath, D.; Clemens, J.; and Rachuy, C.
In Journal of Physics: Conference Series, volume 783, pages 012040, 2017. IOP Publishing

@inproceedings{nakath2017rigid,
  title={Rigid body attitude control based on a manifold representation of direction cosine matrices},
  author={Nakath, David and Clemens, Joachim and Rachuy, Carsten},
  booktitle={Journal of Physics: Conference Series},
  volume={783},
  number={1},
  pages={012040},
  year={2017},
  organization={IOP Publishing}
}
2016 (1)

Optimal rotation sequences for active perception.
Nakath, D.; Rachuy, C.; Clemens, J.; and Schill, K.
In Multisensor, Multisource Information Fusion: Architectures, Algorithms, and Applications 2016, volume 9872, pages 20–32, 2016. SPIE

@inproceedings{nakath2016optimal,
  title={Optimal rotation sequences for active perception},
  author={Nakath, David and Rachuy, Carsten and Clemens, Joachim and Schill, Kerstin},
  booktitle={Multisensor, Multisource Information Fusion: Architectures, Algorithms, and Applications 2016},
  volume={9872},
  pages={20--32},
  year={2016},
  organization={SPIE}
}
2015 (3)

KaNaRiA: Identifying the challenges for cognitive autonomous navigation and guidance for missions to small planetary bodies.
Probst, A.; Peytavi, G.; Nakath, D.; Schattel, A.; Rachuy, C.; Lange, P.; Clemens, J; Echim, M; Schwarting, V; Srinivas, A; and others
In International Astronautical Congress (IAC), 2015.

@inproceedings{probst2015kanaria,
  title={KaNaRiA: Identifying the challenges for cognitive autonomous navigation and guidance for missions to small planetary bodies},
  author={Probst, Alena and Peytavi, Graciela and Nakath, David and Schattel, Anne and Rachuy, Carsten and Lange, Patrick and Clemens, J and Echim, M and Schwarting, V and Srinivas, A and others},
  booktitle={International Astronautical Congress (IAC)},
  year={2015}
}

Adaptive information selection in images: Efficient naive bayes nearest neighbor classification.
Reineking, T.; Kluth, T.; and Nakath, D.
In International Conference on Computer Analysis of Images and Patterns, pages 350–361, 2015. Springer, Cham

@inproceedings{reineking2015adaptive,
  title={Adaptive information selection in images: Efficient naive bayes nearest neighbor classification},
  author={Reineking, Thomas and Kluth, Tobias and Nakath, David},
  booktitle={International Conference on Computer Analysis of Images and Patterns},
  pages={350--361},
  year={2015},
  organization={Springer, Cham}
}

Autonomous orbit navigation for a mission to the asteroid main belt.
Peytaví, G González; Clemens, J; Nakath, D; Probst, A; Schill, F.; and Eissfeller, B
In Proc. of the 66th International Astronautical Congress, 2015.

@inproceedings{peytavi2015autonomous,
  title={Autonomous orbit navigation for a mission to the asteroid main belt},
  author={Peytav{\'\i}, G Gonz{\'a}lez and Clemens, J and Nakath, D and Probst, A and Schill, FRK and Eissfeller, B},
  booktitle={Proc. of the 66th International Astronautical Congress},
  year={2015}
}
2014 (3)

Virtual Reality for Simulating Autonomous Deep-Space Navigation and Mining.
Büskens, C.; Clemens, J.; Eissfeller, B.; Förstner, R.; Gadzicki, K.; Peytavi, G. G.; Lange, P.; Nakath, D.; Probst, A.; Rachuy, C.; and others
In ICAT-EGVE (Posters and Demos), 2014.

@inproceedings{buskens2014virtual,
  title={Virtual Reality for Simulating Autonomous Deep-Space Navigation and Mining.},
  author={B{\"u}skens, Christof and Clemens, Joachim and Eissfeller, Bernd and F{\"o}rstner, Roger and Gadzicki, Konrad and Peytavi, Graciela Gonzales and Lange, Patrick and Nakath, David and Probst, Alena and Rachuy, Carsten and others},
  booktitle={ICAT-EGVE (Posters and Demos)},
  year={2014}
}

Active sensorimotor object recognition in three-dimensional space.
Nakath, D.; Kluth, T.; Reineking, T.; Zetzsche, C.; and Schill, K.
In International Conference on Spatial Cognition, pages 312–324, 2014. Springer

@inproceedings{nakath2014active,
  title={Active sensorimotor object recognition in three-dimensional space},
  author={Nakath, David and Kluth, Tobias and Reineking, Thomas and Zetzsche, Christoph and Schill, Kerstin},
  booktitle={International Conference on Spatial Cognition},
  pages={312--324},
  year={2014},
  organization={Springer}
}

Affordance-based object recognition using interactions obtained from a utility maximization principle.
Kluth, T.; Nakath, D.; Reineking, T.; Zetzsche, C.; and Schill, K.
In European Conference on Computer Vision, pages 406–412, 2014. Springer, Cham

@inproceedings{kluth2014affordance,
  title={Affordance-based object recognition using interactions obtained from a utility maximization principle},
  author={Kluth, Tobias and Nakath, David and Reineking, Thomas and Zetzsche, Christoph and Schill, Kerstin},
  booktitle={European Conference on Computer Vision},
  pages={406--412},
  year={2014},
  organization={Springer, Cham}
}
2013 (1)

Sensorimotor integration using an information gain strategy in application to object recognition tasks.
Kluth, T; Nakath, D; Reineking, T; Zetzsche, C; and Schill, K
Perception ECVP abstract, 42: 223–223. 2013.

@article{kluth2013sensorimotor,
  title={Sensorimotor integration using an information gain strategy in application to object recognition tasks},
  author={Kluth, T and Nakath, D and Reineking, T and Zetzsche, C and Schill, K},
  journal={Perception ECVP abstract},
  volume={42},
  pages={223--223},
  year={2013},
  publisher={Pion Ltd}
}
2012 (1)

A distributed online learning tracking algorithm.
Schrader, S.; Dambek, M.; Block, A.; Brending, S.; Nakath, D.; Schmid, F.; and van de Ven, J.
In 2012 12th International Conference on Control Automation Robotics & Vision (ICARCV), pages 1083–1088, 2012. IEEE

@inproceedings{schrader2012distributed,
  title={A distributed online learning tracking algorithm},
  author={Schrader, Sascha and Dambek, Markus and Block, Adrian and Brending, Stefan and Nakath, David and Schmid, Falko and van de Ven, Jasper},
  booktitle={2012 12th International Conference on Control Automation Robotics \& Vision (ICARCV)},
  pages={1083--1088},
  year={2012},
  organization={IEEE}
}