Publication list of Philip Nicolai
Generated by bibbase.org from http://thesis.philipnicolai.de/download/publications_philip_nicolai.bib

This list can be embedded in an existing web page by copying and pasting any of the following snippets.

JavaScript (easiest):

    <script src="https://bibbase.org/show?bib=http%3A%2F%2Fthesis.philipnicolai.de%2Fdownload%2Fpublications_philip_nicolai.bib&jsonp=1"></script>

PHP:

    <?php
    $contents = file_get_contents("https://bibbase.org/show?bib=http%3A%2F%2Fthesis.philipnicolai.de%2Fdownload%2Fpublications_philip_nicolai.bib&jsonp=1");
    print_r($contents);
    ?>

iFrame (not recommended):

    <iframe src="https://bibbase.org/show?bib=http%3A%2F%2Fthesis.philipnicolai.de%2Fdownload%2Fpublications_philip_nicolai.bib&jsonp=1"></iframe>

For more details, see the BibBase documentation.

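As a minimal sketch of how the JavaScript snippet is typically used: only the <script> tag itself comes from the snippet above; the surrounding page markup (title, heading) is illustrative and assumed, not part of the snippet. Because the BibBase response writes its content with document.write, the list is rendered at the exact point in the page where the script tag is placed.

    <!DOCTYPE html>
    <html>
      <head>
        <meta charset="utf-8">
        <title>Publications</title>
      </head>
      <body>
        <h1>Publications</h1>
        <!-- The BibBase script uses document.write, so the rendered
             publication list appears right here in the page. -->
        <script src="https://bibbase.org/show?bib=http%3A%2F%2Fthesis.philipnicolai.de%2Fdownload%2Fpublications_philip_nicolai.bib&jsonp=1"></script>
      </body>
    </html>

The PHP variant fetches the same payload server-side and prints it into the page, which avoids client-side JavaScript at the cost of caching the list on your server.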

2016 (4)

3D Perception Technologies for Surgical Operating Theatres.
Beyl, T.; Schreiter, L.; Nicolai, P.; Raczkowsky, J.; and Wörn, H.
In Westwood, J. D.; Westwood, S. W.; and Felländer-Tsai, L., editor(s), Medicine Meets Virtual Reality 22: NextMed / MMVR22, volume 220, pages 45–50. IOS Press, 2016.

@incollection{beyl2016,
  author = {Beyl, Tim and Schreiter, Luzie and Nicolai, Philip and Raczkowsky, J{\"o}rg and W{\"o}rn, Heinz},
  title = {{3D Perception Technologies for Surgical Operating Theatres}},
  pages = {45--50},
  volume = {220},
  publisher = {{IOS Press}},
  isbn = {9781614996255},
  editor = {Westwood, J. D. and Westwood, S. W. and Fell{\"a}nder-Tsai, L.},
  booktitle = {{Medicine Meets Virtual Reality 22: NextMed / MMVR22}},
  year = {2016}
}

ROS-based Cognitive Surgical Robotics.
Bihlmaier, A.; Beyl, T.; Nicolai, P.; Kunze, M.; Mintenbeck, J.; Schreiter, L.; Brennecke, T.; Hutzl, J.; Raczkowsky, J.; and Wörn, H.
In Koubaa, A., editor(s), Robot Operating System (ROS). Springer, [S.l.], 2016.

@incollection{bihlmaier2016,
  author = {Bihlmaier, Andreas and Beyl, Tim and Nicolai, Philip and Kunze, Mirko and Mintenbeck, Julien and Schreiter, Luzie and Brennecke, Thorsten and Hutzl, Jessica and Raczkowsky, J{\"o}rg and W{\"o}rn, Heinz},
  title = {{ROS-based Cognitive Surgical Robotics}},
  publisher = {Springer},
  isbn = {9783319260525},
  editor = {Koubaa, Anis},
  booktitle = {{Robot Operating System (ROS)}},
  year = {2016},
  address = {[S.l.]},
  abstract = {The case study at hand describes our ROS-based setup for robot-assisted (minimally-invasive) surgery. The system includes different perception components (Kinects, Time-of-Flight Cameras, Endoscopic Cameras, Marker-based Trackers, Ultrasound), input devices (Force Dimension Haptic Input Devices), robots (KUKA LWRs, Universal Robots UR5, ViKY Endoscope Holder), surgical instruments and augmented reality displays. Apart from bringing together the individual components in a modular and flexible setup, many subsystems have been developed based on combinations of the single components. These subsystems include a bimanual telemanipulator, multiple Kinect people tracking, knowledge-based endoscope guidance and ultrasound tomography. The platform is not a research project in itself, but a basic infrastructure used for various research projects. We want to show how to build a large robotics platform, in fact a complete lab setup, based on ROS. It is flexible and modular enough to do research on different robotics related questions concurrently. The whole setup is running on ROS Indigo and Ubuntu Trusty (14.04). A repository of already open sourced components is available at https://github.com/KITmedical.}
}

Abstract: The case study at hand describes our ROS-based setup for robot-assisted (minimally-invasive) surgery. The system includes different perception components (Kinects, Time-of-Flight Cameras, Endoscopic Cameras, Marker-based Trackers, Ultrasound), input devices (Force Dimension Haptic Input Devices), robots (KUKA LWRs, Universal Robots UR5, ViKY Endoscope Holder), surgical instruments and augmented reality displays. Apart from bringing together the individual components in a modular and flexible setup, many subsystems have been developed based on combinations of the single components. These subsystems include a bimanual telemanipulator, multiple Kinect people tracking, knowledge-based endoscope guidance and ultrasound tomography. The platform is not a research project in itself, but a basic infrastructure used for various research projects. We want to show how to build a large robotics platform, in fact a complete lab setup, based on ROS. It is flexible and modular enough to do research on different robotics related questions concurrently. The whole setup is running on ROS Indigo and Ubuntu Trusty (14.04). A repository of already open sourced components is available at https://github.com/KITmedical.

Model-Free (Human) Tracking Based on Ground Truth with Time Delay: A 3D Camera Based Approach for Minimizing Tracking Latency and Increasing Tracking Quality.
Nicolai, P.; Raczkowsky, J.; and Wörn, H.
In Filipe, J.; Gusikhin, O.; Madani, K.; and Sasiadek, J., editor(s), Informatics in control, automation and robotics, volume 383 of Lecture Notes in Electrical Engineering, pages 247–266. Springer, Cham, 2016.

@incollection{nicolai2016b,
  author = {Nicolai, Philip and Raczkowsky, J{\"o}rg and W{\"o}rn, Heinz},
  title = {{Model-Free (Human) Tracking Based on Ground Truth with Time Delay: A 3D Camera Based Approach for Minimizing Tracking Latency and Increasing Tracking Quality}},
  pages = {247--266},
  volume = {383},
  publisher = {Springer},
  isbn = {978-3-319-31896-7},
  series = {Lecture Notes in Electrical Engineering},
  editor = {Filipe, Joaquim and Gusikhin, Oleg and Madani, Kurosh and Sasiadek, Jurek},
  booktitle = {{Informatics in control, automation and robotics}},
  year = {2016},
  address = {Cham}
}

A 3D camera-based system concept for safe and intuitive use of a surgical robot system.
Nicolai, P.
Ph.D. Thesis, Karlsruhe Institute of Technology (KIT), June 2016.

Abstract: Within the last decades, surgical robot systems have been integrated into operating rooms worldwide. However, in current robotic procedures, the surgical personnel has to devote a significant part of attention in order to ensure and monitor seamless functioning of the robot system. To overcome this limitation, this thesis explores the feasibility of developing a system for safe and intuitive use of surgical robots, based on state-of-the-art range imaging cameras and newly developed algorithms. A novel concept for an Operating Room (OR) monitoring system is proposed that can perceive the environment of a surgical robot using multiple 3D cameras and detect potentially harmful situations between the robot, its surroundings and the persons in its vicinity, i.e. the OR personnel and the patient. Such a system is realized in a generic way in order to be applicable to different surgical robot systems. It is optimized for a low spatial footprint for not interfering with the OR personnel and their actions in already crowded ORs. Furthermore, the system provides intuitive feedback to the OR personnel whenever safety-critical events are detected, without drawing on their attention otherwise. The realized system was extensively evaluated using the OP:Sense surgical research platform. Based on the proposed approach of establishing a virtual safety zone around each robot arm, the system was shown to reliably detect and therefore avoid impending collisions, without requiring information about the trajectory of the robot. To ensure the applicability of use within the operating room, the effects of sterile draping on range imaging cameras were analyzed. A filtering method was put forward to eliminate these effects within the realized ToF camera system, allowing for successful detection of impending collisions even for draped robots. The results indicate that a 3D-camera-based supervision system can effectively contribute to the safety of use of surgical robot systems in the OR, allowing the OR personnel to completely focus on their medical tasks. The proposed methods contribute to scene supervision for human-robot cooperation and show the feasibility of the approach.

2015 (3)

Time-of-flight-assisted Kinect camera-based people detection for intuitive human robot cooperation in the surgical operating room.
Beyl, T.; Nicolai, P.; Comparetti, M. D.; Raczkowsky, J.; de Momi, E.; and Wörn, H.
International Journal of Computer Assisted Radiology and Surgery. 2015.

@article{beyl2015,
  author = {Beyl, Tim and Nicolai, Philip and Comparetti, Mirko D. and Raczkowsky, J{\"o}rg and de Momi, Elena and W{\"o}rn, Heinz},
  year = {2015},
  title = {{Time-of-flight-assisted Kinect camera-based people detection for intuitive human robot cooperation in the surgical operating room}},
  journal = {{International Journal of Computer Assisted Radiology and Surgery}},
  abstract = {Scene supervision is a major tool to make medical robots safer and more intuitive. The paper shows an approach to efficiently use 3D cameras within the surgical operating room to enable for safe human robot interaction and action perception. Additionally the presented approach aims to make 3D camera-based scene supervision more reliable and accurate. A camera system composed of multiple Kinect and time-of-flight cameras has been designed, implemented and calibrated. Calibration and object detection as well as people tracking methods have been designed and evaluated. Results: The camera system shows a good registration accuracy of 0.05m. The tracking of humans is reliable and accurate and has been evaluated in an experimental setup using operating clothing. The robot detection shows an error of around 0.04m. The robustness and accuracy of the approach allow for an integration into the modern operating room. The data output can be used directly for situation and workflow detection as well as collision avoidance.}
}

Abstract: Scene supervision is a major tool to make medical robots safer and more intuitive. The paper shows an approach to efficiently use 3D cameras within the surgical operating room to enable for safe human robot interaction and action perception. Additionally the presented approach aims to make 3D camera-based scene supervision more reliable and accurate. A camera system composed of multiple Kinect and time-of-flight cameras has been designed, implemented and calibrated. Calibration and object detection as well as people tracking methods have been designed and evaluated. Results: The camera system shows a good registration accuracy of 0.05m. The tracking of humans is reliable and accurate and has been evaluated in an experimental setup using operating clothing. The robot detection shows an error of around 0.04m. The robustness and accuracy of the approach allow for an integration into the modern operating room. The data output can be used directly for situation and workflow detection as well as collision avoidance.

Continuous Pre-Calculation of Human Tracking with Time-delayed Ground-truth - A Hybrid Approach to Minimizing Tracking Latency by Combination of Different 3D Cameras.
Nicolai, P.; Raczkowsky, J.; and Wörn, H.
In Informatics in Control, Automation and Robotics (ICINCO), 12th International Conference on, pages 121–130, 2015.
Short-listed for Best Paper Award.

@inproceedings{nicolai2015,
  author = {Nicolai, Philip and Raczkowsky, J{\"o}rg and W{\"o}rn, Heinz},
  title = {{Continuous Pre-Calculation of Human Tracking with Time-delayed Ground-truth - A Hybrid Approach to Minimizing Tracking Latency by Combination of Different 3D Cameras}},
  pages = {121--130},
  booktitle = {{Informatics in Control, Automation and Robotics (ICINCO), 12th International Conference on}},
  year = {2015},
  abstract = {We present an approach to track a point cloud with a 3D camera system with low latency and/or high frame rate, based on ground truth provided by a second 3D camera system with higher latency and/or lower frame rate. In particular, we employ human tracking based on Kinect cameras and combine it with higher framerate/lower latency of Time-of-Flight (ToF) cameras. We present the system setup, methods used and evaluation results showing a very high accuracy in combination with a latency reduction of up to factor 30.},
  bibbase_note = {<span style="color: green">Short-listed for Best Paper Award.</span>}
}

Abstract: We present an approach to track a point cloud with a 3D camera system with low latency and/or high frame rate, based on ground truth provided by a second 3D camera system with higher latency and/or lower frame rate. In particular, we employ human tracking based on Kinect cameras and combine it with higher framerate/lower latency of Time-of-Flight (ToF) cameras. We present the system setup, methods used and evaluation results showing a very high accuracy in combination with a latency reduction of up to factor 30.

A Novel 3D Camera Based Supervision System for Safe Human-Robot Interaction in the Operating Room.
Nicolai, P.; Raczkowsky, J.; and Wörn, H.
Journal of Automation and Control Engineering, 410–417. 2015.

@article{nicolai2015b,
  author = {Nicolai, Philip and Raczkowsky, J{\"o}rg and W{\"o}rn, Heinz},
  year = {2015},
  title = {{A Novel 3D Camera Based Supervision System for Safe Human-Robot Interaction in the Operating Room}},
  pages = {410--417},
  journal = {{Journal of Automation and Control Engineering}}
}

2013 (3)

Multi kinect people detection for intuitive and safe human robot cooperation in the operating room.
Beyl, T.; Nicolai, P.; Raczkowsky, J.; Wörn, H.; Comparetti, M. D.; and de Momi, E.
In Advanced Robotics (ICAR), 2013 16th International Conference on, pages 1–6, 2013.

@inproceedings{beyl2013,
  author = {Beyl, Tim and Nicolai, Philip and Raczkowsky, J{\"o}rg and W{\"o}rn, Heinz and Comparetti, Mirko D. and de Momi, Elena},
  title = {{Multi kinect people detection for intuitive and safe human robot cooperation in the operating room}},
  pages = {1--6},
  booktitle = {{Advanced Robotics (ICAR), 2013 16th International Conference on}},
  year = {2013},
  abstract = {Microsoft Kinect cameras are widely used in robotics. The cameras can be mounted either to the robot itself (in case of mobile robotics) or can be placed where they have a good view on robots and/or humans. The use of cameras in the surgical operating room adds additional complexity in placing the cameras and adds the necessity of coping with a highly uncontrolled environment with occlusions and unknown objects. In this paper we present an approach that accurately detects humans using multiple Kinect cameras. Experiments were performed to show that our approach is robust to interference, noise and occlusions. It provides a good detection and identification rate of the user which is crucial for safe human robot cooperation}
}

Abstract: Microsoft Kinect cameras are widely used in robotics. The cameras can be mounted either to the robot itself (in case of mobile robotics) or can be placed where they have a good view on robots and/or humans. The use of cameras in the surgical operating room adds additional complexity in placing the cameras and adds the necessity of coping with a highly uncontrolled environment with occlusions and unknown objects. In this paper we present an approach that accurately detects humans using multiple Kinect cameras. Experiments were performed to show that our approach is robust to interference, noise and occlusions. It provides a good detection and identification rate of the user which is crucial for safe human robot cooperation.

The OP:Sense surgical robotics platform: first feasibility studies and current research.
Nicolai, P.; Brennecke, T.; Kunze, M.; Schreiter, L.; Beyl, T.; Zhang, Y.; Mintenbeck, J.; Raczkowsky, J.; and Wörn, H.
In Surgical robotics and instrumentation, volume 8, pages 136–137. Springer-Verlag, 2013.

@incollection{nicolai2013,
  author = {Nicolai, Philip and Brennecke, T. and Kunze, M. and Schreiter, L. and Beyl, Tim and Zhang, Y. and Mintenbeck, J. and Raczkowsky, J{\"o}rg and W{\"o}rn, Heinz},
  title = {{The OP:Sense surgical robotics platform: first feasibility studies and current research}},
  pages = {136--137},
  volume = {8},
  publisher = {Springer-Verlag},
  booktitle = {{Surgical robotics and instrumentation}},
  year = {2013}
}

System Concept for Collision-Free Robot Assisted Surgery Using Real-Time Sensing.
Raczkowsky, J.; Nicolai, P.; Hein, B.; and Wörn, H.
In Lee, S.; Cho, H.; Yoon, K.; and Lee, J., editor(s), Intelligent Autonomous Systems 12, volume 194 of Advances in Intelligent Systems and Computing, pages 165–173. Springer Berlin Heidelberg, 2013.

@incollection{raczkowsky2013,
  author = {Raczkowsky, J{\"o}rg and Nicolai, Philip and Hein, Bj{\"o}rn and W{\"o}rn, Heinz},
  title = {{System Concept for Collision-Free Robot Assisted Surgery Using Real-Time Sensing}},
  pages = {165--173},
  volume = {194},
  publisher = {{Springer Berlin Heidelberg}},
  isbn = {978-3-642-33931-8},
  series = {Advances in Intelligent Systems and Computing},
  editor = {Lee, Sukhan and Cho, Hyungsuck and Yoon, Kwang-Joon and Lee, Jangmyung},
  booktitle = {{Intelligent Autonomous Systems 12}},
  year = {2013},
  abstract = {The introduction of robot assistance into the surgical process yields beside all desired advantages also additional sources of potential risks. This is especially due to the fact that the working space of the robot system is overlapping with the patient and the surgical personnel in a narrow environment around the situs. To enable the usage of partially autonomous robotic system in this field, we propose a novel approach which combines an algorithm for guaranteed collision-free path-planning with a real-time monitoring system of the workspace. This paper details the concept as well as the integration plan, showing first results for both components.}
}

Abstract: The introduction of robot assistance into the surgical process yields beside all desired advantages also additional sources of potential risks. This is especially due to the fact that the working space of the robot system is overlapping with the patient and the surgical personnel in a narrow environment around the situs. To enable the usage of partially autonomous robotic system in this field, we propose a novel approach which combines an algorithm for guaranteed collision-free path-planning with a real-time monitoring system of the workspace. This paper details the concept as well as the integration plan, showing first results for both components.

2012 (2)

Haptic Feedback in OP:Sense – Augmented Reality in Telemanipulated Robotic Surgery.
Beyl, T.; Nicolai, P.; Mönnich, H.; Raczkowsky, J.; and Wörn, H.
In Westwood, J. D.; Westwood, S. W.; and Felländer-Tsai, L., editor(s), Medicine Meets Virtual Reality 19, volume 173 of Studies in Health Technology and Informatics, pages 58–63. IOS Press, Amsterdam, 2012.
Paper: http://ebooks.iospress.nl/publication/21262

@incollection{beyl2012,
  author = {Beyl, Tim and Nicolai, Philip and M{\"o}nnich, Holger and Raczkowsky, J{\"o}rg and W{\"o}rn, Heinz},
  title = {{Haptic Feedback in OP:Sense -- Augmented Reality in Telemanipulated Robotic Surgery}},
  urldate = {06.11.2015},
  pages = {58--63},
  volume = {173},
  publisher = {{IOS Press}},
  isbn = {9781614990215},
  series = {Studies in Health Technology and Informatics},
  editor = {Westwood, J. D. and Westwood, S. W. and Fell{\"a}nder-Tsai, L.},
  booktitle = {{Medicine Meets Virtual Reality 19}},
  year = {2012},
  address = {Amsterdam},
  abstract = {In current research, haptic feedback in robot assisted interventions plays an important role. However most approaches to haptic feedback only regard the mapping of the current forces at the surgical instrument to the haptic input devices, whereas surgeons demand a combination of medical imaging and telemanipulated robotic setups. In this paper we describe how this feature is integrated in our robotic research platform OP:Sense. The proposed method allows the automatic transfer of segmented imaging data to the haptic renderer and therefore allows enriching the haptic feedback with virtual fixtures based on imaging data. Anatomical structures are extracted from pre-operative generated medical images or virtual walls are defined by the surgeon inside the imaging data. Combining real forces with virtual fixtures can guide the surgeon to the regions of interest as well as helps to prevent the risk of damage to critical structures inside the patient. We believe that the combination of medical imaging and telemanipulation is a crucial step for the next generation of MIRS-systems.},
  url = {\url{http://ebooks.iospress.nl/publication/21262}}
}

Abstract: In current research, haptic feedback in robot assisted interventions plays an important role. However most approaches to haptic feedback only regard the mapping of the current forces at the surgical instrument to the haptic input devices, whereas surgeons demand a combination of medical imaging and telemanipulated robotic setups. In this paper we describe how this feature is integrated in our robotic research platform OP:Sense. The proposed method allows the automatic transfer of segmented imaging data to the haptic renderer and therefore allows enriching the haptic feedback with virtual fixtures based on imaging data. Anatomical structures are extracted from pre-operative generated medical images or virtual walls are defined by the surgeon inside the imaging data. Combining real forces with virtual fixtures can guide the surgeon to the regions of interest as well as helps to prevent the risk of damage to critical structures inside the patient. We believe that the combination of medical imaging and telemanipulation is a crucial step for the next generation of MIRS-systems.

Ein Kinect™-basiertes Überwachungssystem für Workflowerkennung und Gestensteuerung im Operationssaal [A Kinect™-based monitoring system for workflow recognition and gesture control in the operating room].
Beyl, T.; Nicolai, P.; Raczkowsky, J.; and Wörn, H.
In Bühler, K.; Feussner, H.; Freysinger, W.; Klenzner, T.; Nabavi, A.; and Weber, S., editor(s), 11. Jahrestagung der Deutschen Gesellschaft für Computer- und Roboterassistierte Chirurgie, November 15-16, 2012, Düsseldorf, Germany, pages 177–181, 2012.
Paper: http://ceur-ws.org/Vol-1429/Proceedings_CURAC_2012_Paper_44.pdf

@inproceedings{beyl2012b,
  author = {Beyl, Tim and Nicolai, Philip and Raczkowsky, J{\"o}rg and W{\"o}rn, Heinz},
  title = {{Ein Kinect\sup{TM} basiertes {\"U}berwachungssystem f{\"u}r Workflowerkennung und Gestensteuerung im Operationssaal}},
  url = {\url{http://ceur-ws.org/Vol-1429/Proceedings_CURAC_2012_Paper_44.pdf}},
  pages = {177--181},
  editor = {B{\"u}hler, Katja and Feussner, Hubertus and Freysinger, Wolfgang and Klenzner, Thomas and Nabavi, Arya and Weber, Stefan},
  booktitle = {{11. Jahrestagung der Deutschen Gesellschaft f{\"u}r Computer- und Roboterassistierte Chirurgie, November 15-16, 2012, D{\"u}sseldorf, Germany}},
  year = {2012}
}

2011 (5)

A supervision system for the intuitive usage of a telemanipulated surgical robotic setup.
Mönnich, H.; Nicolai, P.; Beyl, T.; Raczkowsky, J.; and Wörn, H.
In Robotics and Biomimetics (ROBIO), 2011 IEEE International Conference on, pages 449–454, 2011.
Paper: http://ieeexplore.ieee.org/iel5/6175417/6181246/06181327.pdf?arnumber=6181327

@inproceedings{monnich2011,
  author = {M{\"o}nnich, Holger and Nicolai, Philip and Beyl, Tim and Raczkowsky, J{\"o}rg and W{\"o}rn, Heinz},
  title = {{A supervision system for the intuitive usage of a telemanipulated surgical robotic setup}},
  url = {\url{http://ieeexplore.ieee.org/iel5/6175417/6181246/06181327.pdf?arnumber=6181327}},
  pages = {449--454},
  isbn = {978-1-4577-2136-6},
  booktitle = {{Robotics and Biomimetics (ROBIO), 2011 IEEE International Conference on}},
  year = {2011},
  abstract = {This paper introduces the OP:Sense system that is able to track objects and humans. To reach this goal a complete surgical robotic system is built up that can be used for telemanipulation as well as for autonomous tasks, e.g. cutting or needle-insertion. Two KUKA lightweight robots that feature seven DOF and allow variable stiffness and damping due to an integrated impedance controller are used as actuators. The system includes two haptic input devices for providing haptic feedback in telemanipulation mode as well as including virtual fixtures to guide the surgeon even during telemanipulation mode. The supervision system consists of a marker-based optical tracking system, Photonic Mixer Device cameras (PMD) and rgb-d cameras (Microsoft Kinect). A simulation environment is constantly updated with the model of the environment, the model of the robots and tracked objects, the occupied space as well as tracked models of humans.}
}

Abstract: This paper introduces the OP:Sense system that is able to track objects and humans. To reach this goal a complete surgical robotic system is built up that can be used for telemanipulation as well as for autonomous tasks, e.g. cutting or needle-insertion. Two KUKA lightweight robots that feature seven DOF and allow variable stiffness and damping due to an integrated impedance controller are used as actuators. The system includes two haptic input devices for providing haptic feedback in telemanipulation mode as well as including virtual fixtures to guide the surgeon even during telemanipulation mode. The supervision system consists of a marker-based optical tracking system, Photonic Mixer Device cameras (PMD) and rgb-d cameras (Microsoft Kinect). A simulation environment is constantly updated with the model of the environment, the model of the robots and tracked objects, the occupied space as well as tracked models of humans.

OP:Sense - Eine Rapid Development Umgebung für die Entwicklung in der robotergestützten Chirurgie [OP:Sense - a rapid development environment for robot-assisted surgery].
Mönnich, H.; Nicolai, P.; Beyl, T.; Raczkowsky, J.; and Wörn, H.
In Oliver Burgert; Jörg Schipper; and Stefan Zachow, editor(s), 10. Jahrestagung der Deutschen Gesellschaft für Computer- und Roboterassistierte Chirurgie, September 15-16, 2011, Magdeburg, Germany, pages 41–44, 2011. Verlag Univ Magdeburg.
Paper: http://ceur-ws.org/Vol-1476/Proceedings_CURAC_2011_Paper_2.pdf

@inproceedings{monnich2011b,
  author = {M{\"o}nnich, Holger and Nicolai, Philip and Beyl, Tim and Raczkowsky, J{\"o}rg and W{\"o}rn, Heinz},
  title = {{OP: Sense - Eine Rapid Development Umgebung f{\"u}r die Entwicklung in der robotergest{\"u}tzten Chirurgie und Roboterassistierte Chirurgie, September 15-16, 2011, Magdeburg, Germany}},
  url = {\url{http://ceur-ws.org/Vol-1476/Proceedings_CURAC_2011_Paper_2.pdf}},
  pages = {41--44},
  publisher = {{Verlag Univ Magdeburg}},
  isbn = {978-3-940961-59-4},
  editor = {{Oliver Burgert} and {J{\"o}rg Schipper} and {Stefan Zachow}},
  booktitle = {{10. Jahrestagung der Deutschen Gesellschaft f{\"u}r Computer- und Roboterassistierte Chirurgie}},
  year = {2011}
}

A semi-autonomous robotic teleoperation surgery setup.
Mönnich, H.; Nicolai, P.; Raczkowsky, J.; and Wörn, H.
International Journal of Computer Assisted Radiology and Surgery, 6(S1): 132–133. 2011.
Paper: http://link.springer.com/article/10.1007/s11548-011-0594-0

@article{monnich2011c,
  author = {M{\"o}nnich, Holger and Nicolai, Philip and Raczkowsky, J{\"o}rg and W{\"o}rn, Heinz},
  year = {2011},
  title = {{A semi-autonomous robotic teleoperation surgery setup}},
  pages = {132--133},
  volume = {6},
  number = {S1},
  journal = {{International Journal of Computer Assisted Radiology and Surgery}},
  url = {\url{http://link.springer.com/article/10.1007/s11548-011-0594-0}}
}

OP:Sense — An integrated rapid development environment in the context of robot assisted surgery and operation room sensing.
Nicolai, P.; Beyl, T.; Mönnich, H.; Raczkowsky, J.; and Wörn, H.
In Robotics and Biomimetics (ROBIO), 2011 IEEE International Conference on, pages 2421–2422, 2011.
Finalist for Best Video Award.
Paper: http://ieeexplore.ieee.org/iel5/6175417/6181246/06181667.pdf?arnumber=6181667

@inproceedings{nicolai2011,
  author = {Nicolai, Philip and Beyl, Tim and M{\"o}nnich, Holger and Raczkowsky, J{\"o}rg and W{\"o}rn, Heinz},
  title = {{OP:Sense --- An integrated rapid development environment in the context of robot assisted surgery and operation room sensing}},
  url = {\url{http://ieeexplore.ieee.org/iel5/6175417/6181246/06181667.pdf?arnumber=6181667}},
  pages = {2421--2422},
  isbn = {978-1-4577-2136-6},
  booktitle = {{Robotics and Biomimetics (ROBIO), 2011 IEEE International Conference on}},
  year = {2011},
  abstract = {In this video we show the capabilities of the OP:Sense system. OP:Sense is an integrated rapid application development environment for robot assisted surgery. It mainly aims on MIRS and on open head neurosurgery as OP:Sense is developed for the EU Projects FP7 SAFROS and FP7 ACTIVE that aim on these usecases. Besides the framework, OP:Sense also integrates applications. Thus it is not only the framework itself but also a system that demonstrates how robots can be used for surgical interventions. Core of the system is the ACE TAO framework [1] [2] that implements realtime CORBA for communication between distributed systems. We built interfaces based on CORBA for use in Matlab and Simulink. Also there are modules for 3D Slicer and applications for the control of devices like robots, or surgical tools. As Matlab is a mighty tool for rapid application development it can be used to develop applications in a faster way compared to using C++ or similar programming languages. We use Matlab for setting up our environment and for tasks and computations that does not need to run in realtime. For Realtime tasks like telemanipulation we use Simulink models.},
  bibbase_note = {<span style="color: green">Finalist for Best Video Award.</span>}
}

Abstract: In this video we show the capabilities of the OP:Sense system. OP:Sense is an integrated rapid application development environment for robot assisted surgery. It mainly aims on MIRS and on open head neurosurgery as OP:Sense is developed for the EU Projects FP7 SAFROS and FP7 ACTIVE that aim on these usecases. Besides the framework, OP:Sense also integrates applications. Thus it is not only the framework itself but also a system that demonstrates how robots can be used for surgical interventions. Core of the system is the ACE TAO framework [1] [2] that implements realtime CORBA for communication between distributed systems. We built interfaces based on CORBA for use in Matlab and Simulink. Also there are modules for 3D Slicer and applications for the control of devices like robots, or surgical tools. As Matlab is a mighty tool for rapid application development it can be used to develop applications in a faster way compared to using C++ or similar programming languages. We use Matlab for setting up our environment and for tasks and computations that does not need to run in realtime. For Realtime tasks like telemanipulation we use Simulink models.

3D-Überwachung des OP-Saals - stören sich PMD-Kameras, Trackingsysteme und Spielkonsolenzubehör? [3D monitoring of the operating room - do PMD cameras, tracking systems and game console accessories interfere with each other?]
Nicolai, P.; Mönnich, H.; Raczkowsky, J.; and Wörn, H.
In Oliver Burgert; Jörg Schipper; and Stefan Zachow, editor(s), 10. Jahrestagung der Deutschen Gesellschaft für Computer- und Roboterassistierte Chirurgie, pages 165–169, 2011. Verlag Univ Magdeburg.

@inproceedings{nicolai2011b,
  author = {Nicolai, Philip and M{\"o}nnich, Holger and Raczkowsky, J{\"o}rg and W{\"o}rn, Heinz},
  title = {{3D-{\"U}berwachung des OP-Saals - st{\"o}ren sich PMD-Kameras, Trackingsysteme und Spielkonsolenzubeh{\"o}r?}},
  pages = {165--169},
  publisher = {{Verlag Univ Magdeburg}},
  isbn = {978-3-940961-59-4},
  editor = {{Oliver Burgert} and {J{\"o}rg Schipper} and {Stefan Zachow}},
  booktitle = {{10. Jahrestagung der Deutschen Gesellschaft f{\"u}r Computer- und Roboterassistierte Chirurgie}},
  year = {2011}
}

2010 (1)

Überwachung eines Operationssaals für die kooperative robotergestützte Chirurgie mittels neuartiger Tiefenbildkameras [Monitoring of an operating room for cooperative robot-assisted surgery using novel depth cameras].
Nicolai, P.; Mönnich, H.; Raczkowsky, J.; Wörn, H.; and Bernshausen, J.
In Oliver Burgert; Lüder A. Kahrs; Bernhard Preim; and Jörg Schipper, editor(s), 9. Jahrestagung der Deutschen Gesellschaft für Computer- und Roboterassistierte Chirurgie, November 18-19, 2010, Düsseldorf, Germany, pages 45–49, 2010. Der Andere Verlag, Düsseldorf.
Best Paper Award.

@inproceedings{nicolai2010,
  author = {Nicolai, Philip and M{\"o}nnich, Holger and Raczkowsky, J{\"o}rg and W{\"o}rn, Heinz and Bernshausen, Jens},
  title = {{{\"U}berwachung eines Operationssaals f{\"u}r die kooperative robotergest{\"u}tzte Chirurgie mittels neuartiger Tiefenbildkameras}},
  pages = {45--49},
  publisher = {{Der Andere Verlag, D{\"u}sseldorf}},
  isbn = {978-3-86247-078-5},
  editor = {{Oliver Burgert} and {L{\"u}der A. Kahrs} and {Bernhard Preim} and {J{\"o}rg Schipper}},
  booktitle = {{9. Jahrestagung der Deutschen Gesellschaft f{\"u}r Computer- und Roboterassistierte Chirurgie, November 18-19, 2010, D{\"u}sseldorf, Germany}},
  year = {2010},
  bibbase_note = {<span style="color: green">Best Paper Award.</span>}
}
