2023 (5)

Early Results on GNSS Receiver Antenna Calibration System Development.
Tupek, A.; Zrinjski, M.; Švaco, M.; and Barković, Đ.
In 2023. MDPI. URL: https://sciforum.net/paper/view/16227?leave_comment=true

Artificial intelligence and robotics as the driving power of modern society.
Jerbić, B.; and Švaco, M.
Rad Hrvatske akademije znanosti i umjetnosti. Tehničke znanosti, 23: 1–55. 2023. DOI: 10.21857/94kl4cld6m. URL: http://dizbi.hazu.hr/a/?pr=i&id=2584698

Visual quality and safety monitoring system for human-robot cooperation.
Kozamernik, N.; Zaletelj, J.; Košir, A.; Šuligoj, F.; and Bračun, D.
The International Journal of Advanced Manufacturing Technology, 128(1-2): 685–701. September 2023. DOI: 10.1007/s00170-023-11698-2. URL: https://link.springer.com/10.1007/s00170-023-11698-2

Efficient workspace awareness is critical for improved interaction in cooperative and collaborative robotic applications. In addition to safety and control aspects, quality-related tasks such as the monitoring of manual activities and the final quality assessment of the results are also required. In this context, a visual quality and safety monitoring system is developed and evaluated. The system integrates close-up observation of manual activities and posture monitoring. A compact single-camera stereo vision system and a time-of-flight depth camera are used to minimize the interference of the sensors with the operator and the workplace. Data processing is based on deep learning to detect classes related to quality and safety aspects. The operation of the system is evaluated while monitoring a human-robot manual assembly task. The results show that the system ensures a high level of safety, provides reliable visual feedback to the operator on errors in the assembly process, and inspects the finished assembly with a low critical error rate.

GNSS Receiver Antenna Absolute Field Calibration System Development: Testing and Preliminary Results.
Tupek, A.; Zrinjski, M.; Švaco, M.; and Barković, Đ.
Remote Sensing, 15(18): 4622. September 2023. DOI: 10.3390/rs15184622. URL: https://www.mdpi.com/2072-4292/15/18/4622

For high-precision Global Navigation Satellite Systems (GNSS) positioning based on carrier-phase measurements, knowledge of the GNSS receiver antenna electrical signal reception characteristics, i.e., phase center, is crucial. Numerous studies have led to the understanding of the influence of GNSS receiver antenna phase center corrections (PCCs) on GNSS positioning accuracy and other estimated parameters (e.g., receiver clock estimates, ambiguities, etc.). With the goal of determining the PCC model of GNSS receiver antennas, only a few antenna calibration systems/facilities are in operation or under development worldwide. The International GNSS Service (IGS) publishes type-mean PCC models for almost all geodetic-grade GNSS antennas. However, the type-mean models are not perfect and do not fully reflect the signal reception properties of individual GNSS receiver antennas. Relevant published scientific research has shown that the application of individual PCC models significantly improves the accuracy of GNSS positioning and other estimated parameters. In this article, the new automated GNSS antenna calibration system, recently developed at the Laboratory for Measurements and Measuring Technique (LMMT) of the Faculty of Geodesy of the University of Zagreb in Croatia, is presented. The developed system is an absolute field calibration system based on the utilization of a Mitsubishi MELFA 6-axis industrial robot. During calibration, the robot tilts and rotates the GNSS antenna under test (AUT) around a fixed point within the antenna. The antenna PCC modelling is based on time-differenced double-difference carrier-phase observations. Our preliminary results for the Global Positioning System (GPS) L1 (G01) frequency show a submillimeter repeatability of the estimated PCC model and a submillimeter agreement with the Geo++ GmbH calibration results.

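The phase-center modelling above is driven by time-differenced double-difference carrier-phase observations. As a purely illustrative sketch of that observable (not the calibration software described in the paper), assuming hypothetical carrier-phase arrays of shape (epochs, satellites) for a reference receiver and the antenna under test:

```python
import numpy as np

def time_differenced_double_differences(phase_ref, phase_aut, ref_sat=0):
    """Form time-differenced double-difference (TDDD) carrier-phase observables.

    phase_ref, phase_aut: (n_epochs, n_sats) carrier phase in cycles for the
    reference receiver and the antenna under test (hypothetical inputs).
    """
    # Between-receiver single differences cancel satellite clock errors.
    sd = phase_aut - phase_ref
    # Double differences w.r.t. a reference satellite cancel receiver clock errors.
    dd = sd - sd[:, [ref_sat]]
    # Differencing consecutive epochs removes the constant integer ambiguities
    # (assuming no cycle slips between the two epochs).
    tdd = np.diff(dd, axis=0)
    # The reference-satellite column is identically zero, so drop it.
    return np.delete(tdd, ref_sat, axis=1)
```

What remains in such residuals is dominated by geometry change, noise, and the antenna phase-center effects that a calibration adjustment would then model.
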
Physical Education Exercises Validation Through Child-Humanoid Robot Interaction.
Knežević, T.; Radmilović, M.; Borojević, J.; Šumarac, J.; Švaco, M.; and Raković, M.
In Petrič, T.; Ude, A.; and Žlajpah, L., editors, Advances in Service and Industrial Robotics, volume 135, pages 132–140. Springer Nature Switzerland, Cham, 2023. Series Title: Mechanisms and Machine Science. DOI: 10.1007/978-3-031-32606-6_16. URL: https://link.springer.com/10.1007/978-3-031-32606-6_16

2022 (5)

Intravascular Tracking of Micro-Agents Using Medical Ultrasound: Towards Clinical Applications.
Suligoj, F.; Heunis, C. M.; Mohanty, S.; and Misra, S.
IEEE Transactions on Biomedical Engineering, 69(12): 3739–3747. December 2022. DOI: 10.1109/TBME.2022.3176746. URL: https://ieeexplore.ieee.org/document/9780008/

Frameless stereotactic brain biopsy and external ventricular drainage placement using the RONNA G4 system.
Raguž, M.; Dlaka, D.; Orešković, D.; Kaštelančić, A.; Chudy, D.; Jerbić, B.; Šekoranja, B.; Šuligoj, F.; and Švaco, M.
Journal of Surgical Case Reports, 2022(5): rjac151. May 2022. DOI: 10.1093/jscr/rjac151. URL: https://academic.oup.com/jscr/article/doi/10.1093/jscr/rjac151/6596083

Robot-assisted stereotactic procedures are among the latest technological improvements in neurosurgery. Herein, to the best of our knowledge, we report a first external ventricular drainage (EVD) placement using the RONNA G4 robotic system performed together with brain biopsy, all in one procedure. A patient presented with progressive drowsiness, cognitive slowing, poor mobility and incontinence. Magnetic resonance imaging brain scans revealed a multicentric process located in the right basal ganglia with extensive vasogenic edema and a dilatated ventricular system. Using the RONNAplan software, two trajectories were planned: one for brain biopsy on the left side and one for EVD implantation on the right side; the procedures went without complications. The RONNA G4 robotic system is an accurate neurosurgical tool for performing frameless brain biopsies and EVD placement. Further studies are needed in order to enroll a larger patient sample, to calculate the possible placement deviation, and to perform the comparison with other robotic systems.

Mobile Wall-Climbing Robot for NDT inspection of vertical concrete structures.
Božić, M.; Ćaran, B.; Švaco, M.; Jerbić, B.; and Serdar, M.
In Zurich, August 2022. URL: https://www.ndt.net/search/docs.php3?id=27295

Development of an autonomous system for assessment and prediction of structural integrity.
Serdar, M.; Damjanović, D.; Švaco, M.; Jerbić, B.; Orsag, M.; and Kovačić, Z.
Journal of the Croatian Association of Civil Engineers, 73(12): 1173–1184. February 2022. DOI: 10.14256/JCE.3390.2021. URL: http://www.casopis-gradjevinar.hr/archive/article/3390

Development of innovative solutions for the maintenance of transport infrastructure facilities is needed in order to ensure a more rational, planned and lower-cost maintenance of transport infrastructure, and to ultimately minimise the risk of catastrophic consequences. A system for an autonomous inspection of structures, based on advanced measuring methods integrated on a wall-climbing robot and an unmanned aerial vehicle, is currently developed in the scope of the ASAP project. The objective of this paper is to provide an overview and draw attention to disadvantages of conventional methods for testing materials and structures in order to assess their condition. This objective was the main motivation for forming a multidisciplinary team through the ASAP project. Possibilities and challenges in the development of an autonomous structural-assessment system are also presented in the paper, with the purpose of increasing the reliability and efficiency of systemic assessment of structures.

Interspecies Collaboration in the Design of Visual Identity: A Case Study.
Jerbić, B.; Švaco, M.; Šuligoj, F.; Šekoranja, B.; Vidaković, J.; Turković, M.; Lekić, M.; Pavlek, B.; Bolfan, B.; Bruketa, D.; Borošić, D.; and Bušić, B.
arXiv:2201.10393 [cs]. January 2022. URL: http://arxiv.org/abs/2201.10393

Design usually relies on human ingenuity, but the past decade has seen the field's toolbox expanding to Artificial Intelligence (AI) and its adjacent methods, making room for hybrid, algorithmic creations. This article aims to substantiate the concept of interspecies collaboration - that of natural and artificial intelligence - in the active co-creation of a visual identity, describing a case study of the Regional Center of Excellence for Robotic Technology (CRTA) which opened on 750 m² in June 2021 within the University of Zagreb. The visual identity of the Center comprises three separately devised elements, each representative of the human-AI relationship and embedded in the institution's logo. Firstly, the letter "C" (from the CRTA acronym) was created using a Gaussian Mixture Model (GMM) applied to (x, y) coordinates that the neurosurgical robot RONNA, CRTA's flagship innovation, generated when hand-guided by a human operator. The second shape of the letter "C" was created by using the same (x, y) coordinates as inputs fed to a neural network whose goal was to output letters in a novel, AI-generated typography. A basic feedforward back-propagating neural network with two hidden layers was chosen for the task. The final and third design element was a trajectory the robot RONNA makes when performing a brain biopsy. As CRTA embodies a state-of-the-art venue for robotics research, the 'interspecies' approach was used to accentuate the importance of human-robot collaboration which is at the core of the newly opened Center, illustrating the potential of reciprocal and amicable relationship that humans could have with technology.

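The first logo element mentioned above is a Gaussian Mixture Model fitted to planar (x, y) coordinates recorded while the robot was hand-guided. A minimal sketch of that kind of fit, using scikit-learn on synthetic stand-in data (not the authors' code or dataset):

```python
import numpy as np
from sklearn.mixture import GaussianMixture

# Hypothetical hand-guided trajectory: an (N, 2) array of (x, y) points
# roughly tracing a "C"-shaped arc.
rng = np.random.default_rng(0)
theta = rng.uniform(0.5 * np.pi, 1.5 * np.pi, 2000)
points = np.column_stack([np.cos(theta), np.sin(theta)]) + rng.normal(0, 0.05, (2000, 2))

# Fit a mixture of Gaussians to the recorded coordinates.
gmm = GaussianMixture(n_components=8, covariance_type="full", random_state=0).fit(points)

# The fitted means/covariances (or samples drawn from the model) can then be
# rendered as the stylised letterform.
samples, _ = gmm.sample(500)
print(gmm.means_.round(2))
```
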
2021 (6)

Development of a Mobile Wall-Climbing Robot with a Hybrid Adhesion System.
Bozic, M.; Jerbic, B.; and Švaco, M.
In 2021 44th International Convention on Information, Communication and Electronic Technology (MIPRO), pages 1136–1142, Opatija, Croatia, September 2021. IEEE. DOI: 10.23919/MIPRO52101.2021.9596717. URL: https://ieeexplore.ieee.org/document/9596717/

Workspace Analysis of Robotically Assisted Percutaneous Radiofrequency Thermocoagulation for the Trigeminal Neuralgia.
Švaco, M.; Kastelancic, A.; Dlaka, D.; Suligoj, F.; Marijic, M.; Chudy, D.; and Jerbic, B.
In 2021 44th International Convention on Information, Communication and Electronic Technology (MIPRO), pages 1191–1195, Opatija, Croatia, September 2021. IEEE. DOI: 10.23919/MIPRO52101.2021.9596651. URL: https://ieeexplore.ieee.org/document/9596651/

Frameless stereotactic brain biopsy: a prospective study on robot-assisted brain biopsies performed on 32 patients by using the RONNA G4 system.
Dlaka, D.; Švaco, M.; Chudy, D.; Jerbić, B.; Šekoranja, B.; Šuligoj, F.; Vidaković, J.; Romić, D.; and Raguž, M.
The International Journal of Medical Robotics and Computer Assisted Surgery, rcs.2245. February 2021. DOI: 10.1002/rcs.2245. URL: https://onlinelibrary.wiley.com/doi/10.1002/rcs.2245

Clinical application of the RONNA G4 system – preliminary validation of 23 robotic frameless brain biopsies.
Dlaka, D.; Švaco, M.; Chudy, D.; Jerbić, B.; Šekoranja, B.; Šuligoj, F.; Vidaković, J.; Almahariq, F.; Romić, D.; and Raguž, M.
Croatian Medical Journal, 62(4): 318–327. August 2021. DOI: 10.3325/cmj.2021.62.318. URL: https://www.ncbi.nlm.nih.gov/pmc/articles/PMC8491043/

RobUSt – An Autonomous Robotic Ultrasound System for Medical Imaging.
Suligoj, F.; Heunis, C. M.; Sikorski, J.; and Misra, S.
IEEE Access, 9: 67456–67465. 2021. DOI: 10.1109/ACCESS.2021.3077037. URL: https://ieeexplore.ieee.org/document/9420731/

Real-Time Multi-Modal Sensing and Feedback for Catheterization in Porcine Tissue.
Heunis, C. M.; Šuligoj, F.; Santos, C. F.; and Misra, S.
Sensors, 21(1): 273. January 2021. DOI: 10.3390/s21010273. URL: https://www.mdpi.com/1424-8220/21/1/273

Objective: In this study, we introduce a multi-modal sensing and feedback framework aimed at assisting clinicians during endovascular surgeries and catheterization procedures. This framework utilizes state-of-the-art imaging and sensing sub-systems to produce a 3D visualization of an endovascular catheter and surrounding vasculature without the need for intra-operative X-rays. Methods: The catheterization experiments within this study are conducted inside a porcine limb undergoing motions. A hybrid position-force controller of a robotically-actuated ultrasound (US) transducer for uneven porcine tissue surfaces is introduced. The tissue, vasculature, and catheter are visualized by integrated real-time US images, 3D surface imaging, and Fiber Bragg Grating (FBG) sensors. Results: During externally-induced limb motions, the vasculature and catheter can be reliably reconstructed at mean accuracies of 1.9±0.3 mm and 0.82±0.21 mm, respectively. Conclusions: The conventional use of intra-operative X-ray imaging to visualize instruments and vasculature in the human body can be reduced by employing improved diagnostic technologies that do not operate via ionizing radiation or nephrotoxic contrast agents. Significance: The presented multi-modal framework enables the radiation-free and accurate reconstruction of significant tissues and instruments involved in catheterization procedures.

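The hybrid position-force controller mentioned above keeps the robot-held ultrasound transducer pressed against an uneven surface while following a scan path. The paper's controller is not reproduced here; the sketch below is a generic, textbook-style hybrid law with an illustrative selection matrix and gains:

```python
import numpy as np

def hybrid_position_force_command(x, x_des, f, f_des, kp=1.0, kf=0.002, S=None):
    """Cartesian velocity command mixing position and force control.

    x, x_des : current / desired end-effector position (3-vectors, m)
    f, f_des : measured / desired contact force (3-vectors, N)
    S        : diagonal selection matrix; 1 = position-controlled axis,
               0 = force-controlled axis (here: force along z, position in x-y).
    All gains and the axis split are illustrative assumptions.
    """
    if S is None:
        S = np.diag([1.0, 1.0, 0.0])
    v_pos = kp * (x_des - x)          # proportional position error term
    v_frc = kf * (f_des - f)          # proportional force error term
    return S @ v_pos + (np.eye(3) - S) @ v_frc

# Example: hold 5 N along the probe axis while tracking an x-y scan waypoint.
v = hybrid_position_force_command(
    x=np.array([0.40, 0.10, 0.20]), x_des=np.array([0.42, 0.10, 0.20]),
    f=np.array([0.0, 0.0, 3.0]),    f_des=np.array([0.0, 0.0, 5.0]))
print(v)
```
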
2020 (7)

Accelerating Robot Trajectory Learning for Stochastic Tasks.
Vidaković, J.; Jerbić, B.; Sekoranja, B.; Švaco, M.; and Suligoj, F.
IEEE Access, 8: 71993–72006. 2020. DOI: 10.1109/ACCESS.2020.2986999. URL: https://ieeexplore.ieee.org/document/9062516/

Learning from demonstration provides ways to transfer knowledge and skills from humans to robots. Models based solely on learning from demonstration often have very good generalization capabilities but are not completely accurate when adapting to new scenarios. This happens especially when learning stochastic tasks because of the correspondence problem and unmodeled physical properties of tasks. On the other hand, reinforcement learning (RL) methods such as policy search have the capability to refine an initial skill through exploration, where the learning process is often very dependent on the initialization strategy and is efficient in finding only local solutions. These two approaches are, therefore, frequently combined. In this paper, we present how the iterative learning of tasks can be accelerated by a learning from demonstration (LfD) method based on the extraction of via-points. The paper provides an evaluation of the approach on two different primitive motion tasks.

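The acceleration described above comes from extracting via-points from demonstrated trajectories before iterative refinement. The paper's exact extraction criterion is not given here; as one plausible illustration, a Ramer-Douglas-Peucker-style reduction that keeps only points deviating from the current polyline by more than a tolerance:

```python
import numpy as np

def extract_via_points(traj, tol=0.005):
    """Recursively keep the points of `traj` (N x D array) that deviate from the
    straight segment between the current endpoints by more than `tol` (metres).
    Generic simplification heuristic, not the method from the paper."""
    start, end = traj[0], traj[-1]
    chord = end - start
    norm = np.linalg.norm(chord)
    if norm < 1e-12:
        dist = np.linalg.norm(traj - start, axis=1)
    else:
        u = chord / norm
        proj = (traj - start) @ u
        dist = np.linalg.norm(traj - start - np.outer(proj, u), axis=1)
    i = int(np.argmax(dist))
    if dist[i] <= tol or len(traj) < 3:
        return np.vstack([start, end])          # segment is straight enough
    left = extract_via_points(traj[: i + 1], tol)
    right = extract_via_points(traj[i:], tol)
    return np.vstack([left[:-1], right])        # drop the duplicated split point

# Example: a smooth demonstrated arc reduces to a handful of via-points.
t = np.linspace(0, np.pi, 200)
demo = np.column_stack([np.cos(t), np.sin(t), 0.02 * t])
print(extract_via_points(demo, tol=0.01).shape)
```
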
RONNA G4—Robotic Neuronavigation: A Novel Robotic Navigation Device for Stereotactic Neurosurgery.
Jerbić, B.; Švaco, M.; Chudy, D.; Šekoranja, B.; Šuligoj, F.; Vidaković, J.; Dlaka, D.; Vitez, N.; Župančić, I.; Drobilo, L.; Turković, M.; Žgaljić, A.; Kajtazi, M.; and Stiperski, I.
In Handbook of Robotic and Image-Guided Surgery, pages 599–625. Elsevier, 2020. DOI: 10.1016/B978-0-12-814245-5.00035-9. URL: https://linkinghub.elsevier.com/retrieve/pii/B9780128142455000359

Stereotactic Neuro-Navigation Phantom Designs: A Systematic Review.
Švaco, M.; Stiperski, I.; Dlaka, D.; Šuligoj, F.; Jerbić, B.; Chudy, D.; and Raguž, M.
Frontiers in Neurorobotics, 14: 549603. October 2020. DOI: 10.3389/fnbot.2020.549603. URL: https://www.frontiersin.org/articles/10.3389/fnbot.2020.549603/full

Intelligent Algorithms for Non-parametric Robot Calibration.
Turković, M.; Švaco, M.; and Jerbić, B.
In Galambos, P.; and Madani, K., editors, Proceedings of the International Conference on Robotics, Computer Vision and Intelligent Systems, pages 51–58, Budapest, Hungary, 2020. SCITEPRESS - Science and Technology Publications. DOI: 10.5220/0010176900510058. URL: https://www.scitepress.org/DigitalLibrary/Link.aspx?doi=10.5220/0010176900510058

Improving Markerless Registration Accuracy by Mapping Facial Deformation.
Žgaljić, A.; Švaco, M.; and Jerbić, B.
In 2020 43rd International Convention on Information, Communication and Electronic Technology (MIPRO), pages 1195–1199, Opatija, Croatia, September 2020. IEEE. DOI: 10.23919/MIPRO48935.2020.9245172. URL: https://ieeexplore.ieee.org/document/9245172/

Increasing the Accuracy of Robotic Neurosurgical Procedures Through Robot Calibration.
Drobilo, L.; Švaco, M.; and Jerbić, B.
In 2020 43rd International Convention on Information, Communication and Electronic Technology (MIPRO), pages 1180–1188, Opatija, Croatia, September 2020. IEEE. DOI: 10.23919/MIPRO48935.2020.9245233. URL: https://ieeexplore.ieee.org/document/9245233/

Task Dependent Trajectory Learning from Multiple Demonstrations Using Movement Primitives.
Vidaković, J.; Jerbić, B.; Šekoranja, B.; Švaco, M.; and Šuligoj, F.
In Berns, K.; and Görges, D., editors, Advances in Service and Industrial Robotics, volume 980, pages 275–282. Springer International Publishing, Cham, 2020. DOI: 10.1007/978-3-030-19648-6_32. URL: http://link.springer.com/10.1007/978-3-030-19648-6_32

2019 (1)

Learning from Demonstration Based on a Classification of Task Parameters and Trajectory Optimization.
Vidaković, J.; Jerbić, B.; Šekoranja, B.; Švaco, M.; and Šuligoj, F.
Journal of Intelligent & Robotic Systems. December 2019. DOI: 10.1007/s10846-019-01101-2. URL: http://link.springer.com/10.1007/s10846-019-01101-2

2018 (5)

Influence of the Localization Strategy on the Accuracy of a Neurosurgical Robot System.
Šuligoj, F.; Jerbić, B.; Šekoranja, B.; Vidaković, J.; and Švaco, M.
Transactions of FAMENA, 42(2): 27–38. June 2018. DOI: 10.21278/TOF.42203. URL: http://hrcak.srce.hr/203828

Precise navigation of surgical instruments is one of the most important features of autonomous surgical robots. In this paper, we introduce a concept of robot localization strategy and analyse its influence on the overall application error of a robot system for frameless stereotactic neurosurgery named RONNA. Localization strategies utilize specific angles at which the robot can approach a target point, orientations, and types of movement during the procedure of physical space fiducial marker localization and positioning to the target points. The localization strategies developed in this study are a neutral orientation strategy (NOS), an orientation correction strategy (OCS) and a joint displacement minimization strategy (JDMS). To evaluate the robot positioning performance with the localization strategies applied, we performed laboratory phantom measurements using a different number of fiducial markers in the registration procedure. When three, four, and five fiducial markers were used, the application error for the NOS was 1.571±0.256 mm, 1.397±0.283 mm, and 1.327±0.274 mm, and for the OCS, it was 0.429±0.133 mm, 0.284±0.068 mm, and 0.260±0.076 mm, respectively. The application error for the JDMS was 0.493±0.176 mm with four and 0.369±0.160 mm with five fiducial markers used.

Fully Automated Point-Based Robotic Neurosurgical Patient Registration Procedure.
Šuligoj, F.; Jerbić, B.; Švaco, M.; and Šekoranja, B.
International Journal of Simulation Modelling, 17(3): 458–471. September 2018. DOI: 10.2507/IJSIMM17(3)442. URL: http://www.ijsimm.com/Full_Papers/Fulltext2018/text17-3_458-471.pdf

In this study, we have introduced a framework for an automatic patient registration procedure using freely distributed fiducial markers within a robot application in neurosurgery. The localization procedures in the image space and in the physical space are fully automated. We have developed a novel algorithm for finding the point pair correspondence between freely distributed fiducial markers in the image and in the physical space. The algorithm introduces a similarity matrix to maximize the possibility of successful point pairing and to remove the potential outlier points. The correspondence algorithm has been tested in 900,000 computer simulations and also on the real data from five laboratory phantom CT scans and twelve clinical patient CT scans, which were paired with 1415 readings captured with an optical tracking system. Testing of simulated point scenarios showed that the correspondence algorithm has a higher percentage of success when a larger number of fiducial markers and a lower number of outlier points were present. In the 24055 tests on the clinical data, there has been a 100% success rate.

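The registration framework above hinges on finding point-pair correspondences between freely distributed fiducials in image space and physical space via a similarity matrix. One common way to score such candidate pairs is to compare each marker's sorted inter-marker distances, which are invariant to rigid motion; the sketch below illustrates that idea on synthetic data and is not the similarity matrix defined in the paper:

```python
import numpy as np
from scipy.optimize import linear_sum_assignment
from scipy.spatial.distance import cdist

def match_fiducials(image_pts, physical_pts, tol=1.0):
    """Pair two sets of 3-D fiducial coordinates (mm) using distance signatures.

    For each point, the sorted vector of distances to the other points in its own
    set is invariant to rigid motion, so similar signatures suggest corresponding
    markers. Hypothetical illustration only; outlier handling is omitted."""
    def signatures(pts):
        d = cdist(pts, pts)
        return np.sort(d, axis=1)[:, 1:]        # drop the zero self-distance

    sig_img, sig_phy = signatures(image_pts), signatures(physical_pts)
    n = min(sig_img.shape[1], sig_phy.shape[1])
    # Cost = mismatch between distance signatures; small cost = likely same marker.
    cost = cdist(sig_img[:, :n], sig_phy[:, :n], metric="cityblock")
    rows, cols = linear_sum_assignment(cost)
    return [(r, c) for r, c in zip(rows, cols) if cost[r, c] / n < tol]

# Example: the physical points are a rotated, shuffled copy of the image points.
rng = np.random.default_rng(1)
img = rng.uniform(0, 100, (5, 3))
R, _ = np.linalg.qr(rng.normal(size=(3, 3)))    # random orthogonal transform
perm = rng.permutation(5)
phy = img[perm] @ R.T
print(match_fiducials(img, phy))                # returned pairs (r, c) satisfy r == perm[c]
```
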
The Case of Industrial Robotics in Croatia.
Švaco, M.; Župančić, I.; Jerbić, B.; Vitez, N.; Šekoranja, B.; Šuligoj, F.; and Vidaković, J.
In 27th International Conference on Robotics in Alpe-Adria-Danube Region, RAAD 2018, Patras, Greece, 2018. Springer Berlin Heidelberg. DOI: 10.1007/978-3-030-00232-9_64. URL: https://link.springer.com/chapter/10.1007/978-3-030-00232-9_64

A Reinforcement Learning Based Framework for Robot Action Planning.
Švaco, M.; Jerbić, B.; Polančec, M.; Šuligoj, F.; Šekoranja, B.; and Vidaković, J.
In 27th International Conference on Robotics in Alpe-Adria-Danube Region, RAAD 2018, Patras, Greece, 2018. Springer Berlin Heidelberg. DOI: 10.1007/978-3-030-00232-9_52. URL: https://link.springer.com/chapter/10.1007/978-3-030-00232-9_52

\n \n\n \n \n \n \n \n \n Robot assisted laser scanner for 3D point cloud registration.\n \n \n \n \n\n\n \n Šekoranja, B.; Domjanić, F.; Jerbić, B.; Šuligoj, F.; and Vidaković, J.\n\n\n \n\n\n\n In Primošten, Croatia, 2018. \n \n\n\n\n
\n\n\n\n \n \n \"RobotPaper\n  \n \n\n \n\n \n link\n  \n \n\n bibtex\n \n\n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@inproceedings{sekoranja_robot_2018,\n\taddress = {Primošten, Croatia},\n\ttitle = {Robot assisted laser scanner for {3D} point cloud registration},\n\turl = {http://motsp.org/},\n\tauthor = {Šekoranja, Bojan and Domjanić, Filip and Jerbić, Bojan and Šuligoj, Filip and Vidaković, Josip},\n\tyear = {2018},\n}\n\n
\n
\n\n\n\n
\n\n\n\n\n\n
\n
\n\n
\n
\n  \n 2017\n \n \n (7)\n \n \n
\n
\n \n \n
\n \n\n \n \n \n \n \n \n Automated Marker Localization in the Planning Phase of Robotic Neurosurgery.\n \n \n \n \n\n\n \n Šuligoj, F.; Švaco, M.; Jerbić, B.; Šekoranja, B.; and Vidaković, J.\n\n\n \n\n\n\n IEEE Access, 5: 12265–12274. 2017.\n \n\n\n\n
\n\n\n\n \n \n \"AutomatedPaper\n  \n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n\n\n\n
\n
@article{suligoj_automated_2017,\n\ttitle = {Automated {Marker} {Localization} in the {Planning} {Phase} of {Robotic} {Neurosurgery}},\n\tvolume = {5},\n\tissn = {2169-3536},\n\turl = {http://ieeexplore.ieee.org/document/7954960/},\n\tdoi = {10.1109/ACCESS.2017.2718621},\n\turldate = {2017-07-26},\n\tjournal = {IEEE Access},\n\tauthor = {Šuligoj, Filip and Švaco, Marko and Jerbić, Bojan and Šekoranja, Bojan and Vidaković, Josip},\n\tyear = {2017},\n\tkeywords = {Adrian Žgaljić, CV, Novi\\_fantom, RM, h2020, prijava},\n\tpages = {12265--12274},\n}\n\n
\n
\n\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n \n Validation of Three KUKA Agilus Robots for Application in Neurosurgery.\n \n \n \n \n\n\n \n Švaco, M.; Koren, P.; Jerbić, B.; Vidaković, J.; Šekoranja, B.; and Šuligoj, F.\n\n\n \n\n\n\n In Ferraresi, C.; and Quaglia, G., editor(s), Advances in Service and Industrial Robotics, volume 49, pages 996–1006. Springer International Publishing, Torino, Italy, 2017.\n \n\n\n\n
\n\n\n\n \n \n \"ValidationPaper\n  \n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n \n \n \n \n \n \n\n\n\n
\n
@incollection{ferraresi_validation_2017,\n\taddress = {Torino, Italy},\n\ttitle = {Validation of {Three} {KUKA} {Agilus} {Robots} for {Application} in {Neurosurgery}},\n\tvolume = {49},\n\tisbn = {978-3-319-61275-1 978-3-319-61276-8},\n\turl = {http://link.springer.com/10.1007/978-3-319-61276-8_107},\n\turldate = {2017-07-28},\n\tbooktitle = {Advances in {Service} and {Industrial} {Robotics}},\n\tpublisher = {Springer International Publishing},\n\tauthor = {Švaco, Marko and Koren, Petar and Jerbić, Bojan and Vidaković, Josip and Šekoranja, Bojan and Šuligoj, Filip},\n\teditor = {Ferraresi, Carlo and Quaglia, Giuseppe},\n\tyear = {2017},\n\tdoi = {10.1007/978-3-319-61276-8_107},\n\tkeywords = {Adrian Žgaljić, RM, Stiperski\\_review1},\n\tpages = {996--1006},\n}\n\n
\n
\n\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n \n A Novel Robotic Neuronavigation System: RONNA G3.\n \n \n \n \n\n\n \n Švaco, M.; Šekoranja, B.; Šuligoj, F.; Vidaković, J.; Jerbić, B.; and Chudy, D.\n\n\n \n\n\n\n Strojniški vestnik - Journal of Mechanical Engineering. November 2017.\n \n\n\n\n
\n\n\n\n \n \n \"APaper\n  \n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n \n \n \n \n\n\n\n
\n
@article{svaco_novel_2017,\n\ttitle = {A {Novel} {Robotic} {Neuronavigation} {System}: {RONNA} {G3}},\n\tissn = {0039-2480},\n\tshorttitle = {A {Novel} {Robotic} {Neuronavigation} {System}},\n\turl = {http://en.sv-jme.eu/data/upload/2017/Clanki/2017_4649_Svaco_11_W.pdf},\n\tdoi = {10.5545/sv-jme.2017.4649},\n\turldate = {2017-11-30},\n\tjournal = {Strojniški vestnik - Journal of Mechanical Engineering},\n\tauthor = {Švaco, Marko and Šekoranja, Bojan and Šuligoj, Filip and Vidaković, Josip and Jerbić, Bojan and Chudy, Darko},\n\tmonth = nov,\n\tyear = {2017},\n\tkeywords = {CV, RM},\n}\n\n
\n
\n\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n \n Position planning for collaborating robots and its application in neurosurgery.\n \n \n \n \n\n\n \n Vidaković, J.; Jerbić, B.; Švaco, M.; Šuligoj, F.; and Šekoranja, B.\n\n\n \n\n\n\n Tehnicki vjesnik - Technical Gazette, 24(6). December 2017.\n \n\n\n\n
\n\n\n\n \n \n \"PositionPaper\n  \n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n  \n \n abstract \n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n \n \n\n\n\n
\n
@article{vidakovic_position_2017,\n\ttitle = {Position planning for collaborating robots and its application in neurosurgery},\n\tvolume = {24},\n\tissn = {1330-3651, 1848-6339},\n\turl = {http://hrcak.srce.hr/190166},\n\tdoi = {10.17559/TV-20170213110534},\n\tabstract = {Applications of robot manipulators in surgery are nowadays a very active field of research. Still, there are a number of technical problems when setting up and preparing robotic systems for various operating procedures. One of them is robot-patient placement. When placing robots with respect to known target working positions, it is crucial to ensure feasible positioning in which all required motions can be executed without kinematic or collision problems. A planning method for robot placement suitable for neurosurgical operations is presented in this paper. The planning method is based on a multi-objective cost function composed of criteria that balance dexterity properties with a novel collision-avoidance parameter. The planning approach is implemented and validated on a dual-arm robot setup.},\n\tnumber = {6},\n\turldate = {2017-12-05},\n\tjournal = {Tehnicki vjesnik - Technical Gazette},\n\tauthor = {Vidaković, Josip and Jerbić, Bojan and Švaco, Marko and Šuligoj, Filip and Šekoranja, Bojan},\n\tmonth = dec,\n\tyear = {2017},\n\tkeywords = {RM},\n}\n\n
\n
\n\n\n
\n Applications of robot manipulators in surgery are nowadays a very active field of research. Still, there are a number of technical problems when setting up and preparing robotic systems for various operating procedures. One of them is robot-patient placement. When placing robots with respect to known target working positions, it is crucial to ensure feasible positioning in which all required motions can be executed without kinematic or collision problems. A planning method for robot placement suitable for neurosurgical operations is presented in this paper. The planning method is based on a multi-objective cost function composed of criteria that balance dexterity properties with a novel collision-avoidance parameter. The planning approach is implemented and validated on a dual-arm robot setup.\n
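As a rough illustration of such a multi-objective placement cost, the sketch below combines a dexterity proxy with a clearance (collision-avoidance) penalty. It is not the authors' cost function: the weights, the reachability model and the planar obstacle representation are assumptions introduced only for this example.\n
# Minimal sketch of a weighted placement cost for a robot base (illustration
# only; all weights, limits and models below are assumed, not taken from the
# cited paper).
import numpy as np

def placement_cost(base_pose, targets, obstacles,
                   w_dexterity=1.0, w_clearance=1.0,
                   reach=0.9, min_clearance=0.15):
    """Score a candidate base position in the operating-room plane (lower is better).

    base_pose: (x, y) base position [m].
    targets:   (N, 2) target points the tool must reach [m].
    obstacles: (M, 2) obstacle points (patient frame, equipment) [m].
    """
    base = np.asarray(base_pose, dtype=float)
    d_targets = np.linalg.norm(np.asarray(targets, dtype=float) - base, axis=1)
    d_obstacles = np.linalg.norm(np.asarray(obstacles, dtype=float) - base, axis=1)

    if np.any(d_targets > reach):
        return np.inf  # at least one target is unreachable from this base pose

    # Dexterity proxy: penalise targets close to the edge of the reachable
    # range, where joint limits and singularities typically hurt manipulability.
    dexterity_penalty = float(np.mean((d_targets / reach) ** 2))

    # Collision-avoidance term: penalise poses that crowd the obstacles.
    clearance_penalty = float(np.sum(np.maximum(0.0, min_clearance - d_obstacles) / min_clearance))

    return w_dexterity * dexterity_penalty + w_clearance * clearance_penalty

# Usage sketch: evaluate a grid of candidate base positions and keep the best.
# candidates = [(x, y) for x in np.linspace(-1, 1, 21) for y in np.linspace(-1, 1, 21)]
# best = min(candidates, key=lambda p: placement_cost(p, targets, obstacles))
\n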
\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n \n Brain biopsy performed with the RONNA G3 system: a case study on using a novel robotic navigation device for stereotactic neurosurgery.\n \n \n \n \n\n\n \n Dlaka, D.; Švaco, M.; Chudy, D.; Jerbić, B.; Šekoranja, B.; Šuligoj, F.; Vidaković, J.; Almahariq, F.; and Romić, D.\n\n\n \n\n\n\n The International Journal of Medical Robotics and Computer Assisted Surgery,1–7. December 2017.\n \n\n\n\n
\n\n\n\n \n \n \"BrainPaper\n  \n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n \n \n \n \n \n \n\n\n\n
\n
@article{dlaka_brain_2017,\n\ttitle = {Brain biopsy performed with the {RONNA} {G3} system: a case study on using a novel robotic navigation device for stereotactic neurosurgery},\n\tissn = {14785951},\n\tshorttitle = {Brain biopsy performed with the {RONNA} {G3} system},\n\turl = {https://onlinelibrary.wiley.com/doi/10.1002/rcs.1884},\n\tdoi = {10.1002/rcs.1884},\n\tlanguage = {en},\n\turldate = {2017-12-13},\n\tjournal = {The International Journal of Medical Robotics and Computer Assisted Surgery},\n\tauthor = {Dlaka, Domagoj and Švaco, Marko and Chudy, Darko and Jerbić, Bojan and Šekoranja, Bojan and Šuligoj, Filip and Vidaković, Josip and Almahariq, Fadi and Romić, Dominik},\n\tmonth = dec,\n\tyear = {2017},\n\tkeywords = {RM, h2020, prijava},\n\tpages = {1--7},\n}\n\n
\n
\n\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n \n Experimental Evaluation of Parameters for Robotic Contouring Force Feedback Applications.\n \n \n \n \n\n\n \n Švaco, M.; Vitez, N.; Jerbić, B.; Šuligoj, F.; Šekoranja, B.; and Vidaković, J.\n\n\n \n\n\n\n In Dubrovnik, Croatia, July 2017. \n \n\n\n\n
\n\n\n\n \n \n \"ExperimentalPaper\n  \n \n\n \n\n \n link\n  \n \n\n bibtex\n \n\n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@inproceedings{svaco_experimental_2017,\n\taddress = {Dubrovnik, Croatia},\n\ttitle = {Experimental {Evaluation} of {Parameters} for {Robotic} {Contouring} {Force} {Feedback} {Applications}},\n\turl = {http://motsp.org/},\n\tauthor = {Švaco, Marko and Vitez, Nikola and Jerbić, Bojan and Šuligoj, Filip and Šekoranja, Bojan and Vidaković, Josip},\n\tmonth = jul,\n\tyear = {2017},\n}\n\n
\n
\n\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n Automated Marker Localization in the Planning Phase of Robotic Neurosurgery.\n \n \n \n\n\n \n Šuligoj, F.; Švaco, M.; Jerbić, B.; Šekoranja, B.; and Vidaković, J.\n\n\n \n\n\n\n In Sixth Croatian Computer Vision Workshop (CCVW 2017), Split, Croatia, September 2017. \n \n\n\n\n
\n\n\n\n \n\n \n\n \n link\n  \n \n\n bibtex\n \n\n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n \n \n \n \n\n\n\n
\n
@inproceedings{suligoj_automated_2017-1,\n\taddress = {Split, Croatia},\n\ttitle = {Automated {Marker} {Localization} in the {Planning} {Phase} of {Robotic} {Neurosurgery}},\n\tbooktitle = {Sixth {Croatian} {Computer} {Vision} {Workshop} ({CCVW} 2017)},\n\tauthor = {Šuligoj, Filip and Švaco, Marko and Jerbić, Bojan and Šekoranja, Bojan and Vidaković, Josip},\n\tmonth = sep,\n\tyear = {2017},\n\tkeywords = {Adrian Žgaljić, Novi\\_fantom},\n}\n\n
\n
\n\n\n\n
\n\n\n\n\n\n
\n
\n\n
\n
\n  \n 2016\n \n \n (5)\n \n \n
\n
\n \n \n
\n \n\n \n \n \n \n \n \n Simulation for robotic stereotactic neurosurgery.\n \n \n \n \n\n\n \n Vidaković, J.; Jerbić, B.; Šuligoj, F.; Švaco, M.; and Šekoranja, B.\n\n\n \n\n\n\n In pages 0562–0568, Mostar, BiH, 2016. DAAAM International Vienna\n \n\n\n\n
\n\n\n\n \n \n \"SimulationPaper\n  \n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n \n \n \n \n \n \n\n\n\n
\n
@inproceedings{vidakovic_simulation_2016,\n\taddress = {Mostar, BiH},\n\ttitle = {Simulation for robotic stereotactic neurosurgery},\n\turl = {http://www.daaam.info/Downloads/Pdfs/proceedings/proceedings_2016/083.pdf},\n\tdoi = {10.2507/27th.daaam.proceedings.083},\n\tpublisher = {DAAAM International Vienna},\n\tauthor = {Vidaković, Josip and Jerbić, Bojan and Šuligoj, Filip and Švaco, Marko and Šekoranja, Bojan},\n\tyear = {2016},\n\tkeywords = {RM, RONNA, Stiperski\\_review1},\n\tpages = {0562--0568},\n}\n\n
\n
\n\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n \n T-Phantom: a New Phantom Design for Neurosurgical Robotics.\n \n \n \n \n\n\n \n Švaco, M.; Jerbić, B.; Stiperski, I.; Dlaka, D.; Vidaković, J.; Šekoranja, B.; and Šuligoj, F.\n\n\n \n\n\n\n In Proceedings of the 27th DAAAM International Symposium, pages 266–270, Mostar, BiH, 2016. DAAAM International Vienna\n \n\n\n\n
\n\n\n\n \n \n \"T-Phantom:Paper\n  \n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n \n \n \n \n \n \n\n\n\n
\n
@inproceedings{svaco_t-phantom_2016,\n\taddress = {Mostar, BiH},\n\ttitle = {T-{Phantom}: a {New} {Phantom} {Design} for {Neurosurgical} {Robotics}},\n\tshorttitle = {T-{Phantom}},\n\turl = {http://www.daaam.info/Downloads/Pdfs/proceedings/proceedings_2016/039.pdf},\n\tdoi = {10.2507/27th.daaam.proceedings.039},\n\turldate = {2017-01-16},\n\tbooktitle = {Proceedings of the 27th {DAAAM} {International} {Symposium}},\n\tpublisher = {DAAAM International Vienna},\n\tauthor = {Švaco, Marko and Jerbić, Bojan and Stiperski, Ivan and Dlaka, Domagoj and Vidaković, Josip and Šekoranja, Bojan and Šuligoj, Filip},\n\tyear = {2016},\n\tkeywords = {Adrian Žgaljić, Novi\\_fantom, RM},\n\tpages = {266--270},\n}\n\n
\n
\n\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n \n A context-aware approach in realization of socially intelligent industrial robots.\n \n \n \n \n\n\n \n Stipancic, T.; Jerbic, B.; and Curkovic, P.\n\n\n \n\n\n\n Robotics and Computer-Integrated Manufacturing, 37: 79–89. February 2016.\n \n\n\n\n
\n\n\n\n \n \n \"APaper\n  \n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@article{stipancic_context-aware_2016,\n\ttitle = {A context-aware approach in realization of socially intelligent industrial robots},\n\tvolume = {37},\n\tissn = {07365845},\n\turl = {https://linkinghub.elsevier.com/retrieve/pii/S0736584515000812},\n\tdoi = {10.1016/j.rcim.2015.07.002},\n\tlanguage = {en},\n\turldate = {2021-01-06},\n\tjournal = {Robotics and Computer-Integrated Manufacturing},\n\tauthor = {Stipancic, Tomislav and Jerbic, Bojan and Curkovic, Petar},\n\tmonth = feb,\n\tyear = {2016},\n\tpages = {79--89},\n}\n\n
\n
\n\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n \n Simulation for robotic stereotactic neurosurgery.\n \n \n \n \n\n\n \n Vidaković, J.; Jerbić, B.; Šuligoj, F.; Švaco, M.; and Šekoranja, B.\n\n\n \n\n\n\n In Osijek, Croatia, October 2016. University of Zagreb Faculty of Electrical Engineering and Computing\n \n\n\n\n
\n\n\n\n \n \n \"SimulationPaper\n  \n \n\n \n\n \n link\n  \n \n\n bibtex\n \n\n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n \n \n \n \n\n\n\n
\n
@inproceedings{vidakovic_simulation_2016-1,\n\taddress = {Osijek, Croatia},\n\ttitle = {Simulation for robotic stereotactic neurosurgery},\n\turl = {https://www.fer.unizg.hr/crv/ccvw2016/program},\n\tpublisher = {University of Zagreb Faculty of Electrical Engineering and Computing},\n\tauthor = {Vidaković, Josip and Jerbić, Bojan and Šuligoj, Filip and Švaco, Marko and Šekoranja, Bojan},\n\tmonth = oct,\n\tyear = {2016},\n\tkeywords = {RONNA, Stiperski\\_review1},\n}\n\n
\n
\n\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n \n T-Phantom: a New Phantom Design for Neurosurgical Robotics.\n \n \n \n \n\n\n \n Švaco, M.; Jerbić, B.; Stiperski, I.; Dlaka, D.; Vidaković, J.; Šekoranja, B.; and Šuligoj, F.\n\n\n \n\n\n\n In October 2016. University of Zagreb Faculty of Electrical Engineering and Computing\n \n\n\n\n
\n\n\n\n \n \n \"T-Phantom:Paper\n  \n \n\n \n\n \n link\n  \n \n\n bibtex\n \n\n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n \n \n \n \n\n\n\n
\n
@inproceedings{svaco_t-phantom:_2016,\n\ttitle = {T-{Phantom}: a {New} {Phantom} {Design} for {Neurosurgical} {Robotics}},\n\turl = {https://www.fer.unizg.hr/crv/ccvw2016/program},\n\turldate = {2017-01-16},\n\tpublisher = {University of Zagreb Faculty of Electrical Engineering and Computing},\n\tauthor = {Švaco, Marko and Jerbić, Bojan and Stiperski, Ivan and Dlaka, Domagoj and Vidaković, Josip and Šekoranja, Bojan and Šuligoj, Filip},\n\tmonth = oct,\n\tyear = {2016},\n\tkeywords = {Adrian Žgaljić, Novi\\_fantom},\n}\n\n
\n
\n\n\n\n
\n\n\n\n\n\n
\n
\n\n
\n
\n  \n 2015\n \n \n (2)\n \n \n
\n
\n \n \n
\n \n\n \n \n \n \n \n Robotic application in neurosurgery using intelligent visual and haptic interaction.\n \n \n \n\n\n \n Jerbić, B.; Nikolić, G.; Chudy, D.; Švaco, M.; and Šekoranja, B.\n\n\n \n\n\n\n International Journal of Simulation Modelling, 14(1): 71–84. 2015.\n \n\n\n\n
\n\n\n\n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n\n\n\n
\n
@article{jerbic_robotic_2015,\n\ttitle = {Robotic application in neurosurgery using intelligent visual and haptic interaction},\n\tvolume = {14},\n\tdoi = {10.2507/IJSIMM14(1)7.290},\n\tnumber = {1},\n\tjournal = {International Journal of Simulation Modelling},\n\tauthor = {Jerbić, Bojan and Nikolić, Gojko and Chudy, Darko and Švaco, Marko and Šekoranja, Bojan},\n\tyear = {2015},\n\tkeywords = {Case report 1, Fantomi, Marker\\_localizer\\_1, RM, RONNA, Stiperski\\_review1, Škec, Škec01},\n\tpages = {71--84},\n}\n\n
\n
\n\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n \n Medical applicability of a low-cost industrial robot arm guided with an optical tracking system.\n \n \n \n \n\n\n \n Šuligoj, F.; Jerbić, B.; Švaco, M.; Šekoranja, B.; Mihalinec, D.; and Vidaković, J.\n\n\n \n\n\n\n In Intelligent Robots and Systems (IROS), 2015 IEEE/RSJ International Conference on, pages 3785–3790, 2015. IEEE\n \n\n\n\n
\n\n\n\n \n \n \"MedicalPaper\n  \n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n\n\n\n
\n
@inproceedings{suligoj_medical_2015,\n\ttitle = {Medical applicability of a low-cost industrial robot arm guided with an optical tracking system},\n\turl = {http://ieeexplore.ieee.org/xpls/abs_all.jsp?arnumber=7353908},\n\tdoi = {10.1109/IROS.2015.7353908},\n\turldate = {2016-06-14},\n\tbooktitle = {Intelligent {Robots} and {Systems} ({IROS}), 2015 {IEEE}/{RSJ} {International} {Conference} on},\n\tpublisher = {IEEE},\n\tauthor = {Šuligoj, Filip and Jerbić, Bojan and Švaco, Marko and Šekoranja, Bojan and Mihalinec, Dominik and Vidaković, Josip},\n\tyear = {2015},\n\tkeywords = {Adrian Žgaljić, Fantomi, Marker\\_localizer\\_1, Novi\\_fantom, RM, RONNA},\n\tpages = {3785--3790},\n}\n\n
\n
\n\n\n\n
\n\n\n\n\n\n
\n
\n\n
\n
\n  \n 2014\n \n \n (1)\n \n \n
\n
\n \n \n
\n \n\n \n \n \n \n \n \n Calibration of an Industrial Robot Using a Stereo Vision System.\n \n \n \n \n\n\n \n Švaco, M.; Šekoranja, B.; Šuligoj, F.; and Jerbić, B.\n\n\n \n\n\n\n In Procedia Engineering, volume 69, pages 459–463, 2014. \n \n\n\n\n
\n\n\n\n \n \n \"CalibrationPaper\n  \n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n \n \n \n \n \n \n\n\n\n
\n
@inproceedings{svaco_calibration_2014,\n\ttitle = {Calibration of an {Industrial} {Robot} {Using} a {Stereo} {Vision} {System}},\n\tvolume = {69},\n\turl = {http://linkinghub.elsevier.com/retrieve/pii/S1877705814002586},\n\tdoi = {10.1016/j.proeng.2014.03.012},\n\tlanguage = {en},\n\turldate = {2015-01-09},\n\tbooktitle = {Procedia {Engineering}},\n\tauthor = {Švaco, Marko and Šekoranja, Bojan and Šuligoj, Filip and Jerbić, Bojan},\n\tyear = {2014},\n\tkeywords = {Adrian Žgaljić, RM, kalibracija},\n\tpages = {459--463},\n}\n\n
\n
\n\n\n\n
\n\n\n\n\n\n
\n
\n\n
\n
\n  \n 2013\n \n \n (1)\n \n \n
\n
\n \n \n
\n \n\n \n \n \n \n \n \n Bayesian Approach to Robot Group Control.\n \n \n \n \n\n\n \n Stipancic, T.; Jerbić, B.; and Curkovic, P.\n\n\n \n\n\n\n In Ao, S.; and Gelman, L., editor(s), Electrical Engineering and Intelligent Systems, volume 130, of Lecture Notes in Electrical Engineering, pages 109–119. Springer New York, New York, NY, 2013.\n \n\n\n\n
\n\n\n\n \n \n \"BayesianPaper\n  \n \n\n \n\n \n link\n  \n \n\n bibtex\n \n\n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@incollection{ao_bayesian_2013,\n\taddress = {New York, NY},\n\tseries = {Lecture {Notes} in {Electrical} {Engineering}},\n\ttitle = {Bayesian {Approach} to {Robot} {Group} {Control}},\n\tvolume = {130},\n\tisbn = {978-1-4614-2316-4 978-1-4614-2317-1},\n\turl = {http://link.springer.com/10.1007/978-1-4614-2317-1},\n\turldate = {2014-09-07},\n\tbooktitle = {Electrical {Engineering} and {Intelligent} {Systems}},\n\tpublisher = {Springer New York},\n\tauthor = {Stipancic, Tomislav and Jerbić, Bojan and Curkovic, Petar},\n\teditor = {Ao, Sio-Iong and Gelman, Len},\n\tyear = {2013},\n\tpages = {109--119},\n}\n\n
\n
\n\n\n\n
\n\n\n\n\n\n
\n
\n\n
\n
\n  \n 2011\n \n \n (2)\n \n \n
\n
\n \n \n
\n \n\n \n \n \n \n \n \n A multiagent framework for industrial robotic applications.\n \n \n \n \n\n\n \n Švaco, M.; Šekoranja, B.; and Jerbić, B.\n\n\n \n\n\n\n In Procedia Computer Science, volume 6, pages 291–296, 2011. \n \n\n\n\n
\n\n\n\n \n \n \"APaper\n  \n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@inproceedings{svaco_multiagent_2011,\n\ttitle = {A multiagent framework for industrial robotic applications},\n\tvolume = {6},\n\turl = {http://linkinghub.elsevier.com/retrieve/pii/S1877050911005199},\n\tdoi = {10.1016/j.procs.2011.08.054},\n\tlanguage = {en},\n\turldate = {2015-01-09},\n\tbooktitle = {Procedia {Computer} {Science}},\n\tauthor = {Švaco, Marko and Šekoranja, Bojan and Jerbić, Bojan},\n\tyear = {2011},\n\tpages = {291--296},\n}\n\n
\n
\n\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n \n Autonomous Planning Framework for Distributed Multiagent Robotic Systems.\n \n \n \n \n\n\n \n Švaco, M.; Šekoranja, B.; and Jerbić, B.\n\n\n \n\n\n\n In Camarinha-Matos, L. M., editor(s), Technological Innovation for Sustainability, volume 349, pages 147–154. Springer Berlin Heidelberg, Berlin, Heidelberg, 2011.\n \n\n\n\n
\n\n\n\n \n \n \"AutonomousPaper\n  \n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@incollection{camarinha-matos_autonomous_2011,\n\taddress = {Berlin, Heidelberg},\n\ttitle = {Autonomous {Planning} {Framework} for {Distributed} {Multiagent} {Robotic} {Systems}},\n\tvolume = {349},\n\tisbn = {978-3-642-19169-5 978-3-642-19170-1},\n\turl = {http://link.springer.com/10.1007/978-3-642-19170-1_16},\n\turldate = {2017-07-27},\n\tbooktitle = {Technological {Innovation} for {Sustainability}},\n\tpublisher = {Springer Berlin Heidelberg},\n\tauthor = {Švaco, Marko and Šekoranja, Bojan and Jerbić, Bojan},\n\teditor = {Camarinha-Matos, Luis M.},\n\tyear = {2011},\n\tdoi = {10.1007/978-3-642-19170-1_16},\n\tpages = {147--154},\n}\n
\n
\n\n\n\n
\n\n\n\n\n\n
\n
\n\n
\n
\n  \n 2010\n \n \n (1)\n \n \n
\n
\n \n \n
\n \n\n \n \n \n \n \n \n Cooperative Coevolution Applied to Dual-arm Robot Motion Planning.\n \n \n \n \n\n\n \n Ćurković, P.; Jerbić, B.; and Stipančić, T.\n\n\n \n\n\n\n IFAC Proceedings Volumes, 43(4): 132–137. 2010.\n \n\n\n\n
\n\n\n\n \n \n \"CooperativePaper\n  \n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@article{curkovic_cooperative_2010,\n\ttitle = {Cooperative {Coevolution} {Applied} to {Dual}-arm {Robot} {Motion} {Planning}},\n\tvolume = {43},\n\tissn = {1474-6670},\n\turl = {https://linkinghub.elsevier.com/retrieve/pii/S1474667015301300},\n\tdoi = {10.3182/20100701-2-PT-4011.00024},\n\tlanguage = {en},\n\tnumber = {4},\n\turldate = {2021-01-06},\n\tjournal = {IFAC Proceedings Volumes},\n\tauthor = {Ćurković, Petar and Jerbić, Bojan and Stipančić, Tomislav},\n\tyear = {2010},\n\tpages = {132--137},\n}\n\n
\n
\n\n\n\n
\n\n\n\n\n\n
\n
\n\n
\n
\n  \n 2008\n \n \n (1)\n \n \n
\n
\n \n \n
\n \n\n \n \n \n \n \n Hybridization of adaptive genetic algorithm and ART 1 neural architecture for efficient path planning of a mobile robot.\n \n \n \n\n\n \n Ćurković, P.; Jerbić, B.; and Stipančić, T.\n\n\n \n\n\n\n Transactions of FAMENA, 32(2): 11–21. 2008.\n \n\n\n\n
\n\n\n\n \n\n \n\n \n link\n  \n \n\n bibtex\n \n\n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n\n\n\n
\n
@article{curkovic_hybridization_2008,\n\ttitle = {Hybridization of adaptive genetic algorithm and {ART} 1 neural architecture for efficient path planning of a mobile robot},\n\tvolume = {32},\n\tnumber = {2},\n\tjournal = {Transactions of FAMENA},\n\tauthor = {Ćurković, Petar and Jerbić, Bojan and Stipančić, Tomislav},\n\tyear = {2008},\n\tkeywords = {da, genetic algorithm, mobile robot, neural network, optimization, path planning},\n\tpages = {11--21},\n}\n\n
\n
\n\n\n\n
\n\n\n\n\n\n
\n
\n\n
\n
\n  \n 2007\n \n \n (1)\n \n \n
\n
\n \n \n
\n \n\n \n \n \n \n \n \n Honey-bees optimization algorithm applied to path planning problem.\n \n \n \n \n\n\n \n Curkovic, P.\n\n\n \n\n\n\n International Journal of Simulation Modelling, 6(3): 154–164. September 2007.\n \n\n\n\n
\n\n\n\n \n \n \"Honey-beesPaper\n  \n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@article{curkovic_honey-bees_2007,\n\ttitle = {Honey-bees optimization algorithm applied to path planning problem},\n\tvolume = {6},\n\tissn = {1726-4529},\n\turl = {http://www.ijsimm.com/Full_Papers/Fulltext2007/text6-3_154-164.pdf},\n\tdoi = {10.2507/IJSIMM06(3)2.087},\n\tnumber = {3},\n\turldate = {2021-01-06},\n\tjournal = {International Journal of Simulation Modelling},\n\tauthor = {Curkovic, P.},\n\tmonth = sep,\n\tyear = {2007},\n\tpages = {154--164},\n}\n\n
\n
\n\n\n\n
\n\n\n\n\n\n
\n
\n\n
\n
\n  \n 2005\n \n \n (2)\n \n \n
\n
\n \n \n
\n \n\n \n \n \n \n \n Intelligent robotic assembly by machine vision and CAD integration.\n \n \n \n\n\n \n Jerbić, B.; Vranješ, B.; Hrman, M.; and Kunica, Z.\n\n\n \n\n\n\n Transactions of FAMENA, 29(1): 17–30. 2005.\n \n\n\n\n
\n\n\n\n \n\n \n\n \n link\n  \n \n\n bibtex\n \n\n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n\n\n\n
\n
@article{jerbic_intelligent_2005,\n\ttitle = {Intelligent robotic assembly by machine vision and {CAD} integration},\n\tvolume = {29},\n\tnumber = {1},\n\tjournal = {Transactions of FAMENA},\n\tauthor = {Jerbić, Bojan and Vranješ, Božo and Hrman, Miljenko and Kunica, Zoran},\n\tyear = {2005},\n\tkeywords = {artificial intelligence, assembly, automation, da, machine vision, robotics},\n\tpages = {17--30},\n}\n\n
\n
\n\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n \n Behavior based mobile robot in automatic assembly.\n \n \n \n \n\n\n \n Jerbić, B.; Vranjes, B.; and Kunica, Z.\n\n\n \n\n\n\n In Assembly and Task Planning: From Nano to Macro Assembly and Manufacturing, 2005.(ISATP 2005). The 6th IEEE International Symposium on, pages 25–31, 2005. IEEE\n \n\n\n\n
\n\n\n\n \n \n \"BehaviorPaper\n  \n \n\n \n\n \n link\n  \n \n\n bibtex\n \n\n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@inproceedings{jerbic_behavior_2005,\n\ttitle = {Behavior based mobile robot in automatic assembly},\n\turl = {http://ieeexplore.ieee.org/xpls/abs_all.jsp?arnumber=1511445},\n\turldate = {2014-04-18},\n\tbooktitle = {Assembly and {Task} {Planning}: {From} {Nano} to {Macro} {Assembly} and {Manufacturing}, 2005.({ISATP} 2005). {The} 6th {IEEE} {International} {Symposium} on},\n\tpublisher = {IEEE},\n\tauthor = {Jerbić, Bojan and Vranjes, B. and Kunica, Zoran},\n\tyear = {2005},\n\tpages = {25--31},\n}\n\n
\n
\n\n\n\n
\n\n\n\n\n\n
\n
\n\n
\n
\n  \n 2003\n \n \n (1)\n \n \n
\n
\n \n \n
\n \n\n \n \n \n \n \n Intelligent robotic assembly by active vision system integrated with CAD.\n \n \n \n\n\n \n Jerbić, B.; Hrman, M.; and Vranješ, B.\n\n\n \n\n\n\n ,321–324. 2003.\n \n\n\n\n
\n\n\n\n \n\n \n\n \n link\n  \n \n\n bibtex\n \n\n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n\n\n\n
\n
@article{jerbic_intelligent_2003,\n\ttitle = {Intelligent robotic assembly by active vision system integrated with {CAD}},\n\tauthor = {Jerbić, Bojan and Hrman, Miljenko and Vranješ, Božo},\n\teditor = {Vivancos, Joan V. and Puerta, Ferran S. and Ekinović, Sabahudin and Brdarević, Safet},\n\tyear = {2003},\n\tkeywords = {CAD, PDM, artificial intelligence, artificial neural network, assembly automation, da, machine vision, robotic assembly},\n\tpages = {321--324},\n}\n\n
\n
\n\n\n\n
\n\n\n\n\n\n
\n
\n\n\n\n\n
\n\n\n \n\n \n \n \n \n\n
\n"}; document.write(bibbase_data.data);