Embed this publication list on your own page using any of the snippets below.

JavaScript (the list is rendered in place of the script tag):

<script src="https://bibbase.org/service/mendeley/bfbbf840-4c42-3914-a463-19024f50b30c?jsonp=1"></script>

PHP (fetches the rendered list server-side and prints it into the page):

<?php
// Fetch the rendered publication list from the BibBase service and output it.
$contents = file_get_contents("https://bibbase.org/service/mendeley/bfbbf840-4c42-3914-a463-19024f50b30c");
echo $contents;
?>

iframe:

<iframe src="https://bibbase.org/service/mendeley/bfbbf840-4c42-3914-a463-19024f50b30c"></iframe>

For more details see the documentation.
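For orientation, a minimal sketch of a host page using the JavaScript variant is shown below; the page title and surrounding markup are illustrative assumptions, and only the script tag comes from the snippet above.

<!DOCTYPE html>
<html>
<head>
  <title>Publications</title>
</head>
<body>
  <h1>Publications</h1>
  <!-- BibBase injects the rendered publication list at the position of this tag -->
  <script src="https://bibbase.org/service/mendeley/bfbbf840-4c42-3914-a463-19024f50b30c?jsonp=1"></script>
</body>
</html>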
@article{Tamas2021,
  title = {Embedded real-time people detection and tracking with time-of-flight camera},
  author = {Tamas, Levente and Cozma, Andrei},
  year = {2021},
  pages = {10},
  doi = {10.1117/12.2586057},
  abstract = {People recognition is a relevant subset of the generic image based recognition task with many possible application areas such as security, surveillance, human-robot interaction or, recently, social security in a pandemic context. In this work we present a light-weight recognition pipeline for time-of-flight cameras based on deep learning techniques tailored to this specific type of camera with registered infrared and depth images. By combining the maturity of 2D image based recognition techniques with custom depth sensing we achieved effective solutions for a number of relevant industrial applications. In particular, our focus was on automatic door-control and people counting applications.}
}
@article{Frohlich2021,
  title = {Absolute Pose Estimation of Central Cameras Using Planar Regions},
  author = {Frohlich, Robert and Tamas, Levente and Kato, Zoltan},
  journal = {IEEE Transactions on Pattern Analysis and Machine Intelligence},
  year = {2021},
  volume = {43},
  number = {2},
  pages = {377-391},
  publisher = {IEEE},
  keywords = {Lidar, Pose estimation, calibration, data fusion, omnidirectional camera, registration},
  doi = {10.1109/TPAMI.2019.2931577},
  abstract = {A novel method is proposed for the absolute pose estimation of a central 2D camera with respect to 3D depth data without the use of any dedicated calibration pattern or explicit point correspondences. The proposed method has no specific assumption about the data source: Plain depth information is expected from the 3D sensing device and a central camera is used to capture the 2D images. Both the perspective and omnidirectional central cameras are handled within a single generic camera model. Pose estimation is formulated as a 2D-3D nonlinear shape registration task which is solved without point correspondences or complex similarity metrics. It relies on a set of corresponding planar regions, and the pose parameters are obtained by solving an overdetermined system of nonlinear equations. The efficiency and robustness of the proposed method were confirmed on both large scale synthetic data and on real data acquired from various types of sensors.}
}
@article{Blaga2021,
  title = {Augmented reality integration into MES for connected workers},
  author = {Blaga, Andreea and Militaru, Cristian and Mezei, Ady Daniel and Tamas, Levente},
  journal = {Robotics and Computer-Integrated Manufacturing},
  year = {2021},
  volume = {68},
  pages = {102057},
  publisher = {Elsevier Ltd},
  url = {https://doi.org/10.1016/j.rcim.2020.102057},
  keywords = {Augmented reality, Manufacturing execution system, Robotics},
  doi = {10.1016/j.rcim.2020.102057},
  abstract = {In this paper an overview of recent augmented reality (AR) solutions for manufacturing execution systems (MES) is presented. The first part of the paper describes the challenges of integrating AR into MES, while the second part focuses on custom AR solutions. The last part of the paper highlights the advantages of the proposed approaches, as well as real life experimental results. The experiments are described in detail, and the code for these applications is publicly available on the authors' website.}
}
@article{Molnar2021,
  title = {Feature Pyramid Network Based Efficient Normal Estimation and Filtering for Time-of-Flight Depth Cameras},
  author = {Molnár, Szilárd and Kelényi, Benjamin and Tamas, Levente},
  journal = {Sensors},
  year = {2021},
  volume = {21},
  number = {18},
  pages = {6257},
  month = sep,
  publisher = {Multidisciplinary Digital Publishing Institute},
  url = {https://www.mdpi.com/1424-8220/21/18/6257},
  keywords = {FPN, depth image, filtering, normal estimation, point cloud},
  doi = {10.3390/S21186257},
  abstract = {In this paper, an efficient normal estimation and filtering method for depth images acquired by Time-of-Flight (ToF) cameras is proposed. The method is based on a common feature pyramid networks (FPN) architecture. The normal estimation method is called ToFNest, and the filtering method ToFClean. Both of these low-level 3D point cloud processing methods start from the 2D depth images, projecting the measured data into the 3D space and computing a task-specific loss function. Despite their simplicity, the methods prove to be efficient in terms of robustness and runtime. In order to validate the methods, extensive evaluations on public and custom datasets were performed. Compared with the state-of-the-art methods, the ToFNest and ToFClean algorithms are faster by an order of magnitude without losing precision on public datasets.}
}
@article{Sandor2020,
  title = {The ClujUAV student competition: A corridor navigation challenge with autonomous drones},
  author = {Sándor, Csanád and Pável, Szabolcs and Wieser, Erik and Blaga, Andreea and Boda, Péter and Fülöp, Andrea-Orsolya and Ursache, Adrian and Zöld, Attila and Kopacz, Anikó and Lázár, Botond and Szabó, Károly and Tasnádi, Zoltán and Trinfa, Botond and Csató, Lehel and Tegzes, Dan Marius and Pop, M. Leontin and Tarziu, Raluca and Zaha, Mihai and Grigorescu, Sorin and Busoniu, Lucian and Raica, Paula and Tamas, Levente},
  journal = {IFAC-PapersOnLine},
  year = {2020},
  volume = {53},
  number = {2},
  pages = {17511-17517},
  keywords = {computer vision, control, education, robotics, student competition, uav},
  doi = {10.1016/j.ifacol.2020.12.2656}
}
@inproceedings{Marcu2020,
  title = {Navigation of Outdoor Mobile Robots with Extended Grid Algorithms},
  author = {Marcu, Cosmin and Tamas, Levente},
  booktitle = {2020 22nd IEEE International Conference on Automation, Quality and Testing, Robotics - THETA, AQTR 2020 - Proceedings},
  year = {2020},
  keywords = {grid locator, grid mapping, locator systems, mobile robots, navigation},
  doi = {10.1109/AQTR49680.2020.9129936},
  abstract = {Grid Locator systems describe locations anywhere in the world and are mainly used in radio communication to transmit the station locations of the correspondents. This paper proposes to extend the use of the Maidenhead Locator System to mobile robot navigation by transforming GPS coordinates into higher precision extended grid squares. The experimental part of the paper was validated using real GPS data from a commercial drone. The data, as well as the source for the reproduction of the experimental part, is available at the homepage of the authors.}
}
@inproceedings{Mezei2019,
  title = {Sorting objects from a conveyor belt using active perception with a POMDP model},
  author = {Mezei, A.-D. and Tamas, L. and Busoniu, L.},
  booktitle = {2019 18th European Control Conference, ECC 2019},
  year = {2019},
  doi = {10.23919/ECC.2019.8796028},
  abstract = {We consider an application where a robot must sort objects traveling on a conveyor belt into different classes. The detector and classifier work on 3D point clouds, but are of course not fully accurate, so they sometimes misclassify objects. We describe this task using a novel model in the formalism of partially observable Markov decision processes. With the objective of finding the correct classes with a small number of observations, we then apply a state-of-the-art POMDP solver to plan a sequence of observations from different viewpoints, as well as the moments when the robot decides the class of the current object (which automatically triggers sorting and moving the conveyor belt). In a first version, observations are carried out only for the object at the end of the conveyor belt, after which we extend the framework to observe multiple objects. The performance of both versions is analyzed in simulations, in which we study the ratio of correct to incorrect classifications and the total number of steps to sort a batch of objects. Real-life experiments with a Baxter robot are then provided with publicly shared code and data at http://community.clujit.ro/display/TEAM/Active+perception.}
}
@article{Tamas2019,
  title = {Smart CPS: vertical integration overview and user story with a cobot},
  author = {Tamas, Levente and Murar, Mircea},
  journal = {International Journal of Computer Integrated Manufacturing},
  year = {2019},
  volume = {32},
  number = {4-5},
  pages = {504-521},
  keywords = {Industry 4.0, MES, Smart CPS, cobot},
  doi = {10.1080/0951192X.2018.1535196},
  abstract = {In this paper, an overview and a demonstration of the vertical integration of manufacturing enterprise layers are described by implementing a manufacturing execution system (MES). In the first part of the paper, the details regarding the MES implementation are described, while in the second part, the use case-specific insights are highlighted. The presented use case contains each important step of a production line, involving a collaborative Baxter-type robot and state-of-the-art tools for MES implementation. The cobot-involved use case is relevant and generic enough in the context of Industry 4.0, offering a good overview of a typical vertical integration use case which can be generalised and applied to manufacturing scenarios. The paper ends with the lessons learned from the vertical integration process as well as future directions which can be followed in such a context.}
}
@inproceedings{Blaga2018,
  title = {Augmented Reality for Digital Manufacturing},
  author = {Blaga, Andreea and Tamas, Levente},
  booktitle = {MED 2018 - 26th Mediterranean Conference on Control and Automation},
  year = {2018},
  pages = {173-178},
  keywords = {Assembly, Augmented reality, Human-robot collaboration, Manufacturing},
  doi = {10.1109/MED.2018.8443028},
  abstract = {The focus of this paper is on enhancing the possibilities of manufacturing operations by taking advantage of augmented reality (AR) technology and highlighting its benefits by implementing a product manufacturing case. The recent appearance of augmented headsets, such as the Microsoft HoloLens, opens more opportunities for creating innovative solutions. After introducing concepts of smart manufacturing, improvements regarding human-robot collaboration in assembly tasks are presented. The developed scenario is based on the integration of AR, a cobot, a see-through device, a digital twin and an algorithm for assembly visualization. This approach promises a compelling interaction of the 3D real and virtual units, so that the operator can work in a more intuitive environment. The methodology has been implemented on an assembly case to investigate users' enhanced perception of the virtual world while cooperating with the robot.}
}
@inproceedings{Fulop2018,
  title = {Lessons learned from lightweight CNN based object recognition for mobile robots},
  author = {Fulop, Andrea Orsolya and Tamas, Levente},
  booktitle = {2018 IEEE International Conference on Automation, Quality and Testing, Robotics, AQTR 2018 - THETA 21st Edition, Proceedings},
  year = {2018},
  pages = {1-5},
  doi = {10.1109/AQTR.2018.8402778},
  abstract = {The focus of this paper is on the comparison of multiple neural network frameworks and their usage in 2D/3D robot perception applications. Numerous frameworks exist for this task, including the recent deep learning based ones, which allow us to develop a perception system with the chosen parameters for object recognition. In this paper we analyzed the possible solutions, including different Convolutional Neural Network (CNN) variants. The advantages of 2D CNNs linked with 3D features lead to another approach, which can be extended further. The leading idea is to create a custom object recognition method that takes advantage of a 2D system's precision and speed, but can efficiently incorporate 3D features. This way, the disturbances specific to each method separately can be minimized. On the other hand, this is a lightweight solution that is expected to be tolerated by less powerful processing units as well. By placing 3D bounding boxes around detected objects, the convenience of the 2D detection methods can be integrated into a 3D metric world.}
}
@article{Mitrea2018,
  title = {Manufacturing execution system specific data analysis - use case with a cobot},
  author = {Mitrea, Delia and Tamas, Levente},
  journal = {IEEE Access},
  year = {2018},
  volume = {6},
  pages = {50245-50259},
  publisher = {IEEE},
  keywords = {Intelligent manufacturing systems, data mining, industry applications, intelligent robots, prediction models},
  doi = {10.1109/ACCESS.2018.2869346},
  abstract = {The purpose of this research is to analyze and upgrade the performance of the Baxter intelligent robot through data mining methods. The case study belongs to the robotics domain, being integrated in the context of manufacturing execution systems and product lifecycle management, aiming to overcome the lack of vertical integration inside a company. The explored data comprises the parameters registered during the activities of the Baxter intelligent robot, for example the movement of the left or right arm. First, the state of the art concerning data mining methods is presented, and then the solution is detailed by describing the data mining techniques. The final purpose was that of improving the speed and robustness of the robot in production. Specific techniques, and sometimes their combinations, are experimented with and assessed in order to perform root cause analysis; then powerful classifiers and metaclassifiers, as well as deep learning methods in optimum configurations, are analyzed for prediction. The experimental results are described and discussed in detail, then the conclusions and further development possibilities are formulated. Based on the experiments, important relationships among the robot parameters were discovered, the obtained accuracy for predicting the target variables being always above 96%.}
}
@inproceedings{Frohlich2017,
  title = {Region based fusion of 3D and 2D visual data for Cultural Heritage objects},
  author = {Frohlich, R. and Kato, Z. and Tremeau, A. and Tamas, L. and Shabo, S. and Waksman, Y.},
  booktitle = {Proceedings - International Conference on Pattern Recognition},
  year = {2017},
  doi = {10.1109/ICPR.2016.7899996},
  abstract = {A workflow is proposed for Cultural Heritage applications in which the fusion of 3D and 2D visual data is required. Using data acquired by cheap, standard devices, like a 3D scanner having a low quality 2D camera in it, and a high resolution DSLR camera, one can produce a high quality color calibrated 3D model for documentation purposes. The proposed processing workflow combines a novel region based calibration method with an ICP alignment used for refining the results. It works on 3D data, which do not necessarily contain intensity information, and 2D images of a calibrated camera. These can be acquired with commercial 3D scanners and color cameras without any special constraint. In contrast with the typical solutions, the proposed method is not using any calibration patterns or markers. The efficiency and robustness of the proposed calibration method has been confirmed on both synthetic and real data.}
}
@article{Tamas2016,
  title = {Analysis and a home assistance application of online AEMS2 planning},
  author = {Tamas, Levente and Busoniu, Lucian},
  year = {2016},
  pages = {5013-5019}
}
@inproceedings{Bulezyuk2015,
  title = {Visual tracking with a small scale aircraft},
  author = {Bulezyuk, A. and Tamás, L.},
  booktitle = {CINTI 2015 - 16th IEEE International Symposium on Computational Intelligence and Informatics, Proceedings},
  year = {2015},
  doi = {10.1109/CINTI.2015.7382948},
  abstract = {This paper presents experimental results of localization and tracking from ongoing research work with a small scale aircraft. The proposed architecture is based on communication between an embedded smartphone and a ground station. Besides the standard sensors provided by the smartphone, such as an accelerometer, gyroscope, GPS and camera, special flight related sensors were used, for example the pitot tube. By means of data fusion based on the available sensors and the data extracted from image processing, static and dynamic object tracking was performed in both offline and online experiments. The tracking was performed using an extended Kalman filter. A computer aided system identification was performed to compute the transfer function along the horizontal axes of the UAV, intended for further design of the corresponding stabilization controller.}
}
@inproceedings{Mezei2016,
  title = {Active perception for object manipulation},
  author = {Mezei, A.-D. and Tamas, L.},
  booktitle = {Proceedings - 2016 IEEE 12th International Conference on Intelligent Computer Communication and Processing, ICCP 2016},
  year = {2016},
  doi = {10.1109/ICCP.2016.7737158},
  abstract = {Active perception makes an important step towards the integration of sensing into the planning phase of object manipulation. Closing the perception-planning loop iteratively helps to reduce the uncertainty in sensing and planning, offering better scene parsing and object manipulation. In this paper we demonstrate the active perception concept on an industrial-like scene with different pipes which are sensed with a depth camera. The pose information from the camera is used by a 7 degree of freedom robotic arm mounted on a mobile base in order to perform the planning for object manipulation purposes. For the whole active perception pipeline we have successfully implemented and tested the scene parsing using object part decoupling and the planning with the robot arm.}
}
@inproceedings{Militaru2016,
  title = {Object handling in cluttered indoor environment with a mobile manipulator},
  author = {Militaru, C. and Mezei, A.-D. and Tamas, L.},
  booktitle = {2016 20th IEEE International Conference on Automation, Quality and Testing, Robotics, AQTR 2016 - Proceedings},
  year = {2016},
  doi = {10.1109/AQTR.2016.7501382},
  abstract = {This paper presents an indoor object handling application using a 7 degree of freedom lightweight robotic arm and a commercial depth sensor. Motion planning for such a complex arm is demanding, especially in cluttered scenes, like the majority of human environments. In order to cope with this task we designed and implemented the direct and inverse kinematics for the arm as well as the perception related modules using a depth sensor. Both the simulation and experimental parts are carried out in a common framework which ensures a light integration of the different planning and spatial perception parts.}
}
@inproceedings{Kato2015,
  title = {Relative Pose Estimation and Fusion of 2D Spectral and 3D Lidar Images},
  author = {Kato, Zoltan and Tamas, Levente},
  booktitle = {Computational Color Imaging Workshop (CCIW) - Invited talk},
  year = {2015},
  pages = {1-10},
  publisher = {Springer, LNCS},
  address = {St-Etienne}
}
@incollection{Tamas2015,
  title = {Relative pose estimation and fusion of omnidirectional and Lidar cameras},
  author = {Tamas, L. and Frohlich, R. and Kato, Z.},
  booktitle = {Lecture Notes in Computer Science (including subseries Lecture Notes in Artificial Intelligence and Lecture Notes in Bioinformatics)},
  year = {2015},
  volume = {8926},
  keywords = {Fusion, Lidar, Omnidirectional camera, Pose estimation},
  doi = {10.1007/978-3-319-16181-5_49},
  abstract = {This paper presents a novel approach for the extrinsic parameter estimation of omnidirectional cameras with respect to a 3D Lidar coordinate frame. The method works without a specific setup and calibration targets, using only a pair of 2D-3D data. Pose estimation is formulated as a 2D-3D nonlinear shape registration task which is solved without point correspondences or complex similarity metrics. It relies on a set of corresponding regions, and pose parameters are obtained by solving a small system of nonlinear equations. The efficiency and robustness of the proposed method was confirmed on both synthetic and real data in urban environments.}
}
@incollection{Frohlich2015,
  title = {Homography estimation between omnidirectional cameras without point correspondences},
  author = {Frohlich, R. and Tamás, L. and Kato, Z.},
  booktitle = {Studies in Systems, Decision and Control},
  year = {2015},
  volume = {42},
  doi = {10.1007/978-3-319-26327-4_6},
  abstract = {This chapter presents a novel approach for homography estimation between omnidirectional cameras. The solution is formulated in terms of a system of nonlinear equations. Each equation is generated by integrating a nonlinear function over corresponding image regions on the surface of the unit spheres representing the cameras. The method works without point correspondences or complex similarity metrics, using only a pair of corresponding planar regions extracted from the omnidirectional images. The relative pose of the cameras can be factorized from the estimated homography. The efficiency and robustness of the proposed method has been confirmed on both synthetic and real data.}
}
@incollection{Pall2015,
  title = {Vision-based quadcopter navigation in structured environments},
  author = {Páll, E. and Tamás, L. and Buşoniu, L.},
  booktitle = {Studies in Systems, Decision and Control},
  year = {2015},
  volume = {42},
  doi = {10.1007/978-3-319-26327-4_11},
  abstract = {Quadcopters are small-sized aerial vehicles with four fixed-pitch propellers. These robots have great potential since they are inexpensive with affordable hardware, and with appropriate software solutions they can accomplish assignments autonomously. They could perform daily tasks in the future, such as package deliveries, inspections, and rescue missions. In this chapter, after an extensive introduction to object recognition and tracking, we present an approach for vision-based autonomous flying of an unmanned quadcopter in various structured environments, such as hallway-like scenes. The desired flight direction is obtained visually, based on perspective clues, in particular the vanishing point. This point is the intersection of parallel lines viewed in perspective, and is sought on the front camera image. For stable guidance the position of the vanishing point is filtered with different types of probabilistic filters, such as the linear Kalman filter, extended Kalman filter, unscented Kalman filter and particle filter. These are compared in terms of tracking error and computational time. A switching control method is implemented. Each of the modes focuses on controlling only one state variable at a time and the objective is to center the vanishing point on the image. The selected filtering and control methods are tested successfully, both in simulation and in real indoor and outdoor environments.}
}
@article{Tamas2014c,
  title = {3D semantic interpretation for robot perception inside office environments},
  author = {Tamas, Levente and Goron, Lucian Cosmin},
  journal = {Engineering Applications of Artificial Intelligence},
  year = {2014},
  volume = {32},
  pages = {76-87},
  month = jun,
  url = {http://www.sciencedirect.com/science/article/pii/S0952197614000566},
  doi = {10.1016/j.engappai.2014.03.001},
  abstract = {Making sense out of human indoor environments is an essential feature for robots. The paper at hand presents a system for semantic interpretation of our surrounding indoor environments such as offices and kitchens. The perception and the interpretation of the measured data are essential tasks for any intelligent system. There are different techniques for processing 3D point clouds. The majority of them include acquisition, iterative registration, segmentation, or classification stages. We describe a generic pipeline for indoor data processing and semantic information extraction. The proposed pipeline is validated using several data sets collected using different 3D sensing devices.}
}
@inproceedings{Frohlich2014,
  title = {Homography Estimation between Omnidirectional Cameras without Point Correspondences},
  author = {Frohlich, Robert and Tamas, Levente and Kato, Zoltan},
  booktitle = {ICRA OMNIVIS Workshop},
  year = {2014},
  pages = {1-6}
}
@inproceedings{Pali2014,
  title = {Railway track following with the AR.Drone using vanishing point detection},
  author = {Pall, E. and Mathe, K. and Tamas, L. and Busoniu, L.},
  booktitle = {Automation, Quality and Testing, Robotics},
  year = {2014},
  url = {http://ieeexplore.ieee.org/xpls/abs_all.jsp?arnumber=6857870}
}
@inproceedings{Tamas2014,
  title = {Relative Pose Estimation and Fusion of Omnidirectional and Lidar Cameras},
  author = {Tamas, Levente and Frohlich, Robert and Kato, Zoltan},
  booktitle = {ECCV Workshops},
  year = {2014},
  pages = {1-12},
  keywords = {fusion, lidar, omnidirectional camera, pose estimation}
}
@inproceedings{Tamas2014a,
  title = {All-Season 3D Object Recognition Challenges},
  author = {Tamás, L. and Jensen, Bjoern},
  booktitle = {ICRA Workshop on Semantic Perception},
  year = {2014},
  volume = {2},
  url = {http://www.tu-chemnitz.de/etit/proaut/ICRAWorkshopChangingEnvironments/ICRA_2014_Visual_Place_Recognition_in_Changing_Environments/paper/Tamas.pdf}
}
@inproceedings{Bu2012,
  title = {Optimistic planning for the near-optimal control of general nonlinear systems with continuous transition distributions},
  author = {Buşoniu, L. and Tamás, L.},
  booktitle = {IFAC Proceedings Volumes (IFAC-PapersOnline)},
  year = {2014},
  volume = {19},
  keywords = {Artificial intelligence, Nonlinear predictive control, Optimal control, Planning},
  abstract = {Optimistic planning is an optimal control approach from artificial intelligence, which can be applied in receding horizon. It works for very general nonlinear dynamics and cost functions, and its analysis establishes a tight relationship between computation invested and near-optimality. However, there is no optimistic planning algorithm that searches for closed-loop solutions in stochastic problems with continuous transition distributions. Such transitions are essential in control, where they arise e.g. due to continuous disturbances. Existing algorithms only search for open-loop input sequences, which are suboptimal. We therefore propose a closed-loop algorithm that discretizes the continuous transition distribution into sigma points, and call it sigma-optimistic planning. Assuming the error introduced by sigma-point discretization is bounded, we analyze the solution returned, showing that it is near-optimal. The algorithm is evaluated in simulation experiments, where it performs better than a state-of-the-art open-loop planning technique; a certainty-equivalence approach also works well.}
}
@inproceedings{Tamas2014b,
  title = {Robustness analysis of 3D feature descriptors for object recognition using a time-of-flight camera},
  author = {Tamas, L. and Jensen, B.},
  booktitle = {2014 22nd Mediterranean Conference on Control and Automation, MED 2014},
  year = {2014},
  publisher = {IEEE},
  address = {Palermo},
  doi = {10.1109/MED.2014.6961508},
  abstract = {In this paper we propose to analyze the characteristics of feature descriptors in terms of robustness against typical disturbances in the context of an object recognition pipeline for depth data with intensity information. In terms of robustness the focus was on occlusion handling, segmentation errors, sub-sampling of the data, as well as the presence of Gaussian noise. For this analysis we considered a set of real life data captured in an indoor environment using a time-of-flight sensor returning depth and intensity data. According to our test results, the intensity spin estimator and the ensemble of shape functions type of feature descriptors proved to be the most suitable variants for such object recognition tasks.}
}
@inproceedings{Tamas2009,
  title = {Sensor data fusion based position estimation techniques in mobile robot navigation},
  author = {Tamas, L. and Majdik, A. and Lazea, G.},
  booktitle = {2009 European Control Conference, ECC 2009},
  year = {2009},
  abstract = {This paper gives an overview of position estimation techniques based on typical measurement devices used in mobile robot applications, focusing on the fusion of information from several sensors. It presents different extensions of Kalman filter estimators and analyses the performance of these algorithms, comparing estimation techniques such as the Extended and Unscented Kalman filters and particle methods. Furthermore, modelling details and stereo vision algorithms are introduced. The second part presents the results of the odometric and ultrasonic measurement techniques and of those based on stereo vision.}
}
@inproceedings{Marton2014,
  title = {Communication delay and jitter influence on bilateral teleoperation},
  author = {Marton, L. and Szanto, Z. and Vajda, T. and Haller, P. and Sandor, H. and Szabo, T. and Tamas, L.},
  booktitle = {2014 22nd Mediterranean Conference on Control and Automation, MED 2014},
  year = {2014},
  keywords = {Bilateral control, Jitter, Passivity, Teleoperation},
  doi = {10.1109/MED.2014.6961534},
  abstract = {One of the most challenging problems during the design of bilateral teleoperation systems implemented over Wide Area Networks is to assure stability and transparency in the presence of time varying communication delay between the master and the slave. In this work the influence of delay and jitter on passivity controller - passivity observer type stabilizers is analyzed. This control design approach does not require explicit knowledge of the communication delay. In spite of this, it is shown that time varying delay influences the teleoperation performance. Firstly, the relation between the delay and the energy dissipated by the controller is studied. Secondly, it is shown that in the presence of jitter the passivity controller can induce undesired oscillations in the received signals, affecting the transparency of the teleoperation. To handle this problem an extended passivity controller is proposed which assures better performance in the presence of jitter. Experimental measurements, including teleoperation experiments between two universities situated in different countries, were performed to validate the theoretical results.}
}
@incollection{Boochs2014,
  title = {Colour and space in cultural heritage: Key questions in 3D optical documentation of material culture for conservation, study and preservation},
  author = {Boochs, F. and Bentkowska-Kafel, A. and Degrigny, C. and Karaszewski, M. and Karmacharya, A. and Kato, Z. and Picollo, M. and Sitnik, R. and Trémeau, A. and Tsiafaki, D. and Tamas, L.},
  booktitle = {Lecture Notes in Computer Science (including subseries Lecture Notes in Artificial Intelligence and Lecture Notes in Bioinformatics)},
  year = {2014},
  volume = {8740},
  keywords = {3D documentation, Art conservation, COSCHKR, Cultural heritage, Fusion, Interdisciplinary collaboration, Multispectral imaging, Optical measurement, Visualisation},
  doi = {10.1007/978-3-319-13695-0},
  abstract = {The paper introduces some key interdisciplinary questions concerning the development of optical measuring techniques and electronic imaging applied to the documentation and presentation of artefacts, as identified through the work of Colour and Space in Cultural Heritage (www.COSCH.info), a transdomain European Action (TD1201) in the area of Materials, Physics and Nanosciences (MPNS) supported, since 2013, by the European Cooperation in Science and Technology (http://www.cost.eu/domains_actions/mpns/Actions/TD1201). Some 125 international researchers and professionals participate in COSCH activities, which have been organised around six main subjects: (1) spectral object documentation; (2) spatial object documentation; (3) algorithms and procedures; (4) analysis and restoration of cultural heritage surfaces and objects; (5) visualisation of cultural heritage objects and its dissemination; and (6) the semantic development of the COSCH Knowledge Representation. The authors outline and illustrate the approaches adopted by COSCH. They indicate future work that is needed to resolve the identified scientific, technical and semantic questions, as well as challenges of interdisciplinary communication, to ensure a wider adoption of specialist technologies and enhanced standards in the 3D documentation of material cultural heritage, being a basis for its understanding, conservation, restoration, long-term preservation, study, presentation and wide dissemination.}
}
@article{tamas20133d,
  title = {3D Non-Contact Building Survey Technique},
  author = {Tamas, Levente and Lazea, Gheorghe},
  journal = {Advanced Engineering Forum},
  year = {2013},
  volume = {8},
  pages = {584-592},
  month = oct,
  url = {http://rrg.utcluj.ro/~levente/download/private/pub/Tamas, Lazea - 2013 - 3D Non-Contact Building Survey Technique.pdf}
}
@inproceedings{Tamas2013,
  title = {Targetless calibration of a lidar - perspective camera pair},
  author = {Tamas, L. and Kato, Z.},
  booktitle = {Proceedings of the IEEE International Conference on Computer Vision},
  year = {2013},
  keywords = {Calibration, Camera, Lidar, Registration},
  doi = {10.1109/ICCVW.2013.92},
  url = {http://www.cv-foundation.org/openaccess/content_iccv_workshops_2013/W21/papers/Tamas_Targetless_Calibration_of_2013_ICCV_paper.pdf},
  abstract = {A novel method is proposed for the calibration of a camera - 3D lidar pair without the use of any special calibration pattern or point correspondences. The proposed method has no specific assumption about the data source: plain depth information is expected from the lidar scan and a simple perspective camera is used for the 2D images. The calibration is solved as a 2D-3D registration problem using a minimum of one (for extrinsic) or two (for intrinsic-extrinsic) planar regions visible in both cameras. The registration is then traced back to the solution of a non-linear system of equations which directly provides the calibration parameters between the bases of the two sensors. The method has been tested on a large set of synthetic lidar-camera image pairs as well as on real data acquired in outdoor environments.}
}
@inproceedings{marcu2012video,
  title = {Video based control of a 6 degrees-of-freedom serial manipulator},
  author = {Marcu, C. and Herle, S. and Tamas, L. and Lazea, Gh.},
  booktitle = {Automation Quality and Testing Robotics (AQTR), 2012 IEEE International Conference on},
  year = {2012},
  pages = {417-421},
  organization = {IEEE}
}
@inproceedings{levente2012development,
  title = {Development of an Operation Training System - A Case Study},
  author = {Duca, Monica and Tamas, Levente},
  booktitle = {14th IFAC Symposium on Information Control Problems in Manufacturing},
  year = {2012},
  volume = {14},
  number = {1},
  pages = {1622-1627},
  month = may,
  organization = {IFAC},
  url = {http://www.ifac-papersonline.net/Detailed/54031.html}
}
@inproceedings{tamas20123d,
  title = {3D map building with mobile robots},
  author = {Tamas, Levente and Goron, Lucian Cosmin},
  booktitle = {Control \& Automation (MED), 2012 20th Mediterranean Conference on},
  year = {2012},
  pages = {134-139},
  month = jul,
  publisher = {IEEE},
  doi = {10.1109/MED.2012.6265627},
  url = {http://rrg.utcluj.ro/~levente/download/private/pub/Tamas, Goron - 2012 - 3D map building with mobile robots.pdf}
}
@inproceedings{goron2012classification,
  title = {Classification within indoor environments using 3D perception},
  author = {Goron, Lucian Cosmin and Tamas, Levente and Lazea, Gheorghe},
  booktitle = {Automation Quality and Testing Robotics (AQTR), 2012 IEEE International Conference on},
  year = {2012},
  pages = {400-405},
  month = may,
  publisher = {IEEE},
  doi = {10.1109/AQTR.2012.6237743},
  url = {http://rrg.utcluj.ro/~levente/download/private/pub/Goron, Tamas, Lazea - 2012 - Classification within indoor environments using 3D perception.pdf}
}
@inproceedings{tamas2012heterogeneous,
  title = {Heterogeneous feature based correspondence estimation},
  author = {Tamas, Levente and Majdik, Andras},
  booktitle = {Multisensor Fusion and Integration for Intelligent Systems (MFI), 2012 IEEE Conference on},
  year = {2012},
  pages = {89-94},
  month = sep,
  publisher = {IEEE},
  doi = {10.1109/MFI.2012.6343042},
  url = {http://rrg.utcluj.ro/~levente/download/private/pub/Tamas, Majdik - 2012 - Heterogeneous feature based correspondence estimation.pdf}
}
@article{ title = {Mapping Techniques for AMPLE, an Autonomous Security Mobile Robot}, type = {article}, year = {2011}, keywords = {Autonomous mobile robots,Lidar based mapping,Stereo vision}, pages = {18-24}, volume = {13}, id = {40adf4ae-0bb2-3862-873b-542eddae524a}, created = {2014-01-13T12:54:30.000Z}, file_attached = {false}, profile_id = {bfbbf840-4c42-3914-a463-19024f50b30c}, last_modified = {2017-03-22T19:08:01.262Z}, read = {false}, starred = {false}, authored = {true}, confirmed = {true}, hidden = {false}, citation_key = {majdik2011mapping}, source_type = {article}, private_publication = {false}, abstract = {AMPLE is the acronym of the project Autonomous Mapping of Polluted Environments. This paper introduces the concept of the project and presents preliminary results of the ongoing research, whose final goal is an autonomous mobile robot capable of performing various tasks in unknown environments. To achieve this, the mapping problem is unavoidable. The paper presents a visual mapping system that detects the same Speeded Up Robust Features (SURF) on the stereo image pairs in order to obtain three-dimensional point clouds at every robot location. The algorithm tracks the displacement of identical features viewed from different positions to recover the robot positions. A laser-based mapping algorithm that can detect dynamic objects in the robot's field of view is also presented. Results from experiments in an indoor office environment are shown.}, bibtype = {article}, author = {Majdik, Andras L and Szoke, Istvan and Popa, Mircea and Tamas, Levente and Lazea, Gheorghe}, journal = {Journal of Control Engineering and Applied Informatics}, number = {1} }
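The stereo mapping described in the entry above recovers 3D points by triangulating features matched across the stereo pair. A minimal sketch of that triangulation step follows; it assumes a rectified pair, and the focal length, baseline and matched pixel coordinates are hypothetical illustration values (the SURF detection itself is omitted):

# Minimal sketch of stereo triangulation as used in feature-based mapping.
# Assumes a rectified stereo pair: matched features differ only in the
# x (column) coordinate. Focal length, baseline and the matches below are
# hypothetical illustration values, not taken from the AMPLE system.
import numpy as np

def triangulate(matches_left, matches_right, f, baseline, cx, cy):
    """Back-project matched pixel pairs into 3D camera coordinates."""
    pts = []
    for (ul, vl), (ur, _) in zip(matches_left, matches_right):
        d = ul - ur                      # disparity (pixels)
        if d <= 0:
            continue                     # invalid match, skip it
        z = f * baseline / d             # depth from disparity
        x = (ul - cx) * z / f
        y = (vl - cy) * z / f
        pts.append((x, y, z))
    return np.array(pts)

left  = [(320.0, 240.0), (400.0, 200.0)]
right = [(300.0, 240.0), (370.0, 200.0)]
print(triangulate(left, right, f=500.0, baseline=0.12, cx=320.0, cy=240.0))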
@article{ title = {Active Perception for Mobile Manipulation}, type = {article}, year = {2011}, id = {62fc1ca1-a42a-36da-aae0-20ce6dd0d39e}, created = {2016-12-19T10:38:35.000Z}, file_attached = {true}, profile_id = {bfbbf840-4c42-3914-a463-19024f50b30c}, last_modified = {2017-03-22T19:08:01.262Z}, read = {false}, starred = {false}, authored = {true}, confirmed = {true}, hidden = {false}, private_publication = {false}, bibtype = {article}, author = {Asfour, Tamim}, journal = {Intelligence}, number = {May} }
@inproceedings{ title = {New approach in solving the kidnapped robot problem}, type = {inproceedings}, year = {2010}, pages = {1-6}, institution = {VDE}, id = {c4f0e32d-60a4-3445-9dbf-6e17b414683a}, created = {2014-01-13T12:54:30.000Z}, file_attached = {false}, profile_id = {bfbbf840-4c42-3914-a463-19024f50b30c}, last_modified = {2017-03-22T19:08:01.262Z}, read = {false}, starred = {false}, authored = {true}, confirmed = {true}, hidden = {false}, citation_key = {majdik2010new}, source_type = {inproceedings}, private_publication = {false}, bibtype = {inproceedings}, author = {Majdik, Andras and Popa, Mircea and Tamas, Levente and Szoke, Istvan and Lazea, Gheorghe}, booktitle = {Robotics (ISR), 2010 41st International Symposium on and 2010 6th German Conference on Robotics (ROBOTIK)} }
@article{ title = {Lidar and Vision Based People Detection and Tracking}, type = {article}, year = {2010}, pages = {30-35}, volume = {12}, id = {051878d9-0ae8-3075-b524-71362a9d87c1}, created = {2014-01-13T12:54:30.000Z}, file_attached = {true}, profile_id = {bfbbf840-4c42-3914-a463-19024f50b30c}, last_modified = {2017-03-22T19:08:01.262Z}, read = {false}, starred = {false}, authored = {true}, confirmed = {true}, hidden = {false}, citation_key = {tamas2010lidar}, source_type = {article}, private_publication = {false}, bibtype = {article}, author = {Tamas, L and Popa, M and Lazea, Gh and Szoke, I and Majdik, A}, journal = {Journal of Control Engineering and Applied Informatics}, number = {2} }
@inproceedings{ title = {Pattern Recognition and Tracking Dynamic Objects with LIDAR}, type = {inproceedings}, year = {2010}, pages = {1-6}, institution = {VDE}, id = {f2bc6980-5e59-3846-afdd-e98c36768548}, created = {2014-01-13T12:54:30.000Z}, file_attached = {true}, profile_id = {bfbbf840-4c42-3914-a463-19024f50b30c}, last_modified = {2017-03-22T19:08:01.262Z}, read = {false}, starred = {false}, authored = {true}, confirmed = {true}, hidden = {false}, citation_key = {tamas2010pattern}, source_type = {inproceedings}, private_publication = {false}, bibtype = {inproceedings}, author = {Tamas, Levente and Lazea, Gheorghe}, booktitle = {Robotics (ISR), 2010 41st International Symposium on and 2010 6th German Conference on Robotics (ROBOTIK)} }
@inproceedings{ title = {3D Laser scanning system and 3D segmentation of urban scenes}, type = {inproceedings}, year = {2010}, pages = {1-5}, volume = {1}, websites = {http://rrg.utcluj.ro/~levente/download/private/pub/Goron et al. - 2010 - 3D Laser scanning system and 3D segmentation of urban scenes.pdf}, institution = {IEEE}, id = {b943531b-e785-39a6-839d-4fb4e828e5f5}, created = {2014-01-13T12:54:30.000Z}, file_attached = {false}, profile_id = {bfbbf840-4c42-3914-a463-19024f50b30c}, last_modified = {2017-03-22T19:08:01.262Z}, read = {false}, starred = {false}, authored = {true}, confirmed = {true}, hidden = {false}, citation_key = {goron20103d}, source_type = {inproceedings}, private_publication = {false}, bibtype = {inproceedings}, author = {Goron, Lucian Cosmin and Tamas, Levente and Reti, Istvan and Lazea, Gheorghe}, booktitle = {Automation Quality and Testing Robotics (AQTR), 2010 IEEE International Conference on} }
@inproceedings{ title = {Laser and vision based map building techniques for mobile robot navigation}, type = {inproceedings}, year = {2010}, pages = {1-6}, volume = {1}, institution = {IEEE}, id = {c809c45d-9c52-395d-ab8f-91c7a607d9ec}, created = {2014-01-13T12:54:30.000Z}, file_attached = {false}, profile_id = {bfbbf840-4c42-3914-a463-19024f50b30c}, last_modified = {2017-03-22T19:08:01.262Z}, read = {false}, starred = {false}, authored = {true}, confirmed = {true}, hidden = {false}, citation_key = {majdik2010laser}, source_type = {inproceedings}, private_publication = {false}, bibtype = {inproceedings}, author = {Majdik, A L and Szoke, I and Tamas, L and Popa, M and Lazea, Gh}, booktitle = {Automation Quality and Testing Robotics (AQTR), 2010 IEEE International Conference on} }
@inproceedings{ title = {3D graphical simulation of an articulated serial manipulator based on kinematic models}, type = {inproceedings}, year = {2010}, pages = {143-148}, institution = {IEEE}, id = {c50fa464-91b1-3619-a12e-822ca4147fb2}, created = {2014-01-13T12:54:30.000Z}, file_attached = {false}, profile_id = {bfbbf840-4c42-3914-a463-19024f50b30c}, last_modified = {2017-03-22T19:08:01.262Z}, read = {false}, starred = {false}, authored = {true}, confirmed = {true}, hidden = {false}, citation_key = {marcu20103d}, source_type = {inproceedings}, private_publication = {false}, bibtype = {inproceedings}, author = {Marcu, C and Lazea, Gh and Herle, S and Robotin, R and Tamas, L}, booktitle = {Robotics in Alpe-Adria-Danube Region (RAAD), 2010 IEEE 19th International Workshop on} }
@inproceedings{ title = {Visual odometer system to build feature based maps for mobile robot navigation}, type = {inproceedings}, year = {2010}, pages = {1200-1205}, institution = {IEEE}, id = {a61189c1-d99b-319d-a35c-233f7fee02e3}, created = {2014-01-13T12:54:30.000Z}, file_attached = {false}, profile_id = {bfbbf840-4c42-3914-a463-19024f50b30c}, last_modified = {2017-03-22T19:08:01.262Z}, read = {false}, starred = {false}, authored = {true}, confirmed = {true}, hidden = {false}, citation_key = {majdik2010visual}, source_type = {inproceedings}, private_publication = {false}, bibtype = {inproceedings}, author = {Majdik, Andras L and Tamas, Levente and Popa, Mircea and Szoke, Istvan and Lazea, Gheorghe}, booktitle = {Control & Automation (MED), 2010 18th Mediterranean Conference on} }
@article{ title = {Laser and vision based object detection for mobile robots}, type = {article}, year = {2010}, keywords = {Adaboost and GMM classifiers,Bayesian systems,Kalman filter}, volume = {11}, id = {e8d98cd6-8db5-3dbf-8c75-03b7627a349c}, created = {2018-02-19T06:34:53.015Z}, file_attached = {false}, profile_id = {bfbbf840-4c42-3914-a463-19024f50b30c}, last_modified = {2018-02-19T06:34:53.015Z}, read = {false}, starred = {false}, authored = {true}, confirmed = {false}, hidden = {false}, private_publication = {false}, abstract = {This paper presents a multi-sensor architecture for detecting moving persons based on information from the laser and vision systems. Object detection is performed relative to the estimated robot position. A Gaussian Mixture Model (GMM) classifier is used in the laser space and an AdaBoost classifier for the vision data; their outputs are combined using the Bayesian rule.}, bibtype = {article}, author = {Tamas, L. and Lazea, G. and Popa, M. and Szoke, I. and Majdik, A.}, journal = {International Journal of Mechanics and Control}, number = {1} }
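The fusion step described in the abstract above, combining the laser (GMM) and vision (AdaBoost) detector outputs with the Bayesian rule, can be sketched as follows; the prior and likelihood values are hypothetical, and conditional independence of the two sensors is assumed:

import numpy as np

# Sketch: fuse two independent detector outputs with Bayes' rule.
# p_laser and p_vision hold the per-sensor likelihoods
# (P(z|person), P(z|background)); the numbers are hypothetical,
# not taken from the paper.
def bayes_fuse(prior, p_laser, p_vision):
    """Posterior P(person | laser, vision), assuming the two
    sensor readings are conditionally independent given the class."""
    num = prior * p_laser[0] * p_vision[0]
    den = num + (1.0 - prior) * p_laser[1] * p_vision[1]
    return num / den

print(bayes_fuse(prior=0.3, p_laser=(0.8, 0.2), p_vision=(0.7, 0.3)))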
@inproceedings{ title = {An effective method for people detection in grayscale image sequences}, type = {inproceedings}, year = {2009}, pages = {181-184}, institution = {IEEE}, id = {e7b7a2e9-5f16-310a-8eef-d084130b6687}, created = {2014-01-13T12:54:30.000Z}, file_attached = {false}, profile_id = {bfbbf840-4c42-3914-a463-19024f50b30c}, last_modified = {2017-03-22T19:08:01.262Z}, read = {false}, starred = {false}, authored = {true}, confirmed = {true}, hidden = {false}, citation_key = {popa2009effective}, source_type = {inproceedings}, private_publication = {false}, bibtype = {inproceedings}, author = {Popa, Mircea and Lazea, Gheorghe and Majdik, Andras and Tamas, Levente and Szoke, Istvan}, booktitle = {Intelligent Computer Communication and Processing, 2009. ICCP 2009. IEEE 5th International Conference on} }
@inproceedings{ title = {Fast point cloud registration algorithm to build 3D maps for robot navigation}, type = {inproceedings}, year = {2009}, id = {9fbb8bcc-54a9-3745-82b3-92d9fbe01d46}, created = {2014-01-13T12:54:30.000Z}, file_attached = {false}, profile_id = {bfbbf840-4c42-3914-a463-19024f50b30c}, last_modified = {2017-03-22T19:08:01.262Z}, read = {false}, starred = {false}, authored = {true}, confirmed = {true}, hidden = {false}, citation_key = {majdik2009fast}, source_type = {inproceedings}, private_publication = {false}, bibtype = {inproceedings}, author = {Majdik, Andras and Tamas, Levente and Lazea, Gheorghe}, booktitle = {Annals of DAAAM & Proceedings} }
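As a rough illustration of point cloud registration of the kind referenced above, here is a minimal point-to-point ICP sketch; it is not the paper's algorithm, just the textbook loop of nearest-neighbour matching followed by the Kabsch SVD alignment, with synthetic demo data:

import numpy as np
from scipy.spatial import cKDTree

def icp(src, dst, iters=20):
    """Minimal point-to-point ICP: returns R, t aligning src onto dst."""
    R, t = np.eye(3), np.zeros(3)
    tree = cKDTree(dst)
    cur = src.copy()
    for _ in range(iters):
        _, idx = tree.query(cur)               # nearest-neighbour matches
        matched = dst[idx]
        mu_s, mu_d = cur.mean(0), matched.mean(0)
        H = (cur - mu_s).T @ (matched - mu_d)  # cross-covariance
        U, _, Vt = np.linalg.svd(H)
        Ri = Vt.T @ U.T                        # optimal rotation (Kabsch)
        if np.linalg.det(Ri) < 0:              # guard against reflections
            Vt[-1] *= -1
            Ri = Vt.T @ U.T
        ti = mu_d - Ri @ mu_s
        cur = cur @ Ri.T + ti                  # apply the increment
        R, t = Ri @ R, Ri @ t + ti             # accumulate the transform
    return R, t

rng = np.random.default_rng(0)
dst = rng.normal(size=(200, 3))
src = dst @ np.array([[0.99, -0.14, 0], [0.14, 0.99, 0], [0, 0, 1]]).T + 0.1
R, t = icp(src, dst)
print(np.round(R, 2), np.round(t, 2))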
@inproceedings{ title = {Path planning and dynamic objects detection}, type = {inproceedings}, year = {2009}, pages = {479-482}, institution = {IEEE}, id = {19a8091d-ecf5-3847-b729-eff61599e33b}, created = {2014-01-13T12:54:30.000Z}, file_attached = {false}, profile_id = {bfbbf840-4c42-3914-a463-19024f50b30c}, last_modified = {2017-03-22T19:08:01.262Z}, read = {false}, starred = {false}, authored = {true}, confirmed = {true}, hidden = {false}, citation_key = {szoke2009path}, source_type = {inproceedings}, private_publication = {false}, bibtype = {inproceedings}, author = {Szoke, Istvan and Lazea, Gheorghe and Tamas, Levente and Popa, Mircea and Majdik, Andras}, booktitle = {Advanced Robotics, 2009. ICAR 2009. International Conference on} }
@inproceedings{ title = {Laser based localization techniques for indoor mobile robots}, type = {inproceedings}, year = {2009}, keywords = {mobile robots,laser scan alignment,localization}, pages = {169-170}, issue = {4}, websites = {http://rrg.utcluj.ro/~levente/download/private/pub/Tamas et al. - 2009 - Laser Based Localization Techniques for Indoor Mobile Robots.pdf}, month = {7}, publisher = {Springer London}, institution = {IEEE}, id = {cb2bbecd-51bd-3f7b-b886-cad21757035b}, created = {2014-01-13T12:54:30.000Z}, file_attached = {true}, profile_id = {bfbbf840-4c42-3914-a463-19024f50b30c}, last_modified = {2017-03-22T19:08:01.262Z}, read = {false}, starred = {false}, authored = {true}, confirmed = {true}, hidden = {false}, citation_key = {tamas2009laser}, source_type = {inproceedings}, private_publication = {false}, bibtype = {inproceedings}, author = {Tamas, Levente and Lazea, Gheorghe and Popa, Mircea and Szoke, Istvan and Majdik, Andras}, doi = {10.1109/AT-EQUAL.2009.43}, booktitle = {Robot Motion and Control 2009} }
@inproceedings{ title = {Pattern recognition from lidar for dynamic object detection}, type = {inproceedings}, year = {2009}, keywords = {Classification,Gaussian models,Mobile robots}, id = {d2264262-db69-3b7a-8544-e0b7791cd716}, created = {2018-02-19T06:34:52.897Z}, file_attached = {false}, profile_id = {bfbbf840-4c42-3914-a463-19024f50b30c}, last_modified = {2018-02-19T06:34:52.897Z}, read = {false}, starred = {false}, authored = {true}, confirmed = {false}, hidden = {false}, private_publication = {false}, abstract = {This paper gives an insight into lidar scan segmentation and classification. The presented segmentation algorithm is general and can be applied to any type of sensor that returns bearing-range data. For the classification, the features are represented by Gaussian Mixture Models (GMM). The experimental results demonstrate a detector for human legs running on an indoor mobile robot.}, bibtype = {inproceedings}, author = {Tamas, L. and Popa, M. and Lazea, G.}, booktitle = {Annals of DAAAM and Proceedings of the International DAAAM Symposium} }
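A common way to segment bearing-range data of the kind mentioned in this abstract is to split the scan at range discontinuities; the sketch below uses that jump-distance heuristic, with a hypothetical 0.3 m threshold and a synthetic scan:

import numpy as np

def segment_scan(ranges, jump=0.3):
    """Split a bearing-ordered range scan wherever consecutive readings
    jump by more than `jump` metres (a common heuristic; the threshold
    here is a hypothetical illustration value)."""
    segments, start = [], 0
    for i in range(1, len(ranges)):
        if abs(ranges[i] - ranges[i - 1]) > jump:
            segments.append((start, i))
            start = i
    segments.append((start, len(ranges)))
    return segments

# synthetic scan: wall, a closer leg-like object, wall again
scan = np.concatenate([np.full(20, 2.0), np.full(15, 0.8), np.full(25, 2.1)])
print(segment_scan(scan))   # three segments

Per-segment geometric features (e.g. width, circularity) could then be scored with a fitted mixture model such as sklearn.mixture.GaussianMixture, roughly mirroring the GMM classification step named in the abstract.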
@book{ title = {Position estimation techniques for mobile robots}, type = {book}, year = {2009}, source = {Lecture Notes in Control and Information Sciences}, volume = {396}, id = {36170d8e-eb73-3ac2-890b-e98142b763a8}, created = {2018-02-19T06:34:52.955Z}, file_attached = {false}, profile_id = {bfbbf840-4c42-3914-a463-19024f50b30c}, last_modified = {2018-02-19T06:34:52.955Z}, read = {false}, starred = {false}, authored = {true}, confirmed = {false}, hidden = {false}, private_publication = {false}, bibtype = {book}, author = {Tamas, L. and Lazea, G. and Majdik, A. and Popa, M. and Szoke, I.}, doi = {10.1007/978-1-84882-985-5_29} }
@inproceedings{ title = {Path planning with Markovian processes}, type = {inproceedings}, year = {2009}, keywords = {Mapping,Markovian processes,Mobile robots,Navigation algorithms,Path planning}, volume = {2 RA}, id = {8f5145b6-5393-3526-bb2f-2783e15b86f9}, created = {2018-02-19T06:34:52.961Z}, file_attached = {false}, profile_id = {bfbbf840-4c42-3914-a463-19024f50b30c}, last_modified = {2018-02-19T06:34:52.961Z}, read = {false}, starred = {false}, authored = {true}, confirmed = {false}, hidden = {false}, private_publication = {false}, abstract = {This paper describes path planning for mobile robots based on Markov Decision Problems. The presented algorithms are developed for solving problems with partially observable states. The algorithm is applied in an office environment and tested with a skid-steered robot. The created map combines two mapping approaches, the topological and the metric method. The main goal of the robot is to navigate from the home point to the door of the indoor environment using algorithms based on Markovian decisions.}, bibtype = {inproceedings}, author = {Szoke, I. and Lazea, G. and Tamas, L. and Popa, M. and Majdik, A.}, booktitle = {ICINCO 2009 - 6th International Conference on Informatics in Control, Automation and Robotics, Proceedings} }
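As a toy illustration of the Markovian planning idea above, the following value-iteration sketch solves a fully observable corridor problem; the states, rewards and discount are hypothetical, and the paper's partially observable formulation is not reproduced:

import numpy as np

# Value iteration on a toy 1-D corridor: states 0..4, the door at state 4.
# Transition model, rewards and discount are hypothetical illustration
# values, not taken from the paper.
n, gamma = 5, 0.95
actions = (-1, +1)                     # move left / move right
V = np.zeros(n)
for _ in range(100):
    Q = np.empty((n, len(actions)))
    for s in range(n):
        for ai, a in enumerate(actions):
            s2 = min(max(s + a, 0), n - 1)       # deterministic move
            r = 1.0 if s2 == n - 1 else -0.01    # reward for reaching the door
            Q[s, ai] = r + gamma * V[s2]
    V = Q.max(axis=1)                  # Bellman backup
policy = Q.argmax(axis=1)              # greedy policy: 1 = move right
print(np.round(V, 2), policy)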
@inproceedings{ title = {Mobile robots formation navigation with behavior based algorithms}, type = {inproceedings}, year = {2008}, pages = {388-391}, volume = {2}, institution = {IEEE}, id = {f7323e29-5ae9-39e4-b302-afc44fc95a24}, created = {2014-01-13T12:54:30.000Z}, file_attached = {false}, profile_id = {bfbbf840-4c42-3914-a463-19024f50b30c}, last_modified = {2017-03-22T19:08:01.262Z}, read = {false}, starred = {false}, authored = {true}, confirmed = {true}, hidden = {false}, citation_key = {lazea2008mobile}, source_type = {inproceedings}, private_publication = {false}, bibtype = {inproceedings}, author = {Lazea, Gh and Robotin, R and Herle, S and Marcu, C and Tamas, L}, booktitle = {Automation, Quality and Testing, Robotics, 2008. AQTR 2008. IEEE International Conference on} }
@inproceedings{ title = {Industrial robot controller using miniature computers}, type = {inproceedings}, year = {2008}, pages = {341-344}, volume = {2}, institution = {IEEE}, id = {0319697d-f7f1-3e72-85d5-98ffddfa4eb2}, created = {2014-01-13T12:54:30.000Z}, file_attached = {false}, profile_id = {bfbbf840-4c42-3914-a463-19024f50b30c}, last_modified = {2017-03-22T19:08:01.262Z}, read = {false}, starred = {false}, authored = {true}, confirmed = {true}, hidden = {false}, citation_key = {marcu2008industrial}, source_type = {inproceedings}, private_publication = {false}, bibtype = {inproceedings}, author = {Marcu, C and Lazea, Gh and Robotin, R and Herle, S and Tamas, L}, booktitle = {Automation, Quality and Testing, Robotics, 2008. AQTR 2008. IEEE International Conference on} }
@inproceedings{ title = {Classification of surface electromyographic signals for control of upper limb virtual prosthesis using time-domain features}, type = {inproceedings}, year = {2008}, pages = {160-165}, volume = {3}, institution = {IEEE}, id = {214bc6f1-9758-3330-8418-da6030d604e5}, created = {2014-01-13T12:54:30.000Z}, file_attached = {false}, profile_id = {bfbbf840-4c42-3914-a463-19024f50b30c}, last_modified = {2017-03-22T19:08:01.262Z}, read = {false}, starred = {false}, authored = {true}, confirmed = {true}, hidden = {false}, citation_key = {herle2008classification}, source_type = {inproceedings}, private_publication = {false}, bibtype = {inproceedings}, author = {Herle, S and Raica, Paula and Lazea, Gh and Robotin, R and Marcu, C and Tamas, L}, booktitle = {Automation, Quality and Testing, Robotics, 2008. AQTR 2008. IEEE International Conference on} }
@inproceedings{ title = {Modeling and simulation of an induction drive with application to a small wind turbine generator}, type = {inproceedings}, year = {2008}, pages = {429-433}, volume = {3}, issue = {1}, websites = {http://rrg.utcluj.ro/~levente/download/private/pub/Tamas, Szekely - 2008 - Modeling and simulation of an induction drive with application to a small wind turbine generator.pdf}, publisher = {IEEE}, institution = {IEEE}, id = {28b63bde-69ff-365d-afe8-8874f86e19d2}, created = {2014-01-13T12:54:30.000Z}, file_attached = {true}, profile_id = {bfbbf840-4c42-3914-a463-19024f50b30c}, last_modified = {2017-03-22T19:08:01.262Z}, read = {true}, starred = {false}, authored = {true}, confirmed = {true}, hidden = {false}, citation_key = {tamas2008modeling}, source_type = {inproceedings}, private_publication = {false}, bibtype = {inproceedings}, author = {Tamas, Levente and Szekely, Zoltan}, doi = {10.1109/AQTR.2008.4588957}, booktitle = {Automation, Quality and Testing, Robotics, 2008. AQTR 2008. IEEE International Conference on} }
@inproceedings{ title = {State estimation based on Kalman filtering techniques in navigation}, type = {inproceedings}, year = {2008}, keywords = {odometric,infrared measurement,kalman filter,ultrasonic,unscented transformation}, pages = {147-152}, volume = {2}, issue = {2}, websites = {http://rrg.utcluj.ro/~levente/download/private/pub/Tamas, Lazea, Robotin - 2008 - State estimation based on Kalman filtering techniques in navigation.pdf}, institution = {IEEE}, id = {54d30f13-2e34-3235-af80-848a2b9debb9}, created = {2014-01-13T12:54:30.000Z}, accessed = {2014-01-13}, file_attached = {true}, profile_id = {bfbbf840-4c42-3914-a463-19024f50b30c}, last_modified = {2017-03-22T19:08:01.262Z}, read = {false}, starred = {false}, authored = {true}, confirmed = {true}, hidden = {false}, citation_key = {tamas2008state}, source_type = {inproceedings}, private_publication = {false}, bibtype = {inproceedings}, author = {Tamas, L and Lazea, Gh and Robotin, R and Marcu, C and Herle, S and Szekely, Z}, booktitle = {Automation, Quality and Testing, Robotics, 2008. AQTR 2008. IEEE International Conference on} }
@inproceedings{ title = {Mobile robot position estimation based on multi sensor fusion}, type = {inproceedings}, year = {2008}, keywords = {Kalman filter,Position estimation,Robot navigation,Sensor fusion}, id = {894166cb-6c72-3b29-82d9-913a01e95b9d}, created = {2018-02-19T06:34:53.072Z}, file_attached = {false}, profile_id = {bfbbf840-4c42-3914-a463-19024f50b30c}, last_modified = {2018-02-19T06:34:53.072Z}, read = {false}, starred = {false}, authored = {true}, confirmed = {false}, hidden = {false}, private_publication = {false}, abstract = {This paper gives an overview of state estimation techniques based on Kalman filters in the robot navigation field. It deals with the problem of position estimation based on odometric and ultrasonic measurements, with the main aim of improving the position estimate used in navigation. Results from applying the Kalman filter to odometric and ultrasonic measurements are presented.}, bibtype = {inproceedings}, author = {Tamas, L. and Lazea, G. and Robotin, R. and Marcu, C. and Majdik, A.}, booktitle = {Annals of DAAAM and Proceedings of the International DAAAM Symposium} }
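The odometry-plus-ultrasonic correction described in this abstract can be illustrated with a one-dimensional Kalman filter sketch; the wall position, motion commands and all noise variances below are hypothetical illustration values:

import numpy as np

# 1-D position estimate: predict with odometry, correct with an
# ultrasonic range to a wall at a known position. All numbers are
# hypothetical illustration values, not taken from the paper.
wall = 5.0
x, P = 0.0, 1.0                 # state estimate and its variance
Q, R = 0.02, 0.25               # odometry / ultrasonic noise variances
rng = np.random.default_rng(1)
true_x = 0.0
for step in range(20):
    u = 0.2                                  # commanded displacement
    true_x += u
    x, P = x + u, P + Q                      # predict with odometry
    z = (wall - true_x) + rng.normal(0, np.sqrt(R))  # ultrasonic reading
    y = z - (wall - x)                       # innovation
    H = -1.0                                 # d(wall - x)/dx
    S = H * P * H + R
    K = P * H / S                            # Kalman gain
    x, P = x + K * y, (1 - K * H) * P        # correct
print(round(x, 2), round(P, 3))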
@inproceedings{ title = {The NEPSAC nonlinear predictive controller in a real life experiment}, type = {inproceedings}, year = {2007}, keywords = {nonlinear predictive control,model based control,parameter estimation,physical modeling}, pages = {229-234}, websites = {http://rrg.utcluj.ro/~levente/download/private/pub/Tamas, Nascu, De Keyser - 2007 - The NEPSAC Nonlinear Predictive Controller in a Real Life Experiment.pdf}, month = {6}, publisher = {IEEE}, institution = {IEEE}, id = {cf8062fc-12e7-3a54-a31b-fb2655d3a79b}, created = {2014-01-13T12:54:30.000Z}, file_attached = {true}, profile_id = {bfbbf840-4c42-3914-a463-19024f50b30c}, last_modified = {2017-03-22T19:08:01.262Z}, read = {false}, starred = {false}, authored = {true}, confirmed = {true}, hidden = {false}, citation_key = {tamas2007nepsac}, source_type = {inproceedings}, private_publication = {false}, bibtype = {inproceedings}, author = {Tamas, Levente and Nascu, Ioan and De Keyser, Robin and others}, doi = {10.1109/INES.2007.4283703}, booktitle = {Intelligent Engineering Systems, 2007. INES 2007. 11th International Conference on} }
@article{ title = {Object Handling in Cluttered Indoor Environment with a Mobile Manipulator}, type = {article}, id = {09e8bb5b-42a0-361c-ac14-78bb54c16bc6}, created = {2016-12-19T10:38:35.000Z}, file_attached = {true}, profile_id = {bfbbf840-4c42-3914-a463-19024f50b30c}, last_modified = {2017-03-22T19:08:01.262Z}, read = {false}, starred = {false}, authored = {true}, confirmed = {false}, hidden = {false}, private_publication = {false}, bibtype = {article}, author = {Militaru, Cristian and Mezei, Ady-daniel and Tamas, Levente} }