Deep-LfD: Deep robot learning from demonstrations. Esfahani, A. G., Sasikolomi, K. N., Hashempour, H., & Zhong, F. Software Impacts, 9:100087, Elsevier, August, 2021.
Abstract: Like other robot learning from demonstration (LfD) approaches, deep-LfD builds a task model from sample demonstrations. Unlike conventional LfD, however, the deep-LfD model learns the relation between high-dimensional visual sensory information and the robot trajectory/path. This paper presents a dataset of successful needle insertions into deformable objects performed with the da Vinci Research Kit, on which several deep-LfD models are built as a benchmark for learning a robot controller for the needle insertion task.
@article{lincoln45212,
volume = {9},
month = {August},
author = {Amir Ghalamzan Esfahani and Kiyanoush Nazari Sasikolomi and Hamidreza Hashempour and Fangxun Zhong},
title = {Deep-LfD: Deep robot learning from demonstrations},
publisher = {Elsevier},
year = {2021},
journal = {Software Impacts},
doi = {10.1016/j.simpa.2021.100087},
pages = {100087},
url = {https://eprints.lincoln.ac.uk/id/eprint/45212/},
abstract = {Like other robot learning from demonstration (LfD) approaches, deep-LfD builds a task model from sample demonstrations. Unlike conventional LfD, however, the deep-LfD model learns the relation between high-dimensional visual sensory information and the robot trajectory/path. This paper presents a dataset of successful needle insertions into deformable objects performed with the da Vinci Research Kit, on which several deep-LfD models are built as a benchmark for learning a robot controller for the needle insertion task.}
}
{"_id":"tATTt5XJrMjhaN7mZ","bibbaseid":"esfahani-sasikolomi-hashempour-zhong-deeplfddeeprobotlearningfromdemonstrations-2021","author_short":["Esfahani, A. G.","Sasikolomi, K. N.","Hashempour, H.","Zhong, F."],"bibdata":{"bibtype":"article","type":"article","volume":"9","month":"August","author":[{"firstnames":["Amir","Ghalamzan"],"propositions":[],"lastnames":["Esfahani"],"suffixes":[]},{"firstnames":["Kiyanoush","Nazari"],"propositions":[],"lastnames":["Sasikolomi"],"suffixes":[]},{"firstnames":["Hamidreza"],"propositions":[],"lastnames":["Hashempour"],"suffixes":[]},{"firstnames":["Fangxun"],"propositions":[],"lastnames":["Zhong"],"suffixes":[]}],"title":"Deep-LfD: Deep robot learning from demonstrations","publisher":"Elsevier","year":"2021","journal":"Software Impacts","doi":"10.1016/j.simpa.2021.100087","pages":"100087","keywords":"ARRAY(0x56546f0152a8)","url":"https://eprints.lincoln.ac.uk/id/eprint/45212/","abstract":"Like other robot learning from demonstration (LfD) approaches, deep-LfD builds a task model from sample demonstrations. However, unlike conventional LfD, the deep-LfD model learns the relation between high dimensional visual sensory information and robot trajectory/path. This paper presents a dataset of successful needle insertion by da Vinci Research Kit into deformable objects based on which several deep-LfD models are built as a benchmark of models learning robot controller for the needle insertion task.","bibtex":"@article{lincoln45212,\n volume = {9},\n month = {August},\n author = {Amir Ghalamzan Esfahani and Kiyanoush Nazari Sasikolomi and Hamidreza Hashempour and Fangxun Zhong},\n title = {Deep-LfD: Deep robot learning from demonstrations},\n publisher = {Elsevier},\n year = {2021},\n journal = {Software Impacts},\n doi = {10.1016/j.simpa.2021.100087},\n pages = {100087},\n keywords = {ARRAY(0x56546f0152a8)},\n url = {https://eprints.lincoln.ac.uk/id/eprint/45212/},\n abstract = {Like other robot learning from demonstration (LfD) approaches, deep-LfD builds a task model from sample demonstrations. However, unlike conventional LfD, the deep-LfD model learns the relation between high dimensional visual sensory information and robot trajectory/path. This paper presents a dataset of successful needle insertion by da Vinci Research Kit into deformable objects based on which several deep-LfD models are built as a benchmark of models learning robot controller for the needle insertion task.}\n}\n\n","author_short":["Esfahani, A. G.","Sasikolomi, K. 
N.","Hashempour, H.","Zhong, F."],"key":"lincoln45212","id":"lincoln45212","bibbaseid":"esfahani-sasikolomi-hashempour-zhong-deeplfddeeprobotlearningfromdemonstrations-2021","role":"author","urls":{"Paper":"https://eprints.lincoln.ac.uk/id/eprint/45212/"},"keyword":["ARRAY(0x56546f0152a8)"],"metadata":{"authorlinks":{}}},"bibtype":"article","biburl":"https://eprints.lincoln.ac.uk/cgi/search/archive/advanced/export_lincoln_BibTeX.bib?screen=Search&dataset=archive&_action_export=1&output=BibTeX&exp=0%7C1%7C-date%2Fcreators_name%2Ftitle%7Carchive%7C-%7Ccreators_name%3Acreators_name%3AANY%3AEQ%3AHanheide+Al-Fadhli+Baxter+Bellotto+Bosilj+Calisti+Cielniak+Coutts+Cuayahuitl+Das+Elgeneidy+Gaju+Esfahani+Fox+From+Gao+Gould+Millard+Parsons+Pearson+Saaj+Sklar+Swainson+Valluru+Villa+Wright+Yue%7Cdate%3Adate%3AALL%3AEQ%3A2021%7C-%7Ceprint_status%3Aeprint_status%3AANY%3AEQ%3Aarchive%7Cmetadata_visibility%3Ametadata_visibility%3AANY%3AEQ%3Ashow&n=&cache=11447865","dataSources":["5gniG5JtcCSxmGw9h"],"keywords":["array(0x56546f0152a8)"],"search_terms":["deep","lfd","deep","robot","learning","demonstrations","esfahani","sasikolomi","hashempour","zhong"],"title":"Deep-LfD: Deep robot learning from demonstrations","year":2021}