// Auto-generated by BibBase (https://bibbase.org) from
// https://ag-robotics.github.io/data-bibliography/ag-robotics.bib (jsonp=1 mode).
// `bibbase_data.data` holds a pre-rendered, escaped HTML bibliography which is
// injected into the including page at load time by the document.write() call
// at the end of this file. Do NOT hand-edit the payload string — regenerate it
// from the source .bib file via the BibBase URL instead.
//
// NOTE(review): the embedded "JavaScript" embed snippet inside the payload
// repeats the `&jsonp=1` query parameter twice (`...&jsonp=1&jsonp=1`) while
// the PHP and iFrame snippets carry it once — harmless duplication, but worth
// fixing upstream when regenerating.
// NOTE(review): the raw line breaks inside the string literal below appear to
// be an artifact of how this file was extracted/displayed; a valid JS string
// literal cannot contain unescaped newlines — confirm against the original
// single-line BibBase output before deploying.
var bibbase_data = {"data":"\"Loading..\"\n\n
\n\n \n\n \n\n \n \n\n \n\n \n \n\n \n\n \n
\n generated by\n \n \"bibbase.org\"\n\n \n
\n \n\n
\n\n \n\n\n
\n\n Excellent! Next you can\n create a new website with this list, or\n embed it in an existing web page by copying & pasting\n any of the following snippets.\n\n
\n JavaScript\n (easiest)\n
\n \n <script src=\"https://bibbase.org/show?bib=https%3A%2F%2Fag-robotics.github.io%2Fdata-bibliography%2Fag-robotics.bib&jsonp=1&jsonp=1\"></script>\n \n
\n\n PHP\n
\n \n <?php\n $contents = file_get_contents(\"https://bibbase.org/show?bib=https%3A%2F%2Fag-robotics.github.io%2Fdata-bibliography%2Fag-robotics.bib&jsonp=1\");\n print_r($contents);\n ?>\n \n
\n\n iFrame\n (not recommended)\n
\n \n <iframe src=\"https://bibbase.org/show?bib=https%3A%2F%2Fag-robotics.github.io%2Fdata-bibliography%2Fag-robotics.bib&jsonp=1\"></iframe>\n \n
\n\n

\n For more details see the documention.\n

\n
\n
\n\n
\n\n This is a preview! To use this list on your own web site\n or create a new web site from it,\n create a free account. The file will be added\n and you will be able to edit it in the File Manager.\n We will show you instructions once you've created your account.\n
\n\n
\n\n

To the site owner:

\n\n

Action required! Mendeley is changing its\n API. In order to keep using Mendeley with BibBase past April\n 14th, you need to:\n

    \n
  1. renew the authorization for BibBase on Mendeley, and
  2. \n
  3. update the BibBase URL\n in your page the same way you did when you initially set up\n this page.\n
  4. \n
\n

\n\n

\n \n \n Fix it now\n

\n
\n\n
\n\n\n
\n \n \n
\n
\n  \n 2019\n \n \n (1)\n \n \n
\n
\n \n \n
\n \n\n \n \n \n \n \n \n The Rosario dataset: Multisensor data for localization and mapping in agricultural environments.\n \n \n \n \n\n\n \n Pire, T.; Mujica, M.; Civera, J.; and Kofman, E.\n\n\n \n\n\n\n The International Journal of Robotics Research,0278364919841437. April 2019.\n Dataset URL: http://www.cifasis-conicet.gov.ar/robot/doku.php\n\n\n\n
\n\n\n\n \n \n \"ThePaper\n  \n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n  \n \n abstract \n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n \n \n \n \n \n \n \n \n\n\n\n
\n
@article{pire_rosario_2019,\n\ttitle = {The {Rosario} dataset: {Multisensor} data for localization and mapping in agricultural environments},\n\tissn = {0278-3649},\n\tshorttitle = {The {Rosario} dataset},\n\turl = {https://doi.org/10.1177/0278364919841437},\n\tdoi = {10.1177/0278364919841437},\n\tabstract = {In this paper we present the Rosario dataset, a collection of sensor data for autonomous mobile robotics in agricultural scenes. The dataset is motivated by the lack of realistic sensor readings gathered by a mobile robot in such environments. It consists of six sequences recorded in soybean fields showing real and challenging cases: highly repetitive scenes, reflection, and burned images caused by direct sunlight and rough terrain among others. The dataset was conceived in order to provide a benchmark and contribute to the agricultural simultaneous localization and mapping (SLAM)/odometry and sensor fusion research. It contains synchronized readings of several sensors: wheel odometry, inertial measurement unit (IMU), stereo camera, and a Global Positioning System real-time kinematics (GPS-RTK) system. The dataset is publicly available from http://www.cifasis-conicet.gov.ar/robot/.},\n\tlanguage = {en},\n\turldate = {2019-04-28},\n\tjournal = {The International Journal of Robotics Research},\n\tauthor = {Pire, Taihú and Mujica, Martín and Civera, Javier and Kofman, Ernesto},\n\tmonth = apr,\n\tyear = {2019},\n\tnote = {Dataset URL: http://www.cifasis-conicet.gov.ar/robot/doku.php},\n\tpages = {0278364919841437},\n\tkeywords = {mobile robots, odometry, IMU, RTK-GPS}\n}\n\n
\n
\n\n\n
\n In this paper we present the Rosario dataset, a collection of sensor data for autonomous mobile robotics in agricultural scenes. The dataset is motivated by the lack of realistic sensor readings gathered by a mobile robot in such environments. It consists of six sequences recorded in soybean fields showing real and challenging cases: highly repetitive scenes, reflection, and burned images caused by direct sunlight and rough terrain among others. The dataset was conceived in order to provide a benchmark and contribute to the agricultural simultaneous localization and mapping (SLAM)/odometry and sensor fusion research. It contains synchronized readings of several sensors: wheel odometry, inertial measurement unit (IMU), stereo camera, and a Global Positioning System real-time kinematics (GPS-RTK) system. The dataset is publicly available from http://www.cifasis-conicet.gov.ar/robot/.\n
\n\n\n
\n\n\n\n\n\n
\n
\n\n
\n
\n  \n 2018\n \n \n (2)\n \n \n
\n
\n \n \n
\n \n\n \n \n \n \n \n \n Data from: Multi-species fruit flower detection using a refined semantic segmentation network.\n \n \n \n \n\n\n \n Dias, P. A.; Tabb, A.; and Medeiros, H.\n\n\n \n\n\n\n 2018.\n type: dataset\n\n\n\n
\n\n\n\n \n \n \"DataPaper\n  \n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n \n \n \n \n \n \n\n\n\n
\n
@misc{dias_data_2018,\n\ttitle = {Data from: {Multi}-species fruit flower detection using a refined semantic segmentation network},\n\tshorttitle = {Data from},\n\turl = {https://data.nal.usda.gov/dataset/data-multi-species-fruit-flower-detection-using-refined-semantic-segmentation-network},\n\tlanguage = {en},\n\turldate = {2019-04-28},\n\tpublisher = {Ag Data Commons},\n\tauthor = {Dias, Philipe A. and Tabb, Amy and Medeiros, Henry},\n\tyear = {2018},\n\tdoi = {10.15482/usda.adc/1423466},\n\tnote = {type: dataset},\n\tkeywords = {flower, fruit, orchard}\n}\n\n
\n
\n\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n \n The National Robotics Engineering Center Agricultural Person-Detection Dataset.\n \n \n \n \n\n\n \n Pezzementi, Zachary A.; Tabor, Trenton; Hu, Peiyun; Chang, Jonathan K.; Ramanan, Deva; Wellington, Carl K.; Wisely Babu, Benzun P.; and Herman, Herman\n\n\n \n\n\n\n 2018.\n \n\n\n\n
\n\n\n\n \n \n \"ThePaper\n  \n \n\n \n\n \n link\n  \n \n\n bibtex\n \n\n \n  \n \n abstract \n \n\n \n  \n \n 1 download\n \n \n\n \n \n \n \n \n \n \n\n  \n \n \n \n \n \n \n \n \n\n\n\n
\n
@misc{pezzementi_zachary_a._national_2018,\n\ttitle = {The {National} {Robotics} {Engineering} {Center} {Agricultural} {Person}-{Detection} {Dataset}},\n\turl = {https://data.nal.usda.gov/dataset/national-robotics-engineering-center-agricultural-person-detection-dataset},\n\tabstract = {Person detection from vehicles has made rapid progress recently with the advent of multiple high-quality datasets of urban and highway driving, yet no large-scale benchmark is available for the same problem in off-road or agricultural environments. Here we present the National Robotics Engineering Center (NREC) Agricultural Person-Detection Dataset to spur research in these environments. It consists of labeled stereo video of people in orange and apple orchards taken from two perception platforms (a tractor and a pickup truck), along with vehicle position data from Real Time Kinetic (RTK) GPS. We define a benchmark on part of the dataset that combines a total of 76k labeled person images and 19k sampled person-free images. The dataset highlights several key challenges of the domain, including varying environment, substantial occlusion by vegetation, people in motion and in nonstandard poses, and people seen from a variety of distances; metadata are included to allow targeted evaluation of each of these effects.},\n\tlanguage = {en},\n\turldate = {2019-03-27},\n\tauthor = {{Pezzementi, Zachary A.} and {Tabor, Trenton} and {Hu, Peiyun} and {Chang, Jonathan K.} and {Ramanan, Deva} and {Wellington, Carl K.} and {Wisely Babu, Benzun P.} and {Herman, Herman}},\n\tyear = {2018},\n\tkeywords = {stereo, RTK-GPS, person detection}\n}\n
\n
\n\n\n
\n Person detection from vehicles has made rapid progress recently with the advent of multiple high-quality datasets of urban and highway driving, yet no large-scale benchmark is available for the same problem in off-road or agricultural environments. Here we present the National Robotics Engineering Center (NREC) Agricultural Person-Detection Dataset to spur research in these environments. It consists of labeled stereo video of people in orange and apple orchards taken from two perception platforms (a tractor and a pickup truck), along with vehicle position data from Real Time Kinetic (RTK) GPS. We define a benchmark on part of the dataset that combines a total of 76k labeled person images and 19k sampled person-free images. The dataset highlights several key challenges of the domain, including varying environment, substantial occlusion by vegetation, people in motion and in nonstandard poses, and people seen from a variety of distances; metadata are included to allow targeted evaluation of each of these effects.\n
\n\n\n
\n\n\n\n\n\n
\n
\n\n\n\n\n
\n\n\n \n\n \n \n \n \n\n
\n"}; document.write(bibbase_data.data); // inject the pre-rendered bibliography HTML into the host page