survey2019.bib
Generated by bibbase.org from https://ag-robotics.github.io/data-bibliography/survey2019.bib

You can create a new website with this list, or embed it in an existing web page by copying and pasting any of the following snippets.
JavaScript (easiest):

<script src="https://bibbase.org/show?bib=https%3A%2F%2Fag-robotics.github.io%2Fdata-bibliography%2Fsurvey2019.bib&jsonp=1"></script>
PHP:

<?php
$contents = file_get_contents("https://bibbase.org/show?bib=https%3A%2F%2Fag-robotics.github.io%2Fdata-bibliography%2Fsurvey2019.bib&jsonp=1");
print_r($contents);
?>
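A note on the PHP variant (general PHP behavior, not BibBase-specific): file_get_contents() fetches the rendered markup over HTTP, which requires allow_url_fopen to be enabled in php.ini, and print_r() simply echoes the returned string into the page, so the snippet must run at the point where the bibliography should appear.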
iFrame (not recommended):

<iframe src="https://bibbase.org/show?bib=https%3A%2F%2Fag-robotics.github.io%2Fdata-bibliography%2Fsurvey2019.bib&jsonp=1"></iframe>
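The JavaScript endpoint responds with a script that calls document.write() with the rendered list, so the bibliography appears exactly where the script tag is placed. A minimal sketch of a host page using the JavaScript snippet above; the surrounding markup (title, heading) is illustrative only and not prescribed by BibBase:

<!DOCTYPE html>
<html>
  <head>
    <meta charset="utf-8">
    <title>Agricultural Robotics Bibliography</title>
  </head>
  <body>
    <h1>Survey Bibliography</h1>
    <!-- BibBase writes the rendered bibliography here, at the script's position -->
    <script src="https://bibbase.org/show?bib=https%3A%2F%2Fag-robotics.github.io%2Fdata-bibliography%2Fsurvey2019.bib&jsonp=1"></script>
  </body>
</html>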
For more details, see the documentation.

2019 (6)

Robotics and automation for improving agriculture. Billingsley, J., editor. Burleigh Dodds Science Publishing, UK, 2019.

@book{billingsley_robotics_2019,
  address = {UK},
  title = {Robotics and automation for improving agriculture},
  publisher = {Burleigh Dodds Science Publishing},
  editor = {Billingsley, John},
  year = {2019}
}

Autonomous Technologies in Agricultural Equipment: A Review of the State of the Art. In 2019 Agricultural Equipment Technology Conference, pages 1–17, February 2019. American Society of Agricultural and Biological Engineers.

@inproceedings{noauthor_autonomous_2019,
  title = {Autonomous {Technologies} in {Agricultural} {Equipment}: {A} {Review} of the {State} of the {Art}},
  shorttitle = {Autonomous {Technologies} in {Agricultural} {Equipment}},
  url = {https://elibrary.asabe.org/azdez.asp?JID=6&AID=49978&CID=913c0119&T=1},
  doi = {10.13031/913},
  language = {en},
  urldate = {2019-03-22},
  booktitle = {2019 {Agricultural} {Equipment} {Technology} {Conference}},
  publisher = {American Society of Agricultural and Biological Engineers},
  month = feb,
  year = {2019},
  pages = {1--17}
}

Abstract: Automation and robotics for agricultural production are topics of tremendous interest and large investments. There have been significant recent advances in agricultural automation and robotics in the areas of (1) automatic vehicle guidance and steering control, (2) automatic implement guidance, (3) automatic headland sequence and turn management, (4) sensing for perception, (5) sensing for variable-rate technologies, (6) optimization of machine operation, (7) machinery coordination, and (8) machinery communication. These eight areas of progress are discussed in terms of commercially available technologies, public intellectual property, and the research literature.

Calibration of Asynchronous Camera Networks for Object Reconstruction Tasks. Tabb, A.; and Medeiros, H. arXiv:1903.06811 [cs]. March 2019.

@article{tabb_calibration_2019,
  title = {Calibration of {Asynchronous} {Camera} {Networks} for {Object} {Reconstruction} {Tasks}},
  url = {http://arxiv.org/abs/1903.06811},
  urldate = {2019-03-22},
  journal = {arXiv:1903.06811 [cs]},
  author = {Tabb, Amy and Medeiros, Henry},
  month = mar,
  year = {2019},
  note = {arXiv: 1903.06811},
  keywords = {Computer Science - Computer Vision and Pattern Recognition}
}

Abstract: Camera network and multi-camera calibration for external parameters is a necessary step for a variety of contexts in computer vision and robotics, ranging from three-dimensional reconstruction to human activity tracking. This paper describes a method for camera network and/or multi-camera calibration suitable for specific contexts: the cameras may not all have a common field of view, or if they do, there may be some views that are 180 degrees from one another, and the network may be asynchronous. The calibration object required is one or more planar calibration patterns, rigidly attached to one another and distinguishable from one another, such as aruco or charuco patterns. We formulate the camera network and/or multi-camera calibration problem in this context using rigidity constraints, represented as a system of equations, and an approximate solution is found through a two-step process. Synthetic and real experiments, including scenarios of an asynchronous camera network and a rotating imaging system, demonstrate the method in a variety of settings. Reconstruction accuracy error was less than 0.5 mm for all datasets. This method is suitable for new users to calibrate a camera network, and the modularity of the calibration object also allows for disassembly, shipping, and the use of this method in a variety of large and small spaces.

Future farms without farmers. Asseng, S.; and Asche, F. Science Robotics, 4(27): eaaw1875. February 2019.

@article{asseng_future_2019,
  title = {Future farms without farmers},
  volume = {4},
  issn = {2470-9476},
  url = {http://robotics.sciencemag.org/lookup/doi/10.1126/scirobotics.aaw1875},
  doi = {10.1126/scirobotics.aaw1875},
  language = {en},
  number = {27},
  urldate = {2019-03-22},
  journal = {Science Robotics},
  author = {Asseng, Senthold and Asche, Frank},
  month = feb,
  year = {2019},
  pages = {eaaw1875}
}

Autonomous pollination of individual kiwifruit flowers: Toward a robotic kiwifruit pollinator. Williams, H.; Nejati, M.; Hussein, S.; Penhall, N.; Lim, J. Y.; Jones, M. H.; Bell, J.; Ahn, H. S.; Bradley, S.; Schaare, P.; Martinsen, P.; Alomar, M.; Patel, P.; Seabright, M.; Duke, M.; Scarfe, A.; and MacDonald, B. Journal of Field Robotics. January 2019.

@article{williams_autonomous_2019,
  title = {Autonomous pollination of individual kiwifruit flowers: {Toward} a robotic kiwifruit pollinator},
  issn = {1556-4959, 1556-4967},
  shorttitle = {Autonomous pollination of individual kiwifruit flowers},
  url = {https://onlinelibrary.wiley.com/doi/abs/10.1002/rob.21861},
  doi = {10.1002/rob.21861},
  language = {en},
  urldate = {2019-03-22},
  journal = {Journal of Field Robotics},
  author = {Williams, Henry and Nejati, Mahla and Hussein, Salome and Penhall, Nicky and Lim, Jong Yoon and Jones, Mark Hedley and Bell, Jamie and Ahn, Ho Seok and Bradley, Stuart and Schaare, Peter and Martinsen, Paul and Alomar, Mohammad and Patel, Purak and Seabright, Matthew and Duke, Mike and Scarfe, Alistair and MacDonald, Bruce},
  month = jan,
  year = {2019}
}

Development and evaluation of a low-cost and smart technology for precision weed management utilizing artificial intelligence. Partel, V.; Charan Kakarla, S.; and Ampatzidis, Y. Computers and Electronics in Agriculture, 157: 339–350. February 2019.

@article{partel_development_2019,
  title = {Development and evaluation of a low-cost and smart technology for precision weed management utilizing artificial intelligence},
  volume = {157},
  issn = {01681699},
  url = {https://linkinghub.elsevier.com/retrieve/pii/S0168169918316612},
  doi = {10.1016/j.compag.2018.12.048},
  language = {en},
  urldate = {2019-03-22},
  journal = {Computers and Electronics in Agriculture},
  author = {Partel, Victor and Charan Kakarla, Sri and Ampatzidis, Yiannis},
  month = feb,
  year = {2019},
  pages = {339--350}
}

2018 (6)

A methodology of orchard architecture design for an optimal harvesting robot. Bloch, V.; Degani, A.; and Bechar, A. Biosystems Engineering, 166: 126–137. February 2018.

@article{bloch_methodology_2018,
  title = {A methodology of orchard architecture design for an optimal harvesting robot},
  volume = {166},
  issn = {15375110},
  url = {https://linkinghub.elsevier.com/retrieve/pii/S1537511017300612},
  doi = {10.1016/j.biosystemseng.2017.11.006},
  language = {en},
  urldate = {2019-03-22},
  journal = {Biosystems Engineering},
  author = {Bloch, Victor and Degani, Amir and Bechar, Avital},
  month = feb,
  year = {2018},
  pages = {126--137}
}

Agricultural Robotics: The Future of Robotic Agriculture. Duckett, T.; Pearson, S.; Blackmore, S.; Grieve, B.; Chen, W.; Cielniak, G.; Cleaversmith, J.; Dai, J.; Davis, S.; Fox, C.; From, P.; Georgilas, I.; Gill, R.; Gould, I.; Hanheide, M.; Iida, F.; Mihalyova, L.; Nefti-Meziani, S.; Neumann, G.; Paoletti, P.; Pridmore, T.; Ross, D.; Smith, M.; Stoelen, M.; Swainson, M.; Wane, S.; Wilson, P.; Wright, I.; and Yang, G. Paper or report, June 2018.

@misc{duckett_agricultural_2018,
  type = {Paper or {Report}},
  title = {Agricultural {Robotics}: {The} {Future} of {Robotic} {Agriculture}},
  copyright = {cc\_by4},
  shorttitle = {Agricultural {Robotics}},
  url = {http://hamlyn.doc.ic.ac.uk/uk-ras/sites/default/files/UK_RAS_wp_Agri_hi-res_single.pdf},
  language = {en},
  urldate = {2019-03-22},
  author = {Duckett, Tom and Pearson, Simon and Blackmore, Simon and Grieve, Bruce and Chen, Wen-Hua and Cielniak, Grzegorz and Cleaversmith, Jason and Dai, Jian and Davis, Steve and Fox, Charles and From, Pal and Georgilas, Ioannis and Gill, Richie and Gould, Iain and Hanheide, Marc and Iida, Fumiya and Mihalyova, Lyudmila and Nefti-Meziani, Samia and Neumann, Gerhard and Paoletti, Paolo and Pridmore, Tony and Ross, Dave and Smith, Melvyn and Stoelen, Martin and Swainson, Mark and Wane, Sam and Wilson, Peter and Wright, Isobel and Yang, Guang-Zhong},
  month = jun,
  year = {2018}
}

Abstract: Agri-Food is the largest manufacturing sector in the UK. It supports a food chain that generates over £108bn p.a., with 3.9m employees in a truly international industry and exports £20bn of UK manufactured goods. However, the global food chain is under pressure from population growth, climate change, political pressures affecting migration, population drift from rural to urban regions and the demographics of an aging global population. These challenges are recognised in the UK Industrial Strategy white paper and backed by significant investment via a Wave 2 Industrial Challenge Fund Investment ("Transforming Food Production: from Farm to Fork"). Robotics and Autonomous Systems (RAS) and associated digital technologies are now seen as enablers of this critical food chain transformation. To meet these challenges, this white paper reviews the state of the art in the application of RAS in Agri-Food production and explores research and innovation needs to ensure these technologies reach their full potential and deliver the necessary impacts in the Agri-Food sector.

A review of the state of the art in agricultural automation. Part I: Sensing technologies for optimization of machine operation and farm inputs. Baillie, C. P.; Thomasson, J. A.; Lobsey, C. R.; McCarthy, C. L.; and Antille, D. L. In 2018 Detroit, Michigan July 29 - August 1, 2018, 2018. American Society of Agricultural and Biological Engineers.

@inproceedings{baillie_review_2018,
  title = {A review of the state of the art in agricultural automation. {Part} {I}: {Sensing} technologies for optimization of machine operation and farm inputs},
  shorttitle = {A review of the state of the art in agricultural automation. {Part} {I}},
  url = {http://elibrary.asabe.org/abstract.asp?JID=5&AID=49328&CID=det2018&T=1},
  doi = {10.13031/aim.201801589},
  language = {en},
  urldate = {2019-03-22},
  booktitle = {2018 {Detroit}, {Michigan} {July} 29 - {August} 1, 2018},
  publisher = {American Society of Agricultural and Biological Engineers},
  author = {Baillie, Craig P. and Thomasson, J. Alex and Lobsey, Craig R. and McCarthy, Cheryl L. and Antille, Diogenes L.},
  year = {2018}
}

Agricultural Robotics: The Future of Robotic Agriculture. Duckett, T.; Pearson, S.; Blackmore, S.; Grieve, B.; Chen, W.; Cielniak, G.; Cleaversmith, J.; Dai, J.; Davis, S.; Fox, C.; From, P.; Georgilas, I.; Gill, R.; Gould, I.; Hanheide, M.; Hunter, A.; Iida, F.; Mihalyova, L.; Nefti-Meziani, S.; Neumann, G.; Paoletti, P.; Pridmore, T.; Ross, D.; Smith, M.; Stoelen, M.; Swainson, M.; Wane, S.; Wilson, P.; Wright, I.; and Yang, G. June 2018.

@article{duckett_agricultural_2018-1,
  title = {Agricultural {Robotics}: {The} {Future} of {Robotic} {Agriculture}},
  shorttitle = {Agricultural {Robotics}},
  url = {https://arxiv.org/abs/1806.06762v2},
  language = {en},
  urldate = {2019-03-22},
  author = {Duckett, Tom and Pearson, Simon and Blackmore, Simon and Grieve, Bruce and Chen, Wen-Hua and Cielniak, Grzegorz and Cleaversmith, Jason and Dai, Jian and Davis, Steve and Fox, Charles and From, Pål and Georgilas, Ioannis and Gill, Richie and Gould, Iain and Hanheide, Marc and Hunter, Alan and Iida, Fumiya and Mihalyova, Lyudmila and Nefti-Meziani, Samia and Neumann, Gerhard and Paoletti, Paolo and Pridmore, Tony and Ross, Dave and Smith, Melvyn and Stoelen, Martin and Swainson, Mark and Wane, Sam and Wilson, Peter and Wright, Isobel and Yang, Guang-Zhong},
  month = jun,
  year = {2018}
}

Abstract: Agri-Food is the largest manufacturing sector in the UK. It supports a food chain that generates over £108bn p.a., with 3.9m employees in a truly international industry and exports £20bn of UK manufactured goods. However, the global food chain is under pressure from population growth, climate change, political pressures affecting migration, population drift from rural to urban regions and the demographics of an aging global population. These challenges are recognised in the UK Industrial Strategy white paper and backed by significant investment via a Wave 2 Industrial Challenge Fund Investment ("Transforming Food Production: from Farm to Fork"). Robotics and Autonomous Systems (RAS) and associated digital technologies are now seen as enablers of this critical food chain transformation. To meet these challenges, this white paper reviews the state of the art in the application of RAS in Agri-Food production and explores research and innovation needs to ensure these technologies reach their full potential and deliver the necessary impacts in the Agri-Food sector.

Comparing apples and oranges: Off-road pedestrian detection on the National Robotics Engineering Center agricultural person-detection dataset. Pezzementi, Z.; Tabor, T.; Hu, P.; Chang, J. K.; Ramanan, D.; Wellington, C.; Wisely Babu, B. P.; and Herman, H. Journal of Field Robotics, 35(4): 545–563. June 2018.

@article{pezzementi_comparing_2018,
  title = {Comparing apples and oranges: {Off}-road pedestrian detection on the {National} {Robotics} {Engineering} {Center} agricultural person-detection dataset},
  volume = {35},
  issn = {15564959},
  shorttitle = {Comparing apples and oranges},
  url = {http://doi.wiley.com/10.1002/rob.21760},
  doi = {10.1002/rob.21760},
  language = {en},
  number = {4},
  urldate = {2019-03-22},
  journal = {Journal of Field Robotics},
  author = {Pezzementi, Zachary and Tabor, Trenton and Hu, Peiyun and Chang, Jonathan K. and Ramanan, Deva and Wellington, Carl and Wisely Babu, Benzun P. and Herman, Herman},
  month = jun,
  year = {2018},
  pages = {545--563}
}

Embedded High Precision Control and Corn Stand Counting Algorithms for an Ultra-Compact 3D Printed Field Robot. Kayacan, E.; Zhang, Z.; and Chowdhary, G. In Robotics: Science and Systems XIV, June 2018. Robotics: Science and Systems Foundation.

@inproceedings{kayacan_embedded_2018,
  title = {Embedded {High} {Precision} {Control} and {Corn} {Stand} {Counting} {Algorithms} for an {Ultra}-{Compact} 3D {Printed} {Field} {Robot}},
  isbn = {978-0-9923747-4-7},
  url = {http://www.roboticsproceedings.org/rss14/p36.pdf},
  doi = {10.15607/RSS.2018.XIV.036},
  language = {en},
  urldate = {2019-03-22},
  booktitle = {Robotics: {Science} and {Systems} {XIV}},
  publisher = {Robotics: Science and Systems Foundation},
  author = {Kayacan, Erkan and Zhang, Zhongzhong and Chowdhary, Girish},
  month = jun,
  year = {2018}
}

Abstract: This paper presents embedded high precision control and corn stand counting algorithms for a low-cost, ultra-compact 3D printed and autonomous field robot for agricultural operations. Currently, plant traits, such as emergence rate, biomass, vigor, and stand counts, are measured manually. This is highly labor intensive and prone to errors. The robot, termed TerraSentia, is designed to automate the measurement of plant traits for efficient phenotyping as an alternative to manual measurements. In this paper, we formulate a Nonlinear Moving Horizon Estimator (NMHE) that identifies key terrain parameters using onboard robot sensors and a learning-based Nonlinear Model Predictive Control (NMPC) that ensures high precision path tracking in the presence of unknown wheel-terrain interaction. Moreover, we develop a machine vision algorithm to enable TerraSentia to count corn stands by driving through the fields autonomously. We present results of an extensive field-test study that shows that (i) the robot can track paths precisely with less than 5 cm error, so that the robot is less likely to damage plants, and (ii) the machine vision algorithm is robust against interference from leaves and weeds; the system has been verified in corn fields at the growth stages V4, V6, VT, R2, and R6 at five different locations. The robot predictions agree well with the ground truth, with count_robot = 0.96 × count_human + 0.85 and correlation coefficient R = 0.96.

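As a quick reading of the reported fit (illustrative arithmetic only, not a figure from the paper): for a human ground-truth count of 100 plants, the regression count_robot = 0.96 × count_human + 0.85 predicts 0.96 × 100 + 0.85 ≈ 97 plants.
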
2017 (5)

JFR Special Issue on Agricultural Robotics. Ball, D.; Upcroft, B.; Henten, E. v.; Hengel, A. v. d.; Tokekar, P.; and Das, J. Journal of Field Robotics, 34(6): 1037–1038. 2017.

@article{ball_jfr_2017,
  title = {{JFR} {Special} {Issue} on {Agricultural} {Robotics}},
  volume = {34},
  copyright = {© 2017 Wiley Periodicals, Inc.},
  issn = {1556-4967},
  url = {https://onlinelibrary.wiley.com/doi/abs/10.1002/rob.21745},
  doi = {10.1002/rob.21745},
  language = {en},
  number = {6},
  urldate = {2019-03-20},
  journal = {Journal of Field Robotics},
  author = {Ball, David and Upcroft, Ben and Henten, Eldert van and Hengel, Anton van den and Tokekar, Pratap and Das, Jnaneshwar},
  year = {2017},
  pages = {1037--1038}
}

Autonomous Sweet Pepper Harvesting for Protected Cropping Systems. Lehnert, C.; English, A.; McCool, C.; Tow, A. W.; and Perez, T. IEEE Robotics and Automation Letters, 2(2): 872–879. April 2017.

@article{lehnert_autonomous_2017,
  title = {Autonomous {Sweet} {Pepper} {Harvesting} for {Protected} {Cropping} {Systems}},
  volume = {2},
  issn = {2377-3766, 2377-3774},
  url = {http://ieeexplore.ieee.org/document/7827126/},
  doi = {10.1109/LRA.2017.2655622},
  number = {2},
  urldate = {2019-03-22},
  journal = {IEEE Robotics and Automation Letters},
  author = {Lehnert, Christopher and English, Andrew and McCool, Christopher and Tow, Adam W. and Perez, Tristan},
  month = apr,
  year = {2017},
  pages = {872--879}
}

Synthetic bootstrapping of convolutional neural networks for semantic plant part segmentation. Barth, R.; IJsselmuiden, J.; Hemming, J.; and Van Henten, E. Computers and Electronics in Agriculture. December 2017.

@article{barth_synthetic_2017,
  title = {Synthetic bootstrapping of convolutional neural networks for semantic plant part segmentation},
  issn = {01681699},
  url = {https://linkinghub.elsevier.com/retrieve/pii/S0168169917307664},
  doi = {10.1016/j.compag.2017.11.040},
  language = {en},
  urldate = {2019-03-22},
  journal = {Computers and Electronics in Agriculture},
  author = {Barth, R. and IJsselmuiden, J. and Hemming, J. and Van Henten, E.J.},
  month = dec,
  year = {2017}
}

Phenoliner: A New Field Phenotyping Platform for Grapevine Research. Kicherer, A.; Herzog, K.; Bendel, N.; Klück, H.; Backhaus, A.; Wieland, M.; Rose, J.; Klingbeil, L.; Läbe, T.; Hohl, C.; Petry, W.; Kuhlmann, H.; Seiffert, U.; and Töpfer, R. Sensors, 17(7): 1625. July 2017.

@article{kicherer_phenoliner:_2017,
  title = {Phenoliner: {A} {New} {Field} {Phenotyping} {Platform} for {Grapevine} {Research}},
  volume = {17},
  issn = {1424-8220},
  shorttitle = {Phenoliner},
  url = {http://www.mdpi.com/1424-8220/17/7/1625},
  doi = {10.3390/s17071625},
  language = {en},
  number = {7},
  urldate = {2019-03-22},
  journal = {Sensors},
  author = {Kicherer, Anna and Herzog, Katja and Bendel, Nele and Klück, Hans-Christian and Backhaus, Andreas and Wieland, Markus and Rose, Johann and Klingbeil, Lasse and Läbe, Thomas and Hohl, Christian and Petry, Willi and Kuhlmann, Heiner and Seiffert, Udo and Töpfer, Reinhard},
  month = jul,
  year = {2017},
  pages = {1625}
}

Guest Editorial Focused Section on Mechatronics Applications in Agriculture. Cheein, F. A.; Kantor, G.; Reina, G.; and Torres-Torriti, M. IEEE/ASME Transactions on Mechatronics, 22(6): 2397–2400. December 2017.

@article{cheein_guest_2017,
  title = {Guest {Editorial} {Focused} {Section} on {Mechatronics} {Applications} in {Agriculture}},
  volume = {22},
  issn = {1083-4435, 1941-014X},
  url = {http://ieeexplore.ieee.org/document/8197182/},
  doi = {10.1109/TMECH.2017.2774106},
  number = {6},
  urldate = {2019-03-22},
  journal = {IEEE/ASME Transactions on Mechatronics},
  author = {Cheein, F. A. and Kantor, G. and Reina, G. and Torres-Torriti, M.},
  month = dec,
  year = {2017},
  pages = {2397--2400}
}

2016 (5)

Robotics in Agriculture and Forestry. Bergerman, M.; Billingsley, J.; Reid, J.; and van Henten, E. In Siciliano, B.; and Khatib, O., editors, Springer Handbook of Robotics, pages 1463–1492. Springer International Publishing, Cham, 2016.

@incollection{siciliano_robotics_2016,
  author = {Bergerman, Marcel and Billingsley, John and Reid, John and van Henten, Eldert},
  booktitle = {Springer {Handbook} of {Robotics}},
  address = {Cham},
  title = {Robotics in {Agriculture} and {Forestry}},
  isbn = {978-3-319-32550-7 978-3-319-32552-1},
  language = {en},
  publisher = {Springer International Publishing},
  editor = {Siciliano, Bruno and Khatib, Oussama},
  year = {2016},
  doi = {10.1007/978-3-319-32552-1\_56},
  pages = {1463--1492}
}

Agricultural robots for field operations: Concepts and components. Bechar, A.; and Vigneault, C. Biosystems Engineering, 149: 94–111. September 2016.

@article{bechar_agricultural_2016,
  title = {Agricultural robots for field operations: {Concepts} and components},
  volume = {149},
  issn = {15375110},
  shorttitle = {Agricultural robots for field operations},
  url = {https://linkinghub.elsevier.com/retrieve/pii/S1537511015301914},
  doi = {10.1016/j.biosystemseng.2016.06.014},
  language = {en},
  urldate = {2019-03-22},
  journal = {Biosystems Engineering},
  author = {Bechar, Avital and Vigneault, Clément},
  month = sep,
  year = {2016},
  pages = {94--111}
}

Finely-grained annotated datasets for image-based plant phenotyping. Minervini, M.; Fischbach, A.; Scharr, H.; and Tsaftaris, S. A. Pattern Recognition Letters, 81: 80–89. October 2016.

@article{minervini_finely-grained_2016,
  title = {Finely-grained annotated datasets for image-based plant phenotyping},
  volume = {81},
  issn = {01678655},
  url = {https://linkinghub.elsevier.com/retrieve/pii/S0167865515003645},
  doi = {10.1016/j.patrec.2015.10.013},
  language = {en},
  urldate = {2019-03-22},
  journal = {Pattern Recognition Letters},
  author = {Minervini, Massimo and Fischbach, Andreas and Scharr, Hanno and Tsaftaris, Sotirios A.},
  month = oct,
  year = {2016},
  pages = {80--89}
}

Image Based Mango Fruit Detection, Localisation and Yield Estimation Using Multiple View Geometry. Stein, M.; Bargoti, S.; and Underwood, J. Sensors, 16(11): 1915. November 2016.

@article{stein_image_2016,
  title = {Image {Based} {Mango} {Fruit} {Detection}, {Localisation} and {Yield} {Estimation} {Using} {Multiple} {View} {Geometry}},
  volume = {16},
  issn = {1424-8220},
  url = {http://www.mdpi.com/1424-8220/16/11/1915},
  doi = {10.3390/s16111915},
  language = {en},
  number = {11},
  urldate = {2019-03-22},
  journal = {Sensors},
  author = {Stein, Madeleine and Bargoti, Suchet and Underwood, James},
  month = nov,
  year = {2016},
  pages = {1915}
}

Large Scale Image Mosaic Construction for Agricultural Applications. Li, Z.; and Isler, V. IEEE Robotics and Automation Letters, 1(1): 295–302. January 2016.

@article{li_large_2016,
  title = {Large {Scale} {Image} {Mosaic} {Construction} for {Agricultural} {Applications}},
  volume = {1},
  issn = {2377-3766, 2377-3774},
  url = {http://ieeexplore.ieee.org/document/7387713/},
  doi = {10.1109/LRA.2016.2519946},
  number = {1},
  urldate = {2019-03-22},
  journal = {IEEE Robotics and Automation Letters},
  author = {Li, Zhengqi and Isler, Volkan},
  month = jan,
  year = {2016},
  pages = {295--302}
}

No date (2)

Precision Agricultural Robotics and Autonomous Farming Technologies - IEEE Robotics and Automation Society.

@misc{noauthor_precision_nodate,
  title = {Precision {Agricultural} {Robotics} and {Autonomous} {Farming} {Technologies} - {IEEE} {Robotics} and {Automation} {Society}},
  url = {https://www.ieee-ras.org/publications/ra-l/special-issues/past-special-issues/precision-agricultural-robotics-and-autonomous-farming-technologies},
  language = {en-gb},
  urldate = {2019-03-21}
}

Abstract: Focus is on both applied and theoretical issues in robotics and automation. Robotics is here defined to include intelligent machines and systems, whereas automation includes the use of automated methods in various applications to improve performance and productivity. The society sponsors a number of conferences, including the annual International Conference on Robotics and Automation.

Fundamentals of Agricultural and Field Robotics. Karkee, M.; and Zhang, Q., editors. Springer.

@book{karkee_fundamentals_nodate,
  title = {Fundamentals of {Agricultural} and {Field} {Robotics}},
  publisher = {Springer},
  editor = {Karkee, Manoj and Zhang, Qin}
}