Robotics and automation for improving agriculture. Billingsley, J., editor. Burleigh Dodds Science Publishing, UK, 2019.

@book{billingsley_robotics_2019,
    address = {UK},
    title = {Robotics and automation for improving agriculture},
    publisher = {Burleigh Dodds Science Publishing},
    editor = {Billingsley, John},
    year = {2019}
}

Autonomous Technologies in Agricultural Equipment: A Review of the State of the Art. In 2019 Agricultural Equipment Technology Conference, pages 1–17, February 2019. American Society of Agricultural and Biological Engineers.

Automation and robotics for agricultural production are topics of tremendous interest and large investments. There have been significant recent advances in agricultural automation and robotics in the areas of (1) automatic vehicle guidance and steering control, (2) automatic implement guidance, (3) automatic headland sequence and turn management, (4) sensing for perception, (5) sensing for variable-rate technologies, (6) optimization of machine operation, (7) machinery coordination, and (8) machinery communication. These eight areas of progress are discussed in terms of commercially available technologies, public intellectual property, and the research literature.

@inproceedings{noauthor_autonomous_2019,
    title = {Autonomous {Technologies} in {Agricultural} {Equipment}: {A} {Review} of the {State} of the {Art}},
    shorttitle = {Autonomous {Technologies} in {Agricultural} {Equipment}},
    url = {https://elibrary.asabe.org/azdez.asp?JID=6&AID=49978&CID=913c0119&T=1},
    doi = {10.13031/913},
    language = {en},
    urldate = {2019-03-22},
    booktitle = {2019 {Agricultural} {Equipment} {Technology} {Conference}},
    publisher = {American Society of Agricultural and Biological Engineers},
    month = feb,
    year = {2019},
    pages = {1--17}
}

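The first of the review's eight areas, automatic vehicle guidance and steering control, is commonly built on lookahead path trackers. As an illustration only (not code from the review), here is a minimal pure-pursuit steering sketch in Python; the wheelbase, lookahead distance, and crop-row path are assumed values.

    import math

    WHEELBASE = 2.5      # metres, assumed tractor wheelbase
    LOOKAHEAD = 4.0      # metres, assumed lookahead distance

    def pure_pursuit_steer(pose, path):
        """Steering angle (rad) that arcs the vehicle toward a lookahead point.

        pose: (x, y, heading) in the field frame.
        path: list of (x, y) waypoints to follow.
        """
        x, y, heading = pose
        # Pick the first waypoint at least LOOKAHEAD metres away.
        target = next((p for p in path
                       if math.hypot(p[0] - x, p[1] - y) >= LOOKAHEAD),
                      path[-1])
        # Transform the target into the vehicle frame (x forward, y left).
        dx, dy = target[0] - x, target[1] - y
        lx = math.cos(heading) * dx + math.sin(heading) * dy
        ly = -math.sin(heading) * dx + math.cos(heading) * dy
        # Pure-pursuit curvature, then bicycle-model steering angle.
        curvature = 2.0 * ly / (lx * lx + ly * ly)
        return math.atan(WHEELBASE * curvature)

    # Example: follow a straight crop row offset 1 m to the vehicle's left.
    row = [(i * 1.0, 1.0) for i in range(50)]
    print(pure_pursuit_steer((0.0, 0.0, 0.0), row))
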
Calibration of Asynchronous Camera Networks for Object Reconstruction Tasks. Tabb, A.; and Medeiros, H. arXiv:1903.06811 [cs], March 2019.

Camera network and multi-camera calibration for external parameters is a necessary step for a variety of contexts in computer vision and robotics, ranging from three-dimensional reconstruction to human activity tracking. This paper describes a method for camera network and/or multi-camera calibration suitable for specific contexts: the cameras may not all have a common field of view, or if they do, there may be some views that are 180 degrees from one another, and the network may be asynchronous. The required calibration object is one or more planar calibration patterns, such as aruco or charuco patterns, rigidly attached to and distinguishable from one another. We formulate the camera network and/or multi-camera calibration problem in this context using rigidity constraints, represented as a system of equations, and an approximate solution is found through a two-step process. Synthetic and real experiments, including scenarios of an asynchronous camera network and a rotating imaging system, demonstrate the method in a variety of settings. Reconstruction accuracy error was less than 0.5 mm for all datasets. This method is suitable for new users to calibrate a camera network, and the modularity of the calibration object also allows for disassembly, shipping, and the use of this method in a variety of large and small spaces.

@article{tabb_calibration_2019,
    title = {Calibration of {Asynchronous} {Camera} {Networks} for {Object} {Reconstruction} {Tasks}},
    url = {http://arxiv.org/abs/1903.06811},
    urldate = {2019-03-22},
    journal = {arXiv:1903.06811 [cs]},
    author = {Tabb, Amy and Medeiros, Henry},
    month = mar,
    year = {2019},
    note = {arXiv: 1903.06811},
    keywords = {Computer Science - Computer Vision and Pattern Recognition}
}

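To make the calibration setting concrete, the sketch below estimates relative extrinsics between two cameras from a shared planar target using OpenCV. It covers only the synchronous, pairwise special case with a plain chessboard, not the paper's asynchronous rigidity-constraint formulation or its aruco/charuco boards; the image paths and intrinsics are placeholder assumptions.

    import cv2
    import numpy as np

    PATTERN = (9, 6)    # inner chessboard corners (assumed board geometry)
    SQUARE = 0.025      # square size in metres (assumed)

    # 3D corner coordinates in the board frame (z = 0 plane).
    obj_pts = np.zeros((PATTERN[0] * PATTERN[1], 3), np.float32)
    obj_pts[:, :2] = np.mgrid[0:PATTERN[0], 0:PATTERN[1]].T.reshape(-1, 2) * SQUARE

    def board_pose(image, K, dist):
        """4x4 pose of the board in this camera's frame, via PnP."""
        gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
        found, corners = cv2.findChessboardCorners(gray, PATTERN)
        if not found:
            raise RuntimeError("board not visible in this view")
        _, rvec, tvec = cv2.solvePnP(obj_pts, corners, K, dist)
        T = np.eye(4)
        T[:3, :3], _ = cv2.Rodrigues(rvec)
        T[:3, 3] = tvec.ravel()
        return T  # maps board-frame points into the camera frame

    # Placeholder inputs: stand-ins for two views of the same static board
    # captured at the same instant, with known per-camera intrinsics.
    K = np.array([[800.0, 0, 640], [0, 800.0, 360], [0, 0, 1]])
    dist = np.zeros(5)
    img_a = cv2.imread("camera_a.png")
    img_b = cv2.imread("camera_b.png")

    T_a = board_pose(img_a, K, dist)   # board -> camera A
    T_b = board_pose(img_b, K, dist)   # board -> camera B
    T_ab = T_b @ np.linalg.inv(T_a)    # camera A frame -> camera B frame

Chaining such pairwise estimates across a whole network, and relaxing the same-instant assumption, is where the paper's rigidity constraints and two-step solve come in.
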
Future farms without farmers. Asseng, S.; and Asche, F. Science Robotics, 4(27): eaaw1875. February 2019.

@article{asseng_future_2019,
    title = {Future farms without farmers},
    volume = {4},
    issn = {2470-9476},
    url = {http://robotics.sciencemag.org/lookup/doi/10.1126/scirobotics.aaw1875},
    doi = {10.1126/scirobotics.aaw1875},
    language = {en},
    number = {27},
    urldate = {2019-03-22},
    journal = {Science Robotics},
    author = {Asseng, Senthold and Asche, Frank},
    month = feb,
    year = {2019},
    pages = {eaaw1875}
}

Autonomous pollination of individual kiwifruit flowers: Toward a robotic kiwifruit pollinator. Williams, H.; Nejati, M.; Hussein, S.; Penhall, N.; Lim, J. Y.; Jones, M. H.; Bell, J.; Ahn, H. S.; Bradley, S.; Schaare, P.; Martinsen, P.; Alomar, M.; Patel, P.; Seabright, M.; Duke, M.; Scarfe, A.; and MacDonald, B. Journal of Field Robotics. January 2019.

@article{williams_autonomous_2019,
    title = {Autonomous pollination of individual kiwifruit flowers: {Toward} a robotic kiwifruit pollinator},
    issn = {1556-4959, 1556-4967},
    shorttitle = {Autonomous pollination of individual kiwifruit flowers},
    url = {https://onlinelibrary.wiley.com/doi/abs/10.1002/rob.21861},
    doi = {10.1002/rob.21861},
    language = {en},
    urldate = {2019-03-22},
    journal = {Journal of Field Robotics},
    author = {Williams, Henry and Nejati, Mahla and Hussein, Salome and Penhall, Nicky and Lim, Jong Yoon and Jones, Mark Hedley and Bell, Jamie and Ahn, Ho Seok and Bradley, Stuart and Schaare, Peter and Martinsen, Paul and Alomar, Mohammad and Patel, Purak and Seabright, Matthew and Duke, Mike and Scarfe, Alistair and MacDonald, Bruce},
    month = jan,
    year = {2019}
}

Development and evaluation of a low-cost and smart technology for precision weed management utilizing artificial intelligence. Partel, V.; Charan Kakarla, S.; and Ampatzidis, Y. Computers and Electronics in Agriculture, 157: 339–350. February 2019.

@article{partel_development_2019,
    title = {Development and evaluation of a low-cost and smart technology for precision weed management utilizing artificial intelligence},
    volume = {157},
    issn = {0168-1699},
    url = {https://linkinghub.elsevier.com/retrieve/pii/S0168169918316612},
    doi = {10.1016/j.compag.2018.12.048},
    language = {en},
    urldate = {2019-03-22},
    journal = {Computers and Electronics in Agriculture},
    author = {Partel, Victor and Charan Kakarla, Sri and Ampatzidis, Yiannis},
    month = feb,
    year = {2019},
    pages = {339--350}
}