Deep learning workflow to support in-flight processing of digital aerial imagery for wildlife population surveys. Ke, T., Yu, S. X., Koneff, M. D., Fronczak, D. L., Fara, L. J., Harrison, T. J., Landolt, K. L., Hlavacek, E. J., Lubinski, B. R., & White, T. P. PLOS ONE, 19(4):e0288121, April, 2024. Publisher: Public Library of Science. [Paper] [doi] [abstract] [bibtex] Deep learning shows promise for automating detection and classification of wildlife from digital aerial imagery to support cost-efficient remote sensing solutions for wildlife population monitoring. To support in-flight orthorectification and machine learning processing to detect and classify wildlife from imagery in near real-time, we evaluated deep learning methods that address hardware limitations and the need for processing efficiencies to support the envisioned in-flight workflow. We developed an annotated dataset for a suite of marine birds from high-resolution digital aerial imagery collected over open water environments to train the models. The proposed 3-stage workflow for automated, in-flight data processing includes: 1) image filtering based on the probability of any bird occurrence, 2) bird instance detection, and 3) bird instance classification. For image filtering, we compared the performance of a binary classifier with Mask Region-based Convolutional Neural Network (Mask R-CNN) as a means of sub-setting large volumes of imagery based on the probability of at least one bird occurrence in an image. On both the validation and test datasets, the binary classifier achieved higher performance than Mask R-CNN for predicting bird occurrence at the image-level. We recommend the binary classifier over Mask R-CNN for workflow first-stage filtering. For bird instance detection, we leveraged Mask R-CNN as our detection framework and proposed an iterative refinement method to bootstrap our predicted detections from loose ground-truth annotations. 
We also discuss future work to address the taxonomic classification phase of the envisioned workflow.
@article{ke_deep_2024,
  title     = {Deep learning workflow to support in-flight processing of digital aerial imagery for wildlife population surveys},
  author    = {Ke, Tsung-Wei and Yu, Stella X. and Koneff, Mark D. and Fronczak, David L. and Fara, Luke J. and Harrison, Travis J. and Landolt, Kyle L. and Hlavacek, Enrika J. and Lubinski, Brian R. and White, Timothy P.},
  journal   = {{PLOS} {ONE}},
  volume    = {19},
  number    = {4},
  pages     = {e0288121},
  month     = apr,
  year      = {2024},
  publisher = {Public Library of Science},
  issn      = {1932-6203},
  doi       = {10.1371/journal.pone.0288121},
  url       = {https://journals.plos.org/plosone/article?id=10.1371/journal.pone.0288121},
  urldate   = {2024-04-24},
  language  = {en},
  abstract  = {Deep learning shows promise for automating detection and classification of wildlife from digital aerial imagery to support cost-efficient remote sensing solutions for wildlife population monitoring. To support in-flight orthorectification and machine learning processing to detect and classify wildlife from imagery in near real-time, we evaluated deep learning methods that address hardware limitations and the need for processing efficiencies to support the envisioned in-flight workflow. We developed an annotated dataset for a suite of marine birds from high-resolution digital aerial imagery collected over open water environments to train the models. The proposed 3-stage workflow for automated, in-flight data processing includes: 1) image filtering based on the probability of any bird occurrence, 2) bird instance detection, and 3) bird instance classification. For image filtering, we compared the performance of a binary classifier with Mask Region-based Convolutional Neural Network (Mask R-CNN) as a means of sub-setting large volumes of imagery based on the probability of at least one bird occurrence in an image. On both the validation and test datasets, the binary classifier achieved higher performance than Mask R-CNN for predicting bird occurrence at the image-level. We recommend the binary classifier over Mask R-CNN for workflow first-stage filtering. For bird instance detection, we leveraged Mask R-CNN as our detection framework and proposed an iterative refinement method to bootstrap our predicted detections from loose ground-truth annotations. We also discuss future work to address the taxonomic classification phase of the envisioned workflow.},
  keywords  = {Political Boundaries},
}
Downloads: 0
{"_id":"vSK4xNjCve3wQ7rZv","bibbaseid":"ke-yu-koneff-fronczak-fara-harrison-landolt-hlavacek-etal-deeplearningworkflowtosupportinflightprocessingofdigitalaerialimageryforwildlifepopulationsurveys-2024","author_short":["Ke, T.","Yu, S. X.","Koneff, M. D.","Fronczak, D. L.","Fara, L. J.","Harrison, T. J.","Landolt, K. L.","Hlavacek, E. J.","Lubinski, B. R.","White, T. P."],"bibdata":{"bibtype":"article","type":"article","title":"Deep learning workflow to support in-flight processing of digital aerial imagery for wildlife population surveys","volume":"19","issn":"1932-6203","url":"https://journals.plos.org/plosone/article?id=10.1371/journal.pone.0288121","doi":"10.1371/journal.pone.0288121","abstract":"Deep learning shows promise for automating detection and classification of wildlife from digital aerial imagery to support cost-efficient remote sensing solutions for wildlife population monitoring. To support in-flight orthorectification and machine learning processing to detect and classify wildlife from imagery in near real-time, we evaluated deep learning methods that address hardware limitations and the need for processing efficiencies to support the envisioned in-flight workflow. We developed an annotated dataset for a suite of marine birds from high-resolution digital aerial imagery collected over open water environments to train the models. The proposed 3-stage workflow for automated, in-flight data processing includes: 1) image filtering based on the probability of any bird occurrence, 2) bird instance detection, and 3) bird instance classification. For image filtering, we compared the performance of a binary classifier with Mask Region-based Convolutional Neural Network (Mask R-CNN) as a means of sub-setting large volumes of imagery based on the probability of at least one bird occurrence in an image. On both the validation and test datasets, the binary classifier achieved higher performance than Mask R-CNN for predicting bird occurrence at the image-level. 
We recommend the binary classifier over Mask R-CNN for workflow first-stage filtering. For bird instance detection, we leveraged Mask R-CNN as our detection framework and proposed an iterative refinement method to bootstrap our predicted detections from loose ground-truth annotations. We also discuss future work to address the taxonomic classification phase of the envisioned workflow.","language":"en","number":"4","urldate":"2024-04-24","journal":"PLOS ONE","author":[{"propositions":[],"lastnames":["Ke"],"firstnames":["Tsung-Wei"],"suffixes":[]},{"propositions":[],"lastnames":["Yu"],"firstnames":["Stella","X."],"suffixes":[]},{"propositions":[],"lastnames":["Koneff"],"firstnames":["Mark","D."],"suffixes":[]},{"propositions":[],"lastnames":["Fronczak"],"firstnames":["David","L."],"suffixes":[]},{"propositions":[],"lastnames":["Fara"],"firstnames":["Luke","J."],"suffixes":[]},{"propositions":[],"lastnames":["Harrison"],"firstnames":["Travis","J."],"suffixes":[]},{"propositions":[],"lastnames":["Landolt"],"firstnames":["Kyle","L."],"suffixes":[]},{"propositions":[],"lastnames":["Hlavacek"],"firstnames":["Enrika","J."],"suffixes":[]},{"propositions":[],"lastnames":["Lubinski"],"firstnames":["Brian","R."],"suffixes":[]},{"propositions":[],"lastnames":["White"],"firstnames":["Timothy","P."],"suffixes":[]}],"month":"April","year":"2024","note":"Publisher: Public Library of Science","keywords":"Political Boundaries","pages":"e0288121","bibtex":"@article{ke_deep_2024,\n\ttitle = {Deep learning workflow to support in-flight processing of digital aerial imagery for wildlife population surveys},\n\tvolume = {19},\n\tissn = {1932-6203},\n\turl = {https://journals.plos.org/plosone/article?id=10.1371/journal.pone.0288121},\n\tdoi = {10.1371/journal.pone.0288121},\n\tabstract = {Deep learning shows promise for automating detection and classification of wildlife from digital aerial imagery to support cost-efficient remote sensing solutions for wildlife population monitoring. 
To support in-flight orthorectification and machine learning processing to detect and classify wildlife from imagery in near real-time, we evaluated deep learning methods that address hardware limitations and the need for processing efficiencies to support the envisioned in-flight workflow. We developed an annotated dataset for a suite of marine birds from high-resolution digital aerial imagery collected over open water environments to train the models. The proposed 3-stage workflow for automated, in-flight data processing includes: 1) image filtering based on the probability of any bird occurrence, 2) bird instance detection, and 3) bird instance classification. For image filtering, we compared the performance of a binary classifier with Mask Region-based Convolutional Neural Network (Mask R-CNN) as a means of sub-setting large volumes of imagery based on the probability of at least one bird occurrence in an image. On both the validation and test datasets, the binary classifier achieved higher performance than Mask R-CNN for predicting bird occurrence at the image-level. We recommend the binary classifier over Mask R-CNN for workflow first-stage filtering. For bird instance detection, we leveraged Mask R-CNN as our detection framework and proposed an iterative refinement method to bootstrap our predicted detections from loose ground-truth annotations. We also discuss future work to address the taxonomic classification phase of the envisioned workflow.},\n\tlanguage = {en},\n\tnumber = {4},\n\turldate = {2024-04-24},\n\tjournal = {PLOS ONE},\n\tauthor = {Ke, Tsung-Wei and Yu, Stella X. and Koneff, Mark D. and Fronczak, David L. and Fara, Luke J. and Harrison, Travis J. and Landolt, Kyle L. and Hlavacek, Enrika J. and Lubinski, Brian R. and White, Timothy P.},\n\tmonth = apr,\n\tyear = {2024},\n\tnote = {Publisher: Public Library of Science},\n\tkeywords = {Political Boundaries},\n\tpages = {e0288121},\n}\n\n\n\n\n\n\n\n","author_short":["Ke, T.","Yu, S. 
X.","Koneff, M. D.","Fronczak, D. L.","Fara, L. J.","Harrison, T. J.","Landolt, K. L.","Hlavacek, E. J.","Lubinski, B. R.","White, T. P."],"key":"ke_deep_2024","id":"ke_deep_2024","bibbaseid":"ke-yu-koneff-fronczak-fara-harrison-landolt-hlavacek-etal-deeplearningworkflowtosupportinflightprocessingofdigitalaerialimageryforwildlifepopulationsurveys-2024","role":"author","urls":{"Paper":"https://journals.plos.org/plosone/article?id=10.1371/journal.pone.0288121"},"keyword":["Political Boundaries"],"metadata":{"authorlinks":{}},"downloads":0},"bibtype":"article","biburl":"https://bibbase.org/zotero/NAAtlas2024","dataSources":["qLjf8q88GSLZ5dAmC"],"keywords":["political boundaries"],"search_terms":["deep","learning","workflow","support","flight","processing","digital","aerial","imagery","wildlife","population","surveys","ke","yu","koneff","fronczak","fara","harrison","landolt","hlavacek","lubinski","white"],"title":"Deep learning workflow to support in-flight processing of digital aerial imagery for wildlife population surveys","year":2024}