@article{pandey_veazie_whipker_young_2023, title={Predicting foliar nutrient concentrations and nutrient deficiencies of hydroponic lettuce using hyperspectral imaging}, volume={230}, ISSN={["1537-5129"]}, DOI={10.1016/j.biosystemseng.2023.05.005}, abstractNote={Effective management of plant essential nutrients is necessary for hydroponically grown lettuce to achieve high yields and maintain production. This study investigated in situ hyperspectral imaging of hydroponic lettuce for predicting nutrient concentrations and identifying nutrient deficiencies for nitrogen (N), phosphorus (P), potassium (K), calcium (Ca), magnesium (Mg), and sulphur (S). A greenhouse study was conducted using ‘Salanova Green’ lettuce grown with controlled solution treatments with varying macronutrient fertility rates of 0, 8, 16, 32, 64, and 100% each for N, P, K, Ca, Mg, and S. Plants were imaged using a hyperspectral line scanner at six and eight weeks after transplanting; then, plant tissues were sampled, and nutrient concentrations measured. Partial least squares regression (PLSR) models were developed to predict nutrient concentrations for each nutrient individually (PLS1) and for all six nutrient concentrations (PLS2). Several binary classification models were also developed to predict nutrient deficiencies. The PLS1 and PLS2 models predicted nutrient concentrations with coefficient of determination (R²) values from 0.60 to 0.88 for N, P, K, and S, while results for Ca and Mg yielded R² values of 0.12–0.34 for both harvest dates. Similarly, plants deficient in N, P, K, and S were classified more accurately compared to plants deficient in Ca and Mg for both harvest dates, with F1 scores ranging from 0.71 to 1.00, except for K, which had F1 scores of 0.40–0.67. Overall, results indicate that both leaf tissue nutrient concentration and nutrient deficiencies can be predicted using hyperspectral data collected for whole plants.}, journal={BIOSYSTEMS ENGINEERING}, author={Pandey, Piyush and Veazie, Patrick and Whipker, Brian and Young, Sierra}, year={2023}, month={Jun}, pages={458–469} } @article{nguyen_holt_knauer_abner_lobaton_young_2023, title={Towards rapid weight assessment of finishing pigs using a handheld, mobile RGB-D camera}, volume={226}, ISSN={["1537-5129"]}, url={https://doi.org/10.1016/j.biosystemseng.2023.01.005}, DOI={10.1016/j.biosystemseng.2023.01.005}, abstractNote={Pig weight measurement is essential for monitoring performance, welfare, and production value. Weight measurement using a scale provides the most accurate results; however, it is time-consuming and may increase animal stress. Subjective visual evaluations, even when conducted by an experienced caretaker, lack consistency and accuracy. Optical sensing systems provide alternative methods for estimating pig weight, but studies examining these systems focus only on images taken from stationary cameras. This study fills a gap in existing technology by examining a handheld, portable RGB-D imaging system for estimating pig weight. An Intel RealSense camera collected RGB-D data from finishing pigs at various market weights. 3D point clouds were computed for each pig, and latent features from a 3D generative model were used to predict pig weights using three regression models (SVR, MLP, and AdaBoost). These models were compared to two baseline models: median prediction and linear regression using body dimension measurements as predictor variables.
Under 10-fold cross-validation, all three latent feature models achieved lower mean absolute error (MAE) and root-mean-square error (RMSE) than the median prediction model (MAE = 12.3 kg, RMSE = 16.0 kg) but did not outperform linear regression between weight and girth measurements (MAE = 4.06 kg, RMSE = 4.94 kg). Of the models under consideration, SVR performed best (MAE = 9.25 kg, RMSE = 12.3 kg, mean absolute percentage error = 7.54%) when tested on unseen data. This research is an important step towards developing rapid pig body weight estimation methods from a handheld, portable imaging system by leveraging deep learning feature outputs and depth imaging technology.}, journal={BIOSYSTEMS ENGINEERING}, author={Nguyen, Anh H. and Holt, Jonathan P. and Knauer, Mark T. and Abner, Victoria A. and Lobaton, Edgar J. and Young, Sierra N.}, year={2023}, month={Feb}, pages={155–168} } @article{kendler_aharoni_young_sela_kis-papo_fahima_fishbain_2022, title={Detection of crop diseases using enhanced variability imagery data and convolutional neural networks}, volume={193}, ISSN={["1872-7107"]}, DOI={10.1016/j.compag.2022.106732}, abstractNote={The timely detection of crop diseases is critical for securing crop productivity, lowering production costs, and minimizing agrochemical use. This study presents a crop disease identification method that is based on Convolutional Neural Networks (CNN) trained on images taken with consumer-grade cameras. Specifically, this study addresses the early detection of wheat yellow rust, stem rust, powdery mildew, potato late blight, and wild barley net blotch. To facilitate this, pictures were taken in situ without modifying the scene or background and without controlling the illumination. Each image was then split into several patches, thus retaining the original spatial resolution of the image while allowing for data variability. The resulting dataset was highly diverse since the disease manifestation, imaging geometry, and illumination varied from patch to patch. This diverse dataset was used to train various CNN architectures to find the best match. The resulting classification accuracy was 95.4 ± 0.4%. These promising results lay the groundwork for autonomous early detection of plant diseases. Guidelines for implementing this approach in realistic conditions are also discussed.}, journal={COMPUTERS AND ELECTRONICS IN AGRICULTURE}, author={Kendler, Shai and Aharoni, Ran and Young, Sierra and Sela, Hanan and Kis-Papo, Tamar and Fahima, Tzion and Fishbain, Barak}, year={2022}, month={Feb} } @article{lu_young_linder_whipker_suchoff_2022, title={Hyperspectral Imaging With Machine Learning to Differentiate Cultivars, Growth Stages, Flowers, and Leaves of Industrial Hemp (Cannabis sativa L.)}, volume={12}, ISSN={["1664-462X"]}, DOI={10.3389/fpls.2021.810113}, abstractNote={As an emerging cash crop, industrial hemp (Cannabis sativa L.) grown for cannabidiol (CBD) has spurred a surge of interest in the United States. Cultivar selection and harvest timing are important to produce CBD hemp profitably and avoid economic loss resulting from the tetrahydrocannabinol (THC) concentration in the crop exceeding regulatory limits. Hence, there is a need for differentiating CBD hemp cultivars and growth stages to aid in cultivar and genotype selection and optimization of harvest timing. Current methods that rely on visual assessment of plant phenotypes and chemical procedures are limited because of their subjective and destructive nature.
In this study, hyperspectral imaging was proposed as a novel, objective, and non-destructive method for differentiating hemp cultivars, growth stages, and plant organs (leaves and flowers). Five cultivars of CBD hemp were grown under greenhouse conditions, and leaves and flowers were sampled at five growth stages, 2–10 weeks after flower initiation at 2-week intervals, and scanned by a benchtop hyperspectral imaging system in the spectral range of 400–1000 nm. The acquired images were subjected to image processing procedures to extract the spectra of hemp samples. The spectral profiles and scatter plots of principal component analysis of the spectral data revealed a certain degree of separation between hemp cultivars, growth stages, and plant organs. Machine learning based on regularized linear discriminant analysis achieved accuracies of up to 99.6% in differentiating the five hemp cultivars. Plant organ and growth stage need to be factored into model development for hemp cultivar classification. The classification models achieved 100% accuracy in differentiating the five growth stages and two plant organs. This study demonstrates the effectiveness of hyperspectral imaging for differentiating cultivars, growth stages, and plant organs of CBD hemp, which is a potentially useful tool for growers and breeders of CBD hemp.}, journal={FRONTIERS IN PLANT SCIENCE}, author={Lu, Yuzhen and Young, Sierra and Linder, Eric and Whipker, Brian and Suchoff, David}, year={2022}, month={Feb} } @article{lu_li_young_li_linder_suchoff_2022, title={Hyperspectral imaging with chemometrics for non-destructive determination of cannabinoids in floral and leaf materials of industrial hemp (Cannabis sativa L.)}, volume={202}, ISSN={["1872-7107"]}, DOI={10.1016/j.compag.2022.107387}, abstractNote={With the passage of the 2018 Farm Bill, industrial hemp (Cannabis sativa L.) has become a legal and economically promising crop commodity for U.S. farmers. There has been a surge of interest in growing industrial hemp for producing cannabinoids, such as cannabidiol (CBD), because of their medical potential. Quantitative determination of cannabinoids in harvested materials (primarily floral tissues) is critical for cannabinoid production and compliance testing. The concentrations of cannabinoids in hemp materials are conventionally determined using wet-chemistry chromatographic methods, which require destructive sampling and are time-consuming, costly, and thus not suitable for on-site rapid testing. This study presents a novel effort to utilize hyperspectral imaging technology for non-destructive quantification of major cannabinoids, including CBD, THC (tetrahydrocannabinol), CBG (cannabigerol), and their acid forms in fresh floral and leaf materials of industrial hemp on a dry weight basis. Hyperspectral images in the wavelength range of 400–1000 nm were acquired from floral and leaf tissues immediately after harvest from a total of 100 industrial hemp plants of five cultivars at varied growth stages. Linear discriminant analysis showed hyperspectral imaging could identify CBD-rich/poor and THC-legal/illegal flower samples with accuracies of 99% and 97%, respectively. Quantitative models based on full-spectrum PLS (partial least squares) achieved prediction accuracies of RPD (ratio of prediction to deviation) = 2.5 (corresponding R² = 0.84) for CBD and THC in floral tissues. Similar accuracies were obtained for their acid forms in flower samples.
The predictions for CBG and its acid form in floral tissues and all six cannabinoids in leaf tissues were unsatisfactory, with noticeably lower RPD values. Consistently improved accuracies were obtained by parsimonious PLS models based on a wavelength selection procedure for minimized variable collinearity. The best RPD values of approximately 2.6 (corresponding R² = 0.85) were obtained for CBD and THC in floral materials. This study demonstrates the utility of hyperspectral imaging as a potentially valuable tool for rapid quantification of cannabinoids in industrial hemp.}, journal={COMPUTERS AND ELECTRONICS IN AGRICULTURE}, author={Lu, Yuzhen and Li, Xu and Young, Sierra and Li, Xin and Linder, Eric and Suchoff, David}, year={2022}, month={Nov} } @article{veazie_pandey_young_ballance_hicks_whipker_2022, title={Impact of Macronutrient Fertility on Mineral Uptake and Growth of Lactuca sativa 'Salanova Green' in a Hydroponic System}, volume={8}, ISSN={["2311-7524"]}, url={https://doi.org/10.3390/horticulturae8111075}, DOI={10.3390/horticulturae8111075}, abstractNote={Lactuca sativa (commonly referred to as lettuce) is one of the most popular hydroponically grown crops. While other fertilizer rate work has been conducted on lettuce, the impact of each element has not been evaluated independently, nor have adequate foliar tissue concentrations been determined when all nutrients are plant-available. This study explores the impact that macronutrients have on the growth and yield of lettuce at different stages of the production cycle. Additionally, this study explores adequate nutrient rates by regressing nutrient curves to find the concentration of each element that corresponds to optimal growth. Plants were grown under varying macronutrient concentrations (0, 8, 16, 32, 64, and 100%) utilizing the concentrations of a modified Hoagland’s solution based on 150 mg·L⁻¹ N. Lettuce plants were grown in a silica sand culture and received a nutrient solution in which a single element was altered. Visual symptomology was documented, and leaf tissue mineral nutrient concentrations and biomass were measured at Weeks 3, 6, and 8 after transplant. Optimal elemental leaf tissue concentration and biomass varied by macronutrient rates and weeks of growth. Nitrogen rate produced a linear increase in total plant dry weight, but foliar N followed a quadratic plateau pattern. Other elements, such as phosphorus, potassium, and magnesium, produced distinct total plant dry weight plateaus despite increasing fertility concentrations. These results demonstrate that fertility recommendations can be lowered for nutrients where higher rates do not result in higher plant biomass or foliar nutrient concentrations.}, number={11}, journal={HORTICULTURAE}, author={Veazie, Patrick and Pandey, Piyush and Young, Sierra and Ballance, M. Seth
and Hicks, Kristin and Whipker, Brian}, year={2022}, month={Nov} } @article{young_lu_li_li_linder_suchoff_2022, title={NAPPN Annual Conference Abstract: Hyperspectral imaging for non-destructive determination of cannabinoids in floral and leaf materials of industrial hemp}, url={https://doi.org/10.22541/au.166497079.98875901/v1}, DOI={10.22541/au.166497079.98875901/v1}, author={Young, Sierra and Lu, Yuzhen and Li, Xu and Li, Xin and Linder, Eric and Suchoff, David}, year={2022}, month={Oct} } @article{lu_young_wang_wijewardane_2022, title={Robust plant segmentation of color images based on image contrast optimization}, volume={193}, ISSN={["1872-7107"]}, DOI={10.1016/j.compag.2022.106711}, abstractNote={Plant segmentation is a crucial task in computer vision applications for identification/classification and quantification of plant phenotypic features. Robust segmentation of plants is challenged by a variety of factors such as unstructured background, variable illumination, biological variations, and weak plant-background contrast. Existing color indices that are empirically developed in specific applications may not adapt robustly to varying imaging conditions. This study proposes a new method for robust, automatic segmentation of plants from background in color (red-green-blue, RGB) images. This method consists of unconstrained optimization of a linear combination of RGB component images to enhance the contrast between plant and background regions, followed by automatic thresholding of the contrast-enhanced images (CEIs). The validity of this method was demonstrated using five plant image datasets acquired under different field or indoor conditions, with a total of 329 color images as well as ground-truth plant masks. The CEIs, along with 10 common index images, were evaluated in terms of image contrast and plant segmentation accuracy. The CEIs, based on the maximized foreground-background separability, achieved consistent, substantial improvements in image contrast over the index images, with an average segmentation accuracy of F1 = 95%, which is 4% better than the best accuracy obtained by the indices. The index images were found to be sensitive to imaging conditions, and none of them performed robustly across the datasets. The proposed method is straightforward and easy to implement, can potentially be extended to nonlinear combinations of color components or other color spaces, and is generally useful in plant image analysis for precision agriculture and plant phenotyping.}, journal={COMPUTERS AND ELECTRONICS IN AGRICULTURE}, author={Lu, Yuzhen and Young, Sierra and Wang, Haifeng and Wijewardane, Nuwan}, year={2022}, month={Feb} } @article{saia_nelson_young_parham_vandegrift_2022_preprint, title={Ten Simple Rules for Researchers Who Want to Develop Web Apps}, volume={1}, url={https://doi.org/10.31223/X57P6R}, DOI={10.31223/X57P6R}, abstractNote={Growing interest in data-driven, decision-support tools across the life sciences and physical sciences has motivated development of web applications, also known as web apps.
Web apps can help disseminate research findings and present research outputs in ways that are more accessible and meaningful to the general public--from individuals, to governments, to companies. Specifically, web apps enable exploration of scenario testing and policy analysis (i.e., to answer “what if?”) as well as co-evolution of scientific and public knowledge. However, the majority of researchers developing web apps receive little formal training or technical guidance on how to develop and evaluate the effectiveness of their web-based decision support tools. Take some of us, for example. We (Saia and Nelson) are agricultural and environmental engineers with little experience in web app development, but we are interested in creating web apps to support sustainable aquaculture production in the Southeast. We had user (i.e., shellfish growers) interest, a goal in mind (i.e., develop a new forecast product and decision-support tool for shellfish aquaculturalists), and received funding to support this work. Yet, we experienced several unexpected hurdles from the start of our project that ended up being fairly common hiccups to the seasoned web app developers among us (Young, Parham). As a result, we share the following Ten Simple Rules, which highlight take-home messages, including lessons learned and practical tips, of our experience as burgeoning web app developers. We hope researchers interested in developing web apps draw insights from our (in)experience as they set out on their decision support tool development journey.}, publisher={California Digital Library (CDL)}, author={Saia, Sheila and Nelson, Natalie and Young, Sierra and Parham, Stanton and Vandegrift, Micah}, year={2022}, month={Jan} } @article{saia_nelson_young_parham_vandegrift_2022, title={Ten simple rules for researchers who want to develop web apps}, volume={18}, ISSN={["1553-7358"]}, url={https://doi.org/10.1371/journal.pcbi.1009663}, DOI={10.1371/journal.pcbi.1009663}, abstractNote={Growing interest in data-driven, decision-support tools across the life sciences and physical sciences has motivated development of web applications, also known as web apps. Web apps can help disseminate research findings and present research outputs in ways that are more accessible and meaningful to the general public--from individuals, to governments, to companies. Specifically, web apps enable exploration of scenario testing and policy analysis (i.e., to answer “what if?”) as well as co-evolution of scientific and public knowledge. However, the majority of researchers developing web apps receive little formal training or technical guidance on how to develop and evaluate the effectiveness of their web-based decision support tools. Take some of us, for example. We (Saia and Nelson) are agricultural and environmental engineers with little experience in web app development, but we are interested in creating web apps to support sustainable aquaculture production in the Southeast. We had user (i.e., shellfish growers) interest, a goal in mind (i.e., develop a new forecast product and decision-support tool for shellfish aquaculturalists), and received funding to support this work. Yet, we experienced several unexpected hurdles from the start of our project that ended up being fairly common hiccups to the seasoned web app developers among us (Young, Parham). As a result, we share the following Ten Simple Rules, which highlight take-home messages, including lessons learned and practical tips, of our experience as burgeoning web app developers.
We hope researchers interested in developing web apps draw insights from our (in)experience as they set out on their decision support tool development journey.}, number={1}, journal={PLOS COMPUTATIONAL BIOLOGY}, author={Saia, Sheila M. and Nelson, Natalie G. and Young, Sierra N. and Parham, Stanton and Vandegrift, Micah}, editor={Markel, Scott}, year={2022}, month={Jan} } @article{linder_young_li_inoa_suchoff_2022a, title={The Effect of Harvest Date on Temporal Cannabinoid and Biomass Production in the Floral Hemp (Cannabis sativa L.) Cultivars BaOx and Cherry Wine}, volume={8}, ISSN={["2311-7524"]}, url={https://doi.org/10.3390/horticulturae8100959}, DOI={10.3390/horticulturae8100959}, abstractNote={The objectives of this study were to model the temporal accumulation of cannabidiol (CBD) and tetrahydrocannabinol (THC) in field-grown floral hemp in North Carolina and establish harvest timing recommendations to minimize non-compliant crop production. Field trials were conducted in 2020 and 2021 with the ‘BaOx’ and ‘Cherry Wine’ cultivars. Harvest events started two weeks after floral initiation and occurred every two weeks for 12 weeks. Per-plant threshed biomass accumulation exhibited a linear plateau trend. The best-fit model for temporal accumulation of THC was a beta growth curve. As harvest date was delayed, total THC concentrations increased until concentrations reached their maximum, then decreased as plants approached senescence. Logistic regression was the best-fit model for temporal accumulation of CBD. CBD concentrations increased with later harvest dates. Unlike THC concentrations, there was no decline in total CBD concentrations. To minimize risk, growers should test their crop as early as possible within the USDA’s 30-day compliance window. We observed ‘BaOx’ and ‘Cherry Wine’ exceeding the compliance threshold 50 and 41 days after flower initiation, respectively.}, number={10}, journal={HORTICULTURAE}, author={Linder, Eric R. and Young, Sierra and Li, Xu and Inoa, Shannon Henriquez and Suchoff, David H.}, year={2022}, month={Oct} } @article{linder_young_li_inoa_suchoff_2022b, title={The Effect of Transplant Date and Plant Spacing on Biomass Production for Floral Hemp (Cannabis sativa L.)}, volume={12}, ISSN={["2073-4395"]}, url={https://doi.org/10.3390/agronomy12081856}, DOI={10.3390/agronomy12081856}, abstractNote={Floral hemp cultivated for the extraction of cannabinoids is a new crop in the United States, and agronomic recommendations are scarce. The objective of this study was to understand the effects of plant spacing and transplant date on floral hemp growth and biomass production. Field trials were conducted in North Carolina in 2020 and 2021 with the floral hemp cultivar BaOx. Transplant date treatments occurred every two weeks from 11 May to 7 July (±1 d). Plant spacing treatments were 0.91, 1.22, 1.52, and 1.83 m between plants. Weekly height and width data were collected throughout the vegetative period, and dry biomass was measured at harvest. Plant width was affected by transplant date and spacing. Plant height was affected by transplant date. Earlier transplant dates resulted in taller, wider plants, while larger plant spacing resulted in wider plants. Individual plant biomass increased with earlier transplant dates and larger plant spacing. On a per-hectare basis, biomass increased with earlier transplant dates and smaller transplant spacing.
An economic analysis found that returns were highest with 1.22 m spacing and decreased linearly at a rate of −163.098 USD ha⁻¹ d⁻¹. These findings highlight the importance of earlier transplant timing to maximize harvestable biomass.}, number={8}, journal={AGRONOMY-BASEL}, author={Linder, Eric R. and Young, Sierra and Li, Xu and Inoa, Shannon Henriquez and Suchoff, David H.}, year={2022}, month={Aug} } @article{pandey_narayan_young_2021, title={Frontier: Autonomy in Detection, Actuation, and Planning for Robotic Weeding Systems}, volume={64}, ISSN={["2151-0040"]}, DOI={10.13031/trans.14085}, number={2}, journal={TRANSACTIONS OF THE ASABE}, author={Pandey, P. and Narayan, Hemanth D. and Young, S. N.}, year={2021}, pages={557–563} } @article{lu_payn_pandey_acosta_heine_walker_young_2021, title={Hyperspectral Imaging with Cost-Sensitive Learning for High-Throughput Screening of Loblolly Pine (Pinus taeda L.) Seedlings for Freeze Tolerance}, volume={64}, ISSN={["2151-0040"]}, url={http://dx.doi.org/10.13031/trans.14708}, DOI={10.13031/trans.14708}, journal={TRANSACTIONS OF THE ASABE}, publisher={American Society of Agricultural and Biological Engineers (ASABE)}, author={Lu, Yuzhen and Payn, Kitt G. and Pandey, Piyush and Acosta, Juan J. and Heine, Austin J. and Walker, Trevor D. and Young, Sierra}, year={2021}, pages={2045–2059} } @article{pandey_payn_lu_heine_walker_acosta_young_2021, title={Hyperspectral Imaging Combined with Machine Learning for the Detection of Fusiform Rust Disease Incidence in Loblolly Pine Seedlings}, volume={13}, ISSN={["2072-4292"]}, url={https://doi.org/10.3390/rs13183595}, DOI={10.3390/rs13183595}, abstractNote={Loblolly pine is an economically important timber species in the United States, with almost 1 billion seedlings produced annually. The most significant disease affecting this species is fusiform rust, caused by Cronartium quercuum f. sp. fusiforme. Testing for disease resistance in the greenhouse involves artificial inoculation of seedlings followed by visual inspection for disease incidence. An automated, high-throughput phenotyping method could improve both the efficiency and accuracy of the disease screening process. This study investigates the use of hyperspectral imaging for the detection of diseased seedlings. A nursery trial comprising families with known in-field rust resistance data was conducted, and the seedlings were artificially inoculated with fungal spores. Hyperspectral images in the visible and near-infrared region (400–1000 nm) were collected six months after inoculation. The disease incidence was scored with traditional methods based on the presence or absence of visible stem galls. The seedlings were segmented from the background by thresholding normalized difference vegetation index (NDVI) images, and the delineation of individual seedlings was achieved through object detection using the Faster RCNN model. Plant parts were subsequently segmented using the DeepLabv3+ model. The trained DeepLabv3+ model for semantic segmentation achieved a pixel accuracy of 0.76 and a mean Intersection over Union (mIoU) of 0.62. Crown pixels were segmented using geometric features. Support vector machine discrimination models were built for classifying the plants into diseased and non-diseased classes based on spectral data, and balanced accuracy values were calculated for the comparison of model performance.
Averaged spectra from the whole plant (balanced accuracy = 61%), the crown (61%), the top half of the stem (77%), and the bottom half of the stem (62%) were used. A classification model built using the spectral data from the top half of the stem was found to be the most accurate and resulted in an area under the receiver operating characteristic curve (AUC) of 0.83.}, number={18}, journal={REMOTE SENSING}, publisher={MDPI AG}, author={Pandey, Piyush and Payn, Kitt G. and Lu, Yuzhen and Heine, Austin J. and Walker, Trevor D. and Acosta, Juan J. and Young, Sierra}, year={2021}, month={Sep} } @article{barnes_morgan_hake_devine_kurtz_ibendahl_sharda_rains_snider_maja_et_al_2021, title={Opportunities for Robotic Systems and Automation in Cotton Production}, volume={3}, ISSN={["2624-7402"]}, url={https://www.mdpi.com/2624-7402/3/2/23}, DOI={10.3390/agriengineering3020023}, abstractNote={Automation continues to play a greater role in agricultural production, with commercial systems now available for machine vision identification of weeds and other pests, autonomous weed control, and robotic harvesters for fruits and vegetables. The growing availability of autonomous machines in agriculture indicates that there are opportunities to increase automation in cotton production. This article considers how current and future advances in automation have, could, or will impact cotton production practices. The results are organized to follow the cotton production process from land preparation to planting to within-season management through harvesting and ginning. For each step, current and potential opportunities to automate processes are discussed. Specific examples include advances in automated weed control and progress made in the use of robotic systems for cotton harvesting.}, number={2}, journal={AGRIENGINEERING}, author={Barnes, Edward and Morgan, Gaylon and Hake, Kater and Devine, Jon and Kurtz, Ryan and Ibendahl, Gregory and Sharda, Ajay and Rains, Glen and Snider, John and Maja, Joe Mari and et al.}, year={2021}, month={Jun}, pages={339–362} } @article{lu_walker_acosta_young_pandey_heine_payn_2021, title={Prediction of Freeze Damage and Minimum Winter Temperature of the Seed Source of Loblolly Pine Seedlings Using Hyperspectral Imaging}, volume={67}, ISSN={["1938-3738"]}, url={https://doi.org/10.1093/forsci/fxab003}, DOI={10.1093/forsci/fxab003}, number={3}, journal={FOREST SCIENCE}, publisher={Oxford University Press (OUP)}, author={Lu, Yuzhen and Walker, Trevor D. and Acosta, Juan J. and Young, Sierra and Pandey, Piyush and Heine, Austin J. and Payn, Kitt G.}, year={2021}, month={Jun}, pages={321–334} } @article{aharoni_klymiuk_sarusi_young_fahima_fishbain_kendler_2021, title={Spectral light-reflection data dimensionality reduction for timely detection of yellow rust}, volume={22}, url={https://doi.org/10.1007/s11119-020-09742-2}, DOI={10.1007/s11119-020-09742-2}, abstractNote={Yellow rust (YR) wheat disease is one of the major threats to worldwide wheat production, and it often spreads rapidly to new and unexpected geographic locations. To cope with this threat, integrated pathogen management strategies combine disease-resistant plants, sensor monitoring technologies, and fungicides applied either preventively or curatively, which come with their associated monetary and environmental costs. This work presents a methodology for timely detection of YR that cuts down on hardware and computational requirements.
It enables frequent, detailed monitoring of the spread of YR, hence providing the opportunity to better target mitigation efforts, which is critical for successful integrated disease management. The method is trained to detect YR symptoms using reflectance spectra (VIS–NIR) and a classification algorithm at different stages of YR development, distinguishing them from typical defense responses occurring in resistant wheat. The classification method was trained and tested on four different spectral datasets. The results showed that using a full spectral range, a selection of the top 5% most significant spectral features, or five typical multispectral bands for early detection of YR in infected plants yielded a true positive rate of ~86%. The same data analysis with digital camera bands provided a true positive rate of 77%. These findings lay the groundwork for the development of high-throughput YR screening in the field using multispectral digital camera sensors that can be mounted on autonomous vehicles or drones as part of an integrated disease management scheme.}, number={1}, journal={Precision Agriculture}, publisher={Springer Science and Business Media LLC}, author={Aharoni, Ran and Klymiuk, Valentyna and Sarusi, Benny and Young, Sierra and Fahima, Tzion and Fishbain, Barak and Kendler, Shai}, year={2021}, month={Feb}, pages={267–286} } @article{young_lanciloti_peschel_2021, title={The Effects of Interface Views on Performing Aerial Telemanipulation Tasks Using Small UAVs}, volume={4}, ISSN={["1875-4805"]}, url={https://doi.org/10.1007/s12369-021-00783-9}, DOI={10.1007/s12369-021-00783-9}, abstractNote={This paper presents a human-robot interaction (HRI) study of a dedicated Mission Specialist interface for performing telemanipulation tasks using a small unoccupied aerial vehicle (UAV). Current literature suggests that the successful completion of aerial manipulation tasks in real-world environments requires human input due to challenges in autonomous perception and control. Visual information of the remote environment in a telemanipulation interface can significantly affect performance under direct control; however, the effects of interface visualizations on task performance have not been studied for UAV telemanipulation. This work evaluated the effects of interface viewpoint on aerial manipulation task performance. The interfaces evaluated in this study comprised video streams from cameras located onboard the UAV: (i) a manipulator egocentric view, (ii) a manipulator exocentric view, and (iii) a combination of egocentric and exocentric views. A total of 36 participants completed three different manipulation tasks using all three interface conditions. The observations and results showed that both the exocentric and mixed view configurations contributed to improved task performance over an egocentric-only interface. Further, this study resulted in data regarding view use, view effectiveness, and task type that can be used for further developing interfaces for aerial manipulators that change and adapt to the environment and task.}, journal={INTERNATIONAL JOURNAL OF SOCIAL ROBOTICS}, publisher={Springer Science and Business Media LLC}, author={Young, Sierra N. and Lanciloti, Ryan J.
and Peschel, Joshua M.}, year={2021}, month={Apr} } @article{process-based_streamflow_attribution_2020, title={A process-based approach to attribution of historical streamflow decline in a data-scarce and human-dominated watershed}, url={http://dx.doi.org/10.1002/hyp.13707}, DOI={10.1002/hyp.13707}, journal={Hydrological Processes}, year={2020}, month={Jan} } @article{lu_young_2020, title={A survey of public datasets for computer vision tasks in precision agriculture}, volume={178}, url={http://dx.doi.org/10.1016/j.compag.2020.105760}, DOI={10.1016/j.compag.2020.105760}, abstractNote={Computer vision technologies have attracted significant interest in precision agriculture in recent years. At the core of robotics and artificial intelligence, computer vision enables various tasks from planting to harvesting in the crop production cycle to be performed automatically and efficiently. However, the scarcity of public image datasets remains a crucial bottleneck for fast prototyping and evaluation of computer vision and machine learning algorithms for the targeted tasks. Since 2015, a number of image datasets have been established and made publicly available to alleviate this bottleneck. Despite this progress, a dedicated survey on these datasets is still lacking. To fill this gap, this paper makes the first comprehensive, though not exhaustive, review of the public image datasets collected under field conditions for facilitating precision agriculture, which include 15 datasets on weed control, 10 datasets on fruit detection, and 9 datasets on miscellaneous applications. We survey the main characteristics and applications of these datasets and discuss the key considerations for creating high-quality public image datasets. This survey paper will be valuable to the research community for the selection of suitable image datasets for algorithm development and the identification of where creation of new image datasets is needed to support precision agriculture.}, journal={Computers and Electronics in Agriculture}, publisher={Elsevier BV}, author={Lu, Yuzhen and Young, Sierra}, year={2020}, month={Nov}, pages={105760} } @article{young_peschel_2020, title={Review of Human–Machine Interfaces for Small Unmanned Systems With Robotic Manipulators}, volume={50}, url={https://doi.org/10.1109/THMS.2020.2969380}, DOI={10.1109/THMS.2020.2969380}, abstractNote={This article reviews the human–machine interaction (HMI) technologies used for telemanipulation by small unmanned systems (SUS) with remote manipulators. SUS, including land, air, and sea vehicles, can perform a wide range of reconnaissance and manipulation tasks with varying levels of autonomy. SUS operations involving physical interactions with the environment require some level of operator involvement, ranging from direct control to goal-oriented supervision. Telemanipulation remains a challenging task for all levels of human interaction because the operator and the vehicle are not colocated, and operators require HMI technologies that facilitate manipulation from a remote location. This article surveys human operator interfacing for over 70 teleoperated systems, summarizes the effects of physical and visual interface factors on user performance, and discusses these findings in the context of telemanipulating SUS.
This article is of importance to SUS researchers and practitioners who will directly benefit from HMI implementations that improve telemanipulation performance.}, number={2}, journal={IEEE Transactions on Human-Machine Systems}, publisher={Institute of Electrical and Electronics Engineers (IEEE)}, author={Young, Sierra N. and Peschel, Joshua M.}, year={2020}, month={Apr}, pages={131–143} } @article{young_2019, title={A Framework for Evaluating Field-Based, High-Throughput Phenotyping Systems: A Meta-Analysis}, volume={19}, ISSN={["1424-8220"]}, url={https://doi.org/10.3390/s19163582}, DOI={10.3390/s19163582}, abstractNote={This paper presents a framework for the evaluation of system complexity and utility and the identification of bottlenecks in the deployment of field-based, high-throughput phenotyping (FB-HTP) systems. Although the capabilities of the technology used for high-throughput phenotyping have improved and costs have decreased, there have been few, if any, successful attempts at developing turnkey field-based phenotyping systems. To identify areas for future improvement in developing turnkey FB-HTP solutions, a framework for evaluating their complexity and utility was developed and applied to a total of 10 case studies to highlight potential barriers in their development and adoption. The framework performs system factorization and rates the complexity and utility of subsystem factors, as well as each FB-HTP system as a whole, and provides data related to the trends and relationships within the complexity and utility factors. This work suggests that additional research and development are needed, focused on the following areas: (i) data handling and management, specifically data transfer from the field to the data processing pipeline, (ii) improved human-machine interaction to facilitate usability across multiple users, and (iii) design standardization of the factors common across all FB-HTP systems to limit the competing drivers of system complexity and utility. This framework can be used to evaluate both previously developed and future proposed systems to approximate the overall system complexity and identify areas for improvement prior to implementation.}, number={16}, journal={SENSORS}, publisher={MDPI AG}, author={Young, Sierra N.}, year={2019}, month={Aug} } @article{young_kayacan_peschel_2019, title={Design and field evaluation of a ground robot for high-throughput phenotyping of energy sorghum}, volume={20}, url={https://doi.org/10.1007/s11119-018-9601-6}, DOI={10.1007/s11119-018-9601-6}, abstractNote={This article describes the design and field evaluation of a low-cost, high-throughput phenotyping robot for energy sorghum for use in biofuel production. High-throughput phenotyping approaches have been used in isolated growth chambers or greenhouses, but there is a growing need for field-based, precision agriculture techniques to measure large quantities of plants at high spatial and temporal resolutions throughout a growing season. A low-cost, tracked mobile robot was developed to collect phenotypic data for individual plants and tested on two separate energy sorghum fields in Central Illinois during summer 2016. Stereo imaging techniques determined plant height, and a depth sensor measured stem width near the base of the plant.
A data capture rate of 0.4 ha bi-weekly was demonstrated, with platform robustness consistent with various environmental conditions and crop yield modeling needs, and formative human–robot interaction observations were made during the field trials to address usability. This work is of interest to researchers and practitioners advancing the field of plant breeding because it demonstrates a new phenotyping platform that can measure individual plant architecture traits accurately (absolute measurement error at 15% for plant height and 13% for stem width) over large areas at a sub-daily frequency; furthermore, the design of this platform can be extended for phenotyping applications in maize or other agricultural row crops.}, number={4}, journal={Precision Agriculture}, publisher={Springer Science and Business Media LLC}, author={Young, Sierra N. and Kayacan, Erkan and Peschel, Joshua M.}, year={2019}, month={Aug}, pages={697–722} } @article{young_peschel_penny_thompson_srinivasan_2017, title={Robot-Assisted Measurement for Hydrologic Understanding in Data Sparse Regions}, volume={9}, DOI={10.3390/w9070494}, abstractNote={This article describes the field application of small, low-cost robots for remote surface data collection and an automated workflow to support water balance computations and hydrologic understanding where water availability data are sparse. Current elevation measurement approaches, such as manual surveying and LiDAR, are costly and infrequent, leading to potential inefficiencies for quantifying the dynamic hydrologic storage capacity of the land surface over large areas. Experiments to evaluate a team of two different robots, including an unmanned aerial vehicle (UAV) and an unmanned surface vehicle (USV), to collect hydrologic surface data utilizing sonar and visual sensors were conducted at three different field sites within the Arkavathy Basin river network located near Bangalore in Karnataka, South India. Visual sensors were used on the UAV to capture high resolution imagery for topographic characterization, and sonar sensors were deployed on the USV to capture bathymetric readings; the data streams were fused in an automated workflow to determine the storage capacity of agricultural reservoirs (also known as ``tanks'') at the three field sites. This study suggests: (i) this robot-assisted methodology is low-cost and suitable for novice users, and (ii) storage capacity data collected at previously unmapped locations revealed strong power-type relationships between surface area, stage, and storage volume, which can be incorporated into modeling of landscape-scale hydrology. This methodology is of importance to water researchers and practitioners because it produces local, high-resolution representations of bathymetry and topography and enables water balance computations at small-watershed scales, which offer insight into the present-day dynamics of a strongly human-impacted watershed.}, number={7}, journal={Water}, publisher={MDPI AG}, author={Young, Sierra and Peschel, Joshua and Penny, Gopal and Thompson, Sally and Srinivasan, Veena}, year={2017}, month={Jul}, pages={494} } @article{kayacan_young_peschel_chowdhary, title={High-precision control of tracked field robots in the presence of unknown traction coefficients}, url={https://onlinelibrary.wiley.com/doi/abs/10.1002/rob.21794}, DOI={10.1002/rob.21794}, journal={Journal of Field Robotics}, author={Kayacan, Erkan and Young, Sierra N. and Peschel, Joshua M.
and Chowdhary, Girish} }