@article{bloom_larsen_martinez_williams_jones_kudenov_2025,
  author        = {Bloom, Zachary A. and Larsen, Joshua C. and Pena Martinez, Enrique E. and Williams, Cranos M. and Jones, Daniela S. and Kudenov, Michael W.},
  title         = {High-Throughput Classification and Quantification of Skinning Phenotype in Sweet Potatoes},
  journal       = {The Plant Phenome Journal},
  volume        = {8},
  number        = {1},
  year          = {2025},
  month         = may,
  doi           = {10.1002/ppj2.70023},
  internal-note = {NOTE(review): pages/eid absent from the export -- possibly e70023 per the DOI suffix, confirm against the journal. Surname "Pena Martinez" reconstructed from exported "Martinez, Enrique E. Pena" -- verify against the published byline. Three inline-math symbols were lost in the abstract extraction; "(PS)" restored from later usage, the two area symbols omitted.},
  abstract      = {Sweet potatoes (SPs) (\textit{Ipomoea batatas}) are a valued crop for their color, flavor, and nutrition. Harvesting is labor-intensive, requiring hand-picking to maintain skin quality. Mechanical harvesting often causes skin damage, known as ``skinning,'' where skin is cut, scraped, or torn, leading to lower quality during packing. To manage this, packers may conduct a costly ``field-switch'' to reduce skinning in the production line. Currently, skinning levels are visually assessed by the packers and stakeholders. Field-switches involve transitioning between multiple fields during harvest to meet specific customer orders (e.g., supermarkets, processing plants, or end users) that require high-quality SPs. This process aims to minimize skinning and ensure the SPs meet the desired quality standards for those orders. This study introduces a computer vision (CV) pipeline to automate skinning assessment using a ResNet50-based DeepLabV3+ semantic segmentation model. The CV system was trained to identify three classes: skinning, intact skin, and background. A machine vision camera, mounted above a conveyor belt, captured images throughout several full production days. The pipeline calculated the percentage of skinning (PS) as the ratio between the predicted skinning area and the total SP surface area. Using this method, daily production trends and field-switch decisions were studied with image data from six production days, chosen by our grower collaborator. Percent skinning ratings calculated from random subsamples of imaged SPs---where only a portion of the full image set was analyzed---showed no significant differences compared to those derived from complete image sets. This demonstrates that subsampling can reduce computational processing times by 90\% while maintaining accuracy. When data were binned in 30-min intervals, field-switches occurred when there was approximately 1.5 PS. Across 2,417,907 SP instances, the model achieved a root mean square error of 0.55\%, R of 0.84, 80.03\% recall, 99.99\% specificity, 77.44\% F1 score, and 99.98\% grading accuracy. This offers a promising improvement for automatic skinning detection on a commercial scale.},
}