@article{bernotas2019psplant,
  title = {A photometric stereo-based 3D imaging system using computer vision and deep learning for tracking plant growth},
  author = {Bernotas, Gytis and Scorza, Livia C.T. and Hansen, Mark F. and Hales, Ian J. and Halliday, Karen J. and Smith, Lyndon N. and Smith, Melvyn L. and McCormick, Alistair J.},
  journal = {GigaScience},
  volume = {8},
  number = {5},
  year = {2019},
  publisher = {Oxford University Press (OUP)},
  doi = {10.1093/gigascience/giz056},
  eissn = {2047-217X},
  url = {https://uwe-repository.worktribe.com/output/848300},
  publicationstatus = {Published},
  keywords = {Centre for Machine Vision, Arabidopsis thaliana, leaf angle, segmentation, machine learning, near-infrared (NIR) LEDs, photomorphogenesis, thermomorphogenesis},
  abstract = {Background: Tracking and predicting the growth performance of plants in different environments is critical for predicting the impact of global climate change. Automated approaches for image capture and analysis have allowed for substantial increases in the throughput of quantitative growth trait measurements compared with manual assessments. Recent work has focused on adopting computer vision and machine learning approaches to improve the accuracy of automated plant phenotyping. Here we present PS-Plant, a low-cost and portable 3D plant phenotyping platform based on an imaging technique novel to plant phenotyping called photometric stereo (PS). Results: We calibrated PS-Plant to track the model plant Arabidopsis thaliana throughout the day-night (diel) cycle and investigated growth architecture under a variety of conditions to illustrate the dramatic effect of the environment on plant phenotype. We developed bespoke computer vision algorithms and assessed available deep neural network architectures to automate the segmentation of rosettes and individual leaves, and to extract basic and more advanced traits from PS-derived data, including the tracking of 3D plant growth and diel leaf hyponastic movement. Furthermore, we have produced the first PS training data set, which includes 221 manually annotated Arabidopsis rosettes used for training and data analysis (1,768 images in total). A full protocol is provided, including all software components and an additional test data set. Conclusions: PS-Plant is a powerful new phenotyping tool for plant research that provides robust data at high temporal and spatial resolutions. The system is well suited for small- and large-scale research and will help to accelerate bridging of the phenotype-to-genotype gap.}
}