@article{867,
  title    = {An automatic visible-range video weed detection, segmentation and classification prototype in potato field},
  journal  = {Heliyon},
  volume   = {6},
  year     = {2020},
  pages    = {e03685},
  abstract = {Weeds can be defined as destructive plants that grow alongside agricultural crops and compete with them for water and nutrients. Uniform spraying of herbicides is nowadays a common cause of crop poisoning, environmental pollution and high herbicide costs. Site-specific spraying is a possible solution to the problems caused by uniform spraying in the field. For this reason, a machine vision prototype is proposed in this study, based on video processing and meta-heuristic classifiers, for the online identification and classification of the Marfona potato plant (Solanum tuberosum) and 4299 samples from five weed varieties: Malva neglecta (mallow), Portulaca oleracea (purslane), Chenopodium album L (lamb{\textquoteright}s quarters), Secale cereale L (rye) and Xanthium strumarium (cocklebur). In order to properly train the machine vision system, various videos taken from two Marfona potato fields covering a total surface of six hectares are used. After the extraction of texture features based on the gray level co-occurrence matrix (GLCM), color features, spectral descriptors of texture, moment invariants and shape features, six effective discriminant features were selected: the standard deviation of the saturation (S) component in HSV color space, the difference between the first and seventh moment invariants, the mean value of the hue (H) component in HSI color space, the area-to-length ratio, the average blue-difference chrominance (Cb) component in YCbCr color space and the standard deviation of the in-phase (I) component in YIQ color space. Classification results show a high correct classification rate (CCR) of 98\% over the test set, properly distinguishing the potato plant from the five weed varieties mentioned above. Finally, the machine vision prototype was tested in the field under real conditions and was able to properly detect, segment and classify weeds from potato plants at a travel speed of up to 0.15 m/s.},
  doi      = {https://doi.org/10.1016/j.heliyon.2020.e03685},
  url      = {https://www.sciencedirect.com/science/article/pii/S2405844020305302},
  author   = {Sajad Sabzi and Yousef Abbaspour-Gilandeh and J I Arribas}
}
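Note: the following sketch is illustrative only and is not the authors' code. It shows how the six discriminant features named in the abstract above could be computed for a single segmented plant region with OpenCV and NumPy; the hue channel used, the meaning of "length", and the moment-invariant normalization are not specified in the abstract and are assumptions here.

# Illustrative feature-extraction sketch for one segmented plant region.
import cv2
import numpy as np

def plant_features(bgr_image, mask):
    """bgr_image: HxWx3 uint8 crop; mask: HxW uint8, 255 on plant pixels."""
    plant = mask > 0

    # 1) Standard deviation of the saturation (S) component in HSV.
    hsv = cv2.cvtColor(bgr_image, cv2.COLOR_BGR2HSV)
    s_std = float(np.std(hsv[..., 1][plant]))

    # 2) Difference of the first and seventh Hu moment invariants of the mask.
    hu = cv2.HuMoments(cv2.moments(mask, binaryImage=True)).flatten()
    hu_diff = float(hu[0] - hu[6])

    # 3) Mean hue (H). Hue is defined identically in HSI and HSV, so the
    #    OpenCV HSV hue channel is used as a stand-in here (assumption).
    h_mean = float(np.mean(hsv[..., 0][plant]))

    # 4) Area-to-length ratio. "Length" is taken here as the perimeter of
    #    the largest blob (assumption; the paper may define it differently).
    contours, _ = cv2.findContours(mask, cv2.RETR_EXTERNAL,
                                   cv2.CHAIN_APPROX_SIMPLE)
    c = max(contours, key=cv2.contourArea)
    area_length = cv2.contourArea(c) / max(cv2.arcLength(c, True), 1e-6)

    # 5) Mean blue-difference chrominance (Cb). OpenCV orders the channels
    #    as Y, Cr, Cb, so Cb is channel index 2.
    ycrcb = cv2.cvtColor(bgr_image, cv2.COLOR_BGR2YCrCb)
    cb_mean = float(np.mean(ycrcb[..., 2][plant]))

    # 6) Standard deviation of the in-phase (I) component in YIQ. OpenCV has
    #    no YIQ conversion, so the standard NTSC matrix row is applied.
    rgb = cv2.cvtColor(bgr_image, cv2.COLOR_BGR2RGB).astype(np.float32) / 255.0
    i_chan = 0.596 * rgb[..., 0] - 0.274 * rgb[..., 1] - 0.322 * rgb[..., 2]
    i_std = float(np.std(i_chan[plant]))

    return [s_std, hu_diff, h_mean, area_length, cb_mean, i_std]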
@article{862,
  title    = {An Automatic Non-Destructive Method for the Classification of the Ripeness Stage of Red Delicious Apples in Orchards Using Aerial Video},
  journal  = {Agronomy},
  volume   = {9},
  year     = {2019},
  abstract = {The estimation of the ripening state in orchards helps improve post-harvest processes. Picking fruits based on their stage of maturity can reduce storage costs and improve market outcomes. Moreover, aerial images and the estimated ripeness can be used as indicators for detecting water stress and determining the amount of water applied during irrigation. Additionally, they can be related to the crop coefficient (Kc) of seasonal water needs. The purpose of this research is to develop a new computer vision algorithm to detect the fruits present in aerial images of an apple cultivar (Red Delicious variety) and to estimate their ripeness stage among four possible classes: unripe, half-ripe, ripe, and overripe. The proposed method is based on a combination of the most effective color features and a classifier based on artificial neural networks optimized with genetic algorithms. The obtained results indicate an average classification accuracy of 97.88\% over a dataset of 8390 images and 27,687 apples, with values of the area under the ROC (receiver operating characteristic) curve near or above 0.99 for all classes. We believe this is a remarkable performance that allows a proper non-intrusive estimation of ripeness and will help improve harvesting strategies.},
  doi      = {https://doi.org/10.3390/agronomy9020084},
  url      = {https://www.mdpi.com/2073-4395/9/2/84},
  author   = {S Sabzi and Yousef Abbaspour-Gilandeh and G Garcia-Mateos and A Ruiz-Canales and J M Molina-Martinez and J I Arribas}
}
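Note: the following is a minimal, hedged sketch of the "artificial neural networks optimized with genetic algorithms" idea mentioned in the abstract above, not the authors' implementation. A small genetic algorithm searches over the hidden-layer size and L2 penalty of an MLP classifier, scoring candidates by cross-validated accuracy; the synthetic data, search ranges and GA settings are placeholder assumptions.

# Illustrative GA-tuned neural network classifier sketch.
import numpy as np
from sklearn.datasets import make_classification
from sklearn.model_selection import cross_val_score
from sklearn.neural_network import MLPClassifier

rng = np.random.default_rng(0)
# Placeholder data standing in for the color-feature vectors and the four
# ripeness classes (unripe, half-ripe, ripe, overripe).
X, y = make_classification(n_samples=400, n_features=6, n_informative=4,
                           n_classes=4, random_state=0)

def fitness(genome):
    # genome = (hidden units, log10 of the L2 penalty alpha)
    hidden, log_alpha = genome
    clf = MLPClassifier(hidden_layer_sizes=(int(hidden),),
                        alpha=10.0 ** log_alpha, max_iter=500, random_state=0)
    return cross_val_score(clf, X, y, cv=3).mean()

# Initial population: hidden units in [5, 50], log10(alpha) in [-5, -1].
pop = [np.array([rng.integers(5, 51), rng.uniform(-5, -1)]) for _ in range(8)]

for generation in range(5):
    scored = sorted(pop, key=fitness, reverse=True)
    parents = scored[:4]                        # truncation selection
    children = []
    for _ in range(4):
        a, b = rng.choice(len(parents), 2, replace=False)
        child = (parents[a] + parents[b]) / 2   # arithmetic crossover
        child += rng.normal(0, [2.0, 0.2])      # Gaussian mutation
        child[0] = np.clip(child[0], 5, 50)
        child[1] = np.clip(child[1], -5, -1)
        children.append(child)
    pop = parents + children

best = max(pop, key=fitness)
print("best hidden units:", int(best[0]), "best alpha:", 10.0 ** best[1])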