Publications
2024
García-Ordás, María Teresa; Alegre, Enrique; Alaiz-Rodríguez, Rocío; González-Castro, Víctor
Tool wear monitoring using an online, automatic and low cost system based on local texture Journal article
In: arXiv preprint arXiv:2402.05977, 2024.
Abstract | Links | BibTeX | Tags: Computer vision, machine learning, Milling, Tool wear
@article{garcia-ordas_tool_2024,
title = {Tool wear monitoring using an online, automatic and low cost system based on local texture},
author = {María Teresa García-Ordás and Enrique Alegre and Rocío Alaiz-Rodríguez and Víctor González-Castro},
url = {https://arxiv.org/abs/2402.05977},
year = {2024},
date = {2024-01-01},
urldate = {2024-01-01},
journal = {arXiv preprint arXiv:2402.05977},
abstract = {This work presents a fast and cost-effective method using computer vision and machine learning to assess cutting tool wear in edge profile milling. A new dataset of 577 images was created, including functional and disposable cutting edges. The method divides the edges into regions (Wear Patches) and classifies them using texture descriptors (LBP). A Support Vector Machine (SVM) achieved 90.26% accuracy in detecting worn tools, demonstrating strong potential for automatic wear monitoring in milling.},
keywords = {Computer vision, machine learning, Milling, Tool wear},
pubstate = {published},
tppubtype = {article}
}
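The LBP-plus-SVM pipeline summarised in the abstract can be illustrated with a short sketch. This is a minimal, hypothetical example rather than the authors' code: the patch extraction, LBP parameters and SVM settings below are assumptions, with scikit-image providing the uniform LBP and scikit-learn the worn/functional classifier.

# Minimal sketch of an LBP + SVM wear classifier (illustrative, not the paper's code).
import numpy as np
from skimage.feature import local_binary_pattern
from sklearn.svm import SVC
from sklearn.model_selection import train_test_split
from sklearn.metrics import accuracy_score

P, R = 8, 1  # LBP neighbourhood: 8 samples on a radius-1 circle (assumed)

def lbp_histogram(gray_patch):
    """Uniform LBP histogram used as the texture descriptor of one wear patch."""
    lbp = local_binary_pattern(gray_patch, P, R, method="uniform")
    n_bins = P + 2  # uniform codes 0..P plus one bin for non-uniform patterns
    hist, _ = np.histogram(lbp, bins=n_bins, range=(0, n_bins), density=True)
    return hist

def train_wear_classifier(patches, labels):
    """patches: grayscale wear-patch images; labels: 0 = functional edge, 1 = worn edge."""
    X = np.array([lbp_histogram(p) for p in patches])
    X_tr, X_te, y_tr, y_te = train_test_split(X, labels, test_size=0.3, random_state=0)
    clf = SVC(kernel="rbf", C=10.0, gamma="scale").fit(X_tr, y_tr)
    print("held-out accuracy:", accuracy_score(y_te, clf.predict(X_te)))
    return clf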
2022
Blanco-Medina, Pablo; Fidalgo, Eduardo; Alegre, Enrique; González-Castro, Víctor
A survey on methods, datasets and implementations for scene text spotting Journal article
In: IET Image Processing, vol. 16, no. 13, pp. 3426–3445, 2022.
Abstract | Links | BibTeX | Tags: Computer vision, image text detection, OCR, text spotting
@article{blanco-medina_survey_2022,
title = {A survey on methods, datasets and implementations for scene text spotting},
author = {Pablo Blanco-Medina and Eduardo Fidalgo and Enrique Alegre and Víctor González-Castro},
url = {https://ietresearch.onlinelibrary.wiley.com/doi/full/10.1049/ipr2.12574},
year = {2022},
date = {2022-01-01},
journal = {IET Image Processing},
volume = {16},
number = {13},
pages = {3426–3445},
abstract = {Text Spotting combines the tasks of detecting and transcribing text present in images, addressing challenges like orientation, aspect ratio, vertical text, and multiple languages in a single image. This paper analyzes and compares the most recent methods and publications in the field, extending beyond traditional comparisons of architectures and performance. It also discusses aspects often overlooked, such as hardware, software, backbone architectures, main challenges, and programming languages used in algorithms. The review covers research from 2016 to 2022, highlighting current problems, future trends, and providing a baseline for the development and comparison of future Text Spotting methods.},
keywords = {Computer vision, image text detection, OCR, text spotting},
pubstate = {published},
tppubtype = {article}
}
2020
Fidalgo, Eduardo; Carofilis-Vasco, Andrés; Jáñez-Martino, Francisco; Blanco-Medina, Pablo
Classifying suspicious content in Tor Darknet Journal article
In: arXiv preprint arXiv:2005.10086, 2020.
Abstract | Links | BibTeX | Tags: Computer vision, Criminal Activity Detection, Darknet Analysis, Image classification
@article{fidalgo_classifying_2020-1,
title = {Classifying suspicious content in Tor Darknet},
author = {Eduardo Fidalgo and Andrés Carofilis-Vasco and Francisco Jáñez-Martino and Pablo Blanco-Medina},
url = {https://ui.adsabs.harvard.edu/abs/2020arXiv200510086F/abstract},
year = {2020},
date = {2020-01-01},
journal = {arXiv preprint arXiv:2005.10086},
abstract = {This paper proposes Semantic Attention Keypoint Filtering (SAKF) to classify Tor Darknet images by focusing on significant features related to criminal activities. By combining saliency maps with Bag of Visual Words (BoVW), SAKF outperforms CNN approaches (MobileNet v1, ResNet50) and BoVW with dense SIFT descriptors, achieving 87.98% accuracy.},
keywords = {Computer vision, Criminal Activity Detection, Darknet Analysis, Image classification},
pubstate = {published},
tppubtype = {article}
}
Fidalgo, Eduardo; Carofilis-Vasco, Andrés; Jáñez-Martino, Francisco; Blanco-Medina, Pablo
Classifying Suspicious Content in Tor Darknet Journal article
In: arXiv e-prints, pp. arXiv–2005, 2020.
Abstract | Links | BibTeX | Tags: Computer vision, Criminal Activity Detection, Darknet Analysis, Image classification
@article{fidalgo_classifying_2020,
title = {Classifying Suspicious Content in Tor Darknet},
author = {Eduardo Fidalgo and Andrés Carofilis-Vasco and Francisco Jáñez-Martino and Pablo Blanco-Medina},
url = {https://arxiv.org/abs/2005.10086},
year = {2020},
date = {2020-01-01},
journal = {arXiv e-prints},
pages = {arXiv–2005},
abstract = {This paper proposes Semantic Attention Keypoint Filtering (SAKF) to classify Tor Darknet images by focusing on significant features related to criminal activities. By combining saliency maps with Bag of Visual Words (BoVW), SAKF outperforms CNN approaches (MobileNet v1, ResNet50) and BoVW with dense SIFT descriptors, achieving 87.98% accuracy.},
keywords = {Computer vision, Criminal Activity Detection, Darknet Analysis, Image classification},
pubstate = {published},
tppubtype = {article}
}
2019
Fidalgo, Eduardo; Alegre, Enrique; Fernández-Robles, Laura; González-Castro, Víctor
Classifying suspicious content in tor darknet through Semantic Attention Keypoint Filtering Journal article
In: Digital Investigation, vol. 30, pp. 12–22, 2019, (Publisher: Elsevier).
Abstract | Links | BibTeX | Tags: Computer vision, Darknet Investigation, Image classification, Semantic Attention
@article{fidalgo_classifying_2019,
title = {Classifying suspicious content in tor darknet through Semantic Attention Keypoint Filtering},
author = {Eduardo Fidalgo and Enrique Alegre and Laura Fernández-Robles and Víctor González-Castro},
url = {https://www.sciencedirect.com/science/article/pii/S1742287619300027},
year = {2019},
date = {2019-01-01},
journal = {Digital Investigation},
volume = {30},
pages = {12–22},
abstract = {This paper presents Semantic Attention Keypoint Filtering (SAKF) for automatically classifying relevant parts of images from the Tor Darknet, focusing on salient features while removing non-significant background. By combining saliency maps with Bag of Visual Words (BoVW), SAKF outperforms dense SIFT descriptors and deep CNN features (MobileNet, ResNet50), achieving significantly higher accuracies across multiple datasets. The approach shows promise for aiding law enforcement investigations in the Darknet.},
note = {Publisher: Elsevier},
keywords = {Computer vision, Darknet Investigation, Image classification, Semantic Attention},
pubstate = {published},
tppubtype = {article}
}
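A rough sketch of the SAKF idea, saliency-filtered keypoints feeding a Bag of Visual Words, is given below. It is illustrative only: the saliency model, threshold and vocabulary size are assumptions rather than the method published in the paper, and the cv2.saliency module requires opencv-contrib-python.

# Sketch of a saliency-filtered Bag of Visual Words, in the spirit of SAKF
# (illustrative only; saliency model, threshold and vocabulary size are assumptions).
# Requires opencv-contrib-python for the cv2.saliency module.
import cv2
import numpy as np
from sklearn.cluster import KMeans

VOCAB_SIZE = 256           # visual vocabulary size (assumed)
SALIENCY_THRESHOLD = 0.5   # keep keypoints on sufficiently salient pixels (assumed)

def salient_descriptors(bgr_image):
    """SIFT descriptors restricted to salient regions of one image."""
    gray = cv2.cvtColor(bgr_image, cv2.COLOR_BGR2GRAY)
    ok, sal_map = cv2.saliency.StaticSaliencyFineGrained_create().computeSaliency(bgr_image)
    keypoints, descriptors = cv2.SIFT_create().detectAndCompute(gray, None)
    if descriptors is None:
        return np.empty((0, 128), dtype=np.float32)
    keep = [i for i, kp in enumerate(keypoints)
            if sal_map[int(kp.pt[1]), int(kp.pt[0])] >= SALIENCY_THRESHOLD]
    return descriptors[keep]

def build_vocabulary(descriptor_sets):
    """Cluster the pooled salient descriptors into a visual vocabulary."""
    return KMeans(n_clusters=VOCAB_SIZE, n_init=10, random_state=0).fit(np.vstack(descriptor_sets))

def bovw_histogram(descriptors, vocabulary):
    """Encode one image as a normalised histogram of visual words (input to an SVM)."""
    words = vocabulary.predict(descriptors)
    hist = np.bincount(words, minlength=VOCAB_SIZE).astype(float)
    return hist / max(hist.sum(), 1.0)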
2018
Chaves, Deisy; Saikia, Surajit; Fernández-Robles, Laura; Alegre, Enrique; Trujillo, María
A systematic review on object localisation methods in images Journal article
In: Revista Iberoamericana de Automática e Informática Industrial, vol. 15, no. 3, pp. 231–242, 2018, (Publisher: UNIV POLITECNICA VALENCIA, EDITORIAL UPV CAMINO VERA SN, VALENCIA, 46022, SPAIN).
Abstract | Links | BibTeX | Tags: automated detection, Computer vision, deep learning, Faster-RCNN, image processing, Mask-RCNN, object localization, visual inspection
@article{chaves_systematic_2018,
title = {A systematic review on object localisation methods in images},
author = {Deisy Chaves and Surajit Saikia and Laura Fernández-Robles and Enrique Alegre and María Trujillo},
url = {https://polipapers.upv.es/index.php/RIAI/article/view/10229},
year = {2018},
date = {2018-01-01},
journal = {Revista Iberoamericana de Automática e Informática Industrial},
volume = {15},
number = {3},
pages = {231–242},
abstract = {This article provides a systematic review of methods for precise object localization in images, covering techniques from traditional sliding window methods (e.g., Viola-Jones) to modern deep learning-based approaches like Faster-RCNN and Mask-RCNN. It discusses the advantages, disadvantages, and applications of these methods in fields such as industrial inspection, clinical diagnosis, and obstacle detection in vehicles and robots. The review offers an organized summary of these techniques and highlights future research directions.},
note = {Publisher: UNIV POLITECNICA VALENCIA, EDITORIAL UPV CAMINO VERA SN, VALENCIA, 46022, SPAIN},
keywords = {automated detection, Computer vision, deep learning, Faster-RCNN, image processing, Mask-RCNN, object localization, visual inspection},
pubstate = {published},
tppubtype = {article}
}
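As a companion to the review, the sketch below shows how one of the surveyed detector families (Faster R-CNN) can be run on a single image with torchvision. The score threshold and image path are assumptions; this is not code from the reviewed publications.

# Quick sketch: running a pre-trained Faster R-CNN (one of the surveyed detector
# families) on a single image with torchvision. Threshold and path are assumptions.
import torch
import torchvision
from torchvision.transforms.functional import to_tensor
from PIL import Image

model = torchvision.models.detection.fasterrcnn_resnet50_fpn(weights="DEFAULT")
model.eval()

def localise_objects(image_path, score_threshold=0.7):
    """Return boxes, COCO label ids and scores above the confidence threshold."""
    image = to_tensor(Image.open(image_path).convert("RGB"))
    with torch.no_grad():
        output = model([image])[0]   # dict with 'boxes', 'labels', 'scores'
    keep = output["scores"] >= score_threshold
    return output["boxes"][keep], output["labels"][keep], output["scores"][keep]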
2017
Fidalgo, Eduardo
Selection of relevant information to improve Image Classification using Bag of Visual Words Journal article
In: 2017.
Abstract | Links | BibTeX | Tags: Computer vision, feature selection, Fine-grained Recognition, Image classification
@article{fidalgo_selection_2017-2,
title = {Selection of relevant information to improve Image Classification using Bag of Visual Words},
author = {Eduardo Fidalgo},
url = {https://www.raco.cat/index.php/ELCVIA/article/view/v16-n2-fidalgo},
year = {2017},
date = {2017-01-01},
abstract = {This PhD thesis tackles a major challenge in computer vision: image classification. With the rapid increase in the number of images, it is essential to classify them accurately and efficiently. The typical classification pipeline involves extracting image features, encoding them into vectors, and classifying them with a pre-trained model. The Bag of Words model and its variants, such as pyramid matching and weighted schemes, have proven to be effective. However, errors can occur at any stage, causing performance issues, especially when dealing with multiple objects, small or thin items, incorrect annotations, or fine-grained recognition. The thesis highlights the importance of good feature selection to enhance classification performance, showing that high-quality features can lead to improved fine-grained classification without requiring extensive training datasets.},
keywords = {Computer vision, feature selection, Fine-grained Recognition, Image classification},
pubstate = {published},
tppubtype = {article}
}
Fidalgo, Eduardo
Selection of relevant information to improve Image Classification using Bag of Visual Words Journal article
In: ELCVIA: electronic letters on computer vision and image analysis, vol. 16, no. 2, pp. 5–8, 2017.
Abstract | Links | BibTeX | Tags: Computer vision, feature selection, Fine-grained Recognition, Image classification
@article{fidalgo_selection_2017,
title = {Selection of relevant information to improve Image Classification using Bag of Visual Words},
author = {Eduardo Fidalgo},
url = {https://recercat.cat/handle/2072/428567},
year = {2017},
date = {2017-01-01},
journal = {ELCVIA: electronic letters on computer vision and image analysis},
volume = {16},
number = {2},
pages = {5–8},
abstract = {This PhD thesis addresses one of the main challenges in computer vision: image classification. With the rapid growth in the number of images, reliable classification has become increasingly important. The conventional classification pipeline involves extracting local image features, encoding them into a feature vector, and classifying them using a pre-trained model. The Bag of Words model and its extensions, such as pyramid matching and weighted schemes, have shown strong results. However, errors can occur in any step of the process, which may lead to a decline in classification performance. These errors can stem from multiple objects in an image, thin or small objects, incorrect annotations, or fine-grained recognition tasks. The thesis demonstrates that selecting high-quality features can significantly improve fine-grained classification, showing that a large training dataset is not always necessary to achieve good results.},
keywords = {Computer vision, feature selection, Fine-grained Recognition, Image classification},
pubstate = {published},
tppubtype = {article}
}
García-Olalla Olivera, Óscar; Fernández-Robles, Laura; Fidalgo, Eduardo; González-Castro, Víctor; Alegre, Enrique
Evaluation of the State of Cutting Tools According to Its Texture Using LOSIB and LBP Variants Journal article
In: Project Management and Engineering Research: AEIPRO 2016, pp. 217–228, 2017, (Publisher: Springer International Publishing).
Abstract | Links | BibTeX | Tags: Computer vision, Cutting Tools, local binary pattern, Texture
@article{garcia-olalla_olivera_evaluation_2017,
title = {Evaluation of the State of Cutting Tools According to Its Texture Using LOSIB and LBP Variants},
author = {Óscar García-Olalla Olivera and Laura Fernández-Robles and Eduardo Fidalgo and Víctor González-Castro and Enrique Alegre},
url = {https://link.springer.com/chapter/10.1007/978-3-319-51859-6_15},
year = {2017},
date = {2017-01-01},
journal = {Project Management and Engineering Research: AEIPRO 2016},
pages = {217–228},
abstract = {The FRESVIDA project focuses on assessing the lifespan of cutting tools under extreme conditions using digital image processing. It evaluates various texture descriptors based on Local Binary Patterns (LBP), including variants like LBPV and DLBPCS, using the Outex dataset. The descriptors are tested with Support Vector Machines (SVM), and results show that combining them with LOSIB reduces performance due to the dataset’s emphasis on rotation invariance.},
note = {Publisher: Springer International Publishing},
keywords = {Computer vision, Cutting Tools, local binary pattern, Texture},
pubstate = {published},
tppubtype = {article}
}
de Paz-Centeno, Iván; Fidalgo, Eduardo; Alegre, Enrique; Al-Nabki, Wesam
Oculus-Crawl, a software tool for building datasets for computer vision tasks Journal article
In: XXXVIII Jornadas de Automática, pp. 991–998, 2017, (Publisher: Servicio de Publicaciones de la Universidad de Oviedo).
Abstract | Links | BibTeX | Tags: Computer vision, crawler, Dataset, images, search engine
@article{paz_centeno_oculus-crawl_2017,
title = {Oculus-Crawl, a software tool for building datasets for computer vision tasks},
author = {Iván de Paz-Centeno and Eduardo Fidalgo and Enrique Alegre and Wesam Al-Nabki},
url = {https://ruc.udc.es/dspace/handle/2183/25870},
year = {2017},
date = {2017-01-01},
urldate = {2017-01-01},
journal = {XXXVIII Jornadas de Automática},
pages = {991–998},
abstract = {This paper introduces Oculus-Crawl, a modular and scalable tool designed to automatically collect images from Google and Yahoo Images search engines. The tool enables efficient dataset creation for computer vision tasks, featuring capabilities for storing and sharing large datasets along with their metadata. Testing demonstrated its efficiency by successfully collecting over 11,500 images with their metadata in under 14 minutes.},
note = {Publisher: Servicio de Publicaciones de la Universidad de Oviedo},
keywords = {Computer vision, crawler, Dataset, images, search engine},
pubstate = {published},
tppubtype = {article}
}
García-Ordás, María Teresa; Alegre, Enrique; González-Castro, Víctor; Alaiz-Rodríguez, Rocío
A computer vision approach to analyze and classify tool wear level in milling processes using shape descriptors and machine learning techniques Journal article
In: The International Journal of Advanced Manufacturing Technology, vol. 90, pp. 1947–1961, 2017, (Publisher: Springer London).
Abstract | Links | BibTeX | Tags: Computer vision, cutting tool wear, machine learning, shape descriptors, wear monitoring automation
@article{garcia-ordas_computer_2017,
title = {A computer vision approach to analyze and classify tool wear level in milling processes using shape descriptors and machine learning techniques},
author = {María Teresa García-Ordás and Enrique Alegre and Víctor González-Castro and Rocío Alaiz-Rodríguez},
url = {https://link.springer.com/article/10.1007/s00170-016-9541-0},
year = {2017},
date = {2017-01-01},
journal = {The International Journal of Advanced Manufacturing Technology},
volume = {90},
pages = {1947–1961},
abstract = {In this paper, we present a new approach to categorize the wear of cutting tools used in edge profile milling processes. It is based on machine learning and computer vision techniques, specifically using B-ORCHIZ, a novel shape-based descriptor computed from the wear region image. A new Insert dataset with 212 images of tool wear has been created to evaluate our approach. It contains two subsets: one with images of the main cutting edge and the other one with the edges that converge to it (called Insert-C and Insert-I, respectively). The experiments were conducted trying to discriminate between two (low-high) and three (low-medium-high) different wear levels, and the classification stage was carried out using a support vector machine (SVM). Results show that B-ORCHIZ outperforms other shape descriptors (aZIBO and ZMEG) achieving accuracy values between 80.24 and 88.46 % in the different scenarios evaluated. Moreover, a hierarchical cluster analysis was performed, offering prototype images for wear levels, which may help researchers and technicians to understand how the wear process evolves. These results show a very promising opportunity for wear monitoring automation in edge profile milling processes.},
note = {Publisher: Springer London},
keywords = {Computer vision, cutting tool wear, machine learning, shape descriptors, wear monitoring automation},
pubstate = {published},
tppubtype = {article}
}
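The shape-descriptor-plus-SVM pipeline of this paper can be sketched as follows. B-ORCHIZ itself is not reproduced here, so log-scaled Hu moments stand in as a generic shape descriptor of the binary wear region; the wear-level labels and SVM settings are likewise assumptions.

# Sketch of the shape-descriptor + SVM wear-level pipeline. B-ORCHIZ is not
# reproduced here; log-scaled Hu moments stand in as a generic shape descriptor,
# and the labels (e.g. 0/1/2 for low/medium/high wear) are assumptions.
import cv2
import numpy as np
from sklearn.pipeline import make_pipeline
from sklearn.preprocessing import StandardScaler
from sklearn.svm import SVC

def shape_descriptor(wear_mask):
    """Log-scaled Hu moments of a binary wear-region mask (stand-in for B-ORCHIZ)."""
    hu = cv2.HuMoments(cv2.moments(wear_mask.astype(np.uint8))).flatten()
    return -np.sign(hu) * np.log10(np.abs(hu) + 1e-12)

def train_wear_level_classifier(wear_masks, wear_levels):
    X = np.array([shape_descriptor(m) for m in wear_masks])
    clf = make_pipeline(StandardScaler(), SVC(kernel="rbf", C=10.0))
    return clf.fit(X, wear_levels)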
2016
Fernández-Robles, Laura
Object recognition techniques in real applications Journal article
In: 2016.
Abstract | Links | BibTeX | Tags: Computer vision, image processing, machine learning, object recognition
@article{fernandez-robles_object_2016,
title = {Object recognition techniques in real applications},
author = {Laura Fernández-Robles},
url = {https://research.rug.nl/en/publications/object-recognition-techniques-in-real-applications},
year = {2016},
date = {2016-01-01},
abstract = {This doctoral thesis presents object description and retrieval techniques applied to three different fields: boar spermatozoa classification based on acrosome integrity, tool wear monitoring in machining processes, and specific object detection in images to combat child sexual exploitation. The research develops new methods and descriptors, highlighting the creation of the colour COSFIRE filter, which enhances color description and object discrimination while maintaining background invariance.},
keywords = {Computer vision, image processing, machine learning, object recognition},
pubstate = {published},
tppubtype = {article}
}
2012
Alegre, Enrique; Barreiro, Joaquín; Suárez-Castrillón, Alexci
A new improved Laws-based descriptor for surface roughness evaluation Journal article
In: The International Journal of Advanced Manufacturing Technology, vol. 59, pp. 605–615, 2012, (Publisher: Springer-Verlag).
Abstract | Links | BibTeX | Tags: Computer vision, machining processes, surface roughness, texture analysis
@article{alegre_new_2012,
title = {A new improved Laws-based descriptor for surface roughness evaluation},
author = {Enrique Alegre and Joaquín Barreiro and Alexci Suárez-Castrillón},
url = {https://link.springer.com/article/10.1007/s00170-011-3507-z},
year = {2012},
date = {2012-01-01},
urldate = {2012-01-01},
journal = {The International Journal of Advanced Manufacturing Technology},
volume = {59},
pages = {605–615},
abstract = {A new descriptor that allows turned metallic parts to be classified based on their surface roughness is proposed in this paper. The material used for the tests was AISI 6150 steel, regarded as one of the reference steels in the market. The proposed solution is based on a vision system that calculates the actual roughness by analysing texture in images of machined parts. The newly developed R5SR5S kernel for quantifying roughness is based on the R5R5 mask presented by Laws. Computing the standard deviation of images filtered with the proposed R5SR5S kernel allows the images to be classified with a hit rate of 95.87% using linear discriminant analysis and 97.30% using quadratic discriminant analysis. These results show that the proposed technique can be effectively used to evaluate roughness in machining processes.},
note = {Publisher: Springer-Verlag},
keywords = {Computer vision, machining processes, surface roughness, texture analysis},
pubstate = {published},
tppubtype = {article}
}
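A minimal sketch of the Laws-based roughness feature with LDA/QDA classification is shown below. The paper's R5SR5S kernel is not reproduced; the classic R5R5 Laws mask it builds on is used instead, and the class labels are assumed to encode roughness ranges.

# Sketch of a Laws-mask roughness feature classified with LDA and QDA. The paper's
# R5SR5S kernel is not reproduced; the classic R5R5 Laws mask it builds on is used,
# and the class labels are assumed to encode roughness ranges.
import numpy as np
from scipy.ndimage import convolve
from sklearn.discriminant_analysis import (LinearDiscriminantAnalysis,
                                           QuadraticDiscriminantAnalysis)

R5 = np.array([1.0, -4.0, 6.0, -4.0, 1.0])   # Laws "ripple" vector
R5R5 = np.outer(R5, R5)                      # 5x5 Laws mask

def roughness_feature(gray_image):
    """Standard deviation of the R5R5-filtered image, used as a 1-D roughness descriptor."""
    filtered = convolve(gray_image.astype(float), R5R5, mode="reflect")
    return np.array([filtered.std()])

def fit_roughness_classifiers(images, roughness_classes):
    X = np.vstack([roughness_feature(img) for img in images])
    lda = LinearDiscriminantAnalysis().fit(X, roughness_classes)
    qda = QuadraticDiscriminantAnalysis().fit(X, roughness_classes)
    return lda, qda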
2010
Alegre, Enrique; Alaiz-Rodríguez, Rocío; Barreiro, Joaquín; Fidalgo, Eduardo; Fernández-Robles, Laura
Surface finish control in machining processes using Haralick descriptors and neuronal networks Journal article
In: Computational Modeling of Objects Represented in Images: Second International Symposium, CompIMAGE 2010, Buffalo, NY, USA, May 5-7, 2010. Proceedings 2, pp. 231–241, 2010, (Publisher: Springer Berlin Heidelberg).
Abstract | Links | BibTeX | Tags: Classification Methods, Computer vision, Haralick Descriptors, Surface Finish Control, surface roughness
@article{alegre_surface_2010,
title = {Surface finish control in machining processes using haralick descriptors and neuronal networks},
author = {Enrique Alegre and Rocío Alaiz-Rodríguez and Joaquín Barreiro and Eduardo Fidalgo and Laura Fernández-Robles},
url = {https://link.springer.com/chapter/10.1007/978-3-642-12712-0_21},
year = {2010},
date = {2010-01-01},
journal = {Computational Modeling of Objects Represented in Images: Second International Symposium, CompIMAGE 2010, Buffalo, NY, USA, May 5-7, 2010. Proceedings 2},
pages = {231–241},
abstract = {This paper presents a computer vision-based method to control surface roughness in steel parts. It classifies steel surfaces into acceptable and defective classes based on roughness. The study uses 143 images of AISI 303 stainless steel and three image description methods: texture local filters, Haralick descriptors, and wavelet transform features. The best error rate of 4.0% was achieved using texture descriptors with K-NN, while the optimal configuration with a neural network achieved a 0.0% error rate using Haralick descriptors.},
note = {Publisher: Springer Berlin Heidelberg},
keywords = {Classification Methods, Computer vision, Haralick Descriptors, Surface Finish Control, surface roughness},
pubstate = {published},
tppubtype = {article}
}
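The Haralick-descriptor approach can be sketched with GLCM features and the two classifiers mentioned in the abstract, k-NN and a small neural network. The distances, angles and network size below are assumptions, not the paper's settings.

# Sketch of GLCM (Haralick-style) texture features with the two classifiers named in
# the abstract, k-NN and a small neural network. Distances, angles and network size
# are assumptions, not the paper's settings.
import numpy as np
from skimage.feature import graycomatrix, graycoprops
from sklearn.neighbors import KNeighborsClassifier
from sklearn.neural_network import MLPClassifier

PROPS = ("contrast", "homogeneity", "energy", "correlation")

def haralick_features(gray_u8):
    """Four GLCM properties averaged over four orientations at distance 1 (uint8 input)."""
    glcm = graycomatrix(gray_u8, distances=[1],
                        angles=[0, np.pi / 4, np.pi / 2, 3 * np.pi / 4],
                        levels=256, symmetric=True, normed=True)
    return np.array([graycoprops(glcm, p).mean() for p in PROPS])

def fit_surface_classifiers(images_u8, labels):
    """labels: 0 = acceptable finish, 1 = defective finish (assumed encoding)."""
    X = np.vstack([haralick_features(img) for img in images_u8])
    knn = KNeighborsClassifier(n_neighbors=3).fit(X, labels)
    mlp = MLPClassifier(hidden_layer_sizes=(16,), max_iter=2000, random_state=0).fit(X, labels)
    return knn, mlp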
2009
Morala-Argüello, Patricia; Barreiro, Joaquín; Alegre, Enrique; González-Castro, Víctor
Application of textural descriptors for the evaluation of surface roughness class in the machining of metals Journal article
In: 2009.
Abstract | Links | BibTeX | Tags: Computer vision, machine learning, machining, quality control, surface roughness
@article{morala-arguello_application_2009,
title = {Application of textural descriptors for the evaluation of surface roughness class in the machining of metals},
author = {Patricia Morala-Argüello and Joaquín Barreiro and Enrique Alegre and Víctor González-Castro},
url = {https://scholar.google.es/citations?view_op=view_citation&hl=en&user=opCbArQAAAAJ&cstart=20&pagesize=80&sortby=title&citation_for_view=opCbArQAAAAJ:UebtZRa9Y70C},
year = {2009},
date = {2009-01-01},
urldate = {2009-01-01},
abstract = {Surface roughness measurement has been a key topic in metal machining research for decades. Traditional methods rely on tactile devices providing 2D profiles, but advances in computer vision now enable 3D surface characterization. This paper proposes a computer vision-based method to evaluate machined part quality using five feature vectors: Hu, Flusser, Taubin, Zernike, and Legendre moments. Images were classified into low and high roughness using k-NN and neural networks. Results show that Zernike and Legendre descriptors perform best, achieving a 6.5% error rate with k-NN classification.},
keywords = {Computer vision, machine learning, machining, quality control, surface roughness},
pubstate = {published},
tppubtype = {article}
}
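A minimal sketch of the moment-based classification with k-NN is given below, using the Zernike moments available in mahotas; the radius, polynomial degree and simple binarisation step are assumptions, and Legendre moments are omitted.

# Sketch of moment-based roughness classification with Zernike moments and k-NN.
# The Zernike implementation comes from mahotas; radius, degree and the simple
# mean-threshold binarisation are assumptions, and Legendre moments are omitted.
import numpy as np
import mahotas
from sklearn.neighbors import KNeighborsClassifier

def zernike_descriptor(gray_image, radius=64, degree=8):
    """Zernike moments of the thresholded surface image."""
    binary = (gray_image > gray_image.mean()).astype(np.uint8)
    return mahotas.features.zernike_moments(binary, radius, degree=degree)

def fit_roughness_knn(images, roughness_classes, k=3):
    """roughness_classes: 0 = low roughness, 1 = high roughness (assumed encoding)."""
    X = np.vstack([zernike_descriptor(img) for img in images])
    return KNeighborsClassifier(n_neighbors=k).fit(X, roughness_classes)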
2008
Barreiro, Joaquín; Alaiz-Rodríguez, Rocío; Alegre, Enrique; Ablanedo, D
Surface finish control in machining processes using textural descriptors based on moments Miscellaneous
2008.
Abstract | Links | BibTeX | Tags: Computer vision, Haralick Descriptors, neural networks, Surface Finish Control, surface roughness
@misc{barreiro_surface_2008,
title = {Surface finish control in machining processes using textural descriptors based on moments},
author = {Joaquín Barreiro and Rocío Alaiz-Rodríguez and Enrique Alegre and D Ablanedo},
url = {https://link.springer.com/chapter/10.1007/978-3-642-12712-0_21},
year = {2008},
date = {2008-01-01},
publisher = {na},
abstract = {This paper introduces a computer vision method for controlling the surface finish of steel parts by classifying them into acceptable and defective categories based on surface roughness. The study uses 143 images of AISI 303 stainless steel, described with three techniques: texture local filters, Haralick descriptors, and wavelet transform features. The classification is done with K-NN and neural networks. The best result, with a 4.0% error rate, was achieved using texture descriptors with K-NN. The optimal configuration with a neural network, using Haralick descriptors, resulted in a 0.0% error rate.},
keywords = {Computer vision, Haralick Descriptors, neural networks, Surface Finish Control, surface roughness},
pubstate = {published},
tppubtype = {misc}
}
Alegre, Enrique; Barreiro, Joaquín; Castejón-Limas, Manuel; Suárez, S
Computer vision and classification techniques on the surface finish control in machining processes Journal article
In: International Conference Image Analysis and Recognition, pp. 1101–1110, 2008, (Publisher: Springer Berlin Heidelberg Berlin, Heidelberg).
Abstract | Links | BibTeX | Tags: AISI 303, Computer vision, Product Quality Inspection, Surface Finish Control, texture descriptors
@article{alegre_computer_2008,
title = {Computer vision and classification techniques on the surface finish control in machining processes},
author = {Enrique Alegre and Joaquín Barreiro and Manuel Castejón-Limas and S Suárez},
url = {https://link.springer.com/chapter/10.1007/978-3-540-69812-8_110},
year = {2008},
date = {2008-01-01},
urldate = {2008-01-01},
journal = {International Conference Image Analysis and Recognition},
pages = {1101–1110},
abstract = {This work presents a method for surface finish control using computer vision. The test parts were made of AISI 303 stainless steel and machined with a CNC lathe. Using a Pulnix camera, diffuse illumination, and industrial zoom, 140 images were captured. Three feature extraction methods were applied: histogram statistics, Haralick descriptors, and Laws descriptors. Using k-NN, the best hit rate achieved was 92.14% with unfiltered images using Laws features. These results demonstrate the feasibility of using texture descriptors to assess the roughness of metallic parts for quality inspection.},
note = {Publisher: Springer Berlin Heidelberg Berlin, Heidelberg},
keywords = {AISI 303, Computer vision, Product Quality Inspection, Surface Finish Control, texture descriptors},
pubstate = {published},
tppubtype = {article}
}
2007
Castejón-Limas, Manuel; Alegre, Enrique; Barreiro, Joaquín; Hernández, LK
On-line tool wear monitoring using geometric descriptors from digital images Journal article
In: International Journal of Machine Tools and Manufacture, vol. 47, no. 12-13, pp. 1847–1853, 2007, (Publisher: Pergamon).
Abstract | Links | BibTeX | Tags: Computer vision, Image classification, Monitoring, Tool wear
@article{castejon-limas_-line_2007,
title = {On-line tool wear monitoring using geometric descriptors from digital images},
author = {Manuel Castejón-Limas and Enrique Alegre and Joaquín Barreiro and LK Hernández},
url = {https://www.sciencedirect.com/science/article/pii/S0890695507000892},
year = {2007},
date = {2007-01-01},
journal = {International Journal of Machine Tools and Manufacture},
volume = {47},
number = {12-13},
pages = {1847–1853},
abstract = {A computer vision and statistical learning system is proposed to estimate wear levels in cutting inserts and determine the optimal replacement time. Using a CNC lathe and vision system, 1383 flank images were processed, extracting nine geometrical descriptors. Linear Discriminant Analysis identified three key descriptors—eccentricity, extent, and solidity—capturing 98.63% of relevant information. A finite mixture model classified wear into three levels: low, medium, and high. The monitoring approach tracks tool wear evolution, ensuring replacement before reaching high wear, optimizing performance and preventing failures.},
note = {Publisher: Pergamon},
keywords = {Computer vision, Image classification, Monitoring, Tool wear},
pubstate = {published},
tppubtype = {article}
}
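The geometric-descriptor approach can be sketched with scikit-image region properties and a Gaussian mixture standing in for the paper's finite mixture model; the segmentation of the wear region itself is assumed to be given.

# Sketch of the geometric-descriptor approach: eccentricity, extent and solidity of
# the segmented wear region, grouped into three wear levels with a Gaussian mixture
# (standing in for the paper's finite mixture model). Segmentation is assumed given.
import numpy as np
from skimage.measure import label, regionprops
from sklearn.mixture import GaussianMixture

def wear_geometry(wear_mask):
    """Eccentricity, extent and solidity of the largest connected wear region."""
    regions = regionprops(label(wear_mask.astype(np.uint8)))
    largest = max(regions, key=lambda r: r.area)
    return np.array([largest.eccentricity, largest.extent, largest.solidity])

def fit_wear_level_mixture(wear_masks):
    X = np.vstack([wear_geometry(m) for m in wear_masks])
    gmm = GaussianMixture(n_components=3, random_state=0).fit(X)  # low / medium / high
    return gmm, gmm.predict(X)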
0000
Martínez San Martín, Guillermo; Fernández-Robles, Laura; Alegre, Enrique; García-Olalla Olivera, Óscar
A segmentation approach for evaluating wear of inserts in milling machines with computer vision techniques Journal article
In: 0000.
Abstract | Links | BibTeX | Tags: Computer vision, milling machines, segmentation, Tool wear
@article{martinez_san_martin_segmentation_nodate,
title = {A segmentation approach for evaluating wear of inserts in milling machines with computer vision techniques},
author = {Guillermo Martínez San Martín and Laura Fernández-Robles and Enrique Alegre and Óscar García-Olalla Olivera},
url = {https://scholar.google.es/citations?view_op=view_citation&hl=es&user=4jZgNVkAAAAJ&sortby=title&citation_for_view=4jZgNVkAAAAJ:Se3iqnhoufwC},
abstract = {Measuring tool wear in milling machines is an important task for evaluating the lifetime of the cutting parts (inserts) and deciding whether they should be replaced. In our research, we propose to use computer vision algorithms to perform this task. Part of the research is to evaluate the accuracy of different segmentation algorithms that segment the area of wear. We have used two methods: k-Means and Mean Shift. To evaluate the segmentation results, the Dice coefficient was used, obtaining with Mean Shift a QS = 0.5923 for all the edges and a QS = 0.6831 just for edges with high wear.},
keywords = {Computer vision, milling machines, segmentation, Tool wear},
pubstate = {published},
tppubtype = {article}
}
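The segmentation evaluation described in the abstract can be sketched as a simple k-means intensity segmentation scored with the Dice coefficient (QS). Taking k = 2 and the brightest cluster as wear are assumptions; Mean Shift would be scored the same way.

# Sketch of the evaluation: a simple k-means intensity segmentation of the wear
# region scored with the Dice coefficient (QS). Taking k = 2 and the brightest
# cluster as wear are assumptions; Mean Shift would be scored the same way.
import numpy as np
from sklearn.cluster import KMeans

def kmeans_wear_mask(gray_image, k=2):
    """Cluster pixel intensities and take the brightest cluster as the wear region."""
    flat = gray_image.reshape(-1, 1).astype(float)
    labels = KMeans(n_clusters=k, n_init=10, random_state=0).fit_predict(flat)
    brightest = max(range(k), key=lambda c: flat[labels == c].mean())
    return labels.reshape(gray_image.shape) == brightest

def dice_coefficient(predicted_mask, ground_truth_mask):
    """QS = 2 * |A intersect B| / (|A| + |B|)."""
    intersection = np.logical_and(predicted_mask, ground_truth_mask).sum()
    return 2.0 * intersection / (predicted_mask.sum() + ground_truth_mask.sum())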
Barreiro, Joaquín; Sanz, Alberto; Hernández, LK; Alegre, Enrique; Castejón-Limas, Manuel
Operator and analyst interfaces for monitoring of wear in tool inserts Journal article
In: 0000.
Abstract | Links | BibTeX | Tags: Classification, Computer vision, descriptors, monitoring tool life
@article{barreiro_operator_nodate,
title = {Operator and analyst interfaces for monitoring of wear in tool inserts},
author = {Joaquín Barreiro and Alberto Sanz and LK Hernández and Enrique Alegre and Manuel Castejón-Limas},
url = {https://d1wqtxts1xzle7.cloudfront.net/47455745/Operator_and_analyst_interfaces_for_moni20160723-29317-10pmzkk-libre.pdf?1469286941=&response-content-disposition=inline%3B+filename%3DOperator_and_analyst_interfaces_for_moni.pdf&Expires=1739527854&Signature=RWcL6iwivos9e0wdZGv2op4N17pTROrpur~Yrv~yUJp2ta5PuCsKYNk-gnEyq2hojbariQ5iVTq7oPhutXCpZHCk5x~oE5Z~mH5mTrzcuxUCRwc4hRZlUY~HlJbpBLtJDTKagp2VLdb2UYRaqWMREe4VwJnMKyW8DjlgQtY4NiMx8ECM6FWcANQUhPZmRvT2RLYKEYbI9NMLGaW4qs9FTGATkhPSmkFJ6nt6QJQ4VkGmn55XlN3gE7hYbUDGTr6oZ7LzpBlvKWq856FUqSPD2Uy6Wz0~oQ8NyZM9ynDMGOSEyu640BiwmmSWfvHOuEv9mIkoFy95Kyx9lrf73kSs5A__&Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA},
abstract = {Computer vision techniques have advanced significantly, enabling their effective application in industrial environments. This paper presents graphical interfaces for operators and analysts to monitor tool inserts during steel turning. The system integrates computer vision with classification techniques based on statistical moments and region descriptors, enhancing inspection and control processes.},
keywords = {Classification, Computer vision, descriptors, monitoring tool life},
pubstate = {published},
tppubtype = {article}
}