Instance selection and feature weighting using evolutionary algorithms. Ramírez-Cruz, J., Fuentes, O., Alarcón-Aquino, V., & García-Banuelos, L. In Proceedings - 15th International Conference on Computing, CIC 2006, 2006. Abstract: Machine learning algorithms are commonly used in real-world applications for solving complex problems where it is difficult to get a mathematical model. The goal of machine learning algorithms is to learn an objective function from a set of training examples where each example is defined by a feature set. Regularly, real word applications have many examples with many features; however, the objective function depends on few of them. The presence of noisy examples or irrelevant features in a dataset degrades the performance of machine learning algorithms; such is the case of k-nearest neighbor machine learning algorithm (k-NN). Thus choosing good instance and feature subsets may improve the algorithm's performance. Evolutionary algorithms proved to be good techniques for finding solutions in a large solution space and to be stable in the presence of noise. In this work, we address the problem of instance selection and feature weighting for instance-based methods by means of a Genetic Algorithm (GA) and Evolution strategies (ES). We show that combining GA and ES with a k-NN algorithm can improve the predictive accuracy of the resulting classifier. © 2006 IEEE.
@inproceedings{ramirezcruz2006instance,
  author              = {Ramírez-Cruz, J.-F. and Fuentes, O. and Alarcón-Aquino, V. and García-Banuelos, L.},
  title               = {Instance Selection and Feature Weighting Using Evolutionary Algorithms},
  booktitle           = {Proceedings - 15th International Conference on Computing, {CIC} 2006},
  year                = {2006},
  abstract            = {Machine learning algorithms are commonly used in real-world applications for solving complex problems where it is difficult to get a mathematical model. The goal of machine learning algorithms is to learn an objective function from a set of training examples where each example is defined by a feature set. Regularly, real word applications have many examples with many features; however, the objective function depends on few of them. The presence of noisy examples or irrelevant features in a dataset degrades the performance of machine learning algorithms; such is the case of k-nearest neighbor machine learning algorithm (k-NN). Thus choosing good instance and feature subsets may improve the algorithm's performance. Evolutionary algorithms proved to be good techniques for finding solutions in a large solution space and to be stable in the presence of noise. In this work, we address the problem of instance selection and feature weighting for instance-based methods by means of a Genetic Algorithm (GA) and Evolution strategies (ES). We show that combining GA and ES with a k-NN algorithm can improve the predictive accuracy of the resulting classifier. © 2006 IEEE.},
  id                  = {25893cd1-c21b-3092-9e1d-ec2ce79b2988},
  created             = {2017-12-10T20:09:59.058Z},
  file_attached       = {false},
  profile_id          = {940dd160-7d67-3a5f-b9f8-935da0571367},
  last_modified       = {2018-03-09T18:07:13.561Z},
  read                = {false},
  starred             = {false},
  authored            = {true},
  confirmed           = {false},
  hidden              = {false},
  private_publication = {false},
  internal-note       = {NOTE(review): added missing citation key; dropped malformed exporter field identifiers = {[object Object]}; last surname may be García-Bañuelos — verify against the published paper. File uses UTF-8 accents: process with biblatex/Biber, or convert to LaTeX escapes for classic BibTeX.},
}
Downloads: 0
{"_id":"pjkPJTZ6RBnFoi7ZC","bibbaseid":"ramrezcruz-fuentes-alarcnaquino-garcabanuelos-instanceselectionandfeatureweightingusingevolutionaryalgorithms-2006","downloads":0,"creationDate":"2018-11-16T03:10:01.128Z","title":"Instance selection and feature weighting using evolutionary algorithms","author_short":["Ramírez-Cruz, J.","Fuentes, O.","Alarcón-Aquino, V.","García-Banuelos, L."],"year":2006,"bibtype":"inProceedings","biburl":null,"bibdata":{"title":"Instance selection and feature weighting using evolutionary algorithms","type":"inProceedings","year":"2006","identifiers":"[object Object]","id":"25893cd1-c21b-3092-9e1d-ec2ce79b2988","created":"2017-12-10T20:09:59.058Z","file_attached":false,"profile_id":"940dd160-7d67-3a5f-b9f8-935da0571367","last_modified":"2018-03-09T18:07:13.561Z","read":false,"starred":false,"authored":"true","confirmed":false,"hidden":false,"private_publication":false,"abstract":"Machine learning algorithms are commonly used in real-world applications for solving complex problems where it is difficult to get a mathematical model. The goal of machine learning algorithms is to learn an objective function from a set of training examples where each example is defined by a feature set. Regularly, real word applications have many examples with many features; however, the objective function depends on few of them. The presence of noisy examples or irrelevant features in a dataset degrades the performance of machine learning algorithms; such is the case of k-nearest neighbor machine learning algorithm (k-NN). Thus choosing good instance and feature subsets may improve the algorithm's performance. Evolutionary algorithms proved to be good techniques for finding solutions in a large solution space and to be stable in the presence of noise. In this work, we address the problem of instance selection and feature weighting for instance-based methods by means of a Genetic Algorithm (GA) and Evolution strategies (ES). 
We show that combining GA and ES with a k-NN algorithm can improve the predictive accuracy of the resulting classifier. © 2006 IEEE.","bibtype":"inProceedings","author":"Ramírez-Cruz, J.-F. and Fuentes, O. and Alarcón-Aquino, V. and García-Banuelos, L.","booktitle":"Proceedings - 15th International Conference on Computing, CIC 2006","bibtex":"@inProceedings{\n title = {Instance selection and feature weighting using evolutionary algorithms},\n type = {inProceedings},\n year = {2006},\n identifiers = {[object Object]},\n id = {25893cd1-c21b-3092-9e1d-ec2ce79b2988},\n created = {2017-12-10T20:09:59.058Z},\n file_attached = {false},\n profile_id = {940dd160-7d67-3a5f-b9f8-935da0571367},\n last_modified = {2018-03-09T18:07:13.561Z},\n read = {false},\n starred = {false},\n authored = {true},\n confirmed = {false},\n hidden = {false},\n private_publication = {false},\n abstract = {Machine learning algorithms are commonly used in real-world applications for solving complex problems where it is difficult to get a mathematical model. The goal of machine learning algorithms is to learn an objective function from a set of training examples where each example is defined by a feature set. Regularly, real word applications have many examples with many features; however, the objective function depends on few of them. The presence of noisy examples or irrelevant features in a dataset degrades the performance of machine learning algorithms; such is the case of k-nearest neighbor machine learning algorithm (k-NN). Thus choosing good instance and feature subsets may improve the algorithm's performance. Evolutionary algorithms proved to be good techniques for finding solutions in a large solution space and to be stable in the presence of noise. In this work, we address the problem of instance selection and feature weighting for instance-based methods by means of a Genetic Algorithm (GA) and Evolution strategies (ES). 
We show that combining GA and ES with a k-NN algorithm can improve the predictive accuracy of the resulting classifier. © 2006 IEEE.},\n bibtype = {inProceedings},\n author = {Ramírez-Cruz, J.-F. and Fuentes, O. and Alarcón-Aquino, V. and García-Banuelos, L.},\n booktitle = {Proceedings - 15th International Conference on Computing, CIC 2006}\n}","author_short":["Ramírez-Cruz, J.","Fuentes, O.","Alarcón-Aquino, V.","García-Banuelos, L."],"bibbaseid":"ramrezcruz-fuentes-alarcnaquino-garcabanuelos-instanceselectionandfeatureweightingusingevolutionaryalgorithms-2006","role":"author","urls":{},"downloads":0},"search_terms":["instance","selection","feature","weighting","using","evolutionary","algorithms","ramírez-cruz","fuentes","alarcón-aquino","garcía-banuelos"],"keywords":[],"authorIDs":[]}