Instance Selection and Feature Weighting Using Evolutionary Algorithms. Ramirez-Cruz, J., Fuentes, O., Alarcon-Aquino, V., & Garcia-Banuelos, L. In 2006 15th International Conference on Computing, pages 73-79, November 2006. IEEE.
Machine learning algorithms are commonly used in real-world applications for solving complex problems where it is difficult to obtain a mathematical model. The goal of machine learning algorithms is to learn an objective function from a set of training examples, where each example is described by a set of features. Typically, real-world applications have many examples with many features; however, the objective function depends on only a few of them. The presence of noisy examples or irrelevant features in a dataset degrades the performance of machine learning algorithms; such is the case for the k-nearest neighbor algorithm (k-NN). Thus, choosing good instance and feature subsets may improve the algorithm's performance. Evolutionary algorithms have proved to be effective techniques for searching large solution spaces and are robust in the presence of noise. In this work, we address the problem of instance selection and feature weighting for instance-based methods by means of a genetic algorithm (GA) and evolution strategies (ES). We show that combining GA and ES with a k-NN algorithm can improve the predictive accuracy of the resulting classifier.
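The sketch below is not the paper's implementation; it only illustrates the general scheme the abstract describes: each candidate pairs a binary instance-selection mask, evolved GA-style with uniform crossover and bit-flip mutation, with a real-valued feature-weight vector, evolved ES-style with self-adaptive Gaussian mutation, and is scored by the accuracy of a feature-weighted k-NN classifier on a validation split. The dataset, population size, mutation rates, and choice of k are illustrative assumptions, not values taken from the paper.

import numpy as np

rng = np.random.default_rng(0)

def weighted_knn_accuracy(mask, weights, X_train, y_train, X_val, y_val, k=1):
    # Accuracy of k-NN on the validation set using only the selected,
    # feature-weighted training instances (labels assumed to be small non-negative ints).
    if mask.sum() == 0:                 # degenerate candidate: nothing selected
        return 0.0
    Xs, ys = X_train[mask], y_train[mask]
    correct = 0
    for x, y in zip(X_val, y_val):
        d = np.sqrt((weights * (Xs - x) ** 2).sum(axis=1))   # weighted Euclidean distance
        nearest = ys[np.argsort(d)[:k]]
        correct += int(np.bincount(nearest).argmax() == y)   # majority vote
    return correct / len(y_val)

def evolve(X_train, y_train, X_val, y_val, pop_size=30, generations=50):
    n_inst, n_feat = X_train.shape
    # Individual = (instance-selection mask, feature weights, ES step sizes).
    pop = [(rng.random(n_inst) < 0.5,
            rng.random(n_feat),
            np.full(n_feat, 0.1)) for _ in range(pop_size)]
    def fitness(ind):
        return weighted_knn_accuracy(ind[0], ind[1], X_train, y_train, X_val, y_val)
    for _ in range(generations):
        # Truncation selection: keep the better half as parents.
        parents = sorted(pop, key=fitness, reverse=True)[: pop_size // 2]
        children = []
        while len(parents) + len(children) < pop_size:
            i, j = rng.choice(len(parents), size=2, replace=False)
            m1, w1, s1 = parents[i]
            m2, w2, s2 = parents[j]
            # GA part: uniform crossover plus bit-flip mutation on the mask.
            mask = np.where(rng.random(n_inst) < 0.5, m1, m2)
            mask = mask ^ (rng.random(n_inst) < 0.02)
            # ES part: self-adaptive Gaussian mutation of weights and step sizes.
            sigma = 0.5 * (s1 + s2) * np.exp(0.1 * rng.standard_normal(n_feat))
            weights = np.clip(0.5 * (w1 + w2)
                              + sigma * rng.standard_normal(n_feat), 0.0, 1.0)
            children.append((mask, weights, sigma))
        pop = parents + children
    best = max(pop, key=fitness)
    return best[0], best[1]             # instance mask and feature weights

# Hypothetical usage on a random toy dataset with integer class labels.
X = rng.random((120, 8))
y = (X[:, 0] + 0.1 * rng.standard_normal(120) > 0.5).astype(int)
mask, weights = evolve(X[:80], y[:80], X[80:], y[80:])

The returned mask and weight vector would then parameterize the final k-NN classifier used at prediction time.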
@inproceedings{ramirez-cruz2006instance,
 title = {Instance Selection and Feature Weighting Using Evolutionary Algorithms},
 type = {inproceedings},
 year = {2006},
 pages = {73--79},
 websites = {http://ieeexplore.ieee.org/document/4023791/},
 month = nov,
 publisher = {IEEE},
 abstract = {Machine learning algorithms are commonly used in real-world applications for solving complex problems where it is difficult to obtain a mathematical model. The goal of machine learning algorithms is to learn an objective function from a set of training examples, where each example is described by a set of features. Typically, real-world applications have many examples with many features; however, the objective function depends on only a few of them. The presence of noisy examples or irrelevant features in a dataset degrades the performance of machine learning algorithms; such is the case for the k-nearest neighbor algorithm (k-NN). Thus, choosing good instance and feature subsets may improve the algorithm's performance. Evolutionary algorithms have proved to be effective techniques for searching large solution spaces and are robust in the presence of noise. In this work, we address the problem of instance selection and feature weighting for instance-based methods by means of a genetic algorithm (GA) and evolution strategies (ES). We show that combining GA and ES with a k-NN algorithm can improve the predictive accuracy of the resulting classifier.},
 author = {Ramirez-Cruz, Jose-Federico and Fuentes, Olac and Alarcon-Aquino, Vicente and Garcia-Banuelos, Luciano},
 doi = {10.1109/CIC.2006.42},
 booktitle = {2006 15th International Conference on Computing}
}
